Compare commits
100 Commits
6ec5db2ea2
...
v0.4.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
14c956b6fa | ||
|
|
502a822bb4 | ||
|
|
6cc21b5f36 | ||
|
|
0034f8ca97 | ||
|
|
eedcbaed0a | ||
|
|
653ce7b417 | ||
|
|
b80c055826 | ||
|
|
c5fc6ac13d | ||
|
|
fea8ea4b70 | ||
|
|
1bead05d19 | ||
|
|
9fe2a842e9 | ||
|
|
849d03d054 | ||
|
|
3a87bbbba6 | ||
|
|
ab5e8e53e1 | ||
|
|
0ab2408444 | ||
|
|
bc1e0889e7 | ||
|
|
6dfbe1022a | ||
|
|
d3d2de8a0d | ||
|
|
0728ece4b8 | ||
|
|
02e375fbf2 | ||
|
|
14e6ce8458 | ||
|
|
d314b6024d | ||
|
|
d921629947 | ||
|
|
65490e2a7f | ||
|
|
6c5b518e4d | ||
|
|
e3c18f22d4 | ||
|
|
57185c7f10 | ||
|
|
1ff9ecd4b6 | ||
|
|
037388886e | ||
|
|
e614ca5d75 | ||
|
|
c0b3977ea6 | ||
|
|
d9639561ce | ||
|
|
cbf43e5d6c | ||
|
|
d6e885517d | ||
|
|
2593c95b5c | ||
|
|
8a8c43ecdf | ||
|
|
6170ac641c | ||
|
|
497c0e500b | ||
|
|
008c643272 | ||
|
|
c302b30e63 | ||
|
|
7926407534 | ||
|
|
0894ac2fab | ||
|
|
353759b983 | ||
|
|
454f5f0656 | ||
|
|
0e51d6337f | ||
|
|
a24b6dedaa | ||
|
|
8361fc536b | ||
|
|
4d4272e5e8 | ||
|
|
559869ca68 | ||
|
|
0e4fae538b | ||
|
|
12509cd4e2 | ||
|
|
2595bfe4f4 | ||
|
|
3df11be5e9 | ||
|
|
fc9d94aeee | ||
|
|
111df11199 | ||
|
|
f0d4c36bc3 | ||
|
|
ef0d6e9274 | ||
|
|
d05b01cfb2 | ||
|
|
9dc08d48e9 | ||
|
|
20bdfc0dd7 | ||
|
|
4365e083dc | ||
|
|
528915a235 | ||
|
|
9a980b5766 | ||
|
|
5eee7e1161 | ||
|
|
0a41998e29 | ||
|
|
ebdc23c3ce | ||
|
|
9611930949 | ||
|
|
609f93b783 | ||
|
|
52c6462fa8 | ||
|
|
358ef18f77 | ||
|
|
fc729a2414 | ||
|
|
0939a0c6c2 | ||
|
|
c384d54c12 | ||
|
|
71553aad32 | ||
|
|
d19e69990a | ||
|
|
be0f4b4c4e | ||
|
|
01b5fdb289 | ||
|
|
60ae58500a | ||
|
|
f9c8b8ac5e | ||
|
|
72c735c3ee | ||
|
|
5871ae30ad | ||
|
|
381a62046b | ||
|
|
52724ecc5b | ||
|
|
f288d445e4 | ||
|
|
d9db6da191 | ||
|
|
5443881ea1 | ||
|
|
8946b57456 | ||
|
|
686239491b | ||
|
|
126ba4487c | ||
|
|
447cda19ef | ||
|
|
f1ba913a98 | ||
|
|
59336aafb7 | ||
|
|
fd5ad3ed56 | ||
|
|
2d03a3bb10 | ||
|
|
4dc02d52a2 | ||
|
|
ed0cea9c26 | ||
|
|
15bfbdbe88 | ||
|
|
c354ba09ba | ||
| 2eeea08be0 | |||
| fbfc8ea965 |
19
.gitignore
vendored
19
.gitignore
vendored
@@ -1,4 +1,5 @@
|
|||||||
__pycache__
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
junk/
|
junk/
|
||||||
.vscode
|
.vscode
|
||||||
.ipynb_checkpoints/
|
.ipynb_checkpoints/
|
||||||
@@ -8,5 +9,19 @@ tools/ansible/inventory/cappuccino.yml
|
|||||||
tools/ansible/inventory/group_vars/all.yml
|
tools/ansible/inventory/group_vars/all.yml
|
||||||
ffx_test_report.log
|
ffx_test_report.log
|
||||||
bin/conversiontest.py
|
bin/conversiontest.py
|
||||||
*.egg-info/
|
|
||||||
|
|
||||||
|
tests/assets/
|
||||||
|
|
||||||
|
build/
|
||||||
|
dist/
|
||||||
|
*.egg-info/
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
.codex
|
||||||
|
|
||||||
|
|
||||||
|
*.mkv
|
||||||
|
*.webm
|
||||||
|
*.mp4
|
||||||
|
ffmpeg2pass-0.log
|
||||||
|
*.sup
|
||||||
181
README.md
181
README.md
@@ -1,48 +1,187 @@
|
|||||||
# FFX
|
# FFX
|
||||||
|
|
||||||
|
FFX is a local CLI and Textual TUI for inspecting TV episode files, storing normalization rules in SQLite, and converting outputs into a predictable stream, metadata, and filename layout.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
- Linux-like environment
|
||||||
|
- `python3`
|
||||||
|
- `ffmpeg`
|
||||||
|
- `ffprobe`
|
||||||
|
- `cpulimit`
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
per https:
|
FFX uses a two-step local setup flow.
|
||||||
|
|
||||||
|
### 1. Install The Bundle
|
||||||
|
|
||||||
|
This step creates or reuses the persistent bundle virtualenv in `~/.local/share/ffx.venv`, installs FFX into it, and ensures `ffx` is exposed through a shell alias.
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
pip install https://<URL>/<Releaser>/ffx.git@<Branch>
|
bash tools/setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
per git:
|
If you also want the Python packages needed for the modern test suite:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
pip install git+ssh://<Username>@<URL>/<Releaser>/ffx.git@<Branch>
|
bash tools/setup.sh --with-tests
|
||||||
```
|
```
|
||||||
|
|
||||||
## Version history
|
You can verify the bundle state without changing anything:
|
||||||
|
|
||||||
### 0.1.1
|
```sh
|
||||||
|
bash tools/setup.sh --check
|
||||||
|
```
|
||||||
|
|
||||||
Bugfixes, TMBD identify shows
|
### 2. Prepare System Dependencies And Local User Files
|
||||||
|
|
||||||
### 0.1.2
|
This step installs or verifies workstation dependencies and seeds local config and data directories. It is the step wrapped by the CLI command `ffx configure_workstation`.
|
||||||
|
|
||||||
Bugfixes
|
Run it directly:
|
||||||
|
|
||||||
### 0.1.3
|
```sh
|
||||||
|
bash tools/configure_workstation.sh
|
||||||
|
```
|
||||||
|
|
||||||
Subtitle file imports
|
Or through the installed CLI:
|
||||||
|
|
||||||
### 0.2.0
|
```sh
|
||||||
|
ffx configure_workstation
|
||||||
|
```
|
||||||
|
|
||||||
Tests, Config-File
|
Check-only mode is available in both forms:
|
||||||
|
|
||||||
### 0.2.1
|
```sh
|
||||||
|
bash tools/configure_workstation.sh --check
|
||||||
|
ffx configure_workstation --check
|
||||||
|
```
|
||||||
|
|
||||||
Signature, Tags cleaning, Bugfixes, Refactoring
|
`tools/configure_workstation.sh` does not manage the bundle virtualenv. Python-side test packages belong to `tools/setup.sh --with-tests`.
|
||||||
|
|
||||||
### 0.2.2
|
## Basic Usage
|
||||||
|
|
||||||
CLI-Overrides
|
Examples:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
ffx version
|
||||||
|
ffx inspect /path/to/episode.mkv
|
||||||
|
ffx convert /path/to/episode.mkv
|
||||||
|
ffx shows
|
||||||
|
```
|
||||||
|
|
||||||
|
## Modern Tests
|
||||||
|
|
||||||
|
Install Python test packages first:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/setup.sh --with-tests
|
||||||
|
```
|
||||||
|
|
||||||
|
Then run the modern automatically discovered test suite:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
./tools/test.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
This runner uses `pytest` and intentionally excludes the legacy harness under `tests/legacy/`.
|
||||||
|
|
||||||
|
## Default Local Paths
|
||||||
|
|
||||||
|
- Config: `~/.local/etc/ffx.json`
|
||||||
|
- Database: `~/.local/var/ffx/ffx.db`
|
||||||
|
- Log file: `~/.local/var/log/ffx.log`
|
||||||
|
- Bundle venv: `~/.local/share/ffx.venv`
|
||||||
|
|
||||||
|
## TMDB
|
||||||
|
|
||||||
|
TMDB-backed metadata enrichment requires `TMDB_API_KEY` to be set in the environment.
|
||||||
|
|
||||||
|
## Version History
|
||||||
|
|
||||||
|
### 0.4.1
|
||||||
|
|
||||||
|
- `convert` now supports `--copy-video` and `--copy-audio` to keep the selected stream type in copy mode without applying the corresponding reencode flags, filters, or formatting options
|
||||||
|
- ffmpeg conversions now monitor diagnostics while the process is running, retry unset AVI packet timestamps once with `-fflags +genpts`, and stop early when a file should be skipped instead of waiting for the full job to finish
|
||||||
|
- end-of-run convert summaries now list only ffmpeg findings that still require review, including named remedy identifiers such as `warn-corrupt-mpeg-audio`
|
||||||
|
- `upgrade` now finishes by reporting the installed FFX version together with the active bundle branch
|
||||||
|
|
||||||
|
### 0.3.1
|
||||||
|
|
||||||
|
- debug mode screen titles now append the active Textual screen class name, making screen-specific troubleshooting easier during inspect and edit flows
|
||||||
|
- `--cut` again works as a combined flag/option: omitted disables cutting, bare `--cut` applies the default `60,180`, and explicit duration or `START,DURATION` values stay supported
|
||||||
|
- H.265 unmux commands no longer force an invalid `-f h265` output format, keeping ffmpeg copy extraction aligned with the required Annex B bitstream filter
|
||||||
|
- H.264 encoding now falls back from `libx264` to `libopenh264` with a warning when needed, and the test fixtures use the same encoder fallback so the suite remains portable across ffmpeg builds
|
||||||
|
|
||||||
|
### 0.3.0
|
||||||
|
|
||||||
|
- inspect and edit screens now refresh nested track and pattern changes more reliably, with inspect-mode tables aligned to the target pattern view shown in the differences pane
|
||||||
|
- metadata editing got a follow-up polish pass with clearer ffmpeg notifications, a shared in-screen log pane, safer apply/reload handling, and expanded cleanup and normalization coverage
|
||||||
|
- track and asset probing recognize additional codecs, and the modern test suite now covers more metadata-editor, change-set, screen-state, and asset-probe behavior
|
||||||
|
- Textual now requires version `8.0` or newer to match the UI APIs used by the current screens
|
||||||
|
|
||||||
|
### 0.2.6
|
||||||
|
|
||||||
|
- DB-free `ffx edit` workflow for in-place metadata editing via temporary-file rewrite
|
||||||
|
- inspect and edit workflows split into dedicated Textual screens with shared media-workflow support
|
||||||
|
- Textual tables and row actions now separate raw data from rendered labels to avoid markup leaking into stored metadata
|
||||||
|
- responsive screen layout pass, `Esc` back handling, sortable show/inspect tables, and improved edit-screen notifications/toggles
|
||||||
|
- application-wide UTF-8 i18n catalogs with language precedence from CLI over config over system over German default
|
||||||
|
- metadata normalization extended for localized subtitle titles, ISO language cleanup, and smarter track editor language/title helpers
|
||||||
|
|
||||||
|
### 0.2.5
|
||||||
|
|
||||||
|
- show-level quality and notes fields
|
||||||
|
- pattern-over-show-over-default season-shift resolution with dynamic DB migration loading
|
||||||
|
- migration prompt now reports the upgrade path and creates an in-place DB backup before applying schema changes
|
||||||
|
- `upgrade --branch <name>` now fetches remote-only branches before switching
|
||||||
|
- `unmux` now applies season shifting to subtitle output filenames
|
||||||
|
- convert now keeps DB-defined target subtitle dispositions authoritative over sidecar filename disposition flags when a pattern definition exists
|
||||||
|
- focused modern tests added around migrations, unmux, upgrade, and subtitle-disposition import precedence
|
||||||
|
|
||||||
|
### 0.2.4
|
||||||
|
|
||||||
|
- lightweight CLI commands now stay import-light via lazy runtime loading
|
||||||
|
- setup/config templating moved to `assets/ffx.json.j2`
|
||||||
|
- aligned two-step local setup wrappers: `ffx setup` and `ffx configure_workstation`
|
||||||
|
- combined `ffprobe` payload reuse in `FileProperties`
|
||||||
|
- configurable crop-detect sampling plus per-process crop result caching
|
||||||
|
- single-query controller accessors and conditional DB schema bootstrap
|
||||||
|
- shared screen bootstrap/controller wiring for large detail screens
|
||||||
|
- configurable default season/episode digit lengths
|
||||||
|
- digit-aware `rename` and padded `unmux` filename markers
|
||||||
|
|
||||||
### 0.2.3
|
### 0.2.3
|
||||||
|
|
||||||
PyPi packaging
|
- PyPI packaging
|
||||||
Templating output filename
|
- output filename templating
|
||||||
Season shiftung
|
- season shifting
|
||||||
DB-Versionierung
|
- DB versioning
|
||||||
|
|
||||||
|
### 0.2.2
|
||||||
|
|
||||||
|
- CLI overrides
|
||||||
|
|
||||||
|
### 0.2.1
|
||||||
|
|
||||||
|
- signature handling
|
||||||
|
- tag cleanup
|
||||||
|
- bugfixes and refactoring
|
||||||
|
|
||||||
|
### 0.2.0
|
||||||
|
|
||||||
|
- tests
|
||||||
|
- config file
|
||||||
|
|
||||||
|
### 0.1.3
|
||||||
|
|
||||||
|
- subtitle file imports
|
||||||
|
|
||||||
|
### 0.1.2
|
||||||
|
|
||||||
|
- bugfixes
|
||||||
|
|
||||||
|
### 0.1.1
|
||||||
|
|
||||||
|
- bugfixes
|
||||||
|
- TMDB show identification
|
||||||
|
|||||||
37
assets/ffx.json.j2
Normal file
37
assets/ffx.json.j2
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"language": {{ language_json }},
|
||||||
|
"databasePath": {{ database_path_json }},
|
||||||
|
"logDirectory": {{ log_directory_json }},
|
||||||
|
"subtitlesDirectory": {{ subtitles_directory_json }},
|
||||||
|
"defaultIndexSeasonDigits": {{ default_index_season_digits }},
|
||||||
|
"defaultIndexEpisodeDigits": {{ default_index_episode_digits }},
|
||||||
|
"defaultIndicatorSeasonDigits": {{ default_indicator_season_digits }},
|
||||||
|
"defaultIndicatorEpisodeDigits": {{ default_indicator_episode_digits }},
|
||||||
|
"metadata": {
|
||||||
|
"signature": {
|
||||||
|
"RECODED_WITH": "FFX"
|
||||||
|
},
|
||||||
|
"remove": [
|
||||||
|
"VERSION-eng",
|
||||||
|
"creation_time",
|
||||||
|
"NAME"
|
||||||
|
],
|
||||||
|
"streams": {
|
||||||
|
"remove": [
|
||||||
|
"BPS",
|
||||||
|
"NUMBER_OF_FRAMES",
|
||||||
|
"NUMBER_OF_BYTES",
|
||||||
|
"_STATISTICS_WRITING_APP",
|
||||||
|
"_STATISTICS_WRITING_DATE_UTC",
|
||||||
|
"_STATISTICS_TAGS",
|
||||||
|
"BPS-eng",
|
||||||
|
"DURATION-eng",
|
||||||
|
"NUMBER_OF_FRAMES-eng",
|
||||||
|
"NUMBER_OF_BYTES-eng",
|
||||||
|
"_STATISTICS_WRITING_APP-eng",
|
||||||
|
"_STATISTICS_WRITING_DATE_UTC-eng",
|
||||||
|
"_STATISTICS_TAGS-eng"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/de.json
Normal file
361
assets/i18n/de.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abchasisch",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanisch",
|
||||||
|
"AMHARIC": "Amharisch",
|
||||||
|
"ARABIC": "Arabisch",
|
||||||
|
"ARAGONESE": "Aragonesisch",
|
||||||
|
"ARMENIAN": "Armenisch",
|
||||||
|
"ASSAMESE": "Assamesisch",
|
||||||
|
"AVARIC": "Awarisch",
|
||||||
|
"AVESTAN": "Avestisch",
|
||||||
|
"AYMARA": "Aymara",
|
||||||
|
"AZERBAIJANI": "Aserbaidschanisch",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Baschkirisch",
|
||||||
|
"BASQUE": "Baskisch",
|
||||||
|
"BELARUSIAN": "Weißrussisch",
|
||||||
|
"BENGALI": "Bengalisch",
|
||||||
|
"BISLAMA": "Bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnisch",
|
||||||
|
"BRETON": "Bretonisch",
|
||||||
|
"BULGARIAN": "Bulgarisch",
|
||||||
|
"BURMESE": "Burmesisch",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Tschetschenisch",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinesisch",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Tschuwaschisch",
|
||||||
|
"CORNISH": "Kornisch",
|
||||||
|
"CORSICAN": "Korsisch",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Kroatisch",
|
||||||
|
"CZECH": "Tschechisch",
|
||||||
|
"DANISH": "Dänisch",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Dzongkha",
|
||||||
|
"ENGLISH": "Englisch",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estnisch",
|
||||||
|
"EWE": "Ewe-Sprache",
|
||||||
|
"FAROESE": "Färöisch",
|
||||||
|
"FIJIAN": "Fidschianisch",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finnisch",
|
||||||
|
"FRENCH": "Französisch",
|
||||||
|
"FULAH": "Ful",
|
||||||
|
"GALICIAN": "Galizisch",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Georgisch",
|
||||||
|
"GERMAN": "Deutsch",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guaraní",
|
||||||
|
"GUJARATI": "Gujarati",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Haussa",
|
||||||
|
"HEBREW": "Hebräisch",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri-Motu",
|
||||||
|
"HUNGARIAN": "Ungarisch",
|
||||||
|
"ICELANDIC": "Isländisch",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Ibo",
|
||||||
|
"INDONESIAN": "Indonesisch",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaq",
|
||||||
|
"IRISH": "Irisch",
|
||||||
|
"ITALIAN": "Italienisch",
|
||||||
|
"JAPANESE": "Japanisch",
|
||||||
|
"JAVANESE": "Javanisch",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannada",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "Kaschmirisch",
|
||||||
|
"KAZAKH": "Kasachisch",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Koreanisch",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdisch",
|
||||||
|
"LAO": "Laotisch",
|
||||||
|
"LATIN": "Lateinisch",
|
||||||
|
"LATVIAN": "Lettisch",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Litauisch",
|
||||||
|
"LUBA_KATANGA": "Luba-Katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Makedonisch",
|
||||||
|
"MALAGASY": "Malagasi",
|
||||||
|
"MALAY": "Malaiisch",
|
||||||
|
"MALAYALAM": "Malayalam",
|
||||||
|
"MALTESE": "Maltesisch",
|
||||||
|
"MANX": "Manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathi",
|
||||||
|
"MARSHALLESE": "Marschallesisch",
|
||||||
|
"MONGOLIAN": "Mongolisch",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepali",
|
||||||
|
"NORTHERN_SAMI": "Nord-Samisch",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norwegisch",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persisch",
|
||||||
|
"POLISH": "Polnisch",
|
||||||
|
"PORTUGUESE": "Portugiesisch",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Bündnerromanisch",
|
||||||
|
"RUNDI": "Kirundi",
|
||||||
|
"RUSSIAN": "Russisch",
|
||||||
|
"SAMOAN": "Samoanisch",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sanskrit",
|
||||||
|
"SARDINIAN": "Sardisch",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbisch",
|
||||||
|
"SHONA": "Schona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slowakisch",
|
||||||
|
"SLOVENIAN": "Slowenisch",
|
||||||
|
"SOMALI": "Somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sundanesisch",
|
||||||
|
"SWAHILI": "Suaheli; Swaheli",
|
||||||
|
"SWATI": "Swazi",
|
||||||
|
"SWEDISH": "Schwedisch",
|
||||||
|
"TAGALOG": "Tagalog",
|
||||||
|
"TAHITIAN": "Tahitisch",
|
||||||
|
"TAJIK": "Tadschikisch",
|
||||||
|
"TAMIL": "Tamilisch",
|
||||||
|
"TATAR": "Tatarisch",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Thai",
|
||||||
|
"TIBETAN": "Tibetisch",
|
||||||
|
"TIGRINYA": "Tigrinja",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Tswana",
|
||||||
|
"TURKISH": "Türkisch",
|
||||||
|
"TURKMEN": "Turkmenisch",
|
||||||
|
"TWI": "Twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainisch",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdu",
|
||||||
|
"UZBEK": "Usbekisch",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamesisch",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Wallonisch",
|
||||||
|
"WELSH": "Walisisch",
|
||||||
|
"WESTERN_FRISIAN": "Westfriesisch",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Jiddisch",
|
||||||
|
"YORUBA": "Joruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Neue Serie>",
|
||||||
|
"Add": "Hinzufügen",
|
||||||
|
"Add Pattern": "Muster hinzufügen",
|
||||||
|
"Apply": "Anwenden",
|
||||||
|
"Apply failed: {error}": "Anwenden fehlgeschlagen: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Möchtest du das folgende Dateinamensmuster wirklich löschen?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Möchtest du die folgende verschobene Staffel wirklich löschen?",
|
||||||
|
"Are you sure to delete the following show?": "Möchtest du die folgende Serie wirklich löschen?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Möchtest du den folgenden {track_type}-Stream wirklich löschen?",
|
||||||
|
"Are you sure to delete this tag?": "Möchtest du dieses Tag wirklich löschen?",
|
||||||
|
"Audio Layout": "Audiolayout",
|
||||||
|
"Back": "Zurück",
|
||||||
|
"Cancel": "Abbrechen",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Es kann kein weiterer Stream mit gesetztem Dispositions-Flag 'default' oder 'forced' hinzugefügt werden",
|
||||||
|
"Changes applied and file reloaded.": "Änderungen angewendet und Datei neu geladen.",
|
||||||
|
"Cleanup": "Bereinigen",
|
||||||
|
"Cleanup disabled.": "Bereinigung deaktiviert.",
|
||||||
|
"Cleanup enabled.": "Bereinigung aktiviert.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Bearbeitung wird fortgesetzt.",
|
||||||
|
"Default": "Standard",
|
||||||
|
"Delete": "Löschen",
|
||||||
|
"Delete Show": "Serie löschen",
|
||||||
|
"Deleted media tag {tag!r}.": "Medien-Tag {tag!r} gelöscht.",
|
||||||
|
"Differences": "Unterschiede",
|
||||||
|
"Differences (file->db/output)": "Unterschiede (Datei->DB/Ausgabe)",
|
||||||
|
"Discard": "Verwerfen",
|
||||||
|
"Discard pending metadata changes and quit?": "Ausstehende Metadatenänderungen verwerfen und beenden?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Ausstehende Metadatenänderungen verwerfen und Dateistand neu laden?",
|
||||||
|
"Down": "Runter",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Trockenlauf: würde über temporäre Datei {target_path} neu schreiben",
|
||||||
|
"Edit": "Bearbeiten",
|
||||||
|
"Edit Pattern": "Muster bearbeiten",
|
||||||
|
"Edit Show": "Serie bearbeiten",
|
||||||
|
"Edit filename pattern": "Dateinamensmuster bearbeiten",
|
||||||
|
"Edit shifted season": "Verschobene Staffel bearbeiten",
|
||||||
|
"Edit stream": "Stream bearbeiten",
|
||||||
|
"Episode Offset": "Episodenoffset",
|
||||||
|
"Episode offset": "Episodenoffset",
|
||||||
|
"File": "Datei",
|
||||||
|
"File patterns": "Datei-Namensmuster",
|
||||||
|
"First Episode": "Erste Episode",
|
||||||
|
"First episode": "Erste Episode",
|
||||||
|
"Forced": "Erzwungen",
|
||||||
|
"Help": "Hilfe",
|
||||||
|
"Help Screen": "Hilfe-Bildschirm",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identifizieren",
|
||||||
|
"Index": "Index",
|
||||||
|
"Index / Subindex": "Index / Unterindex",
|
||||||
|
"Index Episode Digits": "Ep. Index Stellen",
|
||||||
|
"Index Season Digits": "Sta. Index Stellen",
|
||||||
|
"Indicator Edisode Digits": "Ep. Indikator Stellen",
|
||||||
|
"Indicator Season Digits": "Sta. Indikator Stellen",
|
||||||
|
"Keep Editing": "Weiter bearbeiten",
|
||||||
|
"Keeping pending changes.": "Ausstehende Änderungen bleiben erhalten.",
|
||||||
|
"Key": "Schlüssel",
|
||||||
|
"Language": "Sprache",
|
||||||
|
"Last Episode": "Letzte Episode",
|
||||||
|
"Last episode": "Letzte Episode",
|
||||||
|
"Layout": "Layout",
|
||||||
|
"Media Tags": "Medien-Tags",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Mehr als ein Standard-Audiostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Mehr als ein Standard-Audiostream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Mehr als ein Standard-Untertitelstream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Mehr als ein Standard-Untertitelstream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one default video stream detected and no prompt set": "Mehr als ein Standard-Videostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one default video stream detected! Please select stream": "Mehr als ein Standard-Videostream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Mehr als ein erzwungener Audiostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Mehr als ein erzwungener Audiostream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Mehr als ein erzwungener Untertitelstream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Mehr als ein erzwungener Untertitelstream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Mehr als ein erzwungener Videostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Mehr als ein erzwungener Videostream erkannt! Bitte Stream auswählen",
|
||||||
|
"Name": "Name",
|
||||||
|
"New Pattern": "Neues Muster",
|
||||||
|
"New Show": "Neue Serie",
|
||||||
|
"New filename pattern": "Neues Dateinamensmuster",
|
||||||
|
"New shifted season": "Neue verschobene Staffel",
|
||||||
|
"New stream": "Neuer Stream",
|
||||||
|
"No": "Nein",
|
||||||
|
"No changes to apply.": "Keine Änderungen zum Anwenden.",
|
||||||
|
"No changes to revert.": "Keine Änderungen zum Zurücksetzen.",
|
||||||
|
"Normalization disabled.": "Normalisierung deaktiviert.",
|
||||||
|
"Normalization enabled.": "Normalisierung aktiviert.",
|
||||||
|
"Normalize": "Normalisieren",
|
||||||
|
"Notes": "Notizen",
|
||||||
|
"Pattern": "Muster",
|
||||||
|
"Planned Changes (file->edited output)": "Geplante Änderungen (Datei->bearbeitete Ausgabe)",
|
||||||
|
"Quality": "Qualität",
|
||||||
|
"Quit": "Beenden",
|
||||||
|
"Remove Pattern": "Muster entfernen",
|
||||||
|
"Revert": "Zurücksetzen",
|
||||||
|
"Reverted pending changes.": "Ausstehende Änderungen verworfen.",
|
||||||
|
"Save": "Speichern",
|
||||||
|
"Season Offset": "Staffeloffset",
|
||||||
|
"Select a stream first.": "Bitte zuerst einen Stream auswählen.",
|
||||||
|
"Set Default": "Als Standard setzen",
|
||||||
|
"Set Forced": "Als erzwungen setzen",
|
||||||
|
"Settings Screen": "Einstellungsbildschirm",
|
||||||
|
"Numbering Mapping": "Abbildung Nummerierung",
|
||||||
|
"Show": "Serie",
|
||||||
|
"Shows": "Serien",
|
||||||
|
"Source Season": "Quellstaffel",
|
||||||
|
"SrcIndex": "QuellIndex",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Bleiben",
|
||||||
|
"Stream dispositions": "Stream-Dispositionen",
|
||||||
|
"Stream tags": "Stream-Tags",
|
||||||
|
"Streams": "Streams",
|
||||||
|
"SubIndex": "Unterindex",
|
||||||
|
"Substitute": "Ersetzen",
|
||||||
|
"Substitute pattern": "Muster ersetzen",
|
||||||
|
"Title": "Titel",
|
||||||
|
"Type": "Typ",
|
||||||
|
"Unable to update selected stream.": "Ausgewählten Stream konnte nicht aktualisiert werden.",
|
||||||
|
"Up": "Hoch",
|
||||||
|
"Update Pattern": "Muster aktualisieren",
|
||||||
|
"Updated media tag {tag!r}.": "Medien-Tag {tag!r} aktualisiert.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Stream #{index} ({track_type}) aktualisiert.",
|
||||||
|
"Value": "Wert",
|
||||||
|
"Year": "Jahr",
|
||||||
|
"Yes": "Ja",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "Medien-Tag hinzufügen: Schlüssel='{key}' Wert='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "{track_type}-Stream hinzufügen: Index={index} Sprache={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "Anhang",
|
||||||
|
"audio": "Audio",
|
||||||
|
"captions": "Untertitel",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "Medien-Tag ändern: Schlüssel='{key}' Wert='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "Stream #{index} ({track_type}:{sub_index}) Disposition hinzufügen={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "Stream #{index} ({track_type}:{sub_index}) Schlüssel hinzufügen={key} Wert={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "Stream #{index} ({track_type}:{sub_index}) Schlüssel ändern={key} Wert={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "Stream #{index} ({track_type}:{sub_index}) Disposition entfernen={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "Stream #{index} ({track_type}:{sub_index}) Schlüssel entfernen={key} Wert={value}",
|
||||||
|
"clean_effects": "Nur Effekte",
|
||||||
|
"comment": "Kommentar",
|
||||||
|
"default": "Standard",
|
||||||
|
"dependent": "abhängig",
|
||||||
|
"descriptions": "Beschreibungen",
|
||||||
|
"dub": "Synchronisiert",
|
||||||
|
"for pattern": "für Muster",
|
||||||
|
"forced": "erzwungen",
|
||||||
|
"from": "von",
|
||||||
|
"from pattern": "aus Muster",
|
||||||
|
"from show": "aus Serie",
|
||||||
|
"hearing_impaired": "hörgeschädigt",
|
||||||
|
"karaoke": "Karaoke",
|
||||||
|
"lyrics": "Liedtext",
|
||||||
|
"metadata": "Metadaten",
|
||||||
|
"non_diegetic": "nicht-diegetisch",
|
||||||
|
"original": "Original",
|
||||||
|
"pattern #{id}": "Muster #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "Medien-Tag entfernen: Schlüssel='{key}' Wert='{value}'",
|
||||||
|
"remove stream #{index}": "Stream #{index} entfernen",
|
||||||
|
"show #{id}": "Serie #{id}",
|
||||||
|
"stereo": "Stereo",
|
||||||
|
"still_image": "Standbild",
|
||||||
|
"sub index": "Unterindex",
|
||||||
|
"subtitle": "Untertitel",
|
||||||
|
"timed_thumbnails": "zeitgesteuerte Vorschaubilder",
|
||||||
|
"undefined": "undefiniert",
|
||||||
|
"unknown": "unbekannt",
|
||||||
|
"video": "Video",
|
||||||
|
"visual_impaired": "sehgeschädigt"
|
||||||
|
}
|
||||||
|
}
|
||||||
360
assets/i18n/en.json
Normal file
360
assets/i18n/en.json
Normal file
@@ -0,0 +1,360 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abkhazian",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanian",
|
||||||
|
"AMHARIC": "Amharic",
|
||||||
|
"ARABIC": "Arabic",
|
||||||
|
"ARAGONESE": "Aragonese",
|
||||||
|
"ARMENIAN": "Armenian",
|
||||||
|
"ASSAMESE": "Assamese",
|
||||||
|
"AVARIC": "Avaric",
|
||||||
|
"AVESTAN": "Avestan",
|
||||||
|
"AYMARA": "Aymara",
|
||||||
|
"AZERBAIJANI": "Azerbaijani",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Bashkir",
|
||||||
|
"BASQUE": "Basque",
|
||||||
|
"BELARUSIAN": "Belarusian",
|
||||||
|
"BENGALI": "Bengali",
|
||||||
|
"BISLAMA": "Bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnian",
|
||||||
|
"BRETON": "Breton",
|
||||||
|
"BULGARIAN": "Bulgarian",
|
||||||
|
"BURMESE": "Burmese",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Chechen",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinese",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Chuvash",
|
||||||
|
"CORNISH": "Cornish",
|
||||||
|
"CORSICAN": "Corsican",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Croatian",
|
||||||
|
"CZECH": "Czech",
|
||||||
|
"DANISH": "Danish",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Dzongkha",
|
||||||
|
"ENGLISH": "English",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estonian",
|
||||||
|
"EWE": "Ewe",
|
||||||
|
"FAROESE": "Faroese",
|
||||||
|
"FIJIAN": "Fijian",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finnish",
|
||||||
|
"FRENCH": "French",
|
||||||
|
"FULAH": "Fulah",
|
||||||
|
"GALICIAN": "Galician",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Georgian",
|
||||||
|
"GERMAN": "German",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "Gujarati",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Hausa",
|
||||||
|
"HEBREW": "Hebrew",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Hungarian",
|
||||||
|
"ICELANDIC": "Icelandic",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igbo",
|
||||||
|
"INDONESIAN": "Indonesian",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaq",
|
||||||
|
"IRISH": "Irish",
|
||||||
|
"ITALIAN": "Italian",
|
||||||
|
"JAPANESE": "Japanese",
|
||||||
|
"JAVANESE": "Javanese",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannada",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "Kashmiri",
|
||||||
|
"KAZAKH": "Kazakh",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Korean",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdish",
|
||||||
|
"LAO": "Lao",
|
||||||
|
"LATIN": "Latin",
|
||||||
|
"LATVIAN": "Latvian",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lithuanian",
|
||||||
|
"LUBA_KATANGA": "Luba-Katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macedonian",
|
||||||
|
"MALAGASY": "Malagasy",
|
||||||
|
"MALAY": "Malay",
|
||||||
|
"MALAYALAM": "Malayalam",
|
||||||
|
"MALTESE": "Maltese",
|
||||||
|
"MANX": "Manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathi",
|
||||||
|
"MARSHALLESE": "Marshallese",
|
||||||
|
"MONGOLIAN": "Mongolian",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepali",
|
||||||
|
"NORTHERN_SAMI": "Northern Sami",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norwegian",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persian",
|
||||||
|
"POLISH": "Polish",
|
||||||
|
"PORTUGUESE": "Portuguese",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romansh",
|
||||||
|
"RUNDI": "Rundi",
|
||||||
|
"RUSSIAN": "Russian",
|
||||||
|
"SAMOAN": "Samoan",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sanskrit",
|
||||||
|
"SARDINIAN": "Sardinian",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbian",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovak",
|
||||||
|
"SLOVENIAN": "Slovenian",
|
||||||
|
"SOMALI": "Somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sundanese",
|
||||||
|
"SWAHILI": "Swahili",
|
||||||
|
"SWATI": "Swati",
|
||||||
|
"SWEDISH": "Swedish",
|
||||||
|
"TAGALOG": "Tagalog",
|
||||||
|
"TAHITIAN": "Tahitian",
|
||||||
|
"TAJIK": "Tajik",
|
||||||
|
"TAMIL": "Tamil",
|
||||||
|
"TATAR": "Tatar",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Thai",
|
||||||
|
"TIBETAN": "Tibetan",
|
||||||
|
"TIGRINYA": "Tigrinya",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Tswana",
|
||||||
|
"TURKISH": "Turkish",
|
||||||
|
"TURKMEN": "Turkmen",
|
||||||
|
"TWI": "Twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainian",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdu",
|
||||||
|
"UZBEK": "Uzbek",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamese",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Walloon",
|
||||||
|
"WELSH": "Welsh",
|
||||||
|
"WESTERN_FRISIAN": "Western Frisian",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Yiddish",
|
||||||
|
"YORUBA": "Yoruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<New show>",
|
||||||
|
"Add": "Add",
|
||||||
|
"Add Pattern": "Add Pattern",
|
||||||
|
"Apply": "Apply",
|
||||||
|
"Apply failed: {error}": "Apply failed: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Are you sure to delete the following filename pattern?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Are you sure to delete the following shifted season?",
|
||||||
|
"Are you sure to delete the following show?": "Are you sure to delete the following show?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Are you sure to delete the following {track_type} track?",
|
||||||
|
"Are you sure to delete this tag?": "Are you sure to delete this tag?",
|
||||||
|
"Audio Layout": "Audio Layout",
|
||||||
|
"Back": "Back",
|
||||||
|
"Cancel": "Cancel",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Cannot add another stream with disposition flag 'default' or 'forced' set",
|
||||||
|
"Changes applied and file reloaded.": "Changes applied and file reloaded.",
|
||||||
|
"Cleanup": "Cleanup",
|
||||||
|
"Cleanup disabled.": "Cleanup disabled.",
|
||||||
|
"Cleanup enabled.": "Cleanup enabled.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Continuing edit session.",
|
||||||
|
"Default": "Default",
|
||||||
|
"Delete": "Delete",
|
||||||
|
"Delete Show": "Delete Show",
|
||||||
|
"Deleted media tag {tag!r}.": "Deleted media tag {tag!r}.",
|
||||||
|
"Differences": "Differences",
|
||||||
|
"Differences (file->db/output)": "Differences (file->db/output)",
|
||||||
|
"Discard": "Discard",
|
||||||
|
"Discard pending metadata changes and quit?": "Discard pending metadata changes and quit?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Discard pending metadata changes and reload the file state?",
|
||||||
|
"Down": "Down",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Dry-run: would rewrite via temporary file {target_path}",
|
||||||
|
"Edit": "Edit",
|
||||||
|
"Edit Pattern": "Edit Pattern",
|
||||||
|
"Edit Show": "Edit Show",
|
||||||
|
"Edit filename pattern": "Edit filename pattern",
|
||||||
|
"Edit shifted season": "Edit shifted season",
|
||||||
|
"Edit stream": "Edit stream",
|
||||||
|
"Episode Offset": "Episode Offset",
|
||||||
|
"Episode offset": "Episode offset",
|
||||||
|
"File": "File",
|
||||||
|
"File patterns": "File patterns",
|
||||||
|
"First Episode": "First Episode",
|
||||||
|
"First episode": "First episode",
|
||||||
|
"Forced": "Forced",
|
||||||
|
"Help": "Help",
|
||||||
|
"Help Screen": "Help Screen",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identify",
|
||||||
|
"Index": "Index",
|
||||||
|
"Index / Subindex": "Index / Subindex",
|
||||||
|
"Index Episode Digits": "Index Episode Digits",
|
||||||
|
"Index Season Digits": "Index Season Digits",
|
||||||
|
"Indicator Edisode Digits": "Indicator Episode Digits",
|
||||||
|
"Indicator Season Digits": "Indicator Season Digits",
|
||||||
|
"Keep Editing": "Keep Editing",
|
||||||
|
"Keeping pending changes.": "Keeping pending changes.",
|
||||||
|
"Key": "Key",
|
||||||
|
"Language": "Language",
|
||||||
|
"Last Episode": "Last Episode",
|
||||||
|
"Last episode": "Last episode",
|
||||||
|
"Layout": "Layout",
|
||||||
|
"Media Tags": "Media Tags",
|
||||||
|
"More than one default audio stream detected and no prompt set": "More than one default audio stream detected and no prompt set",
|
||||||
|
"More than one default audio stream detected! Please select stream": "More than one default audio stream detected! Please select stream",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "More than one default subtitle stream detected and no prompt set",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "More than one default subtitle stream detected! Please select stream",
|
||||||
|
"More than one default video stream detected and no prompt set": "More than one default video stream detected and no prompt set",
|
||||||
|
"More than one default video stream detected! Please select stream": "More than one default video stream detected! Please select stream",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "More than one forced audio stream detected and no prompt set",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "More than one forced audio stream detected! Please select stream",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "More than one forced subtitle stream detected and no prompt set",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "More than one forced subtitle stream detected! Please select stream",
|
||||||
|
"More than one forced video stream detected and no prompt set": "More than one forced video stream detected and no prompt set",
|
||||||
|
"More than one forced video stream detected! Please select stream": "More than one forced video stream detected! Please select stream",
|
||||||
|
"Name": "Name",
|
||||||
|
"New Pattern": "New Pattern",
|
||||||
|
"New Show": "New Show",
|
||||||
|
"New filename pattern": "New filename pattern",
|
||||||
|
"New shifted season": "New shifted season",
|
||||||
|
"New stream": "New stream",
|
||||||
|
"No": "No",
|
||||||
|
"No changes to apply.": "No changes to apply.",
|
||||||
|
"No changes to revert.": "No changes to revert.",
|
||||||
|
"Normalization disabled.": "Normalization disabled.",
|
||||||
|
"Normalization enabled.": "Normalization enabled.",
|
||||||
|
"Normalize": "Normalize",
|
||||||
|
"Notes": "Notes",
|
||||||
|
"Pattern": "Pattern",
|
||||||
|
"Planned Changes (file->edited output)": "Planned Changes (file->edited output)",
|
||||||
|
"Quality": "Quality",
|
||||||
|
"Quit": "Quit",
|
||||||
|
"Remove Pattern": "Remove Pattern",
|
||||||
|
"Revert": "Revert",
|
||||||
|
"Reverted pending changes.": "Reverted pending changes.",
|
||||||
|
"Save": "Save",
|
||||||
|
"Season Offset": "Season Offset",
|
||||||
|
"Select a stream first.": "Select a stream first.",
|
||||||
|
"Set Default": "Set Default",
|
||||||
|
"Set Forced": "Set Forced",
|
||||||
|
"Settings Screen": "Settings Screen",
|
||||||
|
"Numbering Mapping": "Numbering Mapping",
|
||||||
|
"Show": "Show",
|
||||||
|
"Shows": "Shows",
|
||||||
|
"SrcIndex": "SrcIndex",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Stay",
|
||||||
|
"Stream dispositions": "Stream dispositions",
|
||||||
|
"Stream tags": "Stream tags",
|
||||||
|
"Streams": "Streams",
|
||||||
|
"SubIndex": "SubIndex",
|
||||||
|
"Substitute": "Substitute",
|
||||||
|
"Substitute pattern": "Substitute pattern",
|
||||||
|
"Title": "Title",
|
||||||
|
"Type": "Type",
|
||||||
|
"Unable to update selected stream.": "Unable to update selected stream.",
|
||||||
|
"Up": "Up",
|
||||||
|
"Update Pattern": "Update Pattern",
|
||||||
|
"Updated media tag {tag!r}.": "Updated media tag {tag!r}.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Updated stream #{index} ({track_type}).",
|
||||||
|
"Value": "Value",
|
||||||
|
"Year": "Year",
|
||||||
|
"Yes": "Yes",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "add media tag: key='{key}' value='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "add {track_type} track: index={index} lang={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "attachment",
|
||||||
|
"audio": "audio",
|
||||||
|
"captions": "captions",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "change media tag: key='{key}' value='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}",
|
||||||
|
"clean_effects": "clean_effects",
|
||||||
|
"comment": "comment",
|
||||||
|
"default": "default",
|
||||||
|
"dependent": "dependent",
|
||||||
|
"descriptions": "descriptions",
|
||||||
|
"dub": "dub",
|
||||||
|
"for pattern": "for pattern",
|
||||||
|
"forced": "forced",
|
||||||
|
"from": "from",
|
||||||
|
"from pattern": "from pattern",
|
||||||
|
"from show": "from show",
|
||||||
|
"hearing_impaired": "hearing_impaired",
|
||||||
|
"karaoke": "karaoke",
|
||||||
|
"lyrics": "lyrics",
|
||||||
|
"metadata": "metadata",
|
||||||
|
"non_diegetic": "non_diegetic",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "pattern #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "remove media tag: key='{key}' value='{value}'",
|
||||||
|
"remove stream #{index}": "remove stream #{index}",
|
||||||
|
"show #{id}": "show #{id}",
|
||||||
|
"stereo": "stereo",
|
||||||
|
"still_image": "still_image",
|
||||||
|
"sub index": "sub index",
|
||||||
|
"subtitle": "subtitle",
|
||||||
|
"timed_thumbnails": "timed_thumbnails",
|
||||||
|
"undefined": "undefined",
|
||||||
|
"unknown": "unknown",
|
||||||
|
"video": "video",
|
||||||
|
"visual_impaired": "visual_impaired"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/eo.json
Normal file
361
assets/i18n/eo.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abĥaza",
|
||||||
|
"AFAR": "Afara",
|
||||||
|
"AFRIKAANS": "Afrikansa",
|
||||||
|
"AKAN": "Akana",
|
||||||
|
"ALBANIAN": "Albana",
|
||||||
|
"AMHARIC": "Amhara",
|
||||||
|
"ARABIC": "Araba",
|
||||||
|
"ARAGONESE": "Aragona",
|
||||||
|
"ARMENIAN": "Armena",
|
||||||
|
"ASSAMESE": "Asama",
|
||||||
|
"AVARIC": "Avara",
|
||||||
|
"AVESTAN": "Avesta",
|
||||||
|
"AYMARA": "Ajmara",
|
||||||
|
"AZERBAIJANI": "Azerbajĝana",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Baŝkira",
|
||||||
|
"BASQUE": "Eŭska",
|
||||||
|
"BELARUSIAN": "Belorusa",
|
||||||
|
"BENGALI": "Bengala",
|
||||||
|
"BISLAMA": "Bislamo",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosna",
|
||||||
|
"BRETON": "Bretona",
|
||||||
|
"BULGARIAN": "Bulgara",
|
||||||
|
"BURMESE": "Birma",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Ĉamora",
|
||||||
|
"CHECHEN": "Ĉeĉena",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Ĉina",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Ĉuvaŝa",
|
||||||
|
"CORNISH": "Kornvala",
|
||||||
|
"CORSICAN": "Korsika",
|
||||||
|
"CREE": "Kria",
|
||||||
|
"CROATIAN": "Kroata",
|
||||||
|
"CZECH": "Ĉeĥa",
|
||||||
|
"DANISH": "Dana",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Dzonka",
|
||||||
|
"ENGLISH": "Angla",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estona",
|
||||||
|
"EWE": "Evea",
|
||||||
|
"FAROESE": "Feroa",
|
||||||
|
"FIJIAN": "Fiĝia",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finna",
|
||||||
|
"FRENCH": "Franca",
|
||||||
|
"FULAH": "Fula",
|
||||||
|
"GALICIAN": "Galega",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Kartvela",
|
||||||
|
"GERMAN": "Germana",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Gvarania",
|
||||||
|
"GUJARATI": "Guĝarata",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Haŭsa",
|
||||||
|
"HEBREW": "Hebrea",
|
||||||
|
"HERERO": "Herera",
|
||||||
|
"HINDI": "Hindia",
|
||||||
|
"HIRI_MOTU": "Hirimotua",
|
||||||
|
"HUNGARIAN": "Hungara",
|
||||||
|
"ICELANDIC": "Islanda",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igba",
|
||||||
|
"INDONESIAN": "Indonezia",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktituta",
|
||||||
|
"INUPIAQ": "Inupiaka",
|
||||||
|
"IRISH": "Irlanda",
|
||||||
|
"ITALIAN": "Itala",
|
||||||
|
"JAPANESE": "Japana",
|
||||||
|
"JAVANESE": "Java",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kanara",
|
||||||
|
"KANURI": "Kanura",
|
||||||
|
"KASHMIRI": "Kaŝmira",
|
||||||
|
"KAZAKH": "Kazaĥa",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Ruanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komia",
|
||||||
|
"KONGO": "Konga",
|
||||||
|
"KOREAN": "Korea",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurda",
|
||||||
|
"LAO": "Laosa",
|
||||||
|
"LATIN": "Latina",
|
||||||
|
"LATVIAN": "Latva",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Litova",
|
||||||
|
"LUBA_KATANGA": "Luba-katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Makedona",
|
||||||
|
"MALAGASY": "Malagasa",
|
||||||
|
"MALAY": "Malaja",
|
||||||
|
"MALAYALAM": "Malajala",
|
||||||
|
"MALTESE": "Malta",
|
||||||
|
"MANX": "Manksa",
|
||||||
|
"MAORI": "Maoria",
|
||||||
|
"MARATHI": "Marata",
|
||||||
|
"MARSHALLESE": "Marŝala",
|
||||||
|
"MONGOLIAN": "Mongola",
|
||||||
|
"NAURU": "Naura",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepala",
|
||||||
|
"NORTHERN_SAMI": "Norda samea",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norvega",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Oĝibva",
|
||||||
|
"ORIYA": "Orija",
|
||||||
|
"OROMO": "Oroma",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Palia",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persa",
|
||||||
|
"POLISH": "Pola",
|
||||||
|
"PORTUGUESE": "Portugala",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Keĉua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romanĉa",
|
||||||
|
"RUNDI": "Burunda",
|
||||||
|
"RUSSIAN": "Rusa",
|
||||||
|
"SAMOAN": "Samoa",
|
||||||
|
"SANGO": "Sangoa",
|
||||||
|
"SANSKRIT": "Sanskrito",
|
||||||
|
"SARDINIAN": "Sarda",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serba",
|
||||||
|
"SHONA": "Ŝona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sinda",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovaka",
|
||||||
|
"SLOVENIAN": "Slovena",
|
||||||
|
"SOMALI": "Somalia",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sunda",
|
||||||
|
"SWAHILI": "Svahila",
|
||||||
|
"SWATI": "Svazia",
|
||||||
|
"SWEDISH": "Sveda",
|
||||||
|
"TAGALOG": "Tagaloga",
|
||||||
|
"TAHITIAN": "Tahitia",
|
||||||
|
"TAJIK": "Taĝika",
|
||||||
|
"TAMIL": "Tamila",
|
||||||
|
"TATAR": "Tatara",
|
||||||
|
"TELUGU": "Telugua",
|
||||||
|
"THAI": "Taja",
|
||||||
|
"TIBETAN": "Tibeta",
|
||||||
|
"TIGRINYA": "Tigraja",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Conga",
|
||||||
|
"TSWANA": "Cvana",
|
||||||
|
"TURKISH": "Turka",
|
||||||
|
"TURKMEN": "Turkmena",
|
||||||
|
"TWI": "Tvia",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukraina",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdua",
|
||||||
|
"UZBEK": "Uzbeka",
|
||||||
|
"VENDA": "Vendaa",
|
||||||
|
"VIETNAMESE": "Vjetnama",
|
||||||
|
"VOLAPUK": "Volapuko",
|
||||||
|
"WALLOON": "Valona",
|
||||||
|
"WELSH": "Kimra",
|
||||||
|
"WESTERN_FRISIAN": "Okcidenta frisa",
|
||||||
|
"WOLOF": "Volofa",
|
||||||
|
"XHOSA": "Kosa",
|
||||||
|
"YIDDISH": "Jida",
|
||||||
|
"YORUBA": "Joruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulua"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nova serio>",
|
||||||
|
"Add": "Aldoni",
|
||||||
|
"Add Pattern": "Aldoni ŝablonon",
|
||||||
|
"Apply": "Apliki",
|
||||||
|
"Apply failed: {error}": "Apliko malsukcesis: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Ĉu vi certe volas forigi la jenan dosiernoman ŝablonon?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Ĉu vi certe volas forigi la jenan ŝovitan sezonon?",
|
||||||
|
"Are you sure to delete the following show?": "Ĉu vi certe volas forigi la jenan serion?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Ĉu vi certe volas forigi la jenan {track_type}-trakon?",
|
||||||
|
"Are you sure to delete this tag?": "Ĉu vi certe volas forigi ĉi tiun etikedon?",
|
||||||
|
"Audio Layout": "Aŭda aranĝo",
|
||||||
|
"Back": "Reen",
|
||||||
|
"Cancel": "Nuligi",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Ne eblas aldoni alian fluon kun la dispozicia flago 'default' aŭ 'forced' aktiva",
|
||||||
|
"Changes applied and file reloaded.": "Ŝanĝoj aplikitaj kaj dosiero reŝargita.",
|
||||||
|
"Cleanup": "Purigado",
|
||||||
|
"Cleanup disabled.": "Purigado malŝaltita.",
|
||||||
|
"Cleanup enabled.": "Purigado ŝaltita.",
|
||||||
|
"Codec": "Kodeko",
|
||||||
|
"Continuing edit session.": "Daŭrigante la redaktan seancon.",
|
||||||
|
"Default": "Defaŭlta",
|
||||||
|
"Delete": "Forigi",
|
||||||
|
"Delete Show": "Forigi serion",
|
||||||
|
"Deleted media tag {tag!r}.": "Forigis la aŭdvidan etikedon {tag!r}.",
|
||||||
|
"Differences": "Diferencoj",
|
||||||
|
"Differences (file->db/output)": "Diferencoj (dosiero->DB/eligo)",
|
||||||
|
"Discard": "Forĵeti",
|
||||||
|
"Discard pending metadata changes and quit?": "Ĉu forĵeti atendatajn metadatumajn ŝanĝojn kaj eliri?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Ĉu forĵeti atendatajn metadatumajn ŝanĝojn kaj reŝargi la dosieran staton?",
|
||||||
|
"Down": "Malsupren",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Seka provo: reskribus per provizora dosiero {target_path}",
|
||||||
|
"Edit": "Redakti",
|
||||||
|
"Edit Pattern": "Redakti ŝablonon",
|
||||||
|
"Edit Show": "Redakti serion",
|
||||||
|
"Edit filename pattern": "Redakti dosiernoman ŝablonon",
|
||||||
|
"Edit shifted season": "Redakti ŝovitan sezonon",
|
||||||
|
"Edit stream": "Redakti fluon",
|
||||||
|
"Episode Offset": "Epizoda deŝovo",
|
||||||
|
"Episode offset": "Epizoda deŝovo",
|
||||||
|
"File": "Dosiero",
|
||||||
|
"File patterns": "Dosieraj ŝablonoj",
|
||||||
|
"First Episode": "Unua epizodo",
|
||||||
|
"First episode": "Unua epizodo",
|
||||||
|
"Forced": "Devigita",
|
||||||
|
"Help": "Helpo",
|
||||||
|
"Help Screen": "Helpa ekrano",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identigi",
|
||||||
|
"Index": "Indekso",
|
||||||
|
"Index / Subindex": "Indekso / Subindekso",
|
||||||
|
"Index Episode Digits": "Ciferoj de epizoda indekso",
|
||||||
|
"Index Season Digits": "Ciferoj de sezona indekso",
|
||||||
|
"Indicator Edisode Digits": "Ciferoj de epizoda indikilo",
|
||||||
|
"Indicator Season Digits": "Ciferoj de sezona indikilo",
|
||||||
|
"Keep Editing": "Daŭrigi redaktadon",
|
||||||
|
"Keeping pending changes.": "Konservas atendatajn ŝanĝojn.",
|
||||||
|
"Key": "Ŝlosilo",
|
||||||
|
"Language": "Lingvo",
|
||||||
|
"Last Episode": "Lasta epizodo",
|
||||||
|
"Last episode": "Lasta epizodo",
|
||||||
|
"Layout": "Aranĝo",
|
||||||
|
"Media Tags": "Aŭdvidaj etikedoj",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Pli ol unu defaŭlta sonfluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Pli ol unu defaŭlta sonfluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Pli ol unu defaŭlta subtitola fluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Pli ol unu defaŭlta subtitola fluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one default video stream detected and no prompt set": "Pli ol unu defaŭlta videofluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one default video stream detected! Please select stream": "Pli ol unu defaŭlta videofluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Pli ol unu devigita sonfluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Pli ol unu devigita sonfluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Pli ol unu devigita subtitola fluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Pli ol unu devigita subtitola fluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Pli ol unu devigita videofluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Pli ol unu devigita videofluo detektita! Bonvolu elekti fluon",
|
||||||
|
"Name": "Nomo",
|
||||||
|
"New Pattern": "Nova ŝablono",
|
||||||
|
"New Show": "Nova serio",
|
||||||
|
"New filename pattern": "Nova dosiernoma ŝablono",
|
||||||
|
"New shifted season": "Nova ŝovita sezono",
|
||||||
|
"New stream": "Nova fluo",
|
||||||
|
"No": "Ne",
|
||||||
|
"No changes to apply.": "Neniuj ŝanĝoj por apliki.",
|
||||||
|
"No changes to revert.": "Neniuj ŝanĝoj por malfari.",
|
||||||
|
"Normalization disabled.": "Normaligo malŝaltita.",
|
||||||
|
"Normalization enabled.": "Normaligo ŝaltita.",
|
||||||
|
"Normalize": "Normaligi",
|
||||||
|
"Notes": "Notoj",
|
||||||
|
"Pattern": "Ŝablono",
|
||||||
|
"Planned Changes (file->edited output)": "Planitaj ŝanĝoj (dosiero->redaktita eligo)",
|
||||||
|
"Quality": "Kvalito",
|
||||||
|
"Quit": "Eliri",
|
||||||
|
"Remove Pattern": "Forigi ŝablonon",
|
||||||
|
"Revert": "Malfari",
|
||||||
|
"Reverted pending changes.": "Malfaris atendatajn ŝanĝojn.",
|
||||||
|
"Save": "Konservi",
|
||||||
|
"Season Offset": "Sezona deŝovo",
|
||||||
|
"Select a stream first.": "Bonvolu unue elekti fluon.",
|
||||||
|
"Set Default": "Agordi kiel defaŭltan",
|
||||||
|
"Set Forced": "Agordi kiel devigitan",
|
||||||
|
"Settings Screen": "Agorda ekrano",
|
||||||
|
"Numbering Mapping": "Ŝovitaj sezonoj",
|
||||||
|
"Show": "Serio",
|
||||||
|
"Shows": "Serioj",
|
||||||
|
"Source Season": "Fonta sezono",
|
||||||
|
"SrcIndex": "Fontindekso",
|
||||||
|
"Status": "Stato",
|
||||||
|
"Stay": "Resti",
|
||||||
|
"Stream dispositions": "Fluaj dispozicioj",
|
||||||
|
"Stream tags": "Fluaj etikedoj",
|
||||||
|
"Streams": "Fluoj",
|
||||||
|
"SubIndex": "Subindekso",
|
||||||
|
"Substitute": "Anstataŭigi",
|
||||||
|
"Substitute pattern": "Anstataŭigi ŝablonon",
|
||||||
|
"Title": "Titolo",
|
||||||
|
"Type": "Tipo",
|
||||||
|
"Unable to update selected stream.": "Ne eblis ĝisdatigi la elektitan fluon.",
|
||||||
|
"Up": "Supren",
|
||||||
|
"Update Pattern": "Ĝisdatigi ŝablonon",
|
||||||
|
"Updated media tag {tag!r}.": "Ĝisdatigis la aŭdvidan etikedon {tag!r}.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Ĝisdatigis fluon #{index} ({track_type}).",
|
||||||
|
"Value": "Valoro",
|
||||||
|
"Year": "Jaro",
|
||||||
|
"Yes": "Jes",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "aldoni aŭdvidan etikedon: ŝlosilo='{key}' valoro='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "aldoni {track_type}-trakon: indekso={index} lingvo={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "aldonaĵo",
|
||||||
|
"audio": "sono",
|
||||||
|
"captions": "subtekstoj",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "ŝanĝi aŭdvidan etikedon: ŝlosilo='{key}' valoro='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) aldoni dispozicion={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) aldoni ŝlosilon={key} valoron={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) ŝanĝi ŝlosilon={key} valoron={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) forigi dispozicion={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) forigi ŝlosilon={key} valoron={value}",
|
||||||
|
"clean_effects": "nur efektoj",
|
||||||
|
"comment": "komento",
|
||||||
|
"default": "defaŭlta",
|
||||||
|
"dependent": "dependa",
|
||||||
|
"descriptions": "priskriboj",
|
||||||
|
"dub": "dublado",
|
||||||
|
"for pattern": "por ŝablono",
|
||||||
|
"forced": "devigita",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "de ŝablono",
|
||||||
|
"from show": "el serio",
|
||||||
|
"hearing_impaired": "aŭdmalhelpita",
|
||||||
|
"karaoke": "karaokeo",
|
||||||
|
"lyrics": "kantoteksto",
|
||||||
|
"metadata": "metadatenoj",
|
||||||
|
"non_diegetic": "nediĝeta",
|
||||||
|
"original": "originala",
|
||||||
|
"pattern #{id}": "ŝablono #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "forigi aŭdvidan etikedon: ŝlosilo='{key}' valoro='{value}'",
|
||||||
|
"remove stream #{index}": "forigi fluon #{index}",
|
||||||
|
"show #{id}": "serio #{id}",
|
||||||
|
"stereo": "stereo",
|
||||||
|
"still_image": "senmova bildo",
|
||||||
|
"sub index": "subindekso",
|
||||||
|
"subtitle": "subtitolo",
|
||||||
|
"timed_thumbnails": "tempigitaj bildetoj",
|
||||||
|
"undefined": "nedifinita",
|
||||||
|
"unknown": "nekonata",
|
||||||
|
"video": "video",
|
||||||
|
"visual_impaired": "vidmalhelpita"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/es.json
Normal file
361
assets/i18n/es.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abjaziano",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanés",
|
||||||
|
"AMHARIC": "Amárico",
|
||||||
|
"ARABIC": "Árabe",
|
||||||
|
"ARAGONESE": "Aragonés",
|
||||||
|
"ARMENIAN": "Armenio",
|
||||||
|
"ASSAMESE": "Asamés",
|
||||||
|
"AVARIC": "Avaric",
|
||||||
|
"AVESTAN": "Avestan",
|
||||||
|
"AYMARA": "Aymará",
|
||||||
|
"AZERBAIJANI": "Azerbaiyano",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Bashkir",
|
||||||
|
"BASQUE": "Vasco",
|
||||||
|
"BELARUSIAN": "Bieloruso",
|
||||||
|
"BENGALI": "Bengalí",
|
||||||
|
"BISLAMA": "Bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnio",
|
||||||
|
"BRETON": "Bretón",
|
||||||
|
"BULGARIAN": "Búlgaro",
|
||||||
|
"BURMESE": "Birmano",
|
||||||
|
"CATALAN": "Catalán",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Checheno",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chino",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Chuvash",
|
||||||
|
"CORNISH": "Córnico",
|
||||||
|
"CORSICAN": "Corso",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Croata",
|
||||||
|
"CZECH": "Checo",
|
||||||
|
"DANISH": "Danés",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Neerlandés",
|
||||||
|
"DZONGKHA": "Butaní",
|
||||||
|
"ENGLISH": "Inglés",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estonio",
|
||||||
|
"EWE": "Ewe",
|
||||||
|
"FAROESE": "Feroés",
|
||||||
|
"FIJIAN": "Fidji",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finés",
|
||||||
|
"FRENCH": "Francés",
|
||||||
|
"FULAH": "Fulah",
|
||||||
|
"GALICIAN": "Gallego",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Georgiano",
|
||||||
|
"GERMAN": "Alemán",
|
||||||
|
"GREEK": "Griego",
|
||||||
|
"GUARANI": "Guaraní",
|
||||||
|
"GUJARATI": "Guyaratí",
|
||||||
|
"HAITIAN": "Haitiano",
|
||||||
|
"HAUSA": "Haussa",
|
||||||
|
"HEBREW": "Hebreo",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Húngaro",
|
||||||
|
"ICELANDIC": "Islandés",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igbo",
|
||||||
|
"INDONESIAN": "Indonesio",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiak",
|
||||||
|
"IRISH": "Irlandés",
|
||||||
|
"ITALIAN": "Italiano",
|
||||||
|
"JAPANESE": "Japonés",
|
||||||
|
"JAVANESE": "Javanés",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Canarés",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "Kashmir",
|
||||||
|
"KAZAKH": "Kazako",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Coreano",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdo",
|
||||||
|
"LAO": "Laosiano",
|
||||||
|
"LATIN": "Latín",
|
||||||
|
"LATVIAN": "Letón",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lituano",
|
||||||
|
"LUBA_KATANGA": "Luba-Katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macedonio",
|
||||||
|
"MALAGASY": "Malgache",
|
||||||
|
"MALAY": "Malayo",
|
||||||
|
"MALAYALAM": "Malabar",
|
||||||
|
"MALTESE": "Maltés",
|
||||||
|
"MANX": "Manx [Gaélico de Manx]",
|
||||||
|
"MAORI": "Maorí",
|
||||||
|
"MARATHI": "Maratí",
|
||||||
|
"MARSHALLESE": "Marshall",
|
||||||
|
"MONGOLIAN": "Mongol",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepalés",
|
||||||
|
"NORTHERN_SAMI": "Sami del Norte",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Noruego",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo (Afan)",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persa",
|
||||||
|
"POLISH": "Polaco",
|
||||||
|
"PORTUGUESE": "Portugués",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Rumano",
|
||||||
|
"ROMANSH": "Romaní",
|
||||||
|
"RUNDI": "Kiroundi",
|
||||||
|
"RUSSIAN": "Ruso",
|
||||||
|
"SAMOAN": "Samoano",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sánscrito",
|
||||||
|
"SARDINIAN": "Sardo",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbio",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Eslovaco",
|
||||||
|
"SLOVENIAN": "Esloveno",
|
||||||
|
"SOMALI": "Somalí",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Español",
|
||||||
|
"SUNDANESE": "Sondanés",
|
||||||
|
"SWAHILI": "Swahili",
|
||||||
|
"SWATI": "Siswati",
|
||||||
|
"SWEDISH": "Sueco",
|
||||||
|
"TAGALOG": "Tagalo",
|
||||||
|
"TAHITIAN": "Tahitiano",
|
||||||
|
"TAJIK": "Tajiko",
|
||||||
|
"TAMIL": "Tamil",
|
||||||
|
"TATAR": "Tataro",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Tailandés",
|
||||||
|
"TIBETAN": "Tibetano",
|
||||||
|
"TIGRINYA": "Tigrinya",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Setchwana",
|
||||||
|
"TURKISH": "Turco",
|
||||||
|
"TURKMEN": "Turkmeno",
|
||||||
|
"TWI": "Tchi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ucraniano",
|
||||||
|
"UNDEFINED": "indefinido",
|
||||||
|
"URDU": "Urdu",
|
||||||
|
"UZBEK": "Uzbeko",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamita",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Valón",
|
||||||
|
"WELSH": "Galés",
|
||||||
|
"WESTERN_FRISIAN": "Frisón occidental",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Yidish",
|
||||||
|
"YORUBA": "Yoruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nueva serie>",
|
||||||
|
"Add": "Añadir",
|
||||||
|
"Add Pattern": "Añadir patrón",
|
||||||
|
"Apply": "Aplicar",
|
||||||
|
"Apply failed: {error}": "Error al aplicar: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "¿Seguro que quieres eliminar el siguiente patrón de nombre de archivo?",
|
||||||
|
"Are you sure to delete the following shifted season?": "¿Seguro que quieres eliminar la siguiente temporada desplazada?",
|
||||||
|
"Are you sure to delete the following show?": "¿Seguro que quieres eliminar la siguiente serie?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "¿Seguro que quieres eliminar la pista {track_type} siguiente?",
|
||||||
|
"Are you sure to delete this tag?": "¿Seguro que quieres eliminar esta etiqueta?",
|
||||||
|
"Audio Layout": "Disposición de audio",
|
||||||
|
"Back": "Volver",
|
||||||
|
"Cancel": "Cancelar",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "No se puede añadir otro flujo con la marca de disposición 'default' o 'forced' activada",
|
||||||
|
"Changes applied and file reloaded.": "Cambios aplicados y archivo recargado.",
|
||||||
|
"Cleanup": "Limpieza",
|
||||||
|
"Cleanup disabled.": "Limpieza desactivada.",
|
||||||
|
"Cleanup enabled.": "Limpieza activada.",
|
||||||
|
"Codec": "Códec",
|
||||||
|
"Continuing edit session.": "Continuando la sesión de edición.",
|
||||||
|
"Default": "Predeterminado",
|
||||||
|
"Delete": "Eliminar",
|
||||||
|
"Delete Show": "Eliminar serie",
|
||||||
|
"Deleted media tag {tag!r}.": "Etiqueta de medios {tag!r} eliminada.",
|
||||||
|
"Differences": "Diferencias",
|
||||||
|
"Differences (file->db/output)": "Diferencias (archivo->BD/salida)",
|
||||||
|
"Discard": "Descartar",
|
||||||
|
"Discard pending metadata changes and quit?": "¿Descartar los cambios pendientes de metadatos y salir?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "¿Descartar los cambios pendientes de metadatos y recargar el estado del archivo?",
|
||||||
|
"Down": "Abajo",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Simulación: reescribiría mediante el archivo temporal {target_path}",
|
||||||
|
"Edit": "Editar",
|
||||||
|
"Edit Pattern": "Editar patrón",
|
||||||
|
"Edit Show": "Editar serie",
|
||||||
|
"Edit filename pattern": "Editar patrón de nombre de archivo",
|
||||||
|
"Edit shifted season": "Editar temporada desplazada",
|
||||||
|
"Edit stream": "Editar flujo",
|
||||||
|
"Episode Offset": "Desplazamiento de episodio",
|
||||||
|
"Episode offset": "Desplazamiento de episodio",
|
||||||
|
"File": "Archivo",
|
||||||
|
"File patterns": "Patrones de archivo",
|
||||||
|
"First Episode": "Primer episodio",
|
||||||
|
"First episode": "Primer episodio",
|
||||||
|
"Forced": "Forzado",
|
||||||
|
"Help": "Ayuda",
|
||||||
|
"Help Screen": "Pantalla de ayuda",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identificar",
|
||||||
|
"Index": "Índice",
|
||||||
|
"Index / Subindex": "Índice / Subíndice",
|
||||||
|
"Index Episode Digits": "Dígitos del índice de episodio",
|
||||||
|
"Index Season Digits": "Dígitos del índice de temporada",
|
||||||
|
"Indicator Edisode Digits": "Dígitos del indicador de episodio",
|
||||||
|
"Indicator Season Digits": "Dígitos del indicador de temporada",
|
||||||
|
"Keep Editing": "Seguir editando",
|
||||||
|
"Keeping pending changes.": "Se conservan los cambios pendientes.",
|
||||||
|
"Key": "Clave",
|
||||||
|
"Language": "Idioma",
|
||||||
|
"Last Episode": "Último episodio",
|
||||||
|
"Last episode": "Último episodio",
|
||||||
|
"Layout": "Diseño",
|
||||||
|
"Media Tags": "Etiquetas de medios",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Se detectó más de un flujo de audio predeterminado y no hay aviso configurado",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Se detectó más de un flujo de audio predeterminado. Selecciona el flujo",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Se detectó más de un flujo de subtítulos predeterminado y no hay aviso configurado",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Se detectó más de un flujo de subtítulos predeterminado. Selecciona el flujo",
|
||||||
|
"More than one default video stream detected and no prompt set": "Se detectó más de un flujo de vídeo predeterminado y no hay aviso configurado",
|
||||||
|
"More than one default video stream detected! Please select stream": "Se detectó más de un flujo de vídeo predeterminado. Selecciona el flujo",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Se detectó más de un flujo de audio forzado y no hay aviso configurado",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Se detectó más de un flujo de audio forzado. Selecciona el flujo",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Se detectó más de un flujo de subtítulos forzados y no hay aviso configurado",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Se detectó más de un flujo de subtítulos forzados. Selecciona el flujo",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Se detectó más de un flujo de vídeo forzado y no hay aviso configurado",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Se detectó más de un flujo de vídeo forzado. Selecciona el flujo",
|
||||||
|
"Name": "Nombre",
|
||||||
|
"New Pattern": "Nuevo patrón",
|
||||||
|
"New Show": "Nueva serie",
|
||||||
|
"New filename pattern": "Nuevo patrón de nombre de archivo",
|
||||||
|
"New shifted season": "Nueva temporada desplazada",
|
||||||
|
"New stream": "Nuevo flujo",
|
||||||
|
"No": "No",
|
||||||
|
"No changes to apply.": "No hay cambios para aplicar.",
|
||||||
|
"No changes to revert.": "No hay cambios para revertir.",
|
||||||
|
"Normalization disabled.": "Normalización desactivada.",
|
||||||
|
"Normalization enabled.": "Normalización activada.",
|
||||||
|
"Normalize": "Normalizar",
|
||||||
|
"Notes": "Notas",
|
||||||
|
"Pattern": "Patrón",
|
||||||
|
"Planned Changes (file->edited output)": "Cambios planificados (archivo->salida editada)",
|
||||||
|
"Quality": "Calidad",
|
||||||
|
"Quit": "Salir",
|
||||||
|
"Remove Pattern": "Eliminar patrón",
|
||||||
|
"Revert": "Revertir",
|
||||||
|
"Reverted pending changes.": "Se revirtieron los cambios pendientes.",
|
||||||
|
"Save": "Guardar",
|
||||||
|
"Season Offset": "Desplazamiento de temporada",
|
||||||
|
"Select a stream first.": "Selecciona primero un flujo.",
|
||||||
|
"Set Default": "Establecer como predeterminado",
|
||||||
|
"Set Forced": "Establecer como forzado",
|
||||||
|
"Settings Screen": "Pantalla de ajustes",
|
||||||
|
"Numbering Mapping": "Temporadas desplazadas",
|
||||||
|
"Show": "Serie",
|
||||||
|
"Shows": "Series",
|
||||||
|
"Source Season": "Temporada de origen",
|
||||||
|
"SrcIndex": "Índice origen",
|
||||||
|
"Status": "Estado",
|
||||||
|
"Stay": "Permanecer",
|
||||||
|
"Stream dispositions": "Disposiciones del flujo",
|
||||||
|
"Stream tags": "Etiquetas del flujo",
|
||||||
|
"Streams": "Flujos",
|
||||||
|
"SubIndex": "Subíndice",
|
||||||
|
"Substitute": "Sustituir",
|
||||||
|
"Substitute pattern": "Sustituir patrón",
|
||||||
|
"Title": "Título",
|
||||||
|
"Type": "Tipo",
|
||||||
|
"Unable to update selected stream.": "No se pudo actualizar el flujo seleccionado.",
|
||||||
|
"Up": "Arriba",
|
||||||
|
"Update Pattern": "Actualizar patrón",
|
||||||
|
"Updated media tag {tag!r}.": "Etiqueta de medios {tag!r} actualizada.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Flujo #{index} ({track_type}) actualizado.",
|
||||||
|
"Value": "Valor",
|
||||||
|
"Year": "Año",
|
||||||
|
"Yes": "Sí",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "añadir etiqueta de medios: clave='{key}' valor='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "añadir pista {track_type}: índice={index} idioma={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "adjunto",
|
||||||
|
"audio": "audio",
|
||||||
|
"captions": "subtítulos",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "cambiar etiqueta de medios: clave='{key}' valor='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "cambiar flujo #{index} ({track_type}:{sub_index}) añadir disposición={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "cambiar flujo #{index} ({track_type}:{sub_index}) añadir clave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "cambiar flujo #{index} ({track_type}:{sub_index}) cambiar clave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "cambiar flujo #{index} ({track_type}:{sub_index}) quitar disposición={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "cambiar flujo #{index} ({track_type}:{sub_index}) quitar clave={key} valor={value}",
|
||||||
|
"clean_effects": "solo efectos",
|
||||||
|
"comment": "comentario",
|
||||||
|
"default": "predeterminado",
|
||||||
|
"dependent": "dependiente",
|
||||||
|
"descriptions": "descripciones",
|
||||||
|
"dub": "doblaje",
|
||||||
|
"for pattern": "para el patrón",
|
||||||
|
"forced": "forzado",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "del patrón",
|
||||||
|
"from show": "de la serie",
|
||||||
|
"hearing_impaired": "personas con discapacidad auditiva",
|
||||||
|
"karaoke": "karaoke",
|
||||||
|
"lyrics": "letra",
|
||||||
|
"metadata": "metadatos",
|
||||||
|
"non_diegetic": "no diegético",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "patrón #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "eliminar etiqueta de medios: clave='{key}' valor='{value}'",
|
||||||
|
"remove stream #{index}": "eliminar flujo #{index}",
|
||||||
|
"show #{id}": "serie #{id}",
|
||||||
|
"stereo": "estéreo",
|
||||||
|
"still_image": "imagen fija",
|
||||||
|
"sub index": "subíndice",
|
||||||
|
"subtitle": "subtítulo",
|
||||||
|
"timed_thumbnails": "miniaturas temporizadas",
|
||||||
|
"undefined": "indefinido",
|
||||||
|
"unknown": "desconocido",
|
||||||
|
"video": "vídeo",
|
||||||
|
"visual_impaired": "personas con discapacidad visual"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/fr.json
Normal file
361
assets/i18n/fr.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abkhaze",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanais",
|
||||||
|
"AMHARIC": "Amharique",
|
||||||
|
"ARABIC": "Arabe",
|
||||||
|
"ARAGONESE": "Aragonais",
|
||||||
|
"ARMENIAN": "Arménien",
|
||||||
|
"ASSAMESE": "Assamais",
|
||||||
|
"AVARIC": "Avar",
|
||||||
|
"AVESTAN": "Avestique",
|
||||||
|
"AYMARA": "Aymara",
|
||||||
|
"AZERBAIJANI": "Azéri",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Bachkir",
|
||||||
|
"BASQUE": "Basque",
|
||||||
|
"BELARUSIAN": "Biélorusse",
|
||||||
|
"BENGALI": "Bengali",
|
||||||
|
"BISLAMA": "Bichelamar",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosniaque",
|
||||||
|
"BRETON": "Breton",
|
||||||
|
"BULGARIAN": "Bulgare",
|
||||||
|
"BURMESE": "Birman",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Tchétchène",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinois",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Tchouvache",
|
||||||
|
"CORNISH": "Cornique",
|
||||||
|
"CORSICAN": "Corse",
|
||||||
|
"CREE": "Cri",
|
||||||
|
"CROATIAN": "Croate",
|
||||||
|
"CZECH": "Tchèque",
|
||||||
|
"DANISH": "Danois",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Néerlandais",
|
||||||
|
"DZONGKHA": "Dzongkha",
|
||||||
|
"ENGLISH": "Anglais",
|
||||||
|
"ESPERANTO": "Espéranto",
|
||||||
|
"ESTONIAN": "Estonien",
|
||||||
|
"EWE": "Éwé",
|
||||||
|
"FAROESE": "Féroïen",
|
||||||
|
"FIJIAN": "Fidjien",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finnois",
|
||||||
|
"FRENCH": "Français",
|
||||||
|
"FULAH": "Peul",
|
||||||
|
"GALICIAN": "Galicien",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Géorgien",
|
||||||
|
"GERMAN": "Allemand",
|
||||||
|
"GREEK": "Grec",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "Goudjarâtî (Gujrâtî)",
|
||||||
|
"HAITIAN": "Haïtien",
|
||||||
|
"HAUSA": "Haoussa",
|
||||||
|
"HEBREW": "Hébreu",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Hongrois",
|
||||||
|
"ICELANDIC": "Islandais",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igbo",
|
||||||
|
"INDONESIAN": "Indonésien",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaq",
|
||||||
|
"IRISH": "Irlandais",
|
||||||
|
"ITALIAN": "Italien",
|
||||||
|
"JAPANESE": "Japonais",
|
||||||
|
"JAVANESE": "Javanais",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannara (Canara)",
|
||||||
|
"KANURI": "Kanouri",
|
||||||
|
"KASHMIRI": "Kashmiri",
|
||||||
|
"KAZAKH": "Kazakh",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Coréen",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurde",
|
||||||
|
"LAO": "Laotien",
|
||||||
|
"LATIN": "Latin",
|
||||||
|
"LATVIAN": "Letton",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lituanien",
|
||||||
|
"LUBA_KATANGA": "Luba-katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macédonien",
|
||||||
|
"MALAGASY": "Malgache",
|
||||||
|
"MALAY": "Malais",
|
||||||
|
"MALAYALAM": "Malayalam",
|
||||||
|
"MALTESE": "Maltais",
|
||||||
|
"MANX": "Mannois",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathe",
|
||||||
|
"MARSHALLESE": "Marshallais",
|
||||||
|
"MONGOLIAN": "Mongol",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Népalais",
|
||||||
|
"NORTHERN_SAMI": "Same du Nord",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norvégien",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persan",
|
||||||
|
"POLISH": "Polonais",
|
||||||
|
"PORTUGUESE": "Portugais",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Roumain",
|
||||||
|
"ROMANSH": "Romanche",
|
||||||
|
"RUNDI": "Rundi",
|
||||||
|
"RUSSIAN": "Russe",
|
||||||
|
"SAMOAN": "Samoan",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sanskrit",
|
||||||
|
"SARDINIAN": "Sarde",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbe",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovaque",
|
||||||
|
"SLOVENIAN": "Slovène",
|
||||||
|
"SOMALI": "Somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Espagnol",
|
||||||
|
"SUNDANESE": "Sundanais",
|
||||||
|
"SWAHILI": "Swahili",
|
||||||
|
"SWATI": "Swati",
|
||||||
|
"SWEDISH": "Suédois",
|
||||||
|
"TAGALOG": "Tagalog",
|
||||||
|
"TAHITIAN": "Tahitien",
|
||||||
|
"TAJIK": "Tadjik",
|
||||||
|
"TAMIL": "Tamoul",
|
||||||
|
"TATAR": "Tatar",
|
||||||
|
"TELUGU": "Télougou",
|
||||||
|
"THAI": "Thaï",
|
||||||
|
"TIBETAN": "Tibétain",
|
||||||
|
"TIGRINYA": "Tigrigna",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Tswana",
|
||||||
|
"TURKISH": "Turc",
|
||||||
|
"TURKMEN": "Turkmène",
|
||||||
|
"TWI": "Twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainien",
|
||||||
|
"UNDEFINED": "indéfini",
|
||||||
|
"URDU": "Ourdou",
|
||||||
|
"UZBEK": "Ouzbek",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamien",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Wallon",
|
||||||
|
"WELSH": "Gallois",
|
||||||
|
"WESTERN_FRISIAN": "Frison occidental",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Yiddish",
|
||||||
|
"YORUBA": "Yoruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zoulou"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nouvelle série>",
|
||||||
|
"Add": "Ajouter",
|
||||||
|
"Add Pattern": "Ajouter un modèle",
|
||||||
|
"Apply": "Appliquer",
|
||||||
|
"Apply failed: {error}": "Échec de l'application : {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Voulez-vous vraiment supprimer le modèle de nom de fichier suivant ?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Voulez-vous vraiment supprimer la saison décalée suivante ?",
|
||||||
|
"Are you sure to delete the following show?": "Voulez-vous vraiment supprimer la série suivante ?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Voulez-vous vraiment supprimer la piste {track_type} suivante ?",
|
||||||
|
"Are you sure to delete this tag?": "Voulez-vous vraiment supprimer cette balise ?",
|
||||||
|
"Audio Layout": "Disposition audio",
|
||||||
|
"Back": "Retour",
|
||||||
|
"Cancel": "Annuler",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Impossible d'ajouter un autre flux avec l'indicateur de disposition 'default' ou 'forced'",
|
||||||
|
"Changes applied and file reloaded.": "Modifications appliquées et fichier rechargé.",
|
||||||
|
"Cleanup": "Nettoyage",
|
||||||
|
"Cleanup disabled.": "Nettoyage désactivé.",
|
||||||
|
"Cleanup enabled.": "Nettoyage activé.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Poursuite de la session d'édition.",
|
||||||
|
"Default": "Par défaut",
|
||||||
|
"Delete": "Supprimer",
|
||||||
|
"Delete Show": "Supprimer la série",
|
||||||
|
"Deleted media tag {tag!r}.": "Balise média {tag!r} supprimée.",
|
||||||
|
"Differences": "Différences",
|
||||||
|
"Differences (file->db/output)": "Différences (fichier->BD/sortie)",
|
||||||
|
"Discard": "Ignorer",
|
||||||
|
"Discard pending metadata changes and quit?": "Ignorer les modifications de métadonnées en attente et quitter ?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Ignorer les modifications de métadonnées en attente et recharger l'état du fichier ?",
|
||||||
|
"Down": "Descendre",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Simulation : réécrirait via le fichier temporaire {target_path}",
|
||||||
|
"Edit": "Modifier",
|
||||||
|
"Edit Pattern": "Modifier le modèle",
|
||||||
|
"Edit Show": "Modifier la série",
|
||||||
|
"Edit filename pattern": "Modifier le modèle de nom de fichier",
|
||||||
|
"Edit shifted season": "Modifier la saison décalée",
|
||||||
|
"Edit stream": "Modifier le flux",
|
||||||
|
"Episode Offset": "Décalage d'épisode",
|
||||||
|
"Episode offset": "Décalage d'épisode",
|
||||||
|
"File": "Fichier",
|
||||||
|
"File patterns": "Modèles de fichiers",
|
||||||
|
"First Episode": "Premier épisode",
|
||||||
|
"First episode": "Premier épisode",
|
||||||
|
"Forced": "Forcé",
|
||||||
|
"Help": "Aide",
|
||||||
|
"Help Screen": "Écran d'aide",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identifier",
|
||||||
|
"Index": "Index",
|
||||||
|
"Index / Subindex": "Index / Sous-index",
|
||||||
|
"Index Episode Digits": "Chiffres d'épisode d'index",
|
||||||
|
"Index Season Digits": "Chiffres de saison d'index",
|
||||||
|
"Indicator Edisode Digits": "Chiffres d'épisode de l'indicateur",
|
||||||
|
"Indicator Season Digits": "Chiffres de saison de l'indicateur",
|
||||||
|
"Keep Editing": "Continuer l'édition",
|
||||||
|
"Keeping pending changes.": "Les modifications en attente sont conservées.",
|
||||||
|
"Key": "Clé",
|
||||||
|
"Language": "Langue",
|
||||||
|
"Last Episode": "Dernier épisode",
|
||||||
|
"Last episode": "Dernier épisode",
|
||||||
|
"Layout": "Disposition",
|
||||||
|
"Media Tags": "Balises média",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Plus d'un flux audio par défaut détecté et aucune invite définie",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Plus d'un flux audio par défaut détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Plus d'un flux de sous-titres par défaut détecté et aucune invite définie",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Plus d'un flux de sous-titres par défaut détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one default video stream detected and no prompt set": "Plus d'un flux vidéo par défaut détecté et aucune invite définie",
|
||||||
|
"More than one default video stream detected! Please select stream": "Plus d'un flux vidéo par défaut détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Plus d'un flux audio forcé détecté et aucune invite définie",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Plus d'un flux audio forcé détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Plus d'un flux de sous-titres forcé détecté et aucune invite définie",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Plus d'un flux de sous-titres forcé détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Plus d'un flux vidéo forcé détecté et aucune invite définie",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Plus d'un flux vidéo forcé détecté ! Veuillez sélectionner un flux",
|
||||||
|
"Name": "Nom",
|
||||||
|
"New Pattern": "Nouveau modèle",
|
||||||
|
"New Show": "Nouvelle série",
|
||||||
|
"New filename pattern": "Nouveau modèle de nom de fichier",
|
||||||
|
"New shifted season": "Nouvelle saison décalée",
|
||||||
|
"New stream": "Nouveau flux",
|
||||||
|
"No": "Non",
|
||||||
|
"No changes to apply.": "Aucune modification à appliquer.",
|
||||||
|
"No changes to revert.": "Aucune modification à annuler.",
|
||||||
|
"Normalization disabled.": "Normalisation désactivée.",
|
||||||
|
"Normalization enabled.": "Normalisation activée.",
|
||||||
|
"Normalize": "Normaliser",
|
||||||
|
"Notes": "Notes",
|
||||||
|
"Pattern": "Modèle",
|
||||||
|
"Planned Changes (file->edited output)": "Modifications prévues (fichier->sortie modifiée)",
|
||||||
|
"Quality": "Qualité",
|
||||||
|
"Quit": "Quitter",
|
||||||
|
"Remove Pattern": "Supprimer le modèle",
|
||||||
|
"Revert": "Annuler les modifications",
|
||||||
|
"Reverted pending changes.": "Modifications en attente annulées.",
|
||||||
|
"Save": "Enregistrer",
|
||||||
|
"Season Offset": "Décalage de saison",
|
||||||
|
"Select a stream first.": "Veuillez d'abord sélectionner un flux.",
|
||||||
|
"Set Default": "Définir par défaut",
|
||||||
|
"Set Forced": "Définir comme forcé",
|
||||||
|
"Settings Screen": "Écran des paramètres",
|
||||||
|
"Numbering Mapping": "Saisons décalées",
|
||||||
|
"Show": "Série",
|
||||||
|
"Shows": "Séries",
|
||||||
|
"Source Season": "Saison source",
|
||||||
|
"SrcIndex": "Index source",
|
||||||
|
"Status": "Statut",
|
||||||
|
"Stay": "Rester",
|
||||||
|
"Stream dispositions": "Dispositions des flux",
|
||||||
|
"Stream tags": "Balises du flux",
|
||||||
|
"Streams": "Flux",
|
||||||
|
"SubIndex": "Sous-index",
|
||||||
|
"Substitute": "Remplacer",
|
||||||
|
"Substitute pattern": "Remplacer le modèle",
|
||||||
|
"Title": "Titre",
|
||||||
|
"Type": "Type",
|
||||||
|
"Unable to update selected stream.": "Impossible de mettre à jour le flux sélectionné.",
|
||||||
|
"Up": "Monter",
|
||||||
|
"Update Pattern": "Mettre à jour le modèle",
|
||||||
|
"Updated media tag {tag!r}.": "Balise média {tag!r} mise à jour.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Flux #{index} ({track_type}) mis à jour.",
|
||||||
|
"Value": "Valeur",
|
||||||
|
"Year": "Année",
|
||||||
|
"Yes": "Oui",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "ajouter une balise média : clé='{key}' valeur='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "ajouter une piste {track_type} : index={index} langue={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "pièce jointe",
|
||||||
|
"audio": "audio",
|
||||||
|
"captions": "sous-titres",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "modifier une balise média : clé='{key}' valeur='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "modifier le flux #{index} ({track_type}:{sub_index}) ajouter disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "modifier le flux #{index} ({track_type}:{sub_index}) ajouter clé={key} valeur={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "modifier le flux #{index} ({track_type}:{sub_index}) changer clé={key} valeur={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "modifier le flux #{index} ({track_type}:{sub_index}) supprimer disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "modifier le flux #{index} ({track_type}:{sub_index}) supprimer clé={key} valeur={value}",
|
||||||
|
"clean_effects": "effets seuls",
|
||||||
|
"comment": "commentaire",
|
||||||
|
"default": "par défaut",
|
||||||
|
"dependent": "dépendant",
|
||||||
|
"descriptions": "descriptions",
|
||||||
|
"dub": "doublage",
|
||||||
|
"for pattern": "pour le modèle",
|
||||||
|
"forced": "forcé",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "depuis le modèle",
|
||||||
|
"from show": "depuis la série",
|
||||||
|
"hearing_impaired": "malentendants",
|
||||||
|
"karaoke": "karaoké",
|
||||||
|
"lyrics": "paroles",
|
||||||
|
"metadata": "métadonnées",
|
||||||
|
"non_diegetic": "non diégétique",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "modèle #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "supprimer une balise média : clé='{key}' valeur='{value}'",
|
||||||
|
"remove stream #{index}": "supprimer le flux #{index}",
|
||||||
|
"show #{id}": "série #{id}",
|
||||||
|
"stereo": "stéréo",
|
||||||
|
"still_image": "image fixe",
|
||||||
|
"sub index": "sous-index",
|
||||||
|
"subtitle": "sous-titre",
|
||||||
|
"timed_thumbnails": "miniatures horodatées",
|
||||||
|
"undefined": "indéfini",
|
||||||
|
"unknown": "inconnu",
|
||||||
|
"video": "vidéo",
|
||||||
|
"visual_impaired": "malvoyants"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/ja.json
Normal file
361
assets/i18n/ja.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "アブハジア語",
|
||||||
|
"AFAR": "アファル語",
|
||||||
|
"AFRIKAANS": "アフリカーンス語",
|
||||||
|
"AKAN": "アカン語",
|
||||||
|
"ALBANIAN": "アルバニア語",
|
||||||
|
"AMHARIC": "アムハラ語",
|
||||||
|
"ARABIC": "アラビア語",
|
||||||
|
"ARAGONESE": "アラゴン語",
|
||||||
|
"ARMENIAN": "アルメニア語",
|
||||||
|
"ASSAMESE": "アッサム語",
|
||||||
|
"AVARIC": "アヴァル語",
|
||||||
|
"AVESTAN": "アヴェスタ語",
|
||||||
|
"AYMARA": "アイマラ語",
|
||||||
|
"AZERBAIJANI": "アゼルバイジャン語",
|
||||||
|
"BAMBARA": "バンバラ語",
|
||||||
|
"BASHKIR": "バシキール語",
|
||||||
|
"BASQUE": "バスク語",
|
||||||
|
"BELARUSIAN": "白ロシア語",
|
||||||
|
"BENGALI": "ベンガル語",
|
||||||
|
"BISLAMA": "ビスラマ語",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "ボスニア語",
|
||||||
|
"BRETON": "ブルトン語",
|
||||||
|
"BULGARIAN": "ブルガリア語",
|
||||||
|
"BURMESE": "ビルマ語",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "チャモロ語",
|
||||||
|
"CHECHEN": "チェチェン語",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "中国語",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "チュヴァシュ語",
|
||||||
|
"CORNISH": "コーンウォール語",
|
||||||
|
"CORSICAN": "コルシカ語",
|
||||||
|
"CREE": "クリー語",
|
||||||
|
"CROATIAN": "クロアチア語",
|
||||||
|
"CZECH": "チェコ語",
|
||||||
|
"DANISH": "デンマーク語",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "ゾンカ語",
|
||||||
|
"ENGLISH": "英語",
|
||||||
|
"ESPERANTO": "エスペラント語",
|
||||||
|
"ESTONIAN": "エストニア語",
|
||||||
|
"EWE": "エウェ語",
|
||||||
|
"FAROESE": "フェロー語",
|
||||||
|
"FIJIAN": "フィジー語",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "フィン語",
|
||||||
|
"FRENCH": "フランス語",
|
||||||
|
"FULAH": "フラ語",
|
||||||
|
"GALICIAN": "ガリシア語",
|
||||||
|
"GANDA": "ガンダ語",
|
||||||
|
"GEORGIAN": "グルジア語",
|
||||||
|
"GERMAN": "ドイツ語",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "グアラニー",
|
||||||
|
"GUJARATI": "グジャラーティー語",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "ハウサ語",
|
||||||
|
"HEBREW": "ヘブライ語",
|
||||||
|
"HERERO": "ヘレロ語",
|
||||||
|
"HINDI": "ヒンディー語",
|
||||||
|
"HIRI_MOTU": "ヒリモトゥ語",
|
||||||
|
"HUNGARIAN": "ハンガリー語",
|
||||||
|
"ICELANDIC": "アイスランド語",
|
||||||
|
"IDO": "イド語",
|
||||||
|
"IGBO": "イボ語",
|
||||||
|
"INDONESIAN": "インドネシア語",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "イヌクウティトット語",
|
||||||
|
"INUPIAQ": "イヌピアック語",
|
||||||
|
"IRISH": "アイルランド語",
|
||||||
|
"ITALIAN": "イタリア語",
|
||||||
|
"JAPANESE": "日本語",
|
||||||
|
"JAVANESE": "ジャワ語",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "カンナダ語",
|
||||||
|
"KANURI": "カヌリ語",
|
||||||
|
"KASHMIRI": "カシミーリー語",
|
||||||
|
"KAZAKH": "カザーフ語",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "キンヤルワンダ語",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "コミ語",
|
||||||
|
"KONGO": "コンゴ語",
|
||||||
|
"KOREAN": "朝鮮語",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "クルド語",
|
||||||
|
"LAO": "ラオ語",
|
||||||
|
"LATIN": "ラテン語",
|
||||||
|
"LATVIAN": "ラトビア語",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "リンガラ語",
|
||||||
|
"LITHUANIAN": "リトアニア語",
|
||||||
|
"LUBA_KATANGA": "ルバ語",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "マケドニア語",
|
||||||
|
"MALAGASY": "マラガシ語",
|
||||||
|
"MALAY": "マライ語",
|
||||||
|
"MALAYALAM": "マラヤーラム語",
|
||||||
|
"MALTESE": "マルタ語",
|
||||||
|
"MANX": "マン島語",
|
||||||
|
"MAORI": "マオリ語",
|
||||||
|
"MARATHI": "マラーティー語",
|
||||||
|
"MARSHALLESE": "マーシャル語",
|
||||||
|
"MONGOLIAN": "蒙古語",
|
||||||
|
"NAURU": "ナウル語",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "ンドンガ語",
|
||||||
|
"NEPALI": "ネパール語",
|
||||||
|
"NORTHERN_SAMI": "北サーミ語",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "ノルウェー語",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "オジブワ語",
|
||||||
|
"ORIYA": "オリヤー語",
|
||||||
|
"OROMO": "オロモ語",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "パーリ語",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "ペルシア語",
|
||||||
|
"POLISH": "ポーランド語",
|
||||||
|
"PORTUGUESE": "ポルトガル語",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "キチュワ語",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "ロマンシュ語",
|
||||||
|
"RUNDI": "ルンディ語",
|
||||||
|
"RUSSIAN": "ロシア語",
|
||||||
|
"SAMOAN": "サモア語",
|
||||||
|
"SANGO": "サンゴ語",
|
||||||
|
"SANSKRIT": "梵語",
|
||||||
|
"SARDINIAN": "サルデーニャ語",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "セルビア語",
|
||||||
|
"SHONA": "ショナ語",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "シンディー語",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "スロヴァキア語",
|
||||||
|
"SLOVENIAN": "スロヴェニア語",
|
||||||
|
"SOMALI": "ソマリ語",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "スンダ語",
|
||||||
|
"SWAHILI": "スワヒリ語",
|
||||||
|
"SWATI": "シスワティ語",
|
||||||
|
"SWEDISH": "スウェーデン語",
|
||||||
|
"TAGALOG": "タガログ語",
|
||||||
|
"TAHITIAN": "タヒチ語",
|
||||||
|
"TAJIK": "タジク語",
|
||||||
|
"TAMIL": "タミル語",
|
||||||
|
"TATAR": "タタール語",
|
||||||
|
"TELUGU": "テルグ語",
|
||||||
|
"THAI": "タイ語",
|
||||||
|
"TIBETAN": "チベット語",
|
||||||
|
"TIGRINYA": "ティグリニア語",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "ツォンガ語",
|
||||||
|
"TSWANA": "ツワナ語",
|
||||||
|
"TURKISH": "トルコ語",
|
||||||
|
"TURKMEN": "トゥルクメン語",
|
||||||
|
"TWI": "トウィ語",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "ウクライナ語",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "ウルドゥー語",
|
||||||
|
"UZBEK": "ウズベク語",
|
||||||
|
"VENDA": "ベンダ語",
|
||||||
|
"VIETNAMESE": "ベトナム語",
|
||||||
|
"VOLAPUK": "ボラピューク語",
|
||||||
|
"WALLOON": "ワロン語",
|
||||||
|
"WELSH": "ウェールズ語",
|
||||||
|
"WESTERN_FRISIAN": "西フリジア語",
|
||||||
|
"WOLOF": "ウォロフ語",
|
||||||
|
"XHOSA": "ホサ語",
|
||||||
|
"YIDDISH": "イディッシュ語",
|
||||||
|
"YORUBA": "ヨルバ語",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "ズールー語"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<新しい番組>",
|
||||||
|
"Add": "追加",
|
||||||
|
"Add Pattern": "パターンを追加",
|
||||||
|
"Apply": "適用",
|
||||||
|
"Apply failed: {error}": "適用に失敗しました: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "次のファイル名パターンを削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete the following shifted season?": "次のシーズンシフト設定を削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete the following show?": "次の番組を削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "次の{track_type}ストリームを削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete this tag?": "このタグを削除してもよろしいですか?",
|
||||||
|
"Audio Layout": "音声レイアウト",
|
||||||
|
"Back": "戻る",
|
||||||
|
"Cancel": "キャンセル",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "default または forced の disposition が設定されたストリームはこれ以上追加できません",
|
||||||
|
"Changes applied and file reloaded.": "変更を適用し、ファイルを再読み込みしました。",
|
||||||
|
"Cleanup": "クリーンアップ",
|
||||||
|
"Cleanup disabled.": "クリーンアップを無効にしました。",
|
||||||
|
"Cleanup enabled.": "クリーンアップを有効にしました。",
|
||||||
|
"Codec": "コーデック",
|
||||||
|
"Continuing edit session.": "編集セッションを続行します。",
|
||||||
|
"Default": "デフォルト",
|
||||||
|
"Delete": "削除",
|
||||||
|
"Delete Show": "番組を削除",
|
||||||
|
"Deleted media tag {tag!r}.": "メディアタグ {tag!r} を削除しました。",
|
||||||
|
"Differences": "差分",
|
||||||
|
"Differences (file->db/output)": "差分 (ファイル->DB/出力)",
|
||||||
|
"Discard": "破棄",
|
||||||
|
"Discard pending metadata changes and quit?": "保留中のメタデータ変更を破棄して終了しますか?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "保留中のメタデータ変更を破棄してファイル状態を再読み込みしますか?",
|
||||||
|
"Down": "下へ",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "ドライラン: 一時ファイル {target_path} 経由で再書き込みします",
|
||||||
|
"Edit": "編集",
|
||||||
|
"Edit Pattern": "パターンを編集",
|
||||||
|
"Edit Show": "番組を編集",
|
||||||
|
"Edit filename pattern": "ファイル名パターンを編集",
|
||||||
|
"Edit shifted season": "シフト済みシーズンを編集",
|
||||||
|
"Edit stream": "ストリームを編集",
|
||||||
|
"Episode Offset": "エピソードオフセット",
|
||||||
|
"Episode offset": "エピソードオフセット",
|
||||||
|
"File": "ファイル",
|
||||||
|
"File patterns": "ファイルパターン",
|
||||||
|
"First Episode": "最初のエピソード",
|
||||||
|
"First episode": "最初のエピソード",
|
||||||
|
"Forced": "強制",
|
||||||
|
"Help": "ヘルプ",
|
||||||
|
"Help Screen": "ヘルプ画面",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "識別",
|
||||||
|
"Index": "インデックス",
|
||||||
|
"Index / Subindex": "インデックス / サブインデックス",
|
||||||
|
"Index Episode Digits": "インデックスのエピソード桁数",
|
||||||
|
"Index Season Digits": "インデックスのシーズン桁数",
|
||||||
|
"Indicator Edisode Digits": "インジケーターのエピソード桁数",
|
||||||
|
"Indicator Season Digits": "インジケーターのシーズン桁数",
|
||||||
|
"Keep Editing": "編集を続ける",
|
||||||
|
"Keeping pending changes.": "保留中の変更を保持します。",
|
||||||
|
"Key": "キー",
|
||||||
|
"Language": "言語",
|
||||||
|
"Last Episode": "最後のエピソード",
|
||||||
|
"Last episode": "最後のエピソード",
|
||||||
|
"Layout": "レイアウト",
|
||||||
|
"Media Tags": "メディアタグ",
|
||||||
|
"More than one default audio stream detected and no prompt set": "デフォルト音声ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one default audio stream detected! Please select stream": "デフォルト音声ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "デフォルト字幕ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "デフォルト字幕ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one default video stream detected and no prompt set": "デフォルト映像ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one default video stream detected! Please select stream": "デフォルト映像ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "強制音声ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "強制音声ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "強制字幕ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "強制字幕ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one forced video stream detected and no prompt set": "強制映像ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one forced video stream detected! Please select stream": "強制映像ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"Name": "名前",
|
||||||
|
"New Pattern": "新しいパターン",
|
||||||
|
"New Show": "新しい番組",
|
||||||
|
"New filename pattern": "新しいファイル名パターン",
|
||||||
|
"New shifted season": "新しいシーズンシフト",
|
||||||
|
"New stream": "新しいストリーム",
|
||||||
|
"No": "いいえ",
|
||||||
|
"No changes to apply.": "適用する変更はありません。",
|
||||||
|
"No changes to revert.": "元に戻す変更はありません。",
|
||||||
|
"Normalization disabled.": "正規化を無効にしました。",
|
||||||
|
"Normalization enabled.": "正規化を有効にしました。",
|
||||||
|
"Normalize": "正規化",
|
||||||
|
"Notes": "メモ",
|
||||||
|
"Pattern": "パターン",
|
||||||
|
"Planned Changes (file->edited output)": "予定された変更 (ファイル->編集後出力)",
|
||||||
|
"Quality": "品質",
|
||||||
|
"Quit": "終了",
|
||||||
|
"Remove Pattern": "パターンを削除",
|
||||||
|
"Revert": "元に戻す",
|
||||||
|
"Reverted pending changes.": "保留中の変更を元に戻しました。",
|
||||||
|
"Save": "保存",
|
||||||
|
"Season Offset": "シーズンオフセット",
|
||||||
|
"Select a stream first.": "まずストリームを選択してください。",
|
||||||
|
"Set Default": "デフォルトに設定",
|
||||||
|
"Set Forced": "強制に設定",
|
||||||
|
"Settings Screen": "設定画面",
|
||||||
|
"Numbering Mapping": "シフト済みシーズン",
|
||||||
|
"Show": "番組",
|
||||||
|
"Shows": "番組一覧",
|
||||||
|
"Source Season": "元シーズン",
|
||||||
|
"SrcIndex": "元インデックス",
|
||||||
|
"Status": "状態",
|
||||||
|
"Stay": "このまま",
|
||||||
|
"Stream dispositions": "ストリーム disposition",
|
||||||
|
"Stream tags": "ストリームタグ",
|
||||||
|
"Streams": "ストリーム",
|
||||||
|
"SubIndex": "サブインデックス",
|
||||||
|
"Substitute": "置換",
|
||||||
|
"Substitute pattern": "パターンを置換",
|
||||||
|
"Title": "タイトル",
|
||||||
|
"Type": "タイプ",
|
||||||
|
"Unable to update selected stream.": "選択したストリームを更新できませんでした。",
|
||||||
|
"Up": "上へ",
|
||||||
|
"Update Pattern": "パターンを更新",
|
||||||
|
"Updated media tag {tag!r}.": "メディアタグ {tag!r} を更新しました。",
|
||||||
|
"Updated stream #{index} ({track_type}).": "ストリーム #{index} ({track_type}) を更新しました。",
|
||||||
|
"Value": "値",
|
||||||
|
"Year": "年",
|
||||||
|
"Yes": "はい",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "メディアタグを追加: key='{key}' value='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "{track_type}ストリームを追加: index={index} lang={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "添付",
|
||||||
|
"audio": "音声",
|
||||||
|
"captions": "キャプション",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "メディアタグを変更: key='{key}' value='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "ストリーム #{index} ({track_type}:{sub_index}) disposition を追加={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "ストリーム #{index} ({track_type}:{sub_index}) key を追加={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "ストリーム #{index} ({track_type}:{sub_index}) key を変更={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "ストリーム #{index} ({track_type}:{sub_index}) disposition を削除={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "ストリーム #{index} ({track_type}:{sub_index}) key を削除={key} value={value}",
|
||||||
|
"clean_effects": "効果音のみ",
|
||||||
|
"comment": "コメント",
|
||||||
|
"default": "デフォルト",
|
||||||
|
"dependent": "依存",
|
||||||
|
"descriptions": "解説",
|
||||||
|
"dub": "吹替",
|
||||||
|
"for pattern": "パターン用",
|
||||||
|
"forced": "強制",
|
||||||
|
"from": "元",
|
||||||
|
"from pattern": "パターンから",
|
||||||
|
"from show": "番組から",
|
||||||
|
"hearing_impaired": "聴覚障害者向け",
|
||||||
|
"karaoke": "カラオケ",
|
||||||
|
"lyrics": "歌詞",
|
||||||
|
"metadata": "メタデータ",
|
||||||
|
"non_diegetic": "非ダイジェティック",
|
||||||
|
"original": "オリジナル",
|
||||||
|
"pattern #{id}": "パターン #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "メディアタグを削除: key='{key}' value='{value}'",
|
||||||
|
"remove stream #{index}": "ストリーム #{index} を削除",
|
||||||
|
"show #{id}": "番組 #{id}",
|
||||||
|
"stereo": "ステレオ",
|
||||||
|
"still_image": "静止画",
|
||||||
|
"sub index": "サブインデックス",
|
||||||
|
"subtitle": "字幕",
|
||||||
|
"timed_thumbnails": "時間指定サムネイル",
|
||||||
|
"undefined": "未定義",
|
||||||
|
"unknown": "不明",
|
||||||
|
"video": "映像",
|
||||||
|
"visual_impaired": "視覚障害者向け"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/nb.json
Normal file
361
assets/i18n/nb.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abkhazian",
|
||||||
|
"AFAR": "afar",
|
||||||
|
"AFRIKAANS": "Afrikansk",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albansk",
|
||||||
|
"AMHARIC": "Amharic",
|
||||||
|
"ARABIC": "Arabisk",
|
||||||
|
"ARAGONESE": "aragonsk",
|
||||||
|
"ARMENIAN": "armensk",
|
||||||
|
"ASSAMESE": "assamisk",
|
||||||
|
"AVARIC": "Avaric",
|
||||||
|
"AVESTAN": "avestisk",
|
||||||
|
"AYMARA": "aymara",
|
||||||
|
"AZERBAIJANI": "Aserbadjansk",
|
||||||
|
"BAMBARA": "bambara",
|
||||||
|
"BASHKIR": "basjkirsk",
|
||||||
|
"BASQUE": "Baskisk",
|
||||||
|
"BELARUSIAN": "Hviterussisk",
|
||||||
|
"BENGALI": "bengali",
|
||||||
|
"BISLAMA": "bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnisk",
|
||||||
|
"BRETON": "Breton",
|
||||||
|
"BULGARIAN": "Bulgarsk",
|
||||||
|
"BURMESE": "burmesisk",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "chamorro",
|
||||||
|
"CHECHEN": "Chechen",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Kinesisk",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "tsjuvansk",
|
||||||
|
"CORNISH": "Cornish",
|
||||||
|
"CORSICAN": "Korsikansk",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Kroatisk",
|
||||||
|
"CZECH": "Tsjekkisk",
|
||||||
|
"DANISH": "Dansk",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "dzongkha",
|
||||||
|
"ENGLISH": "Engelsk",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estonsk",
|
||||||
|
"EWE": "ewe",
|
||||||
|
"FAROESE": "færøysk",
|
||||||
|
"FIJIAN": "fijiansk",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finsk",
|
||||||
|
"FRENCH": "Fransk",
|
||||||
|
"FULAH": "fulani",
|
||||||
|
"GALICIAN": "Galisisk",
|
||||||
|
"GANDA": "ganda",
|
||||||
|
"GEORGIAN": "Georgisk",
|
||||||
|
"GERMAN": "Tysk",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "gujarati",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Hausa",
|
||||||
|
"HEBREW": "Hebraisk",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Ungarsk",
|
||||||
|
"ICELANDIC": "Islandsk",
|
||||||
|
"IDO": "ido",
|
||||||
|
"IGBO": "ibo",
|
||||||
|
"INDONESIAN": "Indonesisk",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "inuktitut",
|
||||||
|
"INUPIAQ": "inupiak",
|
||||||
|
"IRISH": "Irsk",
|
||||||
|
"ITALIAN": "Italiensk",
|
||||||
|
"JAPANESE": "Japansk",
|
||||||
|
"JAVANESE": "Javanesisk",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "kannada",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "kasjmiri",
|
||||||
|
"KAZAKH": "kasakhisk",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "kinjarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "komi",
|
||||||
|
"KONGO": "kikongo",
|
||||||
|
"KOREAN": "Koreansk",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdisk",
|
||||||
|
"LAO": "laotisk",
|
||||||
|
"LATIN": "Latin",
|
||||||
|
"LATVIAN": "Latvisk",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "lingala",
|
||||||
|
"LITHUANIAN": "Litauisk",
|
||||||
|
"LUBA_KATANGA": "luba-katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Makedonsk",
|
||||||
|
"MALAGASY": "madagassisk",
|
||||||
|
"MALAY": "malayisk",
|
||||||
|
"MALAYALAM": "malayalam",
|
||||||
|
"MALTESE": "Maltisk",
|
||||||
|
"MANX": "manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathi",
|
||||||
|
"MARSHALLESE": "Marshallese",
|
||||||
|
"MONGOLIAN": "Mongolsk",
|
||||||
|
"NAURU": "nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "nepalsk",
|
||||||
|
"NORTHERN_SAMI": "nordsamisk",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norsk",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "ojibwa",
|
||||||
|
"ORIYA": "oriya",
|
||||||
|
"OROMO": "oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persisk",
|
||||||
|
"POLISH": "Polsk",
|
||||||
|
"PORTUGUESE": "Portugisisk",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romansh",
|
||||||
|
"RUNDI": "rundi",
|
||||||
|
"RUSSIAN": "Russisk",
|
||||||
|
"SAMOAN": "samoansk",
|
||||||
|
"SANGO": "sango",
|
||||||
|
"SANSKRIT": "sanskrit",
|
||||||
|
"SARDINIAN": "Sardinsk",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbisk",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovakisk",
|
||||||
|
"SLOVENIAN": "Slovensk",
|
||||||
|
"SOMALI": "somalisk",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "sundanesisk",
|
||||||
|
"SWAHILI": "swahili",
|
||||||
|
"SWATI": "swati",
|
||||||
|
"SWEDISH": "Svensk",
|
||||||
|
"TAGALOG": "tagalog",
|
||||||
|
"TAHITIAN": "Tahitisk",
|
||||||
|
"TAJIK": "Tajik",
|
||||||
|
"TAMIL": "Tamilsk",
|
||||||
|
"TATAR": "tatarisk",
|
||||||
|
"TELUGU": "telugu",
|
||||||
|
"THAI": "Thai",
|
||||||
|
"TIBETAN": "tibetansk",
|
||||||
|
"TIGRINYA": "Tigrinya",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "tsonga",
|
||||||
|
"TSWANA": "tswana",
|
||||||
|
"TURKISH": "Tyrkisk",
|
||||||
|
"TURKMEN": "turkmensk",
|
||||||
|
"TWI": "twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainsk",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "urdu",
|
||||||
|
"UZBEK": "usbekisk",
|
||||||
|
"VENDA": "venda",
|
||||||
|
"VIETNAMESE": "Vietnamesisk",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "vallonsk",
|
||||||
|
"WELSH": "Walisisk",
|
||||||
|
"WESTERN_FRISIAN": "Vestfrisisk",
|
||||||
|
"WOLOF": "wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "jiddisk",
|
||||||
|
"YORUBA": "joruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Ny serie>",
|
||||||
|
"Add": "Legg til",
|
||||||
|
"Add Pattern": "Legg til mønster",
|
||||||
|
"Apply": "Bruk",
|
||||||
|
"Apply failed: {error}": "Kunne ikke bruke endringene: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Er du sikker på at du vil slette følgende filnavnmønster?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Er du sikker på at du vil slette følgende forskjøvede sesong?",
|
||||||
|
"Are you sure to delete the following show?": "Er du sikker på at du vil slette følgende serie?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Er du sikker på at du vil slette følgende {track_type}-spor?",
|
||||||
|
"Are you sure to delete this tag?": "Er du sikker på at du vil slette denne taggen?",
|
||||||
|
"Audio Layout": "Lydoppsett",
|
||||||
|
"Back": "Tilbake",
|
||||||
|
"Cancel": "Avbryt",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Kan ikke legge til en ny strøm med disposisjonsflagget 'default' eller 'forced' satt",
|
||||||
|
"Changes applied and file reloaded.": "Endringene er brukt og filen er lastet inn på nytt.",
|
||||||
|
"Cleanup": "Rydd opp",
|
||||||
|
"Cleanup disabled.": "Rydding deaktivert.",
|
||||||
|
"Cleanup enabled.": "Rydding aktivert.",
|
||||||
|
"Codec": "Kodek",
|
||||||
|
"Continuing edit session.": "Fortsetter redigeringsøkten.",
|
||||||
|
"Default": "Standard",
|
||||||
|
"Delete": "Slett",
|
||||||
|
"Delete Show": "Slett serie",
|
||||||
|
"Deleted media tag {tag!r}.": "Mediataggen {tag!r} ble slettet.",
|
||||||
|
"Differences": "Forskjeller",
|
||||||
|
"Differences (file->db/output)": "Forskjeller (fil->DB/utdata)",
|
||||||
|
"Discard": "Forkast",
|
||||||
|
"Discard pending metadata changes and quit?": "Forkaste ventende metadataendringer og avslutte?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Forkaste ventende metadataendringer og laste filtilstanden på nytt?",
|
||||||
|
"Down": "Ned",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Tørrkjøring: ville skrevet om via midlertidig fil {target_path}",
|
||||||
|
"Edit": "Rediger",
|
||||||
|
"Edit Pattern": "Rediger mønster",
|
||||||
|
"Edit Show": "Rediger serie",
|
||||||
|
"Edit filename pattern": "Rediger filnavnmønster",
|
||||||
|
"Edit shifted season": "Rediger forskjøvet sesong",
|
||||||
|
"Edit stream": "Rediger strøm",
|
||||||
|
"Episode Offset": "Episodeforskyvning",
|
||||||
|
"Episode offset": "Episodeforskyvning",
|
||||||
|
"File": "Fil",
|
||||||
|
"File patterns": "Filmønstre",
|
||||||
|
"First Episode": "Første episode",
|
||||||
|
"First episode": "Første episode",
|
||||||
|
"Forced": "Tvungen",
|
||||||
|
"Help": "Hjelp",
|
||||||
|
"Help Screen": "Hjelpeskjerm",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identifiser",
|
||||||
|
"Index": "Indeks",
|
||||||
|
"Index / Subindex": "Indeks / Underindeks",
|
||||||
|
"Index Episode Digits": "Siffer for episodeindeks",
|
||||||
|
"Index Season Digits": "Siffer for sesongindeks",
|
||||||
|
"Indicator Edisode Digits": "Siffer for episodeindikator",
|
||||||
|
"Indicator Season Digits": "Siffer for sesongindikator",
|
||||||
|
"Keep Editing": "Fortsett redigeringen",
|
||||||
|
"Keeping pending changes.": "Beholder ventende endringer.",
|
||||||
|
"Key": "Nøkkel",
|
||||||
|
"Language": "Språk",
|
||||||
|
"Last Episode": "Siste episode",
|
||||||
|
"Last episode": "Siste episode",
|
||||||
|
"Layout": "Oppsett",
|
||||||
|
"Media Tags": "Mediatagger",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Mer enn én standard lydstrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Mer enn én standard lydstrøm funnet. Velg strøm",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Mer enn én standard undertekststrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Mer enn én standard undertekststrøm funnet. Velg strøm",
|
||||||
|
"More than one default video stream detected and no prompt set": "Mer enn én standard videostrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one default video stream detected! Please select stream": "Mer enn én standard videostrøm funnet. Velg strøm",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Mer enn én tvungen lydstrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Mer enn én tvungen lydstrøm funnet. Velg strøm",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Mer enn én tvungen undertekststrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Mer enn én tvungen undertekststrøm funnet. Velg strøm",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Mer enn én tvungen videostrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Mer enn én tvungen videostrøm funnet. Velg strøm",
|
||||||
|
"Name": "Navn",
|
||||||
|
"New Pattern": "Nytt mønster",
|
||||||
|
"New Show": "Ny serie",
|
||||||
|
"New filename pattern": "Nytt filnavnmønster",
|
||||||
|
"New shifted season": "Ny forskjøvet sesong",
|
||||||
|
"New stream": "Ny strøm",
|
||||||
|
"No": "Nei",
|
||||||
|
"No changes to apply.": "Ingen endringer å bruke.",
|
||||||
|
"No changes to revert.": "Ingen endringer å tilbakestille.",
|
||||||
|
"Normalization disabled.": "Normalisering deaktivert.",
|
||||||
|
"Normalization enabled.": "Normalisering aktivert.",
|
||||||
|
"Normalize": "Normaliser",
|
||||||
|
"Notes": "Notater",
|
||||||
|
"Pattern": "Mønster",
|
||||||
|
"Planned Changes (file->edited output)": "Planlagte endringer (fil->redigert utdata)",
|
||||||
|
"Quality": "Kvalitet",
|
||||||
|
"Quit": "Avslutt",
|
||||||
|
"Remove Pattern": "Fjern mønster",
|
||||||
|
"Revert": "Tilbakestill",
|
||||||
|
"Reverted pending changes.": "Ventende endringer ble tilbakestilt.",
|
||||||
|
"Save": "Lagre",
|
||||||
|
"Season Offset": "Sesongforskyvning",
|
||||||
|
"Select a stream first.": "Velg en strøm først.",
|
||||||
|
"Set Default": "Sett som standard",
|
||||||
|
"Set Forced": "Sett som tvungen",
|
||||||
|
"Settings Screen": "Innstillingsskjerm",
|
||||||
|
"Numbering Mapping": "Forskjøvne sesonger",
|
||||||
|
"Show": "Serie",
|
||||||
|
"Shows": "Serier",
|
||||||
|
"Source Season": "Kildesesong",
|
||||||
|
"SrcIndex": "Kildeindeks",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Bli",
|
||||||
|
"Stream dispositions": "Strømdisposisjoner",
|
||||||
|
"Stream tags": "Strømtagger",
|
||||||
|
"Streams": "Strømmer",
|
||||||
|
"SubIndex": "Underindeks",
|
||||||
|
"Substitute": "Erstatt",
|
||||||
|
"Substitute pattern": "Erstatt mønster",
|
||||||
|
"Title": "Tittel",
|
||||||
|
"Type": "Type",
|
||||||
|
"Unable to update selected stream.": "Kunne ikke oppdatere valgt strøm.",
|
||||||
|
"Up": "Opp",
|
||||||
|
"Update Pattern": "Oppdater mønster",
|
||||||
|
"Updated media tag {tag!r}.": "Mediataggen {tag!r} ble oppdatert.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Strøm #{index} ({track_type}) oppdatert.",
|
||||||
|
"Value": "Verdi",
|
||||||
|
"Year": "År",
|
||||||
|
"Yes": "Ja",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "legg til mediatagg: nøkkel='{key}' verdi='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "legg til {track_type}-spor: indeks={index} språk={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "vedlegg",
|
||||||
|
"audio": "lyd",
|
||||||
|
"captions": "teksting",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "endre mediatagg: nøkkel='{key}' verdi='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "endre strøm #{index} ({track_type}:{sub_index}) legg til disposisjon={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "endre strøm #{index} ({track_type}:{sub_index}) legg til nøkkel={key} verdi={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "endre strøm #{index} ({track_type}:{sub_index}) endre nøkkel={key} verdi={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "endre strøm #{index} ({track_type}:{sub_index}) fjern disposisjon={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "endre strøm #{index} ({track_type}:{sub_index}) fjern nøkkel={key} verdi={value}",
|
||||||
|
"clean_effects": "bare effekter",
|
||||||
|
"comment": "kommentar",
|
||||||
|
"default": "standard",
|
||||||
|
"dependent": "avhengig",
|
||||||
|
"descriptions": "beskrivelser",
|
||||||
|
"dub": "dubbet",
|
||||||
|
"for pattern": "for mønster",
|
||||||
|
"forced": "tvungen",
|
||||||
|
"from": "fra",
|
||||||
|
"from pattern": "fra mønster",
|
||||||
|
"from show": "fra serie",
|
||||||
|
"hearing_impaired": "hørselshemmet",
|
||||||
|
"karaoke": "karaoke",
|
||||||
|
"lyrics": "sangtekst",
|
||||||
|
"metadata": "metadata",
|
||||||
|
"non_diegetic": "ikke-diegetisk",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "mønster #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "fjern mediatagg: nøkkel='{key}' verdi='{value}'",
|
||||||
|
"remove stream #{index}": "fjern strøm #{index}",
|
||||||
|
"show #{id}": "serie #{id}",
|
||||||
|
"stereo": "stereo",
|
||||||
|
"still_image": "stillbilde",
|
||||||
|
"sub index": "underindeks",
|
||||||
|
"subtitle": "undertekst",
|
||||||
|
"timed_thumbnails": "tidsbestemte miniatyrer",
|
||||||
|
"undefined": "udefinert",
|
||||||
|
"unknown": "ukjent",
|
||||||
|
"video": "video",
|
||||||
|
"visual_impaired": "synshemmet"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/pt.json
Normal file
361
assets/i18n/pt.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "abkhazian",
|
||||||
|
"AFAR": "afar",
|
||||||
|
"AFRIKAANS": "Africanos",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanês",
|
||||||
|
"AMHARIC": "Amárico",
|
||||||
|
"ARABIC": "Árabe",
|
||||||
|
"ARAGONESE": "Aragonês",
|
||||||
|
"ARMENIAN": "arménio",
|
||||||
|
"ASSAMESE": "assamês",
|
||||||
|
"AVARIC": "Avárico",
|
||||||
|
"AVESTAN": "avéstico",
|
||||||
|
"AYMARA": "aimara",
|
||||||
|
"AZERBAIJANI": "Azerbaijani",
|
||||||
|
"BAMBARA": "bambara",
|
||||||
|
"BASHKIR": "bashkir",
|
||||||
|
"BASQUE": "Basco",
|
||||||
|
"BELARUSIAN": "Bielorusso",
|
||||||
|
"BENGALI": "Bengali",
|
||||||
|
"BISLAMA": "bislamá",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bósnio",
|
||||||
|
"BRETON": "Bretão",
|
||||||
|
"BULGARIAN": "Búlgaro",
|
||||||
|
"BURMESE": "birmanês",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "chamorro",
|
||||||
|
"CHECHEN": "Checheno",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinês",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "chuvash",
|
||||||
|
"CORNISH": "Córnico",
|
||||||
|
"CORSICAN": "córsico",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Croata",
|
||||||
|
"CZECH": "Checo",
|
||||||
|
"DANISH": "Dinamarquês",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "dzonga",
|
||||||
|
"ENGLISH": "Inglês",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estoniano",
|
||||||
|
"EWE": "eve",
|
||||||
|
"FAROESE": "Faroês",
|
||||||
|
"FIJIAN": "fijiano",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finlandês",
|
||||||
|
"FRENCH": "Francês",
|
||||||
|
"FULAH": "fula",
|
||||||
|
"GALICIAN": "Galego",
|
||||||
|
"GANDA": "luganda",
|
||||||
|
"GEORGIAN": "georgiano",
|
||||||
|
"GERMAN": "Alemão",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "Guzerate",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Hauçá",
|
||||||
|
"HEBREW": "Hebreu",
|
||||||
|
"HERERO": "Hereró",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Húngaro",
|
||||||
|
"ICELANDIC": "Islandês",
|
||||||
|
"IDO": "ido",
|
||||||
|
"IGBO": "ibo",
|
||||||
|
"INDONESIAN": "Indonésio",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaque",
|
||||||
|
"IRISH": "Irlandês",
|
||||||
|
"ITALIAN": "Italiano",
|
||||||
|
"JAPANESE": "Japonês",
|
||||||
|
"JAVANESE": "Javanês",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannada",
|
||||||
|
"KANURI": "Canúri",
|
||||||
|
"KASHMIRI": "kashmiri",
|
||||||
|
"KAZAKH": "cazaque",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "komi",
|
||||||
|
"KONGO": "congolês",
|
||||||
|
"KOREAN": "Coreano",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Curdo",
|
||||||
|
"LAO": "Laosiano",
|
||||||
|
"LATIN": "Latim",
|
||||||
|
"LATVIAN": "Letão",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lituano",
|
||||||
|
"LUBA_KATANGA": "luba-catanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macedônio",
|
||||||
|
"MALAGASY": "malgaxe",
|
||||||
|
"MALAY": "Malaio",
|
||||||
|
"MALAYALAM": "malaiala",
|
||||||
|
"MALTESE": "Maltês",
|
||||||
|
"MANX": "Manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "marata",
|
||||||
|
"MARSHALLESE": "Marshalês",
|
||||||
|
"MONGOLIAN": "Mongol",
|
||||||
|
"NAURU": "nauruano",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "dongo",
|
||||||
|
"NEPALI": "Nepalês",
|
||||||
|
"NORTHERN_SAMI": "northern sami",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norueguês",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "ojibwa",
|
||||||
|
"ORIYA": "oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Páli",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persa",
|
||||||
|
"POLISH": "Polaco",
|
||||||
|
"PORTUGUESE": "Português",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "quíchua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romanche",
|
||||||
|
"RUNDI": "rundi",
|
||||||
|
"RUSSIAN": "Russo",
|
||||||
|
"SAMOAN": "Samoano",
|
||||||
|
"SANGO": "sango",
|
||||||
|
"SANSKRIT": "Sânscrito",
|
||||||
|
"SARDINIAN": "Sardo",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Sérvio",
|
||||||
|
"SHONA": "Xona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "sindi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Eslovaco",
|
||||||
|
"SLOVENIAN": "Eslovêno",
|
||||||
|
"SOMALI": "somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "sundanês",
|
||||||
|
"SWAHILI": "suaíli",
|
||||||
|
"SWATI": "swati",
|
||||||
|
"SWEDISH": "Sueco",
|
||||||
|
"TAGALOG": "Tagalo",
|
||||||
|
"TAHITIAN": "Taitiano",
|
||||||
|
"TAJIK": "Tadjique",
|
||||||
|
"TAMIL": "Tâmil",
|
||||||
|
"TATAR": "tatar",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Tailandês",
|
||||||
|
"TIBETAN": "tibetano",
|
||||||
|
"TIGRINYA": "Tigrínia",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "tsonga",
|
||||||
|
"TSWANA": "tswana",
|
||||||
|
"TURKISH": "Turco",
|
||||||
|
"TURKMEN": "turcomano",
|
||||||
|
"TWI": "twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ucraniano",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "urdu",
|
||||||
|
"UZBEK": "usbeque",
|
||||||
|
"VENDA": "venda",
|
||||||
|
"VIETNAMESE": "Vietnamita",
|
||||||
|
"VOLAPUK": "Volapuque",
|
||||||
|
"WALLOON": "walloon",
|
||||||
|
"WELSH": "galês",
|
||||||
|
"WESTERN_FRISIAN": "Frísio ocidental",
|
||||||
|
"WOLOF": "uolofe",
|
||||||
|
"XHOSA": "xosa",
|
||||||
|
"YIDDISH": "iídiche",
|
||||||
|
"YORUBA": "ioruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nova série>",
|
||||||
|
"Add": "Adicionar",
|
||||||
|
"Add Pattern": "Adicionar padrão",
|
||||||
|
"Apply": "Aplicar",
|
||||||
|
"Apply failed: {error}": "Falha ao aplicar: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Tem certeza de que deseja excluir o seguinte padrão de nome de arquivo?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Tem certeza de que deseja excluir a seguinte temporada deslocada?",
|
||||||
|
"Are you sure to delete the following show?": "Tem certeza de que deseja excluir a seguinte série?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Tem certeza de que deseja excluir a seguinte faixa {track_type}?",
|
||||||
|
"Are you sure to delete this tag?": "Tem certeza de que deseja excluir esta tag?",
|
||||||
|
"Audio Layout": "Layout de áudio",
|
||||||
|
"Back": "Voltar",
|
||||||
|
"Cancel": "Cancelar",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Não é possível adicionar outro fluxo com a flag de disposição 'default' ou 'forced' definida",
|
||||||
|
"Changes applied and file reloaded.": "Alterações aplicadas e arquivo recarregado.",
|
||||||
|
"Cleanup": "Limpeza",
|
||||||
|
"Cleanup disabled.": "Limpeza desativada.",
|
||||||
|
"Cleanup enabled.": "Limpeza ativada.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Continuando a sessão de edição.",
|
||||||
|
"Default": "Padrão",
|
||||||
|
"Delete": "Excluir",
|
||||||
|
"Delete Show": "Excluir série",
|
||||||
|
"Deleted media tag {tag!r}.": "Tag de mídia {tag!r} excluída.",
|
||||||
|
"Differences": "Diferenças",
|
||||||
|
"Differences (file->db/output)": "Diferenças (arquivo->BD/saída)",
|
||||||
|
"Discard": "Descartar",
|
||||||
|
"Discard pending metadata changes and quit?": "Descartar alterações pendentes de metadados e sair?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Descartar alterações pendentes de metadados e recarregar o estado do arquivo?",
|
||||||
|
"Down": "Baixo",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Execução simulada: regravaria via arquivo temporário {target_path}",
|
||||||
|
"Edit": "Editar",
|
||||||
|
"Edit Pattern": "Editar padrão",
|
||||||
|
"Edit Show": "Editar série",
|
||||||
|
"Edit filename pattern": "Editar padrão de nome de arquivo",
|
||||||
|
"Edit shifted season": "Editar temporada deslocada",
|
||||||
|
"Edit stream": "Editar fluxo",
|
||||||
|
"Episode Offset": "Deslocamento de episódio",
|
||||||
|
"Episode offset": "Deslocamento de episódio",
|
||||||
|
"File": "Arquivo",
|
||||||
|
"File patterns": "Padrões de arquivo",
|
||||||
|
"First Episode": "Primeiro episódio",
|
||||||
|
"First episode": "Primeiro episódio",
|
||||||
|
"Forced": "Forçado",
|
||||||
|
"Help": "Ajuda",
|
||||||
|
"Help Screen": "Tela de ajuda",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identificar",
|
||||||
|
"Index": "Índice",
|
||||||
|
"Index / Subindex": "Índice / Subíndice",
|
||||||
|
"Index Episode Digits": "Dígitos do índice do episódio",
|
||||||
|
"Index Season Digits": "Dígitos do índice da temporada",
|
||||||
|
"Indicator Edisode Digits": "Dígitos do indicador do episódio",
|
||||||
|
"Indicator Season Digits": "Dígitos do indicador da temporada",
|
||||||
|
"Keep Editing": "Continuar editando",
|
||||||
|
"Keeping pending changes.": "Mantendo alterações pendentes.",
|
||||||
|
"Key": "Chave",
|
||||||
|
"Language": "Idioma",
|
||||||
|
"Last Episode": "Último episódio",
|
||||||
|
"Last episode": "Último episódio",
|
||||||
|
"Layout": "Layout",
|
||||||
|
"Media Tags": "Tags de mídia",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Mais de um fluxo de áudio padrão detectado e nenhum prompt definido",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Mais de um fluxo de áudio padrão detectado! Selecione o fluxo",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Mais de um fluxo de legenda padrão detectado e nenhum prompt definido",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Mais de um fluxo de legenda padrão detectado! Selecione o fluxo",
|
||||||
|
"More than one default video stream detected and no prompt set": "Mais de um fluxo de vídeo padrão detectado e nenhum prompt definido",
|
||||||
|
"More than one default video stream detected! Please select stream": "Mais de um fluxo de vídeo padrão detectado! Selecione o fluxo",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Mais de um fluxo de áudio forçado detectado e nenhum prompt definido",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Mais de um fluxo de áudio forçado detectado! Selecione o fluxo",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Mais de um fluxo de legenda forçada detectado e nenhum prompt definido",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Mais de um fluxo de legenda forçada detectado! Selecione o fluxo",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Mais de um fluxo de vídeo forçado detectado e nenhum prompt definido",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Mais de um fluxo de vídeo forçado detectado! Selecione o fluxo",
|
||||||
|
"Name": "Nome",
|
||||||
|
"New Pattern": "Novo padrão",
|
||||||
|
"New Show": "Nova série",
|
||||||
|
"New filename pattern": "Novo padrão de nome de arquivo",
|
||||||
|
"New shifted season": "Nova temporada deslocada",
|
||||||
|
"New stream": "Novo fluxo",
|
||||||
|
"No": "Não",
|
||||||
|
"No changes to apply.": "Nenhuma alteração para aplicar.",
|
||||||
|
"No changes to revert.": "Nenhuma alteração para reverter.",
|
||||||
|
"Normalization disabled.": "Normalização desativada.",
|
||||||
|
"Normalization enabled.": "Normalização ativada.",
|
||||||
|
"Normalize": "Normalizar",
|
||||||
|
"Notes": "Notas",
|
||||||
|
"Pattern": "Padrão",
|
||||||
|
"Planned Changes (file->edited output)": "Alterações planejadas (arquivo->saída editada)",
|
||||||
|
"Quality": "Qualidade",
|
||||||
|
"Quit": "Sair",
|
||||||
|
"Remove Pattern": "Remover padrão",
|
||||||
|
"Revert": "Reverter",
|
||||||
|
"Reverted pending changes.": "Alterações pendentes revertidas.",
|
||||||
|
"Save": "Salvar",
|
||||||
|
"Season Offset": "Deslocamento de temporada",
|
||||||
|
"Select a stream first.": "Selecione um fluxo primeiro.",
|
||||||
|
"Set Default": "Definir como padrão",
|
||||||
|
"Set Forced": "Definir como forçado",
|
||||||
|
"Settings Screen": "Tela de configurações",
|
||||||
|
"Numbering Mapping": "Temporadas deslocadas",
|
||||||
|
"Show": "Série",
|
||||||
|
"Shows": "Séries",
|
||||||
|
"Source Season": "Temporada de origem",
|
||||||
|
"SrcIndex": "Índice de origem",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Permanecer",
|
||||||
|
"Stream dispositions": "Disposições do fluxo",
|
||||||
|
"Stream tags": "Tags do fluxo",
|
||||||
|
"Streams": "Fluxos",
|
||||||
|
"SubIndex": "Subíndice",
|
||||||
|
"Substitute": "Substituir",
|
||||||
|
"Substitute pattern": "Substituir padrão",
|
||||||
|
"Title": "Título",
|
||||||
|
"Type": "Tipo",
|
||||||
|
"Unable to update selected stream.": "Não foi possível atualizar o fluxo selecionado.",
|
||||||
|
"Up": "Cima",
|
||||||
|
"Update Pattern": "Atualizar padrão",
|
||||||
|
"Updated media tag {tag!r}.": "Tag de mídia {tag!r} atualizada.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Fluxo #{index} ({track_type}) atualizado.",
|
||||||
|
"Value": "Valor",
|
||||||
|
"Year": "Ano",
|
||||||
|
"Yes": "Sim",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "adicionar tag de mídia: chave='{key}' valor='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "adicionar faixa {track_type}: índice={index} idioma={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "anexo",
|
||||||
|
"audio": "áudio",
|
||||||
|
"captions": "legendas",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "alterar tag de mídia: chave='{key}' valor='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "alterar fluxo #{index} ({track_type}:{sub_index}) adicionar disposição={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "alterar fluxo #{index} ({track_type}:{sub_index}) adicionar chave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "alterar fluxo #{index} ({track_type}:{sub_index}) alterar chave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "alterar fluxo #{index} ({track_type}:{sub_index}) remover disposição={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "alterar fluxo #{index} ({track_type}:{sub_index}) remover chave={key} valor={value}",
|
||||||
|
"clean_effects": "apenas efeitos",
|
||||||
|
"comment": "comentário",
|
||||||
|
"default": "padrão",
|
||||||
|
"dependent": "dependente",
|
||||||
|
"descriptions": "descrições",
|
||||||
|
"dub": "dublado",
|
||||||
|
"for pattern": "para o padrão",
|
||||||
|
"forced": "forçado",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "do padrão",
|
||||||
|
"from show": "da série",
|
||||||
|
"hearing_impaired": "deficiência auditiva",
|
||||||
|
"karaoke": "karaokê",
|
||||||
|
"lyrics": "letra",
|
||||||
|
"metadata": "metadados",
|
||||||
|
"non_diegetic": "não diegético",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "padrão #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "remover tag de mídia: chave='{key}' valor='{value}'",
|
||||||
|
"remove stream #{index}": "remover fluxo #{index}",
|
||||||
|
"show #{id}": "série #{id}",
|
||||||
|
"stereo": "estéreo",
|
||||||
|
"still_image": "imagem estática",
|
||||||
|
"sub index": "subíndice",
|
||||||
|
"subtitle": "legenda",
|
||||||
|
"timed_thumbnails": "miniaturas temporizadas",
|
||||||
|
"undefined": "indefinido",
|
||||||
|
"unknown": "desconhecido",
|
||||||
|
"video": "vídeo",
|
||||||
|
"visual_impaired": "deficiência visual"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/ta.json
Normal file
361
assets/i18n/ta.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "அப்காசியன்",
|
||||||
|
"AFAR": "அஃபர்",
|
||||||
|
"AFRIKAANS": "ஆப்ரிக்கான்ச்",
|
||||||
|
"AKAN": "அகான்",
|
||||||
|
"ALBANIAN": "அல்பேனியன்",
|
||||||
|
"AMHARIC": "அம்ஆரிக்",
|
||||||
|
"ARABIC": "அராபிக்",
|
||||||
|
"ARAGONESE": "அரகோன்ச்",
|
||||||
|
"ARMENIAN": "அர்மேனியன்",
|
||||||
|
"ASSAMESE": "அச்சாமி",
|
||||||
|
"AVARIC": "அவாரிக்",
|
||||||
|
"AVESTAN": "அவேச்டன்",
|
||||||
|
"AYMARA": "அய்மாரா",
|
||||||
|
"AZERBAIJANI": "அசெர்பெய்சானி",
|
||||||
|
"BAMBARA": "பம்பரா",
|
||||||
|
"BASHKIR": "பாச்கிர்",
|
||||||
|
"BASQUE": "பாச்க்",
|
||||||
|
"BELARUSIAN": "பெலாருசியன்",
|
||||||
|
"BENGALI": "பெங்காலி",
|
||||||
|
"BISLAMA": "பிச்லாமா",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "போச்னியன்",
|
||||||
|
"BRETON": "ப்ரெடன்",
|
||||||
|
"BULGARIAN": "பல்கேரியன்",
|
||||||
|
"BURMESE": "பர்மீசி",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "சாமோர்ரோ",
|
||||||
|
"CHECHEN": "செக்சன்",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "சைனீச்",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "சுவாச்",
|
||||||
|
"CORNISH": "கோர்னிச்",
|
||||||
|
"CORSICAN": "கோர்சிகேன்",
|
||||||
|
"CREE": "சிரீ",
|
||||||
|
"CROATIAN": "குரேசியன்",
|
||||||
|
"CZECH": "செக்",
|
||||||
|
"DANISH": "டானிச்",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "ட்சொங்க்கா",
|
||||||
|
"ENGLISH": "ஆங்கிலம்",
|
||||||
|
"ESPERANTO": "எச்பெரான்டொ",
|
||||||
|
"ESTONIAN": "எச்டோனியன்",
|
||||||
|
"EWE": "இவ்",
|
||||||
|
"FAROESE": "ஃபரோச்",
|
||||||
|
"FIJIAN": "ஃபிசியன்",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "பின்னிச்",
|
||||||
|
"FRENCH": "பிரெஞ்சு",
|
||||||
|
"FULAH": "ஃபுல்லா",
|
||||||
|
"GALICIAN": "காலிசியன்",
|
||||||
|
"GANDA": "கான்டா",
|
||||||
|
"GEORGIAN": "சியார்சியன்",
|
||||||
|
"GERMAN": "செர்மன்",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "குர்ரானி",
|
||||||
|
"GUJARATI": "குசராத்தி",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "ஔசா",
|
||||||
|
"HEBREW": "ஈப்ரு",
|
||||||
|
"HERERO": "இரீரோ",
|
||||||
|
"HINDI": "இந்தி",
|
||||||
|
"HIRI_MOTU": "இரி மோட்டு",
|
||||||
|
"HUNGARIAN": "அங்கேரியன்",
|
||||||
|
"ICELANDIC": "ஐச்லாண்டிக்",
|
||||||
|
"IDO": "ஐடூ",
|
||||||
|
"IGBO": "இக்போ",
|
||||||
|
"INDONESIAN": "இந்தோனேசியன்",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "இனுடிடட்",
|
||||||
|
"INUPIAQ": "இனுபைக்யூ",
|
||||||
|
"IRISH": "ஐரிச்",
|
||||||
|
"ITALIAN": "இத்தாலியன்",
|
||||||
|
"JAPANESE": "சப்பானிய",
|
||||||
|
"JAVANESE": "சவானிச்",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "கன்னடம்",
|
||||||
|
"KANURI": "கனுரி",
|
||||||
|
"KASHMIRI": "காச்மீரி",
|
||||||
|
"KAZAKH": "கசாக்ச்",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "கின்யார்வான்டா",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "கோமி",
|
||||||
|
"KONGO": "காங்கோ",
|
||||||
|
"KOREAN": "கொரியன்",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "குர்திச்",
|
||||||
|
"LAO": "லாவோ",
|
||||||
|
"LATIN": "லத்தீன்",
|
||||||
|
"LATVIAN": "லாட்வியன்",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "லின்காலா",
|
||||||
|
"LITHUANIAN": "லிதுவேனியன்",
|
||||||
|
"LUBA_KATANGA": "லூபா-கடான்கா",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "மேசடோனியன்",
|
||||||
|
"MALAGASY": "மலகாசி",
|
||||||
|
"MALAY": "மலாய்",
|
||||||
|
"MALAYALAM": "மலையாளம்",
|
||||||
|
"MALTESE": "மல்டீச்",
|
||||||
|
"MANX": "மான்ச்",
|
||||||
|
"MAORI": "மௌரி",
|
||||||
|
"MARATHI": "மராத்தி",
|
||||||
|
"MARSHALLESE": "மார்சலீசீ",
|
||||||
|
"MONGOLIAN": "மங்கோலியன்",
|
||||||
|
"NAURU": "நவூரு",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "நடோன்கா",
|
||||||
|
"NEPALI": "நேபாலி",
|
||||||
|
"NORTHERN_SAMI": "கிழக்கு சாமி",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "நார்வேசியன்",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "ஒசிப்வா",
|
||||||
|
"ORIYA": "ஒரியா",
|
||||||
|
"OROMO": "ஒரோமோ",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "பாலி",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "பெர்சியன்",
|
||||||
|
"POLISH": "போலிச்",
|
||||||
|
"PORTUGUESE": "போர்த்துக்கீசிய",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "க்யுசோ",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "ரோமான்ச்ச்",
|
||||||
|
"RUNDI": "ருண்டி",
|
||||||
|
"RUSSIAN": "ரச்யன்",
|
||||||
|
"SAMOAN": "சாமோயன்",
|
||||||
|
"SANGO": "சான்ங்கோ",
|
||||||
|
"SANSKRIT": "சான்ச்கிரிட்",
|
||||||
|
"SARDINIAN": "சார்டினியன்",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "செர்பியன்",
|
||||||
|
"SHONA": "சோனா",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "சிந்தி",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "சுலோவாக்",
|
||||||
|
"SLOVENIAN": "ச்லோவெனியன்",
|
||||||
|
"SOMALI": "சோமாலி",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "சூடானீச்",
|
||||||
|
"SWAHILI": "ச்வாஇலி",
|
||||||
|
"SWATI": "ச்வாதி",
|
||||||
|
"SWEDISH": "சுவீடிச்",
|
||||||
|
"TAGALOG": "டங்லாக்",
|
||||||
|
"TAHITIAN": "தஇதியன்",
|
||||||
|
"TAJIK": "தாசிக்",
|
||||||
|
"TAMIL": "தமிழ்",
|
||||||
|
"TATAR": "டாட்டர்",
|
||||||
|
"TELUGU": "தெலுங்கு",
|
||||||
|
"THAI": "தாய்",
|
||||||
|
"TIBETAN": "திபெத்திய",
|
||||||
|
"TIGRINYA": "தைக்ரின்யா",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "ட்சாங்கோ",
|
||||||
|
"TSWANA": "ட்ச்வனா",
|
||||||
|
"TURKISH": "துருக்கி",
|
||||||
|
"TURKMEN": "டர்க்மென்",
|
||||||
|
"TWI": "டிவி",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "உக்ரெனியன்",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "உருது",
|
||||||
|
"UZBEK": "உச்பெக்",
|
||||||
|
"VENDA": "வேண்டா",
|
||||||
|
"VIETNAMESE": "வியட்னாம்",
|
||||||
|
"VOLAPUK": "வோலாபுக்",
|
||||||
|
"WALLOON": "வாலூன்",
|
||||||
|
"WELSH": "வெல்ச்",
|
||||||
|
"WESTERN_FRISIAN": "மேற்கு ஃபிரிசியன்",
|
||||||
|
"WOLOF": "ஓலோஃப்",
|
||||||
|
"XHOSA": "சோசா",
|
||||||
|
"YIDDISH": "இட்டிச்",
|
||||||
|
"YORUBA": "யோருபா",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "சுலு"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<புதிய தொடர்>",
|
||||||
|
"Add": "சேர்",
|
||||||
|
"Add Pattern": "வடிவத்தை சேர்",
|
||||||
|
"Apply": "பயன்படுத்து",
|
||||||
|
"Apply failed: {error}": "பயன்படுத்தல் தோல்வியடைந்தது: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "பின்வரும் கோப்பு பெயர் வடிவத்தை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete the following shifted season?": "பின்வரும் மாற்றிய சீசனை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete the following show?": "பின்வரும் தொடரை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "பின்வரும் {track_type} ஸ்ட்ரீமை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete this tag?": "இந்த குறிச்சொல்லை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Audio Layout": "ஒலி அமைப்பு",
|
||||||
|
"Back": "பின்",
|
||||||
|
"Cancel": "ரத்து",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "'default' அல்லது 'forced' disposition கொடி அமைந்த மற்றொரு ஸ்ட்ரீமை சேர்க்க முடியாது",
|
||||||
|
"Changes applied and file reloaded.": "மாற்றங்கள் பயன்படுத்தப்பட்டு கோப்பு மீளேற்றப்பட்டது.",
|
||||||
|
"Cleanup": "சுத்திகரிப்பு",
|
||||||
|
"Cleanup disabled.": "சுத்திகரிப்பு முடக்கப்பட்டது.",
|
||||||
|
"Cleanup enabled.": "சுத்திகரிப்பு இயக்கப்பட்டது.",
|
||||||
|
"Codec": "கோடெக்",
|
||||||
|
"Continuing edit session.": "திருத்த அமர்வு தொடர்கிறது.",
|
||||||
|
"Default": "இயல்புநிலை",
|
||||||
|
"Delete": "நீக்கு",
|
||||||
|
"Delete Show": "தொடரை நீக்கு",
|
||||||
|
"Deleted media tag {tag!r}.": "மீடியா குறிச்சொல் {tag!r} நீக்கப்பட்டது.",
|
||||||
|
"Differences": "வேறுபாடுகள்",
|
||||||
|
"Differences (file->db/output)": "வேறுபாடுகள் (கோப்பு->DB/வெளியீடு)",
|
||||||
|
"Discard": "கைவிடு",
|
||||||
|
"Discard pending metadata changes and quit?": "நிலுவையில் உள்ள மெட்டாடேட்டா மாற்றங்களை கைவிட்டு வெளியேறவா?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "நிலுவையில் உள்ள மெட்டாடேட்டா மாற்றங்களை கைவிட்டு கோப்பு நிலையை மீளேற்றவா?",
|
||||||
|
"Down": "கீழ்",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Dry-run: தற்காலிக கோப்பு {target_path} வழியாக மறுஎழுதப்படும்",
|
||||||
|
"Edit": "திருத்து",
|
||||||
|
"Edit Pattern": "வடிவத்தை திருத்து",
|
||||||
|
"Edit Show": "தொடரை திருத்து",
|
||||||
|
"Edit filename pattern": "கோப்பு பெயர் வடிவத்தை திருத்து",
|
||||||
|
"Edit shifted season": "மாற்றிய சீசனை திருத்து",
|
||||||
|
"Edit stream": "ஸ்ட்ரீமை திருத்து",
|
||||||
|
"Episode Offset": "அத்தியாய இடச்சரிவு",
|
||||||
|
"Episode offset": "அத்தியாய இடச்சரிவு",
|
||||||
|
"File": "கோப்பு",
|
||||||
|
"File patterns": "கோப்பு வடிவங்கள்",
|
||||||
|
"First Episode": "முதல் அத்தியாயம்",
|
||||||
|
"First episode": "முதல் அத்தியாயம்",
|
||||||
|
"Forced": "கட்டாயம்",
|
||||||
|
"Help": "உதவி",
|
||||||
|
"Help Screen": "உதவி திரை",
|
||||||
|
"ID": "அடையாளம்",
|
||||||
|
"Identify": "அடையாளம் காட்டு",
|
||||||
|
"Index": "சுட்டி",
|
||||||
|
"Index / Subindex": "சுட்டி / துணைச்சுட்டி",
|
||||||
|
"Index Episode Digits": "அத்தியாய சுட்டி இலக்கங்கள்",
|
||||||
|
"Index Season Digits": "சீசன் சுட்டி இலக்கங்கள்",
|
||||||
|
"Indicator Edisode Digits": "அத்தியாய குறியீட்டு இலக்கங்கள்",
|
||||||
|
"Indicator Season Digits": "சீசன் குறியீட்டு இலக்கங்கள்",
|
||||||
|
"Keep Editing": "திருத்தலை தொடரு",
|
||||||
|
"Keeping pending changes.": "நிலுவையில் உள்ள மாற்றங்கள் வைக்கப்படுகின்றன.",
|
||||||
|
"Key": "சாவி",
|
||||||
|
"Language": "மொழி",
|
||||||
|
"Last Episode": "கடைசி அத்தியாயம்",
|
||||||
|
"Last episode": "கடைசி அத்தியாயம்",
|
||||||
|
"Layout": "அமைப்பு",
|
||||||
|
"Media Tags": "மீடியா குறிச்சொற்கள்",
|
||||||
|
"More than one default audio stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட இயல்புநிலை ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one default audio stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட இயல்புநிலை ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one default video stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one default video stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட கட்டாய ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட கட்டாய ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட கட்டாய வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட கட்டாய வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one forced video stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட கட்டாய வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one forced video stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட கட்டாய வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"Name": "பெயர்",
|
||||||
|
"New Pattern": "புதிய வடிவம்",
|
||||||
|
"New Show": "புதிய தொடர்",
|
||||||
|
"New filename pattern": "புதிய கோப்பு பெயர் வடிவம்",
|
||||||
|
"New shifted season": "புதிய மாற்றிய சீசன்",
|
||||||
|
"New stream": "புதிய ஸ்ட்ரீம்",
|
||||||
|
"No": "இல்லை",
|
||||||
|
"No changes to apply.": "பயன்படுத்த மாற்றங்கள் இல்லை.",
|
||||||
|
"No changes to revert.": "மீட்டெடுக்க மாற்றங்கள் இல்லை.",
|
||||||
|
"Normalization disabled.": "சீரமைப்பு முடக்கப்பட்டது.",
|
||||||
|
"Normalization enabled.": "சீரமைப்பு இயக்கப்பட்டது.",
|
||||||
|
"Normalize": "சீரமை",
|
||||||
|
"Notes": "குறிப்புகள்",
|
||||||
|
"Pattern": "வடிவம்",
|
||||||
|
"Planned Changes (file->edited output)": "திட்டமிட்ட மாற்றங்கள் (கோப்பு->திருத்திய வெளியீடு)",
|
||||||
|
"Quality": "தரம்",
|
||||||
|
"Quit": "வெளியேறு",
|
||||||
|
"Remove Pattern": "வடிவத்தை நீக்கு",
|
||||||
|
"Revert": "மீட்டு",
|
||||||
|
"Reverted pending changes.": "நிலுவையில் உள்ள மாற்றங்கள் மீட்டெடுக்கப்பட்டன.",
|
||||||
|
"Save": "சேமி",
|
||||||
|
"Season Offset": "சீசன் இடச்சரிவு",
|
||||||
|
"Select a stream first.": "முதலில் ஒரு ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்.",
|
||||||
|
"Set Default": "இயல்புநிலையாக அமை",
|
||||||
|
"Set Forced": "கட்டாயமாக அமை",
|
||||||
|
"Settings Screen": "அமைப்புகள் திரை",
|
||||||
|
"Numbering Mapping": "மாற்றிய சீசன்கள்",
|
||||||
|
"Show": "தொடர்",
|
||||||
|
"Shows": "தொடர்கள்",
|
||||||
|
"Source Season": "மூல சீசன்",
|
||||||
|
"SrcIndex": "மூலச் சுட்டி",
|
||||||
|
"Status": "நிலை",
|
||||||
|
"Stay": "இரு",
|
||||||
|
"Stream dispositions": "ஸ்ட்ரீம் disposition-கள்",
|
||||||
|
"Stream tags": "ஸ்ட்ரீம் குறிச்சொற்கள்",
|
||||||
|
"Streams": "ஸ்ட்ரீம்கள்",
|
||||||
|
"SubIndex": "துணைச்சுட்டி",
|
||||||
|
"Substitute": "மாற்று",
|
||||||
|
"Substitute pattern": "வடிவத்தை மாற்று",
|
||||||
|
"Title": "தலைப்பு",
|
||||||
|
"Type": "வகை",
|
||||||
|
"Unable to update selected stream.": "தேர்ந்தெடுக்கப்பட்ட ஸ்ட்ரீமைப் புதுப்பிக்க முடியவில்லை.",
|
||||||
|
"Up": "மேல்",
|
||||||
|
"Update Pattern": "வடிவத்தை புதுப்பி",
|
||||||
|
"Updated media tag {tag!r}.": "மீடியா குறிச்சொல் {tag!r} புதுப்பிக்கப்பட்டது.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "ஸ்ட்ரீம் #{index} ({track_type}) புதுப்பிக்கப்பட்டது.",
|
||||||
|
"Value": "மதிப்பு",
|
||||||
|
"Year": "ஆண்டு",
|
||||||
|
"Yes": "ஆம்",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "மீடியா குறிச்சொல் சேர்: key='{key}' value='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "{track_type} ஸ்ட்ரீம் சேர்: index={index} lang={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "இணைப்பு",
|
||||||
|
"audio": "ஒலி",
|
||||||
|
"captions": "உரைப்பதிவுகள்",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "மீடியா குறிச்சொல் மாற்று: key='{key}' value='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) disposition சேர்={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) key சேர்={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) key மாற்று={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) disposition நீக்கு={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) key நீக்கு={key} value={value}",
|
||||||
|
"clean_effects": "ஒலி விளைவுகள் மட்டும்",
|
||||||
|
"comment": "கருத்துரை",
|
||||||
|
"default": "இயல்புநிலை",
|
||||||
|
"dependent": "சார்ந்த",
|
||||||
|
"descriptions": "விளக்கங்கள்",
|
||||||
|
"dub": "டப்",
|
||||||
|
"for pattern": "வடிவத்திற்கு",
|
||||||
|
"forced": "கட்டாயம்",
|
||||||
|
"from": "இருந்து",
|
||||||
|
"from pattern": "வடிவத்திலிருந்து",
|
||||||
|
"from show": "தொடரிலிருந்து",
|
||||||
|
"hearing_impaired": "கேள்வித்திறன் குறைபாடு",
|
||||||
|
"karaoke": "கரோக்கே",
|
||||||
|
"lyrics": "பாடல்வரிகள்",
|
||||||
|
"metadata": "மெட்டாடேட்டா",
|
||||||
|
"non_diegetic": "அல்லாத-டைஜெடிக்",
|
||||||
|
"original": "மூலம்",
|
||||||
|
"pattern #{id}": "வடிவு #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "மீடியா குறிச்சொல் நீக்கு: key='{key}' value='{value}'",
|
||||||
|
"remove stream #{index}": "ஸ்ட்ரீம் #{index} நீக்கு",
|
||||||
|
"show #{id}": "தொடர் #{id}",
|
||||||
|
"stereo": "ஸ்டீரியோ",
|
||||||
|
"still_image": "நிலைப்படம்",
|
||||||
|
"sub index": "துணைச்சுட்டி",
|
||||||
|
"subtitle": "வசனம்",
|
||||||
|
"timed_thumbnails": "நேர நிர்ணய சிறுபடங்கள்",
|
||||||
|
"undefined": "வரையறுக்கப்படாத",
|
||||||
|
"unknown": "தெரியாத",
|
||||||
|
"video": "வீடியோ",
|
||||||
|
"visual_impaired": "பார்வைத்திறன் குறைபாடு"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
from .track_type import TrackType
|
|
||||||
|
|
||||||
class AudioLayout(Enum):
    """Audio channel layouts known to ffx.

    Each member's value maps the ffprobe ``channel_layout`` label to a
    stable integer index used for lookups and persistence.
    """

    LAYOUT_STEREO = {"label": "stereo", "index": 1}
    LAYOUT_5_1 = {"label": "5.1(side)", "index": 2}
    LAYOUT_6_1 = {"label": "6.1", "index": 3}
    LAYOUT_7_1 = {"label": "7.1", "index": 4}  # TODO: Does this exist?

    LAYOUT_6CH = {"label": "6ch", "index": 5}
    LAYOUT_5_0 = {"label": "5.0(side)", "index": 6}

    LAYOUT_UNDEFINED = {"label": "undefined", "index": 0}

    def label(self):
        """Returns the audio layout as string"""
        return str(self.value['label'])

    def index(self):
        """Returns the audio layout as integer"""
        return int(self.value['index'])

    @staticmethod
    def fromLabel(label: str):
        """Return the layout whose label matches, or LAYOUT_UNDEFINED.

        Replaces the original list-comprehension + bare ``except:`` (which
        also swallowed SystemExit/KeyboardInterrupt) with ``next()`` and an
        explicit default.
        """
        return next((a for a in AudioLayout if a.value['label'] == str(label)),
                    AudioLayout.LAYOUT_UNDEFINED)

    @staticmethod
    def fromIndex(index: int):
        """Return the layout with the given integer index, or LAYOUT_UNDEFINED.

        Non-numeric input (the only failure mode of the original bare
        ``except:``) still maps to LAYOUT_UNDEFINED.
        """
        try:
            idx = int(index)
        except (TypeError, ValueError):
            return AudioLayout.LAYOUT_UNDEFINED
        return next((a for a in AudioLayout if a.value['index'] == idx),
                    AudioLayout.LAYOUT_UNDEFINED)

    @staticmethod
    def identify(streamObj):
        """Identify the layout of an ffprobe audio-stream dict.

        Prefers an exact ``channel_layout`` label match; falls back to the
        raw channel count (6 channels -> generic 6ch); otherwise returns
        LAYOUT_UNDEFINED. Raises if streamObj is not an ffprobe audio
        stream mapping.
        """

        FFPROBE_LAYOUT_KEY = 'channel_layout'
        FFPROBE_CHANNELS_KEY = 'channels'
        FFPROBE_CODEC_TYPE_KEY = 'codec_type'

        # isinstance (rather than exact-type check) also accepts dict
        # subclasses; a missing/wrong codec_type still raises as before.
        if (not isinstance(streamObj, dict)
                or streamObj.get(FFPROBE_CODEC_TYPE_KEY) != TrackType.AUDIO.label()):
            raise Exception('Not an ffprobe audio stream object')

        layoutLabel = streamObj.get(FFPROBE_LAYOUT_KEY)
        if layoutLabel is not None:
            for layout in AudioLayout:
                if layout.label() == layoutLabel:
                    return layout

        # No known layout string: six channels without a label is treated
        # as the generic 6ch layout.
        if (FFPROBE_CHANNELS_KEY in streamObj
                and int(streamObj[FFPROBE_CHANNELS_KEY]) == 6):
            return AudioLayout.LAYOUT_6CH

        return AudioLayout.LAYOUT_UNDEFINED
|
|
||||||
@@ -1,142 +0,0 @@
|
|||||||
import os, json
|
|
||||||
|
|
||||||
class ConfigurationController():
    """Resolves ffx's configuration, database and log locations.

    Settings are read from ``~/.local/etc/ffx.json`` when present; the
    database and log directories default to ``~/.local/var`` and are
    created on construction.
    """

    CONFIG_FILENAME = 'ffx.json'
    DATABASE_FILENAME = 'ffx.db'
    LOG_FILENAME = 'ffx.log'

    DATABASE_PATH_CONFIG_KEY = 'databasePath'
    LOG_DIRECTORY_CONFIG_KEY = 'logDirectory'
    OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate'

    def __init__(self):

        home = os.path.expanduser("~")
        self.__homeDir = home
        self.__localVarDir = os.path.join(home, '.local', 'var')
        self.__localEtcDir = os.path.join(home, '.local', 'etc')

        # Configuration is optional: a missing .local/etc/ffx.json simply
        # leaves all settings at their defaults.
        self.__configurationData = {}
        self.__configFilePath = os.path.join(self.__localEtcDir,
                                             ConfigurationController.CONFIG_FILENAME)
        if os.path.isfile(self.__configFilePath):
            with open(self.__configFilePath, 'r') as fh:
                self.__configurationData = json.load(fh)

        configuredDbPath = self.__configurationData.get(
            ConfigurationController.DATABASE_PATH_CONFIG_KEY)
        if configuredDbPath is not None:
            # Explicit database path from config; make sure its parent exists.
            self.__databaseFilePath = configuredDbPath
            os.makedirs(os.path.dirname(self.__databaseFilePath), exist_ok=True)
        else:
            varDir = os.path.join(self.__localVarDir, 'ffx')
            os.makedirs(varDir, exist_ok=True)
            self.__databaseFilePath = os.path.join(
                varDir, ConfigurationController.DATABASE_FILENAME)

        self.__logDir = self.__configurationData.get(
            ConfigurationController.LOG_DIRECTORY_CONFIG_KEY,
            os.path.join(self.__localVarDir, 'log'))
        os.makedirs(self.__logDir, exist_ok=True)

    def getHomeDirectory(self):
        """Return the user's home directory."""
        return self.__homeDir

    def getLogFilePath(self):
        """Return the full path of the ffx log file."""
        return os.path.join(self.__logDir, ConfigurationController.LOG_FILENAME)

    def getDatabaseFilePath(self):
        """Return the resolved database file path."""
        return self.__databaseFilePath

    def getData(self):
        """Return the raw configuration dictionary."""
        return self.__configurationData
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# def addPattern(self, patternDescriptor):
|
|
||||||
#
|
|
||||||
# try:
|
|
||||||
#
|
|
||||||
# s = self.Session()
|
|
||||||
# q = s.query(Pattern).filter(Pattern.show_id == int(patternDescriptor['show_id']),
|
|
||||||
# Pattern.pattern == str(patternDescriptor['pattern']))
|
|
||||||
#
|
|
||||||
# if not q.count():
|
|
||||||
# pattern = Pattern(show_id = int(patternDescriptor['show_id']),
|
|
||||||
# pattern = str(patternDescriptor['pattern']))
|
|
||||||
# s.add(pattern)
|
|
||||||
# s.commit()
|
|
||||||
# return pattern.getId()
|
|
||||||
# else:
|
|
||||||
# return 0
|
|
||||||
#
|
|
||||||
# except Exception as ex:
|
|
||||||
# raise click.ClickException(f"PatternController.addPattern(): {repr(ex)}")
|
|
||||||
# finally:
|
|
||||||
# s.close()
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# def updatePattern(self, patternId, patternDescriptor):
|
|
||||||
#
|
|
||||||
# try:
|
|
||||||
# s = self.Session()
|
|
||||||
# q = s.query(Pattern).filter(Pattern.id == int(patternId))
|
|
||||||
#
|
|
||||||
# if q.count():
|
|
||||||
#
|
|
||||||
# pattern = q.first()
|
|
||||||
#
|
|
||||||
# pattern.show_id = int(patternDescriptor['show_id'])
|
|
||||||
# pattern.pattern = str(patternDescriptor['pattern'])
|
|
||||||
#
|
|
||||||
# s.commit()
|
|
||||||
# return True
|
|
||||||
#
|
|
||||||
# else:
|
|
||||||
# return False
|
|
||||||
#
|
|
||||||
# except Exception as ex:
|
|
||||||
# raise click.ClickException(f"PatternController.updatePattern(): {repr(ex)}")
|
|
||||||
# finally:
|
|
||||||
# s.close()
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# def findPattern(self, patternDescriptor):
|
|
||||||
#
|
|
||||||
# try:
|
|
||||||
# s = self.Session()
|
|
||||||
# q = s.query(Pattern).filter(Pattern.show_id == int(patternDescriptor['show_id']), Pattern.pattern == str(patternDescriptor['pattern']))
|
|
||||||
#
|
|
||||||
# if q.count():
|
|
||||||
# pattern = q.first()
|
|
||||||
# return int(pattern.id)
|
|
||||||
# else:
|
|
||||||
# return None
|
|
||||||
#
|
|
||||||
# except Exception as ex:
|
|
||||||
# raise click.ClickException(f"PatternController.findPattern(): {repr(ex)}")
|
|
||||||
# finally:
|
|
||||||
# s.close()
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# def getPattern(self, patternId : int):
|
|
||||||
#
|
|
||||||
# if type(patternId) is not int:
|
|
||||||
# raise ValueError(f"PatternController.getPattern(): Argument patternId is required to be of type int")
|
|
||||||
#
|
|
||||||
# try:
|
|
||||||
# s = self.Session()
|
|
||||||
# q = s.query(Pattern).filter(Pattern.id == int(patternId))
|
|
||||||
#
|
|
||||||
# return q.first() if q.count() else None
|
|
||||||
#
|
|
||||||
# except Exception as ex:
|
|
||||||
# raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}")
|
|
||||||
# finally:
|
|
||||||
# s.close()
|
|
||||||
#
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
# ffx release version and the database schema version this release requires.
VERSION='0.2.3'
DATABASE_VERSION = 2

# Default video quality value (used with VP9/H264, see --quality) and
# default AV1 encoder preset (see --preset).
DEFAULT_QUALITY = 32
DEFAULT_AV1_PRESET = 5

# Default audio bitrates in kbit/s, keyed by target layout/codec.
# NOTE(review): these are strings while the CLI options that consume them
# declare type=int — confirm click coerces the defaults as intended.
DEFAULT_STEREO_BANDWIDTH = "112"
DEFAULT_AC3_BANDWIDTH = "256"
DEFAULT_DTS_BANDWIDTH = "320"
DEFAULT_7_1_BANDWIDTH = "384"

# Defaults for the --cut feature; presumably seconds — TODO confirm.
DEFAULT_cut_start = 60
DEFAULT_cut_length = 180

# Jinja-style template for generated output filenames.
DEFAULT_OUTPUT_FILENAME_TEMPLATE = '{{ ffx_show_name }} - {{ ffx_index }}{{ ffx_index_separator }}{{ ffx_episode_name }}{{ ffx_indicator_separator }}{{ ffx_indicator }}'
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
import os, click
|
|
||||||
|
|
||||||
from sqlalchemy import create_engine
|
|
||||||
from sqlalchemy.orm import sessionmaker
|
|
||||||
|
|
||||||
from ffx.model.show import Base
|
|
||||||
|
|
||||||
from ffx.model.property import Property
|
|
||||||
|
|
||||||
from ffx.constants import DATABASE_VERSION
|
|
||||||
|
|
||||||
|
|
||||||
DATABASE_VERSION_KEY = 'database_version'
|
|
||||||
|
|
||||||
class DatabaseVersionException(Exception):
    """Raised when the stored database schema version does not match the
    version this release requires."""

    def __init__(self, errorMessage):
        # Pure pass-through; the class exists only so callers can catch
        # version mismatches specifically.
        super().__init__(errorMessage)
|
|
||||||
|
|
||||||
def databaseContext(databasePath: str = ''):
    """Build the SQLAlchemy context for the ffx SQLite database.

    databasePath semantics:
      * None  -> in-memory database (``sqlite:///:memory:``)
      * ''    -> default location ``~/.local/var/ffx/ffx.db`` (created if missing)
      * other -> used verbatim as the SQLite file path

    Returns a dict with keys 'url', 'engine' and 'session' (a sessionmaker).
    Raises DatabaseVersionException (via ensureDatabaseVersion) when an
    existing database carries a different schema version.
    """

    databaseContext = {}

    if databasePath is None:
        # sqlite:///:memory:
        databasePath = ':memory:'
    elif not databasePath:
        homeDir = os.path.expanduser("~")
        ffxVarDir = os.path.join(homeDir, '.local', 'var', 'ffx')
        # exist_ok avoids the check-then-create race the previous
        # os.path.exists()/os.makedirs() pair had.
        os.makedirs(ffxVarDir, exist_ok=True)
        databasePath = os.path.join(ffxVarDir, 'ffx.db')

    databaseContext['url'] = f"sqlite:///{databasePath}"
    databaseContext['engine'] = create_engine(databaseContext['url'])
    databaseContext['session'] = sessionmaker(bind=databaseContext['engine'])

    # Create any missing tables before the version check queries them.
    Base.metadata.create_all(databaseContext['engine'])

    ensureDatabaseVersion(databaseContext)

    return databaseContext
|
|
||||||
|
|
||||||
def ensureDatabaseVersion(databaseContext):
    """Verify, or initialize, the database's stored schema version.

    A database with no recorded version (0/unset) is stamped with
    DATABASE_VERSION; an existing database with a different version makes
    this raise DatabaseVersionException.
    """

    currentDatabaseVersion = getDatabaseVersion(databaseContext)

    if not currentDatabaseVersion:
        # Fresh database: record the current schema version and stop.
        setDatabaseVersion(databaseContext, DATABASE_VERSION)
        return

    if currentDatabaseVersion != DATABASE_VERSION:
        raise DatabaseVersionException(f"Current database version ({currentDatabaseVersion}) does not match required ({DATABASE_VERSION})")
|
|
||||||
|
|
||||||
|
|
||||||
def getDatabaseVersion(databaseContext):
    """Return the schema version stored in the Property table, 0 if unset.

    Raises click.ClickException on any database error.
    """

    s = None
    try:
        Session = databaseContext['session']
        s = Session()
        q = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY)

        return int(q.first().value) if q.count() else 0

    except Exception as ex:
        raise click.ClickException(f"getDatabaseVersion(): {repr(ex)}")
    finally:
        # Guard: in the original, a failure creating the session left `s`
        # unbound and the finally clause raised NameError, masking the
        # real error.
        if s is not None:
            s.close()
|
|
||||||
|
|
||||||
|
|
||||||
def setDatabaseVersion(databaseContext, databaseVersion: int):
    """Insert or update the stored schema version property.

    Raises click.ClickException on any database error.
    """

    s = None
    try:
        Session = databaseContext['session']
        s = Session()

        q = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY)

        dbVersion = int(databaseVersion)

        # Update the existing row when present, otherwise insert a new one.
        versionProperty = q.first()
        if versionProperty:
            versionProperty.value = str(dbVersion)
        else:
            versionProperty = Property(key = DATABASE_VERSION_KEY,
                                       value = str(dbVersion))
            s.add(versionProperty)
        s.commit()

    except Exception as ex:
        raise click.ClickException(f"setDatabaseVersion(): {repr(ex)}")
    finally:
        # `s` may be unbound if session construction failed; closing
        # unconditionally (as before) masked the original error.
        if s is not None:
            s.close()
|
|
||||||
@@ -1,809 +0,0 @@
|
|||||||
#! /usr/bin/python3
|
|
||||||
|
|
||||||
import os, click, time, logging, shutil
|
|
||||||
|
|
||||||
from ffx.configuration_controller import ConfigurationController
|
|
||||||
|
|
||||||
from ffx.file_properties import FileProperties
|
|
||||||
|
|
||||||
from ffx.ffx_app import FfxApp
|
|
||||||
from ffx.ffx_controller import FfxController
|
|
||||||
from ffx.tmdb_controller import TmdbController
|
|
||||||
|
|
||||||
from ffx.database import databaseContext
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
from ffx.video_encoder import VideoEncoder
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
|
|
||||||
from ffx.process import executeProcess
|
|
||||||
from ffx.helper import filterFilename, substituteTmdbFilename
|
|
||||||
from ffx.helper import getEpisodeFileBasename
|
|
||||||
|
|
||||||
from ffx.constants import DEFAULT_STEREO_BANDWIDTH, DEFAULT_AC3_BANDWIDTH, DEFAULT_DTS_BANDWIDTH, DEFAULT_7_1_BANDWIDTH
|
|
||||||
|
|
||||||
from ffx.filter.quality_filter import QualityFilter
|
|
||||||
from ffx.filter.preset_filter import PresetFilter
|
|
||||||
|
|
||||||
from ffx.filter.crop_filter import CropFilter
|
|
||||||
from ffx.filter.nlmeans_filter import NlmeansFilter
|
|
||||||
|
|
||||||
from ffx.constants import VERSION
|
|
||||||
|
|
||||||
from ffx.shifted_season_controller import ShiftedSeasonController
|
|
||||||
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
@click.option('--database-file', type=str, default='', help='Path to database file')
@click.option('-v', '--verbose', type=int, default=0, help='Set verbosity of output')
@click.option("--dry-run", is_flag=True, default=False)
def ffx(ctx, database_file, verbose, dry_run):
    """FFX"""

    config = ConfigurationController()

    # Shared click context: configuration, database handle and run flags.
    ctx.obj = {
        'config': config,
        'database': databaseContext(databasePath=database_file
                                    if database_file else config.getDatabaseFilePath()),
        'dry_run': dry_run,
        'verbosity': verbose,
    }

    # Logging levels: CRITICAL 50, ERROR 40, WARNING 30, INFO 20, DEBUG 10.
    # Each -v step lowers the threshold one level, floored at DEBUG.
    fileLevel = max(40 - verbose * 10, 10)
    consoleLevel = max(20 - verbose * 10, 10)

    logger = logging.getLogger('FFX')
    logger.setLevel(logging.DEBUG)

    fileHandler = logging.FileHandler(config.getLogFilePath())
    fileHandler.setLevel(fileLevel)
    fileHandler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

    consoleHandler = logging.StreamHandler()
    consoleHandler.setLevel(consoleLevel)
    consoleHandler.setFormatter(logging.Formatter('%(message)s'))

    logger.addHandler(consoleHandler)
    logger.addHandler(fileHandler)

    ctx.obj['logger'] = logger
|
|
||||||
|
|
||||||
|
|
||||||
# Define a subcommand
|
|
||||||
@ffx.command()
def version():
    """Print the ffx version string."""
    click.echo(VERSION)
|
|
||||||
|
|
||||||
|
|
||||||
# Another subcommand
|
|
||||||
@ffx.command()
def help():
    """Print a short usage summary."""
    # Two echo calls keep the blank line after the version header.
    click.echo(f"ffx {VERSION}\n")
    click.echo(f"Usage: ffx [input file] [output file] [vp9|av1] [q=[nn[,nn,...]]] [p=nn] [a=nnn[k]] [ac3=nnn[k]] [dts=nnn[k]] [crop]")
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('filename', nargs=1)
def inspect(ctx, filename):
    """Inspect a single media file via the FfxApp 'inspect' command."""

    ctx.obj['command'] = 'inspect'
    ctx.obj['arguments'] = {'filename': filename}

    FfxApp(ctx.obj).run()
|
|
||||||
|
|
||||||
|
|
||||||
def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, targetDirectory = ''):
    """Build the ffmpeg argument list that extracts one stream to its own file.

    The selected stream is stream-copied (no re-encode) into
    ``<targetDirectory>/<targetPrefix>.<codec extension>``.
    """

    trackType = trackDescriptor.getType()
    trackCodec = trackDescriptor.getCodec()

    targetPathBase = (os.path.join(targetDirectory, targetPrefix)
                      if targetDirectory else targetPrefix)

    # Executable and input file.
    tokens = list(FfxController.COMMAND_TOKENS)
    tokens += ['-i', sourcePath]

    # Select exactly this stream and copy it verbatim.
    tokens += ['-map', f"0:{trackType.indicator()}:{trackDescriptor.getSubIndex()}"]
    tokens += ['-c', 'copy']

    # Some codecs need an explicit output container format.
    codecFormat = trackCodec.format()
    if codecFormat is not None:
        tokens += ['-f', codecFormat]

    # Output filename.
    tokens.append(f"{targetPathBase}.{trackCodec.extension()}")

    return tokens
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('paths', nargs=-1)
@click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix')
@click.option("-o", "--output-directory", type=str, default='')
@click.option("-s", "--subtitles-only", is_flag=True, default=False)
@click.option('--nice', type=int, default=99, help='Niceness of started processes')
@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent')
def unmux(ctx,
          paths,
          label,
          output_directory,
          subtitles_only,
          nice,
          cpu):
    """Extract individual streams from each input file into separate files.

    Each stream is written as <label>[_SxEy]_<index>_<language>[_dispositions]
    with the extension derived from its codec. With --subtitles-only, only
    subtitle streams are extracted. Files that cannot be probed are skipped
    with a warning.
    """

    # Silently ignore paths that are not regular files.
    existingSourcePaths = [p for p in paths if os.path.isfile(p)]
    ctx.obj['logger'].debug(f"\nUnmuxing {len(existingSourcePaths)} files")

    ctx.obj['resource_limits'] = {}
    ctx.obj['resource_limits']['niceness'] = nice
    ctx.obj['resource_limits']['cpu_percent'] = cpu

    for sourcePath in existingSourcePaths:

        fp = FileProperties(ctx.obj, sourcePath)

        try:
            sourceMediaDescriptor = fp.getMediaDescriptor()

            season = fp.getSeason()
            episode = fp.getEpisode()

            #TODO: Adapt recognition for all formats
            # A user-supplied label replaces the file basename; the SxEy
            # indicator is only added when a label was given AND both
            # season and episode were recognized.
            targetLabel = label if label else fp.getFileBasename()
            targetIndicator = f"_S{season}E{episode}" if label and season != -1 and episode != -1 else ''

            if label and not targetIndicator:
                # With a label but no recognized episode the output names
                # would collide, so the whole file is skipped.
                ctx.obj['logger'].warning(f"Skipping file {fp.getFilename()}: Label set but no indicator recognized")
                continue
            else:
                ctx.obj['logger'].info(f"\nUnmuxing file {fp.getFilename()}\n")

            # for trackDescriptor in sourceMediaDescriptor.getAllTrackDescriptors():
            for trackDescriptor in sourceMediaDescriptor.getTrackDescriptors():

                if trackDescriptor.getType() == TrackType.SUBTITLE or not subtitles_only:

                    # Target name layout, matching
                    # SEASON_EPISODE_STREAM_LANGUAGE_DISPOSITIONS_MATCH = '[sS]([0-9]+)[eE]([0-9]+)_([0-9]+)_([a-z]{3})(?:_([A-Z]{3}))*'
                    targetPrefix = f"{targetLabel}{targetIndicator}_{trackDescriptor.getIndex()}_{trackDescriptor.getLanguage().threeLetter()}"

                    # Disposition indicators are appended in a stable order
                    # (sorted by disposition index).
                    td: TrackDisposition
                    for td in sorted(trackDescriptor.getDispositionSet(), key=lambda d: d.index()):
                        targetPrefix += f"_{td.indicator()}"

                    unmuxSequence = getUnmuxSequence(trackDescriptor, sourcePath, targetPrefix, targetDirectory = output_directory)

                    if unmuxSequence:
                        if not ctx.obj['dry_run']:

                            #TODO #425: Codec Enum
                            ctx.obj['logger'].info(f"Unmuxing stream {trackDescriptor.getIndex()} into file {targetPrefix}.{trackDescriptor.getCodec().extension()}")

                            ctx.obj['logger'].debug(f"Executing unmuxing sequence")

                            out, err, rc = executeProcess(unmuxSequence, context = ctx.obj)
                            if rc:
                                ctx.obj['logger'].error(f"Unmuxing of stream {trackDescriptor.getIndex()} failed with error ({rc}) {err}")
                    else:
                        ctx.obj['logger'].warning(f"Skipping stream with unknown codec")
        except Exception as ex:
            # Any probe/extract failure skips only this file.
            ctx.obj['logger'].warning(f"Skipping File {sourcePath} ({ex})")
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('paths', nargs=-1)
@click.option('--nice', type=int, default=99, help='Niceness of started processes')
@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent')
def cropdetect(ctx,
               paths,
               nice,
               cpu):
    """Detect and print crop parameters for each given file.

    Files that cannot be probed are skipped with a warning.
    """

    existingSourcePaths = [p for p in paths if os.path.isfile(p)]
    # Fixed: the message previously said "Unmuxing" (copy-paste from unmux).
    ctx.obj['logger'].debug(f"\nDetecting crop parameters for {len(existingSourcePaths)} files")

    ctx.obj['resource_limits'] = {}
    ctx.obj['resource_limits']['niceness'] = nice
    ctx.obj['resource_limits']['cpu_percent'] = cpu

    for sourcePath in existingSourcePaths:

        try:
            fp = FileProperties(ctx.obj, sourcePath)
            cropParams = fp.findCropParams()

            click.echo(cropParams)

        except Exception as ex:
            ctx.obj['logger'].warning(f"Skipping File {sourcePath} ({ex})")
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
def shows(ctx):
    """List shows via the FfxApp 'shows' command."""

    ctx.obj['command'] = 'shows'
    FfxApp(ctx.obj).run()
|
|
||||||
|
|
||||||
|
|
||||||
def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor):
    """Ensure at most one default and one forced stream per track type.

    Checks for multiple default or forced dispositions (e.g. not resolved
    by user input or database requirements). For every (track type,
    disposition) pair with more than one flagged stream, either raise a
    ClickException (when --no-prompt is set) or prompt the user for the
    sub index of the stream that should keep the flag, recording the
    choice on the media descriptor so the correct tokens are created later.

    Refactored: the six copy-pasted checks collapsed into one data-driven
    loop; check order and all message strings are unchanged.
    """

    trackSources = [
        (TrackType.VIDEO, 'video', mediaDescriptor.getVideoTracks),
        (TrackType.AUDIO, 'audio', mediaDescriptor.getAudioTracks),
        (TrackType.SUBTITLE, 'subtitle', mediaDescriptor.getSubtitleTracks),
    ]
    dispositions = [
        (TrackDisposition.DEFAULT, 'default', mediaDescriptor.setDefaultSubTrack),
        (TrackDisposition.FORCED, 'forced', mediaDescriptor.setForcedSubTrack),
    ]

    # Original check order preserved: video default/forced, audio
    # default/forced, subtitle default/forced.
    for trackType, typeName, getTracks in trackSources:
        for disposition, dispositionName, setSubTrack in dispositions:
            flagged = [t for t in getTracks() if t.getDispositionFlag(disposition)]
            if len(flagged) <= 1:
                continue
            if context['no_prompt']:
                raise click.ClickException(f"More than one {dispositionName} {typeName} stream detected and no prompt set")
            subIndex = click.prompt(f"More than one {dispositionName} {typeName} stream detected! Please select stream", type=int)
            setSubTrack(trackType, subIndex)
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
|
|
||||||
@click.pass_context
|
|
||||||
|
|
||||||
@click.argument('paths', nargs=-1)
|
|
||||||
|
|
||||||
@click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix')
|
|
||||||
|
|
||||||
@click.option('-v', '--video-encoder', type=str, default=FfxController.DEFAULT_VIDEO_ENCODER, help=f"Target video encoder (vp9, av1 or h264)", show_default=True)
|
|
||||||
|
|
||||||
@click.option('-q', '--quality', type=str, default="", help=f"Quality settings to be used with VP9/H264 encoder")
|
|
||||||
@click.option('-p', '--preset', type=str, default="", help=f"Quality preset to be used with AV1 encoder")
|
|
||||||
|
|
||||||
@click.option('-a', '--stereo-bitrate', type=int, default=DEFAULT_STEREO_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode stereo audio streams", show_default=True)
|
|
||||||
@click.option('--ac3', type=int, default=DEFAULT_AC3_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 5.1 audio streams", show_default=True)
|
|
||||||
@click.option('--dts', type=int, default=DEFAULT_DTS_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 6.1 audio streams", show_default=True)
|
|
||||||
|
|
||||||
@click.option('--subtitle-directory', type=str, default='', help='Load subtitles from here')
|
|
||||||
@click.option('--subtitle-prefix', type=str, default='', help='Subtitle filename prefix')
|
|
||||||
|
|
||||||
@click.option('--language', type=str, multiple=True, help='Set stream language. Use format <stream index>:<3 letter iso code>')
|
|
||||||
@click.option('--title', type=str, multiple=True, help='Set stream title. Use format <stream index>:<title>')
|
|
||||||
|
|
||||||
@click.option('--default-video', type=int, default=-1, help='Index of default video stream')
|
|
||||||
@click.option('--forced-video', type=int, default=-1, help='Index of forced video stream')
|
|
||||||
@click.option('--default-audio', type=int, default=-1, help='Index of default audio stream')
|
|
||||||
@click.option('--forced-audio', type=int, default=-1, help='Index of forced audio stream')
|
|
||||||
@click.option('--default-subtitle', type=int, default=-1, help='Index of default subtitle stream')
|
|
||||||
@click.option('--forced-subtitle', type=int, default=-1, help='Index of forced subtitle stream')
|
|
||||||
|
|
||||||
@click.option('--rearrange-streams', type=str, default="", help='Rearrange output streams order. Use format comma separated integers')
|
|
||||||
|
|
||||||
@click.option("--crop", is_flag=False, flag_value="auto", default="none")
|
|
||||||
@click.option("--cut", is_flag=False, flag_value="default", default="none")
|
|
||||||
|
|
||||||
@click.option("--output-directory", type=str, default='')
|
|
||||||
|
|
||||||
@click.option("--denoise", is_flag=False, flag_value="default", default="none")
|
|
||||||
@click.option("--denoise-use-hw", is_flag=True, default=False)
|
|
||||||
@click.option('--denoise-strength', type=str, default='', help='Denoising strength, more blurring vs more details.')
|
|
||||||
@click.option('--denoise-patch-size', type=str, default='', help='Subimage size to apply filtering on luminosity plane. Reduces broader noise patterns but costly.')
|
|
||||||
@click.option('--denoise-chroma-patch-size', type=str, default='', help='Subimage size to apply filtering on chroma planes.')
|
|
||||||
@click.option('--denoise-research-window', type=str, default='', help='Range to search for comparable patches on luminosity plane. Better filtering but costly.')
|
|
||||||
@click.option('--denoise-chroma-research-window', type=str, default='', help='Range to search for comparable patches on chroma planes.')
|
|
||||||
|
|
||||||
@click.option('--show', type=int, default=-1, help='Set TMDB show identifier')
|
|
||||||
@click.option('--season', type=int, default=-1, help='Set season of show')
|
|
||||||
@click.option('--episode', type=int, default=-1, help='Set episode of show')
|
|
||||||
|
|
||||||
@click.option("--no-tmdb", is_flag=True, default=False)
|
|
||||||
@click.option("--no-pattern", is_flag=True, default=False)
|
|
||||||
|
|
||||||
@click.option("--dont-pass-dispositions", is_flag=True, default=False)
|
|
||||||
|
|
||||||
@click.option("--no-prompt", is_flag=True, default=False)
|
|
||||||
@click.option("--no-signature", is_flag=True, default=False)
|
|
||||||
@click.option("--keep-mkvmerge-metadata", is_flag=True, default=False)
|
|
||||||
|
|
||||||
@click.option('--nice', type=int, default=99, help='Niceness of started processes')
|
|
||||||
@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent')
|
|
||||||
|
|
||||||
@click.option('--rename-only', is_flag=True, default=False, help='Only renaming, no recoding')
|
|
||||||
|
|
||||||
def convert(ctx,
            paths,
            label,
            video_encoder,
            quality,
            preset,
            stereo_bitrate,
            ac3,
            dts,
            subtitle_directory,
            subtitle_prefix,
            language,
            title,
            default_video,
            forced_video,
            default_audio,
            forced_audio,
            default_subtitle,
            forced_subtitle,
            rearrange_streams,
            crop,
            cut,
            output_directory,
            denoise,
            denoise_use_hw,
            denoise_strength,
            denoise_patch_size,
            denoise_chroma_patch_size,
            denoise_research_window,
            denoise_chroma_research_window,
            show,
            season,
            episode,
            no_tmdb,
            no_pattern,
            dont_pass_dispositions,
            no_prompt,
            no_signature,
            keep_mkvmerge_metadata,
            nice,
            cpu,
            rename_only):
    """Batch conversion of audiovideo files in format suitable for web playback, e.g. jellyfin

    Files found under PATHS will be converted according to parameters.
    Filename extensions will be changed appropriately.
    Suffices will be appended to filename in case of multiple created files
    or if the filename has not changed."""

    startTime = time.perf_counter()

    # Shared click context dict; all sub-controllers read their settings from it.
    context = ctx.obj

    context['video_encoder'] = VideoEncoder.fromLabel(video_encoder)

    #HINT: quick and dirty override for h264, todo improve
    # H264 output stays in an mkv container; everything else uses the
    # controller defaults (webm). Empty targetFormat means "no -f token".
    targetFormat = '' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_FORMAT
    targetExtension = 'mkv' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_EXTENSION

    # Feature toggles derived from the negative CLI flags.
    context['use_tmdb'] = not no_tmdb
    context['use_pattern'] = not no_pattern
    context['no_prompt'] = no_prompt
    context['no_signature'] = no_signature
    context['keep_mkvmerge_metadata'] = keep_mkvmerge_metadata

    # NOTE(review): `denoise_use_hw` and `dont_pass_dispositions` are accepted
    # but not referenced anywhere in this function body — confirm whether they
    # are consumed elsewhere or are dead options.

    context['resource_limits'] = {}
    context['resource_limits']['niceness'] = nice
    context['resource_limits']['cpu_percent'] = cpu

    # Subtitle import is only enabled when both directory and prefix are given.
    context['import_subtitles'] = (subtitle_directory and subtitle_prefix)
    if context['import_subtitles']:
        context['subtitle_directory'] = subtitle_directory
        context['subtitle_prefix'] = subtitle_prefix

    # Keep only paths that exist and carry a recognized input extension.
    existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in FfxController.INPUT_FILE_EXTENSIONS]

    # CLI Overrides
    cliOverrides = {}

    # --language entries have the form "<track-index>:<language-code>".
    if language:
        cliOverrides['languages'] = {}
        for overLang in language:
            olTokens = overLang.split(':')
            if len(olTokens) == 2:
                try:
                    cliOverrides['languages'][int(olTokens[0])] = olTokens[1]
                except ValueError:
                    ctx.obj['logger'].warning(f"Ignoring non-integer language index {olTokens[0]}")
                    continue

    # --title entries have the form "<track-index>:<title>".
    if title:
        cliOverrides['titles'] = {}
        for overTitle in title:
            otTokens = overTitle.split(':')
            if len(otTokens) == 2:
                try:
                    cliOverrides['titles'][int(otTokens[0])] = otTokens[1]
                except ValueError:
                    ctx.obj['logger'].warning(f"Ignoring non-integer title index {otTokens[0]}")
                    continue

    # Disposition overrides; -1 is the CLI default meaning "not given".
    if default_video != -1:
        cliOverrides['default_video'] = default_video
    if forced_video != -1:
        cliOverrides['forced_video'] = forced_video
    if default_audio != -1:
        cliOverrides['default_audio'] = default_audio
    if forced_audio != -1:
        cliOverrides['forced_audio'] = forced_audio
    if default_subtitle != -1:
        cliOverrides['default_subtitle'] = default_subtitle
    if forced_subtitle != -1:
        cliOverrides['forced_subtitle'] = forced_subtitle

    # TMDB show/season/episode overrides only make sense for a single file.
    if show != -1 or season != -1 or episode != -1:
        if len(existingSourcePaths) > 1:
            context['logger'].warning(f"Ignoring TMDB show, season, episode overrides, not supported for multiple source files")
        else:
            cliOverrides['tmdb'] = {}
            if show != -1:
                cliOverrides['tmdb']['show'] = show
            if season != -1:
                cliOverrides['tmdb']['season'] = season
            if episode != -1:
                cliOverrides['tmdb']['episode'] = episode

    if cliOverrides:
        context['overrides'] = cliOverrides

    # NOTE(review): 'stream_order' is inserted into cliOverrides AFTER the
    # `context['overrides'] = cliOverrides` assignment above. Because the same
    # dict object is referenced this still works — unless cliOverrides was
    # empty at that point, in which case context['overrides'] is never set and
    # the stream order is silently dropped. Confirm whether that is intended.
    if rearrange_streams:
        try:
            cliOverrides['stream_order'] = [int(si) for si in rearrange_streams.split(",")]
        except ValueError as ve:
            errorMessage = "Non-integer in rearrange stream parameter"
            ctx.obj['logger'].error(errorMessage)
            raise click.Abort()

    ctx.obj['logger'].debug(f"\nVideo encoder: {video_encoder}")

    # Parse comma-separated quality/preset lists (only used here for debug
    # logging; the raw strings are consumed by the filter objects below).
    qualityTokens = quality.split(',')
    q_list = [q for q in qualityTokens if q.isnumeric()]
    ctx.obj['logger'].debug(f"Qualities: {q_list}")

    presetTokens = preset.split(',')
    p_list = [p for p in presetTokens if p.isnumeric()]
    ctx.obj['logger'].debug(f"Presets: {p_list}")

    # Normalize audio bitrates to the ffmpeg "<n>k" form.
    context['bitrates'] = {}
    context['bitrates']['stereo'] = str(stereo_bitrate) if str(stereo_bitrate).endswith('k') else f"{stereo_bitrate}k"
    context['bitrates']['ac3'] = str(ac3) if str(ac3).endswith('k') else f"{ac3}k"
    context['bitrates']['dts'] = str(dts) if str(dts).endswith('k') else f"{dts}k"

    ctx.obj['logger'].debug(f"Stereo bitrate: {context['bitrates']['stereo']}")
    ctx.obj['logger'].debug(f"AC3 bitrate: {context['bitrates']['ac3']}")
    ctx.obj['logger'].debug(f"DTS bitrate: {context['bitrates']['dts']}")

    #->
    # Process cut parameters ("<start>,<length>" in seconds, or 'none').
    context['perform_cut'] = (cut != 'none')
    if context['perform_cut']:
        cutTokens = cut.split(',')
        if cutTokens and len(cutTokens) == 2:
            context['cut_start'] = int(cutTokens[0])
            context['cut_length'] = int(cutTokens[1])
            ctx.obj['logger'].debug(f"Cut start={context['cut_start']} length={context['cut_length']}")

    # TMDB controller is only instantiated when lookups are enabled.
    tc = TmdbController() if context['use_tmdb'] else None

    # H264 falls back to its own default quality when none was given.
    qualityKwargs = {QualityFilter.QUALITY_KEY: str(QualityFilter.DEFAULT_H264_QUALITY if (context['video_encoder'] == VideoEncoder.H264 and not quality) else quality)}
    qf = QualityFilter(**qualityKwargs)

    # NOTE(review): PresetFilter/CropFilter/NlmeansFilter instances below are
    # constructed without keeping a reference except for qf and cf — presumably
    # construction registers them with the filter chain; confirm.
    if context['video_encoder'] == VideoEncoder.AV1 and preset:
        presetKwargs = {PresetFilter.PRESET_KEY: preset}
        PresetFilter(**presetKwargs)

    cf = None
    # if crop != 'none':
    if crop == 'auto':
        cropKwargs = {}
        cf = CropFilter(**cropKwargs)

    # Collect only the denoise options actually supplied on the CLI.
    denoiseKwargs = {}
    if denoise_strength:
        denoiseKwargs[NlmeansFilter.STRENGTH_KEY] = denoise_strength
    if denoise_patch_size:
        denoiseKwargs[NlmeansFilter.PATCH_SIZE_KEY] = denoise_patch_size
    if denoise_chroma_patch_size:
        denoiseKwargs[NlmeansFilter.CHROMA_PATCH_SIZE_KEY] = denoise_chroma_patch_size
    if denoise_research_window:
        denoiseKwargs[NlmeansFilter.RESEARCH_WINDOW_KEY] = denoise_research_window
    if denoise_chroma_research_window:
        denoiseKwargs[NlmeansFilter.CHROMA_RESEARCH_WINDOW_KEY] = denoise_chroma_research_window

    if denoise != 'none' or denoiseKwargs:
        NlmeansFilter(**denoiseKwargs)

    # Each chain iteration produces one output variant per source file.
    chainYield = list(qf.getChainYield())

    ctx.obj['logger'].info(f"\nRunning {len(existingSourcePaths) * len(chainYield)} jobs")

    jobIndex = 0

    for sourcePath in existingSourcePaths:

        # Separate basedir, basename and extension for current source file
        # NOTE(review): sourceDirectory is computed but never used below.
        sourceDirectory = os.path.dirname(sourcePath)
        sourceFilename = os.path.basename(sourcePath)
        sourcePathTokens = sourceFilename.split('.')

        sourceFileBasename = '.'.join(sourcePathTokens[:-1])
        sourceFilenameExtension = sourcePathTokens[-1]

        ctx.obj['logger'].info(f"\nProcessing file {sourcePath}")

        # Filename suffices keyed by kind (currently only 'se' season/episode).
        targetSuffices = {}

        mediaFileProperties = FileProperties(context, sourcePath)

        # Crop detection only runs when a CropFilter was requested above.
        # if not cf is None:
        #
        cropArguments = {} if cf is None else mediaFileProperties.findCropArguments()
        #
        # ctx.obj['logger'].info(f"\nSetting crop arguments: ouput width: {cropArguments[CropFilter.OUTPUT_WIDTH_KEY]} "
        #                        + f"height: {cropArguments[CropFilter.OUTPUT_HEIGHT_KEY]} "
        #                        + f"offset x: {cropArguments[CropFilter.OFFSET_X_KEY]} "
        #                        + f"y: {cropArguments[CropFilter.OFFSET_Y_KEY]}")
        #
        # cf.setArguments(**cropArguments)

        ssc = ShiftedSeasonController(context)

        showId = mediaFileProperties.getShowId()

        #HINT: -1 if not set
        # CLI overrides win over values parsed from the file itself.
        if 'tmdb' in cliOverrides.keys() and 'season' in cliOverrides['tmdb']:
            showSeason = cliOverrides['tmdb']['season']
        else:
            showSeason = mediaFileProperties.getSeason()

        if 'tmdb' in cliOverrides.keys() and 'episode' in cliOverrides['tmdb']:
            showEpisode = cliOverrides['tmdb']['episode']
        else:
            showEpisode = mediaFileProperties.getEpisode()

        ctx.obj['logger'].debug(f"Season={showSeason} Episode={showEpisode}")

        sourceMediaDescriptor = mediaFileProperties.getMediaDescriptor()

        #HINT: This is None if the filename did not match anything in database
        currentPattern = mediaFileProperties.getPattern() if context['use_pattern'] else None

        ctx.obj['logger'].debug(f"Pattern matching: {'No' if currentPattern is None else 'Yes'}")

        # Setup FfxController accordingly depending on pattern matching is enabled and a pattern was matched
        if currentPattern is None:

            # No pattern: convert the source descriptor as-is.
            checkUniqueDispositions(context, sourceMediaDescriptor)
            currentShowDescriptor = None

            if context['import_subtitles']:
                sourceMediaDescriptor.importSubtitles(context['subtitle_directory'],
                                                      context['subtitle_prefix'],
                                                      showSeason,
                                                      showEpisode)

            if cliOverrides:
                sourceMediaDescriptor.applyOverrides(cliOverrides)

            fc = FfxController(context, sourceMediaDescriptor)

        else:
            # Pattern matched: the pattern supplies the target layout, which is
            # validated against the source before building the controller.
            targetMediaDescriptor = currentPattern.getMediaDescriptor(ctx.obj)
            checkUniqueDispositions(context, targetMediaDescriptor)
            currentShowDescriptor = currentPattern.getShowDescriptor(ctx.obj)

            # Check if source and target track descriptors match
            sourceTrackDescriptorList = sourceMediaDescriptor.getTrackDescriptors()
            targetTrackDescriptorList = targetMediaDescriptor.getTrackDescriptors()

            for ttd in targetTrackDescriptorList:

                tti = ttd.getIndex()
                ttsi = ttd.getSourceIndex()

                # Find the source track this target track refers to.
                stList = [st for st in sourceTrackDescriptorList if st.getIndex() == ttsi]
                std = stList[0] if stList else None

                if std is None:
                    raise click.ClickException(f"Target track #{tti} refering to non-existent source track #{ttsi}")

                ttType = ttd.getType()
                stType = std.getType()

                if ttType != stType:
                    raise click.ClickException(f"Target track #{tti} type ({ttType.label()}) not matching source track #{ttsi} type ({stType.label()})")

            if context['import_subtitles']:
                targetMediaDescriptor.importSubtitles(context['subtitle_directory'],
                                                      context['subtitle_prefix'],
                                                      showSeason,
                                                      showEpisode)

            # ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getAllTrackDescriptors()]}")
            ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getTrackDescriptors()]}")

            if cliOverrides:
                targetMediaDescriptor.applyOverrides(cliOverrides)

            # ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getAllTrackDescriptors()]}")
            ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getTrackDescriptors()]}")

            ctx.obj['logger'].debug(f"Input mapping tokens (2nd pass): {targetMediaDescriptor.getInputMappingTokens()}")

            fc = FfxController(context, targetMediaDescriptor, sourceMediaDescriptor)

        # Digit widths come from the show descriptor when a pattern matched,
        # otherwise from the ShowDescriptor defaults.
        indexSeasonDigits = currentShowDescriptor.getIndexSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS
        indexEpisodeDigits = currentShowDescriptor.getIndexEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS
        indicatorSeasonDigits = currentShowDescriptor.getIndicatorSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS
        indicatorEpisodeDigits = currentShowDescriptor.getIndicatorEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS

        # Shift season and episode if defined for this show
        # (skipped entirely when any tmdb CLI override was given).
        if ('tmdb' not in cliOverrides.keys() and showId != -1
            and showSeason != -1 and showEpisode != -1):
            shiftedShowSeason, shiftedShowEpisode = ssc.shiftSeason(showId,
                                                                    season=showSeason,
                                                                    episode=showEpisode)
        else:
            shiftedShowSeason = showSeason
            shiftedShowEpisode = showEpisode

        # Assemble target filename accordingly depending on TMDB lookup is enabled
        #HINT: -1 if not set
        showId = cliOverrides['tmdb']['show'] if 'tmdb' in cliOverrides.keys() and 'show' in cliOverrides['tmdb'] else (-1 if currentShowDescriptor is None else currentShowDescriptor.getId())

        if context['use_tmdb'] and showId != -1 and shiftedShowSeason != -1 and shiftedShowEpisode != -1:

            # NOTE(review): f-string below reads "episode{...}" — missing '='.
            ctx.obj['logger'].debug(f"Querying TMDB for show_id={showId} season={shiftedShowSeason} episode{shiftedShowEpisode}")

            if currentPattern is None:
                sName, showYear = tc.getShowNameAndYear(showId)
                showName = filterFilename(sName)
                showFilenamePrefix = f"{showName} ({str(showYear)})"
            else:
                showFilenamePrefix = currentShowDescriptor.getFilenamePrefix()

            tmdbEpisodeResult = tc.queryEpisode(showId, shiftedShowSeason, shiftedShowEpisode)

            ctx.obj['logger'].debug(f"tmdbEpisodeResult={tmdbEpisodeResult}")

            # A successful lookup replaces the source basename entirely.
            if tmdbEpisodeResult:
                substitutedEpisodeName = filterFilename(substituteTmdbFilename(tmdbEpisodeResult['name']))
                sourceFileBasename = getEpisodeFileBasename(showFilenamePrefix,
                                                            substitutedEpisodeName,
                                                            shiftedShowSeason,
                                                            shiftedShowEpisode,
                                                            indexSeasonDigits,
                                                            indexEpisodeDigits,
                                                            indicatorSeasonDigits,
                                                            indicatorEpisodeDigits,
                                                            context=ctx.obj)

        # With an explicit label, carry season/episode as a filename suffix.
        if label:
            if shiftedShowSeason > -1 and shiftedShowEpisode > -1:
                targetSuffices['se'] = f"S{shiftedShowSeason:0{indicatorSeasonDigits}d}E{shiftedShowEpisode:0{indicatorEpisodeDigits}d}"
            elif shiftedShowEpisode > -1:
                targetSuffices['se'] = f"E{shiftedShowEpisode:0{indicatorEpisodeDigits}d}"
            else:
                if 'se' in targetSuffices.keys():
                    del targetSuffices['se']

        ctx.obj['logger'].debug(f"fileBasename={sourceFileBasename}")

        for chainIteration in chainYield:

            ctx.obj['logger'].debug(f"\nchain iteration: {chainIteration}\n")

            chainVariant = '-'.join([fy['variant'] for fy in chainIteration])

            ctx.obj['logger'].debug(f"\nRunning job {jobIndex} file={sourcePath} variant={chainVariant}")
            jobIndex += 1

            ctx.obj['logger'].debug(f"label={label if label else 'Falsy'}")
            ctx.obj['logger'].debug(f"sourceFileBasename={sourceFileBasename}")

            # TMDB-derived name wins unless an explicit label was given.
            # NOTE(review): with use_tmdb disabled and no label this yields an
            # empty basename — confirm intended.
            targetFileBasename = sourceFileBasename if context['use_tmdb'] and not label else label

            targetFilenameTokens = [targetFileBasename]

            if 'se' in targetSuffices.keys():
                targetFilenameTokens += [targetSuffices['se']]

            # Each filter in the chain may contribute filename suffices.
            for filterYield in chainIteration:
                targetFilenameTokens += filterYield['suffices']

            # Keep the original extension when only renaming.
            targetFilename = f"{'_'.join(targetFilenameTokens)}.{sourceFilenameExtension if rename_only else targetExtension}"

            # Never overwrite the source file in place.
            if sourceFilename == targetFilename:
                targetFilename = f"out_{targetFilename}"

            targetPath = os.path.join(output_directory, targetFilename) if output_directory else targetFilename

            ctx.obj['logger'].info(f"Creating file {targetFilename}")

            if rename_only:
                shutil.copyfile(sourcePath, targetPath)
            else:
                fc.runJob(sourcePath,
                          targetPath,
                          targetFormat,
                          context['video_encoder'],
                          chainIteration,
                          cropArguments)

    endTime = time.perf_counter()
    ctx.obj['logger'].info(f"\nDONE\nTime elapsed {endTime - startTime}")
|
|
||||||
|
|
||||||
|
|
||||||
# Script entry point: delegate to the click command group.
if __name__ == '__main__':
    ffx()
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
from textual.app import App
|
|
||||||
|
|
||||||
from .shows_screen import ShowsScreen
|
|
||||||
from .media_details_screen import MediaDetailsScreen
|
|
||||||
|
|
||||||
|
|
||||||
class FfxApp(App):
    """Top-level Textual application for FFX.

    Receives a plain ``context`` dict as data input and exposes it back to the
    caller through :meth:`getContext`. On mount it pushes the screen selected
    by ``context['command']``.
    """

    TITLE = "FFX"

    BINDINGS = [
        ("q", "quit()", "Quit"),
        ("h", "switch_mode('help')", "Help"),
    ]

    def __init__(self, context=None):
        """Initialize the app.

        :param context: data 'input' dict shared with the CLI layer.
            BUGFIX: the previous default ``context={}`` was a mutable default
            argument shared across every instance created without an explicit
            context; use a fresh dict per instance instead.
        """
        super().__init__()

        # Data 'input' variable
        self.context = {} if context is None else context

    def on_mount(self) -> None:
        """Push the initial screen selected by ``context['command']``."""

        # .get() avoids the KeyError when no command was supplied.
        command = self.context.get('command')

        if command == 'shows':
            self.push_screen(ShowsScreen())
        elif command == 'inspect':
            self.push_screen(MediaDetailsScreen())

    def getContext(self):
        """Data 'output' method"""
        return self.context
|
|
||||||
|
|
||||||
@@ -1,360 +0,0 @@
|
|||||||
import os, click
|
|
||||||
|
|
||||||
from ffx.media_descriptor_change_set import MediaDescriptorChangeSet
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.audio_layout import AudioLayout
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
from ffx.video_encoder import VideoEncoder
|
|
||||||
from ffx.process import executeProcess
|
|
||||||
|
|
||||||
from ffx.constants import DEFAULT_cut_start, DEFAULT_cut_length
|
|
||||||
|
|
||||||
from ffx.filter.quality_filter import QualityFilter
|
|
||||||
from ffx.filter.preset_filter import PresetFilter
|
|
||||||
from ffx.filter.crop_filter import CropFilter
|
|
||||||
|
|
||||||
|
|
||||||
class FfxController():
    """Builds and executes ffmpeg command lines for the configured encoders.

    One controller is created per source file; it combines the target (and
    optionally source) :class:`MediaDescriptor` into a
    :class:`MediaDescriptorChangeSet` and assembles encoder-specific command
    sequences in :meth:`runJob`.
    """

    # Base invocation; -y overwrites existing output files.
    COMMAND_TOKENS = ['ffmpeg', '-y']
    # Discard output (used for the VP9 analysis pass): -f null /dev/null
    NULL_TOKENS = ['-f', 'null', '/dev/null']

    # Statistics file written by ffmpeg's first pass (-pass 1).
    TEMP_FILE_NAME = "ffmpeg2pass-0.log"

    DEFAULT_VIDEO_ENCODER = VideoEncoder.VP9.label()

    DEFAULT_FILE_FORMAT = 'webm'
    DEFAULT_FILE_EXTENSION = 'webm'

    INPUT_FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm']

    # Explicit 5.1 channel mapping (side channels remapped to back).
    CHANNEL_MAP_5_1 = 'FL-FL|FR-FR|FC-FC|LFE-LFE|SL-BL|SR-BR:5.1'

    # SIGNATURE_TAGS = {'RECODED_WITH': 'FFX'}

    def __init__(self,
                 context : dict,
                 targetMediaDescriptor : MediaDescriptor,
                 sourceMediaDescriptor : MediaDescriptor = None):
        """Create a controller for one conversion.

        :param context: shared settings dict ('logger', 'bitrates',
            'perform_cut', 'dry_run', ...).
        :param targetMediaDescriptor: desired output layout.
        :param sourceMediaDescriptor: original layout, when it differs from
            the target (pattern-matched conversions); may be None.
        """
        self.__context = context

        self.__targetMediaDescriptor = targetMediaDescriptor
        self.__mdcs = MediaDescriptorChangeSet(context,
                                               targetMediaDescriptor,
                                               sourceMediaDescriptor)

        self.__logger = context['logger']

    def __generateEncodeSequence(self, commandTokens, filterTokens):
        """Shared command prefix for single-output encodes.

        Concatenates input/import tokens, stream mapping, dispositions,
        metadata and the optional video filter tokens — the part that was
        previously duplicated across the AV1, H264 and VP9-pass-2 branches.
        """
        return (commandTokens
                + self.__targetMediaDescriptor.getImportFileTokens()
                + self.__targetMediaDescriptor.getInputMappingTokens()
                + self.__mdcs.generateDispositionTokens()
                + self.__mdcs.generateMetadataTokens()
                + filterTokens)

    def generateAV1Tokens(self, quality, preset, subIndex : int = 0):
        """Return libsvtav1 encoder tokens for video stream *subIndex*."""
        return [f"-c:v:{int(subIndex)}", 'libsvtav1',
                '-svtav1-params', f"crf={quality}:preset={preset}:tune=0:enable-overlays=1:scd=1:scm=0",
                '-pix_fmt', 'yuv420p10le']

    # -c:v libx264 -preset slow -crf 17
    def generateH264Tokens(self, quality, subIndex : int = 0):
        """Return libx264 encoder tokens for video stream *subIndex*."""
        return [f"-c:v:{int(subIndex)}", 'libx264',
                "-preset", "slow",
                '-crf', str(quality)]

    # -c:v:0 libvpx-vp9 -row-mt 1 -crf 32 -pass 1 -speed 4 -frame-parallel 0 -g 9999 -aq-mode 0
    def generateVP9Pass1Tokens(self, quality, subIndex : int = 0):
        """Return libvpx-vp9 first-pass (analysis) tokens for stream *subIndex*."""
        return [f"-c:v:{int(subIndex)}",
                'libvpx-vp9',
                '-row-mt', '1',
                '-crf', str(quality),
                '-pass', '1',
                '-speed', '4',
                '-frame-parallel', '0',
                '-g', '9999',
                '-aq-mode', '0']

    # -c:v:0 libvpx-vp9 -row-mt 1 -crf 32 -pass 2 -frame-parallel 0 -g 9999 -aq-mode 0 -auto-alt-ref 1 -lag-in-frames 25
    def generateVP9Pass2Tokens(self, quality, subIndex : int = 0):
        """Return libvpx-vp9 second-pass (encode) tokens for stream *subIndex*."""
        return [f"-c:v:{int(subIndex)}",
                'libvpx-vp9',
                '-row-mt', '1',
                '-crf', str(quality),
                '-pass', '2',
                '-frame-parallel', '0',
                '-g', '9999',
                '-aq-mode', '0',
                '-auto-alt-ref', '1',
                '-lag-in-frames', '25']

    def generateVideoCopyTokens(self, subIndex):
        """Return tokens that pass video stream *subIndex* through unchanged."""
        return [f"-c:v:{int(subIndex)}",
                'copy']

    def generateCropTokens(self):
        """Return cut tokens (-ss/-t) from the context's cut parameters.

        NOTE(review): despite the name this implements the CUT (time range),
        not spatial cropping — consider renaming once callers are audited.
        Falls back to the DEFAULT_cut_* constants when no range was parsed.
        """
        if 'cut_start' in self.__context.keys() and 'cut_length' in self.__context.keys():
            cutBegin = int(self.__context['cut_start'])
            cutSpan = int(self.__context['cut_length'])
        else:
            cutBegin = DEFAULT_cut_start
            cutSpan = DEFAULT_cut_length

        return ['-ss', str(cutBegin), '-t', str(cutSpan)]

    def generateOutputTokens(self, filePathBase, format = '', ext = ''):
        """Return the trailing output tokens (optional -f <format>, file path).

        :param filePathBase: output path, possibly without extension.
        :param format: explicit container format; empty lets ffmpeg infer it.
            (Name shadows the builtin but is kept for interface compatibility.)
        :param ext: extension appended to filePathBase when non-empty.
        """
        self.__logger.debug(f"FfxController.generateOutputTokens(): base='{filePathBase}' format='{format}' ext='{ext}'")

        outputFilePath = f"{filePathBase}{('.'+str(ext)) if ext else ''}"
        if format:
            return ['-f', format, outputFilePath]
        else:
            return [outputFilePath]

    def generateAudioEncodingTokens(self):
        """Generates ffmpeg options audio streams including channel remapping, codec and bitrate"""

        audioTokens = []

        # targetAudioTrackDescriptors = [td for td in self.__targetMediaDescriptor.getAllTrackDescriptors() if td.getType() == TrackType.AUDIO]
        targetAudioTrackDescriptors = self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.AUDIO)

        # All layouts are re-encoded to Opus; bitrate depends on the layout.
        trackSubIndex = 0
        for trackDescriptor in targetAudioTrackDescriptors:

            trackAudioLayout = trackDescriptor.getAudioLayout()

            if trackAudioLayout == AudioLayout.LAYOUT_6_1:
                audioTokens += [f"-c:a:{trackSubIndex}",
                                'libopus',
                                f"-filter:a:{trackSubIndex}",
                                'channelmap=channel_layout=6.1',
                                f"-b:a:{trackSubIndex}",
                                self.__context['bitrates']['dts']]

            if trackAudioLayout == AudioLayout.LAYOUT_5_1:
                audioTokens += [f"-c:a:{trackSubIndex}",
                                'libopus',
                                f"-filter:a:{trackSubIndex}",
                                f"channelmap={FfxController.CHANNEL_MAP_5_1}",
                                f"-b:a:{trackSubIndex}",
                                self.__context['bitrates']['ac3']]

            if trackAudioLayout == AudioLayout.LAYOUT_STEREO:
                audioTokens += [f"-c:a:{trackSubIndex}",
                                'libopus',
                                f"-b:a:{trackSubIndex}",
                                self.__context['bitrates']['stereo']]

            # Unnamed 6-channel layouts are treated like 5.1.
            if trackAudioLayout == AudioLayout.LAYOUT_6CH:
                audioTokens += [f"-c:a:{trackSubIndex}",
                                'libopus',
                                f"-filter:a:{trackSubIndex}",
                                f"channelmap={FfxController.CHANNEL_MAP_5_1}",
                                f"-b:a:{trackSubIndex}",
                                self.__context['bitrates']['ac3']]

            # -ac 5 ?
            if trackAudioLayout == AudioLayout.LAYOUT_5_0:
                audioTokens += [f"-c:a:{trackSubIndex}",
                                'libopus',
                                f"-filter:a:{trackSubIndex}",
                                'channelmap=channel_layout=5.0',
                                f"-b:a:{trackSubIndex}",
                                self.__context['bitrates']['ac3']]

            trackSubIndex += 1
        return audioTokens

    def runJob(self,
               sourcePath,
               targetPath,
               targetFormat: str = '',
               videoEncoder: VideoEncoder = VideoEncoder.VP9,
               chainIteration: list = None,
               cropArguments: dict = None):
        """Assemble and execute the ffmpeg command(s) for one output file.

        :param sourcePath: input media file.
        :param targetPath: output file path.
        :param targetFormat: explicit container format ('' lets ffmpeg infer).
        :param videoEncoder: which encoder branch to run (AV1, H264, VP9 2-pass).
        :param chainIteration: filter-chain yields providing quality/preset/
            denoise tokens for this variant.
        :param cropArguments: crop geometry keyed by CropFilter.*_KEY.
        """
        # BUGFIX: the previous signature used mutable defaults ([], {}),
        # which are shared between calls; normalize None-sentinels instead.
        chainIteration = [] if chainIteration is None else chainIteration
        cropArguments = {} if cropArguments is None else cropArguments

        qualityFilters = [fy for fy in chainIteration if fy['identifier'] == 'quality']
        presetFilters = [fy for fy in chainIteration if fy['identifier'] == 'preset']

        cropFilters = [fy for fy in chainIteration if fy['identifier'] == 'crop']
        denoiseFilters = [fy for fy in chainIteration if fy['identifier'] == 'nlmeans']

        quality = (qualityFilters[0]['parameters']['quality'] if qualityFilters else QualityFilter.DEFAULT_VP9_QUALITY)
        preset = presetFilters[0]['parameters']['preset'] if presetFilters else PresetFilter.DEFAULT_PRESET

        # Build the -vf filtergraph: optional crop followed by denoise tokens.
        filterParamTokens = []

        if cropArguments:
            cropParams = (f"crop="
                          + f"{cropArguments[CropFilter.OUTPUT_WIDTH_KEY]}"
                          + f":{cropArguments[CropFilter.OUTPUT_HEIGHT_KEY]}"
                          + f":{cropArguments[CropFilter.OFFSET_X_KEY]}"
                          + f":{cropArguments[CropFilter.OFFSET_Y_KEY]}")
            filterParamTokens.append(cropParams)

        filterParamTokens.extend(denoiseFilters[0]['tokens'] if denoiseFilters else [])

        filterTokens = ['-vf', ', '.join(filterParamTokens)] if filterParamTokens else []

        commandTokens = FfxController.COMMAND_TOKENS + ['-i', sourcePath]

        if videoEncoder == VideoEncoder.AV1:

            commandSequence = self.__generateEncodeSequence(commandTokens, filterTokens)

            for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
                #HINT: Attached thumbnails are not supported by .webm container format
                # BUGFIX: was `td.getCodec != TrackCodec.PNG` — a bound method
                # compared to an enum member is always unequal/true, so the PNG
                # guard never filtered anything; the accessor must be called.
                if td.getCodec() != TrackCodec.PNG:
                    commandSequence += self.generateAV1Tokens(int(quality), int(preset))

            commandSequence += self.generateAudioEncodingTokens()

            if self.__context['perform_cut']:
                commandSequence += self.generateCropTokens()

            commandSequence += self.generateOutputTokens(targetPath,
                                                         targetFormat)

            self.__logger.debug(f"FfxController.runJob(): Running command sequence")

            # NOTE(review): unlike the VP9 branch, the return code is not
            # checked here — confirm whether failures should raise.
            if not self.__context['dry_run']:
                executeProcess(commandSequence, context = self.__context)

        if videoEncoder == VideoEncoder.H264:

            commandSequence = self.__generateEncodeSequence(commandTokens, filterTokens)

            for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
                #HINT: Attached thumbnails are not supported by .webm container format
                # BUGFIX: call getCodec() (see AV1 branch).
                if td.getCodec() != TrackCodec.PNG:
                    commandSequence += self.generateH264Tokens(int(quality))

            commandSequence += self.generateAudioEncodingTokens()

            if self.__context['perform_cut']:
                commandSequence += self.generateCropTokens()

            commandSequence += self.generateOutputTokens(targetPath,
                                                         targetFormat)

            self.__logger.debug(f"FfxController.runJob(): Running command sequence")

            # NOTE(review): return code not checked — see AV1 branch.
            if not self.__context['dry_run']:
                executeProcess(commandSequence, context = self.__context)

        if videoEncoder == VideoEncoder.VP9:

            # Pass 1: analysis only — video streams, discarded output.
            commandSequence1 = (commandTokens
                                + self.__targetMediaDescriptor.getInputMappingTokens(only_video=True))

            # Optional tokens
            #NOTE: Filters and so needs to run on the first pass as well, as here
            # the required bitrate for the second run is determined and recorded
            # TODO: Results seems to be slightly better with first pass omitted,
            # Confirm or find better filter settings for 2-pass
            # commandSequence1 += self.__context['denoiser'].generatefilterTokens()

            for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
                #HINT: Attached thumbnails are not supported by .webm container format
                # BUGFIX: call getCodec() (see AV1 branch).
                if td.getCodec() != TrackCodec.PNG:
                    commandSequence1 += self.generateVP9Pass1Tokens(int(quality))

            if self.__context['perform_cut']:
                commandSequence1 += self.generateCropTokens()

            commandSequence1 += FfxController.NULL_TOKENS

            # Remove a stale pass-1 statistics file from a previous run.
            if os.path.exists(FfxController.TEMP_FILE_NAME):
                os.remove(FfxController.TEMP_FILE_NAME)

            self.__logger.debug(f"FfxController.runJob(): Running command sequence 1")

            if not self.__context['dry_run']:
                executeProcess(commandSequence1, context = self.__context)

            # Pass 2: full encode consuming the pass-1 statistics.
            commandSequence2 = self.__generateEncodeSequence(commandTokens, filterTokens)

            for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
                #HINT: Attached thumbnails are not supported by .webm container format
                # BUGFIX: call getCodec() (see AV1 branch).
                if td.getCodec() != TrackCodec.PNG:
                    commandSequence2 += self.generateVP9Pass2Tokens(int(quality))

            commandSequence2 += self.generateAudioEncodingTokens()

            if self.__context['perform_cut']:
                commandSequence2 += self.generateCropTokens()

            commandSequence2 += self.generateOutputTokens(targetPath,
                                                          targetFormat)

            self.__logger.debug(f"FfxController.runJob(): Running command sequence 2")

            if not self.__context['dry_run']:
                out, err, rc = executeProcess(commandSequence2, context = self.__context)
                if rc:
                    raise click.ClickException(f"Command resulted in error: rc={rc} error={err}")

    def createEmptyFile(self,
                        path: str = 'empty.mkv',
                        sizeX: int = 1280,
                        sizeY: int = 720,
                        rate: int = 25,
                        length: int = 10):
        """Create a black-video / silent-audio test file of *length* seconds.

        :param path: output file path.
        :param sizeX/sizeY: video frame size in pixels.
        :param rate: frame rate.
        :param length: duration in seconds.
        """
        # BUGFIX: previously `commandTokens = FfxController.COMMAND_TOKENS`
        # followed by `+=` mutated the shared class attribute in place,
        # corrupting COMMAND_TOKENS for every later job. Build a fresh list.
        commandTokens = FfxController.COMMAND_TOKENS + [
            '-f',
            'lavfi',
            '-i',
            f"color=size={sizeX}x{sizeY}:rate={rate}:color=black",
            '-f',
            'lavfi',
            '-i',
            'anullsrc=channel_layout=stereo:sample_rate=44100',
            '-t',
            str(length),
            path]

        # NOTE(review): return values captured but not checked — confirm
        # whether a non-zero rc should raise here.
        out, err, rc = executeProcess(commandTokens, context = self.__context)
|
|
||||||
@@ -1,248 +0,0 @@
|
|||||||
import os, re, json
|
|
||||||
|
|
||||||
from .media_descriptor import MediaDescriptor
|
|
||||||
from .pattern_controller import PatternController
|
|
||||||
|
|
||||||
from ffx.filter.crop_filter import CropFilter
|
|
||||||
|
|
||||||
from .process import executeProcess
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
|
|
||||||
|
|
||||||
class FileProperties():
|
|
||||||
|
|
||||||
FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm']
|
|
||||||
|
|
||||||
SE_INDICATOR_PATTERN = '([sS][0-9]+[eE][0-9]+)'
|
|
||||||
SEASON_EPISODE_INDICATOR_MATCH = '[sS]([0-9]+)[eE]([0-9]+)'
|
|
||||||
EPISODE_INDICATOR_MATCH = '[eE]([0-9]+)'
|
|
||||||
|
|
||||||
CROPDETECT_PATTERN = 'crop=[0-9]+:[0-9]+:[0-9]+:[0-9]+$'
|
|
||||||
|
|
||||||
DEFAULT_INDEX_DIGITS = 3
|
|
||||||
|
|
||||||
def __init__(self, context, sourcePath):
|
|
||||||
|
|
||||||
self.context = context
|
|
||||||
|
|
||||||
self.__logger = context['logger']
|
|
||||||
|
|
||||||
# Separate basedir, basename and extension for current source file
|
|
||||||
self.__sourcePath = sourcePath
|
|
||||||
|
|
||||||
self.__sourceDirectory = os.path.dirname(self.__sourcePath)
|
|
||||||
self.__sourceFilename = os.path.basename(self.__sourcePath)
|
|
||||||
|
|
||||||
sourcePathTokens = self.__sourceFilename.split('.')
|
|
||||||
|
|
||||||
if sourcePathTokens[-1] in FileProperties.FILE_EXTENSIONS:
|
|
||||||
self.__sourceFileBasename = '.'.join(sourcePathTokens[:-1])
|
|
||||||
self.__sourceFilenameExtension = sourcePathTokens[-1]
|
|
||||||
else:
|
|
||||||
self.__sourceFileBasename = self.__sourceFilename
|
|
||||||
self.__sourceFilenameExtension = ''
|
|
||||||
|
|
||||||
self.__pc = PatternController(context)
|
|
||||||
|
|
||||||
# Checking if database contains matching pattern
|
|
||||||
matchResult = self.__pc.matchFilename(self.__sourceFilename)
|
|
||||||
|
|
||||||
self.__logger.debug(f"FileProperties.__init__(): Match result: {matchResult}")
|
|
||||||
|
|
||||||
self.__pattern: Pattern = matchResult['pattern'] if matchResult else None
|
|
||||||
|
|
||||||
if matchResult:
|
|
||||||
databaseMatchedGroups = matchResult['match'].groups()
|
|
||||||
self.__logger.debug(f"FileProperties.__init__(): Matched groups: {databaseMatchedGroups}")
|
|
||||||
|
|
||||||
seIndicator = databaseMatchedGroups[0]
|
|
||||||
|
|
||||||
se_match = re.search(FileProperties.SEASON_EPISODE_INDICATOR_MATCH, seIndicator)
|
|
||||||
e_match = re.search(FileProperties.EPISODE_INDICATOR_MATCH, seIndicator)
|
|
||||||
|
|
||||||
else:
|
|
||||||
self.__logger.debug(f"FileProperties.__init__(): Checking file name for indicator {self.__sourceFilename}")
|
|
||||||
|
|
||||||
se_match = re.search(FileProperties.SEASON_EPISODE_INDICATOR_MATCH, self.__sourceFilename)
|
|
||||||
e_match = re.search(FileProperties.EPISODE_INDICATOR_MATCH, self.__sourceFilename)
|
|
||||||
|
|
||||||
if se_match is not None:
|
|
||||||
self.__season = int(se_match.group(1))
|
|
||||||
self.__episode = int(se_match.group(2))
|
|
||||||
elif e_match is not None:
|
|
||||||
self.__season = -1
|
|
||||||
self.__episode = int(e_match.group(1))
|
|
||||||
else:
|
|
||||||
self.__season = -1
|
|
||||||
self.__episode = -1
|
|
||||||
|
|
||||||
|
|
||||||
def getFormatData(self):
|
|
||||||
"""
|
|
||||||
"format": {
|
|
||||||
"filename": "Downloads/nagatoro_s02/nagatoro_s01e02.mkv",
|
|
||||||
"nb_streams": 18,
|
|
||||||
"nb_programs": 0,
|
|
||||||
"nb_stream_groups": 0,
|
|
||||||
"format_name": "matroska,webm",
|
|
||||||
"format_long_name": "Matroska / WebM",
|
|
||||||
"start_time": "0.000000",
|
|
||||||
"duration": "1420.063000",
|
|
||||||
"size": "1489169824",
|
|
||||||
"bit_rate": "8389316",
|
|
||||||
"probe_score": 100,
|
|
||||||
"tags": {
|
|
||||||
"PUBLISHER": "Crunchyroll",
|
|
||||||
"ENCODER": "Lavf58.29.100"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
# ffprobe -hide_banner -show_format -of json
|
|
||||||
ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffprobe",
|
|
||||||
"-hide_banner",
|
|
||||||
"-show_format",
|
|
||||||
"-of", "json",
|
|
||||||
self.__sourcePath]) #,
|
|
||||||
#context = self.context)
|
|
||||||
|
|
||||||
if 'Invalid data found when processing input' in ffprobeError:
|
|
||||||
raise Exception(f"File {self.__sourcePath} does not contain valid stream data")
|
|
||||||
|
|
||||||
if returnCode != 0:
|
|
||||||
raise Exception(f"ffprobe returned with error {returnCode}")
|
|
||||||
|
|
||||||
return json.loads(ffprobeOutput)['format']
|
|
||||||
|
|
||||||
|
|
||||||
def getStreamData(self):
|
|
||||||
"""Returns ffprobe stream data as array with elements according to the following example
|
|
||||||
{
|
|
||||||
"index": 4,
|
|
||||||
"codec_name": "hdmv_pgs_subtitle",
|
|
||||||
"codec_long_name": "HDMV Presentation Graphic Stream subtitles",
|
|
||||||
"codec_type": "subtitle",
|
|
||||||
"codec_tag_string": "[0][0][0][0]",
|
|
||||||
"codec_tag": "0x0000",
|
|
||||||
"r_frame_rate": "0/0",
|
|
||||||
"avg_frame_rate": "0/0",
|
|
||||||
"time_base": "1/1000",
|
|
||||||
"start_pts": 0,
|
|
||||||
"start_time": "0.000000",
|
|
||||||
"duration_ts": 1421035,
|
|
||||||
"duration": "1421.035000",
|
|
||||||
"disposition": {
|
|
||||||
"default": 1,
|
|
||||||
"dub": 0,
|
|
||||||
"original": 0,
|
|
||||||
"comment": 0,
|
|
||||||
"lyrics": 0,
|
|
||||||
"karaoke": 0,
|
|
||||||
"forced": 0,
|
|
||||||
"hearing_impaired": 0,
|
|
||||||
"visual_impaired": 0,
|
|
||||||
"clean_effects": 0,
|
|
||||||
"attached_pic": 0,
|
|
||||||
"timed_thumbnails": 0,
|
|
||||||
"non_diegetic": 0,
|
|
||||||
"captions": 0,
|
|
||||||
"descriptions": 0,
|
|
||||||
"metadata": 0,
|
|
||||||
"dependent": 0,
|
|
||||||
"still_image": 0
|
|
||||||
},
|
|
||||||
"tags": {
|
|
||||||
"language": "ger",
|
|
||||||
"title": "German Full"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
# ffprobe -hide_banner -show_streams -of json
|
|
||||||
ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffprobe",
|
|
||||||
"-hide_banner",
|
|
||||||
"-show_streams",
|
|
||||||
"-of", "json",
|
|
||||||
self.__sourcePath]) #,
|
|
||||||
#context = self.context)
|
|
||||||
|
|
||||||
if 'Invalid data found when processing input' in ffprobeError:
|
|
||||||
raise Exception(f"File {self.__sourcePath} does not contain valid stream data")
|
|
||||||
|
|
||||||
|
|
||||||
if returnCode != 0:
|
|
||||||
raise Exception(f"ffprobe returned with error {returnCode}")
|
|
||||||
|
|
||||||
|
|
||||||
return json.loads(ffprobeOutput)['streams']
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def findCropArguments(self):
|
|
||||||
""""""
|
|
||||||
|
|
||||||
# ffmpeg -i <input.file> -vf cropdetect -f null -
|
|
||||||
ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffmpeg", "-i",
|
|
||||||
self.__sourcePath,
|
|
||||||
"-vf", "cropdetect",
|
|
||||||
"-ss", "60",
|
|
||||||
"-t", "180",
|
|
||||||
"-f", "null", "-"
|
|
||||||
])
|
|
||||||
|
|
||||||
errorLines = ffprobeError.split('\n')
|
|
||||||
|
|
||||||
crops = {}
|
|
||||||
for el in errorLines:
|
|
||||||
|
|
||||||
cropdetect_match = re.search(FileProperties.CROPDETECT_PATTERN, el)
|
|
||||||
|
|
||||||
if cropdetect_match is not None:
|
|
||||||
cropParam = str(cropdetect_match.group(0))
|
|
||||||
|
|
||||||
crops[cropParam] = crops.get(cropParam, 0) + 1
|
|
||||||
|
|
||||||
if crops:
|
|
||||||
cropHistogram = sorted(crops, reverse=True)
|
|
||||||
cropString = cropHistogram[0]
|
|
||||||
|
|
||||||
cropTokens = cropString.split('=')
|
|
||||||
cropValueTokens = cropTokens[1]
|
|
||||||
cropValues = cropValueTokens.split(':')
|
|
||||||
|
|
||||||
return {
|
|
||||||
CropFilter.OUTPUT_WIDTH_KEY: cropValues[0],
|
|
||||||
CropFilter.OUTPUT_HEIGHT_KEY: cropValues[1],
|
|
||||||
CropFilter.OFFSET_X_KEY: cropValues[2],
|
|
||||||
CropFilter.OFFSET_Y_KEY: cropValues[3]
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def getMediaDescriptor(self):
|
|
||||||
return MediaDescriptor.fromFfprobe(self.context, self.getFormatData(), self.getStreamData())
|
|
||||||
|
|
||||||
|
|
||||||
def getShowId(self) -> int:
|
|
||||||
"""Result is -1 if the filename did not match anything in database"""
|
|
||||||
return self.__pattern.getShowId() if self.__pattern is not None else -1
|
|
||||||
|
|
||||||
def getPattern(self) -> Pattern:
|
|
||||||
"""Result is None if the filename did not match anything in database"""
|
|
||||||
return self.__pattern
|
|
||||||
|
|
||||||
|
|
||||||
def getSeason(self) -> int:
|
|
||||||
return int(self.__season)
|
|
||||||
|
|
||||||
def getEpisode(self) -> int:
|
|
||||||
return int(self.__episode)
|
|
||||||
|
|
||||||
|
|
||||||
def getFilename(self):
|
|
||||||
return self.__sourceFilename
|
|
||||||
|
|
||||||
def getFileBasename(self):
|
|
||||||
return self.__sourceFileBasename
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
import itertools
|
|
||||||
|
|
||||||
from .filter import Filter
|
|
||||||
|
|
||||||
|
|
||||||
class CropFilter(Filter):
    """Video crop filter: emits one ffmpeg 'crop=w:h:x:y' token."""

    IDENTIFIER = 'crop'

    # kwargs keys for the four crop geometry values.
    OUTPUT_WIDTH_KEY = 'output_width'
    OUTPUT_HEIGHT_KEY = 'output_height'
    OFFSET_X_KEY = 'x_offset'
    OFFSET_Y_KEY = 'y_offset'

    def __init__(self, **kwargs):
        """Accepts the four geometry keys; any missing value defaults to 0."""
        self.__outputWidth = int(kwargs.get(CropFilter.OUTPUT_WIDTH_KEY, 0))
        self.__outputHeight = int(kwargs.get(CropFilter.OUTPUT_HEIGHT_KEY, 0))
        self.__offsetX = int(kwargs.get(CropFilter.OFFSET_X_KEY, 0))
        self.__offsetY = int(kwargs.get(CropFilter.OFFSET_Y_KEY, 0))

        super().__init__(self)

    def setArguments(self, **kwargs):
        """Overwrite all four geometry values; every key is required here
        (a missing key raises via int(None), as in the original)."""
        self.__outputWidth = int(kwargs.get(CropFilter.OUTPUT_WIDTH_KEY))
        self.__outputHeight = int(kwargs.get(CropFilter.OUTPUT_HEIGHT_KEY))
        self.__offsetX = int(kwargs.get(CropFilter.OFFSET_X_KEY))
        self.__offsetY = int(kwargs.get(CropFilter.OFFSET_Y_KEY))

    def getPayload(self):
        """Describe this filter: parameters, variant tag and ffmpeg tokens."""
        geometry = (self.__outputWidth, self.__outputHeight,
                    self.__offsetX, self.__offsetY)

        return {'identifier': CropFilter.IDENTIFIER,
                'parameters': {
                    CropFilter.OUTPUT_WIDTH_KEY: self.__outputWidth,
                    CropFilter.OUTPUT_HEIGHT_KEY: self.__outputHeight,
                    CropFilter.OFFSET_X_KEY: self.__offsetX,
                    CropFilter.OFFSET_Y_KEY: self.__offsetY
                },
                'suffices': [],
                'variant': "C{}-{}-{}-{}".format(*geometry),
                'tokens': ['crop=' + ':'.join(str(value) for value in geometry)]}

    def getYield(self):
        """Crop has exactly one configuration, so yield a single payload."""
        yield self.getPayload()
|
|
||||||
@@ -1,140 +0,0 @@
|
|||||||
import itertools
|
|
||||||
|
|
||||||
from .filter import Filter
|
|
||||||
|
|
||||||
|
|
||||||
class DeinterlaceFilter(Filter):
    """Deinterlacing via ffmpeg's 'bwdif' filter.

    Currently has no tunable parameters; the large commented-out nlmeans
    parameter-parsing scaffold that used to live here was removed as dead
    code (it duplicated NlmeansFilter verbatim).
    """

    IDENTIFIER = 'bwdif'

    def __init__(self, **kwargs):
        # kwargs accepted for interface parity with the other filters,
        # but ignored — bwdif is applied with fixed settings.
        super().__init__(self)

    def getPayload(self):
        """Single fixed payload: the bwdif filter with mode=1."""
        return {'identifier': DeinterlaceFilter.IDENTIFIER,
                'parameters': {},
                'suffices': [],
                'variant': "DEINT",
                'tokens': ['bwdif=mode=1']}

    def getYield(self):
        """Only one configuration exists, so yield a single payload."""
        yield self.getPayload()
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
import itertools
|
|
||||||
|
|
||||||
|
|
||||||
class Filter():
    """Base class for all filters.

    NOTE(review): filterChain is a *class-level* list, so every Filter
    subclass instance ever constructed registers itself on one shared,
    process-wide chain. This appears deliberate (getChainYield combines all
    registered filters) — confirm before changing it to per-instance state.
    """

    filterChain: list = []

    def __init__(self, filter):
        # Register the concrete filter object on the shared chain.
        self.filterChain.append(filter)

    def getFilterChain(self):
        """Return the shared list of registered filters."""
        return self.filterChain

    def getChainYield(self):
        """Yield every combination of the registered filters' payloads
        (cartesian product over each filter's getYield())."""
        perFilterYields = [registered.getYield() for registered in self.filterChain]
        yield from itertools.product(*perFilterYields)
|
|
||||||
@@ -1,162 +0,0 @@
|
|||||||
import itertools
|
|
||||||
|
|
||||||
from .filter import Filter
|
|
||||||
|
|
||||||
|
|
||||||
class NlmeansFilter(Filter):
    """Denoise filter wrapping ffmpeg's nlmeans / nlmeans_opencl.

    Every tunable accepts a comma-separated list of values; getYield()
    produces one payload per combination (cartesian product), which lets
    the caller sweep parameter grids in a single run.
    """

    IDENTIFIER = 'nlmeans'

    DEFAULT_STRENGTH: float = 2.8
    DEFAULT_PATCH_SIZE: int = 13
    DEFAULT_CHROMA_PATCH_SIZE: int = 9
    DEFAULT_RESEARCH_WINDOW: int = 23
    DEFAULT_CHROMA_RESEARCH_WINDOW: int = 17

    STRENGTH_KEY = 'strength'
    PATCH_SIZE_KEY = 'patch_size'
    CHROMA_PATCH_SIZE_KEY = 'chroma_patch_size'
    RESEARCH_WINDOW_KEY = 'research_window'
    CHROMA_RESEARCH_WINDOW_KEY = 'chroma_research_window'

    @staticmethod
    def _parseOddIntList(raw, default, typeName, rangeName):
        """Parse a comma-separated list of odd integers in [0, 99].

        Returns [default] when raw is empty. typeName/rangeName only control
        the wording of the raised ValueError messages, which are kept
        identical to the original per-field messages.
        """
        if not raw:
            return [default]
        values = []
        for token in raw.split(','):
            try:
                value = int(token)
            except (TypeError, ValueError):
                raise ValueError(f'NlmeansFilter: {typeName} value has to be of type int')
            if value < 0 or value > 99:
                raise ValueError(f'NlmeansFilter: {rangeName} value has to be between 0 and 99')
            if value % 2 == 0:
                raise ValueError(f'NlmeansFilter: {rangeName} value has to an odd number')
            values.append(value)
        return values

    def __init__(self, **kwargs):
        """Validate and store all parameter lists, then register on the chain."""
        self.__useHardware = kwargs.get('use_hardware', False)

        # Strength is a float in [1.0, 30.0]; parsed inline because its type
        # and bounds differ from the integer parameters below.
        self.__strengthList = []
        strength = kwargs.get(NlmeansFilter.STRENGTH_KEY, '')
        if strength:
            for st in strength.split(','):
                try:
                    strengthValue = float(st)
                except (TypeError, ValueError):
                    raise ValueError('NlmeansFilter: Strength value has to be of type float')
                if strengthValue < 1.0 or strengthValue > 30.0:
                    raise ValueError('NlmeansFilter: Strength value has to be between 1.0 and 30.0')
                self.__strengthList.append(strengthValue)
        else:
            self.__strengthList = [NlmeansFilter.DEFAULT_STRENGTH]

        # BUGFIX: the original chroma-patch-size loop converted int(pst) —
        # the loop variable of the *previous* patch-size loop — instead of
        # its own token (cpst), so chroma patch sizes were silently wrong
        # (or raised NameError when no patch sizes were given). All four
        # integer lists now share one validated parser.
        self.__patchSizeList = self._parseOddIntList(
            kwargs.get(NlmeansFilter.PATCH_SIZE_KEY, ''),
            NlmeansFilter.DEFAULT_PATCH_SIZE,
            'Patch size', 'Patch size')
        self.__chromaPatchSizeList = self._parseOddIntList(
            kwargs.get(NlmeansFilter.CHROMA_PATCH_SIZE_KEY, ''),
            NlmeansFilter.DEFAULT_CHROMA_PATCH_SIZE,
            'Chroma patch size', 'Chroma patch')
        self.__researchWindowList = self._parseOddIntList(
            kwargs.get(NlmeansFilter.RESEARCH_WINDOW_KEY, ''),
            NlmeansFilter.DEFAULT_RESEARCH_WINDOW,
            'Research window', 'Research window')
        self.__chromaResearchWindowList = self._parseOddIntList(
            kwargs.get(NlmeansFilter.CHROMA_RESEARCH_WINDOW_KEY, ''),
            NlmeansFilter.DEFAULT_CHROMA_RESEARCH_WINDOW,
            'Chroma research window', 'Chroma research window')

        super().__init__(self)

    def getPayload(self, iteration):
        """Build the payload dict for one (strength, patch size, chroma patch
        size, research window, chroma research window) combination."""
        strength, patchSize, chromaPatchSize, researchWindow, chromaResearchWindow = iteration

        # Only parameters that are actually swept contribute a filename suffix.
        suffices = []
        if len(self.__strengthList) > 1:
            suffices += [f"ds{strength}"]
        if len(self.__patchSizeList) > 1:
            suffices += [f"dp{patchSize}"]
        if len(self.__chromaPatchSizeList) > 1:
            suffices += [f"dpc{chromaPatchSize}"]
        if len(self.__researchWindowList) > 1:
            suffices += [f"dr{researchWindow}"]
        if len(self.__chromaResearchWindowList) > 1:
            suffices += [f"drc{chromaResearchWindow}"]

        filterName = 'nlmeans_opencl' if self.__useHardware else 'nlmeans'

        return {'identifier': NlmeansFilter.IDENTIFIER,
                'parameters': {
                    'strength': strength,
                    'patch_size': patchSize,
                    'chroma_patch_size': chromaPatchSize,
                    'research_window': researchWindow,
                    'chroma_research_window': chromaResearchWindow
                },
                'suffices': suffices,
                'variant': f"DS{strength}-DP{patchSize}-DPC{chromaPatchSize}"
                           + f"-DR{researchWindow}-DRC{chromaResearchWindow}",
                'tokens': [f"{filterName}=s={strength}"
                           + f":p={patchSize}"
                           + f":pc={chromaPatchSize}"
                           + f":r={researchWindow}"
                           + f":rc={chromaResearchWindow}"]}

    def getYield(self):
        """Yield one payload per combination of configured parameter values."""
        for it in itertools.product(self.__strengthList,
                                    self.__patchSizeList,
                                    self.__chromaPatchSizeList,
                                    self.__researchWindowList,
                                    self.__chromaResearchWindowList):
            yield self.getPayload(it)
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
import itertools
|
|
||||||
|
|
||||||
from .filter import Filter
|
|
||||||
|
|
||||||
|
|
||||||
class PresetFilter(Filter):
    """Encoder preset selection (integer 0..13), sweepable via a
    comma-separated value list."""

    IDENTIFIER = 'preset'

    DEFAULT_PRESET = 5

    PRESET_KEY = 'preset'

    def __init__(self, **kwargs):
        """Parse and validate the preset list, then register on the chain."""
        rawPresets = str(kwargs.get(PresetFilter.PRESET_KEY, ''))
        if rawPresets:
            self.__presetsList = []
            for token in rawPresets.split(','):
                try:
                    presetValue = int(token)
                except:
                    raise ValueError('PresetFilter: Preset value has to be of type int')
                if presetValue < 0 or presetValue > 13:
                    raise ValueError('PresetFilter: Preset value has to be between 0 and 13')
                self.__presetsList.append(presetValue)
        else:
            self.__presetsList = [PresetFilter.DEFAULT_PRESET]

        super().__init__(self)

    def getPayload(self, preset):
        """Payload for one preset value; contributes no ffmpeg tokens — the
        preset is consumed elsewhere via 'parameters'."""
        suffices = [f"p{preset}"] if len(self.__presetsList) > 1 else []

        return {'identifier': PresetFilter.IDENTIFIER,
                'parameters': {'preset': preset},
                'suffices': suffices,
                'variant': f"P{preset}",
                'tokens': []}

    def getYield(self):
        """Yield one payload per configured preset value."""
        for preset in self.__presetsList:
            yield self.getPayload(preset)
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
import itertools
|
|
||||||
|
|
||||||
from .filter import Filter
|
|
||||||
|
|
||||||
|
|
||||||
class QualityFilter(Filter):
    """Target quality selection (integer 0..63), sweepable via a
    comma-separated value list."""

    IDENTIFIER = 'quality'

    DEFAULT_VP9_QUALITY = 32
    DEFAULT_H264_QUALITY = 17

    QUALITY_KEY = 'quality'

    def __init__(self, **kwargs):
        """Parse and validate the quality list, then register on the chain."""
        rawQualities = kwargs.get(QualityFilter.QUALITY_KEY, '')
        if rawQualities:
            self.__qualitiesList = []
            for token in rawQualities.split(','):
                try:
                    qualityValue = int(token)
                except:
                    raise ValueError('QualityFilter: Quality value has to be of type int')
                if qualityValue < 0 or qualityValue > 63:
                    raise ValueError('QualityFilter: Quality value has to be between 0 and 63')
                self.__qualitiesList.append(qualityValue)
        else:
            # NOTE(review): the fallback is always the VP9 default, even
            # though DEFAULT_H264_QUALITY exists — confirm whether the
            # encoder should influence the default here.
            self.__qualitiesList = [QualityFilter.DEFAULT_VP9_QUALITY]

        super().__init__(self)

    def getPayload(self, quality):
        """Payload for one quality value; contributes no ffmpeg tokens — the
        quality is consumed elsewhere via 'parameters'."""
        suffices = [f"q{quality}"] if len(self.__qualitiesList) > 1 else []

        return {'identifier': QualityFilter.IDENTIFIER,
                'parameters': {'quality': quality},
                'suffices': suffices,
                'variant': f"Q{quality}",
                'tokens': []}

    def getYield(self):
        """Yield one payload per configured quality value."""
        for quality in self.__qualitiesList:
            yield self.getPayload(quality)
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
from .filter import Filter
|
|
||||||
|
|
||||||
class ScaleFilter(Filter):
    """Placeholder scale filter: registers itself on the filter chain but
    defines no payload of its own yet."""

    def __init__(self):
        super().__init__(self)
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
from textual.app import ComposeResult
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Footer, Placeholder
|
|
||||||
|
|
||||||
class HelpScreen(Screen):
    """Static help screen: a placeholder body plus the app footer."""

    def __init__(self):
        super().__init__()
        # NOTE(review): the fetched context is never stored or used here —
        # confirm whether it should be kept on the instance. The call is
        # preserved in case getContext() has side effects.
        context = self.app.getContext()

    def compose(self) -> ComposeResult:
        """Lay out the screen's widgets."""
        yield Placeholder("Help Screen")
        yield Footer()
|
|
||||||
|
|
||||||
@@ -1,239 +0,0 @@
|
|||||||
import re, logging
|
|
||||||
|
|
||||||
from jinja2 import Environment, Undefined
|
|
||||||
from .constants import DEFAULT_OUTPUT_FILENAME_TEMPLATE
|
|
||||||
from .configuration_controller import ConfigurationController
|
|
||||||
|
|
||||||
|
|
||||||
class EmptyStringUndefined(Undefined):
    """Jinja2 Undefined variant that renders as an empty string instead of
    a placeholder, so missing template variables disappear silently."""

    def __str__(self):
        return ''
|
|
||||||
|
|
||||||
|
|
||||||
# Keys used by the dict/set diff helpers below to label diff categories.
DIFF_ADDED_KEY = 'added'
DIFF_REMOVED_KEY = 'removed'
DIFF_CHANGED_KEY = 'changed'
DIFF_UNCHANGED_KEY = 'unchanged'

# Matches rich markup like '[red]text[/red]' and captures the inner text.
# BUGFIX: made this a raw string — the original plain string relied on
# invalid escape sequences (\[ and \/) that Python only tolerates with a
# DeprecationWarning; the resulting pattern text is unchanged.
RICH_COLOR_PATTERN = r'\[[a-z_]+\](.+)\[\/[a-z_]+\]'
|
|
||||||
|
|
||||||
|
|
||||||
def dictDiff(a : dict, b : dict, ignoreKeys: list = None, removeKeys: list = None):
    """Diff dict b against dict a.

    ignoreKeys: Ignored keys are filtered from calculating diff at all
    removeKeys: Override diff calculation to remove keys certainly (dropped
                from b before comparing, so they are reported as removed)

    Returns a dict containing any of DIFF_REMOVED_KEY / DIFF_ADDED_KEY /
    DIFF_CHANGED_KEY / DIFF_UNCHANGED_KEY; empty when nothing differs.

    BUGFIX: the mutable default arguments ([]) were replaced with None —
    shared list defaults are a classic Python pitfall, even when only read.
    """
    ignoreKeys = [] if ignoreKeys is None else ignoreKeys
    removeKeys = [] if removeKeys is None else removeKeys

    a_filtered = {k: v for k, v in a.items() if k not in ignoreKeys}
    b_filtered = {k: v for k, v in b.items()
                  if k not in ignoreKeys and k not in removeKeys}

    a_only = {k: v for k, v in a_filtered.items() if k not in b_filtered}
    b_only = {k: v for k, v in b_filtered.items() if k not in a_filtered}

    common = set(a_filtered) & set(b_filtered)
    changed = {k: b_filtered[k] for k in common if a_filtered[k] != b_filtered[k]}
    unchanged = {k: b_filtered[k] for k in common if a_filtered[k] == b_filtered[k]}

    diffResult = {}

    if a_only:
        diffResult[DIFF_REMOVED_KEY] = a_only
        # NOTE(review): 'unchanged' is only reported when something was
        # removed — mirrors the original behavior; confirm it is intended.
        diffResult[DIFF_UNCHANGED_KEY] = unchanged
    if b_only:
        diffResult[DIFF_ADDED_KEY] = b_only
    if changed:
        diffResult[DIFF_CHANGED_KEY] = changed

    return diffResult
|
|
||||||
|
|
||||||
|
|
||||||
def dictKeysDiff(a: dict, b: dict):
    """Diff two dicts at the key level.

    Returns a dict that may contain DIFF_REMOVED_KEY, DIFF_UNCHANGED_KEY,
    DIFF_ADDED_KEY and DIFF_CHANGED_KEY entries (all holding key sets); an
    empty dict means both inputs have identical keys and equal values.
    """
    keysA = set(a.keys())
    keysB = set(b.keys())

    removed = keysA - keysB
    added = keysB - keysA
    # Keys present on both sides whose values differ.
    changed = {k for k in keysA & keysB if a[k] != b[k]}

    result = {}

    if removed:
        result[DIFF_REMOVED_KEY] = removed
        result[DIFF_UNCHANGED_KEY] = keysB
    if added:
        result[DIFF_ADDED_KEY] = added
    if changed:
        result[DIFF_CHANGED_KEY] = changed

    return result
|
|
||||||
|
|
||||||
|
|
||||||
def dictCache(element: dict, cache: list = None):
    """Look up element in cache by key/value structure.

    Returns a tuple (index, cache): the index of the first cached dict for
    which dictKeysDiff reports no difference, or -1 after appending element
    to the cache.
    """
    # BUG FIX: the previous mutable default argument (cache=[]) was mutated
    # via append(), silently persisting entries across unrelated calls.
    if cache is None:
        cache = []
    for index, cached in enumerate(cache):
        if not dictKeysDiff(cached, element):
            return index, cache
    cache.append(element)
    return -1, cache
|
|
||||||
|
|
||||||
|
|
||||||
def setDiff(a: set, b: set) -> set:
    """Diff two sets.

    Returns a dict that may contain DIFF_REMOVED_KEY, DIFF_UNCHANGED_KEY
    and DIFF_ADDED_KEY entries; an empty dict means a and b are equal.
    """
    removed = a - b
    added = b - a
    both = a & b

    result = {}

    if removed:
        result[DIFF_REMOVED_KEY] = removed
        result[DIFF_UNCHANGED_KEY] = both
    if added:
        result[DIFF_ADDED_KEY] = added

    return result
|
|
||||||
|
|
||||||
|
|
||||||
def permutateList(inputList: list, permutation: list):
    """Reorder inputList according to the index list permutation — e.g.
    permutation [1, 2, 0] would turn [A, B, C] into [B, C, A].

    NOTE(review): unimplemented stub; currently always returns None.
    """

    # 0,1,2: ABC
    # 0,2,1: ACB
    # 1,2,0: BCA

    pass
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Single-pass character substitution table (None deletes the character).
_FILENAME_TRANSLATION = str.maketrans({
    '/': '-',
    ':': ';',
    '?': '#',
    '*': None,
    "'": None,
    '♥': None,
    '’': None,
})


def filterFilename(fileName: str) -> str:
    """This filter replaces characters from TMDB responses with characters
    less problematic when used in filenames, or removes them.

    The input is converted to str once; the result is whitespace-stripped.
    """
    # One C-level translate pass instead of seven chained replace() calls,
    # each of which re-converted the value with str().
    return str(fileName).translate(_FILENAME_TRANSLATION).strip()
|
|
||||||
|
|
||||||
def substituteTmdbFilename(fileName: str) -> str:
    """If chaining this method with filterFilename use this one first as the latter will destroy some patterns"""
    name = str(fileName)

    # TMDB marks filler episodes with a trailing "(*)" — drop it.
    name = name.replace(' (*)', '').replace('(*)', '')

    # A trailing "(3)" marks the index of a multi-episode file.
    if (match := re.search(r"\(([0-9]+)\)$", name)) is not None:
        name = name.replace(match.group(0), f"Teil {match.group(1)}")

    # A trailing "(1-3)" / "(1/3)" marks a first-to-last episode range.
    if (match := re.search(r"\(([0-9]+)[-/]([0-9]+)\)$", name)) is not None:
        name = name.replace(match.group(0), f"Teil {match.group(1)}-{match.group(2)}")

    return name
|
|
||||||
|
|
||||||
|
|
||||||
def getEpisodeFileBasename(showName,
                           episodeName,
                           season,
                           episode,
                           indexSeasonDigits = 2,
                           indexEpisodeDigits = 2,
                           indicatorSeasonDigits = 2,
                           indicatorEpisodeDigits = 2,
                           context = None):
    """Render the output file basename for an episode via the configured
    Jinja2 template.

    A digit count of 0 disables the corresponding zero-padded component.

    Examples of per-show digit settings:
        One Piece:  indexSeasonDigits=0, indexEpisodeDigits=4, indicatorSeasonDigits=2, indicatorEpisodeDigits=4
        Three-Body: indexSeasonDigits=0, indexEpisodeDigits=2, indicatorSeasonDigits=2, indicatorEpisodeDigits=2
        Dragonball: indexSeasonDigits=0, indexEpisodeDigits=3, indicatorSeasonDigits=2, indicatorEpisodeDigits=3
        Boruto:     indexSeasonDigits=0, indexEpisodeDigits=4, indicatorSeasonDigits=2, indicatorEpisodeDigits=4
    """

    # Pull the filename template from the configuration when available.
    cc: ConfigurationController = None
    if context is not None and 'config' in context:
        cc = context['config']
    configData = cc.getData() if cc is not None else {}
    outputFilenameTemplate = configData.get(ConfigurationController.OUTPUT_FILENAME_TEMPLATE_KEY,
                                            DEFAULT_OUTPUT_FILENAME_TEMPLATE)

    # NOTE(review): the logger is acquired but never used in this function.
    if context is not None and 'logger' in context:
        logger = context['logger']
    else:
        logger = logging.getLogger('FFX')
        logger.addHandler(logging.NullHandler())

    # Plain zero-padded index part, e.g. "0102".
    indexSeparator = ' ' if (indexSeasonDigits or indexEpisodeDigits) else ''
    seasonIndex = f"{season:0{indexSeasonDigits}}" if indexSeasonDigits else ''
    episodeIndex = f"{episode:0{indexEpisodeDigits}}" if indexEpisodeDigits else ''

    # "SxxEyy"-style indicator part.
    indicatorSeparator = ' - ' if (indicatorSeasonDigits or indicatorEpisodeDigits) else ''
    seasonIndicator = f"S{season:0{indicatorSeasonDigits}}" if indicatorSeasonDigits else ''
    episodeIndicator = f"E{episode:0{indicatorEpisodeDigits}}" if indicatorEpisodeDigits else ''

    templateVariables = {
        'ffx_show_name': showName,
        'ffx_index_separator': indexSeparator,
        'ffx_season_index': str(seasonIndex),
        'ffx_episode_index': str(episodeIndex),
        'ffx_index': str(seasonIndex) + str(episodeIndex),
        'ffx_episode_name': episodeName,
        'ffx_indicator_separator': indicatorSeparator,
        'ffx_season_indicator': str(seasonIndicator),
        'ffx_episode_indicator': str(episodeIndicator),
        'ffx_indicator': str(seasonIndicator) + str(episodeIndicator)
    }

    # Undefined template variables render as '' instead of raising.
    environment = Environment(undefined=EmptyStringUndefined)
    return environment.from_string(outputFilenameTemplate).render(**templateVariables)
|
|
||||||
|
|
||||||
# return ''.join(filenameTokens)
|
|
||||||
|
|
||||||
|
|
||||||
def formatRichColor(text: str, color: str = None):
    """Wrap text in rich markup tags for the given color; return text
    unchanged when color is None."""
    return text if color is None else f"[{color}]{text}[/{color}]"
|
|
||||||
|
|
||||||
def removeRichColor(text: str):
    """Strip a rich color tag pair (e.g. "[red]x[/red]" -> "x"); return
    text unchanged when no markup is found."""
    match = re.search(RICH_COLOR_PATTERN, text)
    return text if match is None else str(match.group(1))
|
|
||||||
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
import difflib
|
|
||||||
|
|
||||||
class IsoLanguage(Enum):
    """Registry of languages with their ISO 639-1 and ISO 639-2 codes.

    Each member's value is a dict with keys:
        name:     English display name
        iso639_1: two-letter (ISO 639-1) code
        iso639_2: list of three-letter (ISO 639-2) codes, possibly both
                  bibliographic and terminological variants
    """

    AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]}
    ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["alb"]}
    ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]}
    ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["arm"]}
    AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]}
    BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["baq"]}
    BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]}
    BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]}  # Norwegian Bokmål
    BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]}
    CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]}
    CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]}
    CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]}
    CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["cze"]}
    DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]}
    DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]}
    ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]}
    ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]}
    FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]}  # Tagalog
    FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]}
    FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]}
    GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["geo"]}
    GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]}
    GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["gre"]}
    HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]}
    HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]}
    HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]}
    ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["ice"]}
    INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]}
    IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]}
    ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]}
    JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]}
    KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]}
    KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]}
    LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]}
    LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]}
    LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]}
    MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mac"]}
    MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["may"]}
    MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]}
    NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]}
    PERSIAN = {"name": "Persian", "iso639_1": "fa", "iso639_2": ["per"]}
    POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]}
    PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]}
    ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["rum"]}
    RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]}
    NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]}
    SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]}
    SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]}
    SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]}
    SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]}
    SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]}
    SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]}
    SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]}
    SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]}
    SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slo", "slk"]}
    SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]}
    SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]}
    SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]}
    SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]}
    SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]}
    SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]}
    SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]}
    SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]}
    TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]}
    TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]}
    THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]}
    TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]}
    UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]}
    URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["vie"] if False else ["urd"]} if False else {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]}
    VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2": ["vie"]}
    WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["wel"]}

    # Fallback member returned by the lookup helpers when nothing matches.
    UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]}

    @staticmethod
    def find(label : str):
        """Fuzzy-find a language by its English display name.

        Uses difflib.get_close_matches on the member names; returns
        IsoLanguage.UNDEFINED when no sufficiently close match exists.
        """

        closestMatches = difflib.get_close_matches(label, [l.value["name"] for l in IsoLanguage], n=1)

        if closestMatches:
            foundLangs = [l for l in IsoLanguage if l.value['name'] == closestMatches[0]]
            return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED
        else:
            return IsoLanguage.UNDEFINED

    @staticmethod
    def findThreeLetter(theeLetter : str):
        """Look up a language by an ISO 639-2 three-letter code; returns
        IsoLanguage.UNDEFINED when the code is not registered.

        NOTE(review): the parameter name "theeLetter" looks like a typo for
        "threeLetter" — renaming would break keyword callers, so it is kept.
        """
        foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value['iso639_2']]
        return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED

    def label(self):
        """Return the English display name."""
        return str(self.value['name'])

    def twoLetter(self):
        """Return the ISO 639-1 two-letter code."""
        return str(self.value['iso639_1'])

    def threeLetter(self):
        """Return the primary (first) ISO 639-2 three-letter code."""
        return str(self.value['iso639_2'][0])
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
|
|
||||||
from ffx.tag_controller import TagController
|
|
||||||
from ffx.track_controller import TrackController
|
|
||||||
|
|
||||||
class MediaController():
    """Persists the content of a MediaDescriptor (media tags and tracks)
    for a stored pattern, delegating to TagController and TrackController."""

    def __init__(self, context):
        # context must provide 'database' (with a 'session' factory) and 'logger'.
        self.context = context
        self.Session = self.context['database']['session'] # convenience

        self.__logger = context['logger']

        self.__tc = TrackController(context = context)
        self.__tac = TagController(context = context)

    def setPatternMediaDescriptor(self, mediaDescriptor: MediaDescriptor, patternId: int):
        """Attach the media tags and track descriptors of mediaDescriptor to
        the pattern with id patternId.

        Returns True when the pattern exists and was updated, False when no
        such pattern is stored. Raises click.ClickException on any error.
        """
        # BUG FIX: 's' must exist before 'try' so the 'finally' close does not
        # raise UnboundLocalError when int(patternId) or Session() fails.
        s = None
        try:
            pid = int(patternId)

            s = self.Session()
            q = s.query(Pattern).filter(Pattern.id == pid)

            if q.count():
                # BUG FIX: removed dead statement "pattern = q.first" — the
                # method was never called and the result never used.

                # BUG FIX: getTags() returns a dict; iterating it directly
                # yields keys only and failed to unpack key/value pairs.
                for mediaTagKey, mediaTagValue in mediaDescriptor.getTags().items():
                    self.__tac.updateMediaTag(pid, mediaTagKey, mediaTagValue)
                for trackDescriptor in mediaDescriptor.getTrackDescriptors():
                    self.__tc.addTrack(trackDescriptor, patternId = pid)

                s.commit()
                return True
            else:
                return False

        except Exception as ex:
            self.__logger.error(f"MediaController.setPatternMediaDescriptor(): {repr(ex)}")
            raise click.ClickException(f"MediaController.setPatternMediaDescriptor(): {repr(ex)}")
        finally:
            if s is not None:
                s.close()
|
|
||||||
@@ -1,512 +0,0 @@
|
|||||||
import os, re, click, logging
|
|
||||||
|
|
||||||
from typing import List, Self
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
from ffx.iso_language import IsoLanguage
|
|
||||||
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
|
|
||||||
class MediaDescriptor:
    """This class represents the structural content of a media file including streams and metadata"""

    # Keyword-argument keys accepted by __init__.
    CONTEXT_KEY = "context"

    TAGS_KEY = "tags"
    TRACKS_KEY = "tracks"

    TRACK_DESCRIPTOR_LIST_KEY = "track_descriptors"
    CLEAR_TAGS_FLAG_KEY = "clear_tags"

    # Keys as used in ffprobe's JSON output.
    FFPROBE_DISPOSITION_KEY = "disposition"
    FFPROBE_TAGS_KEY = "tags"
    FFPROBE_CODEC_TYPE_KEY = "codec_type"

    #407 remove as well
    EXCLUDED_MEDIA_TAGS = ["creation_time"]

    # Subtitle filename patterns: season/episode + stream index + 3-letter
    # language code, optionally followed by uppercase 3-letter groups
    # (presumably disposition flags — TODO confirm against the writers).
    SEASON_EPISODE_STREAM_LANGUAGE_DISPOSITIONS_MATCH = '[sS]([0-9]+)[eE]([0-9]+)_([0-9]+)_([a-z]{3})(?:_([A-Z]{3}))*'
    STREAM_LANGUAGE_DISPOSITIONS_MATCH = '([0-9]+)_([a-z]{3})(?:_([A-Z]{3}))*'

    # File extension expected for external subtitle files.
    SUBTITLE_FILE_EXTENSION = 'vtt'
|
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
|
|
||||||
if MediaDescriptor.CONTEXT_KEY in kwargs.keys():
|
|
||||||
if type(kwargs[MediaDescriptor.CONTEXT_KEY]) is not dict:
|
|
||||||
raise TypeError(
|
|
||||||
f"MediaDescriptor.__init__(): Argument {MediaDescriptor.CONTEXT_KEY} is required to be of type dict"
|
|
||||||
)
|
|
||||||
self.__context = kwargs[MediaDescriptor.CONTEXT_KEY]
|
|
||||||
self.__logger = self.__context['logger']
|
|
||||||
else:
|
|
||||||
self.__context = {}
|
|
||||||
self.__logger = logging.getLogger('FFX')
|
|
||||||
self.__logger.addHandler(logging.NullHandler())
|
|
||||||
|
|
||||||
if MediaDescriptor.TAGS_KEY in kwargs.keys():
|
|
||||||
if type(kwargs[MediaDescriptor.TAGS_KEY]) is not dict:
|
|
||||||
raise TypeError(
|
|
||||||
f"MediaDescriptor.__init__(): Argument {MediaDescriptor.TAGS_KEY} is required to be of type dict"
|
|
||||||
)
|
|
||||||
self.__mediaTags = kwargs[MediaDescriptor.TAGS_KEY]
|
|
||||||
else:
|
|
||||||
self.__mediaTags = {}
|
|
||||||
|
|
||||||
if MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY in kwargs.keys():
|
|
||||||
if (
|
|
||||||
type(kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY]) is not list
|
|
||||||
): # Use List typehint for TrackDescriptor as well if it works
|
|
||||||
raise TypeError(
|
|
||||||
f"MediaDescriptor.__init__(): Argument {MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY} is required to be of type list"
|
|
||||||
)
|
|
||||||
for d in kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY]:
|
|
||||||
if type(d) is not TrackDescriptor:
|
|
||||||
raise TypeError(
|
|
||||||
f"TrackDesciptor.__init__(): All elements of argument list {MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY} are required to be of type TrackDescriptor"
|
|
||||||
)
|
|
||||||
self.__trackDescriptors = kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY]
|
|
||||||
else:
|
|
||||||
self.__trackDescriptors = []
|
|
||||||
|
|
||||||
def setTrackLanguage(self, language: str, index: int, trackType: TrackType = None):
|
|
||||||
|
|
||||||
trackLanguage = IsoLanguage.findThreeLetter(language)
|
|
||||||
if trackLanguage == IsoLanguage.UNDEFINED:
|
|
||||||
self.__logger.warning('MediaDescriptor.setTrackLanguage(): Parameter language does not contain a registered '
|
|
||||||
+ f"ISO 639 3-letter language code, skipping to set language for"
|
|
||||||
+ str('' if trackType is None else trackType.label()) + f"track {index}")
|
|
||||||
|
|
||||||
trackList = self.getTrackDescriptors(trackType=trackType)
|
|
||||||
|
|
||||||
if index < 0 or index > len(trackList) - 1:
|
|
||||||
self.__logger.warning(f"MediaDescriptor.setTrackLanguage(): Parameter index ({index}) is "
|
|
||||||
+ f"out of range of {'' if trackType is None else trackType.label()}track list")
|
|
||||||
|
|
||||||
td: TrackDescriptor = trackList[index]
|
|
||||||
td.setLanguage(trackLanguage)
|
|
||||||
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
def setTrackTitle(self, title: str, index: int, trackType: TrackType = None):
|
|
||||||
|
|
||||||
trackList = self.getTrackDescriptors(trackType=trackType)
|
|
||||||
|
|
||||||
if index < 0 or index > len(trackList) - 1:
|
|
||||||
self.__logger.error(f"MediaDescriptor.setTrackTitle(): Parameter index ({index}) is "
|
|
||||||
+ f"out of range of {'' if trackType is None else trackType.label()}track list")
|
|
||||||
raise click.Abort()
|
|
||||||
|
|
||||||
td: TrackDescriptor = trackList[index]
|
|
||||||
td.setTitle(title)
|
|
||||||
|
|
||||||
|
|
||||||
def setDefaultSubTrack(self, trackType: TrackType, subIndex: int):
|
|
||||||
# for t in self.getAllTrackDescriptors():
|
|
||||||
for t in self.getTrackDescriptors():
|
|
||||||
if t.getType() == trackType:
|
|
||||||
t.setDispositionFlag(
|
|
||||||
TrackDisposition.DEFAULT, t.getSubIndex() == int(subIndex)
|
|
||||||
)
|
|
||||||
|
|
||||||
def setForcedSubTrack(self, trackType: TrackType, subIndex: int):
|
|
||||||
# for t in self.getAllTrackDescriptors():
|
|
||||||
for t in self.getTrackDescriptors():
|
|
||||||
if t.getType() == trackType:
|
|
||||||
t.setDispositionFlag(
|
|
||||||
TrackDisposition.FORCED, t.getSubIndex() == int(subIndex)
|
|
||||||
)
|
|
||||||
|
|
||||||
def checkConfiguration(self):
|
|
||||||
|
|
||||||
videoTracks = self.getVideoTracks()
|
|
||||||
audioTracks = self.getAudioTracks()
|
|
||||||
subtitleTracks = self.getSubtitleTracks()
|
|
||||||
|
|
||||||
if len([v for v in videoTracks if v.getDispositionFlag(TrackDisposition.DEFAULT)]) > 1:
|
|
||||||
raise ValueError('More than one default video track')
|
|
||||||
if len([a for a in audioTracks if a.getDispositionFlag(TrackDisposition.DEFAULT)]) > 1:
|
|
||||||
raise ValueError('More than one default audio track')
|
|
||||||
if len([s for s in subtitleTracks if s.getDispositionFlag(TrackDisposition.DEFAULT)]) > 1:
|
|
||||||
raise ValueError('More than one default subtitle track')
|
|
||||||
|
|
||||||
if len([v for v in videoTracks if v.getDispositionFlag(TrackDisposition.FORCED)]) > 1:
|
|
||||||
raise ValueError('More than one forced video track')
|
|
||||||
if len([a for a in audioTracks if a.getDispositionFlag(TrackDisposition.FORCED)]) > 1:
|
|
||||||
raise ValueError('More than one forced audio track')
|
|
||||||
if len([s for s in subtitleTracks if s.getDispositionFlag(TrackDisposition.FORCED)]) > 1:
|
|
||||||
raise ValueError('More than one forced subtitle track')
|
|
||||||
|
|
||||||
trackDescriptors = videoTracks + audioTracks + subtitleTracks
|
|
||||||
sourceIndices = [
|
|
||||||
t.getSourceIndex() for t in trackDescriptors
|
|
||||||
]
|
|
||||||
if len(set(sourceIndices)) < len(trackDescriptors):
|
|
||||||
raise ValueError('Multiple streams originating from the same source stream')
|
|
||||||
|
|
||||||
|
|
||||||
def applyOverrides(self, overrides: dict):
|
|
||||||
|
|
||||||
if 'languages' in overrides.keys():
|
|
||||||
for trackIndex in overrides['languages'].keys():
|
|
||||||
self.setTrackLanguage(overrides['languages'][trackIndex], trackIndex)
|
|
||||||
|
|
||||||
if 'titles' in overrides.keys():
|
|
||||||
for trackIndex in overrides['titles'].keys():
|
|
||||||
self.setTrackTitle(overrides['titles'][trackIndex], trackIndex)
|
|
||||||
|
|
||||||
if 'forced_video' in overrides.keys():
|
|
||||||
sti = int(overrides['forced_video'])
|
|
||||||
self.setForcedSubTrack(TrackType.VIDEO, sti)
|
|
||||||
self.setDefaultSubTrack(TrackType.VIDEO, sti)
|
|
||||||
|
|
||||||
elif 'default_video' in overrides.keys():
|
|
||||||
sti = int(overrides['default_video'])
|
|
||||||
self.setDefaultSubTrack(TrackType.VIDEO, sti)
|
|
||||||
|
|
||||||
if 'forced_audio' in overrides.keys():
|
|
||||||
sti = int(overrides['forced_audio'])
|
|
||||||
self.setForcedSubTrack(TrackType.AUDIO, sti)
|
|
||||||
self.setDefaultSubTrack(TrackType.AUDIO, sti)
|
|
||||||
|
|
||||||
elif 'default_audio' in overrides.keys():
|
|
||||||
sti = int(overrides['default_audio'])
|
|
||||||
self.setDefaultSubTrack(TrackType.AUDIO, sti)
|
|
||||||
|
|
||||||
if 'forced_subtitle' in overrides.keys():
|
|
||||||
sti = int(overrides['forced_subtitle'])
|
|
||||||
self.setForcedSubTrack(TrackType.SUBTITLE, sti)
|
|
||||||
self.setDefaultSubTrack(TrackType.SUBTITLE, sti)
|
|
||||||
|
|
||||||
elif 'default_subtitle' in overrides.keys():
|
|
||||||
sti = int(overrides['default_subtitle'])
|
|
||||||
self.setDefaultSubTrack(TrackType.SUBTITLE, sti)
|
|
||||||
|
|
||||||
if 'stream_order' in overrides.keys():
|
|
||||||
self.rearrangeTrackDescriptors(overrides['stream_order'])
|
|
||||||
|
|
||||||
|
|
||||||
def applySourceIndices(self, sourceMediaDescriptor: Self):
|
|
||||||
# sourceTrackDescriptors = sourceMediaDescriptor.getAllTrackDescriptors()
|
|
||||||
sourceTrackDescriptors = sourceMediaDescriptor.getTrackDescriptors()
|
|
||||||
|
|
||||||
numTrackDescriptors = len(self.__trackDescriptors)
|
|
||||||
if len(sourceTrackDescriptors) != numTrackDescriptors:
|
|
||||||
raise ValueError('MediaDescriptor.applySourceIndices (): Number of track descriptors does not match')
|
|
||||||
|
|
||||||
for trackIndex in range(numTrackDescriptors):
|
|
||||||
self.__trackDescriptors[trackIndex].setSourceIndex(sourceTrackDescriptors[trackIndex].getSourceIndex())
|
|
||||||
|
|
||||||
|
|
||||||
def rearrangeTrackDescriptors(self, newOrder: List[int]):
|
|
||||||
if len(newOrder) != len(self.__trackDescriptors):
|
|
||||||
raise ValueError('Length of list with reordered indices does not match number of track descriptors')
|
|
||||||
reorderedTrackDescriptors = {}
|
|
||||||
for oldIndex in newOrder:
|
|
||||||
reorderedTrackDescriptors.append(self.__trackDescriptors[oldIndex])
|
|
||||||
self.__trackDescriptors = reorderedTrackDescriptors
|
|
||||||
self.reindexSubIndices()
|
|
||||||
self.reindexIndices()
|
|
||||||
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def fromFfprobe(cls, context, formatData, streamData):
|
|
||||||
|
|
||||||
kwargs = {}
|
|
||||||
|
|
||||||
kwargs[MediaDescriptor.CONTEXT_KEY] = context
|
|
||||||
|
|
||||||
if MediaDescriptor.FFPROBE_TAGS_KEY in formatData.keys():
|
|
||||||
kwargs[MediaDescriptor.TAGS_KEY] = formatData[
|
|
||||||
MediaDescriptor.FFPROBE_TAGS_KEY
|
|
||||||
]
|
|
||||||
|
|
||||||
kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY] = []
|
|
||||||
|
|
||||||
# TODO: Evtl obsolet
|
|
||||||
subIndexCounters = {}
|
|
||||||
|
|
||||||
for streamObj in streamData:
|
|
||||||
|
|
||||||
ffprobeCodecType = streamObj[MediaDescriptor.FFPROBE_CODEC_TYPE_KEY]
|
|
||||||
trackType = TrackType.fromLabel(ffprobeCodecType)
|
|
||||||
|
|
||||||
if trackType != TrackType.UNKNOWN:
|
|
||||||
|
|
||||||
if trackType not in subIndexCounters.keys():
|
|
||||||
subIndexCounters[trackType] = 0
|
|
||||||
|
|
||||||
kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY].append(
|
|
||||||
TrackDescriptor.fromFfprobe(
|
|
||||||
streamObj, subIndex=subIndexCounters[trackType]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
subIndexCounters[trackType] += 1
|
|
||||||
|
|
||||||
return cls(**kwargs)
|
|
||||||
|
|
||||||
def getTags(self):
|
|
||||||
return self.__mediaTags
|
|
||||||
|
|
||||||
|
|
||||||
def sortSubIndices(
|
|
||||||
self, descriptors: List[TrackDescriptor]
|
|
||||||
) -> List[TrackDescriptor]:
|
|
||||||
subIndex = 0
|
|
||||||
for d in descriptors:
|
|
||||||
d.setSubIndex(subIndex)
|
|
||||||
subIndex += 1
|
|
||||||
return descriptors
|
|
||||||
|
|
||||||
def reindexSubIndices(self, trackDescriptors: list = []):
|
|
||||||
tdList = trackDescriptors if trackDescriptors else self.__trackDescriptors
|
|
||||||
subIndexCounter = {}
|
|
||||||
for td in tdList:
|
|
||||||
trackType = td.getType()
|
|
||||||
if trackType not in subIndexCounter.keys():
|
|
||||||
subIndexCounter[trackType] = 0
|
|
||||||
td.setSubIndex(subIndexCounter[trackType])
|
|
||||||
subIndexCounter[trackType] += 1
|
|
||||||
|
|
||||||
def sortIndices(
|
|
||||||
self, descriptors: List[TrackDescriptor]
|
|
||||||
) -> List[TrackDescriptor]:
|
|
||||||
index = 0
|
|
||||||
for d in descriptors:
|
|
||||||
d.setIndex(index)
|
|
||||||
index += 1
|
|
||||||
return descriptors
|
|
||||||
|
|
||||||
def reindexIndices(self, trackDescriptors: list = []):
|
|
||||||
tdList = trackDescriptors if trackDescriptors else self.__trackDescriptors
|
|
||||||
for trackIndex in range(len(tdList)):
|
|
||||||
tdList[trackIndex].setIndex(trackIndex)
|
|
||||||
|
|
||||||
|
|
||||||
# def getAllTrackDescriptors(self):
|
|
||||||
# """Returns all track descriptors sorted by type: video, audio then subtitles"""
|
|
||||||
# return self.getVideoTracks() + self.getAudioTracks() + self.getSubtitleTracks()
|
|
||||||
|
|
||||||
|
|
||||||
def getTrackDescriptors(self,
|
|
||||||
trackType: TrackType = None) -> List[TrackDescriptor]:
|
|
||||||
|
|
||||||
if trackType is None:
|
|
||||||
return self.__trackDescriptors
|
|
||||||
|
|
||||||
descriptorList = []
|
|
||||||
for td in self.__trackDescriptors:
|
|
||||||
if td.getType() == trackType:
|
|
||||||
descriptorList.append(td)
|
|
||||||
|
|
||||||
return descriptorList
|
|
||||||
|
|
||||||
|
|
||||||
def getVideoTracks(self) -> List[TrackDescriptor]:
|
|
||||||
return [v for v in self.__trackDescriptors if v.getType() == TrackType.VIDEO]
|
|
||||||
|
|
||||||
def getAudioTracks(self) -> List[TrackDescriptor]:
|
|
||||||
return [a for a in self.__trackDescriptors if a.getType() == TrackType.AUDIO]
|
|
||||||
|
|
||||||
def getSubtitleTracks(self) -> List[TrackDescriptor]:
|
|
||||||
return [
|
|
||||||
s
|
|
||||||
for s in self.__trackDescriptors
|
|
||||||
if s.getType() == TrackType.SUBTITLE
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def getImportFileTokens(self, use_sub_index: bool = True):
|
|
||||||
"""Generate ffmpeg import options for external stream files"""
|
|
||||||
|
|
||||||
importFileTokens = []
|
|
||||||
|
|
||||||
td: TrackDescriptor
|
|
||||||
for td in self.__trackDescriptors:
|
|
||||||
|
|
||||||
importedFilePath = td.getExternalSourceFilePath()
|
|
||||||
|
|
||||||
if importedFilePath:
|
|
||||||
|
|
||||||
self.__logger.info(f"Substituting subtitle stream #{td.getIndex()} "
|
|
||||||
+ f"({td.getType().label()}:{td.getSubIndex()}) "
|
|
||||||
+ f"with import from file {td.getExternalSourceFilePath()}")
|
|
||||||
|
|
||||||
importFileTokens += [
|
|
||||||
"-i",
|
|
||||||
importedFilePath,
|
|
||||||
]
|
|
||||||
|
|
||||||
return importFileTokens
|
|
||||||
|
|
||||||
|
|
||||||
    def getInputMappingTokens(self, use_sub_index: bool = True, only_video: bool = False):
        """Tracks must be reordered for source index order

        Builds the ffmpeg "-map" argument list for all tracks.

        use_sub_index: map streams as "<file>:<type>:<sub-index>" instead of
                       the flat "0:<index>" form.
        only_video:    restrict mapping to video tracks.
        """

        inputMappingTokens = []

        sortedTrackDescriptors = sorted(self.__trackDescriptors, key=lambda d: d.getIndex())

        # Debugging aid left by the author (dumps the track table):
        # raise click.ClickException(' '.join([f"\nindex={td.getIndex()} subIndex={td.getSubIndex()} srcIndex={td.getSourceIndex()} type={td.getType().label()}" for td in self.__trackDescriptors]))

        # Input-file counter for externally imported streams; presumably
        # input 0 is the main media file, so external files start at 1 —
        # TODO confirm against the ffmpeg command assembly.
        filePointer = 1
        for trackIndex in range(len(sortedTrackDescriptors)):

            td: TrackDescriptor = sortedTrackDescriptors[trackIndex]

            #HINT: Attached thumbnails are not supported by .webm container format
            if td.getCodec() != TrackCodec.PNG:

                # Index / sub-index of the track this one is sourced from.
                # NOTE(review): only stdsi is used in the sub-index branch
                # and only stdi in the flat branch.
                stdi = sortedTrackDescriptors[td.getSourceIndex()].getIndex()
                stdsi = sortedTrackDescriptors[td.getSourceIndex()].getSubIndex()

                trackType = td.getType()

                if (trackType == TrackType.VIDEO or not only_video):

                    importedFilePath = td.getExternalSourceFilePath()

                    if use_sub_index:

                        if importedFilePath:

                            # Externally imported stream: take stream 0 of
                            # the next extra input file.
                            inputMappingTokens += [
                                "-map",
                                f"{filePointer}:{trackType.indicator()}:0",
                            ]
                            filePointer += 1

                        else:

                            # Image-based subtitle codecs are skipped here.
                            if not td.getCodec() in [TrackCodec.PGS, TrackCodec.VOBSUB]:
                                inputMappingTokens += [
                                    "-map",
                                    f"0:{trackType.indicator()}:{stdsi}",
                                ]

                    else:
                        # Flat mapping by absolute stream index.
                        if not td.getCodec() in [TrackCodec.PGS, TrackCodec.VOBSUB]:
                            inputMappingTokens += ["-map", f"0:{stdi}"]

        return inputMappingTokens
|
|
||||||
|
|
||||||
|
|
||||||
def searchSubtitleFiles(self, searchDirectory, prefix):
    """Scan *searchDirectory* for external subtitle files matching *prefix*.

    Two filename patterns are recognized: one carrying season/episode plus
    stream/language/dispositions, and one carrying only
    stream/language/dispositions. A descriptor dict is built per match.

    :return: list of descriptor dicts with keys ``path``, ``index``,
             ``language``, ``disposition_set`` and (for the season/episode
             pattern) ``season`` and ``episode``
    """

    sesld_match = re.compile(f"{prefix}_{MediaDescriptor.SEASON_EPISODE_STREAM_LANGUAGE_DISPOSITIONS_MATCH}")
    sld_match = re.compile(f"{prefix}_{MediaDescriptor.STREAM_LANGUAGE_DISPOSITIONS_MATCH}")

    def collectDispositions(captureGroups, firstDispositionGroup):
        # Every capture group past the fixed leading ones is a (possibly
        # unmatched) disposition indicator.
        dispositions = set()
        for rawIndicator in captureGroups[firstDispositionGroup:]:
            disposition = TrackDisposition.fromIndicator(rawIndicator)
            if disposition is not None:
                dispositions.add(disposition)
        return dispositions

    subtitleFileDescriptors = []

    for subtitleFilename in os.listdir(searchDirectory):

        if not (subtitleFilename.startswith(prefix)
                and subtitleFilename.endswith("." + MediaDescriptor.SUBTITLE_FILE_EXTENSION)):
            continue

        subtitleFilePath = os.path.join(searchDirectory, subtitleFilename)
        if not os.path.isfile(subtitleFilePath):
            continue

        # Season/episode pattern takes precedence over the plain pattern.
        seasonResult = sesld_match.search(subtitleFilename)
        if seasonResult is not None:
            subtitleFileDescriptors.append({
                "path": subtitleFilePath,
                "season": int(seasonResult.group(1)),
                "episode": int(seasonResult.group(2)),
                "index": int(seasonResult.group(3)),
                "language": seasonResult.group(4),
                "disposition_set": collectDispositions(seasonResult.groups(), 4),
            })
            continue

        streamResult = sld_match.search(subtitleFilename)
        if streamResult is not None:
            subtitleFileDescriptors.append({
                "path": subtitleFilePath,
                "index": int(streamResult.group(1)),
                "language": streamResult.group(2),
                "disposition_set": collectDispositions(streamResult.groups(), 2),
            })

    self.__logger.debug(f"searchSubtitleFiles(): Available subtitle files {subtitleFileDescriptors}")

    return subtitleFileDescriptors
|
|
||||||
|
|
||||||
|
|
||||||
def importSubtitles(self, searchDirectory, prefix, season: int = -1, episode: int = -1):
    """Attach matching external subtitle files to this media's subtitle tracks.

    Scans *searchDirectory* for subtitle files named after *prefix*, filters
    them by season/episode (a value of -1 for both disables the filter) and
    assigns each matching file as the external source of the subtitle track
    with the same stream index.

    :param searchDirectory: directory to scan for subtitle files
    :param prefix: filename prefix identifying subtitle files for this media
    :param season: season number to match, or -1
    :param episode: episode number to match, or -1
    """

    self.__logger.debug(f"importSubtitles(): Season: {season} Episode: {episode}")

    availableFileSubtitleDescriptors = self.searchSubtitleFiles(searchDirectory, prefix)

    self.__logger.debug(f"importSubtitles(): availableFileSubtitleDescriptors: {availableFileSubtitleDescriptors}")

    subtitleTracks = self.getSubtitleTracks()

    self.__logger.debug(f"importSubtitles(): subtitleTracks: {[s.getIndex() for s in subtitleTracks]}")

    # BUGFIX: descriptors built from season-less subtitle filenames carry no
    # "season"/"episode" keys, so d["season"] raised KeyError whenever a
    # season/episode filter was active. dict.get() returns None for those
    # descriptors, so they are simply filtered out instead.
    matchingSubtitleFileDescriptors = (
        sorted(
            [
                d
                for d in availableFileSubtitleDescriptors
                if ((season == -1 and episode == -1)
                    or (d.get("season") == int(season) and d.get("episode") == int(episode)))
            ],
            key=lambda d: d["index"],
        )
        if availableFileSubtitleDescriptors
        else []
    )

    self.__logger.debug(f"importSubtitles(): matchingSubtitleFileDescriptors: {matchingSubtitleFileDescriptors}")

    for msfd in matchingSubtitleFileDescriptors:
        # Match on the stream index encoded in the subtitle filename.
        matchingSubtitleTrackDescriptor = [s for s in subtitleTracks if s.getIndex() == msfd["index"]]
        if matchingSubtitleTrackDescriptor:
            self.__logger.debug(f"importSubtitles(): Found matching subtitle file {msfd['path']}")
            matchingSubtitleTrackDescriptor[0].setExternalSourceFilePath(msfd["path"])

            # TODO: Check if useful
            # matchingSubtitleTrackDescriptor[0].setDispositionSet(msfd["disposition_set"])
|
|
||||||
|
|
||||||
|
|
||||||
def getConfiguration(self, label: str = ''):
    """Yield a human-readable configuration summary: a header line with the
    media-level tags, then one line per track descriptor."""
    headerLabel = label if label else 'MediaDescriptor ' + str(id(self))
    mediaTagText = ' '.join([str(k) + '=' + str(v) for k, v in self.__mediaTags.items()])
    yield f"--- {headerLabel} {mediaTagText}"
    for td in self.getTrackDescriptors():
        dispositionText = '|'.join([d.indicator() for d in td.getDispositionSet()])
        trackTagText = ' '.join([str(k) + '=' + str(v) for k, v in td.getTags().items()])
        yield (f"{td.getIndex()}:{td.getType().indicator()}:{td.getSubIndex()} "
               + dispositionText + ' ' + trackTagText)
|
|
||||||
@@ -1,302 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
from ffx.helper import dictDiff, setDiff, DIFF_ADDED_KEY, DIFF_CHANGED_KEY, DIFF_REMOVED_KEY, DIFF_UNCHANGED_KEY
|
|
||||||
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
|
|
||||||
|
|
||||||
class MediaDescriptorChangeSet():
    """Diff between two MediaDescriptor objects.

    ``sourceMediaDescriptor`` describes the current state (the file on disk),
    ``targetMediaDescriptor`` the desired state. The resulting change set is a
    nested dict::

        { TAGS_KEY:   <dictDiff of global media tags>,
          TRACKS_KEY: { DIFF_ADDED_KEY:   {trackIndex: TrackDescriptor, ...},
                        DIFF_REMOVED_KEY: {trackIndex: TrackDescriptor, ...},
                        DIFF_CHANGED_KEY: {trackIndex: <per-track diff>, ...} } }

    and is translated into ffmpeg command-line tokens by
    generateDispositionTokens() / generateMetadataTokens().
    """

    # Keys of the nested change-set dictionary.
    TAGS_KEY = "tags"
    TRACKS_KEY = "tracks"
    DISPOSITION_SET_KEY = "disposition_set"

    # NOTE(review): not referenced anywhere in this class -- confirm it is
    # still needed by external callers before removing.
    TRACK_DESCRIPTOR_KEY = "track_descriptor"


    def __init__(self, context,
                 targetMediaDescriptor: MediaDescriptor = None,
                 sourceMediaDescriptor: MediaDescriptor = None):
        """Build the change set from source to target descriptor.

        :param context: application context dict; 'logger' and 'config' are read
        :param targetMediaDescriptor: desired media configuration (may be None)
        :param sourceMediaDescriptor: current media configuration (may be None)
        """

        self.__context = context
        self.__logger = context['logger']

        self.__configurationData = self.__context['config'].getData()

        # Metadata handling rules from configuration (every key is optional).
        metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {}

        self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {}
        self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else []
        self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else []
        self.__removeTrackKeys = (metadataConfiguration['streams']['remove']
                                  if 'streams' in metadataConfiguration.keys()
                                  and 'remove' in metadataConfiguration['streams'].keys() else [])
        self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore']
                                  if 'streams' in metadataConfiguration.keys()
                                  and 'ignore' in metadataConfiguration['streams'].keys() else [])


        self.__targetTrackDescriptors = targetMediaDescriptor.getTrackDescriptors() if targetMediaDescriptor is not None else []
        self.__sourceTrackDescriptors = sourceMediaDescriptor.getTrackDescriptors() if sourceMediaDescriptor is not None else []

        targetMediaTags = targetMediaDescriptor.getTags() if targetMediaDescriptor is not None else {}
        sourceMediaTags = sourceMediaDescriptor.getTags() if sourceMediaDescriptor is not None else {}


        self.__changeSetObj = {}

        # Global (container-level) tag differences.
        tagsDiff = dictDiff(sourceMediaTags,
                            targetMediaTags,
                            ignoreKeys=self.__ignoreGlobalKeys,
                            removeKeys=self.__removeGlobalKeys)

        if tagsDiff:
            self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY] = tagsDiff


        self.__numTargetTracks = len(self.__targetTrackDescriptors)

        # Current track configuration (of file)
        self.__numSourceTracks = len(self.__sourceTrackDescriptors)

        maxNumOfTracks = max(self.__numSourceTracks, self.__numTargetTracks)

        trackCompareResult = {}

        # Pair up source and target tracks by track index and classify every
        # index as added, removed or changed.
        for trackIndex in range(maxNumOfTracks):

            correspondingSourceTrackDescriptors = [st for st in self.__sourceTrackDescriptors if st.getIndex() == trackIndex]
            correspondingTargetTrackDescriptors = [tt for tt in self.__targetTrackDescriptors if tt.getIndex() == trackIndex]

            # Track present in target but not in source
            if (not correspondingSourceTrackDescriptors
                    and correspondingTargetTrackDescriptors):

                if DIFF_ADDED_KEY not in trackCompareResult.keys():
                    trackCompareResult[DIFF_ADDED_KEY] = {}

                trackCompareResult[DIFF_ADDED_KEY][trackIndex] = correspondingTargetTrackDescriptors[0]
                continue

            # Track present in source but not in target
            if (correspondingSourceTrackDescriptors
                    and not correspondingTargetTrackDescriptors):

                if DIFF_REMOVED_KEY not in trackCompareResult.keys():
                    trackCompareResult[DIFF_REMOVED_KEY] = {}

                trackCompareResult[DIFF_REMOVED_KEY][trackIndex] = correspondingSourceTrackDescriptors[0]
                continue

            # Track present on both sides: diff its tags and dispositions.
            if (correspondingSourceTrackDescriptors
                    and correspondingTargetTrackDescriptors):

                trackDiff = self.compareTracks(correspondingTargetTrackDescriptors[0],
                                               correspondingSourceTrackDescriptors[0])

                if trackDiff:
                    if DIFF_CHANGED_KEY not in trackCompareResult.keys():
                        trackCompareResult[DIFF_CHANGED_KEY] = {}

                    trackCompareResult[DIFF_CHANGED_KEY][trackIndex] = trackDiff


        if trackCompareResult:
            self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY] = trackCompareResult


    def compareTracks(self,
                      targetTrackDescriptor: TrackDescriptor = None,
                      sourceTrackDescriptor: TrackDescriptor = None):
        """Diff a single pair of track descriptors.

        :return: dict with optional TAGS_KEY (tag dictDiff) and
                 DISPOSITION_SET_KEY (disposition setDiff) entries;
                 empty when the tracks are equivalent
        """

        sourceTrackTags = sourceTrackDescriptor.getTags() if sourceTrackDescriptor is not None else {}
        targetTrackTags = targetTrackDescriptor.getTags() if targetTrackDescriptor is not None else {}

        trackCompareResult = {}

        tagsDiffResult = dictDiff(sourceTrackTags,
                                  targetTrackTags,
                                  ignoreKeys=self.__ignoreTrackKeys,
                                  removeKeys=self.__removeTrackKeys)

        if tagsDiffResult:
            trackCompareResult[MediaDescriptorChangeSet.TAGS_KEY] = tagsDiffResult

        sourceDispositionSet = sourceTrackDescriptor.getDispositionSet() if sourceTrackDescriptor is not None else set()
        targetDispositionSet = targetTrackDescriptor.getDispositionSet() if targetTrackDescriptor is not None else set()

        dispositionDiffResult = setDiff(sourceDispositionSet, targetDispositionSet)

        if dispositionDiffResult:
            trackCompareResult[MediaDescriptorChangeSet.DISPOSITION_SET_KEY] = dispositionDiffResult

        return trackCompareResult


    def generateDispositionTokens(self):
        """Generate ffmpeg ``-disposition`` tokens for every target track.

        Tracks with an empty disposition set have their dispositions cleared
        explicitly with the value ``0``.

        Example: ``-disposition:s:0 default -disposition:s:1 0``
        """
        dispositionTokens = []

        for ttd in self.__targetTrackDescriptors:

            targetDispositions = ttd.getDispositionSet()
            streamIndicator = ttd.getType().indicator()
            subIndex = ttd.getSubIndex()

            if targetDispositions:
                dispositionTokens += [f"-disposition:{streamIndicator}:{subIndex}", '+'.join([d.label() for d in targetDispositions])]
            # if not targetDispositions and removedDispositions:
            else:
                dispositionTokens += [f"-disposition:{streamIndicator}:{subIndex}", '0']

        return dispositionTokens


    def generateMetadataTokens(self):
        """Translate the change set into ffmpeg ``-metadata`` tokens.

        Global tags use the ``-metadata:g`` selector, per-track tags use
        ``-metadata:s:<type>:<subIndex>``. Removed tags are cleared by
        assigning an empty value (``key=``).
        """

        metadataTokens = []

        if MediaDescriptorChangeSet.TAGS_KEY in self.__changeSetObj.keys():

            addedMediaTags = (self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_ADDED_KEY]
                              if DIFF_ADDED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
            removedMediaTags = (self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_REMOVED_KEY]
                                if DIFF_REMOVED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
            changedMediaTags = (self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_CHANGED_KEY]
                                if DIFF_CHANGED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})

            outputMediaTags = addedMediaTags | changedMediaTags

            # Signature tags are appended unless suppressed via the context.
            if (not 'no_signature' in self.__context.keys()
                    or not self.__context['no_signature']):
                outputMediaTags = outputMediaTags | self.__signatureTags

            for tagKey, tagValue in outputMediaTags.items():
                metadataTokens += [f"-metadata:g",
                                   f"{tagKey}={tagValue}"]

            # NOTE(review): changed tags are already contained in
            # outputMediaTags above, so they are emitted a second time here --
            # looks redundant; confirm whether this is intentional.
            for tagKey, tagValue in changedMediaTags.items():
                metadataTokens += [f"-metadata:g",
                                   f"{tagKey}={tagValue}"]

            # Clear removed tags by assigning an empty value.
            for removeKey in removedMediaTags.keys():
                metadataTokens += [f"-metadata:g",
                                   f"{removeKey}="]


        if MediaDescriptorChangeSet.TRACKS_KEY in self.__changeSetObj.keys():

            # Newly added tracks: emit all their tags (minus removed keys).
            if DIFF_ADDED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
                addedTracks: dict = self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_ADDED_KEY]
                trackDescriptor: TrackDescriptor
                for trackDescriptor in addedTracks.values():
                    for tagKey, tagValue in trackDescriptor.getTags().items():
                        if not tagKey in self.__removeTrackKeys:
                            metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                               + f":{trackDescriptor.getSubIndex()}",
                                               f"{tagKey}={tagValue}"]

            # Changed tracks: emit added/changed tags, clear removed ones.
            if DIFF_CHANGED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
                changedTracks: dict = self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_CHANGED_KEY]
                trackDiffObj: dict
                for trackIndex, trackDiffObj in changedTracks.items():

                    if MediaDescriptorChangeSet.TAGS_KEY in trackDiffObj.keys():

                        tagsDiffObj = trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY]

                        addedTrackTags = tagsDiffObj[DIFF_ADDED_KEY] if DIFF_ADDED_KEY in tagsDiffObj.keys() else {}
                        changedTrackTags = tagsDiffObj[DIFF_CHANGED_KEY] if DIFF_CHANGED_KEY in tagsDiffObj.keys() else {}
                        unchangedTrackTags = tagsDiffObj[DIFF_UNCHANGED_KEY] if DIFF_UNCHANGED_KEY in tagsDiffObj.keys() else {}
                        removedTrackTags = tagsDiffObj[DIFF_REMOVED_KEY] if DIFF_REMOVED_KEY in tagsDiffObj.keys() else {}

                        outputTrackTags = addedTrackTags | changedTrackTags

                        trackDescriptor = self.__targetTrackDescriptors[trackIndex]

                        for tagKey, tagValue in outputTrackTags.items():
                            metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                               + f":{trackDescriptor.getSubIndex()}",
                                               f"{tagKey}={tagValue}"]

                        for removeKey in removedTrackTags.keys():
                            metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                               + f":{trackDescriptor.getSubIndex()}",
                                               f"{removeKey}="]

                        #HINT: In case of loading a track from an external file
                        # no tags from source are present for the track so
                        # the unchanged tracks are passed to the output file as well
                        if trackDescriptor.getExternalSourceFilePath():
                            for tagKey, tagValue in unchangedTrackTags.items():
                                metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                                   + f":{trackDescriptor.getSubIndex()}",
                                                   f"{tagKey}={tagValue}"]

        return metadataTokens


    def getChangeSetObj(self):
        """Return the computed nested change-set dictionary."""
        return self.__changeSetObj
|
|
||||||
@@ -1,757 +0,0 @@
|
|||||||
import os, click, re
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button, Input, DataTable
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from ffx.audio_layout import AudioLayout
|
|
||||||
|
|
||||||
from .pattern_controller import PatternController
|
|
||||||
from .show_controller import ShowController
|
|
||||||
from .track_controller import TrackController
|
|
||||||
from .tag_controller import TagController
|
|
||||||
|
|
||||||
from .show_details_screen import ShowDetailsScreen
|
|
||||||
from .pattern_details_screen import PatternDetailsScreen
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
from ffx.model.track import Track
|
|
||||||
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
from textual.widgets._data_table import CellDoesNotExist
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.file_properties import FileProperties
|
|
||||||
|
|
||||||
from ffx.media_descriptor_change_set import MediaDescriptorChangeSet
|
|
||||||
|
|
||||||
from ffx.helper import formatRichColor, DIFF_ADDED_KEY, DIFF_CHANGED_KEY, DIFF_REMOVED_KEY, DIFF_UNCHANGED_KEY
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class MediaDetailsScreen(Screen):
|
|
||||||
|
|
||||||
# Textual CSS for the screen layout: a 5-column grid holding the tracks
# table, the input row and the differences table.
# NOTE(review): grid-size declares 8 rows but grid-rows lists 9 values --
# confirm which is intended.
CSS = """

Grid {
    grid-size: 5 8;
    grid-rows: 8 2 2 2 2 8 2 2 8;
    grid-columns: 15 25 90 10 105;
    height: 100%;
    width: 100%;
    padding: 1;
}

DataTable .datatable--cursor {
    background: darkorange;
    color: black;
}

DataTable .datatable--header {
    background: steelblue;
    color: white;
}

Input {
    border: none;
}
Button {
    border: none;
}

DataTable {
    min-height: 40;
}

#toplabel {
    height: 1;
}
.two {
    column-span: 2;
}
.three {
    column-span: 3;
}

.four {
    column-span: 4;
}
.five {
    column-span: 5;
}

.triple {
    row-span: 3;
}

.box {
    height: 100%;
    border: solid green;
}

.purple {
    tint: purple 40%;
}

.yellow {
    tint: yellow 40%;
}

#differences-table {
    row-span: 8;
    /* tint: magenta 40%; */
}

/* #pattern_input {
    tint: red 40%;
}*/
"""


# Column labels for the tracks DataTable.
TRACKS_TABLE_INDEX_COLUMN_LABEL = "Index"
TRACKS_TABLE_TYPE_COLUMN_LABEL = "Type"
TRACKS_TABLE_SUB_INDEX_COLUMN_LABEL = "SubIndex"
TRACKS_TABLE_CODEC_COLUMN_LABEL = "Codec"
TRACKS_TABLE_LAYOUT_COLUMN_LABEL = "Layout"
TRACKS_TABLE_LANGUAGE_COLUMN_LABEL = "Language"
TRACKS_TABLE_TITLE_COLUMN_LABEL = "Title"
TRACKS_TABLE_DEFAULT_COLUMN_LABEL = "Default"
TRACKS_TABLE_FORCED_COLUMN_LABEL = "Forced"

# Single column of the differences DataTable.
DIFFERENCES_TABLE_DIFFERENCES_COLUMN_LABEL = 'Differences (file->db/output)'


# Key bindings shown in the Footer.
BINDINGS = [
    ("n", "new_pattern", "New Pattern"),
    ("u", "update_pattern", "Update Pattern"),
    ("e", "edit_pattern", "Edit Pattern"),
]
|
|
||||||
|
|
||||||
|
|
||||||
def __init__(self):
    """Initialize the inspect screen.

    Reads the application context and configuration, creates the database
    controllers and validates the command-line arguments, then loads the
    properties of the media file under inspection.

    Raises click.ClickException when the command is not 'inspect', when no
    filename argument was provided, or when the file does not exist.
    """
    super().__init__()

    self.context = self.app.getContext()
    self.Session = self.context['database']['session'] # convenience


    self.__configurationData = self.context['config'].getData()

    # Metadata handling rules from configuration (every key is optional).
    # NOTE(review): this extraction duplicates MediaDescriptorChangeSet's
    # constructor -- consider a shared helper.
    metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {}

    self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {}
    self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else []
    self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else []
    self.__removeTrackKeys = (metadataConfiguration['streams']['remove']
                              if 'streams' in metadataConfiguration.keys()
                              and 'remove' in metadataConfiguration['streams'].keys() else [])
    self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore']
                              if 'streams' in metadataConfiguration.keys()
                              and 'ignore' in metadataConfiguration['streams'].keys() else [])


    # Database-facing controllers.
    self.__pc = PatternController(context = self.context)
    self.__sc = ShowController(context = self.context)
    self.__tc = TrackController(context = self.context)
    self.__tac = TagController(context = self.context)

    # This screen only supports the 'inspect' command with a filename argument.
    if not 'command' in self.context.keys() or self.context['command'] != 'inspect':
        raise click.ClickException(f"MediaDetailsScreen.__init__(): Can only perform command 'inspect'")

    if not 'arguments' in self.context.keys() or not 'filename' in self.context['arguments'].keys() or not self.context['arguments']['filename']:
        raise click.ClickException(f"MediaDetailsScreen.__init__(): Argument 'filename' is required to be provided for command 'inspect'")

    self.__mediaFilename = self.context['arguments']['filename']

    if not os.path.isfile(self.__mediaFilename):
        raise click.ClickException(f"MediaDetailsScreen.__init__(): Media file {self.__mediaFilename} does not exist")

    self.loadProperties()
|
|
||||||
|
|
||||||
|
|
||||||
def removeShow(self, showId : int = -1):
    """Remove a show entry from the shows DataTable.

    Removes the <New show> placeholder entry (first cell is a single space)
    when *showId* is not set.

    :param showId: id of the show row to remove; -1 selects the placeholder
    """

    for rowKey, row in self.showsTable.rows.items(): # dict[RowKey, Row]

        rowData = self.showsTable.get_row(rowKey)

        # Rows whose id cell is not numeric raise ValueError on int();
        # shorter/odd rows may raise IndexError/TypeError. Only those are
        # expected -- the previous bare `except:` also swallowed programming
        # errors and even KeyboardInterrupt.
        try:
            if ((showId == -1 and rowData[0] == ' ')
                    or showId == int(rowData[0])):
                self.showsTable.remove_row(rowKey)
                return
        except (ValueError, IndexError, TypeError):
            continue
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getRowIndexFromShowId(self, showId : int = -1) -> int:
    """Find the index of the row whose id cell matches *showId*.

    With showId == -1 the <New show> placeholder row (first cell is a single
    space) is searched instead.

    :param showId: id of the show to locate, or -1 for the placeholder
    :return: the row index, or None when no row matches
    """

    for rowKey, row in self.showsTable.rows.items(): # dict[RowKey, Row]

        rowData = self.showsTable.get_row(rowKey)

        # Non-numeric id cells raise ValueError on int(); shorter/odd rows
        # may raise IndexError/TypeError. Only those are expected -- the
        # previous bare `except:` also swallowed programming errors.
        try:
            if ((showId == -1 and rowData[0] == ' ')
                    or showId == int(rowData[0])):
                return int(self.showsTable.get_row_index(rowKey))
        except (ValueError, IndexError, TypeError):
            continue

    return None
|
|
||||||
|
|
||||||
|
|
||||||
def loadProperties(self):
    """(Re)load the media file properties and compute the change set.

    Probes the media file, looks up the matching pattern in the database and
    diffs the file's media descriptor (source) against the database-derived
    one (target) into self.__mediaChangeSetObj.
    """

    self.__mediaFileProperties = FileProperties(self.context, self.__mediaFilename)
    self.__sourceMediaDescriptor = self.__mediaFileProperties.getMediaDescriptor()

    #HINT: This is None if the filename did not match anything in database
    self.__currentPattern = self.__mediaFileProperties.getPattern()

    # No tags available when there is no matching pattern.
    self.__targetMediaDescriptor = self.__currentPattern.getMediaDescriptor(self.context) if self.__currentPattern is not None else None

    # Enumerating differences between media descriptors
    # from file (=current) vs from stored in database (=target)
    try:
        mdcs = MediaDescriptorChangeSet(self.context,
                                        self.__targetMediaDescriptor,
                                        self.__sourceMediaDescriptor)

        self.__mediaChangeSetObj = mdcs.getChangeSetObj()
    except ValueError:
        # A failed diff leaves the screen with an empty change set rather
        # than crashing the UI.
        self.__mediaChangeSetObj = {}
|
|
||||||
|
|
||||||
|
|
||||||
def updateDifferences(self):
|
|
||||||
|
|
||||||
self.loadProperties()
|
|
||||||
|
|
||||||
self.differencesTable.clear()
|
|
||||||
|
|
||||||
|
|
||||||
if MediaDescriptorChangeSet.TAGS_KEY in self.__mediaChangeSetObj.keys():
|
|
||||||
|
|
||||||
if DIFF_ADDED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys():
|
|
||||||
for tagKey, tagValue in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_ADDED_KEY].items():
|
|
||||||
if tagKey not in self.__ignoreGlobalKeys:
|
|
||||||
row = (f"add media tag: key='{tagKey}' value='{tagValue}'",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
if DIFF_REMOVED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys():
|
|
||||||
for tagKey, tagValue in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_REMOVED_KEY].items():
|
|
||||||
if tagKey not in self.__ignoreGlobalKeys and tagKey not in self.__removeGlobalKeys:
|
|
||||||
row = (f"remove media tag: key='{tagKey}' value='{tagValue}'",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
if DIFF_CHANGED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys():
|
|
||||||
for tagKey, tagValue in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_CHANGED_KEY].items():
|
|
||||||
if tagKey not in self.__ignoreGlobalKeys:
|
|
||||||
row = (f"change media tag: key='{tagKey}' value='{tagValue}'",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
|
|
||||||
if MediaDescriptorChangeSet.TRACKS_KEY in self.__mediaChangeSetObj.keys():
|
|
||||||
|
|
||||||
if DIFF_ADDED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
|
|
||||||
|
|
||||||
trackDescriptor: TrackDescriptor
|
|
||||||
for trackIndex, trackDescriptor in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_ADDED_KEY].items():
|
|
||||||
row = (f"add {trackDescriptor.getType().label()} track: index={trackDescriptor.getIndex()} lang={trackDescriptor.getLanguage().threeLetter()}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
if DIFF_REMOVED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
|
|
||||||
for trackIndex, trackDescriptor in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_REMOVED_KEY].items():
|
|
||||||
row = (f"remove stream #{trackIndex}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
if DIFF_CHANGED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
|
|
||||||
|
|
||||||
changedTracks: dict = self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_CHANGED_KEY]
|
|
||||||
|
|
||||||
targetTrackDescriptors = self.__targetMediaDescriptor.getTrackDescriptors()
|
|
||||||
|
|
||||||
trackDiffObj: dict
|
|
||||||
for trackIndex, trackDiffObj in changedTracks.items():
|
|
||||||
|
|
||||||
ttd: TrackDescriptor = targetTrackDescriptors[trackIndex]
|
|
||||||
|
|
||||||
|
|
||||||
if MediaDescriptorChangeSet.TAGS_KEY in trackDiffObj.keys():
|
|
||||||
|
|
||||||
removedTags = (trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_REMOVED_KEY]
|
|
||||||
if DIFF_REMOVED_KEY in trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
|
|
||||||
for tagKey, tagValue in removedTags.items():
|
|
||||||
row = (f"change stream #{ttd.getIndex()} ({ttd.getType().label()}:{ttd.getSubIndex()}) remove key={tagKey} value={tagValue}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
addedTags = (trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_ADDED_KEY]
|
|
||||||
if DIFF_ADDED_KEY in trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
|
|
||||||
for tagKey, tagValue in addedTags.items():
|
|
||||||
row = (f"change stream #{ttd.getIndex()} ({ttd.getType().label()}:{ttd.getSubIndex()}) add key={tagKey} value={tagValue}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
changedTags = (trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_CHANGED_KEY]
|
|
||||||
if DIFF_CHANGED_KEY in trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
|
|
||||||
for tagKey, tagValue in changedTags.items():
|
|
||||||
row = (f"change stream #{ttd.getIndex()} ({ttd.getType().label()}:{ttd.getSubIndex()}) change key={tagKey} value={tagValue}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
|
|
||||||
if MediaDescriptorChangeSet.DISPOSITION_SET_KEY in trackDiffObj.keys():
|
|
||||||
|
|
||||||
addedDispositions = (trackDiffObj[MediaDescriptorChangeSet.DISPOSITION_SET_KEY][DIFF_ADDED_KEY]
|
|
||||||
if DIFF_ADDED_KEY in trackDiffObj[MediaDescriptorChangeSet.DISPOSITION_SET_KEY].keys() else set())
|
|
||||||
for ad in addedDispositions:
|
|
||||||
row = (f"change stream #{ttd.getIndex()} ({ttd.getType().label()}:{ttd.getSubIndex()}) add disposition={ad.label()}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
removedDispositions = (trackDiffObj[MediaDescriptorChangeSet.DISPOSITION_SET_KEY][DIFF_REMOVED_KEY]
|
|
||||||
if DIFF_REMOVED_KEY in trackDiffObj[MediaDescriptorChangeSet.DISPOSITION_SET_KEY].keys() else set())
|
|
||||||
for rd in removedDispositions:
|
|
||||||
row = (f"change stream #{ttd.getIndex()} ({ttd.getType().label()}:{ttd.getSubIndex()}) remove disposition={rd.label()}",)
|
|
||||||
self.differencesTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
|
|
||||||
def on_mount(self):
|
|
||||||
|
|
||||||
if self.__currentPattern is None:
|
|
||||||
row = (' ', '<New show>', ' ') # Convert each element to a string before adding
|
|
||||||
self.showsTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
for show in self.__sc.getAllShows():
|
|
||||||
row = (int(show.id), show.name, show.year) # Convert each element to a string before adding
|
|
||||||
self.showsTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
for mediaTagKey, mediaTagValue in self.__sourceMediaDescriptor.getTags().items():
|
|
||||||
|
|
||||||
textColor = None
|
|
||||||
if mediaTagKey in self.__ignoreGlobalKeys:
|
|
||||||
textColor = 'blue'
|
|
||||||
if mediaTagKey in self.__removeGlobalKeys:
|
|
||||||
textColor = 'red'
|
|
||||||
|
|
||||||
row = (formatRichColor(mediaTagKey, textColor), formatRichColor(mediaTagValue, textColor)) # Convert each element to a string before adding
|
|
||||||
self.mediaTagsTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
self.updateTracks()
|
|
||||||
|
|
||||||
|
|
||||||
if self.__currentPattern is not None:
|
|
||||||
|
|
||||||
showIdentifier = self.__currentPattern.getShowId()
|
|
||||||
showRowIndex = self.getRowIndexFromShowId(showIdentifier)
|
|
||||||
if showRowIndex is not None:
|
|
||||||
self.showsTable.move_cursor(row=showRowIndex)
|
|
||||||
|
|
||||||
self.query_one("#pattern_input", Input).value = self.__currentPattern.getPattern()
|
|
||||||
|
|
||||||
self.updateDifferences()
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
self.query_one("#pattern_input", Input).value = self.__mediaFilename
|
|
||||||
self.highlightPattern(True)
|
|
||||||
|
|
||||||
|
|
||||||
def highlightPattern(self, state : bool):
|
|
||||||
if state:
|
|
||||||
self.query_one("#pattern_input", Input).styles.background = 'red'
|
|
||||||
else:
|
|
||||||
self.query_one("#pattern_input", Input).styles.background = None
|
|
||||||
|
|
||||||
|
|
||||||
def updateTracks(self):
|
|
||||||
|
|
||||||
self.tracksTable.clear()
|
|
||||||
|
|
||||||
# trackDescriptorList = self.__sourceMediaDescriptor.getAllTrackDescriptors()
|
|
||||||
trackDescriptorList = self.__sourceMediaDescriptor.getTrackDescriptors()
|
|
||||||
|
|
||||||
typeCounter = {}
|
|
||||||
|
|
||||||
for td in trackDescriptorList:
|
|
||||||
|
|
||||||
trackType = td.getType()
|
|
||||||
if not trackType in typeCounter.keys():
|
|
||||||
typeCounter[trackType] = 0
|
|
||||||
|
|
||||||
dispoSet = td.getDispositionSet()
|
|
||||||
audioLayout = td.getAudioLayout()
|
|
||||||
row = (td.getIndex(),
|
|
||||||
trackType.label(),
|
|
||||||
typeCounter[trackType],
|
|
||||||
td.getCodec().label(),
|
|
||||||
audioLayout.label() if trackType == TrackType.AUDIO
|
|
||||||
and audioLayout != AudioLayout.LAYOUT_UNDEFINED else ' ',
|
|
||||||
td.getLanguage().label(),
|
|
||||||
td.getTitle(),
|
|
||||||
'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No',
|
|
||||||
'Yes' if TrackDisposition.FORCED in dispoSet else 'No')
|
|
||||||
|
|
||||||
self.tracksTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
typeCounter[trackType] += 1
|
|
||||||
|
|
||||||
|
|
||||||
    def compose(self):
        """Build the screen's widget tree (Textual compose hook).

        Creates four DataTables (shows, media tags, streams, differences),
        configures their columns, and yields the grid layout row by row.
        The yield order is layout-significant.
        """

        # Create the DataTable widget
        self.showsTable = DataTable(classes="two")

        # Define the columns with headers
        self.column_key_show_id = self.showsTable.add_column("ID", width=10)
        self.column_key_show_name = self.showsTable.add_column("Name", width=80)
        self.column_key_show_year = self.showsTable.add_column("Year", width=10)

        # Select whole rows rather than individual cells
        self.showsTable.cursor_type = 'row'

        self.mediaTagsTable = DataTable(classes="two")

        # Define the columns with headers
        self.column_key_track_tag_key = self.mediaTagsTable.add_column("Key", width=30)
        self.column_key_track_tag_value = self.mediaTagsTable.add_column("Value", width=70)

        self.mediaTagsTable.cursor_type = 'row'

        self.tracksTable = DataTable(classes="two")

        # Define the columns with headers
        self.column_key_track_index = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_INDEX_COLUMN_LABEL, width=5)
        self.column_key_track_type = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_TYPE_COLUMN_LABEL, width=10)
        self.column_key_track_sub_index = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_SUB_INDEX_COLUMN_LABEL, width=8)
        self.column_key_track_codec = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_CODEC_COLUMN_LABEL, width=10)
        self.column_key_track_layout = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_LAYOUT_COLUMN_LABEL, width=10)
        self.column_key_track_language = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_LANGUAGE_COLUMN_LABEL, width=15)
        self.column_key_track_title = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_TITLE_COLUMN_LABEL, width=48)
        self.column_key_track_default = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_DEFAULT_COLUMN_LABEL, width=8)
        self.column_key_track_forced = self.tracksTable.add_column(MediaDetailsScreen.TRACKS_TABLE_FORCED_COLUMN_LABEL, width=8)

        self.tracksTable.cursor_type = 'row'

        # Create the DataTable widget
        self.differencesTable = DataTable(id='differences-table') # classes="triple"

        # Define the columns with headers
        self.column_key_differences = self.differencesTable.add_column(MediaDetailsScreen.DIFFERENCES_TABLE_DIFFERENCES_COLUMN_LABEL, width=100)

        self.differencesTable.cursor_type = 'row'

        yield Header()

        with Grid():

            # 1: shows table beside the differences table
            yield Static("Show")
            yield self.showsTable
            yield Static(" ")
            yield self.differencesTable

            # 2: spacer row
            yield Static(" ", classes="four")

            # 3: substitute button row
            yield Static(" ")
            yield Button("Substitute", id="pattern_button")
            yield Static(" ", classes="two")

            # 4: pattern input row
            yield Static("Pattern")
            yield Input(type="text", id='pattern_input', classes="two")

            yield Static(" ")

            # 5: spacer row
            yield Static(" ", classes="four")

            # 6: media tags table row
            yield Static("Media Tags")
            yield self.mediaTagsTable
            yield Static(" ")

            # 7: spacer row
            yield Static(" ", classes="four")

            # 8: disposition button row
            yield Static(" ")
            yield Button("Set Default", id="select_default_button")
            yield Button("Set Forced", id="select_forced_button")
            yield Static(" ")
            # 9: streams table row
            yield Static("Streams")
            yield self.tracksTable
            yield Static(" ")

        yield Footer()
|
|
||||||
|
|
||||||
|
|
||||||
def getPatternObjFromInput(self):
|
|
||||||
"""Returns show id and pattern as obj from corresponding inputs"""
|
|
||||||
patternObj = {}
|
|
||||||
try:
|
|
||||||
patternObj['show_id'] = self.getSelectedShowDescriptor().getId()
|
|
||||||
patternObj['pattern'] = str(self.query_one("#pattern_input", Input).value)
|
|
||||||
except:
|
|
||||||
return {}
|
|
||||||
return patternObj
|
|
||||||
|
|
||||||
|
|
||||||
def on_button_pressed(self, event: Button.Pressed) -> None:
|
|
||||||
|
|
||||||
if event.button.id == "pattern_button":
|
|
||||||
|
|
||||||
pattern = self.query_one("#pattern_input", Input).value
|
|
||||||
|
|
||||||
patternMatch = re.search(FileProperties.SE_INDICATOR_PATTERN, pattern)
|
|
||||||
|
|
||||||
if patternMatch:
|
|
||||||
self.query_one("#pattern_input", Input).value = pattern.replace(patternMatch.group(1), FileProperties.SE_INDICATOR_PATTERN)
|
|
||||||
|
|
||||||
|
|
||||||
if event.button.id == "select_default_button":
|
|
||||||
selectedTrackDescriptor = self.getSelectedTrackDescriptor()
|
|
||||||
self.__sourceMediaDescriptor.setDefaultSubTrack(selectedTrackDescriptor.getType(), selectedTrackDescriptor.getSubIndex())
|
|
||||||
self.updateTracks()
|
|
||||||
|
|
||||||
if event.button.id == "select_forced_button":
|
|
||||||
selectedTrackDescriptor = self.getSelectedTrackDescriptor()
|
|
||||||
self.__sourceMediaDescriptor.setForcedSubTrack(selectedTrackDescriptor.getType(), selectedTrackDescriptor.getSubIndex())
|
|
||||||
self.updateTracks()
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedTrackDescriptor(self):
|
|
||||||
"""Returns a partial track descriptor"""
|
|
||||||
try:
|
|
||||||
|
|
||||||
# Fetch the currently selected row when 'Enter' is pressed
|
|
||||||
#selected_row_index = self.table.cursor_row
|
|
||||||
row_key, col_key = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate)
|
|
||||||
|
|
||||||
if row_key is not None:
|
|
||||||
selected_track_data = self.tracksTable.get_row(row_key)
|
|
||||||
|
|
||||||
kwargs = {}
|
|
||||||
kwargs[TrackDescriptor.CONTEXT_KEY] = self.context
|
|
||||||
kwargs[TrackDescriptor.INDEX_KEY] = int(selected_track_data[0])
|
|
||||||
kwargs[TrackDescriptor.TRACK_TYPE_KEY] = TrackType.fromLabel(selected_track_data[1])
|
|
||||||
kwargs[TrackDescriptor.SUB_INDEX_KEY] = int(selected_track_data[2])
|
|
||||||
kwargs[TrackDescriptor.CODEC_KEY] = TrackCodec.fromLabel(selected_track_data[3])
|
|
||||||
kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.fromLabel(selected_track_data[4])
|
|
||||||
|
|
||||||
return TrackDescriptor(**kwargs)
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedShowDescriptor(self) -> ShowDescriptor:
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
row_key, col_key = self.showsTable.coordinate_to_cell_key(self.showsTable.cursor_coordinate)
|
|
||||||
|
|
||||||
if row_key is not None:
|
|
||||||
selected_row_data = self.showsTable.get_row(row_key)
|
|
||||||
|
|
||||||
try:
|
|
||||||
kwargs = {}
|
|
||||||
|
|
||||||
kwargs[ShowDescriptor.ID_KEY] = int(selected_row_data[0])
|
|
||||||
kwargs[ShowDescriptor.NAME_KEY] = str(selected_row_data[1])
|
|
||||||
kwargs[ShowDescriptor.YEAR_KEY] = int(selected_row_data[2])
|
|
||||||
|
|
||||||
return ShowDescriptor(**kwargs)
|
|
||||||
|
|
||||||
except ValueError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def handle_new_pattern(self, showDescriptor: ShowDescriptor):
|
|
||||||
""""""
|
|
||||||
|
|
||||||
if type(showDescriptor) is not ShowDescriptor:
|
|
||||||
raise TypeError("MediaDetailsScreen.handle_new_pattern(): Argument 'showDescriptor' has to be of type ShowDescriptor")
|
|
||||||
|
|
||||||
self.removeShow()
|
|
||||||
|
|
||||||
showRowIndex = self.getRowIndexFromShowId(showDescriptor.getId())
|
|
||||||
if showRowIndex is None:
|
|
||||||
show = (showDescriptor.getId(), showDescriptor.getName(), showDescriptor.getYear())
|
|
||||||
self.showsTable.add_row(*map(str, show))
|
|
||||||
|
|
||||||
showRowIndex = self.getRowIndexFromShowId(showDescriptor.getId())
|
|
||||||
if showRowIndex is not None:
|
|
||||||
self.showsTable.move_cursor(row=showRowIndex)
|
|
||||||
|
|
||||||
patternObj = self.getPatternObjFromInput()
|
|
||||||
|
|
||||||
if patternObj:
|
|
||||||
patternId = self.__pc.addPattern(patternObj)
|
|
||||||
if patternId:
|
|
||||||
self.highlightPattern(False)
|
|
||||||
|
|
||||||
for tagKey, tagValue in self.__sourceMediaDescriptor.getTags().items():
|
|
||||||
|
|
||||||
# Filter tags that make no sense to preserve
|
|
||||||
if tagKey not in self.__ignoreGlobalKeys and not tagKey in self.__removeGlobalKeys:
|
|
||||||
self.__tac.updateMediaTag(patternId, tagKey, tagValue)
|
|
||||||
|
|
||||||
# for trackDescriptor in self.__sourceMediaDescriptor.getAllTrackDescriptors():
|
|
||||||
for trackDescriptor in self.__sourceMediaDescriptor.getTrackDescriptors():
|
|
||||||
self.__tc.addTrack(trackDescriptor, patternId = patternId)
|
|
||||||
|
|
||||||
|
|
||||||
def action_new_pattern(self):
|
|
||||||
"""Adding new patterns
|
|
||||||
|
|
||||||
If the corresponding show does not exists in DB it is added beforehand"""
|
|
||||||
|
|
||||||
selectedShowDescriptor = self.getSelectedShowDescriptor()
|
|
||||||
|
|
||||||
#HINT: Callback is invoked after this method has exited. As a workaround the callback is executed directly
|
|
||||||
# from here with a mock-up screen result containing the necessary part of keys to perform correctly.
|
|
||||||
if selectedShowDescriptor is None:
|
|
||||||
self.app.push_screen(ShowDetailsScreen(), self.handle_new_pattern)
|
|
||||||
else:
|
|
||||||
self.handle_new_pattern(selectedShowDescriptor)
|
|
||||||
|
|
||||||
|
|
||||||
    def action_update_pattern(self):
        """Updating patterns

        When updating the database the actions must reverse the difference (eq to diff db->file)"""

        if self.__currentPattern is not None:
            patternObj = self.getPatternObjFromInput()
            # If the pattern string itself changed, only the pattern row is
            # updated and the method returns early.
            if (patternObj
                and self.__currentPattern.getPattern() != patternObj['pattern']):
                return self.__pc.updatePattern(self.__currentPattern.getId(), patternObj)

            # Refresh descriptors and the change set before applying it.
            self.loadProperties()

            # __mediaChangeSetObj is file vs database
            if MediaDescriptorChangeSet.TAGS_KEY in self.__mediaChangeSetObj.keys():

                if DIFF_ADDED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys():
                    # Reverse an 'added' tag diff by deleting the tag from the DB.
                    for addedTagKey in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_ADDED_KEY].keys():
                        self.__tac.deleteMediaTagByKey(self.__currentPattern.getId(), addedTagKey)

                if DIFF_REMOVED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys():
                    # Reverse a 'removed' tag diff by storing the file's current value.
                    for removedTagKey in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_REMOVED_KEY].keys():
                        currentTags = self.__sourceMediaDescriptor.getTags()
                        self.__tac.updateMediaTag(self.__currentPattern.getId(), removedTagKey, currentTags[removedTagKey])

                if DIFF_CHANGED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys():
                    # Overwrite changed tags with the file's current value.
                    for changedTagKey in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_CHANGED_KEY].keys():
                        currentTags = self.__sourceMediaDescriptor.getTags()
                        self.__tac.updateMediaTag(self.__currentPattern.getId(), changedTagKey, currentTags[changedTagKey])

            if MediaDescriptorChangeSet.TRACKS_KEY in self.__mediaChangeSetObj.keys():

                if DIFF_ADDED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
                    # Tracks reported as 'added' are stored for the pattern.
                    for trackIndex, trackDescriptor in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_ADDED_KEY].items():
                        self.__tc.addTrack(trackDescriptor, patternId = self.__currentPattern.getId())

                if DIFF_REMOVED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
                    trackDescriptor: TrackDescriptor
                    for trackIndex, trackDescriptor in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_REMOVED_KEY].items():
                        # Tracks reported as 'removed' are deleted from the DB.
                        self.__tc.deleteTrack(trackDescriptor.getId())

                if DIFF_CHANGED_KEY in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():

                    for trackIndex, trackDiff in self.__mediaChangeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_CHANGED_KEY].items():

                        # Locate the matching DB-side (target) track by stream index.
                        targetTracks = [t for t in self.__targetMediaDescriptor.getTrackDescriptors() if t.getIndex() == trackIndex]
                        targetTrackId = targetTracks[0].getId() if targetTracks else None
                        targetTrackIndex = targetTracks[0].getIndex() if targetTracks else None

                        changedCurrentTracks = [t for t in self.__sourceMediaDescriptor.getTrackDescriptors() if t.getIndex() == trackIndex]
                        # changedCurrentTrackId #HINT: Undefined as track descriptors do not come from file with track_id

                        if TrackDescriptor.TAGS_KEY in trackDiff.keys():
                            tagsDiff = trackDiff[TrackDescriptor.TAGS_KEY]

                            if DIFF_ADDED_KEY in tagsDiff.keys():
                                for tagKey, tagValue in tagsDiff[DIFF_ADDED_KEY].items():
                                    self.__tac.updateTrackTag(targetTrackId, tagKey, tagValue)

                            if DIFF_REMOVED_KEY in tagsDiff.keys():
                                for tagKey, tagValue in tagsDiff[DIFF_REMOVED_KEY].items():
                                    self.__tac.deleteTrackTagByKey(targetTrackId, tagKey)

                            if DIFF_CHANGED_KEY in tagsDiff.keys():
                                for tagKey, tagValue in tagsDiff[DIFF_CHANGED_KEY].items():
                                    self.__tac.updateTrackTag(targetTrackId, tagKey, tagValue)

                        if TrackDescriptor.DISPOSITION_SET_KEY in trackDiff.keys():
                            changedTrackDispositionDiff = trackDiff[TrackDescriptor.DISPOSITION_SET_KEY]

                            if DIFF_ADDED_KEY in changedTrackDispositionDiff.keys():
                                for changedDisposition in changedTrackDispositionDiff[DIFF_ADDED_KEY]:
                                    # Only dispositions for tracks known in the DB can be stored.
                                    if targetTrackIndex is not None:
                                        self.__tc.setDispositionState(self.__currentPattern.getId(), targetTrackIndex, changedDisposition, True)

                            if DIFF_REMOVED_KEY in changedTrackDispositionDiff.keys():
                                for changedDisposition in changedTrackDispositionDiff[DIFF_REMOVED_KEY]:
                                    if targetTrackIndex is not None:
                                        self.__tc.setDispositionState(self.__currentPattern.getId(), targetTrackIndex, changedDisposition, False)

            self.updateDifferences()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def action_edit_pattern(self):
|
|
||||||
|
|
||||||
patternObj = self.getPatternObjFromInput()
|
|
||||||
|
|
||||||
if patternObj['pattern']:
|
|
||||||
|
|
||||||
selectedPatternId = self.__pc.findPattern(patternObj)
|
|
||||||
|
|
||||||
if selectedPatternId is None:
|
|
||||||
raise click.ClickException(f"MediaDetailsScreen.action_edit_pattern(): Pattern to edit has no id")
|
|
||||||
|
|
||||||
self.app.push_screen(PatternDetailsScreen(patternId = selectedPatternId, showId = self.getSelectedShowDescriptor().getId()), self.handle_edit_pattern) # <-
|
|
||||||
|
|
||||||
|
|
||||||
def handle_edit_pattern(self, screenResult):
|
|
||||||
self.query_one("#pattern_input", Input).value = screenResult['pattern']
|
|
||||||
self.updateDifferences()
|
|
||||||
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
import os, sys, importlib, inspect, glob, re
|
|
||||||
|
|
||||||
from ffx.configuration_controller import ConfigurationController
|
|
||||||
from ffx.database import databaseContext
|
|
||||||
|
|
||||||
from sqlalchemy import Engine
|
|
||||||
from sqlalchemy.orm import sessionmaker
|
|
||||||
|
|
||||||
|
|
||||||
class Conversion():
    """Base class for database schema conversions.

    Concrete conversions live in sibling modules named
    conversion_<from>_<to>.py and subclass Conversion.
    """

    def __init__(self):

        # Shared application context: configuration plus database handles.
        self._context = {}
        self._context['config'] = ConfigurationController()

        self._context['database'] = databaseContext(databasePath=self._context['config'].getDatabaseFilePath())

        # NOTE(review): these names are mangled to _Conversion__databaseSession /
        # _Conversion__databaseEngine, so subclasses cannot read them as
        # self.__databaseSession — they must go through self._context instead.
        self.__databaseSession: sessionmaker = self._context['database']['session']
        self.__databaseEngine: Engine = self._context['database']['engine']

    @staticmethod
    def list():
        """Return (versionFrom, versionTo) string tuples for all conversion modules.

        Scans this package directory for files matching
        conversion_<from>_<to>.py. NOTE: the name shadows the builtin 'list'
        for star-imports; kept for backward compatibility.
        """

        basePath = os.path.dirname(__file__)

        filenamePattern = re.compile("conversion_([0-9]+)_([0-9]+)\\.py")

        # NOTE(review): 'fp != __file__' is meant to exclude this module
        # itself, but glob may return a differently normalized path than
        # __file__ — confirm the comparison actually filters conversion.py.
        filenameList = [os.path.basename(fp) for fp in glob.glob(f"{ basePath }/*.py") if fp != __file__]

        versionTupleList = [(fm.group(1), fm.group(2)) for fn in filenameList if (fm := filenamePattern.search(fn))]

        return versionTupleList

    @staticmethod
    def getClassReference(versionFrom, versionTo):
        """Import conversion_<from>_<to> and return its Conversion* class.

        Returns None implicitly when no matching class is found.
        """
        importlib.import_module(f"ffx.model.conversions.conversion_{ versionFrom }_{ versionTo }")
        for name, obj in inspect.getmembers(sys.modules[f"ffx.model.conversions.conversion_{ versionFrom }_{ versionTo }"]):
            #HINT: Excluding DispositionCombination as it seems to be included by import (?)
            if inspect.isclass(obj) and name != 'Conversion' and name.startswith('Conversion'):
                return obj

    @staticmethod
    def getAllClassReferences():
        """Return class references for every discovered conversion module."""
        return [Conversion.getClassReference(verFrom, verTo) for verFrom, verTo in Conversion.list()]
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
import os, sys, importlib, inspect, glob, re
|
|
||||||
|
|
||||||
from .conversion import Conversion
|
|
||||||
|
|
||||||
|
|
||||||
class Conversion_2_3(Conversion):
    """Schema conversion from database version 2 to version 3."""

    def __init__(self):
        super().__init__()

    def applyConversion(self):
        """Apply the v2 -> v3 schema change (adds the user.email column)."""

        # BUG FIX: the original read self.__databaseSession /
        # self.__databaseEngine, which mangle to
        # _Conversion_2_3__databaseSession here and raise AttributeError
        # because those attributes were set (and mangled) in the Conversion
        # base class. Use the shared protected context instead.
        s = self._context['database']['session']()
        e = self._context['database']['engine']

        with e.connect() as c:
            # NOTE(review): SQLAlchemy 2.x requires wrapping raw SQL in
            # text(); this plain-string execute only works on 1.x — confirm
            # the pinned SQLAlchemy version.
            c.execute("ALTER TABLE user ADD COLUMN email VARCHAR(255)")
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
import os, sys, importlib, inspect, glob, re
|
|
||||||
|
|
||||||
from .conversion import Conversion
|
|
||||||
|
|
||||||
|
|
||||||
class Conversion_3_4(Conversion):
    """Schema conversion from database version 3 to version 4.

    Defines no conversion steps of its own; only the base-class behavior
    applies.
    """
    pass
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
# from typing import List
|
|
||||||
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Enum
|
|
||||||
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
|
||||||
|
|
||||||
from .show import Base
|
|
||||||
|
|
||||||
|
|
||||||
class MediaTag(Base):
    """
    Key/value media tag row associated with a Pattern.

    relationship(argument, opt1, opt2, ...)
    argument is string of class or Mapped class of the target entity
    backref creates a bi-directional corresponding relationship (back_populates preferred)
    back_populates points to the corresponding relationship (the actual class attribute identifier)

    See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
    """

    __tablename__ = 'media_tags'

    # v1.x
    id = Column(Integer, primary_key=True)

    # Tag key and value
    key = Column(String)
    value = Column(String)

    # v1.x
    # Owning pattern; rows are removed with the pattern (ON DELETE CASCADE).
    pattern_id = Column(Integer, ForeignKey('patterns.id', ondelete="CASCADE"))
    pattern = relationship('Pattern', back_populates='media_tags')
|
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from sqlalchemy import Column, Integer, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
|
|
||||||
from .show import Base, Show
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
|
|
||||||
class Pattern(Base):
    """Filename pattern mapped to a Show, with associated tracks and media tags."""

    __tablename__ = 'patterns'

    # v1.x
    id = Column(Integer, primary_key=True)
    pattern = Column(String)

    # v2.0
    # id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # pattern: Mapped[str] = mapped_column(String, nullable=False)

    # v1.x
    show_id = Column(Integer, ForeignKey('shows.id', ondelete="CASCADE"))
    show = relationship(Show, back_populates='patterns', lazy='joined')

    # v2.0
    # show_id: Mapped[int] = mapped_column(ForeignKey("shows.id", ondelete="CASCADE"))
    # show: Mapped["Show"] = relationship(back_populates="patterns")

    tracks = relationship('Track', back_populates='pattern', cascade="all, delete", lazy='joined')

    media_tags = relationship('MediaTag', back_populates='pattern', cascade="all, delete", lazy='joined')

    # NOTE: getId() was defined twice with identical bodies; the dead
    # duplicate definition has been removed.
    def getId(self):
        """Return the pattern's primary key as int."""
        return int(self.id)

    def getShowId(self):
        """Return the owning show's id as int."""
        return int(self.show_id)

    def getShowDescriptor(self, context) -> ShowDescriptor:
        """Return the ShowDescriptor of the owning show."""
        return self.show.getDescriptor(context)

    def getPattern(self):
        """Return the pattern string."""
        return str(self.pattern)

    def getTags(self):
        """Return this pattern's media tags as a plain {key: value} dict."""
        return {str(t.key): str(t.value) for t in self.media_tags}

    def getMediaDescriptor(self, context):
        """Build a MediaDescriptor from this pattern's tags and tracks.

        Tracks receive ordered per-type sub-indices: the n-th track of a
        given type gets sub-index n-1.
        """

        kwargs = {}

        kwargs[MediaDescriptor.CONTEXT_KEY] = context

        kwargs[MediaDescriptor.TAGS_KEY] = self.getTags()
        kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY] = []

        # Set ordered subindices
        subIndexCounter = {}
        for track in self.tracks:
            trackType = track.getType()
            if trackType not in subIndexCounter:
                subIndexCounter[trackType] = 0
            kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY].append(track.getDescriptor(context, subIndex = subIndexCounter[trackType]))
            subIndexCounter[trackType] += 1

        return MediaDescriptor(**kwargs)
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
# from typing import List
|
|
||||||
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Enum
|
|
||||||
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
|
||||||
|
|
||||||
from .show import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Property(Base):
    """Generic key/value application property row (table 'properties')."""

    __tablename__ = 'properties'

    # v1.x declarative style
    id = Column(Integer, primary_key=True)

    key = Column(String)
    value = Column(String)
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from sqlalchemy import Column, Integer, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
|
|
||||||
from .show import Base, Show
|
|
||||||
|
|
||||||
|
|
||||||
class ShiftedSeason(Base):
    """Season/episode renumbering rule attached to a Show.

    Maps an episode range of ``original_season`` onto new numbering via
    ``season_offset`` / ``episode_offset``.
    """

    __tablename__ = 'shifted_seasons'

    # v1.x declarative style (v2.0 Mapped[...] migration pending)
    id = Column(Integer, primary_key=True)

    # Owning show; deleting the show cascades to its shifted seasons.
    show_id = Column(Integer, ForeignKey('shows.id', ondelete="CASCADE"))
    show = relationship(Show, back_populates='shifted_seasons', lazy='joined')

    original_season = Column(Integer)

    # -1 marks an open range boundary.
    first_episode = Column(Integer, default = -1)
    last_episode = Column(Integer, default = -1)

    season_offset = Column(Integer, default = 0)
    episode_offset = Column(Integer, default = 0)

    def getId(self):
        """Primary key (raw column value)."""
        return self.id

    def getOriginalSeason(self):
        """Season number this rule applies to."""
        return self.original_season

    def getFirstEpisode(self):
        """First episode of the affected range."""
        return self.first_episode

    def getLastEpisode(self):
        """Last episode of the affected range."""
        return self.last_episode

    def getSeasonOffset(self):
        """Offset added to the season number."""
        return self.season_offset

    def getEpisodeOffset(self):
        """Offset added to the episode number."""
        return self.episode_offset

    def getObj(self):
        """Return this rule as a plain dict (keys match column names)."""
        return {
            'original_season': self.getOriginalSeason(),
            'first_episode': self.getFirstEpisode(),
            'last_episode': self.getLastEpisode(),
            'season_offset': self.getSeasonOffset(),
            'episode_offset': self.getEpisodeOffset(),
        }
|
|
||||||
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
# from typing import List
|
|
||||||
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
|
||||||
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
Base = declarative_base()


class Show(Base):
    """A show row (table 'shows') with its patterns and shifted seasons.

    relationship() notes: back_populates names the corresponding attribute
    on the other side of the relation.
    See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
    """

    __tablename__ = 'shows'

    # v1.x declarative style (v2.0 Mapped[...] migration pending)
    id = Column(Integer, primary_key=True)

    name = Column(String)
    year = Column(Integer)

    # Child collections; deleting the show cascades to them.
    patterns = relationship('Pattern', back_populates='show', cascade="all, delete")
    shifted_seasons = relationship('ShiftedSeason', back_populates='show', cascade="all, delete")

    # Formatting widths used when rendering season/episode numbers.
    index_season_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS)
    index_episode_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS)
    indicator_season_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS)
    indicator_episode_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS)

    def getDescriptor(self, context):
        """Build a ShowDescriptor snapshot of this row for *context*."""
        return ShowDescriptor(**{
            ShowDescriptor.CONTEXT_KEY: context,
            ShowDescriptor.ID_KEY: int(self.id),
            ShowDescriptor.NAME_KEY: str(self.name),
            ShowDescriptor.YEAR_KEY: int(self.year),
            ShowDescriptor.INDEX_SEASON_DIGITS_KEY: int(self.index_season_digits),
            ShowDescriptor.INDEX_EPISODE_DIGITS_KEY: int(self.index_episode_digits),
            ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY: int(self.indicator_season_digits),
            ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY: int(self.indicator_episode_digits),
        })
|
|
||||||
@@ -1,216 +0,0 @@
|
|||||||
# from typing import List
|
|
||||||
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
|
||||||
|
|
||||||
from .show import Base
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
|
|
||||||
from ffx.iso_language import IsoLanguage
|
|
||||||
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
from ffx.audio_layout import AudioLayout
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
|
|
||||||
|
|
||||||
class Track(Base):
    """A single media stream (audio/subtitle/...) belonging to a Pattern.

    A track is addressed by pattern_id + sub_index + track_type.

    relationship() notes: back_populates names the corresponding attribute
    on the other side of the relation.
    See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
    """

    __tablename__ = 'tracks'

    # v1.x declarative style
    id = Column(Integer, primary_key=True, autoincrement = True)

    # P=pattern_id+sub_index+track_type
    track_type = Column(Integer) # TrackType index

    index = Column(Integer)
    source_index = Column(Integer)

    # Owning pattern; rows are removed when the pattern is deleted.
    pattern_id = Column(Integer, ForeignKey('patterns.id', ondelete="CASCADE"))
    pattern = relationship('Pattern', back_populates='tracks')

    track_tags = relationship('TrackTag', back_populates='track', cascade="all, delete", lazy="joined")

    # Bitmask of TrackDisposition flags (bit = 2**disposition.index()).
    disposition_flags = Column(Integer)

    codec_name = Column(String)
    audio_layout = Column(Integer)  # AudioLayout index

    def __init__(self, **kwargs):
        """Accept column names as keyword args plus two conveniences:

        'track_type' is coerced with int(); TrackDescriptor.DISPOSITION_SET_KEY
        may carry a set of TrackDisposition values which is folded into
        disposition_flags before the remaining kwargs go to SQLAlchemy.
        """
        # NOTE(review): fromFfprobeStreamObj() passes the ffprobe codec_type
        # *label* as track_type; int(label) only works if labels are numeric
        # strings -- confirm against TrackType.label().
        trackType = kwargs.pop('track_type', None)
        if trackType is not None:
            self.track_type = int(trackType)

        dispositionSet = kwargs.pop(TrackDescriptor.DISPOSITION_SET_KEY, set())
        self.disposition_flags = int(TrackDisposition.toFlags(dispositionSet))

        super().__init__(**kwargs)

    @classmethod
    def fromFfprobeStreamObj(cls, streamObj, patternId):
        """Build a Track from one entry of ffprobe's 'streams' output.

        Returns None when the stream's codec_type does not match any
        TrackType label. Disposition flags are summed from the stream's
        'disposition' sub-object; the audio layout is identified from the
        stream object itself.

        A typical streamObj looks like::

            {'index': 4,
             'codec_name': 'hdmv_pgs_subtitle',
             'codec_type': 'subtitle',
             ...,
             'disposition': {'default': 1, 'forced': 0, ...},
             'tags': {'language': 'ger', 'title': 'German Full'}}
        """
        trackType = streamObj[TrackDescriptor.FFPROBE_CODEC_TYPE_KEY]

        if trackType in [t.label() for t in TrackType]:

            # Fold every set disposition bit whose key maps to a known
            # TrackDisposition (walrus binds t inside the comprehension).
            return cls(pattern_id = patternId,
                       track_type = trackType,
                       codec_name = streamObj[TrackDescriptor.FFPROBE_CODEC_NAME_KEY],
                       disposition_flags = sum([2**t.index() for (k,v) in streamObj[TrackDescriptor.FFPROBE_DISPOSITION_KEY].items()
                                                if v and (t := TrackDisposition.find(k)) is not None]),
                       audio_layout = AudioLayout.identify(streamObj))

        else:
            return None

    def getId(self):
        """Primary key as int."""
        return int(self.id)

    def getPatternId(self):
        """Owning pattern's id as int."""
        return int(self.pattern_id)

    def getType(self):
        """TrackType for this row's track_type index."""
        return TrackType.fromIndex(self.track_type)

    def getCodec(self) -> TrackCodec:
        """TrackCodec identified from codec_name."""
        return TrackCodec.identify(self.codec_name)

    def getIndex(self):
        """Stored index, or -1 when unset."""
        return int(self.index) if self.index is not None else -1

    def getSourceIndex(self):
        """Stored source_index, or -1 when unset."""
        return int(self.source_index) if self.source_index is not None else -1

    def getLanguage(self):
        """IsoLanguage from the 'language' track tag (UNDEFINED if absent)."""
        tags = {t.key:t.value for t in self.track_tags}
        return IsoLanguage.findThreeLetter(tags['language']) if 'language' in tags.keys() else IsoLanguage.UNDEFINED

    def getTitle(self):
        """Value of the 'title' track tag, or '' if absent."""
        tags = {t.key:t.value for t in self.track_tags}
        return tags['title'] if 'title' in tags.keys() else ''

    def getDispositionSet(self):
        """Set of TrackDisposition values decoded from disposition_flags."""
        return TrackDisposition.toSet(self.disposition_flags)

    def getAudioLayout(self):
        """AudioLayout for this row's audio_layout index."""
        return AudioLayout.fromIndex(self.audio_layout)

    def getTags(self):
        """All track tags as a plain {key: value} string dict."""
        return {str(t.key):str(t.value) for t in self.track_tags}

    def setDisposition(self, disposition : TrackDisposition):
        """Set the bit for *disposition* in disposition_flags."""
        self.disposition_flags = self.disposition_flags | int(2**disposition.index())

    def resetDisposition(self, disposition : TrackDisposition):
        """Clear the bit for *disposition* (mask keeps all other bits)."""
        self.disposition_flags = self.disposition_flags & sum([2**d.index() for d in TrackDisposition if d != disposition])

    def getDisposition(self, disposition : TrackDisposition):
        """True if the bit for *disposition* is set."""
        return bool(self.disposition_flags & 2**disposition.index())

    def getDescriptor(self, context = None, subIndex : int = -1) -> TrackDescriptor:
        """Build a TrackDescriptor snapshot of this row.

        context is attached only when given; subIndex is attached only
        when >= 0 (per-type ordering is supplied by the caller).
        """
        kwargs = {}

        if not context is None:
            kwargs[TrackDescriptor.CONTEXT_KEY] = context

        kwargs[TrackDescriptor.ID_KEY] = self.getId()
        kwargs[TrackDescriptor.PATTERN_ID_KEY] = self.getPatternId()

        kwargs[TrackDescriptor.INDEX_KEY] = self.getIndex()
        kwargs[TrackDescriptor.SOURCE_INDEX_KEY] = self.getSourceIndex()

        if subIndex > -1:
            kwargs[TrackDescriptor.SUB_INDEX_KEY] = subIndex

        kwargs[TrackDescriptor.TRACK_TYPE_KEY] = self.getType()
        kwargs[TrackDescriptor.CODEC_KEY] = self.getCodec()

        kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = self.getDispositionSet()
        kwargs[TrackDescriptor.TAGS_KEY] = self.getTags()

        kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = self.getAudioLayout()

        return TrackDescriptor(**kwargs)
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
# from typing import List
|
|
||||||
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Enum
|
|
||||||
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
|
||||||
|
|
||||||
from .show import Base
|
|
||||||
|
|
||||||
|
|
||||||
class TrackTag(Base):
    """Key/value tag attached to a Track (table 'track_tags').

    relationship() notes: back_populates names the corresponding attribute
    on the other side of the relation.
    See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
    """

    __tablename__ = 'track_tags'

    # v1.x declarative style
    id = Column(Integer, primary_key=True)

    key = Column(String)
    value = Column(String)

    # Owning track; rows are removed when the track is deleted.
    track_id = Column(Integer, ForeignKey('tracks.id', ondelete="CASCADE"))
    track = relationship('Track', back_populates='track_tags')
|
|
||||||
@@ -1,159 +0,0 @@
|
|||||||
import click, re
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
|
|
||||||
|
|
||||||
class PatternController():
    """Database CRUD for Pattern rows plus filename matching.

    Each method opens its own session from the configured session factory
    and closes it in a finally block.
    """

    def __init__(self, context):
        self.context = context
        self.Session = self.context['database']['session'] # convenience

    def addPattern(self, patternObj):
        """Adds pattern to database from obj

        Returns database id or 0 if pattern already exists"""
        s = None
        try:
            s = self.Session()
            q = s.query(Pattern).filter(Pattern.show_id == int(patternObj['show_id']),
                                        Pattern.pattern == str(patternObj['pattern']))

            if not q.count():
                pattern = Pattern(show_id = int(patternObj['show_id']),
                                  pattern = str(patternObj['pattern']))
                s.add(pattern)
                s.commit()
                return pattern.getId()
            else:
                return 0

        except Exception as ex:
            raise click.ClickException(f"PatternController.addPattern(): {repr(ex)}")
        finally:
            # BUGFIX: guard close -- if Session() itself raised, s was never
            # bound and the bare s.close() masked the real error.
            if s is not None:
                s.close()

    def updatePattern(self, patternId, patternObj):
        """Overwrite show_id and pattern of row *patternId*.

        Returns True on success, False when the row does not exist.
        """
        s = None
        try:
            s = self.Session()
            q = s.query(Pattern).filter(Pattern.id == int(patternId))

            if q.count():
                pattern = q.first()

                pattern.show_id = int(patternObj['show_id'])
                pattern.pattern = str(patternObj['pattern'])

                s.commit()
                return True

            else:
                return False

        except Exception as ex:
            raise click.ClickException(f"PatternController.updatePattern(): {repr(ex)}")
        finally:
            if s is not None:
                s.close()

    def findPattern(self, patternObj):
        """Return the id of the row matching show_id+pattern, or None."""
        s = None
        try:
            s = self.Session()
            q = s.query(Pattern).filter(Pattern.show_id == int(patternObj['show_id']), Pattern.pattern == str(patternObj['pattern']))

            if q.count():
                pattern = q.first()
                return int(pattern.id)
            else:
                return None

        except Exception as ex:
            raise click.ClickException(f"PatternController.findPattern(): {repr(ex)}")
        finally:
            if s is not None:
                s.close()

    def getPattern(self, patternId : int):
        """Return the Pattern row with the given id, or None."""
        if not isinstance(patternId, int):
            raise ValueError(f"PatternController.getPattern(): Argument patternId is required to be of type int")

        s = None
        try:
            s = self.Session()
            q = s.query(Pattern).filter(Pattern.id == int(patternId))

            return q.first() if q.count() else None

        except Exception as ex:
            raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}")
        finally:
            if s is not None:
                s.close()

    def deletePattern(self, patternId):
        """Delete the row with the given id.

        Returns True on success, False when the row does not exist.
        """
        s = None
        try:
            s = self.Session()
            q = s.query(Pattern).filter(Pattern.id == int(patternId))

            if q.count():
                # Delete via the session so ORM-level cascades fire;
                # see https://stackoverflow.com/a/19245058
                pattern = q.first()
                s.delete(pattern)

                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"PatternController.deletePattern(): {repr(ex)}")
        finally:
            if s is not None:
                s.close()

    def matchFilename(self, filename : str) -> dict:
        """Returns dict {'match': <a regex match obj>, 'pattern': <ffx pattern obj>} or empty dict of no pattern was found"""
        s = None
        try:
            s = self.Session()
            q = s.query(Pattern)

            matchResult = {}

            # NOTE(review): the loop does not break, so if several patterns
            # match the LAST one wins -- confirm this is intended.
            for pattern in q.all():
                patternMatch = re.search(str(pattern.pattern), str(filename))
                if patternMatch is not None:
                    matchResult['match'] = patternMatch
                    matchResult['pattern'] = pattern

            return matchResult

        except Exception as ex:
            raise click.ClickException(f"PatternController.matchFilename(): {repr(ex)}")
        finally:
            if s is not None:
                s.close()
|
|
||||||
|
|
||||||
# def getMediaDescriptor(self, context, patternId):
|
|
||||||
#
|
|
||||||
# try:
|
|
||||||
# s = self.Session()
|
|
||||||
# q = s.query(Pattern).filter(Pattern.id == int(patternId))
|
|
||||||
#
|
|
||||||
# if q.count():
|
|
||||||
# return q.first().getMediaDescriptor(context)
|
|
||||||
# else:
|
|
||||||
# return None
|
|
||||||
#
|
|
||||||
# except Exception as ex:
|
|
||||||
# raise click.ClickException(f"PatternController.getMediaDescriptor(): {repr(ex)}")
|
|
||||||
# finally:
|
|
||||||
# s.close()
|
|
||||||
@@ -1,111 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from .show_controller import ShowController
|
|
||||||
from .pattern_controller import PatternController
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
# Screen[dict[int, str, int]]
class PatternDeleteScreen(Screen):
    """Confirmation dialog for deleting a filename pattern.

    Dismisses with the deleted Pattern on success; pops itself on cancel.
    """

    CSS = """

    Grid {
        grid-size: 2;
        grid-rows: 2 auto;
        grid-columns: 30 330;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, patternId = None, showId = None):
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        self.__pc = PatternController(context = self.context)
        self.__sc = ShowController(context = self.context)

        self.__patternId = patternId
        # BUGFIX: use None (not {}) as the "missing" sentinel -- on_mount()
        # tests against None, and {} would slip past that test and then
        # fail on attribute access.
        self.__pattern: Pattern = self.__pc.getPattern(patternId) if patternId is not None else None
        self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None

    def on_mount(self):
        """Fill the labels once the widgets exist."""
        if self.__showDescriptor:
            self.query_one("#showlabel", Static).update(f"{self.__showDescriptor.getId()} - {self.__showDescriptor.getName()} ({self.__showDescriptor.getYear()})")
        if self.__pattern is not None:
            self.query_one("#patternlabel", Static).update(str(self.__pattern.pattern))

    def compose(self):
        """Lay out the confirmation dialog."""
        yield Header()

        with Grid():

            yield Static("Are you sure to delete the following filename pattern?", id="toplabel", classes="two")

            yield Static("", classes="two")

            yield Static("Pattern")
            yield Static("", id="patternlabel")

            yield Static("", classes="two")

            yield Static("from show")
            yield Static("", id="showlabel")

            yield Static("", classes="two")

            yield Button("Delete", id="delete_button")
            yield Button("Cancel", id="cancel_button")

        yield Footer()

    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:

        if event.button.id == "delete_button":

            if self.__patternId is None:
                raise click.ClickException('PatternDeleteScreen.on_button_pressed(): pattern id is undefined')

            if self.__pc.deletePattern(self.__patternId):
                self.dismiss(self.__pattern)
            else:
                #TODO: show a message to the user
                self.app.pop_screen()

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
|
||||||
|
|
||||||
@@ -1,577 +0,0 @@
|
|||||||
import click, re
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button, Input, DataTable
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
from ffx.model.track import Track
|
|
||||||
|
|
||||||
from .pattern_controller import PatternController
|
|
||||||
from .show_controller import ShowController
|
|
||||||
from .track_controller import TrackController
|
|
||||||
from .tag_controller import TagController
|
|
||||||
|
|
||||||
from .track_details_screen import TrackDetailsScreen
|
|
||||||
from .track_delete_screen import TrackDeleteScreen
|
|
||||||
|
|
||||||
from .tag_details_screen import TagDetailsScreen
|
|
||||||
from .tag_delete_screen import TagDeleteScreen
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
from textual.widgets._data_table import CellDoesNotExist
|
|
||||||
|
|
||||||
from ffx.file_properties import FileProperties
|
|
||||||
from ffx.iso_language import IsoLanguage
|
|
||||||
from ffx.audio_layout import AudioLayout
|
|
||||||
|
|
||||||
from ffx.helper import formatRichColor, removeRichColor
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class PatternDetailsScreen(Screen):
|
|
||||||
|
|
||||||
CSS = """
|
|
||||||
|
|
||||||
Grid {
|
|
||||||
grid-size: 7 13;
|
|
||||||
grid-rows: 2 2 2 2 2 8 2 2 8 2 2 2 2;
|
|
||||||
grid-columns: 25 25 25 25 25 25 25;
|
|
||||||
height: 100%;
|
|
||||||
width: 100%;
|
|
||||||
padding: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
Input {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
Button {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable {
|
|
||||||
min-height: 6;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--cursor {
|
|
||||||
background: darkorange;
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--header {
|
|
||||||
background: steelblue;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
#toplabel {
|
|
||||||
height: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.three {
|
|
||||||
column-span: 3;
|
|
||||||
}
|
|
||||||
|
|
||||||
.four {
|
|
||||||
column-span: 4;
|
|
||||||
}
|
|
||||||
.five {
|
|
||||||
column-span: 5;
|
|
||||||
}
|
|
||||||
.six {
|
|
||||||
column-span: 6;
|
|
||||||
}
|
|
||||||
.seven {
|
|
||||||
column-span: 7;
|
|
||||||
}
|
|
||||||
|
|
||||||
.box {
|
|
||||||
height: 100%;
|
|
||||||
border: solid green;
|
|
||||||
}
|
|
||||||
|
|
||||||
.yellow {
|
|
||||||
tint: yellow 40%;
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, patternId = None, showId = None):
    """Load configuration, controllers, and the pattern/show under edit.

    patternId/showId of None mean "create new" -- the corresponding
    cached objects stay None.
    """
    super().__init__()

    self.context = self.app.getContext()
    self.Session = self.context['database']['session'] # convenience

    self.__configurationData = self.context['config'].getData()

    # Metadata handling rules from the config; every key is optional.
    metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {}

    self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {}
    self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else []
    self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else []
    # Per-stream rules live under the nested 'streams' object.
    self.__removeTrackKeys = (metadataConfiguration['streams']['remove']
                              if 'streams' in metadataConfiguration.keys()
                              and 'remove' in metadataConfiguration['streams'].keys() else [])
    self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore']
                              if 'streams' in metadataConfiguration.keys()
                              and 'ignore' in metadataConfiguration['streams'].keys() else [])

    self.__pc = PatternController(context = self.context)
    self.__sc = ShowController(context = self.context)
    self.__tc = TrackController(context = self.context)
    self.__tac = TagController(context = self.context)

    self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else None
    self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None
|
|
||||||
|
|
||||||
|
|
||||||
#TODO: per controller
def loadTracks(self, show_id):
    """Return the patterns belonging to *show_id* as [{'id', 'pattern'}, ...].

    NOTE(review): despite the name, this queries Pattern rows, not tracks;
    the name is kept for existing callers. A dead, never-used ``tracks``
    dict that was built here has been removed.
    """
    s = None
    try:
        s = self.Session()
        q = s.query(Pattern).filter(Pattern.show_id == int(show_id))

        return [{'id': int(p.id), 'pattern': p.pattern} for p in q.all()]

    except Exception as ex:
        raise click.ClickException(f"loadTracks(): {repr(ex)}")
    finally:
        # Guard: if Session() itself raised, s was never bound.
        if s is not None:
            s.close()
|
|
||||||
|
|
||||||
|
|
||||||
def updateTracks(self):
    """Rebuild the tracks DataTable from the current pattern's tracks.

    Clears the table first; does nothing further when no pattern is
    loaded. Rows get a running per-type sub-index in iteration order.
    """
    self.tracksTable.clear()

    if self.__pattern is not None:

        tracks = self.__tc.findTracks(self.__pattern.getId())

        # Running count per track type -> displayed SubIndex column.
        typeCounter = {}

        tr: Track
        for tr in tracks:

            td : TrackDescriptor = tr.getDescriptor(self.context)

            trackType = td.getType()
            if not trackType in typeCounter.keys():
                typeCounter[trackType] = 0

            dispoSet = td.getDispositionSet()

            trackLanguage = td.getLanguage()
            audioLayout = td.getAudioLayout()

            # Column order must match the add_column calls in compose():
            # Index, Type, SubIndex, Codec, layout, language, title,
            # default?, forced?, source index. Undefined layout/language
            # render as a single space.
            row = (td.getIndex(),
                   trackType.label(),
                   typeCounter[trackType],
                   td.getCodec().label(),
                   audioLayout.label() if trackType == TrackType.AUDIO
                   and audioLayout != AudioLayout.LAYOUT_UNDEFINED else ' ',
                   trackLanguage.label() if trackLanguage != IsoLanguage.UNDEFINED else ' ',
                   td.getTitle(),
                   'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No',
                   'Yes' if TrackDisposition.FORCED in dispoSet else 'No',
                   td.getSourceIndex())

            self.tracksTable.add_row(*map(str, row))

            typeCounter[trackType] += 1
|
|
||||||
|
|
||||||
|
|
||||||
def swapTracks(self, trackIndex1: int, trackIndex2: int):
    """Swap the ordering of two sibling tracks of the current pattern.

    Both indices refer to positions in the sibling-descriptor list of
    the current pattern.

    Raises:
        ValueError: if either index is out of range.
        click.ClickException: if persisting either track fails.
    """
    ti1 = int(trackIndex1)
    ti2 = int(trackIndex2)

    siblingDescriptors: List[TrackDescriptor] = self.__tc.findSiblingDescriptors(self.__pattern.getId())

    numSiblings = len(siblingDescriptors)

    if ti1 < 0 or ti1 >= numSiblings:
        raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex1 ({ti1}) is out of range ({numSiblings})")

    if ti2 < 0 or ti2 >= numSiblings:
        raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex2 ({ti2}) is out of range ({numSiblings})")

    # NOTE(review): the raw arguments are used for list access below while
    # the validated ti1/ti2 were used for the range checks -- equivalent
    # for int inputs, but worth unifying.
    sibling1 = siblingDescriptors[trackIndex1]
    sibling2 = siblingDescriptors[trackIndex2]

    subIndex2 = sibling2.getSubIndex()

    # sibling2 takes sibling1's index and sub-index ...
    sibling2.setIndex(sibling1.getIndex())
    sibling2.setSubIndex(sibling1.getSubIndex())

    # ... while sibling1 takes the *position* trackIndex2 as its index and
    # sibling2's former sub-index.
    # NOTE(review): asymmetric -- sibling1 gets trackIndex2, not
    # sibling2's stored index; confirm this is intended.
    sibling1.setIndex(trackIndex2)
    sibling1.setSubIndex(subIndex2)

    if not self.__tc.updateTrack(sibling1.getId(), sibling1):
        raise click.ClickException('Update sibling1 failed')
    if not self.__tc.updateTrack(sibling2.getId(), sibling2):
        raise click.ClickException('Update sibling2 failed')

    # Re-render the table with the new ordering.
    self.updateTracks()
|
|
||||||
|
|
||||||
|
|
||||||
def updateTags(self):
    """Rebuild the media-tag table from the pattern's current tags.

    Keys listed in the ignore set render blue, keys listed in the
    remove set render red (remove wins when a key is in both).
    """
    self.tagsTable.clear()

    if self.__pattern is None:
        return

    tags = self.__tac.findAllMediaTags(self.__pattern.getId())

    for tagKey, tagValue in tags.items():

        if tagKey in self.__removeGlobalKeys:
            textColor = 'red'
        elif tagKey in self.__ignoreGlobalKeys:
            textColor = 'blue'
        else:
            textColor = None

        self.tagsTable.add_row(str(formatRichColor(tagKey, textColor)),
                               str(formatRichColor(tagValue, textColor)))
|
|
||||||
|
|
||||||
|
|
||||||
def on_mount(self):
    """Populate the show label and pattern input once the screen mounts."""
    show = self.__showDescriptor
    if show is not None:
        self.query_one("#showlabel", Static).update(f"{show.getId()} - {show.getName()} ({show.getYear()})")

    if self.__pattern is not None:
        self.query_one("#pattern_input", Input).value = str(self.__pattern.getPattern())

    self.updateTags()
    self.updateTracks()
|
|
||||||
|
|
||||||
def compose(self):
    """Build the screen: pattern input, tag table, stream table, controls."""
    # Tag table: key/value pairs of the pattern's media tags.
    self.tagsTable = DataTable(classes="seven")
    self.column_key_tag_key = self.tagsTable.add_column("Key", width=50)
    self.column_key_tag_value = self.tagsTable.add_column("Value", width=100)
    self.tagsTable.cursor_type = 'row'

    # Stream table: one row per track descriptor.
    self.tracksTable = DataTable(id="tracks_table", classes="seven")
    self.column_key_track_index = self.tracksTable.add_column("Index", width=5)
    self.column_key_track_type = self.tracksTable.add_column("Type", width=10)
    self.column_key_track_sub_index = self.tracksTable.add_column("SubIndex", width=8)
    self.column_key_track_codec = self.tracksTable.add_column("Codec", width=10)
    self.column_key_track_audio_layout = self.tracksTable.add_column("Layout", width=10)
    self.column_key_track_language = self.tracksTable.add_column("Language", width=15)
    self.column_key_track_title = self.tracksTable.add_column("Title", width=48)
    self.column_key_track_default = self.tracksTable.add_column("Default", width=8)
    self.column_key_track_forced = self.tracksTable.add_column("Forced", width=8)
    self.column_key_track_source_index = self.tracksTable.add_column("SrcIndex", width=8)
    self.tracksTable.cursor_type = 'row'

    # Tag/track mutation buttons only make sense for a persisted pattern.
    editing = self.__pattern is not None

    yield Header()

    with Grid():

        # Row 1: caption + pattern input
        yield Static("Edit filename pattern" if editing else "New filename pattern", id="toplabel")
        yield Input(type="text", id="pattern_input", classes="six")

        # Row 2: show info and substitute button
        yield Static("from show")
        yield Static("", id="showlabel", classes="five")
        yield Button("Substitute pattern", id="pattern_button")

        # Rows 3-4: spacers
        yield Static(" ", classes="seven")
        yield Static(" ", classes="seven")

        # Row 5: media tag controls
        yield Static("Media Tags")
        if editing:
            yield Button("Add", id="button_add_tag")
            yield Button("Edit", id="button_edit_tag")
            yield Button("Delete", id="button_delete_tag")
        else:
            for _ in range(3):
                yield Static(" ")
        for _ in range(3):
            yield Static(" ")

        # Row 6: tag table
        yield self.tagsTable

        # Row 7: spacer
        yield Static(" ", classes="seven")

        # Row 8: stream controls
        yield Static("Streams")
        if editing:
            yield Button("Add", id="button_add_track")
            yield Button("Edit", id="button_edit_track")
            yield Button("Delete", id="button_delete_track")
        else:
            for _ in range(3):
                yield Static(" ")
        yield Static(" ")
        yield Button("Up", id="button_track_up")
        yield Button("Down", id="button_track_down")

        # Row 9: stream table
        yield self.tracksTable

        # Rows 10-11: spacers
        yield Static(" ", classes="seven")
        yield Static(" ", classes="seven")

        # Row 12: save/cancel
        yield Button("Save", id="save_button")
        yield Button("Cancel", id="cancel_button")
        yield Static(" ", classes="five")

        # Row 13: spacer
        yield Static(" ", classes="seven")

    yield Footer()
|
|
||||||
|
|
||||||
|
|
||||||
def getPatternFromInput(self):
    """Return the current content of the pattern input field as a string."""
    patternInput = self.query_one("#pattern_input", Input)
    return str(patternInput.value)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedTrackDescriptor(self):
    """Return the TrackDescriptor of the selected stream-table row.

    Returns None when no pattern is loaded or no row is selected.
    """
    if not self.__pattern:
        return None

    try:
        # Resolve the cursor position to a row key.
        row_key, _ = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate)
        if row_key is None:
            return None

        rowData = self.tracksTable.get_row(row_key)

        # Column 0 holds the track index, column 2 the sub index.
        trackIndex = int(rowData[0])
        trackSubIndex = int(rowData[2])

        track = self.__tc.getTrack(self.__pattern.getId(), trackIndex)
        return track.getDescriptor(self.context, subIndex=trackSubIndex)

    except CellDoesNotExist:
        return None
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedTag(self):
    """Return (key, value) of the selected tag row, or None if no selection."""
    try:
        row_key, _ = self.tagsTable.coordinate_to_cell_key(self.tagsTable.cursor_coordinate)
        if row_key is None:
            return None

        rowData = self.tagsTable.get_row(row_key)

        # Strip the rich color markup that updateTags() added for display.
        return removeRichColor(rowData[0]), removeRichColor(rowData[1])

    except CellDoesNotExist:
        return None
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Event handler for button press
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch all button presses of the pattern details screen.

    Bugfix: the tag edit/delete and track up/down branches previously
    dereferenced the current selection without a None check and crashed
    when no table row was selected.
    """
    if event.button.id == "save_button":

        patternDescriptor = {}
        patternDescriptor['show_id'] = self.__showDescriptor.getId()
        patternDescriptor['pattern'] = self.getPatternFromInput()

        if self.__pattern is not None:
            # Existing pattern: update in place.
            if self.__pc.updatePattern(self.__pattern.getId(), patternDescriptor):
                self.dismiss(patternDescriptor)
            else:
                #TODO: Meldung
                self.app.pop_screen()
        else:
            # New pattern: create it.
            patternId = self.__pc.addPattern(patternDescriptor)
            if patternId:
                self.dismiss(patternDescriptor)
            else:
                #TODO: Meldung
                self.app.pop_screen()

    if event.button.id == "cancel_button":
        self.app.pop_screen()

    # Save pattern when just created before adding streams
    if self.__pattern is not None:

        numTracks = len(self.tracksTable.rows)

        if event.button.id == "button_add_track":
            self.app.push_screen(TrackDetailsScreen(patternId = self.__pattern.getId(), index = numTracks), self.handle_add_track)

        selectedTrack = self.getSelectedTrackDescriptor()
        if selectedTrack is not None:
            if event.button.id == "button_edit_track":
                self.app.push_screen(TrackDetailsScreen(trackDescriptor = selectedTrack), self.handle_edit_track)
            if event.button.id == "button_delete_track":
                self.app.push_screen(TrackDeleteScreen(trackDescriptor = selectedTrack), self.handle_delete_track)

    if event.button.id == "button_add_tag":
        if self.__pattern is not None:
            self.app.push_screen(TagDetailsScreen(), self.handle_update_tag)

    if event.button.id == "button_edit_tag":
        selectedTag = self.getSelectedTag()
        if selectedTag is not None:  # bugfix: no crash without a selection
            tagKey, tagValue = selectedTag
            self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag)

    if event.button.id == "button_delete_tag":
        selectedTag = self.getSelectedTag()
        if selectedTag is not None:  # bugfix: no crash without a selection
            tagKey, tagValue = selectedTag
            self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag)

    if event.button.id == "pattern_button":

        pattern = self.query_one("#pattern_input", Input).value

        patternMatch = re.search(FileProperties.SE_INDICATOR_PATTERN, pattern)

        if patternMatch:
            # Substitute the concrete season/episode indicator with the
            # generic pattern template.
            self.query_one("#pattern_input", Input).value = pattern.replace(patternMatch.group(1),
                                                                            FileProperties.SE_INDICATOR_PATTERN)

    if event.button.id == "button_track_up":

        selectedTrackDescriptor = self.getSelectedTrackDescriptor()
        if selectedTrackDescriptor is not None:  # bugfix: nothing selected
            selectedTrackIndex = selectedTrackDescriptor.getIndex()

            if 0 < selectedTrackIndex < self.tracksTable.row_count:
                self.swapTracks(selectedTrackIndex, selectedTrackIndex - 1)

    if event.button.id == "button_track_down":

        selectedTrackDescriptor = self.getSelectedTrackDescriptor()
        if selectedTrackDescriptor is not None:  # bugfix: nothing selected
            selectedTrackIndex = selectedTrackDescriptor.getIndex()

            if 0 <= selectedTrackIndex < (self.tracksTable.row_count - 1):
                self.swapTracks(selectedTrackIndex, selectedTrackIndex + 1)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_add_track(self, trackDescriptor : TrackDescriptor):
    """Append a table row for a newly created track.

    Bugfix: the row now carries all ten table columns. The audio-layout
    and source-index cells were previously missing, which shifted every
    cell from Language onward one column to the left.
    """
    dispoSet = trackDescriptor.getDispositionSet()
    trackType = trackDescriptor.getType()

    row = (trackDescriptor.getIndex(),
           trackType.label(),
           trackDescriptor.getSubIndex(),
           trackDescriptor.getCodec().label(),
           # Audio layout is only meaningful for audio tracks
           # (same rule as handle_edit_track()).
           trackDescriptor.getAudioLayout().label() if trackType == TrackType.AUDIO else ' ',
           trackDescriptor.getLanguage().label(),
           trackDescriptor.getTitle(),
           'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No',
           'Yes' if TrackDisposition.FORCED in dispoSet else 'No',
           trackDescriptor.getSourceIndex())

    self.tracksTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
|
|
||||||
def handle_edit_track(self, trackDescriptor : TrackDescriptor):
    """Refresh the editable cells of the selected row after a track edit."""
    try:
        row_key, _ = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate)

        dispoSet = trackDescriptor.getDispositionSet()
        isAudio = trackDescriptor.getType() == TrackType.AUDIO

        cellUpdates = {
            # Layout only applies to audio tracks.
            self.column_key_track_audio_layout: trackDescriptor.getAudioLayout().label() if isAudio else ' ',
            self.column_key_track_language: trackDescriptor.getLanguage().label(),
            self.column_key_track_title: trackDescriptor.getTitle(),
            self.column_key_track_default: 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No',
            self.column_key_track_forced: 'Yes' if TrackDisposition.FORCED in dispoSet else 'No',
        }

        for columnKey, cellValue in cellUpdates.items():
            self.tracksTable.update_cell(row_key, columnKey, cellValue)

    except CellDoesNotExist:
        # Cursor not on a valid cell: nothing to refresh.
        pass
|
|
||||||
|
|
||||||
|
|
||||||
def handle_delete_track(self, trackDescriptor : TrackDescriptor):
    """Rebuild the stream table after a track has been deleted."""
    self.updateTracks()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def handle_update_tag(self, tag):
    """Persist an added or edited media tag and refresh the tag table.

    Args:
        tag: (key, value) pair from the tag details dialog.
    """
    if self.__pattern is None:
        raise click.ClickException(f"PatternDetailsScreen.handle_update_tag: pattern not set")

    updated = self.__tac.updateMediaTag(self.__pattern.getId(), tag[0], tag[1])
    if updated is not None:
        self.updateTags()
|
|
||||||
|
|
||||||
def handle_delete_tag(self, tag):
    """Remove a media tag by key and refresh the tag table.

    Args:
        tag: (key, value) pair; only the key is used for deletion.

    Raises:
        click.ClickException: when no pattern is loaded or the delete fails.
    """
    if self.__pattern is None:
        raise click.ClickException(f"PatternDetailsScreen.handle_delete_tag: pattern not set")

    if not self.__tac.deleteMediaTagByKey(self.__pattern.getId(), tag[0]):
        raise click.ClickException('tag delete failed')

    self.updateTags()
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
import subprocess, logging
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
def executeProcess(commandSequence: List[str], directory: str = None, context: dict = None):
    """Run a command, optionally wrapped in 'nice' and 'cpulimit'.

    Args:
        commandSequence: command and arguments to execute.
        directory: working directory for the child process (None = inherit).
        context: optional application context. May provide a 'logger' and a
            'resource_limits' dict with 'niceness' (-20..19) and
            'cpu_percent' (1..99) entries.

    Returns:
        Tuple (stdout, stderr, returncode) of the finished process.
    """
    if context is None:
        logger = logging.getLogger('FFX')
        logger.addHandler(logging.NullHandler())
        limits = {}
    else:
        logger = context['logger']
        limits = context.get('resource_limits', {})

    # Out-of-range sentinels mean "not configured": niceness 99 and
    # cpu_percent 0 both fail the checks below and add no wrapper.
    niceness = int(limits['niceness']) if 'niceness' in limits else 99
    cpu_percent = int(limits['cpu_percent']) if 'cpu_percent' in limits else 0

    niceSequence = []

    if -20 <= niceness <= 19:
        niceSequence += ['nice', '-n', str(niceness)]
    if cpu_percent >= 1:
        niceSequence += ['cpulimit', '-l', str(cpu_percent), '--']

    niceCommand = niceSequence + commandSequence

    logger.debug(f"executeProcess() command sequence: {' '.join(niceCommand)}")

    process = subprocess.Popen(niceCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               encoding='utf-8', cwd = directory)
    output, error = process.communicate()

    return output, error, process.returncode
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
from textual.app import ComposeResult
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Footer, Placeholder
|
|
||||||
|
|
||||||
|
|
||||||
class SettingsScreen(Screen):
    """Placeholder screen for application settings."""

    def __init__(self):
        super().__init__()
        # Bugfix: the context was assigned to a discarded local variable;
        # store it on the instance like every other screen does.
        self.context = self.app.getContext()

    def compose(self) -> ComposeResult:
        yield Placeholder("Settings Screen")
        yield Footer()
|
|
||||||
@@ -1,233 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from ffx.model.shifted_season import ShiftedSeason
|
|
||||||
|
|
||||||
|
|
||||||
class EpisodeOrderException(Exception):
    """Raised when a season's last episode precedes its first episode."""
    pass
|
|
||||||
|
|
||||||
class RangeOverlapException(Exception):
    """Raised when a shifted season's episode range overlaps a sibling's."""
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class ShiftedSeasonController():
    """CRUD and lookup operations for ShiftedSeason database rows.

    A shifted season maps an (original season, episode range) of a show
    to an offset season/episode numbering.
    """

    def __init__(self, context):
        # Application context; must provide a database session factory
        # and a logger.
        self.context = context
        self.Session = self.context['database']['session'] # convenience

    def checkShiftedSeason(self, showId: int, shiftedSeasonObj: dict, shiftedSeasonId: int = 0):
        """Return True when the given episode range does not overlap any
        sibling shifted season of the same show and original season.

        Args:
            showId: id of the show whose shifted seasons are checked.
            shiftedSeasonObj: dict with 'original_season', 'first_episode'
                and 'last_episode' entries.
            shiftedSeasonId: id of an existing entry to exclude from the
                check (0 when validating a new entry).

        Raises:
            click.ClickException: on any database error.
        """
        try:
            s = self.Session()

            originalSeason = shiftedSeasonObj['original_season']
            firstEpisode = int(shiftedSeasonObj['first_episode'])
            lastEpisode = int(shiftedSeasonObj['last_episode'])

            q = s.query(ShiftedSeason).filter(ShiftedSeason.show_id == int(showId))
            if shiftedSeasonId:
                q = q.filter(ShiftedSeason.id != int(shiftedSeasonId))

            siblingShiftedSeason: ShiftedSeason
            for siblingShiftedSeason in q.all():

                # Bugfix: getOriginalSeason was referenced without calling
                # it, so the comparison below compared against a bound
                # method, was always False, and overlaps were never detected.
                siblingOriginalSeason = siblingShiftedSeason.getOriginalSeason()
                siblingFirstEpisode = siblingShiftedSeason.getFirstEpisode()
                siblingLastEpisode = siblingShiftedSeason.getLastEpisode()

                # Standard closed-interval overlap test.
                if (originalSeason == siblingOriginalSeason
                        and lastEpisode >= siblingFirstEpisode
                        and siblingLastEpisode >= firstEpisode):

                    return False
            return True

        except Exception as ex:
            # Bugfix: the message previously named addShiftedSeason().
            raise click.ClickException(f"ShiftedSeasonController.checkShiftedSeason(): {repr(ex)}")
        finally:
            s.close()


    def addShiftedSeason(self, showId: int, shiftedSeasonObj: dict):
        """Create a new ShiftedSeason row and return its id.

        Raises:
            ValueError: on wrong argument types.
            click.ClickException: on database errors (including a wrapped
                EpisodeOrderException when last_episode < first_episode).
        """
        if type(showId) is not int:
            raise ValueError(f"ShiftedSeasonController.addShiftedSeason(): Argument showId is required to be of type int")

        if type(shiftedSeasonObj) is not dict:
            raise ValueError(f"ShiftedSeasonController.addShiftedSeason(): Argument shiftedSeasonObj is required to be of type dict")

        try:
            s = self.Session()

            firstEpisode = int(shiftedSeasonObj['first_episode'])
            lastEpisode = int(shiftedSeasonObj['last_episode'])

            if lastEpisode < firstEpisode:
                # NOTE(review): this is caught by the broad except below and
                # surfaces as a ClickException — confirm that is intended.
                raise EpisodeOrderException()

            shiftedSeason = ShiftedSeason(show_id = int(showId),
                                          original_season = int(shiftedSeasonObj['original_season']),
                                          first_episode = firstEpisode,
                                          last_episode = lastEpisode,
                                          season_offset = int(shiftedSeasonObj['season_offset']),
                                          episode_offset = int(shiftedSeasonObj['episode_offset']))
            s.add(shiftedSeason)
            s.commit()
            return shiftedSeason.getId()

        except Exception as ex:
            raise click.ClickException(f"ShiftedSeasonController.addShiftedSeason(): {repr(ex)}")
        finally:
            s.close()


    def updateShiftedSeason(self, shiftedSeasonId: int, shiftedSeasonObj: dict):
        """Update an existing row from shiftedSeasonObj.

        Returns:
            True on success, False when the id does not exist.
        """
        if type(shiftedSeasonId) is not int:
            raise ValueError(f"ShiftedSeasonController.updateShiftedSeason(): Argument shiftedSeasonId is required to be of type int")

        if type(shiftedSeasonObj) is not dict:
            raise ValueError(f"ShiftedSeasonController.updateShiftedSeason(): Argument shiftedSeasonObj is required to be of type dict")

        try:
            s = self.Session()

            q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId))

            if q.count():

                shiftedSeason = q.first()

                shiftedSeason.original_season = int(shiftedSeasonObj['original_season'])
                shiftedSeason.first_episode = int(shiftedSeasonObj['first_episode'])
                shiftedSeason.last_episode = int(shiftedSeasonObj['last_episode'])
                shiftedSeason.season_offset = int(shiftedSeasonObj['season_offset'])
                shiftedSeason.episode_offset = int(shiftedSeasonObj['episode_offset'])

                s.commit()
                return True

            else:
                return False

        except Exception as ex:
            raise click.ClickException(f"ShiftedSeasonController.updateShiftedSeason(): {repr(ex)}")
        finally:
            s.close()


    def findShiftedSeason(self, showId: int, originalSeason: int, firstEpisode: int, lastEpisode: int):
        """Return the id of the row exactly matching all four fields, or None."""
        if type(showId) is not int:
            # Bugfix: this message previously named the wrong argument.
            raise ValueError(f"ShiftedSeasonController.findShiftedSeason(): Argument showId is required to be of type int")

        if type(originalSeason) is not int:
            raise ValueError(f"ShiftedSeasonController.findShiftedSeason(): Argument originalSeason is required to be of type int")

        if type(firstEpisode) is not int:
            raise ValueError(f"ShiftedSeasonController.findShiftedSeason(): Argument firstEpisode is required to be of type int")

        if type(lastEpisode) is not int:
            raise ValueError(f"ShiftedSeasonController.findShiftedSeason(): Argument lastEpisode is required to be of type int")

        try:
            s = self.Session()
            q = s.query(ShiftedSeason).filter(ShiftedSeason.show_id == int(showId),
                                              ShiftedSeason.original_season == int(originalSeason),
                                              ShiftedSeason.first_episode == int(firstEpisode),
                                              ShiftedSeason.last_episode == int(lastEpisode))

            return q.first().getId() if q.count() else None

        except Exception as ex:
            # Bugfix: the message previously named PatternController.
            raise click.ClickException(f"ShiftedSeasonController.findShiftedSeason(): {repr(ex)}")
        finally:
            s.close()

    def getShiftedSeasonSiblings(self, showId: int):
        """Return all ShiftedSeason rows belonging to the given show."""
        if type(showId) is not int:
            # Bugfix: this message previously named the wrong argument.
            raise ValueError(f"ShiftedSeasonController.getShiftedSeasonSiblings(): Argument showId is required to be of type int")

        try:
            s = self.Session()
            q = s.query(ShiftedSeason).filter(ShiftedSeason.show_id == int(showId))

            return q.all()

        except Exception as ex:
            # Bugfix: the message previously named PatternController.
            raise click.ClickException(f"ShiftedSeasonController.getShiftedSeasonSiblings(): {repr(ex)}")
        finally:
            s.close()


    def getShiftedSeason(self, shiftedSeasonId: int):
        """Return the ShiftedSeason row with the given id, or None."""
        if type(shiftedSeasonId) is not int:
            raise ValueError(f"ShiftedSeasonController.getShiftedSeason(): Argument shiftedSeasonId is required to be of type int")

        try:
            s = self.Session()
            q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId))

            return q.first() if q.count() else None

        except Exception as ex:
            raise click.ClickException(f"ShiftedSeasonController.getShiftedSeason(): {repr(ex)}")
        finally:
            s.close()


    def deleteShiftedSeason(self, shiftedSeasonId):
        """Delete the row with the given id; return True on success."""
        if type(shiftedSeasonId) is not int:
            raise ValueError(f"ShiftedSeasonController.deleteShiftedSeason(): Argument shiftedSeasonId is required to be of type int")

        try:
            s = self.Session()
            q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId))

            if q.count():

                # Delete via the session rather than the query, see
                # https://stackoverflow.com/a/19245058
                shiftedSeason = q.first()
                s.delete(shiftedSeason)

                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"ShiftedSeasonController.deleteShiftedSeason(): {repr(ex)}")
        finally:
            s.close()


    def shiftSeason(self, showId, season, episode):
        """Apply the first matching shifted-season rule to (season, episode).

        A rule matches when its original season equals `season` and the
        episode lies inside the rule's range; -1 marks an open bound.

        Returns:
            The shifted (season, episode) tuple, or the input unchanged
            when no rule matches.
        """
        shiftedSeasonEntry: ShiftedSeason
        for shiftedSeasonEntry in self.getShiftedSeasonSiblings(showId):

            if (season == shiftedSeasonEntry.getOriginalSeason()
                    and (shiftedSeasonEntry.getFirstEpisode() == -1 or episode >= shiftedSeasonEntry.getFirstEpisode())
                    and (shiftedSeasonEntry.getLastEpisode() == -1 or episode <= shiftedSeasonEntry.getLastEpisode())):

                shiftedSeason = season + shiftedSeasonEntry.getSeasonOffset()
                shiftedEpisode = episode + shiftedSeasonEntry.getEpisodeOffset()

                self.context['logger'].info(f"Shifting season: {season} episode: {episode} "
                                            +f"-> season: {shiftedSeason} episode: {shiftedEpisode}")

                return shiftedSeason, shiftedEpisode

        return season, episode
|
|
||||||
@@ -1,125 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from .shifted_season_controller import ShiftedSeasonController
|
|
||||||
|
|
||||||
from ffx.model.shifted_season import ShiftedSeason
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class ShiftedSeasonDeleteScreen(Screen):
    """Confirmation dialog that displays a shifted season and deletes it
    on request.

    Dismisses with the deleted shifted season id; 'Cancel' just pops
    the screen.
    """

    CSS = """

    Grid {
        grid-size: 2;
        grid-rows: 2 auto;
        grid-columns: 30 330;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, showId = None, shiftedSeasonId = None):
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        self.__ssc = ShiftedSeasonController(context = self.context)

        # Consistency fix: use the same double-underscore attribute style
        # as the sibling screens (was: self._showId).
        self.__showId = showId
        self.__shiftedSeasonId = shiftedSeasonId


    def on_mount(self):
        """Fill the static fields with the shifted season's current values."""
        shiftedSeason: ShiftedSeason = self.__ssc.getShiftedSeason(self.__shiftedSeasonId)

        self.query_one("#static_show_id", Static).update(str(self.__showId))
        self.query_one("#static_original_season", Static).update(str(shiftedSeason.getOriginalSeason()))
        self.query_one("#static_first_episode", Static).update(str(shiftedSeason.getFirstEpisode()))
        self.query_one("#static_last_episode", Static).update(str(shiftedSeason.getLastEpisode()))
        self.query_one("#static_season_offset", Static).update(str(shiftedSeason.getSeasonOffset()))
        self.query_one("#static_episode_offset", Static).update(str(shiftedSeason.getEpisodeOffset()))


    def compose(self):
        """Lay out the confirmation grid."""
        yield Header()

        with Grid():

            yield Static("Are you sure to delete the following shifted season?", id="toplabel", classes="two")

            yield Static(" ", classes="two")

            yield Static("from show")
            yield Static(" ", id="static_show_id")

            yield Static(" ", classes="two")

            yield Static("Original season")
            yield Static(" ", id="static_original_season")

            yield Static("First episode")
            yield Static(" ", id="static_first_episode")

            yield Static("Last episode")
            yield Static(" ", id="static_last_episode")

            yield Static("Season offset")
            yield Static(" ", id="static_season_offset")

            yield Static("Episode offset")
            yield Static(" ", id="static_episode_offset")

            yield Static(" ", classes="two")

            yield Button("Delete", id="delete_button")
            yield Button("Cancel", id="cancel_button")

        yield Footer()


    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Delete the entry on 'Delete', close the dialog on 'Cancel'."""
        if event.button.id == "delete_button":

            if self.__shiftedSeasonId is None:
                raise click.ClickException('ShiftedSeasonDeleteScreen.on_button_pressed(): shifted season id is undefined')

            if self.__ssc.deleteShiftedSeason(self.__shiftedSeasonId):
                self.dismiss(self.__shiftedSeasonId)

            else:
                #TODO: Meldung
                self.app.pop_screen()

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
|
||||||
|
|
||||||
@@ -1,221 +0,0 @@
|
|||||||
from typing import List
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button, Input
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from .shifted_season_controller import ShiftedSeasonController
|
|
||||||
|
|
||||||
from ffx.model.shifted_season import ShiftedSeason
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class ShiftedSeasonDetailsScreen(Screen):
|
|
||||||
|
|
||||||
CSS = """
|
|
||||||
|
|
||||||
Grid {
|
|
||||||
grid-size: 3 10;
|
|
||||||
grid-rows: 2 2 2 2 2 2 2 2 2 2;
|
|
||||||
grid-columns: 40 40 40;
|
|
||||||
height: 100%;
|
|
||||||
width: 100%;
|
|
||||||
padding: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
Input {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
Button {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable {
|
|
||||||
min-height: 6;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--cursor {
|
|
||||||
background: darkorange;
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--header {
|
|
||||||
background: steelblue;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
#toplabel {
|
|
||||||
height: 1;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.two {
|
|
||||||
column-span: 3;
|
|
||||||
}
|
|
||||||
|
|
||||||
.three {
|
|
||||||
column-span: 3;
|
|
||||||
}
|
|
||||||
|
|
||||||
.four {
|
|
||||||
column-span: 4;
|
|
||||||
}
|
|
||||||
.five {
|
|
||||||
column-span: 5;
|
|
||||||
}
|
|
||||||
.six {
|
|
||||||
column-span: 6;
|
|
||||||
}
|
|
||||||
.seven {
|
|
||||||
column-span: 7;
|
|
||||||
}
|
|
||||||
|
|
||||||
.box {
|
|
||||||
height: 100%;
|
|
||||||
border: solid green;
|
|
||||||
}
|
|
||||||
|
|
||||||
.yellow {
|
|
||||||
tint: yellow 40%;
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, showId = None, shiftedSeasonId = None):
    """Prepare the edit (shiftedSeasonId given) or create screen.

    Args:
        showId: id of the show this shifted season belongs to.
        shiftedSeasonId: id of an existing entry to edit, or None for new.
    """
    super().__init__()

    self.context = self.app.getContext()
    self.Session = self.context['database']['session'] # convenience

    # Controller used for loading/persisting the shifted season.
    self.__ssc = ShiftedSeasonController(context = self.context)

    self.__showId = showId
    self.__shiftedSeasonId = shiftedSeasonId
|
|
||||||
|
|
||||||
def on_mount(self):
|
|
||||||
|
|
||||||
if self.__shiftedSeasonId is not None:
|
|
||||||
shiftedSeason: ShiftedSeason = self.__ssc.getShiftedSeason(self.__shiftedSeasonId)
|
|
||||||
|
|
||||||
originalSeason = shiftedSeason.getOriginalSeason()
|
|
||||||
self.query_one("#input_original_season", Input).value = str(originalSeason)
|
|
||||||
|
|
||||||
firstEpisode = shiftedSeason.getFirstEpisode()
|
|
||||||
self.query_one("#input_first_episode", Input).value = str(firstEpisode) if firstEpisode != -1 else ''
|
|
||||||
|
|
||||||
lastEpisode = shiftedSeason.getLastEpisode()
|
|
||||||
self.query_one("#input_last_episode", Input).value = str(lastEpisode) if lastEpisode != -1 else ''
|
|
||||||
|
|
||||||
seasonOffset = shiftedSeason.getSeasonOffset()
|
|
||||||
self.query_one("#input_season_offset", Input).value = str(seasonOffset) if seasonOffset else ''
|
|
||||||
|
|
||||||
episodeOffset = shiftedSeason.getEpisodeOffset()
|
|
||||||
self.query_one("#input_episode_offset", Input).value = str(episodeOffset) if episodeOffset else ''
|
|
||||||
|
|
||||||
|
|
||||||
def compose(self):
|
|
||||||
|
|
||||||
yield Header()
|
|
||||||
|
|
||||||
with Grid():
|
|
||||||
|
|
||||||
# 1
|
|
||||||
yield Static("Edit shifted season" if self.__shiftedSeasonId is not None else "New shifted season", id="toplabel", classes="three")
|
|
||||||
|
|
||||||
# 2
|
|
||||||
yield Static(" ", classes="three")
|
|
||||||
|
|
||||||
# 3
|
|
||||||
yield Static("Original season")
|
|
||||||
yield Input(id="input_original_season", classes="two")
|
|
||||||
|
|
||||||
# 4
|
|
||||||
yield Static("First Episode")
|
|
||||||
yield Input(id="input_first_episode", classes="two")
|
|
||||||
|
|
||||||
# 5
|
|
||||||
yield Static("Last Episode")
|
|
||||||
yield Input(id="input_last_episode", classes="two")
|
|
||||||
|
|
||||||
# 6
|
|
||||||
yield Static("Season offset")
|
|
||||||
yield Input(id="input_season_offset", classes="two")
|
|
||||||
|
|
||||||
# 7
|
|
||||||
yield Static("Episode offset")
|
|
||||||
yield Input(id="input_episode_offset", classes="two")
|
|
||||||
|
|
||||||
# 8
|
|
||||||
yield Static(" ", classes="three")
|
|
||||||
|
|
||||||
# 9
|
|
||||||
yield Button("Save", id="save_button")
|
|
||||||
yield Button("Cancel", id="cancel_button")
|
|
||||||
yield Static(" ")
|
|
||||||
|
|
||||||
# 10
|
|
||||||
yield Static(" ", classes="three")
|
|
||||||
|
|
||||||
yield Footer()
|
|
||||||
|
|
||||||
|
|
||||||
def getShiftedSeasonObjFromInput(self):
|
|
||||||
|
|
||||||
shiftedSeasonObj = {}
|
|
||||||
|
|
||||||
originalSeason = self.query_one("#input_original_season", Input).value
|
|
||||||
if not originalSeason:
|
|
||||||
return None
|
|
||||||
shiftedSeasonObj['original_season'] = int(originalSeason)
|
|
||||||
|
|
||||||
try:
|
|
||||||
shiftedSeasonObj['first_episode'] = int(self.query_one("#input_first_episode", Input).value)
|
|
||||||
except ValueError:
|
|
||||||
shiftedSeasonObj['first_episode'] = -1
|
|
||||||
|
|
||||||
try:
|
|
||||||
shiftedSeasonObj['last_episode'] = int(self.query_one("#input_last_episode", Input).value)
|
|
||||||
except ValueError:
|
|
||||||
shiftedSeasonObj['last_episode'] = -1
|
|
||||||
|
|
||||||
try:
|
|
||||||
shiftedSeasonObj['season_offset'] = int(self.query_one("#input_season_offset", Input).value)
|
|
||||||
except ValueError:
|
|
||||||
shiftedSeasonObj['season_offset'] = 0
|
|
||||||
|
|
||||||
try:
|
|
||||||
shiftedSeasonObj['episode_offset'] = int(self.query_one("#input_episode_offset", Input).value)
|
|
||||||
except ValueError:
|
|
||||||
shiftedSeasonObj['episode_offset'] = 0
|
|
||||||
|
|
||||||
return shiftedSeasonObj
|
|
||||||
|
|
||||||
|
|
||||||
# Event handler for button press
|
|
||||||
def on_button_pressed(self, event: Button.Pressed) -> None:
|
|
||||||
|
|
||||||
# Check if the button pressed is the one we are interested in
|
|
||||||
if event.button.id == "save_button":
|
|
||||||
|
|
||||||
shiftedSeasonObj = self.getShiftedSeasonObjFromInput()
|
|
||||||
|
|
||||||
if shiftedSeasonObj is not None:
|
|
||||||
|
|
||||||
if self.__shiftedSeasonId is not None:
|
|
||||||
|
|
||||||
if self.__ssc.checkShiftedSeason(self.__showId, shiftedSeasonObj,
|
|
||||||
shiftedSeasonId = self.__shiftedSeasonId):
|
|
||||||
if self.__ssc.updateShiftedSeason(self.__shiftedSeasonId, shiftedSeasonObj):
|
|
||||||
self.dismiss((self.__shiftedSeasonId, shiftedSeasonObj))
|
|
||||||
else:
|
|
||||||
#TODO: Meldung
|
|
||||||
self.app.pop_screen()
|
|
||||||
|
|
||||||
else:
|
|
||||||
if self.__ssc.checkShiftedSeason(self.__showId, shiftedSeasonObj):
|
|
||||||
self.__shiftedSeasonId = self.__ssc.addShiftedSeason(self.__showId, shiftedSeasonObj)
|
|
||||||
self.dismiss((self.__shiftedSeasonId, shiftedSeasonObj))
|
|
||||||
|
|
||||||
|
|
||||||
if event.button.id == "cancel_button":
|
|
||||||
self.app.pop_screen()
|
|
||||||
@@ -1,133 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from ffx.model.show import Show
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
|
|
||||||
class ShowController():
    """Database CRUD operations for Show records.

    All methods open a fresh session, translate any failure into a
    click.ClickException and always close the session.
    """

    def __init__(self, context):
        self.context = context
        self.Session = self.context['database']['session'] # convenience

    def getShowDescriptor(self, showId):
        """Return the ShowDescriptor for *showId*, or None when unknown."""

        # session is created before the try block so a failing Session()
        # cannot leave `s` unbound when `finally` runs
        s = self.Session()
        try:
            # single query instead of count() + first()
            show: Show = s.query(Show).filter(Show.id == showId).first()

            if show is not None:
                return show.getDescriptor(self.context)

        except Exception as ex:
            raise click.ClickException(f"ShowController.getShowDescriptor(): {repr(ex)}")
        finally:
            s.close()

    def getShow(self, showId):
        """Return the Show row for *showId*, or None when unknown."""

        s = self.Session()
        try:
            # first() already returns None for an empty result
            return s.query(Show).filter(Show.id == showId).first()

        except Exception as ex:
            raise click.ClickException(f"ShowController.getShow(): {repr(ex)}")
        finally:
            s.close()

    def getAllShows(self):
        """Return all Show rows (an empty list when there are none)."""

        s = self.Session()
        try:
            # all() already yields [] for an empty table
            return s.query(Show).all()

        except Exception as ex:
            raise click.ClickException(f"ShowController.getAllShows(): {repr(ex)}")
        finally:
            s.close()

    def updateShow(self, showDescriptor: ShowDescriptor):
        """Insert or update the show described by *showDescriptor*.

        Returns True when a row was inserted or changed, False otherwise.
        """

        s = self.Session()
        try:
            currentShow = s.query(Show).filter(Show.id == showDescriptor.getId()).first()

            if currentShow is None:
                # unknown id: insert a new row
                show = Show(id = int(showDescriptor.getId()),
                            name = str(showDescriptor.getName()),
                            year = int(showDescriptor.getYear()),
                            index_season_digits = showDescriptor.getIndexSeasonDigits(),
                            index_episode_digits = showDescriptor.getIndexEpisodeDigits(),
                            indicator_season_digits = showDescriptor.getIndicatorSeasonDigits(),
                            indicator_episode_digits = showDescriptor.getIndicatorEpisodeDigits())

                s.add(show)
                s.commit()
                return True

            # known id: sync every field, committing only when something changed
            updates = (
                ('name', str(showDescriptor.getName())),
                ('year', int(showDescriptor.getYear())),
                ('index_season_digits', int(showDescriptor.getIndexSeasonDigits())),
                ('index_episode_digits', int(showDescriptor.getIndexEpisodeDigits())),
                ('indicator_season_digits', int(showDescriptor.getIndicatorSeasonDigits())),
                ('indicator_episode_digits', int(showDescriptor.getIndicatorEpisodeDigits())),
            )

            changed = False
            for attr, value in updates:
                if getattr(currentShow, attr) != value:
                    setattr(currentShow, attr, value)
                    changed = True

            if changed:
                s.commit()
            return changed

        except Exception as ex:
            raise click.ClickException(f"ShowController.updateShow(): {repr(ex)}")
        finally:
            s.close()

    def deleteShow(self, show_id):
        """Delete the show with *show_id*. Returns True when a row was removed."""

        s = self.Session()
        try:
            q = s.query(Show).filter(Show.id == int(show_id))

            if q.count():
                #DAFUQ: https://stackoverflow.com/a/19245058
                # q.delete() bypasses ORM cascades, so delete via the instance
                show = q.first()
                s.delete(show)

                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"ShowController.deleteShow(): {repr(ex)}")
        finally:
            s.close()
|
|
||||||
@@ -1,95 +0,0 @@
|
|||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from .show_controller import ShowController
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class ShowDeleteScreen(Screen):
    """Confirmation dialog for deleting a show.

    Dismisses with the deleted show's descriptor on success.
    """

    CSS = """

    Grid {
        grid-size: 2;
        grid-rows: 2 auto;
        grid-columns: 30 auto;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, showId = None):
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        self.__sc = ShowController(context = self.context)

        # BUGFIX: default to None (was {}) — all guards below test `is None`,
        # and a dict default would crash on .getId()/.getName() calls
        self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None

    def on_mount(self):
        """Show which record is about to be deleted."""
        if self.__showDescriptor is not None:
            self.query_one("#showlabel", Static).update(f"{self.__showDescriptor.getId()} - {self.__showDescriptor.getName()} ({self.__showDescriptor.getYear()})")

    def compose(self):
        """Lay out the confirmation dialog."""

        yield Header()

        with Grid():

            yield Static("Are you sure to delete the following show?", id="toplabel", classes="two")

            yield Static("", classes="two")

            yield Static("", id="showlabel")
            yield Static("")

            yield Static("", classes="two")

            yield Static("", classes="two")

            yield Button("Delete", id="delete_button")
            yield Button("Cancel", id="cancel_button")

        yield Footer()

    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Delete the show on confirmation, close the screen on cancel."""

        if event.button.id == "delete_button":

            if self.__showDescriptor is not None:
                if self.__sc.deleteShow(self.__showDescriptor.getId()):
                    self.dismiss(self.__showDescriptor)

            else:
                #TODO: Meldung (nothing loaded to delete — just close)
                self.app.pop_screen()

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
|
||||||
@@ -1,106 +0,0 @@
|
|||||||
import logging
|
|
||||||
|
|
||||||
|
|
||||||
class ShowDescriptor():
    """This class represents the structural content of a media file including streams and metadata"""

    CONTEXT_KEY = 'context'

    ID_KEY = 'id'
    NAME_KEY = 'name'
    YEAR_KEY = 'year'

    INDEX_SEASON_DIGITS_KEY = 'index_season_digits'
    INDEX_EPISODE_DIGITS_KEY = 'index_episode_digits'
    INDICATOR_SEASON_DIGITS_KEY = 'indicator_season_digits'
    INDICATOR_EPISODE_DIGITS_KEY = 'indicator_episode_digits'

    DEFAULT_INDEX_SEASON_DIGITS = 2
    DEFAULT_INDEX_EPISODE_DIGITS = 2
    DEFAULT_INDICATOR_SEASON_DIGITS = 2
    DEFAULT_INDICATOR_EPISODE_DIGITS = 2

    @staticmethod
    def __typedArg(kwargs, key, expectedType, default):
        """Return kwargs[key] after a strict type check, or *default* when absent.

        Raises:
            TypeError: when the value is present but not exactly *expectedType*.
        """
        if key not in kwargs:
            return default
        value = kwargs[key]
        # strict type() check (not isinstance) so subclasses like bool are
        # rejected for int, exactly as the original per-key checks did
        if type(value) is not expectedType:
            raise TypeError(f"ShowDescriptor.__init__(): Argument {key} is required to be of type {expectedType.__name__}")
        return value

    def __init__(self, **kwargs):
        """Build a descriptor from keyword arguments.

        All keys are optional; missing values fall back to the class defaults
        (-1 for id/year, '' for name, 2 for all digit counts).
        """

        context = ShowDescriptor.__typedArg(kwargs, ShowDescriptor.CONTEXT_KEY, dict, None)
        if context is not None:
            self.__context = context
            self.__logger = self.__context['logger']
        else:
            # no context supplied: use a silent default logger
            self.__context = {}
            self.__logger = logging.getLogger('FFX')
            self.__logger.addHandler(logging.NullHandler())

        self.__showId = ShowDescriptor.__typedArg(kwargs, ShowDescriptor.ID_KEY, int, -1)
        self.__showName = ShowDescriptor.__typedArg(kwargs, ShowDescriptor.NAME_KEY, str, '')
        self.__showYear = ShowDescriptor.__typedArg(kwargs, ShowDescriptor.YEAR_KEY, int, -1)

        self.__indexSeasonDigits = ShowDescriptor.__typedArg(
            kwargs, ShowDescriptor.INDEX_SEASON_DIGITS_KEY, int, ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS)
        self.__indexEpisodeDigits = ShowDescriptor.__typedArg(
            kwargs, ShowDescriptor.INDEX_EPISODE_DIGITS_KEY, int, ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS)
        self.__indicatorSeasonDigits = ShowDescriptor.__typedArg(
            kwargs, ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY, int, ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS)
        self.__indicatorEpisodeDigits = ShowDescriptor.__typedArg(
            kwargs, ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY, int, ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS)

    def getId(self):
        return self.__showId
    def getName(self):
        return self.__showName
    def getYear(self):
        return self.__showYear

    def getIndexSeasonDigits(self):
        return self.__indexSeasonDigits
    def getIndexEpisodeDigits(self):
        return self.__indexEpisodeDigits
    def getIndicatorSeasonDigits(self):
        return self.__indicatorSeasonDigits
    def getIndicatorEpisodeDigits(self):
        return self.__indicatorEpisodeDigits

    def getFilenamePrefix(self):
        """Return the 'Name (Year)' prefix used for output filenames."""
        return f"{self.__showName} ({str(self.__showYear)})"
|
|
||||||
@@ -1,492 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button, DataTable, Input
|
|
||||||
from textual.containers import Grid
|
|
||||||
from textual.widgets._data_table import CellDoesNotExist
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
|
|
||||||
from .pattern_details_screen import PatternDetailsScreen
|
|
||||||
from .pattern_delete_screen import PatternDeleteScreen
|
|
||||||
|
|
||||||
from .show_controller import ShowController
|
|
||||||
from .pattern_controller import PatternController
|
|
||||||
from .tmdb_controller import TmdbController
|
|
||||||
from .shifted_season_controller import ShiftedSeasonController
|
|
||||||
|
|
||||||
from .show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
from .shifted_season_details_screen import ShiftedSeasonDetailsScreen
|
|
||||||
from .shifted_season_delete_screen import ShiftedSeasonDeleteScreen
|
|
||||||
|
|
||||||
from ffx.model.shifted_season import ShiftedSeason
|
|
||||||
|
|
||||||
from .helper import filterFilename
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class ShowDetailsScreen(Screen):
|
|
||||||
|
|
||||||
CSS = """
|
|
||||||
|
|
||||||
Grid {
|
|
||||||
grid-size: 5 16;
|
|
||||||
grid-rows: 2 2 2 2 2 2 2 2 2 2 2 9 2 9 2 2;
|
|
||||||
grid-columns: 30 30 30 30 30;
|
|
||||||
height: 100%;
|
|
||||||
width: 100%;
|
|
||||||
padding: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
Input {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
Button {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable {
|
|
||||||
column-span: 2;
|
|
||||||
min-height: 8;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--cursor {
|
|
||||||
background: darkorange;
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--header {
|
|
||||||
background: steelblue;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
#toplabel {
|
|
||||||
height: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.two {
|
|
||||||
column-span: 2;
|
|
||||||
}
|
|
||||||
.three {
|
|
||||||
column-span: 3;
|
|
||||||
}
|
|
||||||
.four {
|
|
||||||
column-span: 4;
|
|
||||||
}
|
|
||||||
.five {
|
|
||||||
column-span: 5;
|
|
||||||
}
|
|
||||||
|
|
||||||
.box {
|
|
||||||
height: 100%;
|
|
||||||
border: solid green;
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
BINDINGS = [
|
|
||||||
("a", "add_pattern", "Add Pattern"),
|
|
||||||
("e", "edit_pattern", "Edit Pattern"),
|
|
||||||
("r", "remove_pattern", "Remove Pattern"),
|
|
||||||
]
|
|
||||||
|
|
||||||
    def __init__(self, showId = None):
        """Set up controllers and load the show's descriptor.

        Args:
            showId: id of an existing show to edit, or None for a new show.
        """
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        # controllers for the entities this screen manages
        self.__sc = ShowController(context = self.context)
        self.__pc = PatternController(context = self.context)
        self.__tc = TmdbController()
        self.__ssc = ShiftedSeasonController(context = self.context)

        # None marks the "new show" mode throughout this screen
        self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None
|
|
||||||
|
|
||||||
|
|
||||||
def loadPatterns(self, show_id : int):
|
|
||||||
|
|
||||||
try:
|
|
||||||
s = self.Session()
|
|
||||||
q = s.query(Pattern).filter(Pattern.show_id == int(show_id))
|
|
||||||
|
|
||||||
return [{'id': int(p.id), 'pattern': str(p.pattern)} for p in q.all()]
|
|
||||||
|
|
||||||
except Exception as ex:
|
|
||||||
raise click.ClickException(f"ShowDetailsScreen.loadPatterns(): {repr(ex)}")
|
|
||||||
finally:
|
|
||||||
s.close()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def updateShiftedSeasons(self):
|
|
||||||
|
|
||||||
self.shiftedSeasonsTable.clear()
|
|
||||||
|
|
||||||
if not self.__showDescriptor is None:
|
|
||||||
|
|
||||||
showId = int(self.__showDescriptor.getId())
|
|
||||||
|
|
||||||
shiftedSeason: ShiftedSeason
|
|
||||||
for shiftedSeason in self.__ssc.getShiftedSeasonSiblings(showId=showId):
|
|
||||||
|
|
||||||
shiftedSeasonObj = shiftedSeason.getObj()
|
|
||||||
|
|
||||||
firstEpisode = shiftedSeasonObj['first_episode']
|
|
||||||
firstEpisodeStr = str(firstEpisode) if firstEpisode != -1 else ''
|
|
||||||
|
|
||||||
lastEpisode = shiftedSeasonObj['last_episode']
|
|
||||||
lastEpisodeStr = str(lastEpisode) if lastEpisode != -1 else ''
|
|
||||||
|
|
||||||
row = (shiftedSeasonObj['original_season'],
|
|
||||||
firstEpisodeStr,
|
|
||||||
lastEpisodeStr,
|
|
||||||
shiftedSeasonObj['season_offset'],
|
|
||||||
shiftedSeasonObj['episode_offset'])
|
|
||||||
|
|
||||||
self.shiftedSeasonsTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
    def on_mount(self):
        """Populate the inputs and both tables once the screen is mounted."""

        if self.__showDescriptor is not None:

            showId = int(self.__showDescriptor.getId())

            # fill the meta-data widgets from the loaded descriptor
            self.query_one("#id_static", Static).update(str(showId))
            self.query_one("#name_input", Input).value = str(self.__showDescriptor.getName())
            self.query_one("#year_input", Input).value = str(self.__showDescriptor.getYear())

            self.query_one("#index_season_digits_input", Input).value = str(self.__showDescriptor.getIndexSeasonDigits())
            self.query_one("#index_episode_digits_input", Input).value = str(self.__showDescriptor.getIndexEpisodeDigits())
            self.query_one("#indicator_season_digits_input", Input).value = str(self.__showDescriptor.getIndicatorSeasonDigits())
            self.query_one("#indicator_episode_digits_input", Input).value = str(self.__showDescriptor.getIndicatorEpisodeDigits())


            #raise click.ClickException(f"show_id {showId}")
            # fill the pattern table with the show's file patterns
            patternList = self.loadPatterns(showId)
            # raise click.ClickException(f"patternList {patternList}")
            for pattern in patternList:
                row = (pattern['pattern'],)
                self.patternTable.add_row(*map(str, row))

            self.updateShiftedSeasons()

        else:

            # new show: pre-fill the digit settings with their defaults
            self.query_one("#index_season_digits_input", Input).value = "2"
            self.query_one("#index_episode_digits_input", Input).value = "2"
            self.query_one("#indicator_season_digits_input", Input).value = "2"
            self.query_one("#indicator_episode_digits_input", Input).value = "2"
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedPatternDescriptor(self):
|
|
||||||
|
|
||||||
selectedPattern = {}
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
# Fetch the currently selected row when 'Enter' is pressed
|
|
||||||
#selected_row_index = self.table.cursor_row
|
|
||||||
row_key, col_key = self.patternTable.coordinate_to_cell_key(self.patternTable.cursor_coordinate)
|
|
||||||
|
|
||||||
if row_key is not None:
|
|
||||||
selected_row_data = self.patternTable.get_row(row_key)
|
|
||||||
|
|
||||||
selectedPattern['show_id'] = self.__showDescriptor.getId()
|
|
||||||
selectedPattern['pattern'] = str(selected_row_data[0])
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return selectedPattern
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedShiftedSeasonObjFromInput(self):
|
|
||||||
|
|
||||||
shiftedSeasonObj = {}
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
# Fetch the currently selected row when 'Enter' is pressed
|
|
||||||
#selected_row_index = self.table.cursor_row
|
|
||||||
row_key, col_key = self.shiftedSeasonsTable.coordinate_to_cell_key(self.shiftedSeasonsTable.cursor_coordinate)
|
|
||||||
|
|
||||||
if row_key is not None:
|
|
||||||
selected_row_data = self.shiftedSeasonsTable.get_row(row_key)
|
|
||||||
|
|
||||||
shiftedSeasonObj['original_season'] = int(selected_row_data[0])
|
|
||||||
shiftedSeasonObj['first_episode'] = int(selected_row_data[1]) if selected_row_data[1].isnumeric() else -1
|
|
||||||
shiftedSeasonObj['last_episode'] = int(selected_row_data[2]) if selected_row_data[2].isnumeric() else -1
|
|
||||||
shiftedSeasonObj['season_offset'] = int(selected_row_data[3]) if selected_row_data[3].isnumeric() else 0
|
|
||||||
shiftedSeasonObj['episode_offset'] = int(selected_row_data[4]) if selected_row_data[4].isnumeric() else 0
|
|
||||||
|
|
||||||
|
|
||||||
if self.__showDescriptor is not None:
|
|
||||||
|
|
||||||
showId = int(self.__showDescriptor.getId())
|
|
||||||
|
|
||||||
shiftedSeasonId = self.__ssc.findShiftedSeason(showId,
|
|
||||||
originalSeason=shiftedSeasonObj['original_season'],
|
|
||||||
firstEpisode=shiftedSeasonObj['first_episode'],
|
|
||||||
lastEpisode=shiftedSeasonObj['last_episode'])
|
|
||||||
if shiftedSeasonId is not None:
|
|
||||||
shiftedSeasonObj['id'] = shiftedSeasonId
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return shiftedSeasonObj
|
|
||||||
|
|
||||||
|
|
||||||
def action_add_pattern(self):
|
|
||||||
if not self.__showDescriptor is None:
|
|
||||||
self.app.push_screen(PatternDetailsScreen(showId = self.__showDescriptor.getId()), self.handle_add_pattern)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_add_pattern(self, screenResult):
|
|
||||||
|
|
||||||
pattern = (screenResult['pattern'],)
|
|
||||||
self.patternTable.add_row(*map(str, pattern))
|
|
||||||
|
|
||||||
|
|
||||||
def action_edit_pattern(self):
|
|
||||||
|
|
||||||
selectedPatternDescriptor = self.getSelectedPatternDescriptor()
|
|
||||||
|
|
||||||
if selectedPatternDescriptor:
|
|
||||||
|
|
||||||
selectedPatternId = self.__pc.findPattern(selectedPatternDescriptor)
|
|
||||||
|
|
||||||
if selectedPatternId is None:
|
|
||||||
raise click.ClickException(f"ShowDetailsScreen.action_edit_pattern(): Pattern to edit has no id")
|
|
||||||
|
|
||||||
self.app.push_screen(PatternDetailsScreen(patternId = selectedPatternId, showId = self.__showDescriptor.getId()), self.handle_edit_pattern) # <-
|
|
||||||
|
|
||||||
|
|
||||||
def handle_edit_pattern(self, screenResult):
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
row_key, col_key = self.patternTable.coordinate_to_cell_key(self.patternTable.cursor_coordinate)
|
|
||||||
self.patternTable.update_cell(row_key, self.column_key_pattern, screenResult['pattern'])
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def action_remove_pattern(self):
|
|
||||||
|
|
||||||
selectedPatternDescriptor = self.getSelectedPatternDescriptor()
|
|
||||||
|
|
||||||
if selectedPatternDescriptor:
|
|
||||||
|
|
||||||
selectedPatternId = self.__pc.findPattern(selectedPatternDescriptor)
|
|
||||||
|
|
||||||
if selectedPatternId is None:
|
|
||||||
raise click.ClickException(f"ShowDetailsScreen.action_remove_pattern(): Pattern to remove has no id")
|
|
||||||
|
|
||||||
self.app.push_screen(PatternDeleteScreen(patternId = selectedPatternId, showId = self.__showDescriptor.getId()), self.handle_remove_pattern)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_remove_pattern(self, pattern):
|
|
||||||
|
|
||||||
try:
|
|
||||||
row_key, col_key = self.patternTable.coordinate_to_cell_key(self.patternTable.cursor_coordinate)
|
|
||||||
self.patternTable.remove_row(row_key)
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def compose(self):
|
|
||||||
|
|
||||||
# Create the DataTable widget
|
|
||||||
self.patternTable = DataTable(classes="five")
|
|
||||||
|
|
||||||
# Define the columns with headers
|
|
||||||
self.column_key_pattern = self.patternTable.add_column("Pattern", width=150)
|
|
||||||
|
|
||||||
self.patternTable.cursor_type = 'row'
|
|
||||||
|
|
||||||
|
|
||||||
self.shiftedSeasonsTable = DataTable(classes="five")
|
|
||||||
|
|
||||||
self.column_key_original_season = self.shiftedSeasonsTable.add_column("Original Season", width=30)
|
|
||||||
self.column_key_first_episode = self.shiftedSeasonsTable.add_column("First Episode", width=30)
|
|
||||||
self.column_key_last_episode = self.shiftedSeasonsTable.add_column("Last Episode", width=30)
|
|
||||||
self.column_key_season_offset = self.shiftedSeasonsTable.add_column("Season Offset", width=30)
|
|
||||||
self.column_key_episode_offset = self.shiftedSeasonsTable.add_column("Episode Offset", width=30)
|
|
||||||
|
|
||||||
self.shiftedSeasonsTable.cursor_type = 'row'
|
|
||||||
|
|
||||||
|
|
||||||
yield Header()
|
|
||||||
|
|
||||||
with Grid():
|
|
||||||
|
|
||||||
# 1
|
|
||||||
yield Static("Show" if not self.__showDescriptor is None else "New Show", id="toplabel")
|
|
||||||
yield Button("Identify", id="identify_button")
|
|
||||||
yield Static(" ", classes="three")
|
|
||||||
|
|
||||||
# 2
|
|
||||||
yield Static("ID")
|
|
||||||
if not self.__showDescriptor is None:
|
|
||||||
yield Static("", id="id_static", classes="four")
|
|
||||||
else:
|
|
||||||
yield Input(type="integer", id="id_input", classes="four")
|
|
||||||
|
|
||||||
# 3
|
|
||||||
yield Static("Name")
|
|
||||||
yield Input(type="text", id="name_input", classes="four")
|
|
||||||
|
|
||||||
# 4
|
|
||||||
yield Static("Year")
|
|
||||||
yield Input(type="integer", id="year_input", classes="four")
|
|
||||||
|
|
||||||
#5
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
#6
|
|
||||||
yield Static("Index Season Digits")
|
|
||||||
yield Input(type="integer", id="index_season_digits_input", classes="four")
|
|
||||||
|
|
||||||
#7
|
|
||||||
yield Static("Index Episode Digits")
|
|
||||||
yield Input(type="integer", id="index_episode_digits_input", classes="four")
|
|
||||||
|
|
||||||
#8
|
|
||||||
yield Static("Indicator Season Digits")
|
|
||||||
yield Input(type="integer", id="indicator_season_digits_input", classes="four")
|
|
||||||
|
|
||||||
#9
|
|
||||||
yield Static("Indicator Edisode Digits")
|
|
||||||
yield Input(type="integer", id="indicator_episode_digits_input", classes="four")
|
|
||||||
|
|
||||||
# 10
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 11
|
|
||||||
yield Static("Shifted seasons", classes="two")
|
|
||||||
|
|
||||||
if self.__showDescriptor is not None:
|
|
||||||
yield Button("Add", id="button_add_shifted_season")
|
|
||||||
yield Button("Edit", id="button_edit_shifted_season")
|
|
||||||
yield Button("Delete", id="button_delete_shifted_season")
|
|
||||||
else:
|
|
||||||
yield Static(" ")
|
|
||||||
yield Static(" ")
|
|
||||||
yield Static(" ")
|
|
||||||
|
|
||||||
# 12
|
|
||||||
yield self.shiftedSeasonsTable
|
|
||||||
|
|
||||||
# 13
|
|
||||||
yield Static("File patterns", classes="five")
|
|
||||||
# 14
|
|
||||||
yield self.patternTable
|
|
||||||
|
|
||||||
# 15
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 16
|
|
||||||
yield Button("Save", id="save_button")
|
|
||||||
yield Button("Cancel", id="cancel_button")
|
|
||||||
|
|
||||||
|
|
||||||
yield Footer()
|
|
||||||
|
|
||||||
|
|
||||||
def getShowDescriptorFromInput(self) -> ShowDescriptor:
    """Collect the form values and build a ShowDescriptor from them.

    The id is mandatory: it is taken from the descriptor being edited
    (if any) or parsed from the id input field; a malformed id aborts
    with None. All other fields are optional — any integer field whose
    input does not parse is simply left out of the descriptor.
    """
    kwargs = {}

    # The id is the only mandatory field; abort on a malformed value.
    try:
        if self.__showDescriptor:
            kwargs[ShowDescriptor.ID_KEY] = int(self.__showDescriptor.getId())
        else:
            kwargs[ShowDescriptor.ID_KEY] = int(self.query_one("#id_input", Input).value)
    except ValueError:
        return None

    # The name is free text; str() of an Input value cannot raise
    # ValueError, so the old try/except around it was dropped.
    kwargs[ShowDescriptor.NAME_KEY] = str(self.query_one("#name_input", Input).value)

    # Optional integer fields: one (descriptor key, input selector) pair
    # per form field, replacing seven copy-pasted try/except blocks.
    optionalIntFields = [
        (ShowDescriptor.YEAR_KEY, "#year_input"),
        (ShowDescriptor.INDEX_SEASON_DIGITS_KEY, "#index_season_digits_input"),
        (ShowDescriptor.INDEX_EPISODE_DIGITS_KEY, "#index_episode_digits_input"),
        (ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY, "#indicator_season_digits_input"),
        (ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY, "#indicator_episode_digits_input"),
    ]
    for key, selector in optionalIntFields:
        try:
            kwargs[key] = int(self.query_one(selector, Input).value)
        except ValueError:
            pass

    return ShowDescriptor(**kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
# Event handler for button press
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch this screen's button presses to their respective actions."""

    if event.button.id == "save_button":

        showDescriptor = self.getShowDescriptorFromInput()

        # Only persist when the form parsed into a valid descriptor.
        if not showDescriptor is None:
            if self.__sc.updateShow(showDescriptor):
                self.dismiss(showDescriptor)
            else:
                #TODO: show a message to the user
                self.app.pop_screen()

    if event.button.id == "cancel_button":
        self.app.pop_screen()

    # Look the show up by its id and prefill the name/year inputs.
    if event.button.id == "identify_button":

        showDescriptor = self.getShowDescriptorFromInput()
        if not showDescriptor is None:
            showName, showYear = self.__tc.getShowNameAndYear(showDescriptor.getId())

            self.query_one("#name_input", Input).value = filterFilename(showName)
            self.query_one("#year_input", Input).value = str(showYear)

    # Shifted-season management is only available for an existing show.
    if event.button.id == "button_add_shifted_season":
        if not self.__showDescriptor is None:
            self.app.push_screen(ShiftedSeasonDetailsScreen(showId = self.__showDescriptor.getId()), self.handle_update_shifted_season)

    if event.button.id == "button_edit_shifted_season":
        selectedShiftedSeasonObj = self.getSelectedShiftedSeasonObjFromInput()
        # Only act when a row with an id is actually selected.
        if 'id' in selectedShiftedSeasonObj.keys():
            self.app.push_screen(ShiftedSeasonDetailsScreen(showId = self.__showDescriptor.getId(), shiftedSeasonId=selectedShiftedSeasonObj['id']), self.handle_update_shifted_season)

    if event.button.id == "button_delete_shifted_season":
        selectedShiftedSeasonObj = self.getSelectedShiftedSeasonObjFromInput()
        if 'id' in selectedShiftedSeasonObj.keys():
            self.app.push_screen(ShiftedSeasonDeleteScreen(showId = self.__showDescriptor.getId(), shiftedSeasonId=selectedShiftedSeasonObj['id']), self.handle_delete_shifted_season)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_update_shifted_season(self, screenResult):
    """Dismiss callback after add/edit: refresh the shifted-seasons table.

    screenResult is unused; the table is rebuilt from the database.
    """
    self.updateShiftedSeasons()
|
|
||||||
|
|
||||||
def handle_delete_shifted_season(self, screenResult):
    """Dismiss callback after delete: refresh the shifted-seasons table.

    screenResult is unused; the table is rebuilt from the database.
    """
    self.updateShiftedSeasons()
|
|
||||||
@@ -1,168 +0,0 @@
|
|||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, DataTable
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from .show_controller import ShowController
|
|
||||||
|
|
||||||
from .show_details_screen import ShowDetailsScreen
|
|
||||||
from .show_delete_screen import ShowDeleteScreen
|
|
||||||
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
from textual.widgets._data_table import CellDoesNotExist
|
|
||||||
|
|
||||||
|
|
||||||
class ShowsScreen(Screen):
    """Listing screen: shows all shows in a DataTable and binds keys for
    creating, editing and deleting entries via modal sub-screens."""

    # Screen-scoped stylesheet.
    CSS = """

    Grid {
        grid-size: 1;
        grid-rows: 2 auto;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    DataTable .datatable--cursor {
        background: darkorange;
        color: black;
    }

    DataTable .datatable--header {
        background: steelblue;
        color: white;
    }

    #top {
        height: 1;
    }


    #two {
        column-span: 2;
        row-span: 2;
        tint: magenta 40%;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    # Footer key bindings: (key, action name, description).
    BINDINGS = [
        ("e", "edit_show", "Edit Show"),
        ("n", "new_show", "New Show"),
        ("d", "delete_show", "Delete Show"),
    ]


    def __init__(self):
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        self.__sc = ShowController(context = self.context)


    def getSelectedShowId(self):
        """Return the id (first column) of the row under the cursor,
        or None when no cell is selected."""
        try:
            # Fetch the currently selected row when 'Enter' is pressed
            #selected_row_index = self.table.cursor_row
            row_key, col_key = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)

            if row_key is not None:
                selected_row_data = self.table.get_row(row_key)

                return selected_row_data[0]

        except CellDoesNotExist:
            return None


    def action_new_show(self):
        """Key binding 'n': open the details screen for a new show."""
        self.app.push_screen(ShowDetailsScreen(), self.handle_new_screen)

    def handle_new_screen(self, screenResult):
        """Dismiss callback: append the newly created show to the table.

        NOTE(review): assumes screenResult supports ['id'/'name'/'year']
        subscription — confirm against what ShowDetailsScreen dismisses with.
        """
        show = (screenResult['id'], screenResult['name'], screenResult['year'])
        self.table.add_row(*map(str, show))


    def action_edit_show(self):
        """Key binding 'e': open the details screen for the selected show."""
        selectedShowId = self.getSelectedShowId()

        if selectedShowId is not None:
            self.app.push_screen(ShowDetailsScreen(showId = selectedShowId), self.handle_edit_screen)


    def handle_edit_screen(self, showDescriptor: ShowDescriptor):
        """Dismiss callback: mirror the edited values into the cursor row.

        Assumes the cursor still sits on the row that was edited.
        """
        try:
            row_key, col_key = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)

            self.table.update_cell(row_key, self.column_key_name, showDescriptor.getName())
            self.table.update_cell(row_key, self.column_key_year, showDescriptor.getYear())

        except CellDoesNotExist:
            pass


    def action_delete_show(self):
        """Key binding 'd': open the delete confirmation for the selected show."""
        selectedShowId = self.getSelectedShowId()

        if selectedShowId is not None:
            self.app.push_screen(ShowDeleteScreen(showId = selectedShowId), self.handle_delete_show)


    def handle_delete_show(self, showDescriptor: ShowDescriptor):
        """Dismiss callback: drop the (cursor) row of the deleted show."""
        try:
            row_key, col_key = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)
            self.table.remove_row(row_key)

        except CellDoesNotExist:
            pass


    def on_mount(self) -> None:
        """Populate the table with all shows from the database."""
        for show in self.__sc.getAllShows():
            row = (int(show.id), show.name, show.year) # id normalized to int; whole row stringified on insert
            self.table.add_row(*map(str, row))


    def compose(self):
        """Build the screen: header, title, shows table, footer."""
        # Create the DataTable widget
        self.table = DataTable()

        # Define the columns with headers
        self.column_key_id = self.table.add_column("ID", width=10)
        self.column_key_name = self.table.add_column("Name", width=50)
        self.column_key_year = self.table.add_column("Year", width=10)

        self.table.cursor_type = 'row'

        yield Header()

        with Grid():

            yield Static("Shows")

            yield self.table

        f = Footer()
        f.description = "yolo" # NOTE(review): looks like a debug leftover — confirm Footer.description is meaningful

        yield f
|
|
||||||
@@ -1,220 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from ffx.model.track import Track
|
|
||||||
|
|
||||||
from ffx.model.media_tag import MediaTag
|
|
||||||
from ffx.model.track_tag import TrackTag
|
|
||||||
|
|
||||||
|
|
||||||
class TagController():
    """CRUD operations for media-level (pattern) and track-level tags.

    Every method opens a fresh session from the injected session factory,
    closes it in a finally block, and wraps database errors in
    click.ClickException. Sessions are created before the try block so
    the finally clause can never hit an unbound name.
    """

    def __init__(self, context):
        self.context = context
        self.Session = self.context['database']['session'] # convenience: session factory

    def updateMediaTag(self, patternId, tagKey, tagValue):
        """Create or update the media tag `tagKey` of pattern `patternId`.

        Returns the database id of the affected tag row.
        """
        s = self.Session()
        try:
            q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId),
                                         MediaTag.key == str(tagKey))
            tag = q.first()
            if tag:
                tag.value = str(tagValue)
            else:
                tag = MediaTag(pattern_id = int(patternId),
                               key = str(tagKey),
                               value = str(tagValue))
                s.add(tag)
            s.commit()

            return int(tag.id)

        except Exception as ex:
            # Fixed: the message previously named updateTrackTag().
            raise click.ClickException(f"TagController.updateMediaTag(): {repr(ex)}")
        finally:
            s.close()

    def updateTrackTag(self, trackId, tagKey, tagValue):
        """Create or update the track tag `tagKey` of track `trackId`.

        Returns the database id of the affected tag row.
        """
        s = self.Session()
        try:
            q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId),
                                         TrackTag.key == str(tagKey))
            tag = q.first()
            if tag:
                tag.value = str(tagValue)
            else:
                tag = TrackTag(track_id = int(trackId),
                               key = str(tagKey),
                               value = str(tagValue))
                s.add(tag)
            s.commit()

            return int(tag.id)

        except Exception as ex:
            raise click.ClickException(f"TagController.updateTrackTag(): {repr(ex)}")
        finally:
            s.close()

    def deleteMediaTagByKey(self, patternId, tagKey):
        """Delete the media tag `tagKey` of pattern `patternId`.

        Returns True when a tag was deleted, False when none existed.
        """
        s = self.Session()
        try:
            q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId),
                                         MediaTag.key == str(tagKey))
            # first() replaces the old count()+first() double round-trip.
            tag = q.first()
            if tag:
                s.delete(tag)
                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"TagController.deleteMediaTagByKey(): {repr(ex)}")
        finally:
            s.close()

    def deleteTrackTagByKey(self, trackId, tagKey):
        """Delete the track tag `tagKey` of track `trackId`.

        Returns True when a tag was deleted, False when none existed.
        """
        s = self.Session()
        try:
            q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId),
                                         TrackTag.key == str(tagKey))
            tag = q.first()
            if tag:
                s.delete(tag)
                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"TagController.deleteTrackTagByKey(): {repr(ex)}")
        finally:
            s.close()

    def findAllMediaTags(self, patternId) -> dict:
        """Return all media tags of pattern `patternId` as a {key: value} dict."""
        s = self.Session()
        try:
            q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId))
            # The comprehension already yields {} for an empty result set,
            # so the old count() pre-check was redundant.
            return {t.key: t.value for t in q.all()}

        except Exception as ex:
            raise click.ClickException(f"TagController.findAllMediaTags(): {repr(ex)}")
        finally:
            s.close()

    def findAllTrackTags(self, trackId) -> dict:
        """Return all track tags of track `trackId` as a {key: value} dict."""
        s = self.Session()
        try:
            q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId))
            return {t.key: t.value for t in q.all()}

        except Exception as ex:
            # Fixed: the message previously named findAllTracks().
            raise click.ClickException(f"TagController.findAllTrackTags(): {repr(ex)}")
        finally:
            s.close()

    def findMediaTag(self, trackId : int, trackKey : str) -> MediaTag:
        """Return the matching MediaTag, or None when it does not exist.

        Bug fix: the query previously selected the Track model while
        filtering on MediaTag columns, so it could never return a MediaTag.
        NOTE(review): the filter uses MediaTag.track_id while the other
        media-tag methods key on MediaTag.pattern_id — verify the model.
        """
        s = self.Session()
        try:
            q = s.query(MediaTag).filter(MediaTag.track_id == int(trackId), MediaTag.key == str(trackKey))
            # first() returns None on an empty result; no count() needed.
            return q.first()

        except Exception as ex:
            raise click.ClickException(f"TagController.findMediaTag(): {repr(ex)}")
        finally:
            s.close()

    def findTrackTag(self, trackId : int, tagKey : str) -> TrackTag:
        """Return the matching TrackTag, or None when it does not exist."""
        s = self.Session()
        try:
            q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId), TrackTag.key == str(tagKey))
            return q.first()

        except Exception as ex:
            raise click.ClickException(f"TagController.findTrackTag(): {repr(ex)}")
        finally:
            s.close()

    def deleteMediaTag(self, tagId) -> bool:
        """Delete the media tag with primary key `tagId`.

        Returns True when a tag was deleted, False when none existed.
        """
        s = self.Session()
        try:
            q = s.query(MediaTag).filter(MediaTag.id == int(tagId))
            tag = q.first()
            if tag:
                s.delete(tag)
                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"TagController.deleteMediaTag(): {repr(ex)}")
        finally:
            s.close()

    def deleteTrackTag(self, tagId : int) -> bool:
        """Delete the track tag with primary key `tagId`.

        Raises TypeError for a non-int tagId (kept for consistency with the
        original contract); returns True when a tag was deleted, False when
        none existed.
        """
        if type(tagId) is not int:
            raise TypeError('TagController.deleteTrackTag(): Argument tagId is required to be of type int')

        s = self.Session()
        try:
            q = s.query(TrackTag).filter(TrackTag.id == int(tagId))
            tag = q.first()
            if tag:
                s.delete(tag)
                s.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(f"TagController.deleteTrackTag(): {repr(ex)}")
        finally:
            s.close()
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
class TagDeleteScreen(Screen):
    """Confirmation dialog for deleting a single key/value tag.

    Dismisses with the (key, value) tuple when the user confirms; the
    caller performs the actual deletion.
    """

    CSS = """

    Grid {
        grid-size: 4 9;
        grid-rows: 2 2 2 2 2 2 2 2 2;
        grid-columns: 30 30 30 30;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }
    .four {
        column-span: 4;
    }
    .five {
        column-span: 5;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, key=None, value=None):
        super().__init__()
        # Tag shown for confirmation and handed back on dismissal.
        self.__key = key
        self.__value = value


    def on_mount(self):
        """Show the tag's key and value in the confirmation labels."""
        self.query_one("#keylabel", Static).update(str(self.__key))
        self.query_one("#valuelabel", Static).update(str(self.__value))


    def compose(self):
        """Build the dialog layout; numeric comments index the grid rows."""
        yield Header()

        with Grid():

            #1
            yield Static(f"Are you sure to delete this tag ?", id="toplabel", classes="five")

            #2
            yield Static("Key")
            yield Static(" ", id="keylabel", classes="four")

            #3
            yield Static("Value")
            yield Static(" ", id="valuelabel", classes="four")

            #4
            yield Static(" ", classes="five")

            #9
            yield Button("Delete", id="delete_button")
            yield Button("Cancel", id="cancel_button")

        yield Footer()


    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Dismiss with the (key, value) tuple on delete; just pop on cancel."""

        if event.button.id == "delete_button":

            tag = (self.__key, self.__value)
            self.dismiss(tag)

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
|
||||||
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button, Input
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
class TagDetailsScreen(Screen):
    """Edit dialog for a single key/value tag.

    Pre-fills the inputs from the constructor arguments and dismisses with
    a (key, value) tuple when the user saves.
    """

    CSS = """

    Grid {
        grid-size: 5 20;
        grid-rows: 2 2 2 2 2 3 2 2 2 2 2 6 2 2 6 2 2 2 2 6;
        grid-columns: 25 25 25 25 225;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    SelectionList {
        border: none;
        min-height: 6;
    }
    Select {
        border: none;
    }

    DataTable {
        min-height: 6;
    }

    DataTable .datatable--cursor {
        background: darkorange;
        color: black;
    }

    DataTable .datatable--header {
        background: steelblue;
        color: white;
    }

    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }

    .four {
        column-span: 4;
    }
    .five {
        column-span: 5;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, key=None, value=None):
        super().__init__()
        # Initial values used to pre-fill the inputs in on_mount.
        self.__key = key
        self.__value = value


    def on_mount(self):
        """Pre-fill the key/value inputs when initial values were given."""
        if self.__key is not None:
            self.query_one("#key_input", Input).value = str(self.__key)

        if self.__value is not None:
            self.query_one("#value_input", Input).value = str(self.__value)


    def compose(self):
        """Build the dialog layout; numeric comments index the grid rows."""
        yield Header()

        with Grid():

            # 8
            yield Static("Key")
            yield Input(id="key_input", classes="four")

            yield Static("Value")
            yield Input(id="value_input", classes="four")

            # 17
            yield Static(" ", classes="five")

            # 18
            yield Button("Save", id="save_button")
            yield Button("Cancel", id="cancel_button")

            # 19
            yield Static(" ", classes="five")

            # 20
            yield Static(" ", classes="five", id="messagestatic")

        yield Footer(id="footer")


    def getTagFromInput(self):
        """Return the current (key, value) input values as a tuple."""
        tagKey = self.query_one("#key_input", Input).value
        tagValue = self.query_one("#value_input", Input).value

        return (tagKey, tagValue)


    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Dismiss with the edited (key, value) on save; just pop on cancel."""

        # Check if the button pressed is the one we are interested in
        if event.button.id == "save_button":
            self.dismiss(self.getTagFromInput())

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
import os, sys, importlib, glob, inspect, itertools
|
|
||||||
|
|
||||||
class DispositionCombinator3():
    """Base class and registry for disposition-combinator plug-in modules.

    Sibling modules named disposition_combinator_3_<identifier>.py in this
    package are discovered on disk and resolved to their class objects.
    """

    IDENTIFIER = 'disposition3'

    def __init__(self, context = None):
        # Keep the full context and pull out the two loggers subclasses use.
        self._context = context
        self._logger = context['logger']
        self._reportLogger = context['report_logger']

    def getIdentifier(self):
        """Return the identifier of this combinator family."""
        return DispositionCombinator3.IDENTIFIER

    @staticmethod
    def list():
        """Return the identifiers of all sibling combinator modules."""
        moduleDir = os.path.dirname(__file__)
        identifiers = []
        for modulePath in glob.glob(f"{ moduleDir }/disposition_combinator_3_*.py", recursive = True):
            if modulePath == __file__:
                continue
            # Strip the 'disposition_combinator_3_' prefix (25 chars) and '.py'.
            identifiers.append(os.path.basename(modulePath)[25:-3])
        return identifiers

    @staticmethod
    def getClassReference(identifier):
        """Import the module for `identifier` and return its combinator class."""
        moduleName = f"ffx.test.disposition_combinator_3_{ identifier }"
        importlib.import_module(moduleName)
        for memberName, memberObj in inspect.getmembers(sys.modules[moduleName]):
            #HINT: Excluding DispositionCombination as it seems to be included by import (?)
            isCombinatorClass = (inspect.isclass(memberObj)
                                 and memberName != 'DispositionCombinator3'
                                 and memberName.startswith('DispositionCombinator3'))
            if isCombinatorClass:
                return memberObj

    @staticmethod
    def getAllClassReferences():
        """Resolve every identifier reported by list() to its class object."""
        references = []
        for identifier in DispositionCombinator3.list():
            references.append(DispositionCombinator3.getClassReference(identifier))
        return references
|
|
||||||
@@ -1,279 +0,0 @@
|
|||||||
import os, math, tempfile, click
|
|
||||||
|
|
||||||
|
|
||||||
from ffx.ffx_controller import FfxController
|
|
||||||
|
|
||||||
from ffx.process import executeProcess
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
|
|
||||||
from ffx.helper import dictCache
|
|
||||||
from ffx.configuration_controller import ConfigurationController
|
|
||||||
|
|
||||||
|
|
||||||
# Minimal three-cue subtitle fixture (start/end in seconds) consumed by the
# createAssFile/createSrtFile/createVttFile helpers below.
SHORT_SUBTITLE_SEQUENCE = [{'start': 1, 'end': 2, 'text': 'yolo'},
                           {'start': 3, 'end': 4, 'text': 'zolo'},
                           {'start': 5, 'end': 6, 'text': 'golo'}]
|
|
||||||
|
|
||||||
def getTimeString(hours: float = 0.0,
                  minutes: float = 0.0,
                  seconds: float = 0.0,
                  millis: float = 0.0,
                  format: str = ''):
    """Format a duration as a subtitle timestamp.

    All arguments are summed into one duration, which is then decomposed
    again into components. format='ass' yields H:MM:SS.CC (two-digit
    centiseconds, per the ASS format); any other value yields the
    srt/vtt-style HH:MM:SS.mmm.
    """
    duration = (hours * 3600.0
                + minutes * 60.0
                + seconds
                + millis / 1000.0)

    hours = math.floor(duration / 3600.0)
    remaining = duration - 3600.0 * hours

    minutes = math.floor(remaining / 60.0)
    remaining = remaining - 60.0 * minutes

    seconds = math.floor(remaining)
    remaining = remaining - seconds

    millis = math.floor(remaining * 1000)

    if format == 'ass':
        # Bug fix: ASS carries centiseconds (2 digits); the old code printed
        # raw milliseconds through :02d, producing 3-digit fractions.
        return f"{hours:01d}:{minutes:02d}:{seconds:02d}.{millis // 10:02d}"

    # srt & vtt
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}.{millis:03d}"
|
|
||||||
|
|
||||||
|
|
||||||
def createAssFile(entries: list, directory = None):
    """Write `entries` as an ASS subtitle file and return its path.

    Each entry is a dict with 'start'/'end' times in seconds and a 'text'
    line. The file is created in `directory` (system temp dir when None).

    Example of the produced layout:
      [Script Info]
      ScriptType: v4.00+
      ...
      [Events]
      Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
      Dialogue: 0,0:00:01.00,0:00:02.00,Default,,0,0,0,,yolo
    """
    # mkstemp creates the file atomically (the old mktemp is race-prone);
    # the returned descriptor is reopened for text writing.
    fd, tmpFileName = tempfile.mkstemp(suffix=".ass", dir = directory)

    with os.fdopen(fd, 'w') as tmpFile:

        tmpFile.write("[Script Info]\n")
        tmpFile.write("; Script generated by Ffx\n")
        tmpFile.write("ScriptType: v4.00+\n")
        tmpFile.write("PlayResX: 384\n")
        tmpFile.write("PlayResY: 288\n")
        tmpFile.write("ScaledBorderAndShadow: yes\n")
        tmpFile.write("YCbCr Matrix: None\n")
        tmpFile.write("\n")
        tmpFile.write("[V4+ Styles]\n")
        tmpFile.write("Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding\n")
        tmpFile.write("Style: Default,Arial,16,&Hffffff,&Hffffff,&H0,&H0,0,0,0,0,100,100,0,0,1,1,0,2,10,10,10,1\n")
        tmpFile.write("\n")
        tmpFile.write("[Events]\n")
        tmpFile.write("Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text\n")

        # One Dialogue line per entry (direct iteration instead of indexing).
        for entry in entries:
            tmpFile.write(f"Dialogue: 0,{getTimeString(seconds=entry['start'], format='ass')},{getTimeString(seconds=entry['end'], format='ass')},Default,,0,0,0,,{entry['text']}\n")

    return tmpFileName
|
|
||||||
|
|
||||||
def createSrtFile(entries: list, directory = None):
    """Write `entries` as an SRT subtitle file and return its path.

    Each entry is a dict with 'start'/'end' times in seconds and a 'text'
    line. Example of the produced layout:
      1
      00:00:00,000 --> 00:00:02,500
      Welcome to the Example Subtitle File!

      2
      00:00:03,000 --> 00:00:06,000
      This is a demonstration of SRT subtitles.
    """
    # mkstemp creates the file atomically (the old mktemp is race-prone).
    fd, tmpFileName = tempfile.mkstemp(suffix=".srt", dir = directory)

    with os.fdopen(fd, 'w') as tmpFile:

        # Bug fixes: SRT cues are numbered starting at 1 (the old code
        # started at 0), and SRT uses a comma as the decimal separator in
        # timestamps — both shown in the example above.
        for cueNumber, entry in enumerate(entries, start=1):
            startStamp = getTimeString(seconds=entry['start']).replace('.', ',')
            endStamp = getTimeString(seconds=entry['end']).replace('.', ',')

            tmpFile.write(f"{cueNumber}\n")
            tmpFile.write(f"{startStamp} --> {endStamp}\n")
            tmpFile.write(f"{entry['text']}\n\n")

    return tmpFileName
|
|
||||||
|
|
||||||
|
|
||||||
def createVttFile(entries: list, directory = None):
    """Write `entries` as a WebVTT subtitle file and return its path.

    Each entry is a dict with 'start'/'end' times in seconds and a 'text'
    line. Example of the produced layout:
      WEBVTT

      01:20:33.050 --> 01:20:35.050
      Yolo
    """
    # mkstemp creates the file atomically (the old mktemp is race-prone).
    fd, tmpFileName = tempfile.mkstemp(suffix=".vtt", dir = directory)

    with os.fdopen(fd, 'w') as tmpFile:

        tmpFile.write("WEBVTT\n")

        # One blank-line-separated cue per entry (direct iteration).
        for entry in entries:
            tmpFile.write("\n")
            tmpFile.write(f"{getTimeString(seconds=entry['start'])} --> {getTimeString(seconds=entry['end'])}\n")
            tmpFile.write(f"{entry['text']}\n")

    return tmpFileName
|
|
||||||
|
|
||||||
|
|
||||||
def createMediaTestFile(mediaDescriptor: MediaDescriptor,
                        directory: str = '',
                        baseName: str = 'media',
                        format: str = '',
                        extension: str = 'mkv',
                        sizeX: int = 1280,
                        sizeY: int = 720,
                        rate: int = 25,
                        length: int = 10,
                        logger = None):
    """Generate a synthetic media file matching `mediaDescriptor` via ffmpeg.

    For every track in the descriptor a synthetic source is used: black
    lavfi video, silent lavfi audio, or a generated VTT subtitle file.
    Identical source specs are shared through `generatorCache`, so each
    generator appears only once among the ffmpeg inputs. Global and
    per-stream tags are emitted as -metadata options. Returns the path of
    the written output file (errors are only logged, not raised).
    """

    # subtitleFilePath = createVttFile(SHORT_SUBTITLE_SEQUENCE)

    # commandTokens = FfxController.COMMAND_TOKENS
    commandTokens = ['ffmpeg', '-y']

    generatorCache = []    # source specs already added as ffmpeg inputs
    generatorTokens = []   # '-f lavfi -i ...' synthetic generator inputs
    mappingTokens = []     # '-map' options, one per output track
    importTokens = []      # '-i' options for file inputs (subtitles)
    metadataTokens = []    # '-metadata' options (global and per stream)

    # Global (container-level) tags.
    for mediaTagKey, mediaTagValue in mediaDescriptor.getTags().items():
        metadataTokens += ['-metadata:g', f"{mediaTagKey}={mediaTagValue}"]

    # Per-track-type output index, used in '-metadata:s:<type>:<n>'.
    subIndexCounter = {}

    # for trackDescriptor in mediaDescriptor.getAllTrackDescriptors():
    for trackDescriptor in mediaDescriptor.getTrackDescriptors():

        trackType = trackDescriptor.getType()

        if trackType == TrackType.VIDEO:

            # Reuse an existing generator input with an identical spec.
            cacheIndex, generatorCache = dictCache({'type': TrackType.VIDEO}, generatorCache)
            # click.echo(f"createMediaTestFile() cache index={cacheIndex} size={len(generatorCache)}")

            if cacheIndex == -1:
                generatorTokens += ['-f',
                                    'lavfi',
                                    '-i',
                                    f"color=size={sizeX}x{sizeY}:rate={rate}:color=black"]

            # Newly added generators sit at the end of the cache.
            sourceIndex = len(generatorCache) - 1 if cacheIndex == -1 else cacheIndex
            mappingTokens += ['-map', f"{sourceIndex}:v:0"]

            if not trackType in subIndexCounter.keys():
                subIndexCounter[trackType] = 0
            for mediaTagKey, mediaTagValue in trackDescriptor.getTags().items():
                metadataTokens += [f"-metadata:s:{trackType.indicator()}:{subIndexCounter[trackType]}",
                                   f"{mediaTagKey}={mediaTagValue}"]
            subIndexCounter[trackType] += 1

        if trackType == TrackType.AUDIO:

            audioLayout = 'stereo'

            cacheIndex, generatorCache = dictCache({'type': TrackType.AUDIO, 'layout': audioLayout}, generatorCache)
            # click.echo(f"createMediaTestFile() cache index={cacheIndex} size={len(generatorCache)}")

            # click.echo(f"generartorCache index={cacheIndex} len={len(generatorCache)}")
            if cacheIndex == -1:
                generatorTokens += ['-f',
                                    'lavfi',
                                    '-i',
                                    f"anullsrc=channel_layout={audioLayout}:sample_rate=44100"]

            sourceIndex = len(generatorCache) - 1 if cacheIndex == -1 else cacheIndex
            mappingTokens += ['-map', f"{sourceIndex}:a:0"]

            if not trackType in subIndexCounter.keys():
                subIndexCounter[trackType] = 0
            for mediaTagKey, mediaTagValue in trackDescriptor.getTags().items():
                metadataTokens += [f"-metadata:s:{trackType.indicator()}:{subIndexCounter[trackType]}",
                                   f"{mediaTagKey}={mediaTagValue}"]
            subIndexCounter[trackType] += 1

        if trackType == TrackType.SUBTITLE:

            cacheIndex, generatorCache = dictCache({'type': TrackType.SUBTITLE}, generatorCache)
            # click.echo(f"createMediaTestFile() cache index={cacheIndex} size={len(generatorCache)}")

            if cacheIndex == -1:
                # NOTE(review): '=' (not '+=') overwrites any earlier import
                # tokens — only a single subtitle source file is supported.
                importTokens = ['-i', createVttFile(SHORT_SUBTITLE_SEQUENCE, directory=directory if directory else None)]

            sourceIndex = len(generatorCache) - 1 if cacheIndex == -1 else cacheIndex
            mappingTokens += ['-map', f"{sourceIndex}:s:0"]

            if not trackType in subIndexCounter.keys():
                subIndexCounter[trackType] = 0
            for mediaTagKey, mediaTagValue in trackDescriptor.getTags().items():
                metadataTokens += [f"-metadata:s:{trackType.indicator()}:{subIndexCounter[trackType]}",
                                   f"{mediaTagKey}={mediaTagValue}"]
            subIndexCounter[trackType] += 1

    #TODO: Optimize too many runs
    ffxContext = {'config': ConfigurationController(), 'logger': logger}
    fc = FfxController(ffxContext, mediaDescriptor)

    commandTokens += (generatorTokens
                      + importTokens
                      + mappingTokens
                      + metadataTokens
                      + fc.generateDispositionTokens())

    # Limit the generated media to the requested duration in seconds.
    commandTokens += ['-t', str(length)]

    if format:
        commandTokens += ['-f', format]

    fileName = f"{baseName}.{extension}"

    if directory:
        outputPath = os.path.join(directory, fileName)
    else:
        outputPath = fileName

    commandTokens += [outputPath]

    ctx = {'logger': logger}

    out, err, rc = executeProcess(commandTokens, context = ctx)

    # Failures are logged only; the (possibly missing) path is still returned.
    if not logger is None:
        if out:
            logger.debug(f"createMediaTestFile(): Process output: {out}")
        if rc:
            logger.debug(f"createMediaTestFile(): Process returned ERROR {rc} ({err})")

    return outputPath
|
|
||||||
|
|
||||||
|
|
||||||
def createEmptyDirectory():
    """Creates a fresh, empty temporary directory and returns its path.

    The caller is responsible for removing the directory when done."""
    directoryPath = tempfile.mkdtemp()
    return directoryPath
|
|
||||||
|
|
||||||
def createEmptyFile(suffix=None):
    """Creates an empty temporary file via tempfile.mkstemp().

    Returns the (os-level file descriptor, absolute path) tuple produced by
    mkstemp(); the caller must close the descriptor and remove the file."""
    descriptorAndPath = tempfile.mkstemp(suffix=suffix)
    return descriptorAndPath
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
import os, sys, importlib, glob, inspect, itertools
|
|
||||||
|
|
||||||
class LabelCombinator():
    """Base class for label combinators.

    Discovers sibling ``label_combinator_*.py`` modules on the file system
    and resolves them to their combinator classes."""

    IDENTIFIER = 'label'
    # Module filename prefix; list() strips it (and '.py') to get identifiers.
    PREFIX = 'label_combinator_'

    LABEL = 'ffx'

    def __init__(self, context = None):
        """Stores the context and pulls out the loggers.

        BUGFIX: the declared default (context=None) previously crashed with a
        TypeError when subscripting None; the loggers now fall back to None."""
        self._context = context
        self._logger = context['logger'] if context is not None else None
        self._reportLogger = context['report_logger'] if context is not None else None

    def getIdentifier(self):
        """Returns the combinator identifier string ('label')."""
        return LabelCombinator.IDENTIFIER

    @staticmethod
    def list():
        """Lists the identifiers of all label_combinator_* modules located
        next to this file (prefix and '.py' suffix stripped)."""
        basePath = os.path.dirname(__file__)
        return [os.path.basename(p)[len(LabelCombinator.PREFIX):-3]
                for p
                in glob.glob(f"{ basePath }/{LabelCombinator.PREFIX}*.py", recursive = True)
                if p != __file__]

    @staticmethod
    def getClassReference(identifier):
        """Imports ffx.test.label_combinator_<identifier> and returns the first
        class whose name starts with (but is not exactly) 'LabelCombinator'.

        Returns None implicitly when no matching class is found."""
        importlib.import_module(f"ffx.test.{LabelCombinator.PREFIX}{ identifier }")
        for name, obj in inspect.getmembers(sys.modules[f"ffx.test.{LabelCombinator.PREFIX}{ identifier }"]):
            #HINT: Excluding the LabelCombinator base class as it seems to be included by import (?)
            if inspect.isclass(obj) and name != 'LabelCombinator' and name.startswith('LabelCombinator'):
                return obj

    @staticmethod
    def getAllClassReferences():
        """Resolves every discovered identifier to its class reference."""
        return [LabelCombinator.getClassReference(i) for i in LabelCombinator.list()]
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import os, sys, importlib, glob, inspect, itertools
|
|
||||||
|
|
||||||
class MediaCombinator():
    """Base class for media combinators.

    Discovers sibling ``media_combinator_*.py`` modules on the file system
    and resolves them to their combinator classes."""

    IDENTIFIER = 'media'
    # Module filename prefix; replaces the previous magic slice offset 17 in
    # list() so prefix and slice cannot drift apart.
    PREFIX = 'media_combinator_'

    def __init__(self, context = None):
        """Stores the context and pulls out the loggers.

        BUGFIX: the declared default (context=None) previously crashed with a
        TypeError when subscripting None; the loggers now fall back to None."""
        self._context = context
        self._logger = context['logger'] if context is not None else None
        self._reportLogger = context['report_logger'] if context is not None else None

    def getIdentifier(self):
        """Returns the combinator identifier string ('media')."""
        return MediaCombinator.IDENTIFIER

    @staticmethod
    def list():
        """Lists the identifiers of all media_combinator_* modules located
        next to this file (prefix and '.py' suffix stripped)."""
        basePath = os.path.dirname(__file__)
        return [os.path.basename(p)[len(MediaCombinator.PREFIX):-3]
                for p
                in glob.glob(f"{ basePath }/{MediaCombinator.PREFIX}*.py", recursive = True)
                if p != __file__]

    @staticmethod
    def getClassReference(identifier):
        """Imports ffx.test.media_combinator_<identifier> and returns the first
        class whose name starts with (but is not exactly) 'MediaCombinator'.

        Returns None implicitly when no matching class is found."""
        importlib.import_module(f"ffx.test.{MediaCombinator.PREFIX}{ identifier }")
        for name, obj in inspect.getmembers(sys.modules[f"ffx.test.{MediaCombinator.PREFIX}{ identifier }"]):
            #HINT: Excluding the MediaCombinator base class as it seems to be included by import (?)
            if inspect.isclass(obj) and name != 'MediaCombinator' and name.startswith('MediaCombinator'):
                return obj

    @staticmethod
    def getAllClassReferences():
        """Resolves every discovered identifier to its class reference."""
        return [MediaCombinator.getClassReference(i) for i in MediaCombinator.list()]
|
|
||||||
@@ -1,166 +0,0 @@
|
|||||||
import os, sys, click
|
|
||||||
|
|
||||||
from .scenario import Scenario
|
|
||||||
|
|
||||||
from ffx.test.helper import createMediaTestFile
|
|
||||||
from ffx.process import executeProcess
|
|
||||||
|
|
||||||
from ffx.file_properties import FileProperties
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
|
|
||||||
from ffx.test.media_combinator import MediaCombinator
|
|
||||||
|
|
||||||
|
|
||||||
class Scenario2(Scenario):
    """Creating file VAa, h264/aac/aac
    Converting to VaA, vp9/opus/opus
    No tmdb, default parameters"""

    # Container extension of the generated source file / the expected result.
    TEST_FILE_EXTENSION = 'mkv'
    EXPECTED_FILE_EXTENSION = 'webm'


    def __init__(self, context):
        """Forces tmdb/pattern lookups off before delegating to Scenario."""

        context['use_tmdb'] = False
        context['use_pattern'] = False

        super().__init__(context)

    def getScenario(self):
        """Returns the scenario number, i.e. the class name minus the
        8-character 'Scenario' prefix."""
        return self.__class__.__name__[8:]


    def job(self, yieldObj: dict):
        """Runs one test variant: builds a source media file, invokes the ffx
        'convert' CLI on it, and evaluates the result with the assert
        selectors/functions supplied in yieldObj['target'].

        Pass/fail is tallied into the shared context counters; an
        AssertionError only marks the variant failed, it never propagates."""

        testContext = self._context.copy()

        targetYieldObj = yieldObj['target']
        # presetYieldObj = yieldObj['preset'] # not used here

        # NOTE(review): 'identifier' is read but never used below.
        identifier = targetYieldObj['identifier']
        variantList = targetYieldObj['variants']

        variantIdentifier = '-'.join(variantList)
        variantLabel = f"{self.__class__.__name__} Variant {variantIdentifier}"

        sourceMediaDescriptor: MediaDescriptor = targetYieldObj['payload']

        assertSelectorList: list = targetYieldObj['assertSelectors']
        assertFuncList = targetYieldObj['assertFuncs']
        shouldFail = targetYieldObj['shouldFail']


        # Skip variants not matching the user's --test-variant filter.
        if self._context['test_variant'] and not variantIdentifier.startswith(self._context['test_variant']):
            return

        # Stop once the configured maximum number of tests has been run.
        if (self._context['test_limit'] and (self._context['test_passed_counter'] + self._context['test_failed_counter'])
                >= self._context['test_limit']):
            return

        self._logger.debug(f"Running Job: {variantLabel}")


        # Phase 1: Setup source files

        self.clearTestDirectory()
        mediaFilePath = createMediaTestFile(mediaDescriptor=sourceMediaDescriptor,
                                            directory=self._testDirectory,
                                            logger=self._logger,
                                            length = 2)


        # Phase 2: Run ffx

        # Run the ffx executable through the current interpreter so the test
        # exercises the same environment.
        commandSequence = [sys.executable,
                           self._ffxExecutablePath]

        if self._context['verbosity']:
            commandSequence += ['--verbose',
                                str(self._context['verbosity'])]

        commandSequence += ['convert',
                            mediaFilePath,
                            '--no-prompt',
                            '--no-signature']

        out, err, rc = executeProcess(commandSequence, directory = self._testDirectory, context = self._context)

        # assumes context['verbosity'] is always an int here -- TODO confirm
        if out and self._context['verbosity'] >= 9:
            self._logger.debug(f"{variantLabel}: Process output: {out}")
        if rc:
            self._logger.debug(f"{variantLabel}: Process returned ERROR {rc} ({err})")


        # Phase 3: Evaluate results

        # Collected for the debug log only; assertions below use 'media.webm'.
        resultFilenames = [rf for rf in self.getFilenamesInTestDirectory() if rf.endswith(f".{Scenario2.EXPECTED_FILE_EXTENSION}")]

        self._logger.debug(f"{variantLabel}: Result filenames: {resultFilenames}")

        try:

            # Non-zero return code means the conversion failed.
            jobFailed = bool(rc)
            self._logger.debug(f"{variantLabel}: Should fail: {shouldFail} / actually failed: {jobFailed}")

            assert (jobFailed == shouldFail
                    ), f"Process {'failed' if jobFailed else 'did not fail'}"


            if not jobFailed:

                resultFile = os.path.join(self._testDirectory, 'media.webm')

                assert (os.path.isfile(resultFile)
                        ), f"Result file 'media.webm' in path '{self._testDirectory}' wasn't created"

                # Probe the produced file to compare its tracks against the
                # expectations encoded in the assert selectors.
                resultFileProperties = FileProperties(testContext, resultFile)
                resultMediaDescriptor = resultFileProperties.getMediaDescriptor()

                # resultMediaTracks = resultMediaDescriptor.getAllTrackDescriptors()
                resultMediaTracks = resultMediaDescriptor.getTrackDescriptors()

                # Selectors, funcs and variants are parallel lists; walk them
                # by index so each assert sees its matching variant string.
                for assertIndex in range(len(assertSelectorList)):

                    assertSelector = assertSelectorList[assertIndex]
                    assertFunc = assertFuncList[assertIndex]
                    assertVariant = variantList[assertIndex]

                    # 'M': media-level check -- each character of the variant
                    # string must match the track type at the same position.
                    if assertSelector == 'M':
                        assertFunc()
                        for variantIndex in range(len(assertVariant)):
                            assert (assertVariant[variantIndex].lower() == resultMediaTracks[variantIndex].getType().indicator()
                                    ), f"Stream #{variantIndex} is not of type {resultMediaTracks[variantIndex].getType().label()}"

                    # 'AD'/'AT': audio disposition/tag checks on audio tracks.
                    elif assertSelector == 'AD' or assertSelector == 'AT':
                        assertFunc({'tracks': resultMediaDescriptor.getAudioTracks()})

                    # 'SD'/'ST': subtitle disposition/tag checks.
                    elif assertSelector == 'SD' or assertSelector == 'ST':
                        assertFunc({'tracks': resultMediaDescriptor.getSubtitleTracks()})

                    # 'J': plain job-level assert function, no arguments.
                    elif type(assertSelector) is str:
                        if assertSelector == 'J':
                            assertFunc()


            self._context['test_passed_counter'] += 1
            self._reportLogger.info(f"{variantLabel}: Test passed")

        except AssertionError as ae:

            # A failed assertion fails this variant only; counting continues.
            self._context['test_failed_counter'] += 1
            self._reportLogger.error(f"{variantLabel}: Test FAILED ({ae})")


    def run(self):
        """Iterates all media combinator classes and runs job() for every
        yield object each combinator produces."""
        MC_list = MediaCombinator.getAllClassReferences()
        for MC in MC_list:
            self._logger.debug(f"MC={MC.__name__}")
            mc = MC(context = self._context)
            for y in mc.getYield():
                self.job(y)
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import os, sys, importlib, glob, inspect, itertools
|
|
||||||
|
|
||||||
class TrackTagCombinator2():
    """Base class for two-track-tag combinators.

    Discovers sibling ``track_tag_combinator_2_*.py`` modules on the file
    system and resolves them to their combinator classes."""

    IDENTIFIER = 'trackTag2'
    # Module filename prefix; replaces the previous magic slice offset 23 in
    # list() so prefix and slice cannot drift apart.
    PREFIX = 'track_tag_combinator_2_'

    def __init__(self, context = None):
        """Stores the context and pulls out the loggers.

        BUGFIX: the declared default (context=None) previously crashed with a
        TypeError when subscripting None; the loggers now fall back to None."""
        self._context = context
        self._logger = context['logger'] if context is not None else None
        self._reportLogger = context['report_logger'] if context is not None else None

    def getIdentifier(self):
        """Returns the combinator identifier string ('trackTag2')."""
        return TrackTagCombinator2.IDENTIFIER

    @staticmethod
    def list():
        """Lists the identifiers of all track_tag_combinator_2_* modules
        located next to this file (prefix and '.py' suffix stripped)."""
        basePath = os.path.dirname(__file__)
        return [os.path.basename(p)[len(TrackTagCombinator2.PREFIX):-3]
                for p
                in glob.glob(f"{ basePath }/{TrackTagCombinator2.PREFIX}*.py", recursive = True)
                if p != __file__]

    @staticmethod
    def getClassReference(identifier):
        """Imports ffx.test.track_tag_combinator_2_<identifier> and returns the
        first class whose name starts with (but is not exactly)
        'TrackTagCombinator2'. Returns None implicitly when no match exists."""
        importlib.import_module(f"ffx.test.{TrackTagCombinator2.PREFIX}{ identifier }")
        for name, obj in inspect.getmembers(sys.modules[f"ffx.test.{TrackTagCombinator2.PREFIX}{ identifier }"]):
            #HINT: Excluding the TrackTagCombinator2 base class as it seems to be included by import (?)
            if inspect.isclass(obj) and name != 'TrackTagCombinator2' and name.startswith('TrackTagCombinator2'):
                return obj

    @staticmethod
    def getAllClassReferences():
        """Resolves every discovered identifier to its class reference."""
        return [TrackTagCombinator2.getClassReference(i) for i in TrackTagCombinator2.list()]
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import os, sys, importlib, glob, inspect, itertools
|
|
||||||
|
|
||||||
class TrackTagCombinator3():
    """Base class for three-track-tag combinators.

    Discovers sibling ``track_tag_combinator_3_*.py`` modules on the file
    system and resolves them to their combinator classes."""

    IDENTIFIER = 'trackTag3'
    # Module filename prefix; replaces the previous magic slice offset 23 in
    # list() so prefix and slice cannot drift apart.
    PREFIX = 'track_tag_combinator_3_'

    def __init__(self, context = None):
        """Stores the context and pulls out the loggers.

        BUGFIX: the declared default (context=None) previously crashed with a
        TypeError when subscripting None; the loggers now fall back to None."""
        self._context = context
        self._logger = context['logger'] if context is not None else None
        self._reportLogger = context['report_logger'] if context is not None else None

    def getIdentifier(self):
        """Returns the combinator identifier string ('trackTag3')."""
        return TrackTagCombinator3.IDENTIFIER

    @staticmethod
    def list():
        """Lists the identifiers of all track_tag_combinator_3_* modules
        located next to this file (prefix and '.py' suffix stripped)."""
        basePath = os.path.dirname(__file__)
        return [os.path.basename(p)[len(TrackTagCombinator3.PREFIX):-3]
                for p
                in glob.glob(f"{ basePath }/{TrackTagCombinator3.PREFIX}*.py", recursive = True)
                if p != __file__]

    @staticmethod
    def getClassReference(identifier):
        """Imports ffx.test.track_tag_combinator_3_<identifier> and returns the
        first class whose name starts with (but is not exactly)
        'TrackTagCombinator3'. Returns None implicitly when no match exists."""
        importlib.import_module(f"ffx.test.{TrackTagCombinator3.PREFIX}{ identifier }")
        for name, obj in inspect.getmembers(sys.modules[f"ffx.test.{TrackTagCombinator3.PREFIX}{ identifier }"]):
            #HINT: Excluding the TrackTagCombinator3 base class as it seems to be included by import (?)
            if inspect.isclass(obj) and name != 'TrackTagCombinator3' and name.startswith('TrackTagCombinator3'):
                return obj

    @staticmethod
    def getAllClassReferences():
        """Resolves every discovered identifier to its class reference."""
        return [TrackTagCombinator3.getClassReference(i) for i in TrackTagCombinator3.list()]
|
|
||||||
@@ -1,134 +0,0 @@
|
|||||||
import os, requests, time, logging
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
|
|
||||||
class TMDB_REQUEST_EXCEPTION(Exception):
    """Raised when a TMDB query returns an unsuccessful JSON payload."""

    def __init__(self, statusCode, statusMessage):
        # Build the human-readable message once and hand it to Exception,
        # so str()/args behave like any standard exception.
        super().__init__(
            f"TMDB query failed with status code {statusCode}: {statusMessage}"
        )
|
|
||||||
|
|
||||||
class TMDB_API_KEY_NOT_PRESENT_EXCEPTION(Exception):
    """Raised when the TMDB_API_KEY environment variable is not set."""

    # Fixed message returned by str(); kept as a class constant.
    _MESSAGE = 'TMDB api key is not available, please set environment variable TMDB_API_KEY'

    def __str__(self):
        return self._MESSAGE
|
|
||||||
|
|
||||||
class TMDB_EXCESSIVE_USAGE_EXCEPTION(Exception):
    """Raised after the TMDB rate limit was hit more times than allowed."""

    # Fixed message returned by str(); kept as a class constant.
    _MESSAGE = 'Rate limit was triggered too often'

    def __str__(self):
        return self._MESSAGE
|
|
||||||
|
|
||||||
|
|
||||||
class TmdbController():
    """Thin client for the TMDB v3 REST API with basic rate-limit retry.

    Requires the TMDB_API_KEY environment variable; raises
    TMDB_API_KEY_NOT_PRESENT_EXCEPTION if it is missing."""

    DEFAULT_LANGUAGE = 'de-DE'

    RATE_LIMIT_WAIT_SECONDS = 10
    RATE_LIMIT_RETRIES = 3
    # BUGFIX: requests.get() had no timeout and could block forever on a
    # stalled connection; every request is now capped at this many seconds.
    REQUEST_TIMEOUT_SECONDS = 30

    def __init__(self, context = None):
        self.__context = context

        if context is None:
            # Standalone use: attach a no-op handler so logging calls are safe.
            self.__logger = logging.getLogger('FFX')
            self.__logger.addHandler(logging.NullHandler())
        else:
            self.__logger = context['logger']

        self.__tmdbApiKey = os.environ.get('TMDB_API_KEY', None)
        if self.__tmdbApiKey is None:
            raise TMDB_API_KEY_NOT_PRESENT_EXCEPTION

        self.tmdbLanguage = TmdbController.DEFAULT_LANGUAGE


    def getTmdbRequest(self, tmdbUrl):
        """Performs a GET request against tmdbUrl and returns the parsed JSON.

        Retries up to RATE_LIMIT_RETRIES times on HTTP 429 (rate limit),
        sleeping RATE_LIMIT_WAIT_SECONDS between attempts.

        Raises:
            TMDB_EXCESSIVE_USAGE_EXCEPTION: retries exhausted on HTTP 429.
            TMDB_REQUEST_EXCEPTION: TMDB reported success=False in the payload.
        """
        retries = TmdbController.RATE_LIMIT_RETRIES
        while True:
            response = requests.get(tmdbUrl, timeout = TmdbController.REQUEST_TIMEOUT_SECONDS)
            if response.status_code == 429:
                if not retries:
                    raise TMDB_EXCESSIVE_USAGE_EXCEPTION()
                self.__logger.warning('TMDB Rate limit (status_code 429)')
                time.sleep(TmdbController.RATE_LIMIT_WAIT_SECONDS)
                retries -= 1
            else:
                jsonResult = response.json()
                # TMDB only includes 'success' on error payloads.
                if ('success' in jsonResult.keys()
                        and not jsonResult['success']):
                    raise TMDB_REQUEST_EXCEPTION(jsonResult['status_code'], jsonResult['status_message'])
                return jsonResult


    def queryShow(self, showId):
        """Queries the TV show with the given TMDB id.

        First level keys in the response object:
        adult bool
        backdrop_path str
        created_by []
        episode_run_time []
        first_air_date str YYYY-MM-DD
        genres []
        homepage str
        id int
        in_production bool
        languages []
        last_air_date str YYYY-MM-DD
        last_episode_to_air {}
        name str
        next_episode_to_air null
        networks []
        number_of_episodes int
        number_of_seasons int
        origin_country []
        original_language str
        original_name str
        overview str
        popularity float
        poster_path str
        production_companies []
        production_countries []
        seasons []
        spoken_languages []
        status str
        tagline str
        type str
        vote_average float
        vote_count int
        """

        urlParams = f"?language={self.tmdbLanguage}&api_key={self.__tmdbApiKey}"

        tmdbUrl = f"https://api.themoviedb.org/3/tv/{showId}{urlParams}"

        return self.getTmdbRequest(tmdbUrl)


    def getShowNameAndYear(self, showId: int):
        """Returns (name, first-air year) for the show with the given id."""

        showResult = self.queryShow(int(showId))
        firstAirDate = datetime.strptime(showResult['first_air_date'], '%Y-%m-%d')

        return str(showResult['name']), int(firstAirDate.year)


    def queryEpisode(self, showId, season, episode):
        """Queries one episode of a show.

        First level keys in the response object:
        air_date str 'YYY-MM-DD'
        crew []
        episode_number int
        guest_stars []
        name str
        overview str
        id int
        production_code
        runtime int
        season_number int
        still_path str '/filename.jpg'
        vote_average float
        vote_count int
        """

        urlParams = f"?language={self.tmdbLanguage}&api_key={self.__tmdbApiKey}"

        tmdbUrl = f"https://api.themoviedb.org/3/tv/{showId}/season/{season}/episode/{episode}{urlParams}"

        return self.getTmdbRequest(tmdbUrl)
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
|
|
||||||
|
|
||||||
class TrackCodec(Enum):
    """Enumeration of known track codecs.

    Each member's value maps:
      identifier -- the ffprobe/ffmpeg codec name,
      format     -- the ffmpeg output format name (None if not applicable),
      extension  -- the file extension used when extracting the stream,
      label      -- the human-readable display name.
    """

    H265 = {'identifier': 'hevc', 'format': 'h265', 'extension': 'h265' ,'label': 'H.265'}
    H264 = {'identifier': 'h264', 'format': 'h264', 'extension': 'h264' ,'label': 'H.264'}
    MPEG4 = {'identifier': 'mpeg4', 'format': 'm4v', 'extension': 'm4v' ,'label': 'MPEG-4'}
    MPEG2 = {'identifier': 'mpeg2video', 'format': 'mpeg2video', 'extension': 'mpg' ,'label': 'MPEG-2'}

    AAC = {'identifier': 'aac', 'format': None, 'extension': 'aac' , 'label': 'AAC'}
    AC3 = {'identifier': 'ac3', 'format': 'ac3', 'extension': 'ac3' , 'label': 'AC3'}
    EAC3 = {'identifier': 'eac3', 'format': 'eac3', 'extension': 'eac3' , 'label': 'EAC3'}
    DTS = {'identifier': 'dts', 'format': 'dts', 'extension': 'dts' , 'label': 'DTS'}
    MP3 = {'identifier': 'mp3', 'format': 'mp3', 'extension': 'mp3' , 'label': 'MP3'}

    PCM_S24LE = {'identifier': 'pcm_s24le', 'format': 's32', 'extension': 'raw' , 'label': 'PCM_S24LE'}

    SRT = {'identifier': 'subrip', 'format': 'srt', 'extension': 'srt' , 'label': 'SRT'}
    ASS = {'identifier': 'ass', 'format': 'ass', 'extension': 'ass' , 'label': 'ASS'}
    PGS = {'identifier': 'hdmv_pgs_subtitle', 'format': 'sup', 'extension': 'sup' , 'label': 'PGS'}
    VOBSUB = {'identifier': 'dvd_subtitle', 'format': None, 'extension': 'mkv' , 'label': 'VobSub'}

    PNG = {'identifier': 'png', 'format': None, 'extension': 'png' , 'label': 'PNG'}

    UNKNOWN = {'identifier': 'unknown', 'format': None, 'extension': None, 'label': 'UNKNOWN'}


    def identifier(self):
        """Returns the codec identifier"""
        return str(self.value['identifier'])

    def label(self):
        """Returns the codec as string"""
        return str(self.value['label'])

    def format(self):
        """Returns the ffmpeg format name (may be None)"""
        return self.value['format']

    def extension(self):
        """Returns the corresponding extension"""
        return str(self.value['extension'])

    @staticmethod
    def identify(identifier: str):
        """Resolves a codec identifier (e.g. 'h264') to its member;
        falls back to TrackCodec.UNKNOWN."""
        clist = [c for c in TrackCodec if c.value['identifier'] == str(identifier)]
        if clist:
            return clist[0]
        else:
            return TrackCodec.UNKNOWN

    @staticmethod
    def fromLabel(label: str):
        """Resolves a human-readable label (e.g. 'H.264') to its member;
        falls back to TrackCodec.UNKNOWN.

        BUGFIX: previously compared against value['identifier'], which
        byte-duplicated identify() and never matched real labels such as
        'H.264' or 'VobSub'; it now matches value['label'] as the name says."""
        clist = [c for c in TrackCodec if c.value['label'] == str(label)]
        if clist:
            return clist[0]
        else:
            return TrackCodec.UNKNOWN
|
|
||||||
@@ -1,278 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from ffx.model.track import Track
|
|
||||||
|
|
||||||
from .track_type import TrackType
|
|
||||||
|
|
||||||
from .track_disposition import TrackDisposition
|
|
||||||
|
|
||||||
from .track_type import TrackType
|
|
||||||
|
|
||||||
from ffx.model.track_tag import TrackTag
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
|
|
||||||
class TrackController():
    """Database-facing controller for Track rows and their TrackTag children.

    Wraps a SQLAlchemy session factory from the context; every public method
    opens its own session, converts unexpected errors to click.ClickException
    and always closes the session.

    FIXES in this revision:
      * session creation moved out of the try blocks -- a failing Session()
        previously caused a NameError on s.close() in finally, masking the
        real error;
      * setDispositionState's diagnostics previously named the wrong methods
        ('setTrackDisposition' / 'updateTrack')."""

    def __init__(self, context):
        """Reads session factory and metadata filter configuration."""

        self.context = context
        self.Session = self.context['database']['session'] # convenience

        self.__configurationData = self.context['config'].getData()

        metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {}

        # Global metadata key filters and the per-stream remove/ignore lists;
        # all default to empty when not configured.
        self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {}
        self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else []
        self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else []
        self.__removeTrackKeys = (metadataConfiguration['streams']['remove']
                                  if 'streams' in metadataConfiguration.keys()
                                  and 'remove' in metadataConfiguration['streams'].keys() else [])
        self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore']
                                  if 'streams' in metadataConfiguration.keys()
                                  and 'ignore' in metadataConfiguration['streams'].keys() else [])


    def addTrack(self, trackDescriptor : TrackDescriptor, patternId = None):
        """Persists a new Track (and its non-filtered tags) for a pattern.

        patternId overrides the descriptor's pattern id when given."""

        # option to override pattern id in case track descriptor has not set it
        patId = int(trackDescriptor.getPatternId() if patternId is None else patternId)

        s = self.Session()
        try:
            track = Track(pattern_id = patId,
                          track_type = int(trackDescriptor.getType().index()),
                          codec_name = str(trackDescriptor.getCodec().identifier()),
                          index = int(trackDescriptor.getIndex()),
                          source_index = int(trackDescriptor.getSourceIndex()),
                          disposition_flags = int(TrackDisposition.toFlags(trackDescriptor.getDispositionSet())),
                          audio_layout = trackDescriptor.getAudioLayout().index())

            s.add(track)
            s.commit()

            for k,v in trackDescriptor.getTags().items():

                # Filter tags that make no sense to preserve
                if k not in self.__ignoreTrackKeys and k not in self.__removeTrackKeys:
                    tag = TrackTag(track_id = track.id,
                                   key = k,
                                   value = v)
                    s.add(tag)
                    s.commit()

        except Exception as ex:
            raise click.ClickException(f"TrackController.addTrack(): {repr(ex)}")
        finally:
            s.close()


    def updateTrack(self, trackId, trackDescriptor : TrackDescriptor):
        """Updates an existing Track from a descriptor and syncs its tags
        (update common keys, add new ones, delete removed ones).

        Returns True when the track exists, False otherwise."""

        if type(trackDescriptor) is not TrackDescriptor:
            raise TypeError('TrackController.updateTrack(): Argument trackDescriptor is required to be of type TrackDescriptor')

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.id == int(trackId))

            if q.count():

                track : Track = q.first()

                track.index = int(trackDescriptor.getIndex())

                track.track_type = int(trackDescriptor.getType().index())
                track.codec_name = str(trackDescriptor.getCodec().identifier())
                track.audio_layout = int(trackDescriptor.getAudioLayout().index())

                track.disposition_flags = int(TrackDisposition.toFlags(trackDescriptor.getDispositionSet()))

                # Set algebra over tag keys drives the three sync operations.
                descriptorTagKeys = trackDescriptor.getTags()
                tagKeysInDescriptor = set(descriptorTagKeys.keys())
                tagKeysInDb = {t.key for t in track.track_tags}

                for k in tagKeysInDescriptor & tagKeysInDb: # to update
                    tags = [t for t in track.track_tags if t.key == k]
                    tags[0].value = descriptorTagKeys[k]
                for k in tagKeysInDescriptor - tagKeysInDb: # to add
                    tag = TrackTag(track_id=track.id, key=k, value=descriptorTagKeys[k])
                    s.add(tag)
                for k in tagKeysInDb - tagKeysInDescriptor: # to remove
                    tags = [t for t in track.track_tags if t.key == k]
                    s.delete(tags[0])

                s.commit()
                return True

            else:
                return False

        except Exception as ex:
            raise click.ClickException(f"TrackController.updateTrack(): {repr(ex)}")
        finally:
            s.close()

    def findTracks(self, patternId):
        """Returns all tracks of a pattern, sorted by their index."""

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId))
            return sorted(q.all(), key=lambda d: d.getIndex())

        except Exception as ex:
            raise click.ClickException(f"TrackController.findTracks(): {repr(ex)}")
        finally:
            s.close()


    def findSiblingDescriptors(self, patternId):
        """Finds all stored tracks related to a pattern, packs them in descriptors
        and also setting sub indices and returns list of descriptors"""

        siblingTracks = self.findTracks(patternId)
        siblingDescriptors = []

        # Per-track-type running counter yields the sub index within the type.
        subIndexCounter = {}
        st: Track
        for st in siblingTracks:
            trackType = st.getType()

            if not trackType in subIndexCounter.keys():
                subIndexCounter[trackType] = 0
            siblingDescriptors.append(st.getDescriptor(subIndex=subIndexCounter[trackType]))
            subIndexCounter[trackType] += 1

        return siblingDescriptors


    #TODO: solve with an optional parameter ^
    def findVideoTracks(self, patternId):
        """Returns all video tracks of a pattern (unsorted)."""

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId), Track.track_type == TrackType.VIDEO.index())
            return [a for a in q.all()]

        except Exception as ex:
            raise click.ClickException(f"TrackController.findVideoTracks(): {repr(ex)}")
        finally:
            s.close()

    def findAudioTracks(self, patternId):
        """Returns all audio tracks of a pattern (unsorted)."""

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId), Track.track_type == TrackType.AUDIO.index())
            return [a for a in q.all()]

        except Exception as ex:
            raise click.ClickException(f"TrackController.findAudioTracks(): {repr(ex)}")
        finally:
            s.close()

    def findSubtitleTracks(self, patternId):
        """Returns all subtitle tracks of a pattern (unsorted)."""

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId), Track.track_type == TrackType.SUBTITLE.index())
            return [t for t in q.all()]

        except Exception as ex:
            raise click.ClickException(f"TrackController.findSubtitleTracks(): {repr(ex)}")
        finally:
            s.close()


    def getTrack(self, patternId : int, index: int) -> Track:
        """Returns the track of a pattern at the given index, or None."""

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId), Track.index == int(index))

            if q.count():
                return q.first()
            else:
                return None

        except Exception as ex:
            raise click.ClickException(f"TrackController.getTrack(): {repr(ex)}")
        finally:
            s.close()

    def setDispositionState(self, patternId: int, index: int, disposition : TrackDisposition, state : bool):
        """Sets (state=True) or clears (state=False) one disposition flag on
        the track identified by (patternId, index).

        Returns True when the track exists, False otherwise."""

        # BUGFIX: these diagnostics previously named 'setTrackDisposition'.
        if type(patternId) is not int:
            raise TypeError('TrackController.setDispositionState(): Argument patternId is required to be of type int')
        if type(index) is not int:
            raise TypeError('TrackController.setDispositionState(): Argument index is required to be of type int')
        if type(disposition) is not TrackDisposition:
            raise TypeError('TrackController.setDispositionState(): Argument disposition is required to be of type TrackDisposition')
        if type(state) is not bool:
            raise TypeError('TrackController.setDispositionState(): Argument state is required to be of type bool')

        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == patternId, Track.index == index)

            if q.count():

                track : Track = q.first()

                if state:
                    track.setDisposition(disposition)
                else:
                    track.resetDisposition(disposition)

                s.commit()
                return True

            else:
                return False

        except Exception as ex:
            # BUGFIX: message previously claimed 'TrackController.updateTrack()'.
            raise click.ClickException(f"TrackController.setDispositionState(): {repr(ex)}")
        finally:
            s.close()

    def deleteTrack(self, trackId):
        """Deletes a track and renumbers its siblings' indices to stay dense.

        Returns True when the track existed, False otherwise."""
        s = self.Session()
        try:
            q = s.query(Track).filter(Track.id == int(trackId))

            if q.count():
                patternId = int(q.first().pattern_id)

                # Walk all siblings in index order, deleting the target and
                # re-assigning compact indices to the survivors.
                q_siblings = s.query(Track).filter(Track.pattern_id == patternId).order_by(Track.index)

                index = 0
                for track in q_siblings.all():

                    if track.id == int(trackId):
                        s.delete(track)
                    else:
                        track.index = index
                        index += 1

                s.commit()
                return True

            return False

        except Exception as ex:
            raise click.ClickException(f"TrackController.deleteTrack(): {repr(ex)}")
        finally:
            s.close()
|
|
||||||
@@ -1,136 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
from .track_controller import TrackController
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
# Screen[dict[int, str, int]]
class TrackDeleteScreen(Screen):
    """Confirmation dialog for deleting a single track.

    Shows the track's sub index, pattern, language and title and offers
    Delete/Cancel buttons.  On successful deletion the screen is dismissed
    with the TrackDescriptor of the deleted track as result.
    """

    CSS = """

    Grid {
        grid-size: 4 9;
        grid-rows: 2 2 2 2 2 2 2 2 2;
        grid-columns: 30 30 30 30;
        height: 100%;
        width: 100%;
        padding: 1;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }
    .four {
        column-span: 4;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, trackDescriptor : TrackDescriptor):
        """Store the descriptor of the track to be deleted.

        :param trackDescriptor: descriptor of the track this dialog is about
        :raises click.ClickException: when trackDescriptor has the wrong type
        """
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        if type(trackDescriptor) is not TrackDescriptor:
            raise click.ClickException('TrackDeleteScreen.init(): trackDescriptor is required to be of type TrackDescriptor')

        self.__tc = TrackController(context = self.context)

        self.__trackDescriptor = trackDescriptor

    def on_mount(self):
        """Populate the static labels from the track descriptor."""
        self.query_one("#subindexlabel", Static).update(str(self.__trackDescriptor.getSubIndex()))
        self.query_one("#patternlabel", Static).update(str(self.__trackDescriptor.getPatternId()))
        self.query_one("#languagelabel", Static).update(str(self.__trackDescriptor.getLanguage().label()))
        self.query_one("#titlelabel", Static).update(str(self.__trackDescriptor.getTitle()))

    def compose(self):
        """Build the 4x9 confirmation grid."""
        yield Header()

        with Grid():

            #1
            yield Static(f"Are you sure to delete the following {self.__trackDescriptor.getType().label()} track?", id="toplabel", classes="four")

            #2
            yield Static("sub index")
            yield Static(" ", id="subindexlabel", classes="three")

            #3
            yield Static("from pattern")
            yield Static(" ", id="patternlabel", classes="three")

            #4 (spacer row)
            yield Static(" ", classes="four")

            #5
            yield Static("Language")
            yield Static(" ", id="languagelabel", classes="three")

            #6
            yield Static("Title")
            yield Static(" ", id="titlelabel", classes="three")

            #7 (spacer row)
            yield Static(" ", classes="four")

            #8 (spacer row)
            yield Static(" ", classes="four")

            #9
            yield Button("Delete", id="delete_button")
            yield Button("Cancel", id="cancel_button")

        yield Footer()

    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Delete the track (and dismiss with its descriptor) or cancel."""
        if event.button.id == "delete_button":

            track = self.__tc.getTrack(self.__trackDescriptor.getPatternId(), self.__trackDescriptor.getIndex())

            if track is None:
                raise click.ClickException(f"Track is none: patternId={self.__trackDescriptor.getPatternId()} type={self.__trackDescriptor.getType()} subIndex={self.__trackDescriptor.getSubIndex()}")

            # The raise above guarantees track is not None here; the former
            # redundant `if track is not None:` guard has been dropped.
            if self.__tc.deleteTrack(track.getId()):
                self.dismiss(self.__trackDescriptor)
            else:
                # TODO: show an error message to the user
                self.app.pop_screen()

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
|
||||||
|
|
||||||
@@ -1,346 +0,0 @@
|
|||||||
import logging
|
|
||||||
from typing import Self
|
|
||||||
|
|
||||||
from .iso_language import IsoLanguage
|
|
||||||
from .track_type import TrackType
|
|
||||||
from .audio_layout import AudioLayout
|
|
||||||
from .track_disposition import TrackDisposition
|
|
||||||
from .track_codec import TrackCodec
|
|
||||||
|
|
||||||
# from .helper import dictDiff, setDiff
|
|
||||||
|
|
||||||
|
|
||||||
class TrackDescriptor:
    """In-memory description of a single media stream (track).

    Bundles identity (database id, pattern id, index/source index/sub index),
    ffprobe-derived metadata (type, codec, tags, disposition flags, audio
    layout) and an optional external source file path.

    Instances are built either directly via keyword arguments — see the
    ``*_KEY`` class constants for the accepted names — or from an ffprobe
    stream object via :meth:`fromFfprobe`.
    """

    # kwargs keys accepted by __init__()
    CONTEXT_KEY = "context"

    ID_KEY = "id"
    INDEX_KEY = "index"
    SOURCE_INDEX_KEY = "source_index"
    SUB_INDEX_KEY = "sub_index"
    PATTERN_ID_KEY = "pattern_id"
    EXTERNAL_SOURCE_FILE_PATH_KEY = "external_source_file"

    DISPOSITION_SET_KEY = "disposition_set"
    TAGS_KEY = "tags"

    TRACK_TYPE_KEY = "track_type"
    CODEC_KEY = "codec_name"
    AUDIO_LAYOUT_KEY = "audio_layout"

    # keys as they appear in ffprobe stream objects
    FFPROBE_INDEX_KEY = "index"
    FFPROBE_DISPOSITION_KEY = "disposition"
    FFPROBE_TAGS_KEY = "tags"
    FFPROBE_CODEC_TYPE_KEY = "codec_type"
    FFPROBE_CODEC_KEY = "codec_name"

    @staticmethod
    def _takeTyped(kwargs, key, expectedType, default):
        """Return kwargs[key] after a strict type check, or *default* when absent.

        Replaces the eleven hand-copied presence/type-check blocks the
        constructor used to contain.

        :raises TypeError: when the key is present but of the wrong type
        """
        if key not in kwargs:
            return default
        value = kwargs[key]
        # Deliberately a strict type() check (not isinstance), matching the
        # original behaviour: e.g. bool is rejected where int is expected.
        if type(value) is not expectedType:
            raise TypeError(
                f"TrackDescriptor.__init__(): Argument {key} is required to be of type {expectedType.__name__}"
            )
        return value

    def __init__(self, **kwargs):
        """Create a descriptor; every keyword argument is optional.

        Missing values fall back to neutral defaults (-1 indices, empty
        tags/dispositions, UNKNOWN type/codec, undefined audio layout).

        :raises TypeError: when a supplied argument has the wrong type
        """
        # Context / logger: fall back to a silent 'FFX' logger when no
        # application context is supplied.
        if TrackDescriptor.CONTEXT_KEY in kwargs:
            if type(kwargs[TrackDescriptor.CONTEXT_KEY]) is not dict:
                raise TypeError(
                    f"TrackDescriptor.__init__(): Argument {TrackDescriptor.CONTEXT_KEY} is required to be of type dict"
                )
            self.__context = kwargs[TrackDescriptor.CONTEXT_KEY]
            self.__logger = self.__context['logger']
        else:
            self.__context = {}
            self.__logger = logging.getLogger('FFX')
            self.__logger.addHandler(logging.NullHandler())

        self.__trackId = self._takeTyped(kwargs, TrackDescriptor.ID_KEY, int, -1)
        self.__patternId = self._takeTyped(kwargs, TrackDescriptor.PATTERN_ID_KEY, int, -1)
        self.__externalSourceFilePath = self._takeTyped(kwargs, TrackDescriptor.EXTERNAL_SOURCE_FILE_PATH_KEY, str, '')
        self.__index = self._takeTyped(kwargs, TrackDescriptor.INDEX_KEY, int, -1)

        # source_index silently falls back to the track index when missing
        # OR of the wrong type (historic non-raising behaviour — kept as-is).
        if type(kwargs.get(TrackDescriptor.SOURCE_INDEX_KEY)) is int:
            self.__sourceIndex = kwargs[TrackDescriptor.SOURCE_INDEX_KEY]
        else:
            self.__sourceIndex = self.__index

        self.__subIndex = self._takeTyped(kwargs, TrackDescriptor.SUB_INDEX_KEY, int, -1)
        self.__trackType = self._takeTyped(kwargs, TrackDescriptor.TRACK_TYPE_KEY, TrackType, TrackType.UNKNOWN)
        self.__trackCodec = self._takeTyped(kwargs, TrackDescriptor.CODEC_KEY, TrackCodec, TrackCodec.UNKNOWN)
        self.__trackTags = self._takeTyped(kwargs, TrackDescriptor.TAGS_KEY, dict, {})

        dispositionSet = self._takeTyped(kwargs, TrackDescriptor.DISPOSITION_SET_KEY, set, set())
        for d in dispositionSet:
            if type(d) is not TrackDisposition:
                raise TypeError(
                    f"TrackDescriptor.__init__(): All elements of argument set {TrackDescriptor.DISPOSITION_SET_KEY} is required to be of type TrackDisposition"
                )
        self.__dispositionSet = dispositionSet

        self.__audioLayout = self._takeTyped(kwargs, TrackDescriptor.AUDIO_LAYOUT_KEY, AudioLayout, AudioLayout.LAYOUT_UNDEFINED)

    @classmethod
    def fromFfprobe(cls, streamObj, subIndex: int = -1):
        """Build a TrackDescriptor from one ffprobe stream object.

        Returns None when the stream's "codec_type" is missing or cannot be
        mapped to a known TrackType.

        Expected input shape (abridged ffprobe stream JSON)::

            {
                "index": 4,
                "codec_name": "hdmv_pgs_subtitle",
                "codec_type": "subtitle",
                "disposition": {"default": 1, "forced": 0, ...},
                "tags": {"language": "ger", "title": "German Full"}
            }
        """
        trackType = (
            TrackType.fromLabel(streamObj["codec_type"])
            if "codec_type" in streamObj.keys()
            else TrackType.UNKNOWN
        )

        if trackType == TrackType.UNKNOWN:
            return None

        kwargs = {}

        index = (
            int(streamObj[cls.FFPROBE_INDEX_KEY])
            if cls.FFPROBE_INDEX_KEY in streamObj.keys()
            else -1
        )
        kwargs[cls.INDEX_KEY] = index
        kwargs[cls.SOURCE_INDEX_KEY] = index
        kwargs[cls.SUB_INDEX_KEY] = subIndex

        kwargs[cls.TRACK_TYPE_KEY] = trackType

        # NOTE(review): a stream without "codec_name" raises KeyError here —
        # unchanged from the original; presumably ffprobe always supplies it.
        kwargs[cls.CODEC_KEY] = TrackCodec.identify(streamObj[cls.FFPROBE_CODEC_KEY])

        # Collect every disposition whose ffprobe flag is set and that maps
        # to a known TrackDisposition.
        dispositions = (
            streamObj[cls.FFPROBE_DISPOSITION_KEY]
            if cls.FFPROBE_DISPOSITION_KEY in streamObj.keys()
            else {}
        )
        kwargs[cls.DISPOSITION_SET_KEY] = {
            t
            for name, flag in dispositions.items()
            if flag and (t := TrackDisposition.find(name)) is not None
        }

        kwargs[cls.TAGS_KEY] = (
            streamObj[cls.FFPROBE_TAGS_KEY]
            if cls.FFPROBE_TAGS_KEY in streamObj.keys()
            else {}
        )

        kwargs[cls.AUDIO_LAYOUT_KEY] = (
            AudioLayout.identify(streamObj)
            if trackType == TrackType.AUDIO
            else AudioLayout.LAYOUT_UNDEFINED
        )

        return cls(**kwargs)

    def getId(self) -> int:
        """Database id of the track; -1 when not persisted."""
        return self.__trackId

    def getPatternId(self) -> int:
        """Id of the owning pattern; -1 when unset."""
        return self.__patternId

    def getIndex(self) -> int:
        """Track index within the pattern; -1 when unset."""
        return self.__index

    def setIndex(self, index):
        self.__index = index

    def getSourceIndex(self) -> int:
        """Index of the stream in its source file."""
        return self.__sourceIndex

    def setSourceIndex(self, sourceIndex: int):
        self.__sourceIndex = int(sourceIndex)

    def getSubIndex(self) -> int:
        """Per-type sub index; -1 when unset."""
        return self.__subIndex

    def setSubIndex(self, subIndex):
        self.__subIndex = subIndex

    def getType(self) -> TrackType:
        return self.__trackType

    def getCodec(self) -> TrackCodec:
        return self.__trackCodec

    def getLanguage(self) -> IsoLanguage:
        """Resolve the three-letter 'language' tag to an IsoLanguage."""
        if "language" in self.__trackTags.keys():
            return IsoLanguage.findThreeLetter(self.__trackTags["language"])
        else:
            return IsoLanguage.UNDEFINED

    def setLanguage(self, language: IsoLanguage):
        """Set the 'language' tag from an IsoLanguage value.

        :raises TypeError: when language is not an IsoLanguage
        """
        if not type(language) is IsoLanguage:
            raise TypeError('language has to be of type IsoLanguage')
        # BUGFIX: store the three-letter code, not the enum object —
        # getLanguage() resolves the tag via findThreeLetter() and every
        # other writer stores the plain code string.
        self.__trackTags["language"] = language.threeLetter()

    def getTitle(self) -> str:
        """The 'title' tag, or '' when absent."""
        if "title" in self.__trackTags.keys():
            return str(self.__trackTags["title"])
        else:
            return ""

    def setTitle(self, title: str):
        self.__trackTags["title"] = str(title)

    def getAudioLayout(self) -> AudioLayout:
        return self.__audioLayout

    def getTags(self) -> dict:
        """The raw tag dictionary (includes 'language' and 'title')."""
        return self.__trackTags

    def getDispositionSet(self) -> set:
        return self.__dispositionSet

    def setDispositionSet(self, dispositionSet: set):
        self.__dispositionSet = dispositionSet

    def getDispositionFlag(self, disposition: TrackDisposition) -> bool:
        """True when the given disposition is set on this track."""
        return disposition in self.__dispositionSet

    def setDispositionFlag(self, disposition: TrackDisposition, state: bool):
        """Add or remove a single disposition flag."""
        if state:
            self.__dispositionSet.add(disposition)
        else:
            self.__dispositionSet.discard(disposition)

    def setExternalSourceFilePath(self, filePath: str):
        self.__externalSourceFilePath = str(filePath)

    def getExternalSourceFilePath(self) -> str:
        return self.__externalSourceFilePath
|
|
||||||
@@ -1,457 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from textual.screen import Screen
|
|
||||||
from textual.widgets import Header, Footer, Static, Button, SelectionList, Select, DataTable, Input
|
|
||||||
from textual.containers import Grid
|
|
||||||
|
|
||||||
from ffx.model.pattern import Pattern
|
|
||||||
|
|
||||||
from .track_controller import TrackController
|
|
||||||
from .pattern_controller import PatternController
|
|
||||||
from .tag_controller import TagController
|
|
||||||
|
|
||||||
from .track_type import TrackType
|
|
||||||
from .track_codec import TrackCodec
|
|
||||||
|
|
||||||
from .iso_language import IsoLanguage
|
|
||||||
from .track_disposition import TrackDisposition
|
|
||||||
from .audio_layout import AudioLayout
|
|
||||||
|
|
||||||
from .track_descriptor import TrackDescriptor
|
|
||||||
|
|
||||||
from .tag_details_screen import TagDetailsScreen
|
|
||||||
from .tag_delete_screen import TagDeleteScreen
|
|
||||||
|
|
||||||
from textual.widgets._data_table import CellDoesNotExist
|
|
||||||
|
|
||||||
from ffx.helper import formatRichColor, removeRichColor
|
|
||||||
|
|
||||||
|
|
||||||
# Screen[dict[int, str, int]]
|
|
||||||
class TrackDetailsScreen(Screen):
|
|
||||||
|
|
||||||
CSS = """
|
|
||||||
|
|
||||||
Grid {
|
|
||||||
grid-size: 5 24;
|
|
||||||
grid-rows: 2 2 2 2 2 3 3 2 2 3 2 2 2 2 2 6 2 2 6 2 2 2;
|
|
||||||
grid-columns: 25 25 25 25 125;
|
|
||||||
height: 100%;
|
|
||||||
width: 100%;
|
|
||||||
padding: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
Input {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
Button {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
SelectionList {
|
|
||||||
border: none;
|
|
||||||
min-height: 6;
|
|
||||||
}
|
|
||||||
Select {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable {
|
|
||||||
min-height: 6;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--cursor {
|
|
||||||
background: darkorange;
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
DataTable .datatable--header {
|
|
||||||
background: steelblue;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
#toplabel {
|
|
||||||
height: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.two {
|
|
||||||
column-span: 2;
|
|
||||||
}
|
|
||||||
.three {
|
|
||||||
column-span: 3;
|
|
||||||
}
|
|
||||||
|
|
||||||
.four {
|
|
||||||
column-span: 4;
|
|
||||||
}
|
|
||||||
.five {
|
|
||||||
column-span: 5;
|
|
||||||
}
|
|
||||||
|
|
||||||
.box {
|
|
||||||
height: 100%;
|
|
||||||
border: solid green;
|
|
||||||
}
|
|
||||||
|
|
||||||
.yellow {
|
|
||||||
tint: yellow 40%;
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, trackDescriptor : TrackDescriptor = None, patternId = None, trackType : TrackType = None, index = None, subIndex = None):
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
self.context = self.app.getContext()
|
|
||||||
self.Session = self.context['database']['session'] # convenience
|
|
||||||
|
|
||||||
self.__configurationData = self.context['config'].getData()
|
|
||||||
|
|
||||||
metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {}
|
|
||||||
|
|
||||||
self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {}
|
|
||||||
self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else []
|
|
||||||
self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else []
|
|
||||||
self.__removeTrackKeys = (metadataConfiguration['streams']['remove']
|
|
||||||
if 'streams' in metadataConfiguration.keys()
|
|
||||||
and 'remove' in metadataConfiguration['streams'].keys() else [])
|
|
||||||
self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore']
|
|
||||||
if 'streams' in metadataConfiguration.keys()
|
|
||||||
and 'ignore' in metadataConfiguration['streams'].keys() else [])
|
|
||||||
|
|
||||||
|
|
||||||
self.__tc = TrackController(context = self.context)
|
|
||||||
self.__pc = PatternController(context = self.context)
|
|
||||||
self.__tac = TagController(context = self.context)
|
|
||||||
|
|
||||||
self.__isNew = trackDescriptor is None
|
|
||||||
if self.__isNew:
|
|
||||||
self.__trackType = trackType
|
|
||||||
self.__trackCodec = TrackCodec.UNKNOWN
|
|
||||||
self.__audioLayout = AudioLayout.LAYOUT_UNDEFINED
|
|
||||||
self.__index = index
|
|
||||||
self.__subIndex = subIndex
|
|
||||||
self.__trackDescriptor : TrackDescriptor = None
|
|
||||||
self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else {}
|
|
||||||
else:
|
|
||||||
self.__trackType = trackDescriptor.getType()
|
|
||||||
self.__trackCodec = trackDescriptor.getCodec()
|
|
||||||
self.__audioLayout = trackDescriptor.getAudioLayout()
|
|
||||||
self.__index = trackDescriptor.getIndex()
|
|
||||||
self.__subIndex = trackDescriptor.getSubIndex()
|
|
||||||
self.__trackDescriptor : TrackDescriptor = trackDescriptor
|
|
||||||
self.__pattern : Pattern = self.__pc.getPattern(self.__trackDescriptor.getPatternId())
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def updateTags(self):
|
|
||||||
|
|
||||||
self.trackTagsTable.clear()
|
|
||||||
|
|
||||||
trackId = self.__trackDescriptor.getId()
|
|
||||||
|
|
||||||
if trackId != -1:
|
|
||||||
|
|
||||||
trackTags = self.__tac.findAllTrackTags(trackId)
|
|
||||||
|
|
||||||
for k,v in trackTags.items():
|
|
||||||
|
|
||||||
if k != 'language' and k != 'title':
|
|
||||||
|
|
||||||
textColor = None
|
|
||||||
if k in self.__ignoreTrackKeys:
|
|
||||||
textColor = 'blue'
|
|
||||||
if k in self.__removeTrackKeys:
|
|
||||||
textColor = 'red'
|
|
||||||
|
|
||||||
row = (formatRichColor(k, textColor), formatRichColor(v, textColor))
|
|
||||||
self.trackTagsTable.add_row(*map(str, row))
|
|
||||||
|
|
||||||
|
|
||||||
def on_mount(self):
|
|
||||||
|
|
||||||
self.query_one("#index_label", Static).update(str(self.__index) if self.__index is not None else '-')
|
|
||||||
self.query_one("#subindex_label", Static).update(str(self.__subIndex)if self.__subIndex is not None else '-')
|
|
||||||
|
|
||||||
if self.__pattern is not None:
|
|
||||||
self.query_one("#pattern_label", Static).update(self.__pattern.getPattern())
|
|
||||||
|
|
||||||
if self.__trackType is not None:
|
|
||||||
self.query_one("#type_select", Select).value = self.__trackType.label()
|
|
||||||
if self.__trackType == TrackType.AUDIO:
|
|
||||||
self.query_one("#audio_layout_select", Select).value = self.__audioLayout.label()
|
|
||||||
|
|
||||||
for d in TrackDisposition:
|
|
||||||
|
|
||||||
dispositionIsSet = (self.__trackDescriptor is not None
|
|
||||||
and d in self.__trackDescriptor.getDispositionSet())
|
|
||||||
|
|
||||||
dispositionOption = (d.label(), d.index(), dispositionIsSet)
|
|
||||||
self.query_one("#dispositions_selection_list", SelectionList).add_option(dispositionOption)
|
|
||||||
|
|
||||||
if self.__trackDescriptor is not None:
|
|
||||||
|
|
||||||
self.query_one("#language_select", Select).value = self.__trackDescriptor.getLanguage().label()
|
|
||||||
self.query_one("#title_input", Input).value = self.__trackDescriptor.getTitle()
|
|
||||||
self.updateTags()
|
|
||||||
|
|
||||||
|
|
||||||
def compose(self):
|
|
||||||
|
|
||||||
self.trackTagsTable = DataTable(classes="five")
|
|
||||||
|
|
||||||
# Define the columns with headers
|
|
||||||
self.column_key_track_tag_key = self.trackTagsTable.add_column("Key", width=50)
|
|
||||||
self.column_key_track_tag_value = self.trackTagsTable.add_column("Value", width=100)
|
|
||||||
|
|
||||||
self.trackTagsTable.cursor_type = 'row'
|
|
||||||
|
|
||||||
|
|
||||||
languages = [l.label() for l in IsoLanguage]
|
|
||||||
|
|
||||||
yield Header()
|
|
||||||
|
|
||||||
with Grid():
|
|
||||||
|
|
||||||
# 1
|
|
||||||
yield Static(f"New stream" if self.__isNew else f"Edit stream", id="toplabel", classes="five")
|
|
||||||
|
|
||||||
# 2
|
|
||||||
yield Static("for pattern")
|
|
||||||
yield Static("", id="pattern_label", classes="four")
|
|
||||||
|
|
||||||
# 3
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 4
|
|
||||||
yield Static("Index / Subindex")
|
|
||||||
yield Static("", id="index_label", classes="two")
|
|
||||||
yield Static("", id="subindex_label", classes="two")
|
|
||||||
|
|
||||||
# 5
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 6
|
|
||||||
yield Static("Type")
|
|
||||||
yield Select.from_values([t.label() for t in TrackType], classes="four", id="type_select")
|
|
||||||
|
|
||||||
# 7
|
|
||||||
if self.__trackType == TrackType.AUDIO:
|
|
||||||
yield Static("Audio Layout")
|
|
||||||
yield Select.from_values([t.label() for t in AudioLayout], classes="four", id="audio_layout_select")
|
|
||||||
else:
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 8
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 9
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 10
|
|
||||||
yield Static("Language")
|
|
||||||
yield Select.from_values(languages, classes="four", id="language_select")
|
|
||||||
# 11
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 12
|
|
||||||
yield Static("Title")
|
|
||||||
yield Input(id="title_input", classes="four")
|
|
||||||
|
|
||||||
# 13
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 14
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 15
|
|
||||||
yield Static("Stream tags")
|
|
||||||
yield Static(" ")
|
|
||||||
yield Button("Add", id="button_add_stream_tag")
|
|
||||||
yield Button("Edit", id="button_edit_stream_tag")
|
|
||||||
yield Button("Delete", id="button_delete_stream_tag")
|
|
||||||
# 16
|
|
||||||
yield self.trackTagsTable
|
|
||||||
|
|
||||||
# 17
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 18
|
|
||||||
yield Static("Stream dispositions", classes="five")
|
|
||||||
|
|
||||||
# 19
|
|
||||||
yield SelectionList[int](
|
|
||||||
classes="five",
|
|
||||||
id = "dispositions_selection_list"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 20
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
# 21
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 22
|
|
||||||
yield Button("Save", id="save_button")
|
|
||||||
yield Button("Cancel", id="cancel_button")
|
|
||||||
|
|
||||||
# 23
|
|
||||||
yield Static(" ", classes="five")
|
|
||||||
|
|
||||||
# 24
|
|
||||||
yield Static(" ", classes="five", id="messagestatic")
|
|
||||||
|
|
||||||
|
|
||||||
yield Footer(id="footer")
|
|
||||||
|
|
||||||
|
|
||||||
def getTrackDescriptorFromInput(self):
|
|
||||||
|
|
||||||
kwargs = {}
|
|
||||||
|
|
||||||
kwargs[TrackDescriptor.CONTEXT_KEY] = self.context
|
|
||||||
|
|
||||||
kwargs[TrackDescriptor.PATTERN_ID_KEY] = int(self.__pattern.getId())
|
|
||||||
|
|
||||||
kwargs[TrackDescriptor.INDEX_KEY] = self.__index
|
|
||||||
kwargs[TrackDescriptor.SUB_INDEX_KEY] = self.__subIndex #!
|
|
||||||
|
|
||||||
kwargs[TrackDescriptor.TRACK_TYPE_KEY] = TrackType.fromLabel(self.query_one("#type_select", Select).value)
|
|
||||||
|
|
||||||
kwargs[TrackDescriptor.CODEC_KEY] = self.__trackCodec
|
|
||||||
|
|
||||||
if self.__trackType == TrackType.AUDIO:
|
|
||||||
kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.fromLabel(self.query_one("#audio_layout_select", Select).value)
|
|
||||||
else:
|
|
||||||
kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.LAYOUT_UNDEFINED
|
|
||||||
|
|
||||||
trackTags = {}
|
|
||||||
language = self.query_one("#language_select", Select).value
|
|
||||||
if language:
|
|
||||||
trackTags['language'] = IsoLanguage.find(language).threeLetter()
|
|
||||||
title = self.query_one("#title_input", Input).value
|
|
||||||
if title:
|
|
||||||
trackTags['title'] = title
|
|
||||||
|
|
||||||
tableTags = {row[0]:row[1] for r in self.trackTagsTable.rows if (row := self.trackTagsTable.get_row(r)) and row[0] != 'language' and row[0] != 'title'}
|
|
||||||
|
|
||||||
kwargs[TrackDescriptor.TAGS_KEY] = trackTags | tableTags
|
|
||||||
|
|
||||||
dispositionFlags = sum([2**f for f in self.query_one("#dispositions_selection_list", SelectionList).selected])
|
|
||||||
kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = TrackDisposition.toSet(dispositionFlags)
|
|
||||||
|
|
||||||
return TrackDescriptor(**kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getSelectedTag(self):
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
# Fetch the currently selected row when 'Enter' is pressed
|
|
||||||
#selected_row_index = self.table.cursor_row
|
|
||||||
row_key, col_key = self.trackTagsTable.coordinate_to_cell_key(self.trackTagsTable.cursor_coordinate)
|
|
||||||
|
|
||||||
if row_key is not None:
|
|
||||||
selected_tag_data = self.trackTagsTable.get_row(row_key)
|
|
||||||
|
|
||||||
tagKey = removeRichColor(selected_tag_data[0])
|
|
||||||
tagValue = removeRichColor(selected_tag_data[1])
|
|
||||||
|
|
||||||
return tagKey, tagValue
|
|
||||||
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
except CellDoesNotExist:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Event handler for button press
|
|
||||||
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch the dialog's button actions: save, cancel and tag CRUD.

    On save, validates that at most one sibling track of the same type
    carries the 'default' disposition and at most one carries 'forced'
    before persisting the track descriptor.
    """

    if event.button.id == "save_button":

        # Collect all tracks of this pattern that share our track type.
        # BUG FIX: the AUDIO test used to be a separate 'if' chain, so for
        # VIDEO the trailing 'else' reset trackList to [] and silently
        # disabled the default/forced validation for video tracks.
        if self.__trackType == TrackType.VIDEO:
            trackList = self.__tc.findVideoTracks(self.__pattern.getId())
        elif self.__trackType == TrackType.AUDIO:
            trackList = self.__tc.findAudioTracks(self.__pattern.getId())
        elif self.__trackType == TrackType.SUBTITLE:
            trackList = self.__tc.findSubtitleTracks(self.__pattern.getId())
        else:
            trackList = []

        # Siblings: same type, but not the track currently being edited.
        siblingTrackList = [t for t in trackList
                            if t.getType() == self.__trackType and t.getIndex() != self.__index]

        numDefaultTracks = len([t for t in siblingTrackList
                                if TrackDisposition.DEFAULT in t.getDispositionSet()])
        numForcedTracks = len([t for t in siblingTrackList
                               if TrackDisposition.FORCED in t.getDispositionSet()])

        self.__subIndex = len(trackList)
        trackDescriptor = self.getTrackDescriptorFromInput()

        # Only one sibling may be 'default' and only one 'forced'.
        if ((TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() and numDefaultTracks)
                or (TrackDisposition.FORCED in trackDescriptor.getDispositionSet() and numForcedTracks)):
            # BUG FIX: message previously named the flag 'debug' instead of 'default'.
            self.query_one("#messagestatic", Static).update(
                "Cannot add another stream with disposition flag 'default' or 'forced' set")
        else:
            self.query_one("#messagestatic", Static).update(" ")

            if self.__isNew:
                # Add the track via this screen.
                self.__tc.addTrack(trackDescriptor)
                self.dismiss(trackDescriptor)
            else:
                track = self.__tc.getTrack(self.__pattern.getId(), self.__index)

                # Update the track via the details screen.
                if self.__tc.updateTrack(track.getId(), trackDescriptor):
                    self.dismiss(trackDescriptor)
                else:
                    self.app.pop_screen()

    if event.button.id == "cancel_button":
        self.app.pop_screen()

    if event.button.id == "button_add_stream_tag":
        if not self.__isNew:
            self.app.push_screen(TagDetailsScreen(), self.handle_update_tag)

    if event.button.id == "button_edit_stream_tag":
        # BUG FIX: getSelectedTag() returns None when no row is selected;
        # unpacking that directly raised a TypeError.
        selected = self.getSelectedTag()
        if selected is not None:
            tagKey, tagValue = selected
            self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag)

    if event.button.id == "button_delete_stream_tag":
        selected = self.getSelectedTag()
        if selected is not None:
            tagKey, tagValue = selected
            self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_update_tag(self, tag):
    """Persist an edited (key, value) tag pair and refresh the tag table."""

    trackId = self.__trackDescriptor.getId()

    # A descriptor without a persisted database id cannot be updated.
    if trackId == -1:
        raise click.ClickException(f"TrackDetailsScreen.handle_update_tag: trackId not set (-1) trackDescriptor={self.__trackDescriptor}")

    key, value = tag[0], tag[1]
    if self.__tac.updateTrackTag(trackId, key, value) is not None:
        self.updateTags()
|
|
||||||
|
|
||||||
def handle_delete_tag(self, trackTag):
    """Remove the tag named by trackTag[0] from the current track and refresh."""

    trackId = self.__trackDescriptor.getId()

    # A descriptor without a persisted database id cannot be modified.
    if trackId == -1:
        raise click.ClickException(f"TrackDetailsScreen.handle_delete_tag: trackId not set (-1) trackDescriptor={self.__trackDescriptor}")

    tag = self.__tac.findTrackTag(trackId, trackTag[0])
    if tag is None:
        return
    if self.__tac.deleteTrackTag(tag.id):
        self.updateTags()
|
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
import difflib, click
|
|
||||||
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
|
|
||||||
class TrackDisposition(Enum):
    """FFmpeg stream disposition flags.

    Each member carries the ffmpeg disposition name, its bit position in
    the integer flag word (bit value ``2**index``) and a three-letter
    indicator used in table displays.
    """

    DEFAULT = {"name": "default", "index": 0, "indicator": "DEF"}
    FORCED = {"name": "forced", "index": 1, "indicator": "FOR"}

    DUB = {"name": "dub", "index": 2, "indicator": "DUB"}
    ORIGINAL = {"name": "original", "index": 3, "indicator": "ORG"}
    COMMENT = {"name": "comment", "index": 4, "indicator": "COM"}
    LYRICS = {"name": "lyrics", "index": 5, "indicator": "LYR"}
    KARAOKE = {"name": "karaoke", "index": 6, "indicator": "KAR"}
    HEARING_IMPAIRED = {"name": "hearing_impaired", "index": 7, "indicator": "HIM"}
    VISUAL_IMPAIRED = {"name": "visual_impaired", "index": 8, "indicator": "VIM"}
    CLEAN_EFFECTS = {"name": "clean_effects", "index": 9, "indicator": "CLE"}
    ATTACHED_PIC = {"name": "attached_pic", "index": 10, "indicator": "ATP"}
    TIMED_THUMBNAILS = {"name": "timed_thumbnails", "index": 11, "indicator": "TTH"}
    NON_DIEGETICS = {"name": "non_diegetic", "index": 12, "indicator": "NOD"}
    CAPTIONS = {"name": "captions", "index": 13, "indicator": "CAP"}
    DESCRIPTIONS = {"name": "descriptions", "index": 14, "indicator": "DES"}
    METADATA = {"name": "metadata", "index": 15, "indicator": "MED"}
    DEPENDENT = {"name": "dependent", "index": 16, "indicator": "DEP"}
    STILL_IMAGE = {"name": "still_image", "index": 17, "indicator": "STI"}

    def label(self):
        """Return the ffmpeg disposition name."""
        return str(self.value['name'])

    def index(self):
        """Return the bit position of this flag in the flag word."""
        return int(self.value['index'])

    def indicator(self):
        """Return the three-letter indicator for table displays."""
        return str(self.value['indicator'])

    @staticmethod
    def toFlags(dispositionSet):
        """Encode a set of members as an integer flag word (bits 2**index).

        Raises click.ClickException when the argument is not a set or
        contains non-TrackDisposition elements.
        """
        # isinstance (instead of an exact type check) also accepts set
        # subclasses such as frozenset-backed views; Enum classes with
        # members cannot be subclassed, so the element check is equivalent.
        if not isinstance(dispositionSet, set):
            raise click.ClickException('TrackDisposition.toFlags(): Argument is not of type set')

        flags = 0
        for disposition in dispositionSet:
            if not isinstance(disposition, TrackDisposition):
                raise click.ClickException('TrackDisposition.toFlags(): Element not of type TrackDisposition')
            flags |= 1 << disposition.index()
        return flags

    @staticmethod
    def toSet(flags):
        """Decode an integer flag word into the set of members whose bits are set."""
        return {d for d in TrackDisposition if flags & (1 << d.index())}

    @staticmethod
    def find(label):
        """Return the member whose label equals *label*, or None if unknown."""
        return next((d for d in TrackDisposition if d.label() == str(label)), None)

    @staticmethod
    def fromIndicator(indicator: str):
        """Return the member whose indicator equals *indicator*, or None if unknown."""
        return next((d for d in TrackDisposition if d.indicator() == str(indicator)), None)
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TrackType(Enum):
    """Media stream categories, each with a textual label and a numeric index."""

    VIDEO = {'label': 'video', 'index': 1}
    AUDIO = {'label': 'audio', 'index': 2}
    SUBTITLE = {'label': 'subtitle', 'index': 3}

    UNKNOWN = {'label': 'unknown', 'index': 0}

    def label(self):
        """Returns the stream type as string"""
        return str(self.value['label'])

    def indicator(self):
        """Returns the stream type as single letter"""
        return self.label()[0]

    def index(self):
        """Returns the stream type index"""
        return int(self.value['index'])

    @staticmethod
    def fromLabel(label : str):
        """Resolve a member from its label; UNKNOWN when nothing matches."""
        for member in TrackType:
            if member.value['label'] == str(label):
                return member
        return TrackType.UNKNOWN

    @staticmethod
    def fromIndex(index : int):
        """Resolve a member from its numeric index; UNKNOWN when nothing matches."""
        for member in TrackType:
            if member.value['index'] == int(index):
                return member
        return TrackType.UNKNOWN
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
|
|
||||||
class VideoEncoder(Enum):
    """Supported target video encoders, each with a label and a numeric index."""

    AV1 = {'label': 'av1', 'index': 1}
    VP9 = {'label': 'vp9', 'index': 2}
    H264 = {'label': 'h264', 'index': 3}

    UNDEFINED = {'label': 'undefined', 'index': 0}

    def label(self):
        """Returns the stream type as string"""
        return str(self.value['label'])

    def index(self):
        """Returns the stream type index"""
        return int(self.value['index'])

    @staticmethod
    def fromLabel(label : str):
        """Resolve an encoder from its label; UNDEFINED when nothing matches."""
        return next((e for e in VideoEncoder if e.value['label'] == str(label)),
                    VideoEncoder.UNDEFINED)

    @staticmethod
    def fromIndex(index : int):
        """Resolve an encoder from its numeric index; UNDEFINED when nothing matches."""
        return next((e for e in VideoEncoder if e.value['index'] == int(index)),
                    VideoEncoder.UNDEFINED)
|
|
||||||
170
docs/file_formats.md
Normal file
170
docs/file_formats.md
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
# File Formats
|
||||||
|
|
||||||
|
This document captures source-file-format notes that complement the normative
|
||||||
|
requirements in `requirements/source_file_formats.md`.
|
||||||
|
|
||||||
|
The first documented format is a Matroska source that carries styled ASS/SSA
|
||||||
|
subtitle streams together with embedded font attachments.
|
||||||
|
|
||||||
|
## Styled ASS In Matroska With Embedded Fonts
|
||||||
|
|
||||||
|
These files are typically `.mkv` releases where subtitle rendering quality
|
||||||
|
depends on keeping both parts of the subtitle package together:
|
||||||
|
|
||||||
|
- one or more subtitle streams with codec `ass`
|
||||||
|
- one or more attachment streams that embed font files used by those subtitles
|
||||||
|
|
||||||
|
This matters because ASS subtitles are not plain text subtitles in the narrow
|
||||||
|
WebVTT sense. They can carry layout, styling, positioning, karaoke, signs, and
|
||||||
|
other typesetting effects. If the matching embedded fonts are lost, consumers
|
||||||
|
can still see subtitle text but the intended styling and sometimes glyph
|
||||||
|
coverage can be degraded.
|
||||||
|
|
||||||
|
For FFX this format is special because the ASS subtitle streams should remain
|
||||||
|
normally editable and mappable, while the related font attachments should be
|
||||||
|
transported unchanged.
|
||||||
|
|
||||||
|
## Observed Sample
|
||||||
|
|
||||||
|
Assessment date: `2026-04-17`
|
||||||
|
|
||||||
|
Observed sample file:
|
||||||
|
|
||||||
|
- `tests/assets/boruto_s01e283_ssa.mkv`
|
||||||
|
|
||||||
|
Commands used for assessment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ffprobe tests/assets/boruto_s01e283_ssa.mkv
|
||||||
|
ffprobe -hide_banner -show_format -show_streams -of json tests/assets/boruto_s01e283_ssa.mkv
|
||||||
|
```
|
||||||
|
|
||||||
|
Observed stream layout:
|
||||||
|
|
||||||
|
| Stream index | Kind | Key details |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `0` | video | `codec_name=h264` |
|
||||||
|
| `1` | audio | `codec_name=aac`, `language=jpn` |
|
||||||
|
| `2` | subtitle | `codec_name=ass`, `language=ger`, default |
|
||||||
|
| `3` | subtitle | `codec_name=ass`, `language=eng` |
|
||||||
|
| `4`-`13` | attachment | `tags.mimetype=font/ttf`, `.ttf` filenames |
|
||||||
|
|
||||||
|
Observed attachment filenames:
|
||||||
|
|
||||||
|
- `AmazonEmberTanuki-Italic.ttf`
|
||||||
|
- `AmazonEmberTanuki-Regular.ttf`
|
||||||
|
- `Arial.ttf`
|
||||||
|
- `Arial Bold.ttf`
|
||||||
|
- `Georgia.ttf`
|
||||||
|
- `Times New Roman.ttf`
|
||||||
|
- `Times New Roman Bold.ttf`
|
||||||
|
- `Trebuchet MS.ttf`
|
||||||
|
- `Verdana.ttf`
|
||||||
|
- `Verdana Bold.ttf`
|
||||||
|
|
||||||
|
Important probe behavior from the real sample:
|
||||||
|
|
||||||
|
- Plain `ffprobe` lists the font streams as `Attachment: none`.
|
||||||
|
- Plain `ffprobe` also prints warnings such as `Could not find codec
|
||||||
|
parameters for stream 4 (Attachment: none): unknown codec` and later
|
||||||
|
`Unsupported codec with id 0 for input stream ...`.
|
||||||
|
- The JSON produced by `FileProperties.FFPROBE_COMMAND_TOKENS`
|
||||||
|
(`ffprobe -hide_banner -show_format -show_streams -of json`) still exposes
|
||||||
|
the attachment streams clearly through `codec_type="attachment"` and the
|
||||||
|
attachment tags.
|
||||||
|
- In that JSON, the attachment streams do not expose `codec_name`.
|
||||||
|
|
||||||
|
This last point is important for FFX: robust detection must not depend on
|
||||||
|
attachment `codec_name` being present.
|
||||||
|
|
||||||
|
## Detection Guidance
|
||||||
|
|
||||||
|
Current known indicators for this format are:
|
||||||
|
|
||||||
|
- one or more subtitle streams with `codec_type="subtitle"` and
|
||||||
|
`codec_name="ass"`
|
||||||
|
- one or more attachment streams with `codec_type="attachment"`
|
||||||
|
- attachment tags that identify embedded fonts, especially
|
||||||
|
`tags.mimetype="font/ttf"`
|
||||||
|
- attachment filenames that end in `.ttf`
|
||||||
|
|
||||||
|
The pattern can vary. FFX should therefore treat the above as a cluster of
|
||||||
|
signals rather than an exact signature tied to one file.
|
||||||
|
|
||||||
|
Inference from the observed sample plus FFmpeg documentation:
|
||||||
|
|
||||||
|
- MIME matching should not be limited to `font/ttf` alone.
|
||||||
|
- The Boruto sample uses `font/ttf`.
|
||||||
|
- FFmpeg's Matroska attachment example uses
|
||||||
|
`mimetype=application/x-truetype-font` for a `.ttf` attachment.
|
||||||
|
- Detection should therefore normalize multiple TTF-like MIME values rather
|
||||||
|
than depend on a single exact string.
|
||||||
|
|
||||||
|
## Processing Expectations In FFX
|
||||||
|
|
||||||
|
The format-specific requirements live in
|
||||||
|
`requirements/source_file_formats.md`. In practical terms, FFX should:
|
||||||
|
|
||||||
|
- recognize the ASS-plus-font-attachment pattern even when attachment probe
|
||||||
|
data is incomplete
|
||||||
|
- tell the operator that the pattern was detected and that special handling is
|
||||||
|
being used
|
||||||
|
- reject sidecar subtitle import for such sources, because converting or
|
||||||
|
replacing these subtitle tracks with ordinary external text subtitles would
|
||||||
|
break the intended subtitle package
|
||||||
|
- continue to allow normal manipulation of the ASS subtitle tracks themselves
|
||||||
|
- preserve the font attachment streams unchanged
|
||||||
|
|
||||||
|
## FFmpeg Notes
|
||||||
|
|
||||||
|
Relevant FFmpeg documentation confirms several behaviors that line up with
|
||||||
|
FFX's needs:
|
||||||
|
|
||||||
|
- FFmpeg documents `-attach` as adding an attachment stream to the output, and
|
||||||
|
explicitly names Matroska fonts used in subtitle rendering as an example.
|
||||||
|
- FFmpeg documents attachment streams as regular streams that are created after
|
||||||
|
the mapped media streams.
|
||||||
|
- FFmpeg documents `-dump_attachment` for extracting attachment streams, which
|
||||||
|
is useful for debugging or validating a source file's embedded fonts.
|
||||||
|
- FFmpeg's Matroska example requires a `mimetype` metadata tag for attached
|
||||||
|
fonts, which is consistent with using attachment tags as detection signals.
|
||||||
|
- FFmpeg also notes that attachments are implemented as codec extradata. That
|
||||||
|
helps explain why probe output for attachment streams can look different from
|
||||||
|
ordinary audio, video, and subtitle streams.
|
||||||
|
|
||||||
|
Implication for FFX:
|
||||||
|
|
||||||
|
- Attachment preservation is not an optional cosmetic feature for this format.
|
||||||
|
It is part of preserving the subtitle package correctly.
|
||||||
|
|
||||||
|
## Jellyfin Notes
|
||||||
|
|
||||||
|
Jellyfin's documentation also supports keeping this format intact:
|
||||||
|
|
||||||
|
- Jellyfin's subtitle compatibility table lists `ASS/SSA` as supported in
|
||||||
|
`MKV` and not supported in `MP4`.
|
||||||
|
- Jellyfin notes that when subtitles must be transcoded, they are either
|
||||||
|
converted to a supported format or burned into the video, and burning them in
|
||||||
|
is the most CPU-intensive path.
|
||||||
|
- Jellyfin's subtitle-extraction example for `SSA/ASS` first dumps attachment
|
||||||
|
streams and then extracts the ASS subtitle stream, which reflects the real
|
||||||
|
relationship between ASS subtitles and embedded fonts in MKV releases.
|
||||||
|
- Jellyfin's font documentation says text-based subtitles require fonts to
|
||||||
|
render properly.
|
||||||
|
- Jellyfin's configuration documentation says the web client uses configured
|
||||||
|
fallback fonts for ASS subtitles when other fonts such as MKV attachments or
|
||||||
|
client-side fonts are not available.
|
||||||
|
|
||||||
|
Inference from the Jellyfin compatibility tables:
|
||||||
|
|
||||||
|
- Keeping this subtitle format in Matroska is the safest interoperability
|
||||||
|
choice for Jellyfin consumers.
|
||||||
|
- Converting the subtitle payload to WebVTT would lose styled ASS behavior.
|
||||||
|
- Dropping the attachment streams would force client or fallback font
|
||||||
|
substitution and can change appearance or glyph coverage.
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- FFmpeg documentation: https://ffmpeg.org/ffmpeg.html
|
||||||
|
- Jellyfin codec support: https://jellyfin.org/docs/general/clients/codec-support/
|
||||||
|
- Jellyfin configuration and fonts: https://jellyfin.org/docs/general/administration/configuration/
|
||||||
@@ -1,13 +1,13 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "ffx"
|
name = "ffx"
|
||||||
description = "FFX recoding and metadata managing tool"
|
description = "FFX recoding and metadata managing tool"
|
||||||
version = "0.2.3"
|
version = "0.4.1"
|
||||||
license = {file = "LICENSE.md"}
|
license = {file = "LICENSE.md"}
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"requests",
|
"requests",
|
||||||
"jinja2",
|
"jinja2",
|
||||||
"click",
|
"click",
|
||||||
"textual",
|
"textual>=8.0",
|
||||||
"sqlalchemy",
|
"sqlalchemy",
|
||||||
]
|
]
|
||||||
readme = {file = "README.md", content-type = "text/markdown"}
|
readme = {file = "README.md", content-type = "text/markdown"}
|
||||||
@@ -27,6 +27,11 @@ Homepage = "https://gitea.maveno.de/Javanaut/ffx"
|
|||||||
Repository = "https://gitea.maveno.de/Javanaut/ffx.git"
|
Repository = "https://gitea.maveno.de/Javanaut/ffx.git"
|
||||||
Issues = "https://gitea.maveno.de/Javanaut/ffx/issues"
|
Issues = "https://gitea.maveno.de/Javanaut/ffx/issues"
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
test = [
|
||||||
|
"pytest",
|
||||||
|
]
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = [
|
requires = [
|
||||||
"setuptools",
|
"setuptools",
|
||||||
@@ -35,4 +40,15 @@ requires = [
|
|||||||
build-backend = "setuptools.build_meta"
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
ffx = "ffx.ffx:ffx"
|
ffx = "ffx.cli:ffx"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
python_files = ["test_*.py"]
|
||||||
|
norecursedirs = ["tests/legacy", "tests/support"]
|
||||||
|
addopts = "-ra"
|
||||||
|
markers = [
|
||||||
|
"integration: exercises the FFX bundle with real ffmpeg/ffprobe processes",
|
||||||
|
"pattern_management: covers requirements/pattern_management.md",
|
||||||
|
"subtrack_mapping: covers requirements/subtrack_mapping.md",
|
||||||
|
]
|
||||||
|
|||||||
9
src/ffx/__main__.py
Normal file
9
src/ffx/__main__.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from .cli import ffx
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Console entry point: invoke the ffx click command group."""
    ffx()


if __name__ == "__main__":
    main()
|
||||||
220
src/ffx/_iso_language.py
Normal file
220
src/ffx/_iso_language.py
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
from enum import Enum
|
||||||
|
import difflib
|
||||||
|
|
||||||
|
|
||||||
|
class IsoLanguage(Enum):
    """ISO 639 language registry.

    Each member's value holds the English language name, the two-letter
    ISO 639-1 code and a list of three-letter ISO 639-2 codes (the
    terminology code first, followed by the bibliographic variant where
    one exists, e.g. "deu"/"ger").
    """

    ABKHAZIAN = {"name": "Abkhazian", "iso639_1": "ab", "iso639_2": ["abk"]}
    AFAR = {"name": "Afar", "iso639_1": "aa", "iso639_2": ["aar"]}
    AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]}
    AKAN = {"name": "Akan", "iso639_1": "ak", "iso639_2": ["aka"]}
    ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["sqi", "alb"]}
    AMHARIC = {"name": "Amharic", "iso639_1": "am", "iso639_2": ["amh"]}
    ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]}
    ARAGONESE = {"name": "Aragonese", "iso639_1": "an", "iso639_2": ["arg"]}
    ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["hye", "arm"]}
    ASSAMESE = {"name": "Assamese", "iso639_1": "as", "iso639_2": ["asm"]}
    AVARIC = {"name": "Avaric", "iso639_1": "av", "iso639_2": ["ava"]}
    AVESTAN = {"name": "Avestan", "iso639_1": "ae", "iso639_2": ["ave"]}
    AYMARA = {"name": "Aymara", "iso639_1": "ay", "iso639_2": ["aym"]}
    AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]}
    BAMBARA = {"name": "Bambara", "iso639_1": "bm", "iso639_2": ["bam"]}
    BASHKIR = {"name": "Bashkir", "iso639_1": "ba", "iso639_2": ["bak"]}
    BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["eus", "baq"]}
    BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]}
    BENGALI = {"name": "Bengali", "iso639_1": "bn", "iso639_2": ["ben"]}
    BISLAMA = {"name": "Bislama", "iso639_1": "bi", "iso639_2": ["bis"]}
    BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]}
    BOSNIAN = {"name": "Bosnian", "iso639_1": "bs", "iso639_2": ["bos"]}
    BRETON = {"name": "Breton", "iso639_1": "br", "iso639_2": ["bre"]}
    BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]}
    BURMESE = {"name": "Burmese", "iso639_1": "my", "iso639_2": ["mya", "bur"]}
    CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]}
    CHAMORRO = {"name": "Chamorro", "iso639_1": "ch", "iso639_2": ["cha"]}
    CHECHEN = {"name": "Chechen", "iso639_1": "ce", "iso639_2": ["che"]}
    CHICHEWA = {"name": "Chichewa", "iso639_1": "ny", "iso639_2": ["nya"]}
    CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]}
    CHURCH_SLAVIC = {"name": "Church Slavic", "iso639_1": "cu", "iso639_2": ["chu"]}
    CHUVASH = {"name": "Chuvash", "iso639_1": "cv", "iso639_2": ["chv"]}
    CORNISH = {"name": "Cornish", "iso639_1": "kw", "iso639_2": ["cor"]}
    CORSICAN = {"name": "Corsican", "iso639_1": "co", "iso639_2": ["cos"]}
    CREE = {"name": "Cree", "iso639_1": "cr", "iso639_2": ["cre"]}
    CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]}
    CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["ces", "cze"]}
    DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]}
    DIVEHI = {"name": "Divehi", "iso639_1": "dv", "iso639_2": ["div"]}
    DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]}
    DZONGKHA = {"name": "Dzongkha", "iso639_1": "dz", "iso639_2": ["dzo"]}
    ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]}
    ESPERANTO = {"name": "Esperanto", "iso639_1": "eo", "iso639_2": ["epo"]}
    ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]}
    EWE = {"name": "Ewe", "iso639_1": "ee", "iso639_2": ["ewe"]}
    FAROESE = {"name": "Faroese", "iso639_1": "fo", "iso639_2": ["fao"]}
    FIJIAN = {"name": "Fijian", "iso639_1": "fj", "iso639_2": ["fij"]}
    FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]}
    FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]}
    FULAH = {"name": "Fulah", "iso639_1": "ff", "iso639_2": ["ful"]}
    GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]}
    GANDA = {"name": "Ganda", "iso639_1": "lg", "iso639_2": ["lug"]}
    GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["kat", "geo"]}
    GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]}
    GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["ell", "gre"]}
    GUARANI = {"name": "Guarani", "iso639_1": "gn", "iso639_2": ["grn"]}
    GUJARATI = {"name": "Gujarati", "iso639_1": "gu", "iso639_2": ["guj"]}
    HAITIAN = {"name": "Haitian", "iso639_1": "ht", "iso639_2": ["hat"]}
    HAUSA = {"name": "Hausa", "iso639_1": "ha", "iso639_2": ["hau"]}
    HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]}
    HERERO = {"name": "Herero", "iso639_1": "hz", "iso639_2": ["her"]}
    HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]}
    HIRI_MOTU = {"name": "Hiri Motu", "iso639_1": "ho", "iso639_2": ["hmo"]}
    HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]}
    ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["isl", "ice"]}
    IDO = {"name": "Ido", "iso639_1": "io", "iso639_2": ["ido"]}
    IGBO = {"name": "Igbo", "iso639_1": "ig", "iso639_2": ["ibo"]}
    INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]}
    INTERLINGUA = {"name": "Interlingua", "iso639_1": "ia", "iso639_2": ["ina"]}
    INTERLINGUE = {"name": "Interlingue", "iso639_1": "ie", "iso639_2": ["ile"]}
    INUKTITUT = {"name": "Inuktitut", "iso639_1": "iu", "iso639_2": ["iku"]}
    INUPIAQ = {"name": "Inupiaq", "iso639_1": "ik", "iso639_2": ["ipk"]}
    IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]}
    ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]}
    JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]}
    JAVANESE = {"name": "Javanese", "iso639_1": "jv", "iso639_2": ["jav"]}
    KALAALLISUT = {"name": "Kalaallisut", "iso639_1": "kl", "iso639_2": ["kal"]}
    KANNADA = {"name": "Kannada", "iso639_1": "kn", "iso639_2": ["kan"]}
    KANURI = {"name": "Kanuri", "iso639_1": "kr", "iso639_2": ["kau"]}
    KASHMIRI = {"name": "Kashmiri", "iso639_1": "ks", "iso639_2": ["kas"]}
    KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]}
    KHMER = {"name": "Khmer", "iso639_1": "km", "iso639_2": ["khm"]}
    KIKUYU = {"name": "Kikuyu", "iso639_1": "ki", "iso639_2": ["kik"]}
    KINYARWANDA = {"name": "Kinyarwanda", "iso639_1": "rw", "iso639_2": ["kin"]}
    KIRGHIZ = {"name": "Kirghiz", "iso639_1": "ky", "iso639_2": ["kir"]}
    KOMI = {"name": "Komi", "iso639_1": "kv", "iso639_2": ["kom"]}
    KONGO = {"name": "Kongo", "iso639_1": "kg", "iso639_2": ["kon"]}
    KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]}
    KUANYAMA = {"name": "Kuanyama", "iso639_1": "kj", "iso639_2": ["kua"]}
    KURDISH = {"name": "Kurdish", "iso639_1": "ku", "iso639_2": ["kur"]}
    LAO = {"name": "Lao", "iso639_1": "lo", "iso639_2": ["lao"]}
    LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]}
    LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]}
    LIMBURGAN = {"name": "Limburgan", "iso639_1": "li", "iso639_2": ["lim"]}
    LINGALA = {"name": "Lingala", "iso639_1": "ln", "iso639_2": ["lin"]}
    LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]}
    LUBA_KATANGA = {"name": "Luba-Katanga", "iso639_1": "lu", "iso639_2": ["lub"]}
    LUXEMBOURGISH = {"name": "Luxembourgish", "iso639_1": "lb", "iso639_2": ["ltz"]}
    MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mkd", "mac"]}
    MALAGASY = {"name": "Malagasy", "iso639_1": "mg", "iso639_2": ["mlg"]}
    MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["msa", "may"]}
    MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]}
    MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]}
    MANX = {"name": "Manx", "iso639_1": "gv", "iso639_2": ["glv"]}
    MAORI = {"name": "Maori", "iso639_1": "mi", "iso639_2": ["mri", "mao"]}
    MARATHI = {"name": "Marathi", "iso639_1": "mr", "iso639_2": ["mar"]}
    MARSHALLESE = {"name": "Marshallese", "iso639_1": "mh", "iso639_2": ["mah"]}
    MONGOLIAN = {"name": "Mongolian", "iso639_1": "mn", "iso639_2": ["mon"]}
    NAURU = {"name": "Nauru", "iso639_1": "na", "iso639_2": ["nau"]}
    NAVAJO = {"name": "Navajo", "iso639_1": "nv", "iso639_2": ["nav"]}
    NDONGA = {"name": "Ndonga", "iso639_1": "ng", "iso639_2": ["ndo"]}
    NEPALI = {"name": "Nepali", "iso639_1": "ne", "iso639_2": ["nep"]}
    NORTH_NDEBELE = {"name": "North Ndebele", "iso639_1": "nd", "iso639_2": ["nde"]}
    NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]}
    NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]}
    NORWEGIAN_NYNORSK = {"name": "Nynorsk", "iso639_1": "nn", "iso639_2": ["nno"]}
    OCCITAN = {"name": "Occitan", "iso639_1": "oc", "iso639_2": ["oci"]}
    OJIBWA = {"name": "Ojibwa", "iso639_1": "oj", "iso639_2": ["oji"]}
    ORIYA = {"name": "Oriya", "iso639_1": "or", "iso639_2": ["ori"]}
    OROMO = {"name": "Oromo", "iso639_1": "om", "iso639_2": ["orm"]}
    OSSETIAN = {"name": "Ossetian", "iso639_1": "os", "iso639_2": ["oss"]}
    PALI = {"name": "Pali", "iso639_1": "pi", "iso639_2": ["pli"]}
    PANJABI = {"name": "Panjabi", "iso639_1": "pa", "iso639_2": ["pan"]}
    PERSIAN = {"name": "Persian", "iso639_1": "fa", "iso639_2": ["fas", "per"]}
    POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]}
    PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]}
    PUSHTO = {"name": "Pushto", "iso639_1": "ps", "iso639_2": ["pus"]}
    QUECHUA = {"name": "Quechua", "iso639_1": "qu", "iso639_2": ["que"]}
    ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["ron", "rum"]}
    ROMANSH = {"name": "Romansh", "iso639_1": "rm", "iso639_2": ["roh"]}
    RUNDI = {"name": "Rundi", "iso639_1": "rn", "iso639_2": ["run"]}
    RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]}
    SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]}
    SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]}
    SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]}
    SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]}
    SCOTTISH_GAELIC = {"name": "Scottish Gaelic", "iso639_1": "gd", "iso639_2": ["gla"]}
    SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]}
    SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]}
    SICHUAN_YI = {"name": "Sichuan Yi", "iso639_1": "ii", "iso639_2": ["iii"]}
    SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]}
    SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]}
    SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk", "slo"]}
    SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]}
    SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]}
    SOUTH_NDEBELE = {"name": "South Ndebele", "iso639_1": "nr", "iso639_2": ["nbl"]}
    SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]}
    SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]}
    SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]}
    SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]}
    SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]}
    SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]}
    TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]}
    TAHITIAN = {"name": "Tahitian", "iso639_1": "ty", "iso639_2": ["tah"]}
    TAJIK = {"name": "Tajik", "iso639_1": "tg", "iso639_2": ["tgk"]}
    TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]}
    TATAR = {"name": "Tatar", "iso639_1": "tt", "iso639_2": ["tat"]}
    TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]}
    THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]}
    TIBETAN = {"name": "Tibetan", "iso639_1": "bo", "iso639_2": ["bod", "tib"]}
    TIGRINYA = {"name": "Tigrinya", "iso639_1": "ti", "iso639_2": ["tir"]}
    TONGA = {"name": "Tonga", "iso639_1": "to", "iso639_2": ["ton"]}
    TSONGA = {"name": "Tsonga", "iso639_1": "ts", "iso639_2": ["tso"]}
    TSWANA = {"name": "Tswana", "iso639_1": "tn", "iso639_2": ["tsn"]}
    TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]}
    TURKMEN = {"name": "Turkmen", "iso639_1": "tk", "iso639_2": ["tuk"]}
    TWI = {"name": "Twi", "iso639_1": "tw", "iso639_2": ["twi"]}
    UIGHUR = {"name": "Uighur", "iso639_1": "ug", "iso639_2": ["uig"]}
    UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]}
    URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]}
    UZBEK = {"name": "Uzbek", "iso639_1": "uz", "iso639_2": ["uzb"]}
    VENDA = {"name": "Venda", "iso639_1": "ve", "iso639_2": ["ven"]}
    VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2": ["vie"]}
    VOLAPUK = {"name": "Volapük", "iso639_1": "vo", "iso639_2": ["vol"]}
    WALLOON = {"name": "Walloon", "iso639_1": "wa", "iso639_2": ["wln"]}
    WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["cym", "wel"]}
    WESTERN_FRISIAN = {"name": "Western Frisian", "iso639_1": "fy", "iso639_2": ["fry"]}
    WOLOF = {"name": "Wolof", "iso639_1": "wo", "iso639_2": ["wol"]}
    XHOSA = {"name": "Xhosa", "iso639_1": "xh", "iso639_2": ["xho"]}
    YIDDISH = {"name": "Yiddish", "iso639_1": "yi", "iso639_2": ["yid"]}
    YORUBA = {"name": "Yoruba", "iso639_1": "yo", "iso639_2": ["yor"]}
    ZHUANG = {"name": "Zhuang", "iso639_1": "za", "iso639_2": ["zha"]}
    ZULU = {"name": "Zulu", "iso639_1": "zu", "iso639_2": ["zul"]}

    # NOTE(review): FILIPINO shares iso639_1 "tl" with TAGALOG; lookups by
    # two-letter code are ambiguous for these two — confirm this is intended.
    FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]}

    UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]}


    @staticmethod
    def find(label : str):
        """Fuzzy-match *label* against the language names.

        Uses difflib.get_close_matches, so minor misspellings still
        resolve. Returns UNDEFINED when no close match exists.
        """

        closestMatches = difflib.get_close_matches(label, [l.value["name"] for l in IsoLanguage], n=1)

        if closestMatches:
            # Map the matched name back to its enum member.
            foundLangs = [l for l in IsoLanguage if l.value["name"] == closestMatches[0]]
            return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED
        else:
            return IsoLanguage.UNDEFINED

    @staticmethod
    def findThreeLetter(theeLetter : str):
        """Return the member whose ISO 639-2 codes contain the given
        three-letter code; UNDEFINED when unknown.

        NOTE(review): parameter name 'theeLetter' looks like a typo for
        'threeLetter' — kept as-is in case callers pass it by keyword.
        """
        foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value["iso639_2"]]
        return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED


    def label(self):
        """Return the English language name."""
        return str(self.value["name"])

    def twoLetter(self):
        """Return the ISO 639-1 two-letter code."""
        return str(self.value["iso639_1"])

    def threeLetter(self):
        """Return the primary (first listed) ISO 639-2 three-letter code."""
        return str(self.value["iso639_2"][0])
|
||||||
67
src/ffx/attachment_format.py
Normal file
67
src/ffx/attachment_format.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
from enum import Enum
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
class AttachmentFormat(Enum):
|
||||||
|
|
||||||
|
TTF = {'identifier': 'ttf', 'format': None, 'extension': 'ttf', 'label': 'TTF'}
|
||||||
|
PNG = {'identifier': 'png', 'format': None, 'extension': 'png', 'label': 'PNG'}
|
||||||
|
|
||||||
|
UNKNOWN = {'identifier': 'unknown', 'format': None, 'extension': None, 'label': 'UNKNOWN'}
|
||||||
|
|
||||||
|
def identifier(self):
|
||||||
|
return str(self.value['identifier'])
|
||||||
|
|
||||||
|
def label(self):
|
||||||
|
return str(self.value['label'])
|
||||||
|
|
||||||
|
def format(self):
|
||||||
|
return self.value['format']
|
||||||
|
|
||||||
|
def extension(self):
|
||||||
|
return str(self.value['extension'])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def identify(identifier: str):
|
||||||
|
formats = [f for f in AttachmentFormat if f.value['identifier'] == str(identifier)]
|
||||||
|
if formats:
|
||||||
|
return formats[0]
|
||||||
|
return AttachmentFormat.UNKNOWN
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def identifyFfprobeStream(streamObj: dict):
|
||||||
|
identifier = streamObj.get("codec_name")
|
||||||
|
identifiedFormat = AttachmentFormat.identify(identifier)
|
||||||
|
if identifiedFormat != AttachmentFormat.UNKNOWN:
|
||||||
|
return identifiedFormat
|
||||||
|
|
||||||
|
if str(streamObj.get("codec_type", "")).strip() != "attachment":
|
||||||
|
return AttachmentFormat.UNKNOWN
|
||||||
|
|
||||||
|
tags = streamObj.get("tags", {}) or {}
|
||||||
|
mimetype = str(tags.get("mimetype", "")).strip().lower()
|
||||||
|
filename = str(tags.get("filename", "")).strip().lower()
|
||||||
|
filenameExtension = os.path.splitext(filename)[1]
|
||||||
|
|
||||||
|
if (
|
||||||
|
mimetype in {
|
||||||
|
"font/ttf",
|
||||||
|
"application/x-truetype-font",
|
||||||
|
"application/x-font-ttf",
|
||||||
|
}
|
||||||
|
or "truetype" in mimetype
|
||||||
|
or filenameExtension == ".ttf"
|
||||||
|
):
|
||||||
|
return AttachmentFormat.TTF
|
||||||
|
|
||||||
|
if mimetype in {"image/png", "image/x-png"} or filenameExtension == ".png":
|
||||||
|
return AttachmentFormat.PNG
|
||||||
|
|
||||||
|
return AttachmentFormat.UNKNOWN
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def fromTrackCodec(trackCodec):
|
||||||
|
identifier = getattr(trackCodec, "identifier", None)
|
||||||
|
if callable(identifier):
|
||||||
|
return AttachmentFormat.identify(trackCodec.identifier())
|
||||||
|
return AttachmentFormat.UNKNOWN
|
||||||
@@ -30,6 +30,15 @@ class AudioLayout(Enum):
|
|||||||
except:
|
except:
|
||||||
return AudioLayout.LAYOUT_UNDEFINED
|
return AudioLayout.LAYOUT_UNDEFINED
|
||||||
|
|
||||||
|
# @staticmethod
|
||||||
|
# def fromIndex(index : int):
|
||||||
|
# try:
|
||||||
|
# target_index = int(index)
|
||||||
|
# except (TypeError, ValueError):
|
||||||
|
# return AudioLayout.LAYOUT_UNDEFINED
|
||||||
|
# return next((a for a in AudioLayout if a.value['index'] == target_index),
|
||||||
|
# AudioLayout.LAYOUT_UNDEFINED)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def fromIndex(index : int):
|
def fromIndex(index : int):
|
||||||
try:
|
try:
|
||||||
|
|||||||
1635
src/ffx/cli.py
Executable file
1635
src/ffx/cli.py
Executable file
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,12 @@
|
|||||||
import os, json
|
import os, json
|
||||||
|
|
||||||
|
from .constants import (
|
||||||
|
DEFAULT_SHOW_INDEX_EPISODE_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDEX_SEASON_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDICATOR_SEASON_DIGITS,
|
||||||
|
)
|
||||||
|
|
||||||
class ConfigurationController():
|
class ConfigurationController():
|
||||||
|
|
||||||
CONFIG_FILENAME = 'ffx.json'
|
CONFIG_FILENAME = 'ffx.json'
|
||||||
@@ -8,7 +15,13 @@ class ConfigurationController():
|
|||||||
|
|
||||||
DATABASE_PATH_CONFIG_KEY = 'databasePath'
|
DATABASE_PATH_CONFIG_KEY = 'databasePath'
|
||||||
LOG_DIRECTORY_CONFIG_KEY = 'logDirectory'
|
LOG_DIRECTORY_CONFIG_KEY = 'logDirectory'
|
||||||
|
SUBTITLES_DIRECTORY_CONFIG_KEY = 'subtitlesDirectory'
|
||||||
|
LANGUAGE_CONFIG_KEY = 'language'
|
||||||
OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate'
|
OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate'
|
||||||
|
DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY = 'defaultIndexSeasonDigits'
|
||||||
|
DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndexEpisodeDigits'
|
||||||
|
DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY = 'defaultIndicatorSeasonDigits'
|
||||||
|
DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndicatorEpisodeDigits'
|
||||||
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@@ -49,6 +62,51 @@ class ConfigurationController():
|
|||||||
def getDatabaseFilePath(self):
|
def getDatabaseFilePath(self):
|
||||||
return self.__databaseFilePath
|
return self.__databaseFilePath
|
||||||
|
|
||||||
|
def getSubtitlesDirectoryPath(self):
|
||||||
|
subtitlesDirectory = self.__configurationData.get(
|
||||||
|
ConfigurationController.SUBTITLES_DIRECTORY_CONFIG_KEY,
|
||||||
|
'',
|
||||||
|
)
|
||||||
|
return os.path.expanduser(str(subtitlesDirectory)) if subtitlesDirectory else ''
|
||||||
|
|
||||||
|
def getLanguage(self):
|
||||||
|
return str(self.__configurationData.get(ConfigurationController.LANGUAGE_CONFIG_KEY, '')).strip()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def getConfiguredIntegerValue(cls, configurationData: dict, configKey: str, defaultValue: int) -> int:
|
||||||
|
configuredValue = configurationData.get(configKey, defaultValue)
|
||||||
|
try:
|
||||||
|
return int(configuredValue)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return int(defaultValue)
|
||||||
|
|
||||||
|
def getDefaultIndexSeasonDigits(self):
|
||||||
|
return ConfigurationController.getConfiguredIntegerValue(
|
||||||
|
self.__configurationData,
|
||||||
|
ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY,
|
||||||
|
DEFAULT_SHOW_INDEX_SEASON_DIGITS,
|
||||||
|
)
|
||||||
|
|
||||||
|
def getDefaultIndexEpisodeDigits(self):
|
||||||
|
return ConfigurationController.getConfiguredIntegerValue(
|
||||||
|
self.__configurationData,
|
||||||
|
ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY,
|
||||||
|
DEFAULT_SHOW_INDEX_EPISODE_DIGITS,
|
||||||
|
)
|
||||||
|
|
||||||
|
def getDefaultIndicatorSeasonDigits(self):
|
||||||
|
return ConfigurationController.getConfiguredIntegerValue(
|
||||||
|
self.__configurationData,
|
||||||
|
ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY,
|
||||||
|
DEFAULT_SHOW_INDICATOR_SEASON_DIGITS,
|
||||||
|
)
|
||||||
|
|
||||||
|
def getDefaultIndicatorEpisodeDigits(self):
|
||||||
|
return ConfigurationController.getConfiguredIntegerValue(
|
||||||
|
self.__configurationData,
|
||||||
|
ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY,
|
||||||
|
DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS,
|
||||||
|
)
|
||||||
|
|
||||||
def getData(self):
|
def getData(self):
|
||||||
return self.__configurationData
|
return self.__configurationData
|
||||||
|
|||||||
80
src/ffx/confirm_screen.py
Normal file
80
src/ffx/confirm_screen.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
from textual.containers import Grid
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Button, Footer, Header, Static
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import build_screen_log_pane
|
||||||
|
|
||||||
|
class ConfirmScreen(Screen):
|
||||||
|
|
||||||
|
BINDINGS = [
|
||||||
|
("escape", "back", t("Back")),
|
||||||
|
]
|
||||||
|
|
||||||
|
CSS = """
|
||||||
|
|
||||||
|
Grid {
|
||||||
|
grid-size: 4 7;
|
||||||
|
grid-rows: 2 2 2 2 2 2 2;
|
||||||
|
grid-columns: 1fr 1fr 1fr 1fr;
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
min-width: 80;
|
||||||
|
padding: 1;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
Button {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.four {
|
||||||
|
column-span: 4;
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
message: str,
|
||||||
|
confirm_label: str = "Confirm",
|
||||||
|
cancel_label: str = "Cancel",
|
||||||
|
):
|
||||||
|
super().__init__()
|
||||||
|
self.__message = str(message)
|
||||||
|
self.__confirmLabel = str(t(confirm_label))
|
||||||
|
self.__cancelLabel = str(t(cancel_label))
|
||||||
|
|
||||||
|
def compose(self):
|
||||||
|
yield Header()
|
||||||
|
|
||||||
|
with Grid():
|
||||||
|
# Row 1
|
||||||
|
yield Static(self.__message, classes="four")
|
||||||
|
|
||||||
|
# Row 2
|
||||||
|
yield Static(" ", classes="four")
|
||||||
|
|
||||||
|
# Row 3
|
||||||
|
yield Button(self.__confirmLabel, id="confirm_button")
|
||||||
|
yield Button(self.__cancelLabel, id="cancel_button")
|
||||||
|
|
||||||
|
yield build_screen_log_pane()
|
||||||
|
yield Footer()
|
||||||
|
|
||||||
|
|
||||||
|
def on_mount(self):
|
||||||
|
|
||||||
|
if getattr(self, 'context', {}).get('debug', False):
|
||||||
|
self.title = f"{self.app.title} - {self.__class__.__name__}"
|
||||||
|
|
||||||
|
|
||||||
|
def on_button_pressed(self, event: Button.Pressed) -> None:
|
||||||
|
if event.button.id == "confirm_button":
|
||||||
|
self.dismiss(True)
|
||||||
|
|
||||||
|
if event.button.id == "cancel_button":
|
||||||
|
self.dismiss(False)
|
||||||
|
|
||||||
|
def action_back(self):
|
||||||
|
self.dismiss(False)
|
||||||
@@ -1,15 +1,30 @@
|
|||||||
VERSION='0.2.3'
|
VERSION='0.4.1'
|
||||||
DATABASE_VERSION = 2
|
DATABASE_VERSION = 3
|
||||||
|
|
||||||
DEFAULT_QUALITY = 32
|
DEFAULT_QUALITY = 32
|
||||||
DEFAULT_AV1_PRESET = 5
|
DEFAULT_AV1_PRESET = 5
|
||||||
|
|
||||||
|
DEFAULT_VIDEO_ENCODER_LABEL = "vp9"
|
||||||
|
DEFAULT_CONTAINER_FORMAT = "webm"
|
||||||
|
DEFAULT_CONTAINER_EXTENSION = "webm"
|
||||||
|
SUPPORTED_INPUT_FILE_EXTENSIONS = ("mkv", "mp4", "avi", "flv", "webm")
|
||||||
|
FFMPEG_COMMAND_TOKENS = ("ffmpeg", "-y")
|
||||||
|
FFMPEG_NULL_OUTPUT_TOKENS = ("-f", "null", "/dev/null")
|
||||||
|
|
||||||
DEFAULT_STEREO_BANDWIDTH = "112"
|
DEFAULT_STEREO_BANDWIDTH = "112"
|
||||||
DEFAULT_AC3_BANDWIDTH = "256"
|
DEFAULT_AC3_BANDWIDTH = "256"
|
||||||
DEFAULT_DTS_BANDWIDTH = "320"
|
DEFAULT_DTS_BANDWIDTH = "320"
|
||||||
DEFAULT_7_1_BANDWIDTH = "384"
|
DEFAULT_7_1_BANDWIDTH = "384"
|
||||||
|
|
||||||
|
DEFAULT_CROPDETECT_SEEK_SECONDS = 60
|
||||||
|
DEFAULT_CROPDETECT_DURATION_SECONDS = 180
|
||||||
|
|
||||||
DEFAULT_cut_start = 60
|
DEFAULT_cut_start = 60
|
||||||
DEFAULT_cut_length = 180
|
DEFAULT_cut_length = 180
|
||||||
|
|
||||||
|
DEFAULT_SHOW_INDEX_SEASON_DIGITS = 2
|
||||||
|
DEFAULT_SHOW_INDEX_EPISODE_DIGITS = 2
|
||||||
|
DEFAULT_SHOW_INDICATOR_SEASON_DIGITS = 2
|
||||||
|
DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS = 2
|
||||||
|
|
||||||
DEFAULT_OUTPUT_FILENAME_TEMPLATE = '{{ ffx_show_name }} - {{ ffx_index }}{{ ffx_index_separator }}{{ ffx_episode_name }}{{ ffx_indicator_separator }}{{ ffx_indicator }}'
|
DEFAULT_OUTPUT_FILENAME_TEMPLATE = '{{ ffx_show_name }} - {{ ffx_index }}{{ ffx_index_separator }}{{ ffx_episode_name }}{{ ffx_indicator_separator }}{{ ffx_indicator }}'
|
||||||
|
|||||||
@@ -1,20 +1,25 @@
|
|||||||
import os, click
|
import os, shutil, click
|
||||||
|
|
||||||
from sqlalchemy import create_engine
|
from sqlalchemy import create_engine, inspect, text
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
# Import the full model package so SQLAlchemy registers every mapped class
|
||||||
|
# before metadata creation and the first ORM query.
|
||||||
|
import ffx.model
|
||||||
from ffx.model.show import Base
|
from ffx.model.show import Base
|
||||||
|
|
||||||
from ffx.model.property import Property
|
from ffx.model.property import Property
|
||||||
|
from ffx.model.migration import (
|
||||||
|
DatabaseVersionException,
|
||||||
|
getMigrationPlan,
|
||||||
|
migrateDatabase,
|
||||||
|
)
|
||||||
|
|
||||||
from ffx.constants import DATABASE_VERSION
|
from ffx.constants import DATABASE_VERSION
|
||||||
|
|
||||||
|
|
||||||
DATABASE_VERSION_KEY = 'database_version'
|
DATABASE_VERSION_KEY = 'database_version'
|
||||||
|
EXPECTED_TABLE_NAMES = set(Base.metadata.tables.keys())
|
||||||
class DatabaseVersionException(Exception):
|
|
||||||
def __init__(self, errorMessage):
|
|
||||||
super().__init__(errorMessage)
|
|
||||||
|
|
||||||
def databaseContext(databasePath: str = ''):
|
def databaseContext(databasePath: str = ''):
|
||||||
|
|
||||||
@@ -29,12 +34,18 @@ def databaseContext(databasePath: str = ''):
|
|||||||
if not os.path.exists(ffxVarDir):
|
if not os.path.exists(ffxVarDir):
|
||||||
os.makedirs(ffxVarDir)
|
os.makedirs(ffxVarDir)
|
||||||
databasePath = os.path.join(ffxVarDir, 'ffx.db')
|
databasePath = os.path.join(ffxVarDir, 'ffx.db')
|
||||||
|
else:
|
||||||
|
databasePath = os.path.expanduser(databasePath)
|
||||||
|
|
||||||
|
if databasePath != ':memory:':
|
||||||
|
databasePath = os.path.abspath(databasePath)
|
||||||
|
|
||||||
|
databaseContext['path'] = databasePath
|
||||||
databaseContext['url'] = f"sqlite:///{databasePath}"
|
databaseContext['url'] = f"sqlite:///{databasePath}"
|
||||||
databaseContext['engine'] = create_engine(databaseContext['url'])
|
databaseContext['engine'] = create_engine(databaseContext['url'])
|
||||||
databaseContext['session'] = sessionmaker(bind=databaseContext['engine'])
|
databaseContext['session'] = sessionmaker(bind=databaseContext['engine'])
|
||||||
|
|
||||||
Base.metadata.create_all(databaseContext['engine'])
|
bootstrapDatabaseIfNeeded(databaseContext)
|
||||||
|
|
||||||
# isSyncronuous = False
|
# isSyncronuous = False
|
||||||
# while not isSyncronuous:
|
# while not isSyncronuous:
|
||||||
@@ -51,14 +62,126 @@ def databaseContext(databasePath: str = ''):
|
|||||||
|
|
||||||
return databaseContext
|
return databaseContext
|
||||||
|
|
||||||
|
|
||||||
|
def databaseNeedsBootstrap(databaseContext) -> bool:
|
||||||
|
inspector = inspect(databaseContext['engine'])
|
||||||
|
existingTableNames = set(inspector.get_table_names())
|
||||||
|
return not EXPECTED_TABLE_NAMES.issubset(existingTableNames)
|
||||||
|
|
||||||
|
|
||||||
|
def bootstrapDatabaseIfNeeded(databaseContext):
|
||||||
|
if not databaseNeedsBootstrap(databaseContext):
|
||||||
|
return
|
||||||
|
|
||||||
|
Base.metadata.create_all(databaseContext['engine'])
|
||||||
|
|
||||||
|
|
||||||
def ensureDatabaseVersion(databaseContext):
|
def ensureDatabaseVersion(databaseContext):
|
||||||
|
|
||||||
currentDatabaseVersion = getDatabaseVersion(databaseContext)
|
currentDatabaseVersion = getDatabaseVersion(databaseContext)
|
||||||
if currentDatabaseVersion:
|
if not currentDatabaseVersion:
|
||||||
if currentDatabaseVersion != DATABASE_VERSION:
|
|
||||||
raise DatabaseVersionException(f"Current database version ({currentDatabaseVersion}) does not match required ({DATABASE_VERSION})")
|
|
||||||
else:
|
|
||||||
setDatabaseVersion(databaseContext, DATABASE_VERSION)
|
setDatabaseVersion(databaseContext, DATABASE_VERSION)
|
||||||
|
return
|
||||||
|
|
||||||
|
if currentDatabaseVersion > DATABASE_VERSION:
|
||||||
|
raise DatabaseVersionException(
|
||||||
|
f"Current database version ({currentDatabaseVersion}) does not match required ({DATABASE_VERSION})"
|
||||||
|
)
|
||||||
|
|
||||||
|
if currentDatabaseVersion < DATABASE_VERSION:
|
||||||
|
promptForDatabaseMigration(databaseContext, currentDatabaseVersion, DATABASE_VERSION)
|
||||||
|
migrateDatabase(databaseContext, currentDatabaseVersion, DATABASE_VERSION, setDatabaseVersion)
|
||||||
|
currentDatabaseVersion = getDatabaseVersion(databaseContext)
|
||||||
|
|
||||||
|
if currentDatabaseVersion != DATABASE_VERSION:
|
||||||
|
raise DatabaseVersionException(
|
||||||
|
f"Current database version ({currentDatabaseVersion}) does not match required ({DATABASE_VERSION})"
|
||||||
|
)
|
||||||
|
|
||||||
|
ensureCurrentSchemaCompatibility(databaseContext)
|
||||||
|
|
||||||
|
|
||||||
|
def ensureCurrentSchemaCompatibility(databaseContext):
|
||||||
|
engine = databaseContext['engine']
|
||||||
|
inspector = inspect(engine)
|
||||||
|
showColumns = {
|
||||||
|
column['name']
|
||||||
|
for column in inspector.get_columns('shows')
|
||||||
|
}
|
||||||
|
|
||||||
|
alterStatements = []
|
||||||
|
if 'quality' not in showColumns:
|
||||||
|
alterStatements.append("ALTER TABLE shows ADD COLUMN quality INTEGER DEFAULT 0")
|
||||||
|
if 'notes' not in showColumns:
|
||||||
|
alterStatements.append("ALTER TABLE shows ADD COLUMN notes TEXT DEFAULT ''")
|
||||||
|
|
||||||
|
if not alterStatements:
|
||||||
|
return
|
||||||
|
|
||||||
|
with engine.begin() as connection:
|
||||||
|
for alterStatement in alterStatements:
|
||||||
|
connection.execute(text(alterStatement))
|
||||||
|
|
||||||
|
|
||||||
|
def promptForDatabaseMigration(databaseContext, currentDatabaseVersion: int, targetDatabaseVersion: int):
|
||||||
|
migrationPlan = getMigrationPlan(currentDatabaseVersion, targetDatabaseVersion)
|
||||||
|
|
||||||
|
click.echo("Database migration required.")
|
||||||
|
click.echo(f"Current version: {currentDatabaseVersion}")
|
||||||
|
click.echo(f"Target version: {targetDatabaseVersion}")
|
||||||
|
click.echo("Steps required:")
|
||||||
|
|
||||||
|
missingSteps = []
|
||||||
|
for migrationStep in migrationPlan:
|
||||||
|
moduleStatus = "present" if migrationStep.modulePresent else "missing"
|
||||||
|
click.echo(
|
||||||
|
f" {migrationStep.versionFrom} -> {migrationStep.versionTo}: "
|
||||||
|
+ f"{migrationStep.moduleName} [{moduleStatus}]"
|
||||||
|
)
|
||||||
|
if not migrationStep.modulePresent:
|
||||||
|
missingSteps.append(migrationStep)
|
||||||
|
|
||||||
|
if missingSteps:
|
||||||
|
firstMissingStep = missingSteps[0]
|
||||||
|
raise DatabaseVersionException(
|
||||||
|
f"No migration path from database version "
|
||||||
|
+ f"{firstMissingStep.versionFrom} to {firstMissingStep.versionTo}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not click.confirm(
|
||||||
|
"Create a backup and continue with database migration?",
|
||||||
|
default=True,
|
||||||
|
):
|
||||||
|
raise click.ClickException("Database migration aborted by user.")
|
||||||
|
|
||||||
|
backupPath = backupDatabaseBeforeMigration(
|
||||||
|
databaseContext,
|
||||||
|
currentDatabaseVersion,
|
||||||
|
targetDatabaseVersion,
|
||||||
|
)
|
||||||
|
click.echo(f"Database backup created: {backupPath}")
|
||||||
|
|
||||||
|
|
||||||
|
def backupDatabaseBeforeMigration(databaseContext, currentDatabaseVersion: int, targetDatabaseVersion: int) -> str:
|
||||||
|
databasePath = databaseContext.get('path', '')
|
||||||
|
if not databasePath or databasePath == ':memory:':
|
||||||
|
raise click.ClickException("Database migration backup requires a file-backed SQLite database.")
|
||||||
|
|
||||||
|
if not os.path.isfile(databasePath):
|
||||||
|
raise click.ClickException(f"Database file not found for backup: {databasePath}")
|
||||||
|
|
||||||
|
backupPath = f"{databasePath}.v{currentDatabaseVersion}-to-v{targetDatabaseVersion}.bak"
|
||||||
|
backupIndex = 1
|
||||||
|
while os.path.exists(backupPath):
|
||||||
|
backupPath = (
|
||||||
|
f"{databasePath}.v{currentDatabaseVersion}-to-v{targetDatabaseVersion}.{backupIndex}.bak"
|
||||||
|
)
|
||||||
|
backupIndex += 1
|
||||||
|
|
||||||
|
databaseContext['engine'].dispose()
|
||||||
|
shutil.copy2(databasePath, backupPath)
|
||||||
|
|
||||||
|
return backupPath
|
||||||
|
|
||||||
|
|
||||||
def getDatabaseVersion(databaseContext):
|
def getDatabaseVersion(databaseContext):
|
||||||
@@ -67,9 +190,9 @@ def getDatabaseVersion(databaseContext):
|
|||||||
|
|
||||||
Session = databaseContext['session']
|
Session = databaseContext['session']
|
||||||
s = Session()
|
s = Session()
|
||||||
q = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY)
|
versionProperty = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY).first()
|
||||||
|
|
||||||
return int(q.first().value) if q.count() else 0
|
return int(versionProperty.value) if versionProperty is not None else 0
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise click.ClickException(f"getDatabaseVersion(): {repr(ex)}")
|
raise click.ClickException(f"getDatabaseVersion(): {repr(ex)}")
|
||||||
|
|||||||
24
src/ffx/diagnostics/__init__.py
Normal file
24
src/ffx/diagnostics/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from .base import FfmpegRemedy, FfmpegRemedyDecision, FfmpegSkipFileWarning
|
||||||
|
from .monitor import FfmpegCommandRunner, FfmpegDiagnosticMonitor
|
||||||
|
from .retry_with_generated_pts import RetryWithGeneratedPtsRemedy
|
||||||
|
from .state import (
|
||||||
|
getDiagnosticsState,
|
||||||
|
getUnremediedIssues,
|
||||||
|
iterUnremediedIssueSummaryLines,
|
||||||
|
recordUnremediedIssue,
|
||||||
|
)
|
||||||
|
from .warn_corrupt_mpeg_audio import WarnCorruptMpegAudioRemedy
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"FfmpegCommandRunner",
|
||||||
|
"FfmpegDiagnosticMonitor",
|
||||||
|
"FfmpegRemedy",
|
||||||
|
"FfmpegRemedyDecision",
|
||||||
|
"FfmpegSkipFileWarning",
|
||||||
|
"RetryWithGeneratedPtsRemedy",
|
||||||
|
"WarnCorruptMpegAudioRemedy",
|
||||||
|
"getDiagnosticsState",
|
||||||
|
"getUnremediedIssues",
|
||||||
|
"iterUnremediedIssueSummaryLines",
|
||||||
|
"recordUnremediedIssue",
|
||||||
|
]
|
||||||
33
src/ffx/diagnostics/base.py
Normal file
33
src/ffx/diagnostics/base.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
|
||||||
|
class FfmpegSkipFileWarning(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class FfmpegRemedyDecision:
|
||||||
|
stop_process: bool = False
|
||||||
|
retry_input_tokens: tuple[str, ...] = ()
|
||||||
|
skip_file: bool = False
|
||||||
|
console_warning: str = ""
|
||||||
|
summary_identifier: str = ""
|
||||||
|
unremedied_issue_identifier: str = ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def retry_requested(self) -> bool:
|
||||||
|
return bool(self.retry_input_tokens)
|
||||||
|
|
||||||
|
|
||||||
|
class FfmpegRemedy:
|
||||||
|
identifier = "ffmpeg-remedy"
|
||||||
|
harmless = False
|
||||||
|
|
||||||
|
def inspect_line(
|
||||||
|
self,
|
||||||
|
line: str,
|
||||||
|
session: "FfmpegDiagnosticMonitor",
|
||||||
|
) -> FfmpegRemedyDecision | None:
|
||||||
|
raise NotImplementedError
|
||||||
222
src/ffx/diagnostics/monitor.py
Normal file
222
src/ffx/diagnostics/monitor.py
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from ffx.logging_utils import get_ffx_logger
|
||||||
|
from ffx.process import executeProcess
|
||||||
|
|
||||||
|
from .base import FfmpegSkipFileWarning, FfmpegRemedy
|
||||||
|
from .retry_with_generated_pts import RetryWithGeneratedPtsRemedy
|
||||||
|
from .state import recordUnremediedIssue
|
||||||
|
from .warn_corrupt_mpeg_audio import WarnCorruptMpegAudioRemedy
|
||||||
|
|
||||||
|
UNHANDLED_DIAGNOSTIC_PATTERNS = (
|
||||||
|
re.compile(r"\bwarning\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\berror\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\bfailed\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\binvalid\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\bmissing\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\bcorrupt\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\boverflow\b", re.IGNORECASE),
|
||||||
|
re.compile(r"\bdeprecated\b", re.IGNORECASE),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class FfmpegDiagnosticMonitor:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
context: dict | None,
|
||||||
|
command_sequence: list[str],
|
||||||
|
*,
|
||||||
|
remedies: list[FfmpegRemedy] | None = None,
|
||||||
|
emittedWarnings: set[str] | None = None,
|
||||||
|
):
|
||||||
|
self.context = context or {}
|
||||||
|
self.command_sequence = list(command_sequence)
|
||||||
|
self.logger = self.context.get("logger", get_ffx_logger())
|
||||||
|
self.source_path = str(self.context.get("current_source_path", "")).strip()
|
||||||
|
self.remedies = remedies or [
|
||||||
|
RetryWithGeneratedPtsRemedy(),
|
||||||
|
WarnCorruptMpegAudioRemedy(),
|
||||||
|
]
|
||||||
|
self._emittedWarnings = emittedWarnings if emittedWarnings is not None else set()
|
||||||
|
self.retry_input_tokens: tuple[str, ...] = ()
|
||||||
|
self.skip_file = False
|
||||||
|
self.skip_file_message = ""
|
||||||
|
|
||||||
|
def describe_source(self) -> str:
|
||||||
|
return self.source_path if self.source_path else "current file"
|
||||||
|
|
||||||
|
def command_contains_tokens(self, tokens: tuple[str, ...]) -> bool:
|
||||||
|
tokenCount = len(tokens)
|
||||||
|
if tokenCount == 0:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return any(
|
||||||
|
tuple(self.command_sequence[index:index + tokenCount]) == tuple(tokens)
|
||||||
|
for index in range(len(self.command_sequence) - tokenCount + 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
def emitConsoleWarning(self, warningMessage: str) -> None:
|
||||||
|
if warningMessage and warningMessage not in self._emittedWarnings:
|
||||||
|
self.logger.warning(warningMessage)
|
||||||
|
self._emittedWarnings.add(warningMessage)
|
||||||
|
|
||||||
|
def recordUnremediedIssue(self, issueIdentifier: str, issueLine: str) -> None:
|
||||||
|
isFirstIssueForFile = recordUnremediedIssue(
|
||||||
|
self.context,
|
||||||
|
self.describe_source(),
|
||||||
|
issueIdentifier,
|
||||||
|
)
|
||||||
|
if not isFirstIssueForFile:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.emitConsoleWarning(
|
||||||
|
f"ffmpeg reported a diagnostic with no automatic remedy while converting "
|
||||||
|
+ f"{self.describe_source()}. FFX will continue, but review the output "
|
||||||
|
+ f"file. First unhandled line: {issueLine}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def lineLooksLikeUnhandledDiagnostic(self, line: str) -> bool:
|
||||||
|
return any(pattern.search(line) for pattern in UNHANDLED_DIAGNOSTIC_PATTERNS)
|
||||||
|
|
||||||
|
def getUnhandledDiagnosticIdentifier(self, line: str) -> str:
|
||||||
|
loweredLine = str(line).lower()
|
||||||
|
|
||||||
|
if any(token in loweredLine for token in ("error", "failed", "invalid", "missing", "corrupt", "overflow")):
|
||||||
|
return "unhandled-error"
|
||||||
|
if any(token in loweredLine for token in ("warning", "deprecated")):
|
||||||
|
return "unhandled-warning"
|
||||||
|
return "unhandled-diagnostic"
|
||||||
|
|
||||||
|
def getSummaryIdentifier(
|
||||||
|
self,
|
||||||
|
remedy: FfmpegRemedy,
|
||||||
|
decision,
|
||||||
|
) -> str:
|
||||||
|
explicitIdentifier = str(decision.summary_identifier).strip()
|
||||||
|
if explicitIdentifier:
|
||||||
|
return explicitIdentifier
|
||||||
|
|
||||||
|
remedyIdentifier = str(getattr(remedy, "identifier", "")).strip()
|
||||||
|
if remedyIdentifier and remedyIdentifier != FfmpegRemedy.identifier:
|
||||||
|
return remedyIdentifier
|
||||||
|
|
||||||
|
return str(decision.unremedied_issue_identifier).strip()
|
||||||
|
|
||||||
|
def shouldRecordSummary(
|
||||||
|
self,
|
||||||
|
remedy: FfmpegRemedy,
|
||||||
|
decision,
|
||||||
|
) -> bool:
|
||||||
|
if getattr(remedy, "harmless", False):
|
||||||
|
return False
|
||||||
|
|
||||||
|
if decision.retry_requested and not decision.skip_file:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return bool(self.getSummaryIdentifier(remedy, decision))
|
||||||
|
|
||||||
|
def handle_stderr_line(self, line: str) -> bool:
|
||||||
|
strippedLine = str(line).strip()
|
||||||
|
if not strippedLine:
|
||||||
|
return False
|
||||||
|
|
||||||
|
for remedy in self.remedies:
|
||||||
|
decision = remedy.inspect_line(strippedLine, self)
|
||||||
|
if decision is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.emitConsoleWarning(decision.console_warning)
|
||||||
|
|
||||||
|
if decision.retry_requested:
|
||||||
|
self.retry_input_tokens = tuple(decision.retry_input_tokens)
|
||||||
|
|
||||||
|
if self.shouldRecordSummary(remedy, decision):
|
||||||
|
recordUnremediedIssue(
|
||||||
|
self.context,
|
||||||
|
self.describe_source(),
|
||||||
|
self.getSummaryIdentifier(remedy, decision),
|
||||||
|
)
|
||||||
|
|
||||||
|
if decision.skip_file:
|
||||||
|
self.skip_file = True
|
||||||
|
self.skip_file_message = (
|
||||||
|
decision.console_warning
|
||||||
|
or f"Skipping file {self.describe_source()} because ffmpeg reported a fatal diagnostic."
|
||||||
|
)
|
||||||
|
|
||||||
|
return bool(decision.stop_process)
|
||||||
|
|
||||||
|
if self.lineLooksLikeUnhandledDiagnostic(strippedLine):
|
||||||
|
self.recordUnremediedIssue(
|
||||||
|
self.getUnhandledDiagnosticIdentifier(strippedLine),
|
||||||
|
strippedLine,
|
||||||
|
)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def retry_requested(self) -> bool:
|
||||||
|
return bool(self.retry_input_tokens)
|
||||||
|
|
||||||
|
|
||||||
|
def insertFfmpegInputOptions(
|
||||||
|
commandSequence: list[str],
|
||||||
|
extraTokens: tuple[str, ...],
|
||||||
|
) -> list[str]:
|
||||||
|
if not extraTokens:
|
||||||
|
return list(commandSequence)
|
||||||
|
|
||||||
|
if not commandSequence:
|
||||||
|
return list(extraTokens)
|
||||||
|
|
||||||
|
return [commandSequence[0]] + list(extraTokens) + list(commandSequence[1:])
|
||||||
|
|
||||||
|
|
||||||
|
class FfmpegCommandRunner:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
context: dict | None,
|
||||||
|
*,
|
||||||
|
remedies: list[FfmpegRemedy] | None = None,
|
||||||
|
):
|
||||||
|
self.__context = context or {}
|
||||||
|
self.__remedies = remedies
|
||||||
|
|
||||||
|
def execute(
|
||||||
|
self,
|
||||||
|
commandSequence: list[str],
|
||||||
|
*,
|
||||||
|
directory: str = None,
|
||||||
|
timeoutSeconds: float = None,
|
||||||
|
):
|
||||||
|
emittedWarnings: set[str] = set()
|
||||||
|
attemptCommandSequence = list(commandSequence)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
monitor = FfmpegDiagnosticMonitor(
|
||||||
|
self.__context,
|
||||||
|
attemptCommandSequence,
|
||||||
|
remedies=self.__remedies,
|
||||||
|
emittedWarnings=emittedWarnings,
|
||||||
|
)
|
||||||
|
out, err, rc = executeProcess(
|
||||||
|
attemptCommandSequence,
|
||||||
|
directory=directory,
|
||||||
|
context=self.__context,
|
||||||
|
timeoutSeconds=timeoutSeconds,
|
||||||
|
stderrLineHandler=monitor.handle_stderr_line,
|
||||||
|
)
|
||||||
|
|
||||||
|
if monitor.retry_requested:
|
||||||
|
attemptCommandSequence = insertFfmpegInputOptions(
|
||||||
|
attemptCommandSequence,
|
||||||
|
monitor.retry_input_tokens,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if monitor.skip_file:
|
||||||
|
raise FfmpegSkipFileWarning(monitor.skip_file_message)
|
||||||
|
|
||||||
|
return out, err, rc
|
||||||
41
src/ffx/diagnostics/retry_with_generated_pts.py
Normal file
41
src/ffx/diagnostics/retry_with_generated_pts.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from .base import FfmpegRemedy, FfmpegRemedyDecision
|
||||||
|
|
||||||
|
|
||||||
|
class RetryWithGeneratedPtsRemedy(FfmpegRemedy):
    """Remedy for ffmpeg's "Timestamps are unset" diagnostic.

    First occurrence: stop the process and retry with ``-fflags +genpts``.
    If the diagnostic shows up again after that retry, the file is skipped.
    """

    identifier = "retry-with-generated-pts"
    RETRY_INPUT_TOKENS = ("-fflags", "+genpts")
    TIMESTAMP_UNSET_PATTERN = re.compile(
        r"Timestamps are unset in a packet for stream \d+"
    )

    def inspect_line(
        self,
        line: str,
        session: "FfmpegDiagnosticMonitor",
    ) -> FfmpegRemedyDecision | None:
        if not self.TIMESTAMP_UNSET_PATTERN.search(line):
            return None

        sourceLabel = session.describe_source()

        if session.command_contains_tokens(self.RETRY_INPUT_TOKENS):
            # The genpts retry already happened and did not help: give up.
            return FfmpegRemedyDecision(
                stop_process=True,
                skip_file=True,
                console_warning=(
                    f"Skipping file {sourceLabel}: ffmpeg still reported "
                    f"unset packet timestamps after retry with -fflags +genpts."
                ),
                unremedied_issue_identifier="timestamp-unset-after-genpts",
            )

        # First occurrence: request a retry with generated timestamps.
        return FfmpegRemedyDecision(
            stop_process=True,
            retry_input_tokens=self.RETRY_INPUT_TOKENS,
            console_warning=(
                f"ffmpeg reported unset packet timestamps for {sourceLabel}. "
                f"Stopping early and retrying with -fflags +genpts."
            ),
        )
|
||||||
53
src/ffx/diagnostics/state.py
Normal file
53
src/ffx/diagnostics/state.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
DIAGNOSTICS_STATE_KEY = "diagnostics_state"
|
||||||
|
UNREMEDIED_ISSUES_KEY = "unremedied_issues"
|
||||||
|
|
||||||
|
|
||||||
|
def getDiagnosticsState(context: dict | None) -> dict:
|
||||||
|
if context is None:
|
||||||
|
return {UNREMEDIED_ISSUES_KEY: {}}
|
||||||
|
|
||||||
|
if DIAGNOSTICS_STATE_KEY not in context:
|
||||||
|
context[DIAGNOSTICS_STATE_KEY] = {
|
||||||
|
UNREMEDIED_ISSUES_KEY: {},
|
||||||
|
}
|
||||||
|
|
||||||
|
return context[DIAGNOSTICS_STATE_KEY]
|
||||||
|
|
||||||
|
|
||||||
|
def recordUnremediedIssue(
|
||||||
|
context: dict | None,
|
||||||
|
sourcePath: str,
|
||||||
|
identifier: str,
|
||||||
|
) -> bool:
|
||||||
|
if not sourcePath:
|
||||||
|
return False
|
||||||
|
|
||||||
|
diagnosticsState = getDiagnosticsState(context)
|
||||||
|
unremediedIssues = diagnosticsState[UNREMEDIED_ISSUES_KEY]
|
||||||
|
issueList = unremediedIssues.setdefault(sourcePath, [])
|
||||||
|
strippedIdentifier = str(identifier).strip()
|
||||||
|
|
||||||
|
if not strippedIdentifier or strippedIdentifier in issueList:
|
||||||
|
return False
|
||||||
|
|
||||||
|
issueList.append(strippedIdentifier)
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def getUnremediedIssues(context: dict | None) -> dict[str, list[str]]:
|
||||||
|
diagnosticsState = getDiagnosticsState(context)
|
||||||
|
return diagnosticsState.get(UNREMEDIED_ISSUES_KEY, {})
|
||||||
|
|
||||||
|
|
||||||
|
def iterUnremediedIssueSummaryLines(context: dict | None) -> list[str]:
|
||||||
|
summaryLines = []
|
||||||
|
unremediedIssues = getUnremediedIssues(context)
|
||||||
|
for sourcePath in sorted(unremediedIssues.keys()):
|
||||||
|
identifiers = unremediedIssues[sourcePath]
|
||||||
|
summaryLines.append(f"{os.path.basename(sourcePath)}: {', '.join(identifiers)}")
|
||||||
|
return summaryLines
|
||||||
34
src/ffx/diagnostics/warn_corrupt_mpeg_audio.py
Normal file
34
src/ffx/diagnostics/warn_corrupt_mpeg_audio.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from .base import FfmpegRemedy, FfmpegRemedyDecision
|
||||||
|
|
||||||
|
|
||||||
|
class WarnCorruptMpegAudioRemedy(FfmpegRemedy):
    """Warn (once) when ffmpeg reports damaged MPEG audio frames.

    The conversion is allowed to continue; this remedy neither retries
    nor skips, it only surfaces the problem on the console.
    """

    identifier = "warn-corrupt-mpeg-audio"
    PATTERNS = (
        re.compile(r"\[mp3float @ .*\] invalid block type", re.IGNORECASE),
        re.compile(r"\[mp3float @ .*\] Header missing"),
        re.compile(r"\[mp3float @ .*\] overread, skip ", re.IGNORECASE),
        re.compile(r"Error while decoding MPEG audio frame\."),
        re.compile(
            r"Error submitting packet to decoder: Invalid data found when processing input"
        ),
    )

    def inspect_line(
        self,
        line: str,
        session: "FfmpegDiagnosticMonitor",
    ) -> FfmpegRemedyDecision | None:
        for pattern in self.PATTERNS:
            if pattern.search(line):
                return FfmpegRemedyDecision(
                    console_warning=(
                        f"ffmpeg reported damaged MPEG audio frames while converting "
                        f"{session.describe_source()}. FFX will continue, but the output "
                        f"audio may contain gaps or glitches."
                    ),
                )
        return None
|
||||||
27
src/ffx/ffmpeg_diagnostics.py
Normal file
27
src/ffx/ffmpeg_diagnostics.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# Compatibility facade: re-export the diagnostics public API under the
# historical ``ffx.ffmpeg_diagnostics`` module name.
from .diagnostics import (
    FfmpegCommandRunner,
    FfmpegDiagnosticMonitor,
    FfmpegRemedy,
    FfmpegRemedyDecision,
    FfmpegSkipFileWarning,
    RetryWithGeneratedPtsRemedy,
    WarnCorruptMpegAudioRemedy,
    getDiagnosticsState,
    getUnremediedIssues,
    iterUnremediedIssueSummaryLines,
    recordUnremediedIssue,
)

# Explicit public API of this facade module.
__all__ = [
    "FfmpegCommandRunner",
    "FfmpegDiagnosticMonitor",
    "FfmpegRemedy",
    "FfmpegRemedyDecision",
    "FfmpegSkipFileWarning",
    "RetryWithGeneratedPtsRemedy",
    "WarnCorruptMpegAudioRemedy",
    "getDiagnosticsState",
    "getUnremediedIssues",
    "iterUnremediedIssueSummaryLines",
    "recordUnremediedIssue",
]
|
||||||
817
src/ffx/ffx.py
817
src/ffx/ffx.py
@@ -1,817 +0,0 @@
|
|||||||
#! /usr/bin/python3
|
|
||||||
|
|
||||||
import os, click, time, logging, shutil
|
|
||||||
|
|
||||||
from ffx.configuration_controller import ConfigurationController
|
|
||||||
|
|
||||||
from ffx.file_properties import FileProperties
|
|
||||||
|
|
||||||
from ffx.ffx_app import FfxApp
|
|
||||||
from ffx.ffx_controller import FfxController
|
|
||||||
from ffx.tmdb_controller import TmdbController
|
|
||||||
|
|
||||||
from ffx.database import databaseContext
|
|
||||||
|
|
||||||
from ffx.media_descriptor import MediaDescriptor
|
|
||||||
from ffx.track_descriptor import TrackDescriptor
|
|
||||||
from ffx.show_descriptor import ShowDescriptor
|
|
||||||
|
|
||||||
from ffx.track_type import TrackType
|
|
||||||
from ffx.video_encoder import VideoEncoder
|
|
||||||
from ffx.track_disposition import TrackDisposition
|
|
||||||
from ffx.track_codec import TrackCodec
|
|
||||||
|
|
||||||
from ffx.process import executeProcess
|
|
||||||
from ffx.helper import filterFilename, substituteTmdbFilename
|
|
||||||
from ffx.helper import getEpisodeFileBasename
|
|
||||||
|
|
||||||
from ffx.constants import DEFAULT_STEREO_BANDWIDTH, DEFAULT_AC3_BANDWIDTH, DEFAULT_DTS_BANDWIDTH, DEFAULT_7_1_BANDWIDTH
|
|
||||||
|
|
||||||
from ffx.filter.quality_filter import QualityFilter
|
|
||||||
from ffx.filter.preset_filter import PresetFilter
|
|
||||||
|
|
||||||
from ffx.filter.crop_filter import CropFilter
|
|
||||||
from ffx.filter.nlmeans_filter import NlmeansFilter
|
|
||||||
from ffx.filter.deinterlace_filter import DeinterlaceFilter
|
|
||||||
|
|
||||||
from ffx.constants import VERSION
|
|
||||||
|
|
||||||
from ffx.shifted_season_controller import ShiftedSeasonController
|
|
||||||
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
@click.option('--database-file', type=str, default='', help='Path to database file')
@click.option('-v', '--verbose', type=int, default=0, help='Set verbosity of output')
@click.option("--dry-run", is_flag=True, default=False)
def ffx(ctx, database_file, verbose, dry_run):
    """FFX"""

    # Shared state for all subcommands; click hands this around as ctx.obj.
    ctx.obj = {}

    ctx.obj['config'] = ConfigurationController()

    # CLI flag takes precedence over the configured database path.
    ctx.obj['database'] = databaseContext(databasePath=database_file
                                          if database_file else ctx.obj['config'].getDatabaseFilePath())

    ctx.obj['dry_run'] = dry_run
    ctx.obj['verbosity'] = verbose

    # Python logging levels for reference:
    # Critical 50
    # Error    40
    # Warning  30
    # Info     20
    # Debug    10
    # Each -v step lowers the threshold one level; floor at DEBUG (10).
    fileLogVerbosity = max(40 - verbose * 10, 10)
    consoleLogVerbosity = max(20 - verbose * 10, 10)

    ctx.obj['logger'] = logging.getLogger('FFX')
    # The logger itself stays wide open; the handlers do the filtering.
    ctx.obj['logger'].setLevel(logging.DEBUG)

    ffxFileHandler = logging.FileHandler(ctx.obj['config'].getLogFilePath())
    ffxFileHandler.setLevel(fileLogVerbosity)
    ffxConsoleHandler = logging.StreamHandler()
    ffxConsoleHandler.setLevel(consoleLogVerbosity)

    # Full context in the log file, bare messages on the console.
    fileFormatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ffxFileHandler.setFormatter(fileFormatter)
    consoleFormatter = logging.Formatter(
        '%(message)s')
    ffxConsoleHandler.setFormatter(consoleFormatter)

    ctx.obj['logger'].addHandler(ffxConsoleHandler)
    ctx.obj['logger'].addHandler(ffxFileHandler)
|
|
||||||
|
|
||||||
|
|
||||||
# Define a subcommand
|
|
||||||
@ffx.command()
def version():
    """Print the FFX version string."""
    versionText = VERSION
    click.echo(versionText)
|
|
||||||
|
|
||||||
|
|
||||||
# Another subcommand
|
|
||||||
@ffx.command()
def help():
    """Print the version banner and a one-line usage summary."""
    bannerLines = (
        f"ffx {VERSION}\n",
        f"Usage: ffx [input file] [output file] [vp9|av1] [q=[nn[,nn,...]]] [p=nn] [a=nnn[k]] [ac3=nnn[k]] [dts=nnn[k]] [crop]",
    )
    for bannerLine in bannerLines:
        click.echo(bannerLine)
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('filename', nargs=1)
def inspect(ctx, filename):
    """Inspect a single media file via the FfxApp pipeline."""
    context = ctx.obj
    context['command'] = 'inspect'
    context['arguments'] = {'filename': filename}

    FfxApp(context).run()
|
|
||||||
|
|
||||||
|
|
||||||
def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, targetDirectory = ''):
    """Build the ffmpeg command that extracts one track via stream copy.

    Returns the full token list: executable/common flags, the input
    file, the stream mapping with ``-c copy``, an explicit container
    format where the codec requires one, and the output path derived
    from targetPrefix plus the codec's extension.
    """
    # Executable plus input file.
    tokens = list(FfxController.COMMAND_TOKENS)
    tokens += ['-i', sourcePath]

    trackType = trackDescriptor.getType()

    # Output path base: prefix, optionally inside targetDirectory.
    outputBase = os.path.join(targetDirectory, targetPrefix) if targetDirectory else targetPrefix

    # Stream mapping: copy exactly one stream of the track's type.
    tokens += [
        '-map',
        f"0:{trackType.indicator()}:{trackDescriptor.getSubIndex()}",
        '-c',
        'copy',
    ]

    trackCodec = trackDescriptor.getCodec()

    # Some codecs need an explicit container format on the output side.
    outputFormat = trackCodec.format()
    if outputFormat is not None:
        tokens += ['-f', outputFormat]

    # Output filename.
    tokens.append(f"{outputBase}.{trackCodec.extension()}")

    return tokens
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('paths', nargs=-1)
@click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix')
@click.option("-o", "--output-directory", type=str, default='')
@click.option("-s", "--subtitles-only", is_flag=True, default=False)
@click.option('--nice', type=int, default=99, help='Niceness of started processes')
@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent')
def unmux(ctx,
          paths,
          label,
          output_directory,
          subtitles_only,
          nice,
          cpu):
    """Extract individual tracks of the given files into separate files."""

    # Silently drop paths that are not regular files.
    existingSourcePaths = [p for p in paths if os.path.isfile(p)]
    ctx.obj['logger'].debug(f"\nUnmuxing {len(existingSourcePaths)} files")

    ctx.obj['resource_limits'] = {}
    ctx.obj['resource_limits']['niceness'] = nice
    ctx.obj['resource_limits']['cpu_percent'] = cpu

    for sourcePath in existingSourcePaths:

        fp = FileProperties(ctx.obj, sourcePath)

        # Best effort per file: any failure skips this file only.
        try:
            sourceMediaDescriptor = fp.getMediaDescriptor()

            # Season/episode are -1 when not recognized from the filename.
            season = fp.getSeason()
            episode = fp.getEpisode()

            #TODO: Adapt recognition for all formats
            targetLabel = label if label else fp.getFileBasename()
            # The SxxEyy indicator is only used in combination with an
            # explicit label and a recognized season/episode pair.
            targetIndicator = f"_S{season}E{episode}" if label and season != -1 and episode != -1 else ''

            if label and not targetIndicator:
                # A label without a recognizable indicator would produce
                # colliding output names; skip the file instead.
                ctx.obj['logger'].warning(f"Skipping file {fp.getFilename()}: Label set but no indicator recognized")
                continue
            else:
                ctx.obj['logger'].info(f"\nUnmuxing file {fp.getFilename()}\n")

            # for trackDescriptor in sourceMediaDescriptor.getAllTrackDescriptors():
            for trackDescriptor in sourceMediaDescriptor.getTrackDescriptors():

                # With --subtitles-only, non-subtitle tracks are skipped.
                if trackDescriptor.getType() == TrackType.SUBTITLE or not subtitles_only:

                    # Output name pattern (see the pattern used for re-import):
                    # SEASON_EPISODE_STREAM_LANGUAGE_DISPOSITIONS_MATCH = '[sS]([0-9]+)[eE]([0-9]+)_([0-9]+)_([a-z]{3})(?:_([A-Z]{3}))*'
                    targetPrefix = f"{targetLabel}{targetIndicator}_{trackDescriptor.getIndex()}_{trackDescriptor.getLanguage().threeLetter()}"

                    # Append disposition indicators in a stable order.
                    td: TrackDisposition
                    for td in sorted(trackDescriptor.getDispositionSet(), key=lambda d: d.index()):
                        targetPrefix += f"_{td.indicator()}"

                    unmuxSequence = getUnmuxSequence(trackDescriptor, sourcePath, targetPrefix, targetDirectory = output_directory)

                    if unmuxSequence:
                        if not ctx.obj['dry_run']:

                            #TODO #425: Codec Enum
                            ctx.obj['logger'].info(f"Unmuxing stream {trackDescriptor.getIndex()} into file {targetPrefix}.{trackDescriptor.getCodec().extension()}")

                            ctx.obj['logger'].debug(f"Executing unmuxing sequence")

                            out, err, rc = executeProcess(unmuxSequence, context = ctx.obj)
                            if rc:
                                # Non-zero return code: report and continue
                                # with the remaining streams.
                                ctx.obj['logger'].error(f"Unmuxing of stream {trackDescriptor.getIndex()} failed with error ({rc}) {err}")
                    else:
                        ctx.obj['logger'].warning(f"Skipping stream with unknown codec")
        except Exception as ex:
            ctx.obj['logger'].warning(f"Skipping File {sourcePath} ({ex})")
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('paths', nargs=-1)
@click.option('--nice', type=int, default=99, help='Niceness of started processes')
@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent')
def cropdetect(ctx,
               paths,
               nice,
               cpu):
    """Detect and print crop parameters for each given media file."""

    # Silently drop paths that are not regular files.
    existingSourcePaths = [p for p in paths if os.path.isfile(p)]
    # Bug fix: the message was copy-pasted from unmux and claimed to be
    # "Unmuxing"; this command only detects crop parameters.
    ctx.obj['logger'].debug(f"\nDetecting crop parameters for {len(existingSourcePaths)} files")

    ctx.obj['resource_limits'] = {}
    ctx.obj['resource_limits']['niceness'] = nice
    ctx.obj['resource_limits']['cpu_percent'] = cpu

    for sourcePath in existingSourcePaths:

        # Best effort per file: any failure skips this file only.
        try:
            fp = FileProperties(ctx.obj, sourcePath)
            cropParams = fp.findCropParams()

            # Print raw crop parameters for use in later convert runs.
            click.echo(cropParams)

        except Exception as ex:
            ctx.obj['logger'].warning(f"Skipping File {sourcePath} ({ex})")
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
def shows(ctx):
    """List known shows via the FfxApp pipeline."""
    context = ctx.obj
    context['command'] = 'shows'

    FfxApp(context).run()
|
|
||||||
|
|
||||||
|
|
||||||
def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor):
    """Ensure at most one default and one forced stream per track type.

    When duplicates are found and prompting is allowed, the user is asked
    for the sub index of the intended stream and the corresponding
    disposition flag is set on the media descriptor; with ``no_prompt``
    set, a ClickException aborts the run instead.

    The six original copy-pasted branches are replaced by one data-driven
    loop that preserves the original check order: video default, video
    forced, audio default, audio forced, subtitle default, subtitle forced.
    """
    trackSources = (
        ('video', TrackType.VIDEO, mediaDescriptor.getVideoTracks),
        ('audio', TrackType.AUDIO, mediaDescriptor.getAudioTracks),
        ('subtitle', TrackType.SUBTITLE, mediaDescriptor.getSubtitleTracks),
    )
    dispositionChecks = (
        ('default', TrackDisposition.DEFAULT, mediaDescriptor.setDefaultSubTrack),
        ('forced', TrackDisposition.FORCED, mediaDescriptor.setForcedSubTrack),
    )

    for typeLabel, trackType, getTracks in trackSources:
        for dispositionLabel, disposition, setSubTrack in dispositionChecks:
            flaggedTracks = [t for t in getTracks() if t.getDispositionFlag(disposition)]
            if len(flaggedTracks) <= 1:
                continue

            if context['no_prompt']:
                raise click.ClickException(
                    f'More than one {dispositionLabel} {typeLabel} stream detected and no prompt set')

            selectedSubIndex = click.prompt(
                f"More than one {dispositionLabel} {typeLabel} stream detected! Please select stream",
                type=int)
            setSubTrack(trackType, selectedSubIndex)
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
|
|
||||||
@click.pass_context
|
|
||||||
|
|
||||||
@click.argument('paths', nargs=-1)
|
|
||||||
|
|
||||||
@click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix')
|
|
||||||
|
|
||||||
@click.option('-v', '--video-encoder', type=str, default=FfxController.DEFAULT_VIDEO_ENCODER, help=f"Target video encoder (vp9, av1 or h264)", show_default=True)
|
|
||||||
|
|
||||||
@click.option('-q', '--quality', type=str, default="", help=f"Quality settings to be used with VP9/H264 encoder")
|
|
||||||
@click.option('-p', '--preset', type=str, default="", help=f"Quality preset to be used with AV1 encoder")
|
|
||||||
|
|
||||||
@click.option('-a', '--stereo-bitrate', type=int, default=DEFAULT_STEREO_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode stereo audio streams", show_default=True)
|
|
||||||
@click.option('--ac3', type=int, default=DEFAULT_AC3_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 5.1 audio streams", show_default=True)
|
|
||||||
@click.option('--dts', type=int, default=DEFAULT_DTS_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 6.1 audio streams", show_default=True)
|
|
||||||
|
|
||||||
@click.option('--subtitle-directory', type=str, default='', help='Load subtitles from here')
|
|
||||||
@click.option('--subtitle-prefix', type=str, default='', help='Subtitle filename prefix')
|
|
||||||
|
|
||||||
@click.option('--language', type=str, multiple=True, help='Set stream language. Use format <stream index>:<3 letter iso code>')
|
|
||||||
@click.option('--title', type=str, multiple=True, help='Set stream title. Use format <stream index>:<title>')
|
|
||||||
|
|
||||||
@click.option('--default-video', type=int, default=-1, help='Index of default video stream')
|
|
||||||
@click.option('--forced-video', type=int, default=-1, help='Index of forced video stream')
|
|
||||||
@click.option('--default-audio', type=int, default=-1, help='Index of default audio stream')
|
|
||||||
@click.option('--forced-audio', type=int, default=-1, help='Index of forced audio stream')
|
|
||||||
@click.option('--default-subtitle', type=int, default=-1, help='Index of default subtitle stream')
|
|
||||||
@click.option('--forced-subtitle', type=int, default=-1, help='Index of forced subtitle stream')
|
|
||||||
|
|
||||||
@click.option('--rearrange-streams', type=str, default="", help='Rearrange output streams order. Use format comma separated integers')
|
|
||||||
|
|
||||||
@click.option("--crop", is_flag=False, flag_value="auto", default="none")
|
|
||||||
@click.option("--cut", is_flag=False, flag_value="default", default="none")
|
|
||||||
|
|
||||||
@click.option("--output-directory", type=str, default='')
|
|
||||||
|
|
||||||
@click.option("--deinterlace", is_flag=False, flag_value="default", default="none")
|
|
||||||
|
|
||||||
@click.option("--denoise", is_flag=False, flag_value="default", default="none")
|
|
||||||
@click.option("--denoise-use-hw", is_flag=True, default=False)
|
|
||||||
@click.option('--denoise-strength', type=str, default='', help='Denoising strength, more blurring vs more details.')
|
|
||||||
@click.option('--denoise-patch-size', type=str, default='', help='Subimage size to apply filtering on luminosity plane. Reduces broader noise patterns but costly.')
|
|
||||||
@click.option('--denoise-chroma-patch-size', type=str, default='', help='Subimage size to apply filtering on chroma planes.')
|
|
||||||
@click.option('--denoise-research-window', type=str, default='', help='Range to search for comparable patches on luminosity plane. Better filtering but costly.')
|
|
||||||
@click.option('--denoise-chroma-research-window', type=str, default='', help='Range to search for comparable patches on chroma planes.')
|
|
||||||
|
|
||||||
@click.option('--show', type=int, default=-1, help='Set TMDB show identifier')
|
|
||||||
@click.option('--season', type=int, default=-1, help='Set season of show')
|
|
||||||
@click.option('--episode', type=int, default=-1, help='Set episode of show')
|
|
||||||
|
|
||||||
@click.option("--no-tmdb", is_flag=True, default=False)
|
|
||||||
@click.option("--no-pattern", is_flag=True, default=False)
|
|
||||||
|
|
||||||
@click.option("--dont-pass-dispositions", is_flag=True, default=False)
|
|
||||||
|
|
||||||
@click.option("--no-prompt", is_flag=True, default=False)
|
|
||||||
@click.option("--no-signature", is_flag=True, default=False)
|
|
||||||
@click.option("--keep-mkvmerge-metadata", is_flag=True, default=False)
|
|
||||||
|
|
||||||
@click.option('--nice', type=int, default=99, help='Niceness of started processes')
|
|
||||||
@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent')
|
|
||||||
|
|
||||||
@click.option('--rename-only', is_flag=True, default=False, help='Only renaming, no recoding')
|
|
||||||
|
|
||||||
def convert(ctx,
|
|
||||||
paths,
|
|
||||||
label,
|
|
||||||
video_encoder,
|
|
||||||
quality,
|
|
||||||
preset,
|
|
||||||
stereo_bitrate,
|
|
||||||
ac3,
|
|
||||||
dts,
|
|
||||||
|
|
||||||
subtitle_directory,
|
|
||||||
subtitle_prefix,
|
|
||||||
|
|
||||||
language,
|
|
||||||
title,
|
|
||||||
|
|
||||||
default_video,
|
|
||||||
forced_video,
|
|
||||||
default_audio,
|
|
||||||
forced_audio,
|
|
||||||
default_subtitle,
|
|
||||||
forced_subtitle,
|
|
||||||
|
|
||||||
rearrange_streams,
|
|
||||||
|
|
||||||
crop,
|
|
||||||
cut,
|
|
||||||
|
|
||||||
output_directory,
|
|
||||||
|
|
||||||
deinterlace,
|
|
||||||
|
|
||||||
denoise,
|
|
||||||
denoise_use_hw,
|
|
||||||
denoise_strength,
|
|
||||||
denoise_patch_size,
|
|
||||||
denoise_chroma_patch_size,
|
|
||||||
denoise_research_window,
|
|
||||||
denoise_chroma_research_window,
|
|
||||||
|
|
||||||
show,
|
|
||||||
season,
|
|
||||||
episode,
|
|
||||||
|
|
||||||
no_tmdb,
|
|
||||||
no_pattern,
|
|
||||||
dont_pass_dispositions,
|
|
||||||
no_prompt,
|
|
||||||
no_signature,
|
|
||||||
keep_mkvmerge_metadata,
|
|
||||||
|
|
||||||
nice,
|
|
||||||
cpu,
|
|
||||||
rename_only):
|
|
||||||
"""Batch conversion of audiovideo files in format suitable for web playback, e.g. jellyfin
|
|
||||||
|
|
||||||
Files found under PATHS will be converted according to parameters.
|
|
||||||
Filename extensions will be changed appropriately.
|
|
||||||
Suffices will we appended to filename in case of multiple created files
|
|
||||||
or if the filename has not changed."""
|
|
||||||
|
|
||||||
startTime = time.perf_counter()
|
|
||||||
|
|
||||||
context = ctx.obj
|
|
||||||
|
|
||||||
context['video_encoder'] = VideoEncoder.fromLabel(video_encoder)
|
|
||||||
|
|
||||||
#HINT: quick and dirty override for h264, todo improve
|
|
||||||
targetFormat = '' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_FORMAT
|
|
||||||
targetExtension = 'mkv' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_EXTENSION
|
|
||||||
|
|
||||||
context['use_tmdb'] = not no_tmdb
|
|
||||||
context['use_pattern'] = not no_pattern
|
|
||||||
context['no_prompt'] = no_prompt
|
|
||||||
context['no_signature'] = no_signature
|
|
||||||
context['keep_mkvmerge_metadata'] = keep_mkvmerge_metadata
|
|
||||||
|
|
||||||
|
|
||||||
context['resource_limits'] = {}
|
|
||||||
context['resource_limits']['niceness'] = nice
|
|
||||||
context['resource_limits']['cpu_percent'] = cpu
|
|
||||||
|
|
||||||
|
|
||||||
context['import_subtitles'] = (subtitle_directory and subtitle_prefix)
|
|
||||||
if context['import_subtitles']:
|
|
||||||
context['subtitle_directory'] = subtitle_directory
|
|
||||||
context['subtitle_prefix'] = subtitle_prefix
|
|
||||||
|
|
||||||
|
|
||||||
existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in FfxController.INPUT_FILE_EXTENSIONS]
|
|
||||||
|
|
||||||
|
|
||||||
# CLI Overrides
|
|
||||||
|
|
||||||
cliOverrides = {}
|
|
||||||
|
|
||||||
if language:
|
|
||||||
cliOverrides['languages'] = {}
|
|
||||||
for overLang in language:
|
|
||||||
olTokens = overLang.split(':')
|
|
||||||
if len(olTokens) == 2:
|
|
||||||
try:
|
|
||||||
cliOverrides['languages'][int(olTokens[0])] = olTokens[1]
|
|
||||||
except ValueError:
|
|
||||||
ctx.obj['logger'].warning(f"Ignoring non-integer language index {olTokens[0]}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
if title:
|
|
||||||
cliOverrides['titles'] = {}
|
|
||||||
for overTitle in title:
|
|
||||||
otTokens = overTitle.split(':')
|
|
||||||
if len(otTokens) == 2:
|
|
||||||
try:
|
|
||||||
cliOverrides['titles'][int(otTokens[0])] = otTokens[1]
|
|
||||||
except ValueError:
|
|
||||||
ctx.obj['logger'].warning(f"Ignoring non-integer title index {otTokens[0]}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
if default_video != -1:
|
|
||||||
cliOverrides['default_video'] = default_video
|
|
||||||
if forced_video != -1:
|
|
||||||
cliOverrides['forced_video'] = forced_video
|
|
||||||
if default_audio != -1:
|
|
||||||
cliOverrides['default_audio'] = default_audio
|
|
||||||
if forced_audio != -1:
|
|
||||||
cliOverrides['forced_audio'] = forced_audio
|
|
||||||
if default_subtitle != -1:
|
|
||||||
cliOverrides['default_subtitle'] = default_subtitle
|
|
||||||
if forced_subtitle != -1:
|
|
||||||
cliOverrides['forced_subtitle'] = forced_subtitle
|
|
||||||
|
|
||||||
if show != -1 or season != -1 or episode != -1:
|
|
||||||
if len(existingSourcePaths) > 1:
|
|
||||||
context['logger'].warning(f"Ignoring TMDB show, season, episode overrides, not supported for multiple source files")
|
|
||||||
else:
|
|
||||||
cliOverrides['tmdb'] = {}
|
|
||||||
if show != -1:
|
|
||||||
cliOverrides['tmdb']['show'] = show
|
|
||||||
if season != -1:
|
|
||||||
cliOverrides['tmdb']['season'] = season
|
|
||||||
if episode != -1:
|
|
||||||
cliOverrides['tmdb']['episode'] = episode
|
|
||||||
|
|
||||||
if cliOverrides:
|
|
||||||
context['overrides'] = cliOverrides
|
|
||||||
|
|
||||||
|
|
||||||
if rearrange_streams:
|
|
||||||
try:
|
|
||||||
cliOverrides['stream_order'] = [int(si) for si in rearrange_streams.split(",")]
|
|
||||||
except ValueError as ve:
|
|
||||||
errorMessage = "Non-integer in rearrange stream parameter"
|
|
||||||
ctx.obj['logger'].error(errorMessage)
|
|
||||||
raise click.Abort()
|
|
||||||
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"\nVideo encoder: {video_encoder}")
|
|
||||||
|
|
||||||
qualityTokens = quality.split(',')
|
|
||||||
q_list = [q for q in qualityTokens if q.isnumeric()]
|
|
||||||
ctx.obj['logger'].debug(f"Qualities: {q_list}")
|
|
||||||
|
|
||||||
presetTokens = preset.split(',')
|
|
||||||
p_list = [p for p in presetTokens if p.isnumeric()]
|
|
||||||
ctx.obj['logger'].debug(f"Presets: {p_list}")
|
|
||||||
|
|
||||||
|
|
||||||
context['bitrates'] = {}
|
|
||||||
context['bitrates']['stereo'] = str(stereo_bitrate) if str(stereo_bitrate).endswith('k') else f"{stereo_bitrate}k"
|
|
||||||
context['bitrates']['ac3'] = str(ac3) if str(ac3).endswith('k') else f"{ac3}k"
|
|
||||||
context['bitrates']['dts'] = str(dts) if str(dts).endswith('k') else f"{dts}k"
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"Stereo bitrate: {context['bitrates']['stereo']}")
|
|
||||||
ctx.obj['logger'].debug(f"AC3 bitrate: {context['bitrates']['ac3']}")
|
|
||||||
ctx.obj['logger'].debug(f"DTS bitrate: {context['bitrates']['dts']}")
|
|
||||||
|
|
||||||
#->
|
|
||||||
# Process cut parameters
|
|
||||||
context['perform_cut'] = (cut != 'none')
|
|
||||||
if context['perform_cut']:
|
|
||||||
cutTokens = cut.split(',')
|
|
||||||
if cutTokens and len(cutTokens) == 2:
|
|
||||||
context['cut_start'] = int(cutTokens[0])
|
|
||||||
context['cut_length'] = int(cutTokens[1])
|
|
||||||
ctx.obj['logger'].debug(f"Cut start={context['cut_start']} length={context['cut_length']}")
|
|
||||||
|
|
||||||
|
|
||||||
tc = TmdbController() if context['use_tmdb'] else None
|
|
||||||
|
|
||||||
qualityKwargs = {QualityFilter.QUALITY_KEY: str(QualityFilter.DEFAULT_H264_QUALITY if (context['video_encoder'] == VideoEncoder.H264 and not quality) else quality)}
|
|
||||||
qf = QualityFilter(**qualityKwargs)
|
|
||||||
|
|
||||||
if context['video_encoder'] == VideoEncoder.AV1 and preset:
|
|
||||||
presetKwargs = {PresetFilter.PRESET_KEY: preset}
|
|
||||||
PresetFilter(**presetKwargs)
|
|
||||||
|
|
||||||
cf = None
|
|
||||||
# if crop != 'none':
|
|
||||||
if crop == 'auto':
|
|
||||||
cropKwargs = {}
|
|
||||||
cf = CropFilter(**cropKwargs)
|
|
||||||
|
|
||||||
denoiseKwargs = {}
|
|
||||||
if denoise_strength:
|
|
||||||
denoiseKwargs[NlmeansFilter.STRENGTH_KEY] = denoise_strength
|
|
||||||
if denoise_patch_size:
|
|
||||||
denoiseKwargs[NlmeansFilter.PATCH_SIZE_KEY] = denoise_patch_size
|
|
||||||
if denoise_chroma_patch_size:
|
|
||||||
denoiseKwargs[NlmeansFilter.CHROMA_PATCH_SIZE_KEY] = denoise_chroma_patch_size
|
|
||||||
if denoise_research_window:
|
|
||||||
denoiseKwargs[NlmeansFilter.RESEARCH_WINDOW_KEY] = denoise_research_window
|
|
||||||
if denoise_chroma_research_window:
|
|
||||||
denoiseKwargs[NlmeansFilter.CHROMA_RESEARCH_WINDOW_KEY] = denoise_chroma_research_window
|
|
||||||
if denoise != 'none' or denoiseKwargs:
|
|
||||||
NlmeansFilter(**denoiseKwargs)
|
|
||||||
|
|
||||||
if deinterlace != 'none':
|
|
||||||
DeinterlaceFilter()
|
|
||||||
|
|
||||||
chainYield = list(qf.getChainYield())
|
|
||||||
|
|
||||||
ctx.obj['logger'].info(f"\nRunning {len(existingSourcePaths) * len(chainYield)} jobs")
|
|
||||||
|
|
||||||
jobIndex = 0
|
|
||||||
|
|
||||||
for sourcePath in existingSourcePaths:
|
|
||||||
|
|
||||||
# Separate basedir, basename and extension for current source file
|
|
||||||
sourceDirectory = os.path.dirname(sourcePath)
|
|
||||||
sourceFilename = os.path.basename(sourcePath)
|
|
||||||
sourcePathTokens = sourceFilename.split('.')
|
|
||||||
|
|
||||||
sourceFileBasename = '.'.join(sourcePathTokens[:-1])
|
|
||||||
sourceFilenameExtension = sourcePathTokens[-1]
|
|
||||||
|
|
||||||
ctx.obj['logger'].info(f"\nProcessing file {sourcePath}")
|
|
||||||
|
|
||||||
targetSuffices = {}
|
|
||||||
|
|
||||||
mediaFileProperties = FileProperties(context, sourcePath)
|
|
||||||
|
|
||||||
|
|
||||||
# if not cf is None:
|
|
||||||
#
|
|
||||||
cropArguments = {} if cf is None else mediaFileProperties.findCropArguments()
|
|
||||||
#
|
|
||||||
# ctx.obj['logger'].info(f"\nSetting crop arguments: ouput width: {cropArguments[CropFilter.OUTPUT_WIDTH_KEY]} "
|
|
||||||
# + f"height: {cropArguments[CropFilter.OUTPUT_HEIGHT_KEY]} "
|
|
||||||
# + f"offset x: {cropArguments[CropFilter.OFFSET_X_KEY]} "
|
|
||||||
# + f"y: {cropArguments[CropFilter.OFFSET_Y_KEY]}")
|
|
||||||
#
|
|
||||||
# cf.setArguments(**cropArguments)
|
|
||||||
|
|
||||||
|
|
||||||
ssc = ShiftedSeasonController(context)
|
|
||||||
|
|
||||||
showId = mediaFileProperties.getShowId()
|
|
||||||
|
|
||||||
#HINT: -1 if not set
|
|
||||||
if 'tmdb' in cliOverrides.keys() and 'season' in cliOverrides['tmdb']:
|
|
||||||
showSeason = cliOverrides['tmdb']['season']
|
|
||||||
else:
|
|
||||||
showSeason = mediaFileProperties.getSeason()
|
|
||||||
|
|
||||||
if 'tmdb' in cliOverrides.keys() and 'episode' in cliOverrides['tmdb']:
|
|
||||||
showEpisode = cliOverrides['tmdb']['episode']
|
|
||||||
else:
|
|
||||||
showEpisode = mediaFileProperties.getEpisode()
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"Season={showSeason} Episode={showEpisode}")
|
|
||||||
|
|
||||||
|
|
||||||
sourceMediaDescriptor = mediaFileProperties.getMediaDescriptor()
|
|
||||||
|
|
||||||
#HINT: This is None if the filename did not match anything in database
|
|
||||||
currentPattern = mediaFileProperties.getPattern() if context['use_pattern'] else None
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"Pattern matching: {'No' if currentPattern is None else 'Yes'}")
|
|
||||||
|
|
||||||
# Setup FfxController accordingly depending on pattern matching is enabled and a pattern was matched
|
|
||||||
if currentPattern is None:
|
|
||||||
|
|
||||||
checkUniqueDispositions(context, sourceMediaDescriptor)
|
|
||||||
currentShowDescriptor = None
|
|
||||||
|
|
||||||
if context['import_subtitles']:
|
|
||||||
sourceMediaDescriptor.importSubtitles(context['subtitle_directory'],
|
|
||||||
context['subtitle_prefix'],
|
|
||||||
showSeason,
|
|
||||||
showEpisode)
|
|
||||||
|
|
||||||
if cliOverrides:
|
|
||||||
sourceMediaDescriptor.applyOverrides(cliOverrides)
|
|
||||||
|
|
||||||
fc = FfxController(context, sourceMediaDescriptor)
|
|
||||||
|
|
||||||
else:
|
|
||||||
targetMediaDescriptor = currentPattern.getMediaDescriptor(ctx.obj)
|
|
||||||
checkUniqueDispositions(context, targetMediaDescriptor)
|
|
||||||
currentShowDescriptor = currentPattern.getShowDescriptor(ctx.obj)
|
|
||||||
|
|
||||||
|
|
||||||
# Check if source and target track descriptors match
|
|
||||||
sourceTrackDescriptorList = sourceMediaDescriptor.getTrackDescriptors()
|
|
||||||
targetTrackDescriptorList = targetMediaDescriptor.getTrackDescriptors()
|
|
||||||
|
|
||||||
for ttd in targetTrackDescriptorList:
|
|
||||||
|
|
||||||
tti = ttd.getIndex()
|
|
||||||
ttsi = ttd.getSourceIndex()
|
|
||||||
|
|
||||||
stList = [st for st in sourceTrackDescriptorList if st.getIndex() == ttsi]
|
|
||||||
std = stList[0] if stList else None
|
|
||||||
|
|
||||||
if std is None:
|
|
||||||
raise click.ClickException(f"Target track #{tti} refering to non-existent source track #{ttsi}")
|
|
||||||
|
|
||||||
ttType = ttd.getType()
|
|
||||||
stType = std.getType()
|
|
||||||
|
|
||||||
if ttType != stType:
|
|
||||||
raise click.ClickException(f"Target track #{tti} type ({ttType.label()}) not matching source track #{ttsi} type ({stType.label()})")
|
|
||||||
|
|
||||||
|
|
||||||
if context['import_subtitles']:
|
|
||||||
targetMediaDescriptor.importSubtitles(context['subtitle_directory'],
|
|
||||||
context['subtitle_prefix'],
|
|
||||||
showSeason,
|
|
||||||
showEpisode)
|
|
||||||
|
|
||||||
# ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getAllTrackDescriptors()]}")
|
|
||||||
ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getTrackDescriptors()]}")
|
|
||||||
|
|
||||||
if cliOverrides:
|
|
||||||
targetMediaDescriptor.applyOverrides(cliOverrides)
|
|
||||||
|
|
||||||
# ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getAllTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getAllTrackDescriptors()]}")
|
|
||||||
ctx.obj['logger'].debug(f"tmd subindices: {[t.getIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getSubIndex() for t in targetMediaDescriptor.getTrackDescriptors()]} {[t.getDispositionFlag(TrackDisposition.DEFAULT) for t in targetMediaDescriptor.getTrackDescriptors()]}")
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"Input mapping tokens (2nd pass): {targetMediaDescriptor.getInputMappingTokens()}")
|
|
||||||
|
|
||||||
fc = FfxController(context, targetMediaDescriptor, sourceMediaDescriptor)
|
|
||||||
|
|
||||||
|
|
||||||
indexSeasonDigits = currentShowDescriptor.getIndexSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS
|
|
||||||
indexEpisodeDigits = currentShowDescriptor.getIndexEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS
|
|
||||||
indicatorSeasonDigits = currentShowDescriptor.getIndicatorSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS
|
|
||||||
indicatorEpisodeDigits = currentShowDescriptor.getIndicatorEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS
|
|
||||||
|
|
||||||
|
|
||||||
# Shift season and episode if defined for this show
|
|
||||||
if ('tmdb' not in cliOverrides.keys() and showId != -1
|
|
||||||
and showSeason != -1 and showEpisode != -1):
|
|
||||||
shiftedShowSeason, shiftedShowEpisode = ssc.shiftSeason(showId,
|
|
||||||
season=showSeason,
|
|
||||||
episode=showEpisode)
|
|
||||||
else:
|
|
||||||
shiftedShowSeason = showSeason
|
|
||||||
shiftedShowEpisode = showEpisode
|
|
||||||
|
|
||||||
# Assemble target filename accordingly depending on TMDB lookup is enabled
|
|
||||||
#HINT: -1 if not set
|
|
||||||
showId = cliOverrides['tmdb']['show'] if 'tmdb' in cliOverrides.keys() and 'show' in cliOverrides['tmdb'] else (-1 if currentShowDescriptor is None else currentShowDescriptor.getId())
|
|
||||||
|
|
||||||
if context['use_tmdb'] and showId != -1 and shiftedShowSeason != -1 and shiftedShowEpisode != -1:
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"Querying TMDB for show_id={showId} season={shiftedShowSeason} episode{shiftedShowEpisode}")
|
|
||||||
|
|
||||||
if currentPattern is None:
|
|
||||||
sName, showYear = tc.getShowNameAndYear(showId)
|
|
||||||
showName = filterFilename(sName)
|
|
||||||
showFilenamePrefix = f"{showName} ({str(showYear)})"
|
|
||||||
else:
|
|
||||||
showFilenamePrefix = currentShowDescriptor.getFilenamePrefix()
|
|
||||||
|
|
||||||
tmdbEpisodeResult = tc.queryEpisode(showId, shiftedShowSeason, shiftedShowEpisode)
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"tmdbEpisodeResult={tmdbEpisodeResult}")
|
|
||||||
|
|
||||||
if tmdbEpisodeResult:
|
|
||||||
substitutedEpisodeName = filterFilename(substituteTmdbFilename(tmdbEpisodeResult['name']))
|
|
||||||
sourceFileBasename = getEpisodeFileBasename(showFilenamePrefix,
|
|
||||||
substitutedEpisodeName,
|
|
||||||
shiftedShowSeason,
|
|
||||||
shiftedShowEpisode,
|
|
||||||
indexSeasonDigits,
|
|
||||||
indexEpisodeDigits,
|
|
||||||
indicatorSeasonDigits,
|
|
||||||
indicatorEpisodeDigits,
|
|
||||||
context=ctx.obj)
|
|
||||||
|
|
||||||
if label:
|
|
||||||
if shiftedShowSeason > -1 and shiftedShowEpisode > -1:
|
|
||||||
targetSuffices['se'] = f"S{shiftedShowSeason:0{indicatorSeasonDigits}d}E{shiftedShowEpisode:0{indicatorEpisodeDigits}d}"
|
|
||||||
elif shiftedShowEpisode > -1:
|
|
||||||
targetSuffices['se'] = f"E{shiftedShowEpisode:0{indicatorEpisodeDigits}d}"
|
|
||||||
else:
|
|
||||||
if 'se' in targetSuffices.keys():
|
|
||||||
del targetSuffices['se']
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"fileBasename={sourceFileBasename}")
|
|
||||||
|
|
||||||
|
|
||||||
for chainIteration in chainYield:
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"\nchain iteration: {chainIteration}\n")
|
|
||||||
|
|
||||||
chainVariant = '-'.join([fy['variant'] for fy in chainIteration])
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"\nRunning job {jobIndex} file={sourcePath} variant={chainVariant}")
|
|
||||||
jobIndex += 1
|
|
||||||
|
|
||||||
ctx.obj['logger'].debug(f"label={label if label else 'Falsy'}")
|
|
||||||
ctx.obj['logger'].debug(f"sourceFileBasename={sourceFileBasename}")
|
|
||||||
|
|
||||||
targetFileBasename = sourceFileBasename if context['use_tmdb'] and not label else label
|
|
||||||
|
|
||||||
targetFilenameTokens = [targetFileBasename]
|
|
||||||
|
|
||||||
if 'se' in targetSuffices.keys():
|
|
||||||
targetFilenameTokens += [targetSuffices['se']]
|
|
||||||
|
|
||||||
for filterYield in chainIteration:
|
|
||||||
targetFilenameTokens += filterYield['suffices']
|
|
||||||
|
|
||||||
targetFilename = f"{'_'.join(targetFilenameTokens)}.{sourceFilenameExtension if rename_only else targetExtension}"
|
|
||||||
|
|
||||||
if sourceFilename == targetFilename:
|
|
||||||
targetFilename = f"out_{targetFilename}"
|
|
||||||
|
|
||||||
|
|
||||||
targetPath = os.path.join(output_directory, targetFilename) if output_directory else targetFilename
|
|
||||||
|
|
||||||
ctx.obj['logger'].info(f"Creating file {targetFilename}")
|
|
||||||
|
|
||||||
if rename_only:
|
|
||||||
shutil.copyfile(sourcePath, targetPath)
|
|
||||||
else:
|
|
||||||
fc.runJob(sourcePath,
|
|
||||||
targetPath,
|
|
||||||
targetFormat,
|
|
||||||
context['video_encoder'],
|
|
||||||
chainIteration,
|
|
||||||
cropArguments)
|
|
||||||
|
|
||||||
endTime = time.perf_counter()
|
|
||||||
ctx.obj['logger'].info(f"\nDONE\nTime elapsed {endTime - startTime}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
ffx()
|
|
||||||
@@ -1,7 +1,10 @@
|
|||||||
from textual.app import App
|
from textual.app import App
|
||||||
|
|
||||||
|
from .i18n import set_current_language, t
|
||||||
from .shows_screen import ShowsScreen
|
from .shows_screen import ShowsScreen
|
||||||
from .media_details_screen import MediaDetailsScreen
|
from .inspect_details_screen import InspectDetailsScreen
|
||||||
|
from .media_edit_screen import MediaEditScreen
|
||||||
|
from .screen_support import configure_screen_log_handler, set_screen_log_pane_enabled
|
||||||
|
|
||||||
|
|
||||||
class FfxApp(App):
|
class FfxApp(App):
|
||||||
@@ -9,8 +12,8 @@ class FfxApp(App):
|
|||||||
TITLE = "FFX"
|
TITLE = "FFX"
|
||||||
|
|
||||||
BINDINGS = [
|
BINDINGS = [
|
||||||
("q", "quit()", "Quit"),
|
("q", "quit()", t("Quit")),
|
||||||
("h", "switch_mode('help')", "Help"),
|
("h", "switch_mode('help')", t("Help")),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -19,6 +22,14 @@ class FfxApp(App):
|
|||||||
|
|
||||||
# Data 'input' variable
|
# Data 'input' variable
|
||||||
self.context = context
|
self.context = context
|
||||||
|
set_current_language(self.context.get("language"))
|
||||||
|
debug_mode = bool(self.context.get("debug", False))
|
||||||
|
set_screen_log_pane_enabled(debug_mode)
|
||||||
|
configure_screen_log_handler(
|
||||||
|
self.context.get("logger"),
|
||||||
|
self,
|
||||||
|
enabled=debug_mode,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def on_mount(self) -> None:
|
def on_mount(self) -> None:
|
||||||
@@ -29,10 +40,12 @@ class FfxApp(App):
|
|||||||
self.push_screen(ShowsScreen())
|
self.push_screen(ShowsScreen())
|
||||||
|
|
||||||
if self.context['command'] == 'inspect':
|
if self.context['command'] == 'inspect':
|
||||||
self.push_screen(MediaDetailsScreen())
|
self.push_screen(InspectDetailsScreen())
|
||||||
|
|
||||||
|
if self.context['command'] == 'edit':
|
||||||
|
self.push_screen(MediaEditScreen())
|
||||||
|
|
||||||
|
|
||||||
def getContext(self):
|
def getContext(self):
|
||||||
"""Data 'output' method"""
|
"""Data 'output' method"""
|
||||||
return self.context
|
return self.context
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user