Compare commits
334 Commits
dd8f472ac5
...
dev-editor
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
037388886e | ||
|
|
e614ca5d75 | ||
|
|
c0b3977ea6 | ||
|
|
d9639561ce | ||
|
|
cbf43e5d6c | ||
|
|
d6e885517d | ||
|
|
8a8c43ecdf | ||
|
|
6170ac641c | ||
|
|
497c0e500b | ||
|
|
008c643272 | ||
|
|
c302b30e63 | ||
|
|
7926407534 | ||
|
|
0894ac2fab | ||
|
|
353759b983 | ||
|
|
454f5f0656 | ||
|
|
0e51d6337f | ||
|
|
a24b6dedaa | ||
|
|
8361fc536b | ||
|
|
4d4272e5e8 | ||
|
|
559869ca68 | ||
|
|
0e4fae538b | ||
|
|
2595bfe4f4 | ||
|
|
fc9d94aeee | ||
|
|
111df11199 | ||
|
|
f0d4c36bc3 | ||
|
|
ef0d6e9274 | ||
|
|
d05b01cfb2 | ||
|
|
9dc08d48e9 | ||
|
|
20bdfc0dd7 | ||
|
|
4365e083dc | ||
|
|
528915a235 | ||
|
|
9a980b5766 | ||
|
|
5eee7e1161 | ||
|
|
0a41998e29 | ||
|
|
ebdc23c3ce | ||
|
|
9611930949 | ||
|
|
609f93b783 | ||
|
|
52c6462fa8 | ||
|
|
358ef18f77 | ||
|
|
fc729a2414 | ||
|
|
0939a0c6c2 | ||
|
|
c384d54c12 | ||
|
|
71553aad32 | ||
|
|
d19e69990a | ||
|
|
be0f4b4c4e | ||
|
|
01b5fdb289 | ||
|
|
60ae58500a | ||
|
|
f9c8b8ac5e | ||
|
|
5871ae30ad | ||
|
|
52724ecc5b | ||
|
|
f288d445e4 | ||
|
|
d9db6da191 | ||
|
|
5443881ea1 | ||
|
|
8946b57456 | ||
|
|
686239491b | ||
|
|
126ba4487c | ||
|
|
447cda19ef | ||
|
|
f1ba913a98 | ||
|
|
59336aafb7 | ||
|
|
fd5ad3ed56 | ||
|
|
2d03a3bb10 | ||
|
|
4dc02d52a2 | ||
|
|
ed0cea9c26 | ||
|
|
15bfbdbe88 | ||
|
|
c354ba09ba | ||
| 2eeea08be0 | |||
| fbfc8ea965 | |||
|
|
6ec5db2ea2 | ||
|
|
8feced6f1c | ||
|
|
285649c30a | ||
|
|
558da817f1 | ||
|
|
2a84327f69 | ||
| 535b11dca5 | |||
| 8edc715795 | |||
| cd203703e8 | |||
| 8f2367b71e | |||
| 101c7605d2 | |||
| a5b58e34e4 | |||
| a32e86550c | |||
| 5de3778ae5 | |||
| 81aab0657e | |||
| 8514a0c152 | |||
| c846147c64 | |||
| e52297b2ba | |||
| 655833f13e | |||
| 03dd02ed87 | |||
| b6ee197536 | |||
| d8374ae9f2 | |||
| f262eaa120 | |||
| d940a6e92a | |||
| e1395aeca0 | |||
| 48841c5750 | |||
| d558bbf6bd | |||
| b05d989581 | |||
| bc8af53525 | |||
| 6bd1587947 | |||
| 7d6531b40e | |||
| ab435a4c76 | |||
| 0a88e366b1 | |||
| 1c80cd7d7d | |||
| a45c180aaa | |||
| 0b204ff19c | |||
| d7ec5f7620 | |||
| 3f64304374 | |||
| b459272149 | |||
| 4b05fc194b | |||
| 9d088819ab | |||
| e20f7a1f67 | |||
| 9d683dfa84 | |||
| 867756c661 | |||
| f81a6edb07 | |||
| ec4bce473c | |||
| bf882b741f | |||
| a4e25b5ec8 | |||
| ff6bacb0d5 | |||
| f32b7a06c0 | |||
| 7ceed58e7b | |||
| 153f401dd3 | |||
| 7f1f34fb9f | |||
| 21fe7cb1eb | |||
| 9e63184524 | |||
| 3742221189 | |||
| 478ac15ab8 | |||
| ef0a01bc9b | |||
| 802c11be44 | |||
| 4cbb135772 | |||
| 3d52442471 | |||
| 81640192ab | |||
| 81d760aabe | |||
| c0eff679f7 | |||
| 07097058d7 | |||
| cd7a338541 | |||
| be652f8efb | |||
| dd51b14d49 | |||
| a471808392 | |||
| b3da8ce738 | |||
| fe0c078c3f | |||
| 962522b974 | |||
| 24367ea08a | |||
| f0eebd0bea | |||
| c8e21b9260 | |||
| cdc1664779 | |||
|
|
2849eda05a | ||
|
|
cfb2df8d66 | ||
|
|
12c8ad3782 | ||
|
|
74a39a8f9a | ||
|
|
5eacb0d0cb | ||
|
|
e8c0c3d646 | ||
|
|
6b2671a1f5 | ||
|
|
2d8622506e | ||
|
|
86cc7dfc6f | ||
|
|
d84bee74c4 | ||
|
|
488caa7a08 | ||
|
|
62877dfed6 | ||
|
|
87ff94e204 | ||
|
|
0c78ed7cf7 | ||
|
|
4db9bfd103 | ||
|
|
db7700a6b9 | ||
|
|
222234f978 | ||
|
|
3672474ff5 | ||
|
|
5ff0fc3fad | ||
|
|
554ca4cc03 | ||
|
|
51febdfcc0 | ||
|
|
0588f47837 | ||
|
|
8b036af47f | ||
|
|
c66d4c2568 | ||
|
|
59dbb8985a | ||
|
|
5e57e57ad2 | ||
|
|
739acb0dd8 | ||
|
|
e9e535044e | ||
|
|
bc62801949 | ||
|
|
e2b6a4bf7c | ||
|
|
4716c4d11c | ||
|
|
871c22d8e8 | ||
|
|
e44fc93b4e | ||
|
|
71943dbabc | ||
|
|
0a026afba4 | ||
|
|
a4b3db3eba | ||
|
|
06978e6862 | ||
|
|
457b599188 | ||
|
|
125aeca340 | ||
|
|
1caf4beb72 | ||
|
|
49c1164b8b | ||
|
|
0f635b8a86 | ||
|
|
4b9f9a0364 | ||
|
|
2f3658de5b | ||
|
|
ec4af18e7a | ||
|
|
58b01f2be7 | ||
|
|
1d4507782b | ||
|
|
8c7eee580d | ||
|
|
303fd4bc80 | ||
|
|
5febb96916 | ||
|
|
b16e76370b | ||
|
|
feb5441251 | ||
|
|
ea182d4ddb | ||
|
|
f853cf0f85 | ||
|
|
b492be227a | ||
|
|
7fe5b66c0c | ||
|
|
07cc0cd95e | ||
|
|
de2d7c0593 | ||
|
|
826677cb03 | ||
|
|
95aeacf694 | ||
|
|
a3bb16e850 | ||
|
|
0ed85fce4a | ||
|
|
1a0a5f4482 | ||
|
|
06f6322d32 | ||
|
|
0cbcf1a702 | ||
|
|
f94310fdb7 | ||
|
|
06b523f3e8 | ||
|
|
efb4fbfc95 | ||
|
|
2abda01fe6 | ||
|
|
f007ada29f | ||
|
|
44916bf062 | ||
|
|
89129ae5c4 | ||
|
|
696b2b56d3 | ||
|
|
b9185b5b07 | ||
|
|
de0f4a57c1 | ||
|
|
207472283b | ||
|
|
22f4b00e76 | ||
|
|
95d858b2c6 | ||
| 3f0efab49b | |||
| 5c47f193d4 | |||
| 324084c845 | |||
|
|
79f088a86a | ||
|
|
9e37ff18c4 | ||
|
|
3647b25b4c | ||
|
|
772c1d8f90 | ||
|
|
ad58ba5ce6 | ||
|
|
24d0700db2 | ||
|
|
3463c1e371 | ||
|
|
5ca7d6d12c | ||
|
|
77dfb4b1d3 | ||
| 1df2e74566 | |||
| 8f9f77e891 | |||
| 6a03d4d6e2 | |||
| a263c735aa | |||
| 5e0fdd6476 | |||
| 1cfa51f2ca | |||
| a3dc894ba7 | |||
| c0305ec0a7 | |||
| 00ca7b92c1 | |||
| c4b3dd94f9 | |||
| 5307eda92e | |||
| 1620638110 | |||
| 467977c81c | |||
| fee7940660 | |||
| c03d4389ae | |||
| abdf13142e | |||
| 5c96439fa8 | |||
| 45a1c5aa4e | |||
| 42f9486c64 | |||
| 1eecd5a4f8 | |||
| c83f17dd44 | |||
| a01e8bfca5 | |||
| cf49ff06d1 | |||
| e3115e7557 | |||
| ce2f3993e1 | |||
| ee31634b0b | |||
| 30d22892f8 | |||
|
|
260c605201 | ||
| 747ff41ad3 | |||
| fe1ed57758 | |||
| a082058ce2 | |||
|
|
9739f9aee4 | ||
|
|
5bb7dcc795 | ||
| ec3fb25c7b | |||
| 9fee9d1ae4 | |||
|
|
e1cff6c8db | ||
|
|
93cc8a23c9 | ||
|
|
bcb4e4d3d6 | ||
|
|
dba494b4e6 | ||
|
|
ca57e981a6 | ||
|
|
ff93875a07 | ||
|
|
bc3b593362 | ||
|
|
f920dec475 | ||
|
|
84b1135861 | ||
|
|
aaa6b2cabc | ||
|
|
aea8c7e9ea | ||
|
|
b9aed1f0b6 | ||
|
|
12689fe61d | ||
|
|
a280248f6a | ||
|
|
6b99091a55 | ||
|
|
8ed6809ad0 | ||
|
|
882d021bb6 | ||
|
|
131cca2c53 | ||
|
|
a03449a32b | ||
|
|
7d7e43c6f0 | ||
|
|
1c9f67e47a | ||
|
|
eaee3b34da | ||
|
|
5e017a8373 | ||
| 123d8659e1 | |||
| 82b257d809 | |||
|
|
8ec7f9c2d1 | ||
|
|
7cc6efb9f1 | ||
|
|
87ccb7e8a6 | ||
|
|
3765f25fd8 | ||
|
|
74dfbe30d7 | ||
|
|
84baeb2d87 | ||
|
|
9df5973676 | ||
|
|
b492ebdab9 | ||
|
|
1ae52399b9 | ||
| 3008d66dfe | |||
| 9f22c70e89 | |||
| a5d568ba34 | |||
| 73c957c9bb | |||
| 7c899e32bb | |||
| e325aaa529 | |||
| fdc8f8f602 | |||
| 7fc025821b | |||
|
|
322321b1ed | ||
|
|
a46a2b421e | ||
|
|
0cda6390cd | ||
|
|
c963c2c675 | ||
|
|
37786f56b5 | ||
|
|
24e85d7005 | ||
|
|
9b57007d5e | ||
|
|
59503b566c | ||
|
|
b390eaf2a0 | ||
|
|
dcfc0f734b | ||
|
|
ab0df0fed3 | ||
|
|
8ad50fd881 | ||
|
|
5ed97e0b9b | ||
|
|
7d85b45d68 | ||
|
|
de56e85d2e | ||
| d84797e6a5 | |||
|
|
e3964b0002 | ||
|
|
54a3df7f7c | ||
|
|
f381fad31f | ||
|
|
e6734cb4ef | ||
|
|
dcd79b74fd | ||
|
|
4d7f728f25 | ||
|
|
ad7562f387 | ||
| e7426398ee | |||
| 20ae4e763f |
24
.gitignore
vendored
Normal file
24
.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
junk/
|
||||||
|
.vscode
|
||||||
|
.ipynb_checkpoints/
|
||||||
|
tools/ansible/inventory/hawaii.yml
|
||||||
|
tools/ansible/inventory/peppermint.yml
|
||||||
|
tools/ansible/inventory/cappuccino.yml
|
||||||
|
tools/ansible/inventory/group_vars/all.yml
|
||||||
|
ffx_test_report.log
|
||||||
|
bin/conversiontest.py
|
||||||
|
|
||||||
|
build/
|
||||||
|
dist/
|
||||||
|
*.egg-info/
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
.codex
|
||||||
|
|
||||||
|
|
||||||
|
*.mkv
|
||||||
|
*.webm
|
||||||
|
ffmpeg2pass-0.log
|
||||||
|
*.sup
|
||||||
376
AGENTS.md
Normal file
376
AGENTS.md
Normal file
@@ -0,0 +1,376 @@
|
|||||||
|
# AGENTS.md
|
||||||
|
|
||||||
|
This file is the entry point for agent guidance in this repository.
|
||||||
|
|
||||||
|
It is intentionally generic and reusable across projects. Keep this file focused on non-project-specific constraints, working style, and the structure used to link more detailed guidance.
|
||||||
|
|
||||||
|
# Purpose
|
||||||
|
|
||||||
|
- Provide a small default rule set for agents working in this repository.
|
||||||
|
- Keep the base guidance modular and easy to extend.
|
||||||
|
- Separate reusable agent behavior from project-specific requirements.
|
||||||
|
|
||||||
|
# Comment Syntax
|
||||||
|
|
||||||
|
- A segment wrapped in `<!--` and `-->` is a comment and must be ignored by agents.
|
||||||
|
- Use HTML comments for optional guidance that should stay inactive until enabled.
|
||||||
|
- To enable an optional segment, remove the surrounding `<!--` and `-->` markers.
|
||||||
|
|
||||||
|
# Core Principles
|
||||||
|
|
||||||
|
- Prefer the simplest solution that satisfies the current goal.
|
||||||
|
- Keep guidance lightweight: only add detail when it meaningfully improves outcomes.
|
||||||
|
- Reuse modular guideline files instead of expanding this file indefinitely.
|
||||||
|
- Treat project-specific documents as the source of truth for project behavior.
|
||||||
|
- When guidance conflicts, use the most specific applicable document.
|
||||||
|
|
||||||
|
# Rule Terms
|
||||||
|
|
||||||
|
- A `rule` is the general term for any constraint, requirement, definition, or similar guidance item.
|
||||||
|
- A `rule set` addresses all rules inside one file that share the same rule set ID.
|
||||||
|
- Any rule inside a rule set shall use an ID following the schema `RULESET-0001`, `RULESET-0002`, and so on.
|
||||||
|
- Rules without a rule set ID are also valid, but they are not addressable by rule ID.
|
||||||
|
|
||||||
|
# Scope Of This File
|
||||||
|
|
||||||
|
This file should contain:
|
||||||
|
|
||||||
|
- Generic agent behavior and constraints.
|
||||||
|
- Rules that are reusable across multiple projects.
|
||||||
|
- Links to optional guideline modules.
|
||||||
|
- Links to project-specific requirements.
|
||||||
|
- Commented optional templates for released-product documentation and agent-output locations.
|
||||||
|
|
||||||
|
This file should not contain:
|
||||||
|
|
||||||
|
- Project business requirements.
|
||||||
|
- Project architecture decisions.
|
||||||
|
- Stack-specific implementation details unless they are universally applicable.
|
||||||
|
- Task-specific runbooks that belong in dedicated modules.
|
||||||
|
|
||||||
|
# Default Agent Behavior
|
||||||
|
|
||||||
|
- Read the relevant context before making changes.
|
||||||
|
- Prefer small, understandable edits over broad refactors.
|
||||||
|
- Preserve existing patterns unless there is a clear reason to change them.
|
||||||
|
- Document assumptions when context is missing.
|
||||||
|
- Ignore HTML comment segments.
|
||||||
|
- If a more specific enabled guideline exists for the current task, follow it.
|
||||||
|
|
||||||
|
# Guideline Structure
|
||||||
|
|
||||||
|
Use the following structure for reusable guidance files and project-specific documentation as needed:
|
||||||
|
|
||||||
|
```text
|
||||||
|
/
|
||||||
|
|-- AGENTS.md
|
||||||
|
|-- guidance/
|
||||||
|
| |-- stacks/
|
||||||
|
| |-- conventions/
|
||||||
|
| `-- workflows/
|
||||||
|
|-- prompts/
|
||||||
|
`-- requirements/
|
||||||
|
|
||||||
|
Optional files and directories
|
||||||
|
|-- SCRATCHPAD.md
|
||||||
|
|-- docs/
|
||||||
|
| |-- readme.md
|
||||||
|
| |-- installation.md
|
||||||
|
| `-- history.md
|
||||||
|
|-- process/
|
||||||
|
| |-- log.md
|
||||||
|
| `-- coding-handbook.md
|
||||||
|
```
|
||||||
|
|
||||||
|
# Optional Reusable Modules
|
||||||
|
|
||||||
|
Add files under `guidance/` only when they are needed.
|
||||||
|
|
||||||
|
# Optional Scratchpad
|
||||||
|
|
||||||
|
- `SCRATCHPAD.md` is an optional repo-root scratchpad for temporary
|
||||||
|
information aimed at the next iteration.
|
||||||
|
- Developers may create or delete `SCRATCHPAD.md` at any time.
|
||||||
|
- Developers may refer to `SCRATCHPAD.md` as `scratchpad` when giving agents a
|
||||||
|
source or target for information.
|
||||||
|
- Agents may read, update, create, or remove the scratchpad when the task
|
||||||
|
explicitly calls for it.
|
||||||
|
- Treat the scratchpad as low-formality working context rather than canonical
|
||||||
|
project truth.
|
||||||
|
- Use the scratchpad for short-lived notes, open questions, sketches, and
|
||||||
|
temporary decisions that should be resolved away.
|
||||||
|
- Move durable outcomes into `requirements/`, `guidance/`, code, tests, or
|
||||||
|
another long-lived location.
|
||||||
|
- If `SCRATCHPAD.md` is absent, agents should continue normally.
|
||||||
|
|
||||||
|
# Optional Rule Sets
|
||||||
|
|
||||||
|
- Optional rule sets may be stored in `guidance/optional/` or in `guidance/{section}/optional/`.
|
||||||
|
- Optional rule sets are inactive by default and shall only be applied when a prompt explicitly requests them, for example by phrases such as `Apply rules for lean interface iteration in the following steps.` or `Apply LII rules.`
|
||||||
|
- An optional rule set may be requested by its descriptive name, by its rule set ID, or by another equally clear explicit reference.
|
||||||
|
- Agents shall never infer or auto-enable optional rule sets from general intent alone.
|
||||||
|
- If an optional rule or rule set cannot be identified and addressed clearly, agents shall stop and ask before proceeding.
|
||||||
|
|
||||||
|
# Prepared Orders
|
||||||
|
|
||||||
|
- An `order` is a prepared prompt for one isolated operation rather than a general workflow or standing rule set.
|
||||||
|
- Orders shall be stored under `prompts/`.
|
||||||
|
- Order files shall use the naming schema `ORDER-0001-<slug>.md`, `ORDER-0002-<slug>.md`, and so on.
|
||||||
|
- The canonical order identifier is the `ORDER-0001` style prefix. The trailing slug is descriptive only.
|
||||||
|
- Recommended internal order file structure is: prompt ID, prompt name, purpose, trigger examples, scope, operation, and expected output.
|
||||||
|
- Orders shall only be executed when they are explicitly requested by a prompt such as `Execute ORDER-0007.` or `Execute ORDER 7.`
|
||||||
|
- Agents may accept an unambiguous short numeric reference such as `ORDER 7` as an alias for `ORDER-0007`.
|
||||||
|
- If an order cannot be identified uniquely and clearly, agents shall stop and ask before proceeding.
|
||||||
|
|
||||||
|
# Toolstack Guides
|
||||||
|
|
||||||
|
Location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
guidance/stacks/
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- `guidance/stacks/python.md`
|
||||||
|
- `guidance/stacks/typescript.md`
|
||||||
|
- `guidance/stacks/docker.md`
|
||||||
|
- `guidance/stacks/terraform.md`
|
||||||
|
|
||||||
|
Use for:
|
||||||
|
|
||||||
|
- Language or framework expectations.
|
||||||
|
- Tooling and environment conventions.
|
||||||
|
- Build, test, and runtime guidance tied to a specific stack.
|
||||||
|
|
||||||
|
# Coding Conventions
|
||||||
|
|
||||||
|
Location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
guidance/conventions/
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- `guidance/conventions/naming.md`
|
||||||
|
- `guidance/conventions/testing.md`
|
||||||
|
- `guidance/conventions/review.md`
|
||||||
|
|
||||||
|
Use for:
|
||||||
|
|
||||||
|
- Naming and structure conventions.
|
||||||
|
- Testing expectations.
|
||||||
|
- Code review and quality rules.
|
||||||
|
|
||||||
|
# Recurring Workflows
|
||||||
|
|
||||||
|
Location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
guidance/workflows/
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- `guidance/workflows/feature-delivery.md`
|
||||||
|
- `guidance/workflows/bugfix.md`
|
||||||
|
- `guidance/workflows/release.md`
|
||||||
|
- `guidance/workflows/incident-response.md`
|
||||||
|
|
||||||
|
Use for:
|
||||||
|
|
||||||
|
- Repeatable task flows.
|
||||||
|
- Checklists for common delivery work.
|
||||||
|
- Operational or maintenance procedures.
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Enable this optional section by removing the outer HTML comment markers from this segment
|
||||||
|
when you want agents to create, update, and consult released-product
|
||||||
|
documentation in `docs/`.
|
||||||
|
|
||||||
|
# Released Product Documentation
|
||||||
|
|
||||||
|
Released-product documentation should live outside the generic sections above.
|
||||||
|
|
||||||
|
Recommended location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
docs/
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- `docs/readme.md`
|
||||||
|
- `docs/installation.md`
|
||||||
|
- `docs/history.md`
|
||||||
|
|
||||||
|
Agent rules for docs output:
|
||||||
|
|
||||||
|
- Keep content compact but comprehensive.
|
||||||
|
- Write for end users, operators, or other consumers of the released product.
|
||||||
|
- Prefer shipped behavior, supported workflows, and stable terminology over
|
||||||
|
internal implementation detail.
|
||||||
|
- Keep documentation synchronized with released behavior.
|
||||||
|
- Update release history when user-visible changes are shipped.
|
||||||
|
|
||||||
|
Recommended topics:
|
||||||
|
|
||||||
|
- Product overview and intended use.
|
||||||
|
- Installation, configuration, and upgrade guidance.
|
||||||
|
- Usage patterns, operational instructions, and support boundaries.
|
||||||
|
- Compatibility notes, migration notes, and release history.
|
||||||
|
- Troubleshooting and common pitfalls when relevant. -->
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Enable this optional section by removing the outer HTML comment markers from this
|
||||||
|
segment when you want agents to produce and consult workflow output in `process/`.
|
||||||
|
|
||||||
|
# Agent Output In `process/`
|
||||||
|
|
||||||
|
The `process/` directory is primarily for agent output created during
|
||||||
|
delivery, maintenance, and review work.
|
||||||
|
|
||||||
|
Recommended location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
process/
|
||||||
|
```
|
||||||
|
|
||||||
|
Agent rules for process output:
|
||||||
|
|
||||||
|
- Use `process/` for agent-produced artifacts rather than released-product
|
||||||
|
documentation.
|
||||||
|
- Keep entries concise, traceable, and tied to resulting changes.
|
||||||
|
- Treat `process/` as workflow output, not as the primary source of product
|
||||||
|
truth.
|
||||||
|
- Prefer summaries and rationale over raw transcript dumps unless a workflow
|
||||||
|
explicitly requires full prompt history.
|
||||||
|
|
||||||
|
# Agent Change Log
|
||||||
|
|
||||||
|
Location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
process/log.md
|
||||||
|
```
|
||||||
|
|
||||||
|
Use for:
|
||||||
|
|
||||||
|
- Capturing prompts given to agents.
|
||||||
|
- Recording concise explanations of the resulting changes made by agents.
|
||||||
|
- Preserving task-by-task rationale, decisions, and implementation notes.
|
||||||
|
|
||||||
|
# Coding Handbook
|
||||||
|
|
||||||
|
Location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
process/coding-handbook.md
|
||||||
|
```
|
||||||
|
|
||||||
|
Use for:
|
||||||
|
|
||||||
|
- A tutorial-style handbook that explains the programming components used in
|
||||||
|
the project.
|
||||||
|
- Compact but comprehensive technical onboarding material for future
|
||||||
|
contributors.
|
||||||
|
- Written explanations that connect code structure, concepts, and
|
||||||
|
implementation patterns. -->
|
||||||
|
|
||||||
|
|
||||||
|
# Project-Specific Requirements
|
||||||
|
|
||||||
|
|
||||||
|
Project-specific material should live outside the generic sections above.
|
||||||
|
|
||||||
|
Recommended location:
|
||||||
|
|
||||||
|
```text
|
||||||
|
requirements/
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- `requirements/project.md`
|
||||||
|
- `requirements/architecture.md`
|
||||||
|
- `requirements/decisions.md`
|
||||||
|
- `requirements/domain.md`
|
||||||
|
|
||||||
|
Use for:
|
||||||
|
|
||||||
|
- Product and business requirements.
|
||||||
|
- Project goals and constraints.
|
||||||
|
- Architecture and design decisions.
|
||||||
|
- Domain knowledge that is specific to this repository.
|
||||||
|
|
||||||
|
# Agent-Level Variables
|
||||||
|
|
||||||
|
When present, `requirements/identifiers.yml` is an optional project-specific
|
||||||
|
input that defines agent-level variables for use inside `requirements/` and
|
||||||
|
`guidance/`.
|
||||||
|
|
||||||
|
Variable schema:
|
||||||
|
|
||||||
|
- Use `@{VARIABLE_NAME}` for agent-level variables.
|
||||||
|
- Prefer uppercase snake case names such as `@{PROJECT_ID}` or `@{VENDOR_ID}`.
|
||||||
|
- Do not treat `${...}` as an agent-level variable form; that syntax may appear
|
||||||
|
in Bash or other code and should not be interpreted as agent metadata.
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
- The effective scope of `requirements/identifiers.yml` is limited to
|
||||||
|
`requirements/` and `guidance/`.
|
||||||
|
- Definitions from `requirements/identifiers.yml` must not leak into product code.
|
||||||
|
|
||||||
|
Defaults:
|
||||||
|
|
||||||
|
- Default `@{VENDOR_ID}` is `osgw`.
|
||||||
|
- Default `@{PROJECT_ID}` is the current repository directory name.
|
||||||
|
|
||||||
|
Resolution rules:
|
||||||
|
|
||||||
|
- Treat `requirements/identifiers.yml` as optional; when it is absent, agents
|
||||||
|
may still resolve the defaults defined above.
|
||||||
|
- If a variable is used in `requirements/` or `guidance/` and it is not
|
||||||
|
defined in `requirements/identifiers.yml` and does not have a default in this
|
||||||
|
file, agents may stop and report the undefined variable.
|
||||||
|
- Prefer updating duplicated identifier values in `requirements/` and
|
||||||
|
`guidance/` to use the variable schema when that improves consistency.
|
||||||
|
|
||||||
|
# Precedence
|
||||||
|
|
||||||
|
Some precedence levels may be absent because optional levels can remain inside
|
||||||
|
HTML comments. The smaller numeric index wins.
|
||||||
|
|
||||||
|
Apply guidance in this order:
|
||||||
|
|
||||||
|
1. Direct user or task instructions.
|
||||||
|
2. Project-specific documents in `requirements/`.
|
||||||
|
<!-- 3. Released-product documentation in `docs/` when shipped behavior or
|
||||||
|
user-facing expectations are relevant. -->
|
||||||
|
4. Relevant modular guides in `guidance/stacks/`, `guidance/conventions/`, or `guidance/workflows/`.
|
||||||
|
<!-- 5. Agent output in `process/` when prior prompts, rationale, or
|
||||||
|
implementation notes are relevant. -->
|
||||||
|
6. This `AGENTS.md`.
|
||||||
|
|
||||||
|
# Maintenance
|
||||||
|
|
||||||
|
- Keep this file short and stable.
|
||||||
|
- Move detail into dedicated modules when a section becomes too specific or too long.
|
||||||
|
- Add new guideline files only when they solve a recurring need.
|
||||||
|
- Remove outdated references when the repository structure changes.
|
||||||
|
|
||||||
|
# Current Status
|
||||||
|
|
||||||
|
This repository defines the base `AGENTS.md` structure plus project-specific
|
||||||
|
requirements and modular guidance.
|
||||||
|
|
||||||
|
Future project work can add:
|
||||||
|
|
||||||
|
- Reusable modules under `guidance/`
|
||||||
|
- Project-specific documentation under `requirements/`
|
||||||
|
- Optional temporary iteration context in `SCRATCHPAD.md`
|
||||||
|
- Optional released-product documentation under `docs/` by uncommenting its segment
|
||||||
|
- Optional agent output under `process/` by uncommenting its segment
|
||||||
|
- Cross-references from this file once those documents exist
|
||||||
595
LICENSE.md
Normal file
595
LICENSE.md
Normal file
@@ -0,0 +1,595 @@
|
|||||||
|
GNU General Public License
|
||||||
|
==========================
|
||||||
|
|
||||||
|
_Version 3, 29 June 2007_
|
||||||
|
_Copyright © 2007 Free Software Foundation, Inc. <<http://fsf.org/>>_
|
||||||
|
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies of this license
|
||||||
|
document, but changing it is not allowed.
|
||||||
|
|
||||||
|
## Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for software and other
|
||||||
|
kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed to take away
|
||||||
|
your freedom to share and change the works. By contrast, the GNU General Public
|
||||||
|
License is intended to guarantee your freedom to share and change all versions of a
|
||||||
|
program--to make sure it remains free software for all its users. We, the Free
|
||||||
|
Software Foundation, use the GNU General Public License for most of our software; it
|
||||||
|
applies also to any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not price. Our General
|
||||||
|
Public Licenses are designed to make sure that you have the freedom to distribute
|
||||||
|
copies of free software (and charge for them if you wish), that you receive source
|
||||||
|
code or can get it if you want it, that you can change the software or use pieces of
|
||||||
|
it in new free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you these rights or
|
||||||
|
asking you to surrender the rights. Therefore, you have certain responsibilities if
|
||||||
|
you distribute copies of the software, or if you modify it: responsibilities to
|
||||||
|
respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether gratis or for a fee,
|
||||||
|
you must pass on to the recipients the same freedoms that you received. You must make
|
||||||
|
sure that they, too, receive or can get the source code. And you must show them these
|
||||||
|
terms so they know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps: **(1)** assert
|
||||||
|
copyright on the software, and **(2)** offer you this License giving you legal permission
|
||||||
|
to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains that there is
|
||||||
|
no warranty for this free software. For both users' and authors' sake, the GPL
|
||||||
|
requires that modified versions be marked as changed, so that their problems will not
|
||||||
|
be attributed erroneously to authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run modified versions of
|
||||||
|
the software inside them, although the manufacturer can do so. This is fundamentally
|
||||||
|
incompatible with the aim of protecting users' freedom to change the software. The
|
||||||
|
systematic pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we have designed
|
||||||
|
this version of the GPL to prohibit the practice for those products. If such problems
|
||||||
|
arise substantially in other domains, we stand ready to extend this provision to
|
||||||
|
those domains in future versions of the GPL, as needed to protect the freedom of
|
||||||
|
users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents. States should
|
||||||
|
not allow patents to restrict development and use of software on general-purpose
|
||||||
|
computers, but in those that do, we wish to avoid the special danger that patents
|
||||||
|
applied to a free program could make it effectively proprietary. To prevent this, the
|
||||||
|
GPL assures that patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and modification follow.
|
||||||
|
|
||||||
|
## TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
### 0. Definitions
|
||||||
|
|
||||||
|
“This License” refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
“Copyright” also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
“The Program” refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as “you”. “Licensees” and
|
||||||
|
“recipients” may be individuals or organizations.
|
||||||
|
|
||||||
|
To “modify” a work means to copy from or adapt all or part of the work in
|
||||||
|
a fashion requiring copyright permission, other than the making of an exact copy. The
|
||||||
|
resulting work is called a “modified version” of the earlier work or a
|
||||||
|
work “based on” the earlier work.
|
||||||
|
|
||||||
|
A “covered work” means either the unmodified Program or a work based on
|
||||||
|
the Program.
|
||||||
|
|
||||||
|
To “propagate” a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for infringement under
|
||||||
|
applicable copyright law, except executing it on a computer or modifying a private
|
||||||
|
copy. Propagation includes copying, distribution (with or without modification),
|
||||||
|
making available to the public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To “convey” a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through a computer
|
||||||
|
network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays “Appropriate Legal Notices” to the
|
||||||
|
extent that it includes a convenient and prominently visible feature that **(1)**
|
||||||
|
displays an appropriate copyright notice, and **(2)** tells the user that there is no
|
||||||
|
warranty for the work (except to the extent that warranties are provided), that
|
||||||
|
licensees may convey the work under this License, and how to view a copy of this
|
||||||
|
License. If the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
### 1. Source Code
|
||||||
|
|
||||||
|
The “source code” for a work means the preferred form of the work for
|
||||||
|
making modifications to it. “Object code” means any non-source form of a
|
||||||
|
work.
|
||||||
|
|
||||||
|
A “Standard Interface” means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of interfaces
|
||||||
|
specified for a particular programming language, one that is widely used among
|
||||||
|
developers working in that language.
|
||||||
|
|
||||||
|
The “System Libraries” of an executable work include anything, other than
|
||||||
|
the work as a whole, that **(a)** is included in the normal form of packaging a Major
|
||||||
|
Component, but which is not part of that Major Component, and **(b)** serves only to
|
||||||
|
enable use of the work with that Major Component, or to implement a Standard
|
||||||
|
Interface for which an implementation is available to the public in source code form.
|
||||||
|
A “Major Component”, in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system (if any) on which
|
||||||
|
the executable work runs, or a compiler used to produce the work, or an object code
|
||||||
|
interpreter used to run it.
|
||||||
|
|
||||||
|
The “Corresponding Source” for a work in object code form means all the
|
||||||
|
source code needed to generate, install, and (for an executable work) run the object
|
||||||
|
code and to modify the work, including scripts to control those activities. However,
|
||||||
|
it does not include the work's System Libraries, or general-purpose tools or
|
||||||
|
generally available free programs which are used unmodified in performing those
|
||||||
|
activities but which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for the work, and
|
||||||
|
the source code for shared libraries and dynamically linked subprograms that the work
|
||||||
|
is specifically designed to require, such as by intimate data communication or
|
||||||
|
control flow between those subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users can regenerate
|
||||||
|
automatically from other parts of the Corresponding Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that same work.
|
||||||
|
|
||||||
|
### 2. Basic Permissions
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of copyright on the
|
||||||
|
Program, and are irrevocable provided the stated conditions are met. This License
|
||||||
|
explicitly affirms your unlimited permission to run the unmodified Program. The
|
||||||
|
output from running a covered work is covered by this License only if the output,
|
||||||
|
given its content, constitutes a covered work. This License acknowledges your rights
|
||||||
|
of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not convey, without
|
||||||
|
conditions so long as your license otherwise remains in force. You may convey covered
|
||||||
|
works to others for the sole purpose of having them make modifications exclusively
|
||||||
|
for you, or provide you with facilities for running those works, provided that you
|
||||||
|
comply with the terms of this License in conveying all material for which you do not
|
||||||
|
control copyright. Those thus making or running the covered works for you must do so
|
||||||
|
exclusively on your behalf, under your direction and control, on terms that prohibit
|
||||||
|
them from making any copies of your copyrighted material outside their relationship
|
||||||
|
with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under the conditions
|
||||||
|
stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
|
||||||
|
|
||||||
|
### 3. Protecting Users' Legal Rights From Anti-Circumvention Law
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological measure under any
|
||||||
|
applicable law fulfilling obligations under article 11 of the WIPO copyright treaty
|
||||||
|
adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention
|
||||||
|
of such measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid circumvention of
|
||||||
|
technological measures to the extent such circumvention is effected by exercising
|
||||||
|
rights under this License with respect to the covered work, and you disclaim any
|
||||||
|
intention to limit operation or modification of the work as a means of enforcing,
|
||||||
|
against the work's users, your or third parties' legal rights to forbid circumvention
|
||||||
|
of technological measures.
|
||||||
|
|
||||||
|
### 4. Conveying Verbatim Copies
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you receive it, in any
|
||||||
|
medium, provided that you conspicuously and appropriately publish on each copy an
|
||||||
|
appropriate copyright notice; keep intact all notices stating that this License and
|
||||||
|
any non-permissive terms added in accord with section 7 apply to the code; keep
|
||||||
|
intact all notices of the absence of any warranty; and give all recipients a copy of
|
||||||
|
this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey, and you may offer
|
||||||
|
support or warranty protection for a fee.
|
||||||
|
|
||||||
|
### 5. Conveying Modified Source Versions
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to produce it from
|
||||||
|
the Program, in the form of source code under the terms of section 4, provided that
|
||||||
|
you also meet all of these conditions:
|
||||||
|
|
||||||
|
* **a)** The work must carry prominent notices stating that you modified it, and giving a
|
||||||
|
relevant date.
|
||||||
|
* **b)** The work must carry prominent notices stating that it is released under this
|
||||||
|
License and any conditions added under section 7. This requirement modifies the
|
||||||
|
requirement in section 4 to “keep intact all notices”.
|
||||||
|
* **c)** You must license the entire work, as a whole, under this License to anyone who
|
||||||
|
comes into possession of a copy. This License will therefore apply, along with any
|
||||||
|
applicable section 7 additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no permission to license the
|
||||||
|
work in any other way, but it does not invalidate such permission if you have
|
||||||
|
separately received it.
|
||||||
|
* **d)** If the work has interactive user interfaces, each must display Appropriate Legal
|
||||||
|
Notices; however, if the Program has interactive interfaces that do not display
|
||||||
|
Appropriate Legal Notices, your work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent works, which are
|
||||||
|
not by their nature extensions of the covered work, and which are not combined with
|
||||||
|
it such as to form a larger program, in or on a volume of a storage or distribution
|
||||||
|
medium, is called an “aggregate” if the compilation and its resulting
|
||||||
|
copyright are not used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work in an aggregate
|
||||||
|
does not cause this License to apply to the other parts of the aggregate.
|
||||||
|
|
||||||
|
### 6. Conveying Non-Source Forms
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms of sections 4 and
|
||||||
|
5, provided that you also convey the machine-readable Corresponding Source under the
|
||||||
|
terms of this License, in one of these ways:
|
||||||
|
|
||||||
|
* **a)** Convey the object code in, or embodied in, a physical product (including a
|
||||||
|
physical distribution medium), accompanied by the Corresponding Source fixed on a
|
||||||
|
durable physical medium customarily used for software interchange.
|
||||||
|
* **b)** Convey the object code in, or embodied in, a physical product (including a
|
||||||
|
physical distribution medium), accompanied by a written offer, valid for at least
|
||||||
|
three years and valid for as long as you offer spare parts or customer support for
|
||||||
|
that product model, to give anyone who possesses the object code either **(1)** a copy of
|
||||||
|
the Corresponding Source for all the software in the product that is covered by this
|
||||||
|
License, on a durable physical medium customarily used for software interchange, for
|
||||||
|
a price no more than your reasonable cost of physically performing this conveying of
|
||||||
|
source, or **(2)** access to copy the Corresponding Source from a network server at no
|
||||||
|
charge.
|
||||||
|
* **c)** Convey individual copies of the object code with a copy of the written offer to
|
||||||
|
provide the Corresponding Source. This alternative is allowed only occasionally and
|
||||||
|
noncommercially, and only if you received the object code with such an offer, in
|
||||||
|
accord with subsection 6b.
|
||||||
|
* **d)** Convey the object code by offering access from a designated place (gratis or for
|
||||||
|
a charge), and offer equivalent access to the Corresponding Source in the same way
|
||||||
|
through the same place at no further charge. You need not require recipients to copy
|
||||||
|
the Corresponding Source along with the object code. If the place to copy the object
|
||||||
|
code is a network server, the Corresponding Source may be on a different server
|
||||||
|
(operated by you or a third party) that supports equivalent copying facilities,
|
||||||
|
provided you maintain clear directions next to the object code saying where to find
|
||||||
|
the Corresponding Source. Regardless of what server hosts the Corresponding Source,
|
||||||
|
you remain obligated to ensure that it is available for as long as needed to satisfy
|
||||||
|
these requirements.
|
||||||
|
* **e)** Convey the object code using peer-to-peer transmission, provided you inform
|
||||||
|
other peers where the object code and Corresponding Source of the work are being
|
||||||
|
offered to the general public at no charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded from the
|
||||||
|
Corresponding Source as a System Library, need not be included in conveying the
|
||||||
|
object code work.
|
||||||
|
|
||||||
|
A “User Product” is either **(1)** a “consumer product”, which
|
||||||
|
means any tangible personal property which is normally used for personal, family, or
|
||||||
|
household purposes, or **(2)** anything designed or sold for incorporation into a
|
||||||
|
dwelling. In determining whether a product is a consumer product, doubtful cases
|
||||||
|
shall be resolved in favor of coverage. For a particular product received by a
|
||||||
|
particular user, “normally used” refers to a typical or common use of
|
||||||
|
that class of product, regardless of the status of the particular user or of the way
|
||||||
|
in which the particular user actually uses, or expects or is expected to use, the
|
||||||
|
product. A product is a consumer product regardless of whether the product has
|
||||||
|
substantial commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
“Installation Information” for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install and execute
|
||||||
|
modified versions of a covered work in that User Product from a modified version of
|
||||||
|
its Corresponding Source. The information must suffice to ensure that the continued
|
||||||
|
functioning of the modified object code is in no case prevented or interfered with
|
||||||
|
solely because modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or specifically for
|
||||||
|
use in, a User Product, and the conveying occurs as part of a transaction in which
|
||||||
|
the right of possession and use of the User Product is transferred to the recipient
|
||||||
|
in perpetuity or for a fixed term (regardless of how the transaction is
|
||||||
|
characterized), the Corresponding Source conveyed under this section must be
|
||||||
|
accompanied by the Installation Information. But this requirement does not apply if
|
||||||
|
neither you nor any third party retains the ability to install modified object code
|
||||||
|
on the User Product (for example, the work has been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a requirement to
|
||||||
|
continue to provide support service, warranty, or updates for a work that has been
|
||||||
|
modified or installed by the recipient, or for the User Product in which it has been
|
||||||
|
modified or installed. Access to a network may be denied when the modification itself
|
||||||
|
materially and adversely affects the operation of the network or violates the rules
|
||||||
|
and protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided, in accord with
|
||||||
|
this section must be in a format that is publicly documented (and with an
|
||||||
|
implementation available to the public in source code form), and must require no
|
||||||
|
special password or key for unpacking, reading or copying.
|
||||||
|
|
||||||
|
### 7. Additional Terms
|
||||||
|
|
||||||
|
“Additional permissions” are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions. Additional
|
||||||
|
permissions that are applicable to the entire Program shall be treated as though they
|
||||||
|
were included in this License, to the extent that they are valid under applicable
|
||||||
|
law. If additional permissions apply only to part of the Program, that part may be
|
||||||
|
used separately under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option remove any
|
||||||
|
additional permissions from that copy, or from any part of it. (Additional
|
||||||
|
permissions may be written to require their own removal in certain cases when you
|
||||||
|
modify the work.) You may place additional permissions on material, added by you to a
|
||||||
|
covered work, for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you add to a
|
||||||
|
covered work, you may (if authorized by the copyright holders of that material)
|
||||||
|
supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
* **a)** Disclaiming warranty or limiting liability differently from the terms of
|
||||||
|
sections 15 and 16 of this License; or
|
||||||
|
* **b)** Requiring preservation of specified reasonable legal notices or author
|
||||||
|
attributions in that material or in the Appropriate Legal Notices displayed by works
|
||||||
|
containing it; or
|
||||||
|
* **c)** Prohibiting misrepresentation of the origin of that material, or requiring that
|
||||||
|
modified versions of such material be marked in reasonable ways as different from the
|
||||||
|
original version; or
|
||||||
|
* **d)** Limiting the use for publicity purposes of names of licensors or authors of the
|
||||||
|
material; or
|
||||||
|
* **e)** Declining to grant rights under trademark law for use of some trade names,
|
||||||
|
trademarks, or service marks; or
|
||||||
|
* **f)** Requiring indemnification of licensors and authors of that material by anyone
|
||||||
|
who conveys the material (or modified versions of it) with contractual assumptions of
|
||||||
|
liability to the recipient, for any liability that these contractual assumptions
|
||||||
|
directly impose on those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered “further
|
||||||
|
restrictions” within the meaning of section 10. If the Program as you received
|
||||||
|
it, or any part of it, contains a notice stating that it is governed by this License
|
||||||
|
along with a term that is a further restriction, you may remove that term. If a
|
||||||
|
license document contains a further restriction but permits relicensing or conveying
|
||||||
|
under this License, you may add to a covered work material governed by the terms of
|
||||||
|
that license document, provided that the further restriction does not survive such
|
||||||
|
relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you must place, in
|
||||||
|
the relevant source files, a statement of the additional terms that apply to those
|
||||||
|
files, or a notice indicating where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the form of a
|
||||||
|
separately written license, or stated as exceptions; the above requirements apply
|
||||||
|
either way.
|
||||||
|
|
||||||
|
### 8. Termination
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly provided under
|
||||||
|
this License. Any attempt otherwise to propagate or modify it is void, and will
|
||||||
|
automatically terminate your rights under this License (including any patent licenses
|
||||||
|
granted under the third paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your license from a
|
||||||
|
particular copyright holder is reinstated **(a)** provisionally, unless and until the
|
||||||
|
copyright holder explicitly and finally terminates your license, and **(b)** permanently,
|
||||||
|
if the copyright holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is reinstated permanently
|
||||||
|
if the copyright holder notifies you of the violation by some reasonable means, this
|
||||||
|
is the first time you have received notice of violation of this License (for any
|
||||||
|
work) from that copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the licenses of
|
||||||
|
parties who have received copies or rights from you under this License. If your
|
||||||
|
rights have been terminated and not permanently reinstated, you do not qualify to
|
||||||
|
receive new licenses for the same material under section 10.
|
||||||
|
|
||||||
|
### 9. Acceptance Not Required for Having Copies
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or run a copy of the
|
||||||
|
Program. Ancillary propagation of a covered work occurring solely as a consequence of
|
||||||
|
using peer-to-peer transmission to receive a copy likewise does not require
|
||||||
|
acceptance. However, nothing other than this License grants you permission to
|
||||||
|
propagate or modify any covered work. These actions infringe copyright if you do not
|
||||||
|
accept this License. Therefore, by modifying or propagating a covered work, you
|
||||||
|
indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
### 10. Automatic Licensing of Downstream Recipients
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically receives a license
|
||||||
|
from the original licensors, to run, modify and propagate that work, subject to this
|
||||||
|
License. You are not responsible for enforcing compliance by third parties with this
|
||||||
|
License.
|
||||||
|
|
||||||
|
An “entity transaction” is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an organization, or
|
||||||
|
merging organizations. If propagation of a covered work results from an entity
|
||||||
|
transaction, each party to that transaction who receives a copy of the work also
|
||||||
|
receives whatever licenses to the work the party's predecessor in interest had or
|
||||||
|
could give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if the predecessor
|
||||||
|
has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the rights granted or
|
||||||
|
affirmed under this License. For example, you may not impose a license fee, royalty,
|
||||||
|
or other charge for exercise of rights granted under this License, and you may not
|
||||||
|
initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||||
|
that any patent claim is infringed by making, using, selling, offering for sale, or
|
||||||
|
importing the Program or any portion of it.
|
||||||
|
|
||||||
|
### 11. Patents
|
||||||
|
|
||||||
|
A “contributor” is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The work thus
|
||||||
|
licensed is called the contributor's “contributor version”.
|
||||||
|
|
||||||
|
A contributor's “essential patent claims” are all patent claims owned or
|
||||||
|
controlled by the contributor, whether already acquired or hereafter acquired, that
|
||||||
|
would be infringed by some manner, permitted by this License, of making, using, or
|
||||||
|
selling its contributor version, but do not include claims that would be infringed
|
||||||
|
only as a consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, “control” includes the right to grant patent
|
||||||
|
sublicenses in a manner consistent with the requirements of this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license
|
||||||
|
under the contributor's essential patent claims, to make, use, sell, offer for sale,
|
||||||
|
import and otherwise run, modify and propagate the contents of its contributor
|
||||||
|
version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a “patent license” is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent (such as an
|
||||||
|
express permission to practice a patent or covenant not to sue for patent
|
||||||
|
infringement). To “grant” such a patent license to a party means to make
|
||||||
|
such an agreement or commitment not to enforce a patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license, and the
|
||||||
|
Corresponding Source of the work is not available for anyone to copy, free of charge
|
||||||
|
and under the terms of this License, through a publicly available network server or
|
||||||
|
other readily accessible means, then you must either **(1)** cause the Corresponding
|
||||||
|
Source to be so available, or **(2)** arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or **(3)** arrange, in a manner consistent with
|
||||||
|
the requirements of this License, to extend the patent license to downstream
|
||||||
|
recipients. “Knowingly relying” means you have actual knowledge that, but
|
||||||
|
for the patent license, your conveying the covered work in a country, or your
|
||||||
|
recipient's use of the covered work in a country, would infringe one or more
|
||||||
|
identifiable patents in that country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or arrangement, you
|
||||||
|
convey, or propagate by procuring conveyance of, a covered work, and grant a patent
|
||||||
|
license to some of the parties receiving the covered work authorizing them to use,
|
||||||
|
propagate, modify or convey a specific copy of the covered work, then the patent
|
||||||
|
license you grant is automatically extended to all recipients of the covered work and
|
||||||
|
works based on it.
|
||||||
|
|
||||||
|
A patent license is “discriminatory” if it does not include within the
|
||||||
|
scope of its coverage, prohibits the exercise of, or is conditioned on the
|
||||||
|
non-exercise of one or more of the rights that are specifically granted under this
|
||||||
|
License. You may not convey a covered work if you are a party to an arrangement with
|
||||||
|
a third party that is in the business of distributing software, under which you make
|
||||||
|
payment to the third party based on the extent of your activity of conveying the
|
||||||
|
work, and under which the third party grants, to any of the parties who would receive
|
||||||
|
the covered work from you, a discriminatory patent license **(a)** in connection with
|
||||||
|
copies of the covered work conveyed by you (or copies made from those copies), or **(b)**
|
||||||
|
primarily for and in connection with specific products or compilations that contain
|
||||||
|
the covered work, unless you entered into that arrangement, or that patent license
|
||||||
|
was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting any implied
|
||||||
|
license or other defenses to infringement that may otherwise be available to you
|
||||||
|
under applicable patent law.
|
||||||
|
|
||||||
|
### 12. No Surrender of Others' Freedom
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or otherwise)
|
||||||
|
that contradict the conditions of this License, they do not excuse you from the
|
||||||
|
conditions of this License. If you cannot convey a covered work so as to satisfy
|
||||||
|
simultaneously your obligations under this License and any other pertinent
|
||||||
|
obligations, then as a consequence you may not convey it at all. For example, if you
|
||||||
|
agree to terms that obligate you to collect a royalty for further conveying from
|
||||||
|
those to whom you convey the Program, the only way you could satisfy both those terms
|
||||||
|
and this License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
### 13. Use with the GNU Affero General Public License
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have permission to link or
|
||||||
|
combine any covered work with a work licensed under version 3 of the GNU Affero
|
||||||
|
General Public License into a single combined work, and to convey the resulting work.
|
||||||
|
The terms of this License will continue to apply to the part which is the covered
|
||||||
|
work, but the special requirements of the GNU Affero General Public License, section
|
||||||
|
13, concerning interaction through a network will apply to the combination as such.
|
||||||
|
|
||||||
|
### 14. Revised Versions of this License
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of the GNU
|
||||||
|
General Public License from time to time. Such new versions will be similar in spirit
|
||||||
|
to the present version, but may differ in detail to address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the Program specifies that
|
||||||
|
a certain numbered version of the GNU General Public License “or any later
|
||||||
|
version” applies to it, you have the option of following the terms and
|
||||||
|
conditions either of that numbered version or of any later version published by the
|
||||||
|
Free Software Foundation. If the Program does not specify a version number of the GNU
|
||||||
|
General Public License, you may choose any version ever published by the Free
|
||||||
|
Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future versions of the GNU
|
||||||
|
General Public License can be used, that proxy's public statement of acceptance of a
|
||||||
|
version permanently authorizes you to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different permissions. However, no
|
||||||
|
additional obligations are imposed on any author or copyright holder as a result of
|
||||||
|
your choosing to follow a later version.
|
||||||
|
|
||||||
|
### 15. Disclaimer of Warranty
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
|
||||||
|
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||||
|
PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER
|
||||||
|
EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE
|
||||||
|
QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
|
||||||
|
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
### 16. Limitation of Liability
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY
|
||||||
|
COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS
|
||||||
|
PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
|
||||||
|
INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||||
|
PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE
|
||||||
|
OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE
|
||||||
|
WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||||
|
POSSIBILITY OF SUCH DAMAGES.
|
||||||
|
|
||||||
|
### 17. Interpretation of Sections 15 and 16
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided above cannot be
|
||||||
|
given local legal effect according to their terms, reviewing courts shall apply local
|
||||||
|
law that most closely approximates an absolute waiver of all civil liability in
|
||||||
|
connection with the Program, unless a warranty or assumption of liability accompanies
|
||||||
|
a copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
_END OF TERMS AND CONDITIONS_
|
||||||
|
|
||||||
|
## How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest possible use to
|
||||||
|
the public, the best way to achieve this is to make it free software which everyone
|
||||||
|
can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest to attach them
|
||||||
|
to the start of each source file to most effectively state the exclusion of warranty;
|
||||||
|
and each file should have at least the “copyright” line and a pointer to
|
||||||
|
where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If the program does terminal interaction, make it output a short notice like this
|
||||||
|
when it starts in an interactive mode:
|
||||||
|
|
||||||
|
<program> Copyright (C) <year> <name of author>
|
||||||
|
This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'.
|
||||||
|
This is free software, and you are welcome to redistribute it
|
||||||
|
under certain conditions; type 'show c' for details.
|
||||||
|
|
||||||
|
The hypothetical commands `show w` and `show c` should show the appropriate parts of
|
||||||
|
the General Public License. Of course, your program's commands might be different;
|
||||||
|
for a GUI interface, you would use an “about box”.
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school, if any, to
|
||||||
|
sign a “copyright disclaimer” for the program, if necessary. For more
|
||||||
|
information on this, and how to apply and follow the GNU GPL, see
|
||||||
|
<<http://www.gnu.org/licenses/>>.
|
||||||
|
|
||||||
|
The GNU General Public License does not permit incorporating your program into
|
||||||
|
proprietary programs. If your program is a subroutine library, you may consider it
|
||||||
|
more useful to permit linking proprietary applications with the library. If this is
|
||||||
|
what you want to do, use the GNU Lesser General Public License instead of this
|
||||||
|
License. But first, please read
|
||||||
|
<<http://www.gnu.org/philosophy/why-not-lgpl.html>>.
|
||||||
166
README.md
Normal file
166
README.md
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
# FFX
|
||||||
|
|
||||||
|
FFX is a local CLI and Textual TUI for inspecting TV episode files, storing normalization rules in SQLite, and converting outputs into a predictable stream, metadata, and filename layout.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
- Linux-like environment
|
||||||
|
- `python3`
|
||||||
|
- `ffmpeg`
|
||||||
|
- `ffprobe`
|
||||||
|
- `cpulimit`
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
FFX uses a two-step local setup flow.
|
||||||
|
|
||||||
|
### 1. Install The Bundle
|
||||||
|
|
||||||
|
This step creates or reuses the persistent bundle virtualenv in `~/.local/share/ffx.venv`, installs FFX into it, and ensures `ffx` is exposed through a shell alias.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/setup.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
If you also want the Python packages needed for the modern test suite:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/setup.sh --with-tests
|
||||||
|
```
|
||||||
|
|
||||||
|
You can verify the bundle state without changing anything:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/setup.sh --check
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Prepare System Dependencies And Local User Files
|
||||||
|
|
||||||
|
This step installs or verifies workstation dependencies and seeds local config and data directories. It is the step wrapped by the CLI command `ffx configure_workstation`.
|
||||||
|
|
||||||
|
Run it directly:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/configure_workstation.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Or through the installed CLI:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
ffx configure_workstation
|
||||||
|
```
|
||||||
|
|
||||||
|
Check-only mode is available in both forms:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/configure_workstation.sh --check
|
||||||
|
ffx configure_workstation --check
|
||||||
|
```
|
||||||
|
|
||||||
|
`tools/configure_workstation.sh` does not manage the bundle virtualenv. Python-side test packages belong to `tools/setup.sh --with-tests`.
|
||||||
|
|
||||||
|
## Basic Usage
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
ffx version
|
||||||
|
ffx inspect /path/to/episode.mkv
|
||||||
|
ffx convert /path/to/episode.mkv
|
||||||
|
ffx shows
|
||||||
|
```
|
||||||
|
|
||||||
|
## Modern Tests
|
||||||
|
|
||||||
|
Install Python test packages first:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bash tools/setup.sh --with-tests
|
||||||
|
```
|
||||||
|
|
||||||
|
Then run the modern automatically discovered test suite:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
./tools/test.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
This runner uses `pytest` and intentionally excludes the legacy harness under `tests/legacy/`.
|
||||||
|
|
||||||
|
## Default Local Paths
|
||||||
|
|
||||||
|
- Config: `~/.local/etc/ffx.json`
|
||||||
|
- Database: `~/.local/var/ffx/ffx.db`
|
||||||
|
- Log file: `~/.local/var/log/ffx.log`
|
||||||
|
- Bundle venv: `~/.local/share/ffx.venv`
|
||||||
|
|
||||||
|
## TMDB
|
||||||
|
|
||||||
|
TMDB-backed metadata enrichment requires `TMDB_API_KEY` to be set in the environment.
|
||||||
|
|
||||||
|
## Version History
|
||||||
|
|
||||||
|
### 0.2.6
|
||||||
|
|
||||||
|
- DB-free `ffx edit` workflow for in-place metadata editing via temporary-file rewrite
|
||||||
|
- inspect and edit workflows split into dedicated Textual screens with shared media-workflow support
|
||||||
|
- Textual tables and row actions now separate raw data from rendered labels to avoid markup leaking into stored metadata
|
||||||
|
- responsive screen layout pass, `Esc` back handling, sortable show/inspect tables, and improved edit-screen notifications/toggles
|
||||||
|
- application-wide UTF-8 i18n catalogs with language precedence from CLI over config over system over German default
|
||||||
|
- metadata normalization extended for localized subtitle titles, ISO language cleanup, and smarter track editor language/title helpers
|
||||||
|
|
||||||
|
### 0.2.5
|
||||||
|
|
||||||
|
- show-level quality and notes fields
|
||||||
|
- pattern-over-show-over-default season-shift resolution with dynamic DB migration loading
|
||||||
|
- migration prompt now reports the upgrade path and creates an in-place DB backup before applying schema changes
|
||||||
|
- `upgrade --branch <name>` now fetches remote-only branches before switching
|
||||||
|
- `unmux` now applies season shifting to subtitle output filenames
|
||||||
|
- convert now keeps DB-defined target subtitle dispositions authoritative over sidecar filename disposition flags when a pattern definition exists
|
||||||
|
- focused modern tests added around migrations, unmux, upgrade, and subtitle-disposition import precedence
|
||||||
|
|
||||||
|
### 0.2.4
|
||||||
|
|
||||||
|
- lightweight CLI commands now stay import-light via lazy runtime loading
|
||||||
|
- setup/config templating moved to `assets/ffx.json.j2`
|
||||||
|
- aligned two-step local setup wrappers: `ffx setup` and `ffx configure_workstation`
|
||||||
|
- combined `ffprobe` payload reuse in `FileProperties`
|
||||||
|
- configurable crop-detect sampling plus per-process crop result caching
|
||||||
|
- single-query controller accessors and conditional DB schema bootstrap
|
||||||
|
- shared screen bootstrap/controller wiring for large detail screens
|
||||||
|
- configurable default season/episode digit lengths
|
||||||
|
- digit-aware `rename` and padded `unmux` filename markers
|
||||||
|
|
||||||
|
### 0.2.3
|
||||||
|
|
||||||
|
- PyPI packaging
|
||||||
|
- output filename templating
|
||||||
|
- season shifting
|
||||||
|
- DB versioning
|
||||||
|
|
||||||
|
### 0.2.2
|
||||||
|
|
||||||
|
- CLI overrides
|
||||||
|
|
||||||
|
### 0.2.1
|
||||||
|
|
||||||
|
- signature handling
|
||||||
|
- tag cleanup
|
||||||
|
- bugfixes and refactoring
|
||||||
|
|
||||||
|
### 0.2.0
|
||||||
|
|
||||||
|
- tests
|
||||||
|
- config file
|
||||||
|
|
||||||
|
### 0.1.3
|
||||||
|
|
||||||
|
- subtitle file imports
|
||||||
|
|
||||||
|
### 0.1.2
|
||||||
|
|
||||||
|
- bugfixes
|
||||||
|
|
||||||
|
### 0.1.1
|
||||||
|
|
||||||
|
- bugfixes
|
||||||
|
- TMDB show identification
|
||||||
71
SCRATCHPAD.md
Normal file
71
SCRATCHPAD.md
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
# Scratchpad
|
||||||
|
|
||||||
|
## Goal
|
||||||
|
|
||||||
|
- Capture a compact, project-wide list of optimization candidates after a broad scan of the current FFX codebase, tooling, and requirements.
|
||||||
|
|
||||||
|
## Focused Snapshot
|
||||||
|
|
||||||
|
- Highest-leverage application optimizations:
|
||||||
|
- Decide whether placeholder help/settings screens should ship or disappear.
|
||||||
|
- Trim dead helpers and other dormant surface that still looks active.
|
||||||
|
|
||||||
|
- Highest-leverage repo and workflow optimizations:
|
||||||
|
- Continue migrating the oversized legacy test/combinator surface into focused modern tests so it is easier to run, debug, and extend.
|
||||||
|
|
||||||
|
## Optimization Candidates
|
||||||
|
|
||||||
|
1. Placeholder UI surfaces should either ship or disappear
|
||||||
|
- [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders.
|
||||||
|
- Optimization:
|
||||||
|
- Either remove them from the active UI surface or complete them.
|
||||||
|
- Avoid paying ongoing maintenance cost for unfinished navigation targets.
|
||||||
|
- Expected value:
|
||||||
|
- Leaner interface.
|
||||||
|
- Lower UX ambiguity.
|
||||||
|
|
||||||
|
2. Several helper functions are unfinished or dead-weight
|
||||||
|
- [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`.
|
||||||
|
- There are many combinator and conversion placeholders across tests and migrations.
|
||||||
|
- Optimization:
|
||||||
|
- Remove dead code, finish it, or isolate it behind a clearly dormant area.
|
||||||
|
- Avoid carrying stubbed utility surface that looks reusable but is not.
|
||||||
|
- Expected value:
|
||||||
|
- Smaller mental model.
|
||||||
|
- Less time spent re-evaluating inactive paths.
|
||||||
|
|
||||||
|
3. Test suite shape is expensive to understand and likely expensive to run
|
||||||
|
- The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py).
|
||||||
|
- A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch.
|
||||||
|
- Optimization:
|
||||||
|
- Continue replacing broad combinator matrices with focused parametrized integration and unit tests.
|
||||||
|
- Retire the bespoke legacy discovery and runner path once equivalent coverage exists.
|
||||||
|
- Normalize file naming and test discovery conventions.
|
||||||
|
- Expected value:
|
||||||
|
- Faster contributor onboarding.
|
||||||
|
- Easier CI adoption later.
|
||||||
|
|
||||||
|
## Open
|
||||||
|
|
||||||
|
- Durable shipped items have been moved into [`README.md`](/home/osgw/.local/src/codex/ffx/README.md) version history through `0.2.6`.
|
||||||
|
- Should optimization work focus first on operator-perceived latency, internal maintainability, or correctness-risk cleanup that also has performance upside?
|
||||||
|
- Is the long-term supported model still “local Linux workstation plus Textual UI,” or should optimization decisions bias toward a more scriptable/headless CLI?
|
||||||
|
|
||||||
|
## Gaps Right Now
|
||||||
|
|
||||||
|
- No explicit prioritization owner or milestone for the optimization backlog.
|
||||||
|
- No benchmark or timing harness exists for startup, probe, DB, or conversion orchestration overhead.
|
||||||
|
- Repo hygiene is still mixed with generated artifacts and some clearly unfinished files.
|
||||||
|
- The legacy TMDB-backed `Scenario 4` path is currently blocked by a pattern/track regression: `Patterns must define at least one track before they can be stored.` This surfaced while rerunning TMDB-dependent checks after the zero-track pattern hardening.
|
||||||
|
|
||||||
|
## Next
|
||||||
|
|
||||||
|
1. Triage the list into quick wins, medium refactors, and long-horizon cleanup.
|
||||||
|
2. Tackle the cheapest remaining product-surface cleanup first:
|
||||||
|
- placeholder UI surfaces and dead helper cleanup.
|
||||||
|
3. Continue replacing oversized legacy test matrices with focused modern integration and unit coverage.
|
||||||
|
4. Triage the legacy `Scenario 4` pattern/track failure and decide whether to fix the harness, adapt it to the zero-track guard, or retire that path during the ongoing test-suite migration.
|
||||||
|
|
||||||
|
## Delete When
|
||||||
|
|
||||||
|
- Delete this scratchpad once the optimization backlog is either converted into issues/work items or distilled into durable project guidance.
|
||||||
37
assets/ffx.json.j2
Normal file
37
assets/ffx.json.j2
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"language": {{ language_json }},
|
||||||
|
"databasePath": {{ database_path_json }},
|
||||||
|
"logDirectory": {{ log_directory_json }},
|
||||||
|
"subtitlesDirectory": {{ subtitles_directory_json }},
|
||||||
|
"defaultIndexSeasonDigits": {{ default_index_season_digits }},
|
||||||
|
"defaultIndexEpisodeDigits": {{ default_index_episode_digits }},
|
||||||
|
"defaultIndicatorSeasonDigits": {{ default_indicator_season_digits }},
|
||||||
|
"defaultIndicatorEpisodeDigits": {{ default_indicator_episode_digits }},
|
||||||
|
"metadata": {
|
||||||
|
"signature": {
|
||||||
|
"RECODED_WITH": "FFX"
|
||||||
|
},
|
||||||
|
"remove": [
|
||||||
|
"VERSION-eng",
|
||||||
|
"creation_time",
|
||||||
|
"NAME"
|
||||||
|
],
|
||||||
|
"streams": {
|
||||||
|
"remove": [
|
||||||
|
"BPS",
|
||||||
|
"NUMBER_OF_FRAMES",
|
||||||
|
"NUMBER_OF_BYTES",
|
||||||
|
"_STATISTICS_WRITING_APP",
|
||||||
|
"_STATISTICS_WRITING_DATE_UTC",
|
||||||
|
"_STATISTICS_TAGS",
|
||||||
|
"BPS-eng",
|
||||||
|
"DURATION-eng",
|
||||||
|
"NUMBER_OF_FRAMES-eng",
|
||||||
|
"NUMBER_OF_BYTES-eng",
|
||||||
|
"_STATISTICS_WRITING_APP-eng",
|
||||||
|
"_STATISTICS_WRITING_DATE_UTC-eng",
|
||||||
|
"_STATISTICS_TAGS-eng"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/de.json
Normal file
361
assets/i18n/de.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abchasisch",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanisch",
|
||||||
|
"AMHARIC": "Amharisch",
|
||||||
|
"ARABIC": "Arabisch",
|
||||||
|
"ARAGONESE": "Aragonesisch",
|
||||||
|
"ARMENIAN": "Armenisch",
|
||||||
|
"ASSAMESE": "Assamesisch",
|
||||||
|
"AVARIC": "Awarisch",
|
||||||
|
"AVESTAN": "Avestisch",
|
||||||
|
"AYMARA": "Aymara",
|
||||||
|
"AZERBAIJANI": "Aserbaidschanisch",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Baschkirisch",
|
||||||
|
"BASQUE": "Baskisch",
|
||||||
|
"BELARUSIAN": "Weißrussisch",
|
||||||
|
"BENGALI": "Bengalisch",
|
||||||
|
"BISLAMA": "Bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnisch",
|
||||||
|
"BRETON": "Bretonisch",
|
||||||
|
"BULGARIAN": "Bulgarisch",
|
||||||
|
"BURMESE": "Burmesisch",
|
||||||
|
"CATALAN": "Katalanisch",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Tschetschenisch",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinesisch",
|
||||||
|
"CHURCH_SLAVIC": "Kirchenslawisch",
|
||||||
|
"CHUVASH": "Tschuwaschisch",
|
||||||
|
"CORNISH": "Kornisch",
|
||||||
|
"CORSICAN": "Korsisch",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Kroatisch",
|
||||||
|
"CZECH": "Tschechisch",
|
||||||
|
"DANISH": "Dänisch",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Niederländisch",
|
||||||
|
"DZONGKHA": "Dzongkha",
|
||||||
|
"ENGLISH": "Englisch",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estnisch",
|
||||||
|
"EWE": "Ewe-Sprache",
|
||||||
|
"FAROESE": "Färöisch",
|
||||||
|
"FIJIAN": "Fidschianisch",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finnisch",
|
||||||
|
"FRENCH": "Französisch",
|
||||||
|
"FULAH": "Ful",
|
||||||
|
"GALICIAN": "Galizisch",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Georgisch",
|
||||||
|
"GERMAN": "Deutsch",
|
||||||
|
"GREEK": "Griechisch",
|
||||||
|
"GUARANI": "Guaraní",
|
||||||
|
"GUJARATI": "Gujarati",
|
||||||
|
"HAITIAN": "Haitianisch",
|
||||||
|
"HAUSA": "Haussa",
|
||||||
|
"HEBREW": "Hebräisch",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri-Motu",
|
||||||
|
"HUNGARIAN": "Ungarisch",
|
||||||
|
"ICELANDIC": "Isländisch",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Ibo",
|
||||||
|
"INDONESIAN": "Indonesisch",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaq",
|
||||||
|
"IRISH": "Irisch",
|
||||||
|
"ITALIAN": "Italienisch",
|
||||||
|
"JAPANESE": "Japanisch",
|
||||||
|
"JAVANESE": "Javanisch",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannada",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "Kaschmirisch",
|
||||||
|
"KAZAKH": "Kasachisch",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirgisisch",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Koreanisch",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdisch",
|
||||||
|
"LAO": "Laotisch",
|
||||||
|
"LATIN": "Lateinisch",
|
||||||
|
"LATVIAN": "Lettisch",
|
||||||
|
"LIMBURGAN": "Limburgisch",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Litauisch",
|
||||||
|
"LUBA_KATANGA": "Luba-Katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxemburgisch",
|
||||||
|
"MACEDONIAN": "Makedonisch",
|
||||||
|
"MALAGASY": "Malagasi",
|
||||||
|
"MALAY": "Malaiisch",
|
||||||
|
"MALAYALAM": "Malayalam",
|
||||||
|
"MALTESE": "Maltesisch",
|
||||||
|
"MANX": "Manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathi",
|
||||||
|
"MARSHALLESE": "Marschallesisch",
|
||||||
|
"MONGOLIAN": "Mongolisch",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepali",
|
||||||
|
"NORTHERN_SAMI": "Nord-Samisch",
|
||||||
|
"NORTH_NDEBELE": "Nord-Ndebele",
|
||||||
|
"NORWEGIAN": "Norwegisch",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Okzitanisch",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetisch",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persisch",
|
||||||
|
"POLISH": "Polnisch",
|
||||||
|
"PORTUGUESE": "Portugiesisch",
|
||||||
|
"PUSHTO": "Paschtu",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Rumänisch",
|
||||||
|
"ROMANSH": "Bündnerromanisch",
|
||||||
|
"RUNDI": "Kirundi",
|
||||||
|
"RUSSIAN": "Russisch",
|
||||||
|
"SAMOAN": "Samoanisch",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sanskrit",
|
||||||
|
"SARDINIAN": "Sardisch",
|
||||||
|
"SCOTTISH_GAELIC": "Schottisch-Gälisch",
|
||||||
|
"SERBIAN": "Serbisch",
|
||||||
|
"SHONA": "Schona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slowakisch",
|
||||||
|
"SLOVENIAN": "Slowenisch",
|
||||||
|
"SOMALI": "Somali",
|
||||||
|
"SOUTHERN_SOTHO": "Süd-Sotho",
|
||||||
|
"SOUTH_NDEBELE": "Süd-Ndebele",
|
||||||
|
"SPANISH": "Spanisch",
|
||||||
|
"SUNDANESE": "Sundanesisch",
|
||||||
|
"SWAHILI": "Suaheli; Swaheli",
|
||||||
|
"SWATI": "Swazi",
|
||||||
|
"SWEDISH": "Schwedisch",
|
||||||
|
"TAGALOG": "Tagalog",
|
||||||
|
"TAHITIAN": "Tahitisch",
|
||||||
|
"TAJIK": "Tadschikisch",
|
||||||
|
"TAMIL": "Tamilisch",
|
||||||
|
"TATAR": "Tatarisch",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Thai",
|
||||||
|
"TIBETAN": "Tibetisch",
|
||||||
|
"TIGRINYA": "Tigrinja",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Tswana",
|
||||||
|
"TURKISH": "Türkisch",
|
||||||
|
"TURKMEN": "Turkmenisch",
|
||||||
|
"TWI": "Twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainisch",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdu",
|
||||||
|
"UZBEK": "Usbekisch",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamesisch",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Wallonisch",
|
||||||
|
"WELSH": "Walisisch",
|
||||||
|
"WESTERN_FRISIAN": "Westfriesisch",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Jiddisch",
|
||||||
|
"YORUBA": "Joruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Neue Serie>",
|
||||||
|
"Add": "Hinzufügen",
|
||||||
|
"Add Pattern": "Muster hinzufügen",
|
||||||
|
"Apply": "Anwenden",
|
||||||
|
"Apply failed: {error}": "Anwenden fehlgeschlagen: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Möchtest du das folgende Dateinamensmuster wirklich löschen?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Möchtest du die folgende verschobene Staffel wirklich löschen?",
|
||||||
|
"Are you sure to delete the following show?": "Möchtest du die folgende Serie wirklich löschen?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Möchtest du den folgenden {track_type}-Stream wirklich löschen?",
|
||||||
|
"Are you sure to delete this tag?": "Möchtest du dieses Tag wirklich löschen?",
|
||||||
|
"Audio Layout": "Audiolayout",
|
||||||
|
"Back": "Zurück",
|
||||||
|
"Cancel": "Abbrechen",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Es kann kein weiterer Stream mit gesetztem Dispositions-Flag 'default' oder 'forced' hinzugefügt werden",
|
||||||
|
"Changes applied and file reloaded.": "Änderungen angewendet und Datei neu geladen.",
|
||||||
|
"Cleanup": "Bereinigen",
|
||||||
|
"Cleanup disabled.": "Bereinigung deaktiviert.",
|
||||||
|
"Cleanup enabled.": "Bereinigung aktiviert.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Bearbeitung wird fortgesetzt.",
|
||||||
|
"Default": "Standard",
|
||||||
|
"Delete": "Löschen",
|
||||||
|
"Delete Show": "Serie löschen",
|
||||||
|
"Deleted media tag {tag!r}.": "Medien-Tag {tag!r} gelöscht.",
|
||||||
|
"Differences": "Unterschiede",
|
||||||
|
"Differences (file->db/output)": "Unterschiede (Datei->DB/Ausgabe)",
|
||||||
|
"Discard": "Verwerfen",
|
||||||
|
"Discard pending metadata changes and quit?": "Ausstehende Metadatenänderungen verwerfen und beenden?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Ausstehende Metadatenänderungen verwerfen und Dateistand neu laden?",
|
||||||
|
"Down": "Runter",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Trockenlauf: würde über temporäre Datei {target_path} neu schreiben",
|
||||||
|
"Edit": "Bearbeiten",
|
||||||
|
"Edit Pattern": "Muster bearbeiten",
|
||||||
|
"Edit Show": "Serie bearbeiten",
|
||||||
|
"Edit filename pattern": "Dateinamensmuster bearbeiten",
|
||||||
|
"Edit shifted season": "Verschobene Staffel bearbeiten",
|
||||||
|
"Edit stream": "Stream bearbeiten",
|
||||||
|
"Episode Offset": "Episodenoffset",
|
||||||
|
"Episode offset": "Episodenoffset",
|
||||||
|
"File": "Datei",
|
||||||
|
"File patterns": "Dateinamensmuster",
|
||||||
|
"First Episode": "Erste Episode",
|
||||||
|
"First episode": "Erste Episode",
|
||||||
|
"Forced": "Erzwungen",
|
||||||
|
"Help": "Hilfe",
|
||||||
|
"Help Screen": "Hilfe-Bildschirm",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identifizieren",
|
||||||
|
"Index": "Index",
|
||||||
|
"Index / Subindex": "Index / Unterindex",
|
||||||
|
"Index Episode Digits": "Ep. Index Stellen",
|
||||||
|
"Index Season Digits": "Sta. Index Stellen",
|
||||||
|
"Indicator Edisode Digits": "Ep. Indikator Stellen",
|
||||||
|
"Indicator Season Digits": "Sta. Indikator Stellen",
|
||||||
|
"Keep Editing": "Weiter bearbeiten",
|
||||||
|
"Keeping pending changes.": "Ausstehende Änderungen bleiben erhalten.",
|
||||||
|
"Key": "Schlüssel",
|
||||||
|
"Language": "Sprache",
|
||||||
|
"Last Episode": "Letzte Episode",
|
||||||
|
"Last episode": "Letzte Episode",
|
||||||
|
"Layout": "Layout",
|
||||||
|
"Media Tags": "Medien-Tags",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Mehr als ein Standard-Audiostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Mehr als ein Standard-Audiostream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Mehr als ein Standard-Untertitelstream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Mehr als ein Standard-Untertitelstream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one default video stream detected and no prompt set": "Mehr als ein Standard-Videostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one default video stream detected! Please select stream": "Mehr als ein Standard-Videostream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Mehr als ein erzwungener Audiostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Mehr als ein erzwungener Audiostream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Mehr als ein erzwungener Untertitelstream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Mehr als ein erzwungener Untertitelstream erkannt! Bitte Stream auswählen",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Mehr als ein erzwungener Videostream erkannt und keine Abfrage aktiviert",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Mehr als ein erzwungener Videostream erkannt! Bitte Stream auswählen",
|
||||||
|
"Name": "Name",
|
||||||
|
"New Pattern": "Neues Muster",
|
||||||
|
"New Show": "Neue Serie",
|
||||||
|
"New filename pattern": "Neues Dateinamensmuster",
|
||||||
|
"New shifted season": "Neue verschobene Staffel",
|
||||||
|
"New stream": "Neuer Stream",
|
||||||
|
"No": "Nein",
|
||||||
|
"No changes to apply.": "Keine Änderungen zum Anwenden.",
|
||||||
|
"No changes to revert.": "Keine Änderungen zum Zurücksetzen.",
|
||||||
|
"Normalization disabled.": "Normalisierung deaktiviert.",
|
||||||
|
"Normalization enabled.": "Normalisierung aktiviert.",
|
||||||
|
"Normalize": "Normalisieren",
|
||||||
|
"Notes": "Notizen",
|
||||||
|
"Pattern": "Muster",
|
||||||
|
"Planned Changes (file->edited output)": "Geplante Änderungen (Datei->bearbeitete Ausgabe)",
|
||||||
|
"Quality": "Qualität",
|
||||||
|
"Quit": "Beenden",
|
||||||
|
"Remove Pattern": "Muster entfernen",
|
||||||
|
"Revert": "Zurücksetzen",
|
||||||
|
"Reverted pending changes.": "Ausstehende Änderungen verworfen.",
|
||||||
|
"Save": "Speichern",
|
||||||
|
"Season Offset": "Staffeloffset",
|
||||||
|
"Select a stream first.": "Bitte zuerst einen Stream auswählen.",
|
||||||
|
"Set Default": "Als Standard setzen",
|
||||||
|
"Set Forced": "Als erzwungen setzen",
|
||||||
|
"Settings Screen": "Einstellungsbildschirm",
|
||||||
|
"Numbering Mapping": "Abbildung Nummerierung",
|
||||||
|
"Show": "Serie",
|
||||||
|
"Shows": "Serien",
|
||||||
|
"Source Season": "Quellstaffel",
|
||||||
|
"SrcIndex": "QuellIndex",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Bleiben",
|
||||||
|
"Stream dispositions": "Stream-Dispositionen",
|
||||||
|
"Stream tags": "Stream-Tags",
|
||||||
|
"Streams": "Streams",
|
||||||
|
"SubIndex": "Unterindex",
|
||||||
|
"Substitute": "Ersetzen",
|
||||||
|
"Substitute pattern": "Muster ersetzen",
|
||||||
|
"Title": "Titel",
|
||||||
|
"Type": "Typ",
|
||||||
|
"Unable to update selected stream.": "Der ausgewählte Stream konnte nicht aktualisiert werden.",
|
||||||
|
"Up": "Hoch",
|
||||||
|
"Update Pattern": "Muster aktualisieren",
|
||||||
|
"Updated media tag {tag!r}.": "Medien-Tag {tag!r} aktualisiert.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Stream #{index} ({track_type}) aktualisiert.",
|
||||||
|
"Value": "Wert",
|
||||||
|
"Year": "Jahr",
|
||||||
|
"Yes": "Ja",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "Medien-Tag hinzufügen: Schlüssel='{key}' Wert='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "{track_type}-Stream hinzufügen: Index={index} Sprache={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "Anhang",
|
||||||
|
"audio": "Audio",
|
||||||
|
"captions": "Untertitel",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "Medien-Tag ändern: Schlüssel='{key}' Wert='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "Stream #{index} ({track_type}:{sub_index}) Disposition hinzufügen={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "Stream #{index} ({track_type}:{sub_index}) Schlüssel hinzufügen={key} Wert={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "Stream #{index} ({track_type}:{sub_index}) Schlüssel ändern={key} Wert={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "Stream #{index} ({track_type}:{sub_index}) Disposition entfernen={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "Stream #{index} ({track_type}:{sub_index}) Schlüssel entfernen={key} Wert={value}",
|
||||||
|
"clean_effects": "Nur Effekte",
|
||||||
|
"comment": "Kommentar",
|
||||||
|
"default": "Standard",
|
||||||
|
"dependent": "abhängig",
|
||||||
|
"descriptions": "Beschreibungen",
|
||||||
|
"dub": "Synchronisiert",
|
||||||
|
"for pattern": "für Muster",
|
||||||
|
"forced": "erzwungen",
|
||||||
|
"from": "von",
|
||||||
|
"from pattern": "aus Muster",
|
||||||
|
"from show": "aus Serie",
|
||||||
|
"hearing_impaired": "hörgeschädigt",
|
||||||
|
"karaoke": "Karaoke",
|
||||||
|
"lyrics": "Liedtext",
|
||||||
|
"metadata": "Metadaten",
|
||||||
|
"non_diegetic": "nicht-diegetisch",
|
||||||
|
"original": "Original",
|
||||||
|
"pattern #{id}": "Muster #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "Medien-Tag entfernen: Schlüssel='{key}' Wert='{value}'",
|
||||||
|
"remove stream #{index}": "Stream #{index} entfernen",
|
||||||
|
"show #{id}": "Serie #{id}",
|
||||||
|
"stereo": "Stereo",
|
||||||
|
"still_image": "Standbild",
|
||||||
|
"sub index": "Unterindex",
|
||||||
|
"subtitle": "Untertitel",
|
||||||
|
"timed_thumbnails": "zeitgesteuerte Vorschaubilder",
|
||||||
|
"undefined": "undefiniert",
|
||||||
|
"unknown": "unbekannt",
|
||||||
|
"video": "Video",
|
||||||
|
"visual_impaired": "sehgeschädigt"
|
||||||
|
}
|
||||||
|
}
|
||||||
360
assets/i18n/en.json
Normal file
360
assets/i18n/en.json
Normal file
@@ -0,0 +1,360 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abkhazian",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanian",
|
||||||
|
"AMHARIC": "Amharic",
|
||||||
|
"ARABIC": "Arabic",
|
||||||
|
"ARAGONESE": "Aragonese",
|
||||||
|
"ARMENIAN": "Armenian",
|
||||||
|
"ASSAMESE": "Assamese",
|
||||||
|
"AVARIC": "Avaric",
|
||||||
|
"AVESTAN": "Avestan",
|
||||||
|
"AYMARA": "Aymara",
|
||||||
|
"AZERBAIJANI": "Azerbaijani",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Bashkir",
|
||||||
|
"BASQUE": "Basque",
|
||||||
|
"BELARUSIAN": "Belarusian",
|
||||||
|
"BENGALI": "Bengali",
|
||||||
|
"BISLAMA": "Bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnian",
|
||||||
|
"BRETON": "Breton",
|
||||||
|
"BULGARIAN": "Bulgarian",
|
||||||
|
"BURMESE": "Burmese",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Chechen",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinese",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Chuvash",
|
||||||
|
"CORNISH": "Cornish",
|
||||||
|
"CORSICAN": "Corsican",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Croatian",
|
||||||
|
"CZECH": "Czech",
|
||||||
|
"DANISH": "Danish",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Dzongkha",
|
||||||
|
"ENGLISH": "English",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estonian",
|
||||||
|
"EWE": "Ewe",
|
||||||
|
"FAROESE": "Faroese",
|
||||||
|
"FIJIAN": "Fijian",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finnish",
|
||||||
|
"FRENCH": "French",
|
||||||
|
"FULAH": "Fulah",
|
||||||
|
"GALICIAN": "Galician",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Georgian",
|
||||||
|
"GERMAN": "German",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "Gujarati",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Hausa",
|
||||||
|
"HEBREW": "Hebrew",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Hungarian",
|
||||||
|
"ICELANDIC": "Icelandic",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igbo",
|
||||||
|
"INDONESIAN": "Indonesian",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaq",
|
||||||
|
"IRISH": "Irish",
|
||||||
|
"ITALIAN": "Italian",
|
||||||
|
"JAPANESE": "Japanese",
|
||||||
|
"JAVANESE": "Javanese",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannada",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "Kashmiri",
|
||||||
|
"KAZAKH": "Kazakh",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Korean",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdish",
|
||||||
|
"LAO": "Lao",
|
||||||
|
"LATIN": "Latin",
|
||||||
|
"LATVIAN": "Latvian",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lithuanian",
|
||||||
|
"LUBA_KATANGA": "Luba-Katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macedonian",
|
||||||
|
"MALAGASY": "Malagasy",
|
||||||
|
"MALAY": "Malay",
|
||||||
|
"MALAYALAM": "Malayalam",
|
||||||
|
"MALTESE": "Maltese",
|
||||||
|
"MANX": "Manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathi",
|
||||||
|
"MARSHALLESE": "Marshallese",
|
||||||
|
"MONGOLIAN": "Mongolian",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepali",
|
||||||
|
"NORTHERN_SAMI": "Northern Sami",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norwegian",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persian",
|
||||||
|
"POLISH": "Polish",
|
||||||
|
"PORTUGUESE": "Portuguese",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romansh",
|
||||||
|
"RUNDI": "Rundi",
|
||||||
|
"RUSSIAN": "Russian",
|
||||||
|
"SAMOAN": "Samoan",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sanskrit",
|
||||||
|
"SARDINIAN": "Sardinian",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbian",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovak",
|
||||||
|
"SLOVENIAN": "Slovenian",
|
||||||
|
"SOMALI": "Somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sundanese",
|
||||||
|
"SWAHILI": "Swahili",
|
||||||
|
"SWATI": "Swati",
|
||||||
|
"SWEDISH": "Swedish",
|
||||||
|
"TAGALOG": "Tagalog",
|
||||||
|
"TAHITIAN": "Tahitian",
|
||||||
|
"TAJIK": "Tajik",
|
||||||
|
"TAMIL": "Tamil",
|
||||||
|
"TATAR": "Tatar",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Thai",
|
||||||
|
"TIBETAN": "Tibetan",
|
||||||
|
"TIGRINYA": "Tigrinya",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Tswana",
|
||||||
|
"TURKISH": "Turkish",
|
||||||
|
"TURKMEN": "Turkmen",
|
||||||
|
"TWI": "Twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainian",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdu",
|
||||||
|
"UZBEK": "Uzbek",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamese",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Walloon",
|
||||||
|
"WELSH": "Welsh",
|
||||||
|
"WESTERN_FRISIAN": "Western Frisian",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Yiddish",
|
||||||
|
"YORUBA": "Yoruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<New show>",
|
||||||
|
"Add": "Add",
|
||||||
|
"Add Pattern": "Add Pattern",
|
||||||
|
"Apply": "Apply",
|
||||||
|
"Apply failed: {error}": "Apply failed: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Are you sure to delete the following filename pattern?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Are you sure to delete the following shifted season?",
|
||||||
|
"Are you sure to delete the following show?": "Are you sure to delete the following show?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Are you sure to delete the following {track_type} track?",
|
||||||
|
"Are you sure to delete this tag?": "Are you sure to delete this tag?",
|
||||||
|
"Audio Layout": "Audio Layout",
|
||||||
|
"Back": "Back",
|
||||||
|
"Cancel": "Cancel",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Cannot add another stream with disposition flag 'default' or 'forced' set",
|
||||||
|
"Changes applied and file reloaded.": "Changes applied and file reloaded.",
|
||||||
|
"Cleanup": "Cleanup",
|
||||||
|
"Cleanup disabled.": "Cleanup disabled.",
|
||||||
|
"Cleanup enabled.": "Cleanup enabled.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Continuing edit session.",
|
||||||
|
"Default": "Default",
|
||||||
|
"Delete": "Delete",
|
||||||
|
"Delete Show": "Delete Show",
|
||||||
|
"Deleted media tag {tag!r}.": "Deleted media tag {tag!r}.",
|
||||||
|
"Differences": "Differences",
|
||||||
|
"Differences (file->db/output)": "Differences (file->db/output)",
|
||||||
|
"Discard": "Discard",
|
||||||
|
"Discard pending metadata changes and quit?": "Discard pending metadata changes and quit?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Discard pending metadata changes and reload the file state?",
|
||||||
|
"Down": "Down",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Dry-run: would rewrite via temporary file {target_path}",
|
||||||
|
"Edit": "Edit",
|
||||||
|
"Edit Pattern": "Edit Pattern",
|
||||||
|
"Edit Show": "Edit Show",
|
||||||
|
"Edit filename pattern": "Edit filename pattern",
|
||||||
|
"Edit shifted season": "Edit shifted season",
|
||||||
|
"Edit stream": "Edit stream",
|
||||||
|
"Episode Offset": "Episode Offset",
|
||||||
|
"Episode offset": "Episode offset",
|
||||||
|
"File": "File",
|
||||||
|
"File patterns": "File patterns",
|
||||||
|
"First Episode": "First Episode",
|
||||||
|
"First episode": "First episode",
|
||||||
|
"Forced": "Forced",
|
||||||
|
"Help": "Help",
|
||||||
|
"Help Screen": "Help Screen",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identify",
|
||||||
|
"Index": "Index",
|
||||||
|
"Index / Subindex": "Index / Subindex",
|
||||||
|
"Index Episode Digits": "Index Episode Digits",
|
||||||
|
"Index Season Digits": "Index Season Digits",
|
||||||
|
"Indicator Edisode Digits": "Indicator Episode Digits",
|
||||||
|
"Indicator Season Digits": "Indicator Season Digits",
|
||||||
|
"Keep Editing": "Keep Editing",
|
||||||
|
"Keeping pending changes.": "Keeping pending changes.",
|
||||||
|
"Key": "Key",
|
||||||
|
"Language": "Language",
|
||||||
|
"Last Episode": "Last Episode",
|
||||||
|
"Last episode": "Last episode",
|
||||||
|
"Layout": "Layout",
|
||||||
|
"Media Tags": "Media Tags",
|
||||||
|
"More than one default audio stream detected and no prompt set": "More than one default audio stream detected and no prompt set",
|
||||||
|
"More than one default audio stream detected! Please select stream": "More than one default audio stream detected! Please select stream",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "More than one default subtitle stream detected and no prompt set",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "More than one default subtitle stream detected! Please select stream",
|
||||||
|
"More than one default video stream detected and no prompt set": "More than one default video stream detected and no prompt set",
|
||||||
|
"More than one default video stream detected! Please select stream": "More than one default video stream detected! Please select stream",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "More than one forced audio stream detected and no prompt set",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "More than one forced audio stream detected! Please select stream",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "More than one forced subtitle stream detected and no prompt set",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "More than one forced subtitle stream detected! Please select stream",
|
||||||
|
"More than one forced video stream detected and no prompt set": "More than one forced video stream detected and no prompt set",
|
||||||
|
"More than one forced video stream detected! Please select stream": "More than one forced video stream detected! Please select stream",
|
||||||
|
"Name": "Name",
|
||||||
|
"New Pattern": "New Pattern",
|
||||||
|
"New Show": "New Show",
|
||||||
|
"New filename pattern": "New filename pattern",
|
||||||
|
"New shifted season": "New shifted season",
|
||||||
|
"New stream": "New stream",
|
||||||
|
"No": "No",
|
||||||
|
"No changes to apply.": "No changes to apply.",
|
||||||
|
"No changes to revert.": "No changes to revert.",
|
||||||
|
"Normalization disabled.": "Normalization disabled.",
|
||||||
|
"Normalization enabled.": "Normalization enabled.",
|
||||||
|
"Normalize": "Normalize",
|
||||||
|
"Notes": "Notes",
|
||||||
|
"Pattern": "Pattern",
|
||||||
|
"Planned Changes (file->edited output)": "Planned Changes (file->edited output)",
|
||||||
|
"Quality": "Quality",
|
||||||
|
"Quit": "Quit",
|
||||||
|
"Remove Pattern": "Remove Pattern",
|
||||||
|
"Revert": "Revert",
|
||||||
|
"Reverted pending changes.": "Reverted pending changes.",
|
||||||
|
"Save": "Save",
|
||||||
|
"Season Offset": "Season Offset",
|
||||||
|
"Select a stream first.": "Select a stream first.",
|
||||||
|
"Set Default": "Set Default",
|
||||||
|
"Set Forced": "Set Forced",
|
||||||
|
"Settings Screen": "Settings Screen",
|
||||||
|
"Numbering Mapping": "Numbering Mapping",
|
||||||
|
"Show": "Show",
|
||||||
|
"Shows": "Shows",
|
||||||
|
"SrcIndex": "SrcIndex",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Stay",
|
||||||
|
"Stream dispositions": "Stream dispositions",
|
||||||
|
"Stream tags": "Stream tags",
|
||||||
|
"Streams": "Streams",
|
||||||
|
"SubIndex": "SubIndex",
|
||||||
|
"Substitute": "Substitute",
|
||||||
|
"Substitute pattern": "Substitute pattern",
|
||||||
|
"Title": "Title",
|
||||||
|
"Type": "Type",
|
||||||
|
"Unable to update selected stream.": "Unable to update selected stream.",
|
||||||
|
"Up": "Up",
|
||||||
|
"Update Pattern": "Update Pattern",
|
||||||
|
"Updated media tag {tag!r}.": "Updated media tag {tag!r}.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Updated stream #{index} ({track_type}).",
|
||||||
|
"Value": "Value",
|
||||||
|
"Year": "Year",
|
||||||
|
"Yes": "Yes",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "add media tag: key='{key}' value='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "add {track_type} track: index={index} lang={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "attachment",
|
||||||
|
"audio": "audio",
|
||||||
|
"captions": "captions",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "change media tag: key='{key}' value='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}",
|
||||||
|
"clean_effects": "clean_effects",
|
||||||
|
"comment": "comment",
|
||||||
|
"default": "default",
|
||||||
|
"dependent": "dependent",
|
||||||
|
"descriptions": "descriptions",
|
||||||
|
"dub": "dub",
|
||||||
|
"for pattern": "for pattern",
|
||||||
|
"forced": "forced",
|
||||||
|
"from": "from",
|
||||||
|
"from pattern": "from pattern",
|
||||||
|
"from show": "from show",
|
||||||
|
"hearing_impaired": "hearing_impaired",
|
||||||
|
"karaoke": "karaoke",
|
||||||
|
"lyrics": "lyrics",
|
||||||
|
"metadata": "metadata",
|
||||||
|
"non_diegetic": "non_diegetic",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "pattern #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "remove media tag: key='{key}' value='{value}'",
|
||||||
|
"remove stream #{index}": "remove stream #{index}",
|
||||||
|
"show #{id}": "show #{id}",
|
||||||
|
"stereo": "stereo",
|
||||||
|
"still_image": "still_image",
|
||||||
|
"sub index": "sub index",
|
||||||
|
"subtitle": "subtitle",
|
||||||
|
"timed_thumbnails": "timed_thumbnails",
|
||||||
|
"undefined": "undefined",
|
||||||
|
"unknown": "unknown",
|
||||||
|
"video": "video",
|
||||||
|
"visual_impaired": "visual_impaired"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/eo.json
Normal file
361
assets/i18n/eo.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abĥaza",
|
||||||
|
"AFAR": "Afara",
|
||||||
|
"AFRIKAANS": "Afrikansa",
|
||||||
|
"AKAN": "Akana",
|
||||||
|
"ALBANIAN": "Albana",
|
||||||
|
"AMHARIC": "Amhara",
|
||||||
|
"ARABIC": "Araba",
|
||||||
|
"ARAGONESE": "Aragona",
|
||||||
|
"ARMENIAN": "Armena",
|
||||||
|
"ASSAMESE": "Asama",
|
||||||
|
"AVARIC": "Avara",
|
||||||
|
"AVESTAN": "Avesta",
|
||||||
|
"AYMARA": "Ajmara",
|
||||||
|
"AZERBAIJANI": "Azerbajĝana",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Baŝkira",
|
||||||
|
"BASQUE": "Eŭska",
|
||||||
|
"BELARUSIAN": "Belorusa",
|
||||||
|
"BENGALI": "Bengala",
|
||||||
|
"BISLAMA": "Bislamo",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosna",
|
||||||
|
"BRETON": "Bretona",
|
||||||
|
"BULGARIAN": "Bulgara",
|
||||||
|
"BURMESE": "Birma",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Ĉamora",
|
||||||
|
"CHECHEN": "Ĉeĉena",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Ĉina",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Ĉuvaŝa",
|
||||||
|
"CORNISH": "Kornvala",
|
||||||
|
"CORSICAN": "Korsika",
|
||||||
|
"CREE": "Kria",
|
||||||
|
"CROATIAN": "Kroata",
|
||||||
|
"CZECH": "Ĉeĥa",
|
||||||
|
"DANISH": "Dana",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Dzonka",
|
||||||
|
"ENGLISH": "Angla",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estona",
|
||||||
|
"EWE": "Evea",
|
||||||
|
"FAROESE": "Feroa",
|
||||||
|
"FIJIAN": "Fiĝia",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finna",
|
||||||
|
"FRENCH": "Franca",
|
||||||
|
"FULAH": "Fula",
|
||||||
|
"GALICIAN": "Galega",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Kartvela",
|
||||||
|
"GERMAN": "Germana",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Gvarania",
|
||||||
|
"GUJARATI": "Guĝarata",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Haŭsa",
|
||||||
|
"HEBREW": "Hebrea",
|
||||||
|
"HERERO": "Herera",
|
||||||
|
"HINDI": "Hindia",
|
||||||
|
"HIRI_MOTU": "Hirimotua",
|
||||||
|
"HUNGARIAN": "Hungara",
|
||||||
|
"ICELANDIC": "Islanda",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igba",
|
||||||
|
"INDONESIAN": "Indonezia",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktituta",
|
||||||
|
"INUPIAQ": "Inupiaka",
|
||||||
|
"IRISH": "Irlanda",
|
||||||
|
"ITALIAN": "Itala",
|
||||||
|
"JAPANESE": "Japana",
|
||||||
|
"JAVANESE": "Java",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kanara",
|
||||||
|
"KANURI": "Kanura",
|
||||||
|
"KASHMIRI": "Kaŝmira",
|
||||||
|
"KAZAKH": "Kazaĥa",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Ruanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komia",
|
||||||
|
"KONGO": "Konga",
|
||||||
|
"KOREAN": "Korea",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurda",
|
||||||
|
"LAO": "Laosa",
|
||||||
|
"LATIN": "Latina",
|
||||||
|
"LATVIAN": "Latva",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Litova",
|
||||||
|
"LUBA_KATANGA": "Luba-katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Makedona",
|
||||||
|
"MALAGASY": "Malagasa",
|
||||||
|
"MALAY": "Malaja",
|
||||||
|
"MALAYALAM": "Malajala",
|
||||||
|
"MALTESE": "Malta",
|
||||||
|
"MANX": "Manksa",
|
||||||
|
"MAORI": "Maoria",
|
||||||
|
"MARATHI": "Marata",
|
||||||
|
"MARSHALLESE": "Marŝala",
|
||||||
|
"MONGOLIAN": "Mongola",
|
||||||
|
"NAURU": "Naura",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepala",
|
||||||
|
"NORTHERN_SAMI": "Norda samea",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norvega",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Oĝibva",
|
||||||
|
"ORIYA": "Orija",
|
||||||
|
"OROMO": "Oroma",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Palia",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persa",
|
||||||
|
"POLISH": "Pola",
|
||||||
|
"PORTUGUESE": "Portugala",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Keĉua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romanĉa",
|
||||||
|
"RUNDI": "Burunda",
|
||||||
|
"RUSSIAN": "Rusa",
|
||||||
|
"SAMOAN": "Samoa",
|
||||||
|
"SANGO": "Sangoa",
|
||||||
|
"SANSKRIT": "Sanskrito",
|
||||||
|
"SARDINIAN": "Sarda",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serba",
|
||||||
|
"SHONA": "Ŝona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sinda",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovaka",
|
||||||
|
"SLOVENIAN": "Slovena",
|
||||||
|
"SOMALI": "Somalia",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sunda",
|
||||||
|
"SWAHILI": "Svahila",
|
||||||
|
"SWATI": "Svazia",
|
||||||
|
"SWEDISH": "Sveda",
|
||||||
|
"TAGALOG": "Tagaloga",
|
||||||
|
"TAHITIAN": "Tahitia",
|
||||||
|
"TAJIK": "Taĝika",
|
||||||
|
"TAMIL": "Tamila",
|
||||||
|
"TATAR": "Tatara",
|
||||||
|
"TELUGU": "Telugua",
|
||||||
|
"THAI": "Taja",
|
||||||
|
"TIBETAN": "Tibeta",
|
||||||
|
"TIGRINYA": "Tigraja",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Conga",
|
||||||
|
"TSWANA": "Cvana",
|
||||||
|
"TURKISH": "Turka",
|
||||||
|
"TURKMEN": "Turkmena",
|
||||||
|
"TWI": "Tvia",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukraina",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdua",
|
||||||
|
"UZBEK": "Uzbeka",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vjetnama",
|
||||||
|
"VOLAPUK": "Volapuko",
|
||||||
|
"WALLOON": "Valona",
|
||||||
|
"WELSH": "Kimra",
|
||||||
|
"WESTERN_FRISIAN": "Okcidenta frisa",
|
||||||
|
"WOLOF": "Volofa",
|
||||||
|
"XHOSA": "Kosa",
|
||||||
|
"YIDDISH": "Jida",
|
||||||
|
"YORUBA": "Joruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulua"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nova serio>",
|
||||||
|
"Add": "Aldoni",
|
||||||
|
"Add Pattern": "Aldoni ŝablonon",
|
||||||
|
"Apply": "Apliki",
|
||||||
|
"Apply failed: {error}": "Apliko malsukcesis: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Ĉu vi certe volas forigi la jenan dosiernoman ŝablonon?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Ĉu vi certe volas forigi la jenan ŝovitan sezonon?",
|
||||||
|
"Are you sure to delete the following show?": "Ĉu vi certe volas forigi la jenan serion?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Ĉu vi certe volas forigi la jenan {track_type}-trakon?",
|
||||||
|
"Are you sure to delete this tag?": "Ĉu vi certe volas forigi ĉi tiun etikedon?",
|
||||||
|
"Audio Layout": "Aŭda aranĝo",
|
||||||
|
"Back": "Reen",
|
||||||
|
"Cancel": "Nuligi",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Ne eblas aldoni alian fluon kun la dispozicia flago 'default' aŭ 'forced' aktiva",
|
||||||
|
"Changes applied and file reloaded.": "Ŝanĝoj aplikitaj kaj dosiero reŝargita.",
|
||||||
|
"Cleanup": "Purigado",
|
||||||
|
"Cleanup disabled.": "Purigado malŝaltita.",
|
||||||
|
"Cleanup enabled.": "Purigado ŝaltita.",
|
||||||
|
"Codec": "Kodeko",
|
||||||
|
"Continuing edit session.": "Daŭrigante la redaktan seancon.",
|
||||||
|
"Default": "Defaŭlta",
|
||||||
|
"Delete": "Forigi",
|
||||||
|
"Delete Show": "Forigi serion",
|
||||||
|
"Deleted media tag {tag!r}.": "Forigis la aŭdvidan etikedon {tag!r}.",
|
||||||
|
"Differences": "Diferencoj",
|
||||||
|
"Differences (file->db/output)": "Diferencoj (dosiero->DB/eligo)",
|
||||||
|
"Discard": "Forĵeti",
|
||||||
|
"Discard pending metadata changes and quit?": "Ĉu forĵeti atendatajn metadatumajn ŝanĝojn kaj eliri?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Ĉu forĵeti atendatajn metadatumajn ŝanĝojn kaj reŝargi la dosieran staton?",
|
||||||
|
"Down": "Malsupren",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Seka provo: reskribus per provizora dosiero {target_path}",
|
||||||
|
"Edit": "Redakti",
|
||||||
|
"Edit Pattern": "Redakti ŝablonon",
|
||||||
|
"Edit Show": "Redakti serion",
|
||||||
|
"Edit filename pattern": "Redakti dosiernoman ŝablonon",
|
||||||
|
"Edit shifted season": "Redakti ŝovitan sezonon",
|
||||||
|
"Edit stream": "Redakti fluon",
|
||||||
|
"Episode Offset": "Epizoda deŝovo",
|
||||||
|
"Episode offset": "Epizoda deŝovo",
|
||||||
|
"File": "Dosiero",
|
||||||
|
"File patterns": "Dosieraj ŝablonoj",
|
||||||
|
"First Episode": "Unua epizodo",
|
||||||
|
"First episode": "Unua epizodo",
|
||||||
|
"Forced": "Devigita",
|
||||||
|
"Help": "Helpo",
|
||||||
|
"Help Screen": "Helpa ekrano",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identigi",
|
||||||
|
"Index": "Indekso",
|
||||||
|
"Index / Subindex": "Indekso / Subindekso",
|
||||||
|
"Index Episode Digits": "Ciferoj de epizoda indekso",
|
||||||
|
"Index Season Digits": "Ciferoj de sezona indekso",
|
||||||
|
"Indicator Edisode Digits": "Ciferoj de epizoda indikilo",
|
||||||
|
"Indicator Season Digits": "Ciferoj de sezona indikilo",
|
||||||
|
"Keep Editing": "Daŭrigi redaktadon",
|
||||||
|
"Keeping pending changes.": "Konservas atendatajn ŝanĝojn.",
|
||||||
|
"Key": "Ŝlosilo",
|
||||||
|
"Language": "Lingvo",
|
||||||
|
"Last Episode": "Lasta epizodo",
|
||||||
|
"Last episode": "Lasta epizodo",
|
||||||
|
"Layout": "Aranĝo",
|
||||||
|
"Media Tags": "Aŭdvidaj etikedoj",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Pli ol unu defaŭlta sonfluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Pli ol unu defaŭlta sonfluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Pli ol unu defaŭlta subtitola fluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Pli ol unu defaŭlta subtitola fluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one default video stream detected and no prompt set": "Pli ol unu defaŭlta videofluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one default video stream detected! Please select stream": "Pli ol unu defaŭlta videofluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Pli ol unu devigita sonfluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Pli ol unu devigita sonfluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Pli ol unu devigita subtitola fluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Pli ol unu devigita subtitola fluo detektita! Bonvolu elekti fluon",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Pli ol unu devigita videofluo detektita kaj neniu instigo agordita",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Pli ol unu devigita videofluo detektita! Bonvolu elekti fluon",
|
||||||
|
"Name": "Nomo",
|
||||||
|
"New Pattern": "Nova ŝablono",
|
||||||
|
"New Show": "Nova serio",
|
||||||
|
"New filename pattern": "Nova dosiernoma ŝablono",
|
||||||
|
"New shifted season": "Nova ŝovita sezono",
|
||||||
|
"New stream": "Nova fluo",
|
||||||
|
"No": "Ne",
|
||||||
|
"No changes to apply.": "Neniuj ŝanĝoj por apliki.",
|
||||||
|
"No changes to revert.": "Neniuj ŝanĝoj por malfari.",
|
||||||
|
"Normalization disabled.": "Normaligo malŝaltita.",
|
||||||
|
"Normalization enabled.": "Normaligo ŝaltita.",
|
||||||
|
"Normalize": "Normaligi",
|
||||||
|
"Notes": "Notoj",
|
||||||
|
"Pattern": "Ŝablono",
|
||||||
|
"Planned Changes (file->edited output)": "Planitaj ŝanĝoj (dosiero->redaktita eligo)",
|
||||||
|
"Quality": "Kvalito",
|
||||||
|
"Quit": "Eliri",
|
||||||
|
"Remove Pattern": "Forigi ŝablonon",
|
||||||
|
"Revert": "Malfari",
|
||||||
|
"Reverted pending changes.": "Malfaris atendatajn ŝanĝojn.",
|
||||||
|
"Save": "Konservi",
|
||||||
|
"Season Offset": "Sezona deŝovo",
|
||||||
|
"Select a stream first.": "Bonvolu unue elekti fluon.",
|
||||||
|
"Set Default": "Agordi kiel defaŭltan",
|
||||||
|
"Set Forced": "Agordi kiel devigitan",
|
||||||
|
"Settings Screen": "Agorda ekrano",
|
||||||
|
"Numbering Mapping": "Ŝovitaj sezonoj",
|
||||||
|
"Show": "Serio",
|
||||||
|
"Shows": "Serioj",
|
||||||
|
"Source Season": "Fonta sezono",
|
||||||
|
"SrcIndex": "Fontindekso",
|
||||||
|
"Status": "Stato",
|
||||||
|
"Stay": "Resti",
|
||||||
|
"Stream dispositions": "Fluaj dispozicioj",
|
||||||
|
"Stream tags": "Fluaj etikedoj",
|
||||||
|
"Streams": "Fluoj",
|
||||||
|
"SubIndex": "Subindekso",
|
||||||
|
"Substitute": "Anstataŭigi",
|
||||||
|
"Substitute pattern": "Anstataŭigi ŝablonon",
|
||||||
|
"Title": "Titolo",
|
||||||
|
"Type": "Tipo",
|
||||||
|
"Unable to update selected stream.": "Ne eblis ĝisdatigi la elektitan fluon.",
|
||||||
|
"Up": "Supren",
|
||||||
|
"Update Pattern": "Ĝisdatigi ŝablonon",
|
||||||
|
"Updated media tag {tag!r}.": "Ĝisdatigis la aŭdvidan etikedon {tag!r}.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Ĝisdatigis fluon #{index} ({track_type}).",
|
||||||
|
"Value": "Valoro",
|
||||||
|
"Year": "Jaro",
|
||||||
|
"Yes": "Jes",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "aldoni aŭdvidan etikedon: ŝlosilo='{key}' valoro='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "aldoni {track_type}-trakon: indekso={index} lingvo={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "aldonaĵo",
|
||||||
|
"audio": "sono",
|
||||||
|
"captions": "subtekstoj",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "ŝanĝi aŭdvidan etikedon: ŝlosilo='{key}' valoro='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) aldoni dispozicion={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) aldoni ŝlosilon={key} valoron={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) ŝanĝi ŝlosilon={key} valoron={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) forigi dispozicion={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "ŝanĝi fluon #{index} ({track_type}:{sub_index}) forigi ŝlosilon={key} valoron={value}",
|
||||||
|
"clean_effects": "nur efektoj",
|
||||||
|
"comment": "komento",
|
||||||
|
"default": "defaŭlta",
|
||||||
|
"dependent": "dependa",
|
||||||
|
"descriptions": "priskriboj",
|
||||||
|
"dub": "dublado",
|
||||||
|
"for pattern": "por ŝablono",
|
||||||
|
"forced": "devigita",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "de ŝablono",
|
||||||
|
"from show": "el serio",
|
||||||
|
"hearing_impaired": "aŭdmalhelpita",
|
||||||
|
"karaoke": "karaokeo",
|
||||||
|
"lyrics": "kantoteksto",
|
||||||
|
"metadata": "metadatenoj",
|
||||||
|
"non_diegetic": "nediĝeta",
|
||||||
|
"original": "originala",
|
||||||
|
"pattern #{id}": "ŝablono #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "forigi aŭdvidan etikedon: ŝlosilo='{key}' valoro='{value}'",
|
||||||
|
"remove stream #{index}": "forigi fluon #{index}",
|
||||||
|
"show #{id}": "serio #{id}",
|
||||||
|
"stereo": "stereo",
|
||||||
|
"still_image": "senmova bildo",
|
||||||
|
"sub index": "subindekso",
|
||||||
|
"subtitle": "subtitolo",
|
||||||
|
"timed_thumbnails": "tempigitaj bildetoj",
|
||||||
|
"undefined": "nedifinita",
|
||||||
|
"unknown": "nekonata",
|
||||||
|
"video": "video",
|
||||||
|
"visual_impaired": "vidmalhelpita"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/es.json
Normal file
361
assets/i18n/es.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abjaziano",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanés",
|
||||||
|
"AMHARIC": "Ámárico",
|
||||||
|
"ARABIC": "Árábe",
|
||||||
|
"ARAGONESE": "Aragonés",
|
||||||
|
"ARMENIAN": "Armenio",
|
||||||
|
"ASSAMESE": "Assamais",
|
||||||
|
"AVARIC": "Avaric",
|
||||||
|
"AVESTAN": "Avestan",
|
||||||
|
"AYMARA": "Aymará",
|
||||||
|
"AZERBAIJANI": "Azerbayano",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Bashkir",
|
||||||
|
"BASQUE": "Vasco",
|
||||||
|
"BELARUSIAN": "Bieloruso",
|
||||||
|
"BENGALI": "Bengalí",
|
||||||
|
"BISLAMA": "Bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnio",
|
||||||
|
"BRETON": "Bretón",
|
||||||
|
"BULGARIAN": "Búlgaro",
|
||||||
|
"BURMESE": "Birmano",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Checheno",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chino",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Chuvash",
|
||||||
|
"CORNISH": "Córnico",
|
||||||
|
"CORSICAN": "Corso",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Croata",
|
||||||
|
"CZECH": "Checo",
|
||||||
|
"DANISH": "Danés",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Butaní",
|
||||||
|
"ENGLISH": "Inglés",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estonio",
|
||||||
|
"EWE": "Ewe",
|
||||||
|
"FAROESE": "Feroés",
|
||||||
|
"FIJIAN": "Fidji",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finés",
|
||||||
|
"FRENCH": "Francés",
|
||||||
|
"FULAH": "Fulah",
|
||||||
|
"GALICIAN": "Gallego",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Georgiano",
|
||||||
|
"GERMAN": "Alemán",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guaraní",
|
||||||
|
"GUJARATI": "guyaratí",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Haussa",
|
||||||
|
"HEBREW": "Hebreo",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Húngaro",
|
||||||
|
"ICELANDIC": "Islandés",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igbo",
|
||||||
|
"INDONESIAN": "Indonesio",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiak",
|
||||||
|
"IRISH": "Irlandés",
|
||||||
|
"ITALIAN": "Italiano",
|
||||||
|
"JAPANESE": "Japonés",
|
||||||
|
"JAVANESE": "Javanés",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Canarés",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "Kashmir",
|
||||||
|
"KAZAKH": "Kazako",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Coreano",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdo",
|
||||||
|
"LAO": "laosiano",
|
||||||
|
"LATIN": "Latín",
|
||||||
|
"LATVIAN": "Letón",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lituano",
|
||||||
|
"LUBA_KATANGA": "Luba-Katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macedonio",
|
||||||
|
"MALAGASY": "Malgache",
|
||||||
|
"MALAY": "Malayo",
|
||||||
|
"MALAYALAM": "malabar",
|
||||||
|
"MALTESE": "Maltés",
|
||||||
|
"MANX": "Manx [Gaélico de Manx]",
|
||||||
|
"MAORI": "Maorí",
|
||||||
|
"MARATHI": "Marath",
|
||||||
|
"MARSHALLESE": "Marshall",
|
||||||
|
"MONGOLIAN": "Mongol",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Nepalés",
|
||||||
|
"NORTHERN_SAMI": "Sami del Norte",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Noruego",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo (Afan)",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persa",
|
||||||
|
"POLISH": "Polaco",
|
||||||
|
"PORTUGUESE": "Portugués",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romaní",
|
||||||
|
"RUNDI": "Kiroundi",
|
||||||
|
"RUSSIAN": "Ruso",
|
||||||
|
"SAMOAN": "Samoano",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sánscrito",
|
||||||
|
"SARDINIAN": "Sardo",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbio",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Eslovaco",
|
||||||
|
"SLOVENIAN": "Esloveno",
|
||||||
|
"SOMALI": "Somalí",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sondanés",
|
||||||
|
"SWAHILI": "Swahili",
|
||||||
|
"SWATI": "Siswati",
|
||||||
|
"SWEDISH": "Sueco",
|
||||||
|
"TAGALOG": "Tagalo",
|
||||||
|
"TAHITIAN": "Tahitiano",
|
||||||
|
"TAJIK": "Tajiko",
|
||||||
|
"TAMIL": "Tamil",
|
||||||
|
"TATAR": "Tataro",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Tailandés",
|
||||||
|
"TIBETAN": "Tibetano",
|
||||||
|
"TIGRINYA": "Tigrinya",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Setchwana",
|
||||||
|
"TURKISH": "Turco",
|
||||||
|
"TURKMEN": "Turkmeno",
|
||||||
|
"TWI": "Tchi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukranio",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Urdu",
|
||||||
|
"UZBEK": "Uzbeko",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamita",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "valón",
|
||||||
|
"WELSH": "Galés",
|
||||||
|
"WESTERN_FRISIAN": "Frisón occidental",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Yidish",
|
||||||
|
"YORUBA": "Yoruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nueva serie>",
|
||||||
|
"Add": "Añadir",
|
||||||
|
"Add Pattern": "Añadir patrón",
|
||||||
|
"Apply": "Aplicar",
|
||||||
|
"Apply failed: {error}": "Error al aplicar: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "¿Seguro que quieres eliminar el siguiente patrón de nombre de archivo?",
|
||||||
|
"Are you sure to delete the following shifted season?": "¿Seguro que quieres eliminar la siguiente temporada desplazada?",
|
||||||
|
"Are you sure to delete the following show?": "¿Seguro que quieres eliminar la siguiente serie?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "¿Seguro que quieres eliminar la pista {track_type} siguiente?",
|
||||||
|
"Are you sure to delete this tag?": "¿Seguro que quieres eliminar esta etiqueta?",
|
||||||
|
"Audio Layout": "Disposición de audio",
|
||||||
|
"Back": "Volver",
|
||||||
|
"Cancel": "Cancelar",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "No se puede añadir otro flujo con la marca de disposición 'default' o 'forced' activada",
|
||||||
|
"Changes applied and file reloaded.": "Cambios aplicados y archivo recargado.",
|
||||||
|
"Cleanup": "Limpieza",
|
||||||
|
"Cleanup disabled.": "Limpieza desactivada.",
|
||||||
|
"Cleanup enabled.": "Limpieza activada.",
|
||||||
|
"Codec": "Códec",
|
||||||
|
"Continuing edit session.": "Continuando la sesión de edición.",
|
||||||
|
"Default": "Predeterminado",
|
||||||
|
"Delete": "Eliminar",
|
||||||
|
"Delete Show": "Eliminar serie",
|
||||||
|
"Deleted media tag {tag!r}.": "Etiqueta de medios {tag!r} eliminada.",
|
||||||
|
"Differences": "Diferencias",
|
||||||
|
"Differences (file->db/output)": "Diferencias (archivo->BD/salida)",
|
||||||
|
"Discard": "Descartar",
|
||||||
|
"Discard pending metadata changes and quit?": "¿Descartar los cambios pendientes de metadatos y salir?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "¿Descartar los cambios pendientes de metadatos y recargar el estado del archivo?",
|
||||||
|
"Down": "Abajo",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Simulación: reescribiría mediante el archivo temporal {target_path}",
|
||||||
|
"Edit": "Editar",
|
||||||
|
"Edit Pattern": "Editar patrón",
|
||||||
|
"Edit Show": "Editar serie",
|
||||||
|
"Edit filename pattern": "Editar patrón de nombre de archivo",
|
||||||
|
"Edit shifted season": "Editar temporada desplazada",
|
||||||
|
"Edit stream": "Editar flujo",
|
||||||
|
"Episode Offset": "Desplazamiento de episodio",
|
||||||
|
"Episode offset": "Desplazamiento de episodio",
|
||||||
|
"File": "Archivo",
|
||||||
|
"File patterns": "Patrones de archivo",
|
||||||
|
"First Episode": "Primer episodio",
|
||||||
|
"First episode": "Primer episodio",
|
||||||
|
"Forced": "Forzado",
|
||||||
|
"Help": "Ayuda",
|
||||||
|
"Help Screen": "Pantalla de ayuda",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identificar",
|
||||||
|
"Index": "Índice",
|
||||||
|
"Index / Subindex": "Índice / Subíndice",
|
||||||
|
"Index Episode Digits": "Dígitos del índice de episodio",
|
||||||
|
"Index Season Digits": "Dígitos del índice de temporada",
|
||||||
|
"Indicator Edisode Digits": "Dígitos del indicador de episodio",
|
||||||
|
"Indicator Season Digits": "Dígitos del indicador de temporada",
|
||||||
|
"Keep Editing": "Seguir editando",
|
||||||
|
"Keeping pending changes.": "Se conservan los cambios pendientes.",
|
||||||
|
"Key": "Clave",
|
||||||
|
"Language": "Idioma",
|
||||||
|
"Last Episode": "Último episodio",
|
||||||
|
"Last episode": "Último episodio",
|
||||||
|
"Layout": "Diseño",
|
||||||
|
"Media Tags": "Etiquetas de medios",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Se detectó más de un flujo de audio predeterminado y no hay aviso configurado",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Se detectó más de un flujo de audio predeterminado. Selecciona el flujo",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Se detectó más de un flujo de subtítulos predeterminado y no hay aviso configurado",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Se detectó más de un flujo de subtítulos predeterminado. Selecciona el flujo",
|
||||||
|
"More than one default video stream detected and no prompt set": "Se detectó más de un flujo de vídeo predeterminado y no hay aviso configurado",
|
||||||
|
"More than one default video stream detected! Please select stream": "Se detectó más de un flujo de vídeo predeterminado. Selecciona el flujo",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Se detectó más de un flujo de audio forzado y no hay aviso configurado",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Se detectó más de un flujo de audio forzado. Selecciona el flujo",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Se detectó más de un flujo de subtítulos forzados y no hay aviso configurado",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Se detectó más de un flujo de subtítulos forzados. Selecciona el flujo",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Se detectó más de un flujo de vídeo forzado y no hay aviso configurado",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Se detectó más de un flujo de vídeo forzado. Selecciona el flujo",
|
||||||
|
"Name": "Nombre",
|
||||||
|
"New Pattern": "Nuevo patrón",
|
||||||
|
"New Show": "Nueva serie",
|
||||||
|
"New filename pattern": "Nuevo patrón de nombre de archivo",
|
||||||
|
"New shifted season": "Nueva temporada desplazada",
|
||||||
|
"New stream": "Nuevo flujo",
|
||||||
|
"No": "No",
|
||||||
|
"No changes to apply.": "No hay cambios para aplicar.",
|
||||||
|
"No changes to revert.": "No hay cambios para revertir.",
|
||||||
|
"Normalization disabled.": "Normalización desactivada.",
|
||||||
|
"Normalization enabled.": "Normalización activada.",
|
||||||
|
"Normalize": "Normalizar",
|
||||||
|
"Notes": "Notas",
|
||||||
|
"Pattern": "Patrón",
|
||||||
|
"Planned Changes (file->edited output)": "Cambios planificados (archivo->salida editada)",
|
||||||
|
"Quality": "Calidad",
|
||||||
|
"Quit": "Salir",
|
||||||
|
"Remove Pattern": "Eliminar patrón",
|
||||||
|
"Revert": "Revertir",
|
||||||
|
"Reverted pending changes.": "Se revirtieron los cambios pendientes.",
|
||||||
|
"Save": "Guardar",
|
||||||
|
"Season Offset": "Desplazamiento de temporada",
|
||||||
|
"Select a stream first.": "Selecciona primero un flujo.",
|
||||||
|
"Set Default": "Establecer como predeterminado",
|
||||||
|
"Set Forced": "Establecer como forzado",
|
||||||
|
"Settings Screen": "Pantalla de ajustes",
|
||||||
|
"Numbering Mapping": "Temporadas desplazadas",
|
||||||
|
"Show": "Serie",
|
||||||
|
"Shows": "Series",
|
||||||
|
"Source Season": "Temporada de origen",
|
||||||
|
"SrcIndex": "Índice origen",
|
||||||
|
"Status": "Estado",
|
||||||
|
"Stay": "Permanecer",
|
||||||
|
"Stream dispositions": "Disposiciones del flujo",
|
||||||
|
"Stream tags": "Etiquetas del flujo",
|
||||||
|
"Streams": "Flujos",
|
||||||
|
"SubIndex": "Subíndice",
|
||||||
|
"Substitute": "Sustituir",
|
||||||
|
"Substitute pattern": "Sustituir patrón",
|
||||||
|
"Title": "Título",
|
||||||
|
"Type": "Tipo",
|
||||||
|
"Unable to update selected stream.": "No se pudo actualizar el flujo seleccionado.",
|
||||||
|
"Up": "Arriba",
|
||||||
|
"Update Pattern": "Actualizar patrón",
|
||||||
|
"Updated media tag {tag!r}.": "Etiqueta de medios {tag!r} actualizada.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Flujo #{index} ({track_type}) actualizado.",
|
||||||
|
"Value": "Valor",
|
||||||
|
"Year": "Año",
|
||||||
|
"Yes": "Sí",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "añadir etiqueta de medios: clave='{key}' valor='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "añadir pista {track_type}: índice={index} idioma={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "adjunto",
|
||||||
|
"audio": "audio",
|
||||||
|
"captions": "subtítulos",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "cambiar etiqueta de medios: clave='{key}' valor='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "cambiar flujo #{index} ({track_type}:{sub_index}) añadir disposición={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "cambiar flujo #{index} ({track_type}:{sub_index}) añadir clave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "cambiar flujo #{index} ({track_type}:{sub_index}) cambiar clave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "cambiar flujo #{index} ({track_type}:{sub_index}) quitar disposición={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "cambiar flujo #{index} ({track_type}:{sub_index}) quitar clave={key} valor={value}",
|
||||||
|
"clean_effects": "solo efectos",
|
||||||
|
"comment": "comentario",
|
||||||
|
"default": "predeterminado",
|
||||||
|
"dependent": "dependiente",
|
||||||
|
"descriptions": "descripciones",
|
||||||
|
"dub": "doblaje",
|
||||||
|
"for pattern": "para el patrón",
|
||||||
|
"forced": "forzado",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "del patrón",
|
||||||
|
"from show": "de la serie",
|
||||||
|
"hearing_impaired": "personas con discapacidad auditiva",
|
||||||
|
"karaoke": "karaoke",
|
||||||
|
"lyrics": "letra",
|
||||||
|
"metadata": "metadatos",
|
||||||
|
"non_diegetic": "no diegético",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "patrón #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "eliminar etiqueta de medios: clave='{key}' valor='{value}'",
|
||||||
|
"remove stream #{index}": "eliminar flujo #{index}",
|
||||||
|
"show #{id}": "serie #{id}",
|
||||||
|
"stereo": "estéreo",
|
||||||
|
"still_image": "imagen fija",
|
||||||
|
"sub index": "subíndice",
|
||||||
|
"subtitle": "subtítulo",
|
||||||
|
"timed_thumbnails": "miniaturas temporizadas",
|
||||||
|
"undefined": "indefinido",
|
||||||
|
"unknown": "desconocido",
|
||||||
|
"video": "vídeo",
|
||||||
|
"visual_impaired": "personas con discapacidad visual"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/fr.json
Normal file
361
assets/i18n/fr.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abkhaze",
|
||||||
|
"AFAR": "Afar",
|
||||||
|
"AFRIKAANS": "Afrikaans",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanais",
|
||||||
|
"AMHARIC": "Amharique",
|
||||||
|
"ARABIC": "Arabe",
|
||||||
|
"ARAGONESE": "Aragonais",
|
||||||
|
"ARMENIAN": "Arménien",
|
||||||
|
"ASSAMESE": "Assamais",
|
||||||
|
"AVARIC": "Avar",
|
||||||
|
"AVESTAN": "Avestique",
|
||||||
|
"AYMARA": "Aymara",
|
||||||
|
"AZERBAIJANI": "Azéri",
|
||||||
|
"BAMBARA": "Bambara",
|
||||||
|
"BASHKIR": "Bachkir",
|
||||||
|
"BASQUE": "Basque",
|
||||||
|
"BELARUSIAN": "Biélorusse",
|
||||||
|
"BENGALI": "Bengali",
|
||||||
|
"BISLAMA": "Bichelamar",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosniaque",
|
||||||
|
"BRETON": "Breton",
|
||||||
|
"BULGARIAN": "Bulgare",
|
||||||
|
"BURMESE": "Birman",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "Chamorro",
|
||||||
|
"CHECHEN": "Tchétchène",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinois",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "Tchouvache",
|
||||||
|
"CORNISH": "Cornique",
|
||||||
|
"CORSICAN": "Corse",
|
||||||
|
"CREE": "Cri",
|
||||||
|
"CROATIAN": "Croate",
|
||||||
|
"CZECH": "Tchèque",
|
||||||
|
"DANISH": "Danois",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "Dzongkha",
|
||||||
|
"ENGLISH": "Anglais",
|
||||||
|
"ESPERANTO": "Espéranto",
|
||||||
|
"ESTONIAN": "Estonien",
|
||||||
|
"EWE": "Éwé",
|
||||||
|
"FAROESE": "Féroïen",
|
||||||
|
"FIJIAN": "Fidjien",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finnois",
|
||||||
|
"FRENCH": "Français",
|
||||||
|
"FULAH": "Peul",
|
||||||
|
"GALICIAN": "Galicien",
|
||||||
|
"GANDA": "Ganda",
|
||||||
|
"GEORGIAN": "Géorgien",
|
||||||
|
"GERMAN": "Allemand",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "Goudjarâtî (Gujrâtî)",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Haoussa",
|
||||||
|
"HEBREW": "Hébreu",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Hongrois",
|
||||||
|
"ICELANDIC": "Islandais",
|
||||||
|
"IDO": "Ido",
|
||||||
|
"IGBO": "Igbo",
|
||||||
|
"INDONESIAN": "Indonésien",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "Inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaq",
|
||||||
|
"IRISH": "Irlandais",
|
||||||
|
"ITALIAN": "Italien",
|
||||||
|
"JAPANESE": "Japonais",
|
||||||
|
"JAVANESE": "Javanais",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannara (Canara)",
|
||||||
|
"KANURI": "Kanouri",
|
||||||
|
"KASHMIRI": "Kashmiri",
|
||||||
|
"KAZAKH": "Kazakh",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "Kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "Komi",
|
||||||
|
"KONGO": "Kongo",
|
||||||
|
"KOREAN": "Coréen",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurde",
|
||||||
|
"LAO": "Laotien",
|
||||||
|
"LATIN": "Latin",
|
||||||
|
"LATVIAN": "Letton",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lituanien",
|
||||||
|
"LUBA_KATANGA": "Luba-katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macédonien",
|
||||||
|
"MALAGASY": "Malgache",
|
||||||
|
"MALAY": "Malais",
|
||||||
|
"MALAYALAM": "Malayalam",
|
||||||
|
"MALTESE": "Maltais",
|
||||||
|
"MANX": "Mannois",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathe",
|
||||||
|
"MARSHALLESE": "Marshallais",
|
||||||
|
"MONGOLIAN": "Mongol",
|
||||||
|
"NAURU": "Nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "Népalais",
|
||||||
|
"NORTHERN_SAMI": "Same du Nord",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norvégien",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "Ojibwa",
|
||||||
|
"ORIYA": "Oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persan",
|
||||||
|
"POLISH": "Polonais",
|
||||||
|
"PORTUGUESE": "Portugais",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "Quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romanche",
|
||||||
|
"RUNDI": "Rundi",
|
||||||
|
"RUSSIAN": "Russe",
|
||||||
|
"SAMOAN": "Samoan",
|
||||||
|
"SANGO": "Sango",
|
||||||
|
"SANSKRIT": "Sanskrit",
|
||||||
|
"SARDINIAN": "Sarde",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbe",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "Sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovaque",
|
||||||
|
"SLOVENIAN": "Slovène",
|
||||||
|
"SOMALI": "Somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "Sundanais",
|
||||||
|
"SWAHILI": "Swahili",
|
||||||
|
"SWATI": "Swati",
|
||||||
|
"SWEDISH": "Suédois",
|
||||||
|
"TAGALOG": "Tagalog",
|
||||||
|
"TAHITIAN": "Tahitien",
|
||||||
|
"TAJIK": "Tadjik",
|
||||||
|
"TAMIL": "Tamoul",
|
||||||
|
"TATAR": "Tatar",
|
||||||
|
"TELUGU": "Télougou",
|
||||||
|
"THAI": "Thaï",
|
||||||
|
"TIBETAN": "Tibétain",
|
||||||
|
"TIGRINYA": "Tigrigna",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "Tsonga",
|
||||||
|
"TSWANA": "Tswana",
|
||||||
|
"TURKISH": "Turc",
|
||||||
|
"TURKMEN": "Turkmène",
|
||||||
|
"TWI": "Twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainien",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "Ourdou",
|
||||||
|
"UZBEK": "Ouszbek",
|
||||||
|
"VENDA": "Venda",
|
||||||
|
"VIETNAMESE": "Vietnamien",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Wallon",
|
||||||
|
"WELSH": "Gallois",
|
||||||
|
"WESTERN_FRISIAN": "Frison occidental",
|
||||||
|
"WOLOF": "Wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "Yiddish",
|
||||||
|
"YORUBA": "Yoruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zoulou"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nouvelle série>",
|
||||||
|
"Add": "Ajouter",
|
||||||
|
"Add Pattern": "Ajouter un modèle",
|
||||||
|
"Apply": "Appliquer",
|
||||||
|
"Apply failed: {error}": "Échec de l'application : {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Voulez-vous vraiment supprimer le modèle de nom de fichier suivant ?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Voulez-vous vraiment supprimer la saison décalée suivante ?",
|
||||||
|
"Are you sure to delete the following show?": "Voulez-vous vraiment supprimer la série suivante ?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Voulez-vous vraiment supprimer la piste {track_type} suivante ?",
|
||||||
|
"Are you sure to delete this tag?": "Voulez-vous vraiment supprimer cette balise ?",
|
||||||
|
"Audio Layout": "Disposition audio",
|
||||||
|
"Back": "Retour",
|
||||||
|
"Cancel": "Annuler",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Impossible d'ajouter un autre flux avec l'indicateur de disposition 'default' ou 'forced'",
|
||||||
|
"Changes applied and file reloaded.": "Modifications appliquées et fichier rechargé.",
|
||||||
|
"Cleanup": "Nettoyage",
|
||||||
|
"Cleanup disabled.": "Nettoyage désactivé.",
|
||||||
|
"Cleanup enabled.": "Nettoyage activé.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Poursuite de la session d'édition.",
|
||||||
|
"Default": "Par défaut",
|
||||||
|
"Delete": "Supprimer",
|
||||||
|
"Delete Show": "Supprimer la série",
|
||||||
|
"Deleted media tag {tag!r}.": "Balise média {tag!r} supprimée.",
|
||||||
|
"Differences": "Différences",
|
||||||
|
"Differences (file->db/output)": "Différences (fichier->BD/sortie)",
|
||||||
|
"Discard": "Ignorer",
|
||||||
|
"Discard pending metadata changes and quit?": "Ignorer les modifications de métadonnées en attente et quitter ?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Ignorer les modifications de métadonnées en attente et recharger l'état du fichier ?",
|
||||||
|
"Down": "Descendre",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Simulation : réécrirait via le fichier temporaire {target_path}",
|
||||||
|
"Edit": "Modifier",
|
||||||
|
"Edit Pattern": "Modifier le modèle",
|
||||||
|
"Edit Show": "Modifier la série",
|
||||||
|
"Edit filename pattern": "Modifier le modèle de nom de fichier",
|
||||||
|
"Edit shifted season": "Modifier la saison décalée",
|
||||||
|
"Edit stream": "Modifier le flux",
|
||||||
|
"Episode Offset": "Décalage d'épisode",
|
||||||
|
"Episode offset": "Décalage d'épisode",
|
||||||
|
"File": "Fichier",
|
||||||
|
"File patterns": "Modèles de fichiers",
|
||||||
|
"First Episode": "Premier épisode",
|
||||||
|
"First episode": "Premier épisode",
|
||||||
|
"Forced": "Forcé",
|
||||||
|
"Help": "Aide",
|
||||||
|
"Help Screen": "Écran d'aide",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identifier",
|
||||||
|
"Index": "Index",
|
||||||
|
"Index / Subindex": "Index / Sous-index",
|
||||||
|
"Index Episode Digits": "Chiffres d'épisode d'index",
|
||||||
|
"Index Season Digits": "Chiffres de saison d'index",
|
||||||
|
"Indicator Edisode Digits": "Chiffres d'épisode de l'indicateur",
|
||||||
|
"Indicator Season Digits": "Chiffres de saison de l'indicateur",
|
||||||
|
"Keep Editing": "Continuer l'édition",
|
||||||
|
"Keeping pending changes.": "Les modifications en attente sont conservées.",
|
||||||
|
"Key": "Clé",
|
||||||
|
"Language": "Langue",
|
||||||
|
"Last Episode": "Dernier épisode",
|
||||||
|
"Last episode": "Dernier épisode",
|
||||||
|
"Layout": "Disposition",
|
||||||
|
"Media Tags": "Balises média",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Plus d'un flux audio par défaut détecté et aucune invite définie",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Plus d'un flux audio par défaut détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Plus d'un flux de sous-titres par défaut détecté et aucune invite définie",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Plus d'un flux de sous-titres par défaut détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one default video stream detected and no prompt set": "Plus d'un flux vidéo par défaut détecté et aucune invite définie",
|
||||||
|
"More than one default video stream detected! Please select stream": "Plus d'un flux vidéo par défaut détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Plus d'un flux audio forcé détecté et aucune invite définie",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Plus d'un flux audio forcé détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Plus d'un flux de sous-titres forcé détecté et aucune invite définie",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Plus d'un flux de sous-titres forcé détecté ! Veuillez sélectionner un flux",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Plus d'un flux vidéo forcé détecté et aucune invite définie",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Plus d'un flux vidéo forcé détecté ! Veuillez sélectionner un flux",
|
||||||
|
"Name": "Nom",
|
||||||
|
"New Pattern": "Nouveau modèle",
|
||||||
|
"New Show": "Nouvelle série",
|
||||||
|
"New filename pattern": "Nouveau modèle de nom de fichier",
|
||||||
|
"New shifted season": "Nouvelle saison décalée",
|
||||||
|
"New stream": "Nouveau flux",
|
||||||
|
"No": "Non",
|
||||||
|
"No changes to apply.": "Aucune modification à appliquer.",
|
||||||
|
"No changes to revert.": "Aucune modification à annuler.",
|
||||||
|
"Normalization disabled.": "Normalisation désactivée.",
|
||||||
|
"Normalization enabled.": "Normalisation activée.",
|
||||||
|
"Normalize": "Normaliser",
|
||||||
|
"Notes": "Notes",
|
||||||
|
"Pattern": "Modèle",
|
||||||
|
"Planned Changes (file->edited output)": "Modifications prévues (fichier->sortie modifiée)",
|
||||||
|
"Quality": "Qualité",
|
||||||
|
"Quit": "Quitter",
|
||||||
|
"Remove Pattern": "Supprimer le modèle",
|
||||||
|
"Revert": "Annuler les modifications",
|
||||||
|
"Reverted pending changes.": "Modifications en attente annulées.",
|
||||||
|
"Save": "Enregistrer",
|
||||||
|
"Season Offset": "Décalage de saison",
|
||||||
|
"Select a stream first.": "Veuillez d'abord sélectionner un flux.",
|
||||||
|
"Set Default": "Définir par défaut",
|
||||||
|
"Set Forced": "Définir comme forcé",
|
||||||
|
"Settings Screen": "Écran des paramètres",
|
||||||
|
"Numbering Mapping": "Saisons décalées",
|
||||||
|
"Show": "Série",
|
||||||
|
"Shows": "Séries",
|
||||||
|
"Source Season": "Saison source",
|
||||||
|
"SrcIndex": "Index source",
|
||||||
|
"Status": "Statut",
|
||||||
|
"Stay": "Rester",
|
||||||
|
"Stream dispositions": "Dispositions des flux",
|
||||||
|
"Stream tags": "Balises du flux",
|
||||||
|
"Streams": "Flux",
|
||||||
|
"SubIndex": "Sous-index",
|
||||||
|
"Substitute": "Remplacer",
|
||||||
|
"Substitute pattern": "Remplacer le modèle",
|
||||||
|
"Title": "Titre",
|
||||||
|
"Type": "Type",
|
||||||
|
"Unable to update selected stream.": "Impossible de mettre à jour le flux sélectionné.",
|
||||||
|
"Up": "Monter",
|
||||||
|
"Update Pattern": "Mettre à jour le modèle",
|
||||||
|
"Updated media tag {tag!r}.": "Balise média {tag!r} mise à jour.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Flux #{index} ({track_type}) mis à jour.",
|
||||||
|
"Value": "Valeur",
|
||||||
|
"Year": "Année",
|
||||||
|
"Yes": "Oui",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "ajouter une balise média : clé='{key}' valeur='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "ajouter une piste {track_type} : index={index} langue={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "pièce jointe",
|
||||||
|
"audio": "audio",
|
||||||
|
"captions": "sous-titres",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "modifier une balise média : clé='{key}' valeur='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "modifier le flux #{index} ({track_type}:{sub_index}) ajouter disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "modifier le flux #{index} ({track_type}:{sub_index}) ajouter clé={key} valeur={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "modifier le flux #{index} ({track_type}:{sub_index}) changer clé={key} valeur={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "modifier le flux #{index} ({track_type}:{sub_index}) supprimer disposition={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "modifier le flux #{index} ({track_type}:{sub_index}) supprimer clé={key} valeur={value}",
|
||||||
|
"clean_effects": "effets seuls",
|
||||||
|
"comment": "commentaire",
|
||||||
|
"default": "par défaut",
|
||||||
|
"dependent": "dépendant",
|
||||||
|
"descriptions": "descriptions",
|
||||||
|
"dub": "doublage",
|
||||||
|
"for pattern": "pour le modèle",
|
||||||
|
"forced": "forcé",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "depuis le modèle",
|
||||||
|
"from show": "depuis la série",
|
||||||
|
"hearing_impaired": "malentendants",
|
||||||
|
"karaoke": "karaoké",
|
||||||
|
"lyrics": "paroles",
|
||||||
|
"metadata": "métadonnées",
|
||||||
|
"non_diegetic": "non diégétique",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "modèle #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "supprimer une balise média : clé='{key}' valeur='{value}'",
|
||||||
|
"remove stream #{index}": "supprimer le flux #{index}",
|
||||||
|
"show #{id}": "série #{id}",
|
||||||
|
"stereo": "stéréo",
|
||||||
|
"still_image": "image fixe",
|
||||||
|
"sub index": "sous-index",
|
||||||
|
"subtitle": "sous-titre",
|
||||||
|
"timed_thumbnails": "miniatures horodatées",
|
||||||
|
"undefined": "indéfini",
|
||||||
|
"unknown": "inconnu",
|
||||||
|
"video": "vidéo",
|
||||||
|
"visual_impaired": "malvoyants"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/ja.json
Normal file
361
assets/i18n/ja.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "アブハジア語",
|
||||||
|
"AFAR": "アファル語",
|
||||||
|
"AFRIKAANS": "アフリカーンス語",
|
||||||
|
"AKAN": "アカン語",
|
||||||
|
"ALBANIAN": "アルバニア語",
|
||||||
|
"AMHARIC": "アムハラ語",
|
||||||
|
"ARABIC": "アラビア語",
|
||||||
|
"ARAGONESE": "アラゴン語",
|
||||||
|
"ARMENIAN": "アルメニア語",
|
||||||
|
"ASSAMESE": "アッサム語",
|
||||||
|
"AVARIC": "アヴァル語",
|
||||||
|
"AVESTAN": "アヴェスタ語",
|
||||||
|
"AYMARA": "アイマラ語",
|
||||||
|
"AZERBAIJANI": "アゼルバイジャン語",
|
||||||
|
"BAMBARA": "バンバラ語",
|
||||||
|
"BASHKIR": "バシキール語",
|
||||||
|
"BASQUE": "バスク語",
|
||||||
|
"BELARUSIAN": "白ロシア語",
|
||||||
|
"BENGALI": "ベンガル語",
|
||||||
|
"BISLAMA": "ビスラマ語",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "ボスニア語",
|
||||||
|
"BRETON": "ブルトン語",
|
||||||
|
"BULGARIAN": "ブルガリア語",
|
||||||
|
"BURMESE": "ビルマ語",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "チャモロ語",
|
||||||
|
"CHECHEN": "チェチェン語",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "中国語",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "チュヴァシュ語",
|
||||||
|
"CORNISH": "コーンウォール語",
|
||||||
|
"CORSICAN": "コルシカ語",
|
||||||
|
"CREE": "クリー語",
|
||||||
|
"CROATIAN": "クロアチア語",
|
||||||
|
"CZECH": "チェコ語",
|
||||||
|
"DANISH": "デンマーク語",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "ゾンカ語",
|
||||||
|
"ENGLISH": "英語",
|
||||||
|
"ESPERANTO": "エスペラント語",
|
||||||
|
"ESTONIAN": "エストニア語",
|
||||||
|
"EWE": "エウェ語",
|
||||||
|
"FAROESE": "フェロー語",
|
||||||
|
"FIJIAN": "フィジー語",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "フィン語",
|
||||||
|
"FRENCH": "フランス語",
|
||||||
|
"FULAH": "フラ語",
|
||||||
|
"GALICIAN": "ガリシア語",
|
||||||
|
"GANDA": "ガンダ語",
|
||||||
|
"GEORGIAN": "グルジア語",
|
||||||
|
"GERMAN": "ドイツ語",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "グアラニー",
|
||||||
|
"GUJARATI": "グジャラーティー語",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "ハウサ語",
|
||||||
|
"HEBREW": "ヘブライ語",
|
||||||
|
"HERERO": "ヘレロ語",
|
||||||
|
"HINDI": "ヒンディー語",
|
||||||
|
"HIRI_MOTU": "ヒリモトゥ語",
|
||||||
|
"HUNGARIAN": "ハンガリー語",
|
||||||
|
"ICELANDIC": "アイスランド語",
|
||||||
|
"IDO": "イド語",
|
||||||
|
"IGBO": "イボ語",
|
||||||
|
"INDONESIAN": "インドネシア語",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "イヌクウティトット語",
|
||||||
|
"INUPIAQ": "イヌピアック語",
|
||||||
|
"IRISH": "アイルランド語",
|
||||||
|
"ITALIAN": "イタリア語",
|
||||||
|
"JAPANESE": "日本語",
|
||||||
|
"JAVANESE": "ジャワ語",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "カンナダ語",
|
||||||
|
"KANURI": "カヌリ語",
|
||||||
|
"KASHMIRI": "カシミーリー語",
|
||||||
|
"KAZAKH": "カザーフ語",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "キンヤルワンダ語",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "コミ語",
|
||||||
|
"KONGO": "コンゴ語",
|
||||||
|
"KOREAN": "朝鮮語",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "クルド語",
|
||||||
|
"LAO": "ラオ語",
|
||||||
|
"LATIN": "ラテン語",
|
||||||
|
"LATVIAN": "ラトビア語",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "リンガラ語",
|
||||||
|
"LITHUANIAN": "リトアニア語",
|
||||||
|
"LUBA_KATANGA": "ルバ語",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "マケドニア語",
|
||||||
|
"MALAGASY": "マラガシ語",
|
||||||
|
"MALAY": "マライ語",
|
||||||
|
"MALAYALAM": "マラヤーラム語",
|
||||||
|
"MALTESE": "マルタ語",
|
||||||
|
"MANX": "マン島語",
|
||||||
|
"MAORI": "マオリ語",
|
||||||
|
"MARATHI": "マラーティー語",
|
||||||
|
"MARSHALLESE": "マーシャル語",
|
||||||
|
"MONGOLIAN": "蒙古語",
|
||||||
|
"NAURU": "ナウル語",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "ンドンガ語",
|
||||||
|
"NEPALI": "ネパール語",
|
||||||
|
"NORTHERN_SAMI": "北サーミ語",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "ノルウェー語",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "オジブワ語",
|
||||||
|
"ORIYA": "オリヤー語",
|
||||||
|
"OROMO": "オロモ語",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "パーリ語",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "ペルシア語",
|
||||||
|
"POLISH": "ポーランド語",
|
||||||
|
"PORTUGUESE": "ポルトガル語",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "キチュワ語",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "ロマンシュ語",
|
||||||
|
"RUNDI": "ルンディ語",
|
||||||
|
"RUSSIAN": "ロシア語",
|
||||||
|
"SAMOAN": "サモア語",
|
||||||
|
"SANGO": "サンゴ語",
|
||||||
|
"SANSKRIT": "梵語",
|
||||||
|
"SARDINIAN": "サルデーニャ語",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "セルビア語",
|
||||||
|
"SHONA": "ショナ語",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "シンディー語",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "スロヴァキア語",
|
||||||
|
"SLOVENIAN": "スロヴェニア語",
|
||||||
|
"SOMALI": "ソマリ語",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "スンダ語",
|
||||||
|
"SWAHILI": "スワヒリ語",
|
||||||
|
"SWATI": "シスワティ語",
|
||||||
|
"SWEDISH": "スウェーデン語",
|
||||||
|
"TAGALOG": "タガログ語",
|
||||||
|
"TAHITIAN": "タヒチ語",
|
||||||
|
"TAJIK": "タジク語",
|
||||||
|
"TAMIL": "タミル語",
|
||||||
|
"TATAR": "タタール語",
|
||||||
|
"TELUGU": "テルグ語",
|
||||||
|
"THAI": "タイ語",
|
||||||
|
"TIBETAN": "チベット語",
|
||||||
|
"TIGRINYA": "ティグリニア語",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "ツォンガ語",
|
||||||
|
"TSWANA": "ツワナ語",
|
||||||
|
"TURKISH": "トルコ語",
|
||||||
|
"TURKMEN": "トゥルクメン語",
|
||||||
|
"TWI": "トウィ語",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "ウクライナ語",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "ウルドゥー語",
|
||||||
|
"UZBEK": "ウズベク語",
|
||||||
|
"VENDA": "ベンダ語",
|
||||||
|
"VIETNAMESE": "ベトナム語",
|
||||||
|
"VOLAPUK": "ボラピューク語",
|
||||||
|
"WALLOON": "ワロン語",
|
||||||
|
"WELSH": "ウェールズ語",
|
||||||
|
"WESTERN_FRISIAN": "西フリジア語",
|
||||||
|
"WOLOF": "ウォロフ語",
|
||||||
|
"XHOSA": "ホサ語",
|
||||||
|
"YIDDISH": "イディッシュ語",
|
||||||
|
"YORUBA": "ヨルバ語",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "ズールー語"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<新しい番組>",
|
||||||
|
"Add": "追加",
|
||||||
|
"Add Pattern": "パターンを追加",
|
||||||
|
"Apply": "適用",
|
||||||
|
"Apply failed: {error}": "適用に失敗しました: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "次のファイル名パターンを削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete the following shifted season?": "次のシーズンシフト設定を削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete the following show?": "次の番組を削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "次の{track_type}ストリームを削除してもよろしいですか?",
|
||||||
|
"Are you sure to delete this tag?": "このタグを削除してもよろしいですか?",
|
||||||
|
"Audio Layout": "音声レイアウト",
|
||||||
|
"Back": "戻る",
|
||||||
|
"Cancel": "キャンセル",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "default または forced の disposition が設定されたストリームはこれ以上追加できません",
|
||||||
|
"Changes applied and file reloaded.": "変更を適用し、ファイルを再読み込みしました。",
|
||||||
|
"Cleanup": "クリーンアップ",
|
||||||
|
"Cleanup disabled.": "クリーンアップを無効にしました。",
|
||||||
|
"Cleanup enabled.": "クリーンアップを有効にしました。",
|
||||||
|
"Codec": "コーデック",
|
||||||
|
"Continuing edit session.": "編集セッションを続行します。",
|
||||||
|
"Default": "デフォルト",
|
||||||
|
"Delete": "削除",
|
||||||
|
"Delete Show": "番組を削除",
|
||||||
|
"Deleted media tag {tag!r}.": "メディアタグ {tag!r} を削除しました。",
|
||||||
|
"Differences": "差分",
|
||||||
|
"Differences (file->db/output)": "差分 (ファイル->DB/出力)",
|
||||||
|
"Discard": "破棄",
|
||||||
|
"Discard pending metadata changes and quit?": "保留中のメタデータ変更を破棄して終了しますか?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "保留中のメタデータ変更を破棄してファイル状態を再読み込みしますか?",
|
||||||
|
"Down": "下へ",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "ドライラン: 一時ファイル {target_path} 経由で再書き込みします",
|
||||||
|
"Edit": "編集",
|
||||||
|
"Edit Pattern": "パターンを編集",
|
||||||
|
"Edit Show": "番組を編集",
|
||||||
|
"Edit filename pattern": "ファイル名パターンを編集",
|
||||||
|
"Edit shifted season": "シフト済みシーズンを編集",
|
||||||
|
"Edit stream": "ストリームを編集",
|
||||||
|
"Episode Offset": "エピソードオフセット",
|
||||||
|
"Episode offset": "エピソードオフセット",
|
||||||
|
"File": "ファイル",
|
||||||
|
"File patterns": "ファイルパターン",
|
||||||
|
"First Episode": "最初のエピソード",
|
||||||
|
"First episode": "最初のエピソード",
|
||||||
|
"Forced": "強制",
|
||||||
|
"Help": "ヘルプ",
|
||||||
|
"Help Screen": "ヘルプ画面",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "識別",
|
||||||
|
"Index": "インデックス",
|
||||||
|
"Index / Subindex": "インデックス / サブインデックス",
|
||||||
|
"Index Episode Digits": "インデックスのエピソード桁数",
|
||||||
|
"Index Season Digits": "インデックスのシーズン桁数",
|
||||||
|
"Indicator Edisode Digits": "インジケーターのエピソード桁数",
|
||||||
|
"Indicator Season Digits": "インジケーターのシーズン桁数",
|
||||||
|
"Keep Editing": "編集を続ける",
|
||||||
|
"Keeping pending changes.": "保留中の変更を保持します。",
|
||||||
|
"Key": "キー",
|
||||||
|
"Language": "言語",
|
||||||
|
"Last Episode": "最後のエピソード",
|
||||||
|
"Last episode": "最後のエピソード",
|
||||||
|
"Layout": "レイアウト",
|
||||||
|
"Media Tags": "メディアタグ",
|
||||||
|
"More than one default audio stream detected and no prompt set": "デフォルト音声ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one default audio stream detected! Please select stream": "デフォルト音声ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "デフォルト字幕ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "デフォルト字幕ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one default video stream detected and no prompt set": "デフォルト映像ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one default video stream detected! Please select stream": "デフォルト映像ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "強制音声ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "強制音声ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "強制字幕ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "強制字幕ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"More than one forced video stream detected and no prompt set": "強制映像ストリームが複数検出され、プロンプトも設定されていません",
|
||||||
|
"More than one forced video stream detected! Please select stream": "強制映像ストリームが複数検出されました。ストリームを選択してください",
|
||||||
|
"Name": "名前",
|
||||||
|
"New Pattern": "新しいパターン",
|
||||||
|
"New Show": "新しい番組",
|
||||||
|
"New filename pattern": "新しいファイル名パターン",
|
||||||
|
"New shifted season": "新しいシーズンシフト",
|
||||||
|
"New stream": "新しいストリーム",
|
||||||
|
"No": "いいえ",
|
||||||
|
"No changes to apply.": "適用する変更はありません。",
|
||||||
|
"No changes to revert.": "元に戻す変更はありません。",
|
||||||
|
"Normalization disabled.": "正規化を無効にしました。",
|
||||||
|
"Normalization enabled.": "正規化を有効にしました。",
|
||||||
|
"Normalize": "正規化",
|
||||||
|
"Notes": "メモ",
|
||||||
|
"Pattern": "パターン",
|
||||||
|
"Planned Changes (file->edited output)": "予定された変更 (ファイル->編集後出力)",
|
||||||
|
"Quality": "品質",
|
||||||
|
"Quit": "終了",
|
||||||
|
"Remove Pattern": "パターンを削除",
|
||||||
|
"Revert": "元に戻す",
|
||||||
|
"Reverted pending changes.": "保留中の変更を元に戻しました。",
|
||||||
|
"Save": "保存",
|
||||||
|
"Season Offset": "シーズンオフセット",
|
||||||
|
"Select a stream first.": "まずストリームを選択してください。",
|
||||||
|
"Set Default": "デフォルトに設定",
|
||||||
|
"Set Forced": "強制に設定",
|
||||||
|
"Settings Screen": "設定画面",
|
||||||
|
"Numbering Mapping": "シフト済みシーズン",
|
||||||
|
"Show": "番組",
|
||||||
|
"Shows": "番組一覧",
|
||||||
|
"Source Season": "元シーズン",
|
||||||
|
"SrcIndex": "元インデックス",
|
||||||
|
"Status": "状態",
|
||||||
|
"Stay": "このまま",
|
||||||
|
"Stream dispositions": "ストリーム disposition",
|
||||||
|
"Stream tags": "ストリームタグ",
|
||||||
|
"Streams": "ストリーム",
|
||||||
|
"SubIndex": "サブインデックス",
|
||||||
|
"Substitute": "置換",
|
||||||
|
"Substitute pattern": "パターンを置換",
|
||||||
|
"Title": "タイトル",
|
||||||
|
"Type": "タイプ",
|
||||||
|
"Unable to update selected stream.": "選択したストリームを更新できませんでした。",
|
||||||
|
"Up": "上へ",
|
||||||
|
"Update Pattern": "パターンを更新",
|
||||||
|
"Updated media tag {tag!r}.": "メディアタグ {tag!r} を更新しました。",
|
||||||
|
"Updated stream #{index} ({track_type}).": "ストリーム #{index} ({track_type}) を更新しました。",
|
||||||
|
"Value": "値",
|
||||||
|
"Year": "年",
|
||||||
|
"Yes": "はい",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "メディアタグを追加: key='{key}' value='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "{track_type}ストリームを追加: index={index} lang={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "添付",
|
||||||
|
"audio": "音声",
|
||||||
|
"captions": "キャプション",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "メディアタグを変更: key='{key}' value='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "ストリーム #{index} ({track_type}:{sub_index}) disposition を追加={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "ストリーム #{index} ({track_type}:{sub_index}) key を追加={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "ストリーム #{index} ({track_type}:{sub_index}) key を変更={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "ストリーム #{index} ({track_type}:{sub_index}) disposition を削除={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "ストリーム #{index} ({track_type}:{sub_index}) key を削除={key} value={value}",
|
||||||
|
"clean_effects": "効果音のみ",
|
||||||
|
"comment": "コメント",
|
||||||
|
"default": "デフォルト",
|
||||||
|
"dependent": "依存",
|
||||||
|
"descriptions": "解説",
|
||||||
|
"dub": "吹替",
|
||||||
|
"for pattern": "パターン用",
|
||||||
|
"forced": "強制",
|
||||||
|
"from": "元",
|
||||||
|
"from pattern": "パターンから",
|
||||||
|
"from show": "番組から",
|
||||||
|
"hearing_impaired": "聴覚障害者向け",
|
||||||
|
"karaoke": "カラオケ",
|
||||||
|
"lyrics": "歌詞",
|
||||||
|
"metadata": "メタデータ",
|
||||||
|
"non_diegetic": "非ダイジェティック",
|
||||||
|
"original": "オリジナル",
|
||||||
|
"pattern #{id}": "パターン #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "メディアタグを削除: key='{key}' value='{value}'",
|
||||||
|
"remove stream #{index}": "ストリーム #{index} を削除",
|
||||||
|
"show #{id}": "番組 #{id}",
|
||||||
|
"stereo": "ステレオ",
|
||||||
|
"still_image": "静止画",
|
||||||
|
"sub index": "サブインデックス",
|
||||||
|
"subtitle": "字幕",
|
||||||
|
"timed_thumbnails": "時間指定サムネイル",
|
||||||
|
"undefined": "未定義",
|
||||||
|
"unknown": "不明",
|
||||||
|
"video": "映像",
|
||||||
|
"visual_impaired": "視覚障害者向け"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/nb.json
Normal file
361
assets/i18n/nb.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "Abkhazian",
|
||||||
|
"AFAR": "afar",
|
||||||
|
"AFRIKAANS": "Afrikansk",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albansk",
|
||||||
|
"AMHARIC": "Amharic",
|
||||||
|
"ARABIC": "Arabisk",
|
||||||
|
"ARAGONESE": "aragonsk",
|
||||||
|
"ARMENIAN": "armensk",
|
||||||
|
"ASSAMESE": "assamisk",
|
||||||
|
"AVARIC": "Avaric",
|
||||||
|
"AVESTAN": "avestisk",
|
||||||
|
"AYMARA": "aymara",
|
||||||
|
"AZERBAIJANI": "Aserbadjansk",
|
||||||
|
"BAMBARA": "bambara",
|
||||||
|
"BASHKIR": "basjkirsk",
|
||||||
|
"BASQUE": "Baskisk",
|
||||||
|
"BELARUSIAN": "Hviterussisk",
|
||||||
|
"BENGALI": "bengali",
|
||||||
|
"BISLAMA": "bislama",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bosnisk",
|
||||||
|
"BRETON": "Breton",
|
||||||
|
"BULGARIAN": "Bulgarsk",
|
||||||
|
"BURMESE": "burmesisk",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "chamorro",
|
||||||
|
"CHECHEN": "Chechen",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Kinesisk",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "tsjuvansk",
|
||||||
|
"CORNISH": "Cornish",
|
||||||
|
"CORSICAN": "Korsikansk",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Kroatsisk",
|
||||||
|
"CZECH": "Tjekkisk",
|
||||||
|
"DANISH": "Dansk",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "dzongkha",
|
||||||
|
"ENGLISH": "Engelsk",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estonsk",
|
||||||
|
"EWE": "ewe",
|
||||||
|
"FAROESE": "færøysk",
|
||||||
|
"FIJIAN": "fijiansk",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finsk",
|
||||||
|
"FRENCH": "Fransk",
|
||||||
|
"FULAH": "fulani",
|
||||||
|
"GALICIAN": "Galisisk",
|
||||||
|
"GANDA": "ganda",
|
||||||
|
"GEORGIAN": "Georgisk",
|
||||||
|
"GERMAN": "Tysk",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "gujarati",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Hausa",
|
||||||
|
"HEBREW": "Hebraisk",
|
||||||
|
"HERERO": "Herero",
|
||||||
|
"HINDI": "hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Ungarsk",
|
||||||
|
"ICELANDIC": "Islandsk",
|
||||||
|
"IDO": "ido",
|
||||||
|
"IGBO": "ibo",
|
||||||
|
"INDONESIAN": "Indonesisk",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "inuktitut",
|
||||||
|
"INUPIAQ": "unupiak",
|
||||||
|
"IRISH": "Irsk",
|
||||||
|
"ITALIAN": "Italiensk",
|
||||||
|
"JAPANESE": "Japansk",
|
||||||
|
"JAVANESE": "Javanesisk",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "kannada",
|
||||||
|
"KANURI": "Kanuri",
|
||||||
|
"KASHMIRI": "kasjmiri",
|
||||||
|
"KAZAKH": "kasakhisk",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "kinjarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "komi",
|
||||||
|
"KONGO": "kikongo",
|
||||||
|
"KOREAN": "Koreansk",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Kurdisk",
|
||||||
|
"LAO": "laotisk",
|
||||||
|
"LATIN": "Latin",
|
||||||
|
"LATVIAN": "Latvisk",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "lingala",
|
||||||
|
"LITHUANIAN": "Lituaisk",
|
||||||
|
"LUBA_KATANGA": "luba-katanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Makedonsk",
|
||||||
|
"MALAGASY": "madagassisk",
|
||||||
|
"MALAY": "malayisk",
|
||||||
|
"MALAYALAM": "malayalam",
|
||||||
|
"MALTESE": "Maltisk",
|
||||||
|
"MANX": "manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "Marathi",
|
||||||
|
"MARSHALLESE": "Marshallese",
|
||||||
|
"MONGOLIAN": "Mongolsk",
|
||||||
|
"NAURU": "nauru",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "Ndonga",
|
||||||
|
"NEPALI": "nepalsk",
|
||||||
|
"NORTHERN_SAMI": "nordsamisk",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norsk",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "ojibwa",
|
||||||
|
"ORIYA": "oriya",
|
||||||
|
"OROMO": "oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Pali",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persisk",
|
||||||
|
"POLISH": "Polsk",
|
||||||
|
"PORTUGUESE": "Portugisisk",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "quechua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romansh",
|
||||||
|
"RUNDI": "rundi",
|
||||||
|
"RUSSIAN": "Russisk",
|
||||||
|
"SAMOAN": "samoansk",
|
||||||
|
"SANGO": "sango",
|
||||||
|
"SANSKRIT": "sanskrit",
|
||||||
|
"SARDINIAN": "Sardinsk",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Serbisk",
|
||||||
|
"SHONA": "Shona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "sindhi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Slovakisk",
|
||||||
|
"SLOVENIAN": "Slovensk",
|
||||||
|
"SOMALI": "somalisk",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "sundanesisk",
|
||||||
|
"SWAHILI": "swahili",
|
||||||
|
"SWATI": "swati",
|
||||||
|
"SWEDISH": "Svensk",
|
||||||
|
"TAGALOG": "tagalog",
|
||||||
|
"TAHITIAN": "Tahitisk",
|
||||||
|
"TAJIK": "Tajik",
|
||||||
|
"TAMIL": "Tamilsk",
|
||||||
|
"TATAR": "tatarisk",
|
||||||
|
"TELUGU": "telugu",
|
||||||
|
"THAI": "Thai",
|
||||||
|
"TIBETAN": "tibetansk",
|
||||||
|
"TIGRINYA": "Tigrinya",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "tsonga",
|
||||||
|
"TSWANA": "tswana",
|
||||||
|
"TURKISH": "Tyrkisk",
|
||||||
|
"TURKMEN": "turkmensk",
|
||||||
|
"TWI": "twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ukrainsk",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "urdu",
|
||||||
|
"UZBEK": "usbekisk",
|
||||||
|
"VENDA": "venda",
|
||||||
|
"VIETNAMESE": "Vietnamesisk",
|
||||||
|
"VOLAPUK": "Volapük",
|
||||||
|
"WALLOON": "Vietnamesisk",
|
||||||
|
"WELSH": "Walisisk",
|
||||||
|
"WESTERN_FRISIAN": "Vestfrisisk",
|
||||||
|
"WOLOF": "wolof",
|
||||||
|
"XHOSA": "Xhosa",
|
||||||
|
"YIDDISH": "jiddisk",
|
||||||
|
"YORUBA": "joruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "Zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Ny serie>",
|
||||||
|
"Add": "Legg til",
|
||||||
|
"Add Pattern": "Legg til mønster",
|
||||||
|
"Apply": "Bruk",
|
||||||
|
"Apply failed: {error}": "Kunne ikke bruke endringene: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Er du sikker på at du vil slette følgende filnavnmønster?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Er du sikker på at du vil slette følgende forskjøvede sesong?",
|
||||||
|
"Are you sure to delete the following show?": "Er du sikker på at du vil slette følgende serie?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Er du sikker på at du vil slette følgende {track_type}-spor?",
|
||||||
|
"Are you sure to delete this tag?": "Er du sikker på at du vil slette denne taggen?",
|
||||||
|
"Audio Layout": "Lydoppsett",
|
||||||
|
"Back": "Tilbake",
|
||||||
|
"Cancel": "Avbryt",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Kan ikke legge til en ny strøm med disposisjonsflagget 'default' eller 'forced' satt",
|
||||||
|
"Changes applied and file reloaded.": "Endringene er brukt og filen er lastet inn på nytt.",
|
||||||
|
"Cleanup": "Rydd opp",
|
||||||
|
"Cleanup disabled.": "Rydding deaktivert.",
|
||||||
|
"Cleanup enabled.": "Rydding aktivert.",
|
||||||
|
"Codec": "Kodek",
|
||||||
|
"Continuing edit session.": "Fortsetter redigeringsøkten.",
|
||||||
|
"Default": "Standard",
|
||||||
|
"Delete": "Slett",
|
||||||
|
"Delete Show": "Slett serie",
|
||||||
|
"Deleted media tag {tag!r}.": "Mediataggen {tag!r} ble slettet.",
|
||||||
|
"Differences": "Forskjeller",
|
||||||
|
"Differences (file->db/output)": "Forskjeller (fil->DB/utdata)",
|
||||||
|
"Discard": "Forkast",
|
||||||
|
"Discard pending metadata changes and quit?": "Forkaste ventende metadataendringer og avslutte?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Forkaste ventende metadataendringer og laste filtilstanden på nytt?",
|
||||||
|
"Down": "Ned",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Tørrkjøring: ville skrevet om via midlertidig fil {target_path}",
|
||||||
|
"Edit": "Rediger",
|
||||||
|
"Edit Pattern": "Rediger mønster",
|
||||||
|
"Edit Show": "Rediger serie",
|
||||||
|
"Edit filename pattern": "Rediger filnavnmønster",
|
||||||
|
"Edit shifted season": "Rediger forskjøvet sesong",
|
||||||
|
"Edit stream": "Rediger strøm",
|
||||||
|
"Episode Offset": "Episodeforskyvning",
|
||||||
|
"Episode offset": "Episodeforskyvning",
|
||||||
|
"File": "Fil",
|
||||||
|
"File patterns": "Filmønstre",
|
||||||
|
"First Episode": "Første episode",
|
||||||
|
"First episode": "Første episode",
|
||||||
|
"Forced": "Tvungen",
|
||||||
|
"Help": "Hjelp",
|
||||||
|
"Help Screen": "Hjelpeskjerm",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identifiser",
|
||||||
|
"Index": "Indeks",
|
||||||
|
"Index / Subindex": "Indeks / Underindeks",
|
||||||
|
"Index Episode Digits": "Siffer for episodeindeks",
|
||||||
|
"Index Season Digits": "Siffer for sesongindeks",
|
||||||
|
"Indicator Edisode Digits": "Siffer for episodeindikator",
|
||||||
|
"Indicator Season Digits": "Siffer for sesongindikator",
|
||||||
|
"Keep Editing": "Fortsett redigeringen",
|
||||||
|
"Keeping pending changes.": "Beholder ventende endringer.",
|
||||||
|
"Key": "Nøkkel",
|
||||||
|
"Language": "Språk",
|
||||||
|
"Last Episode": "Siste episode",
|
||||||
|
"Last episode": "Siste episode",
|
||||||
|
"Layout": "Oppsett",
|
||||||
|
"Media Tags": "Mediatagger",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Mer enn én standard lydstrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Mer enn én standard lydstrøm funnet. Velg strøm",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Mer enn én standard undertekststrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Mer enn én standard undertekststrøm funnet. Velg strøm",
|
||||||
|
"More than one default video stream detected and no prompt set": "Mer enn én standard videostrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one default video stream detected! Please select stream": "Mer enn én standard videostrøm funnet. Velg strøm",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Mer enn én tvungen lydstrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Mer enn én tvungen lydstrøm funnet. Velg strøm",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Mer enn én tvungen undertekststrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Mer enn én tvungen undertekststrøm funnet. Velg strøm",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Mer enn én tvungen videostrøm funnet og ingen forespørsel satt",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Mer enn én tvungen videostrøm funnet. Velg strøm",
|
||||||
|
"Name": "Navn",
|
||||||
|
"New Pattern": "Nytt mønster",
|
||||||
|
"New Show": "Ny serie",
|
||||||
|
"New filename pattern": "Nytt filnavnmønster",
|
||||||
|
"New shifted season": "Ny forskjøvet sesong",
|
||||||
|
"New stream": "Ny strøm",
|
||||||
|
"No": "Nei",
|
||||||
|
"No changes to apply.": "Ingen endringer å bruke.",
|
||||||
|
"No changes to revert.": "Ingen endringer å tilbakestille.",
|
||||||
|
"Normalization disabled.": "Normalisering deaktivert.",
|
||||||
|
"Normalization enabled.": "Normalisering aktivert.",
|
||||||
|
"Normalize": "Normaliser",
|
||||||
|
"Notes": "Notater",
|
||||||
|
"Pattern": "Mønster",
|
||||||
|
"Planned Changes (file->edited output)": "Planlagte endringer (fil->redigert utdata)",
|
||||||
|
"Quality": "Kvalitet",
|
||||||
|
"Quit": "Avslutt",
|
||||||
|
"Remove Pattern": "Fjern mønster",
|
||||||
|
"Revert": "Tilbakestill",
|
||||||
|
"Reverted pending changes.": "Ventende endringer ble tilbakestilt.",
|
||||||
|
"Save": "Lagre",
|
||||||
|
"Season Offset": "Sesongforskyvning",
|
||||||
|
"Select a stream first.": "Velg en strøm først.",
|
||||||
|
"Set Default": "Sett som standard",
|
||||||
|
"Set Forced": "Sett som tvungen",
|
||||||
|
"Settings Screen": "Innstillingsskjerm",
|
||||||
|
"Numbering Mapping": "Forskjøvne sesonger",
|
||||||
|
"Show": "Serie",
|
||||||
|
"Shows": "Serier",
|
||||||
|
"Source Season": "Kildesesong",
|
||||||
|
"SrcIndex": "Kildeindeks",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Bli",
|
||||||
|
"Stream dispositions": "Strømdisposisjoner",
|
||||||
|
"Stream tags": "Strømtagger",
|
||||||
|
"Streams": "Strømmer",
|
||||||
|
"SubIndex": "Underindeks",
|
||||||
|
"Substitute": "Erstatt",
|
||||||
|
"Substitute pattern": "Erstatt mønster",
|
||||||
|
"Title": "Tittel",
|
||||||
|
"Type": "Type",
|
||||||
|
"Unable to update selected stream.": "Kunne ikke oppdatere valgt strøm.",
|
||||||
|
"Up": "Opp",
|
||||||
|
"Update Pattern": "Oppdater mønster",
|
||||||
|
"Updated media tag {tag!r}.": "Mediataggen {tag!r} ble oppdatert.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Strøm #{index} ({track_type}) oppdatert.",
|
||||||
|
"Value": "Verdi",
|
||||||
|
"Year": "År",
|
||||||
|
"Yes": "Ja",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "legg til mediatagg: nøkkel='{key}' verdi='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "legg til {track_type}-spor: indeks={index} språk={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "vedlegg",
|
||||||
|
"audio": "lyd",
|
||||||
|
"captions": "teksting",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "endre mediatagg: nøkkel='{key}' verdi='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "endre strøm #{index} ({track_type}:{sub_index}) legg til disposisjon={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "endre strøm #{index} ({track_type}:{sub_index}) legg til nøkkel={key} verdi={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "endre strøm #{index} ({track_type}:{sub_index}) endre nøkkel={key} verdi={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "endre strøm #{index} ({track_type}:{sub_index}) fjern disposisjon={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "endre strøm #{index} ({track_type}:{sub_index}) fjern nøkkel={key} verdi={value}",
|
||||||
|
"clean_effects": "bare effekter",
|
||||||
|
"comment": "kommentar",
|
||||||
|
"default": "standard",
|
||||||
|
"dependent": "avhengig",
|
||||||
|
"descriptions": "beskrivelser",
|
||||||
|
"dub": "dubbet",
|
||||||
|
"for pattern": "for mønster",
|
||||||
|
"forced": "tvungen",
|
||||||
|
"from": "fra",
|
||||||
|
"from pattern": "fra mønster",
|
||||||
|
"from show": "fra serie",
|
||||||
|
"hearing_impaired": "hørselshemmet",
|
||||||
|
"karaoke": "karaoke",
|
||||||
|
"lyrics": "sangtekst",
|
||||||
|
"metadata": "metadata",
|
||||||
|
"non_diegetic": "ikke-diegetisk",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "mønster #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "fjern mediatagg: nøkkel='{key}' verdi='{value}'",
|
||||||
|
"remove stream #{index}": "fjern strøm #{index}",
|
||||||
|
"show #{id}": "serie #{id}",
|
||||||
|
"stereo": "stereo",
|
||||||
|
"still_image": "stillbilde",
|
||||||
|
"sub index": "underindeks",
|
||||||
|
"subtitle": "undertekst",
|
||||||
|
"timed_thumbnails": "tidsbestemte miniatyrer",
|
||||||
|
"undefined": "udefinert",
|
||||||
|
"unknown": "ukjent",
|
||||||
|
"video": "video",
|
||||||
|
"visual_impaired": "synshemmet"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/pt.json
Normal file
361
assets/i18n/pt.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "abkhazian",
|
||||||
|
"AFAR": "afar",
|
||||||
|
"AFRIKAANS": "Africanos",
|
||||||
|
"AKAN": "Akan",
|
||||||
|
"ALBANIAN": "Albanês",
|
||||||
|
"AMHARIC": "Amárico",
|
||||||
|
"ARABIC": "Árabe",
|
||||||
|
"ARAGONESE": "Aragonês",
|
||||||
|
"ARMENIAN": "arménio",
|
||||||
|
"ASSAMESE": "assamês",
|
||||||
|
"AVARIC": "Avárico",
|
||||||
|
"AVESTAN": "avéstico",
|
||||||
|
"AYMARA": "aimara",
|
||||||
|
"AZERBAIJANI": "Azerbaijani",
|
||||||
|
"BAMBARA": "bambara",
|
||||||
|
"BASHKIR": "bashkir",
|
||||||
|
"BASQUE": "Basco",
|
||||||
|
"BELARUSIAN": "Bielorusso",
|
||||||
|
"BENGALI": "Bengali",
|
||||||
|
"BISLAMA": "bislamá",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "Bósnio",
|
||||||
|
"BRETON": "Bretão",
|
||||||
|
"BULGARIAN": "Búlgaro",
|
||||||
|
"BURMESE": "birmanês",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "chamorro",
|
||||||
|
"CHECHEN": "Checheno",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "Chinês",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "chuvash",
|
||||||
|
"CORNISH": "Córnico",
|
||||||
|
"CORSICAN": "córsico",
|
||||||
|
"CREE": "Cree",
|
||||||
|
"CROATIAN": "Croata",
|
||||||
|
"CZECH": "Checo",
|
||||||
|
"DANISH": "Dinamarquês",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "dzonga",
|
||||||
|
"ENGLISH": "Inglês",
|
||||||
|
"ESPERANTO": "Esperanto",
|
||||||
|
"ESTONIAN": "Estoniano",
|
||||||
|
"EWE": "eve",
|
||||||
|
"FAROESE": "Faroês",
|
||||||
|
"FIJIAN": "fijiano",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "Finlandês",
|
||||||
|
"FRENCH": "Francês",
|
||||||
|
"FULAH": "fula",
|
||||||
|
"GALICIAN": "Galego",
|
||||||
|
"GANDA": "luganda",
|
||||||
|
"GEORGIAN": "georgiano",
|
||||||
|
"GERMAN": "Alemão",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "Guarani",
|
||||||
|
"GUJARATI": "Guzerate",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "Hauçá",
|
||||||
|
"HEBREW": "Hebreu",
|
||||||
|
"HERERO": "Hereró",
|
||||||
|
"HINDI": "Hindi",
|
||||||
|
"HIRI_MOTU": "Hiri Motu",
|
||||||
|
"HUNGARIAN": "Húngaro",
|
||||||
|
"ICELANDIC": "Islandês",
|
||||||
|
"IDO": "ido",
|
||||||
|
"IGBO": "ibo",
|
||||||
|
"INDONESIAN": "Indonésio",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "inuktitut",
|
||||||
|
"INUPIAQ": "Inupiaque",
|
||||||
|
"IRISH": "Irlandês",
|
||||||
|
"ITALIAN": "Italiano",
|
||||||
|
"JAPANESE": "Japonês",
|
||||||
|
"JAVANESE": "Javanês",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "Kannada",
|
||||||
|
"KANURI": "Canúri",
|
||||||
|
"KASHMIRI": "kashmiri",
|
||||||
|
"KAZAKH": "cazaque",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "kinyarwanda",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "komi",
|
||||||
|
"KONGO": "congolês",
|
||||||
|
"KOREAN": "Coreano",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "Curdo",
|
||||||
|
"LAO": "Laosiano",
|
||||||
|
"LATIN": "Latim",
|
||||||
|
"LATVIAN": "Letão",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "Lingala",
|
||||||
|
"LITHUANIAN": "Lituano",
|
||||||
|
"LUBA_KATANGA": "luba-catanga",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "Macedônio",
|
||||||
|
"MALAGASY": "malgaxe",
|
||||||
|
"MALAY": "Malaio",
|
||||||
|
"MALAYALAM": "malaiala",
|
||||||
|
"MALTESE": "Maltês",
|
||||||
|
"MANX": "Manx",
|
||||||
|
"MAORI": "Maori",
|
||||||
|
"MARATHI": "marata",
|
||||||
|
"MARSHALLESE": "Marshalês",
|
||||||
|
"MONGOLIAN": "Mongol",
|
||||||
|
"NAURU": "nauruano",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "dongo",
|
||||||
|
"NEPALI": "Nepalês",
|
||||||
|
"NORTHERN_SAMI": "northern sami",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "Norueguês",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "ojibwa",
|
||||||
|
"ORIYA": "oriya",
|
||||||
|
"OROMO": "Oromo",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "Páli",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "Persa",
|
||||||
|
"POLISH": "Polaco",
|
||||||
|
"PORTUGUESE": "Português",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "quíchua",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "Romanche",
|
||||||
|
"RUNDI": "rundi",
|
||||||
|
"RUSSIAN": "Russo",
|
||||||
|
"SAMOAN": "Samoano",
|
||||||
|
"SANGO": "sango",
|
||||||
|
"SANSKRIT": "Sânscrito",
|
||||||
|
"SARDINIAN": "Sardo",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "Sérvio",
|
||||||
|
"SHONA": "Xona",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "sindi",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "Eslovaco",
|
||||||
|
"SLOVENIAN": "Eslovêno",
|
||||||
|
"SOMALI": "somali",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "sundanês",
|
||||||
|
"SWAHILI": "suaíli",
|
||||||
|
"SWATI": "swati",
|
||||||
|
"SWEDISH": "Sueco",
|
||||||
|
"TAGALOG": "Tagalo",
|
||||||
|
"TAHITIAN": "Taitiano",
|
||||||
|
"TAJIK": "Tadjique",
|
||||||
|
"TAMIL": "Tâmil",
|
||||||
|
"TATAR": "tatar",
|
||||||
|
"TELUGU": "Telugu",
|
||||||
|
"THAI": "Tailandês",
|
||||||
|
"TIBETAN": "tibetano",
|
||||||
|
"TIGRINYA": "Tigrínia",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "tsonga",
|
||||||
|
"TSWANA": "tswana",
|
||||||
|
"TURKISH": "Turco",
|
||||||
|
"TURKMEN": "turcomano",
|
||||||
|
"TWI": "twi",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "Ucraniano",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "urdu",
|
||||||
|
"UZBEK": "usbeque",
|
||||||
|
"VENDA": "venda",
|
||||||
|
"VIETNAMESE": "Vietnamita",
|
||||||
|
"VOLAPUK": "Volapuque",
|
||||||
|
"WALLOON": "walloon",
|
||||||
|
"WELSH": "galês",
|
||||||
|
"WESTERN_FRISIAN": "Frísio ocidental",
|
||||||
|
"WOLOF": "uolofe",
|
||||||
|
"XHOSA": "xosa",
|
||||||
|
"YIDDISH": "iídiche",
|
||||||
|
"YORUBA": "ioruba",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "zulu"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<Nova série>",
|
||||||
|
"Add": "Adicionar",
|
||||||
|
"Add Pattern": "Adicionar padrão",
|
||||||
|
"Apply": "Aplicar",
|
||||||
|
"Apply failed: {error}": "Falha ao aplicar: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "Tem certeza de que deseja excluir o seguinte padrão de nome de arquivo?",
|
||||||
|
"Are you sure to delete the following shifted season?": "Tem certeza de que deseja excluir a seguinte temporada deslocada?",
|
||||||
|
"Are you sure to delete the following show?": "Tem certeza de que deseja excluir a seguinte série?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "Tem certeza de que deseja excluir a seguinte faixa {track_type}?",
|
||||||
|
"Are you sure to delete this tag?": "Tem certeza de que deseja excluir esta tag?",
|
||||||
|
"Audio Layout": "Layout de áudio",
|
||||||
|
"Back": "Voltar",
|
||||||
|
"Cancel": "Cancelar",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "Não é possível adicionar outro fluxo com a flag de disposição 'default' ou 'forced' definida",
|
||||||
|
"Changes applied and file reloaded.": "Alterações aplicadas e arquivo recarregado.",
|
||||||
|
"Cleanup": "Limpeza",
|
||||||
|
"Cleanup disabled.": "Limpeza desativada.",
|
||||||
|
"Cleanup enabled.": "Limpeza ativada.",
|
||||||
|
"Codec": "Codec",
|
||||||
|
"Continuing edit session.": "Continuando a sessão de edição.",
|
||||||
|
"Default": "Padrão",
|
||||||
|
"Delete": "Excluir",
|
||||||
|
"Delete Show": "Excluir série",
|
||||||
|
"Deleted media tag {tag!r}.": "Tag de mídia {tag!r} excluída.",
|
||||||
|
"Differences": "Diferenças",
|
||||||
|
"Differences (file->db/output)": "Diferenças (arquivo->BD/saída)",
|
||||||
|
"Discard": "Descartar",
|
||||||
|
"Discard pending metadata changes and quit?": "Descartar alterações pendentes de metadados e sair?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "Descartar alterações pendentes de metadados e recarregar o estado do arquivo?",
|
||||||
|
"Down": "Baixo",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Execução simulada: regravaria via arquivo temporário {target_path}",
|
||||||
|
"Edit": "Editar",
|
||||||
|
"Edit Pattern": "Editar padrão",
|
||||||
|
"Edit Show": "Editar série",
|
||||||
|
"Edit filename pattern": "Editar padrão de nome de arquivo",
|
||||||
|
"Edit shifted season": "Editar temporada deslocada",
|
||||||
|
"Edit stream": "Editar fluxo",
|
||||||
|
"Episode Offset": "Deslocamento de episódio",
|
||||||
|
"Episode offset": "Deslocamento de episódio",
|
||||||
|
"File": "Arquivo",
|
||||||
|
"File patterns": "Padrões de arquivo",
|
||||||
|
"First Episode": "Primeiro episódio",
|
||||||
|
"First episode": "Primeiro episódio",
|
||||||
|
"Forced": "Forçado",
|
||||||
|
"Help": "Ajuda",
|
||||||
|
"Help Screen": "Tela de ajuda",
|
||||||
|
"ID": "ID",
|
||||||
|
"Identify": "Identificar",
|
||||||
|
"Index": "Índice",
|
||||||
|
"Index / Subindex": "Índice / Subíndice",
|
||||||
|
"Index Episode Digits": "Dígitos do índice do episódio",
|
||||||
|
"Index Season Digits": "Dígitos do índice da temporada",
|
||||||
|
"Indicator Edisode Digits": "Dígitos do indicador do episódio",
|
||||||
|
"Indicator Season Digits": "Dígitos do indicador da temporada",
|
||||||
|
"Keep Editing": "Continuar editando",
|
||||||
|
"Keeping pending changes.": "Mantendo alterações pendentes.",
|
||||||
|
"Key": "Chave",
|
||||||
|
"Language": "Idioma",
|
||||||
|
"Last Episode": "Último episódio",
|
||||||
|
"Last episode": "Último episódio",
|
||||||
|
"Layout": "Layout",
|
||||||
|
"Media Tags": "Tags de mídia",
|
||||||
|
"More than one default audio stream detected and no prompt set": "Mais de um fluxo de áudio padrão detectado e nenhum prompt definido",
|
||||||
|
"More than one default audio stream detected! Please select stream": "Mais de um fluxo de áudio padrão detectado! Selecione o fluxo",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "Mais de um fluxo de legenda padrão detectado e nenhum prompt definido",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "Mais de um fluxo de legenda padrão detectado! Selecione o fluxo",
|
||||||
|
"More than one default video stream detected and no prompt set": "Mais de um fluxo de vídeo padrão detectado e nenhum prompt definido",
|
||||||
|
"More than one default video stream detected! Please select stream": "Mais de um fluxo de vídeo padrão detectado! Selecione o fluxo",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "Mais de um fluxo de áudio forçado detectado e nenhum prompt definido",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "Mais de um fluxo de áudio forçado detectado! Selecione o fluxo",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "Mais de um fluxo de legenda forçada detectado e nenhum prompt definido",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "Mais de um fluxo de legenda forçada detectado! Selecione o fluxo",
|
||||||
|
"More than one forced video stream detected and no prompt set": "Mais de um fluxo de vídeo forçado detectado e nenhum prompt definido",
|
||||||
|
"More than one forced video stream detected! Please select stream": "Mais de um fluxo de vídeo forçado detectado! Selecione o fluxo",
|
||||||
|
"Name": "Nome",
|
||||||
|
"New Pattern": "Novo padrão",
|
||||||
|
"New Show": "Nova série",
|
||||||
|
"New filename pattern": "Novo padrão de nome de arquivo",
|
||||||
|
"New shifted season": "Nova temporada deslocada",
|
||||||
|
"New stream": "Novo fluxo",
|
||||||
|
"No": "Não",
|
||||||
|
"No changes to apply.": "Nenhuma alteração para aplicar.",
|
||||||
|
"No changes to revert.": "Nenhuma alteração para reverter.",
|
||||||
|
"Normalization disabled.": "Normalização desativada.",
|
||||||
|
"Normalization enabled.": "Normalização ativada.",
|
||||||
|
"Normalize": "Normalizar",
|
||||||
|
"Notes": "Notas",
|
||||||
|
"Pattern": "Padrão",
|
||||||
|
"Planned Changes (file->edited output)": "Alterações planejadas (arquivo->saída editada)",
|
||||||
|
"Quality": "Qualidade",
|
||||||
|
"Quit": "Sair",
|
||||||
|
"Remove Pattern": "Remover padrão",
|
||||||
|
"Revert": "Reverter",
|
||||||
|
"Reverted pending changes.": "Alterações pendentes revertidas.",
|
||||||
|
"Save": "Salvar",
|
||||||
|
"Season Offset": "Deslocamento de temporada",
|
||||||
|
"Select a stream first.": "Selecione um fluxo primeiro.",
|
||||||
|
"Set Default": "Definir como padrão",
|
||||||
|
"Set Forced": "Definir como forçado",
|
||||||
|
"Settings Screen": "Tela de configurações",
|
||||||
|
"Numbering Mapping": "Temporadas deslocadas",
|
||||||
|
"Show": "Série",
|
||||||
|
"Shows": "Séries",
|
||||||
|
"Source Season": "Temporada de origem",
|
||||||
|
"SrcIndex": "Índice de origem",
|
||||||
|
"Status": "Status",
|
||||||
|
"Stay": "Permanecer",
|
||||||
|
"Stream dispositions": "Disposições do fluxo",
|
||||||
|
"Stream tags": "Tags do fluxo",
|
||||||
|
"Streams": "Fluxos",
|
||||||
|
"SubIndex": "Subíndice",
|
||||||
|
"Substitute": "Substituir",
|
||||||
|
"Substitute pattern": "Substituir padrão",
|
||||||
|
"Title": "Título",
|
||||||
|
"Type": "Tipo",
|
||||||
|
"Unable to update selected stream.": "Não foi possível atualizar o fluxo selecionado.",
|
||||||
|
"Up": "Cima",
|
||||||
|
"Update Pattern": "Atualizar padrão",
|
||||||
|
"Updated media tag {tag!r}.": "Tag de mídia {tag!r} atualizada.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "Fluxo #{index} ({track_type}) atualizado.",
|
||||||
|
"Value": "Valor",
|
||||||
|
"Year": "Ano",
|
||||||
|
"Yes": "Sim",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "adicionar tag de mídia: chave='{key}' valor='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "adicionar faixa {track_type}: índice={index} idioma={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "anexo",
|
||||||
|
"audio": "áudio",
|
||||||
|
"captions": "legendas",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "alterar tag de mídia: chave='{key}' valor='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "alterar fluxo #{index} ({track_type}:{sub_index}) adicionar disposição={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "alterar fluxo #{index} ({track_type}:{sub_index}) adicionar chave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "alterar fluxo #{index} ({track_type}:{sub_index}) alterar chave={key} valor={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "alterar fluxo #{index} ({track_type}:{sub_index}) remover disposição={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "alterar fluxo #{index} ({track_type}:{sub_index}) remover chave={key} valor={value}",
|
||||||
|
"clean_effects": "apenas efeitos",
|
||||||
|
"comment": "comentário",
|
||||||
|
"default": "padrão",
|
||||||
|
"dependent": "dependente",
|
||||||
|
"descriptions": "descrições",
|
||||||
|
"dub": "dublado",
|
||||||
|
"for pattern": "para o padrão",
|
||||||
|
"forced": "forçado",
|
||||||
|
"from": "de",
|
||||||
|
"from pattern": "do padrão",
|
||||||
|
"from show": "da série",
|
||||||
|
"hearing_impaired": "deficiência auditiva",
|
||||||
|
"karaoke": "karaokê",
|
||||||
|
"lyrics": "letra",
|
||||||
|
"metadata": "metadados",
|
||||||
|
"non_diegetic": "não diegético",
|
||||||
|
"original": "original",
|
||||||
|
"pattern #{id}": "padrão #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "remover tag de mídia: chave='{key}' valor='{value}'",
|
||||||
|
"remove stream #{index}": "remover fluxo #{index}",
|
||||||
|
"show #{id}": "série #{id}",
|
||||||
|
"stereo": "estéreo",
|
||||||
|
"still_image": "imagem estática",
|
||||||
|
"sub index": "subíndice",
|
||||||
|
"subtitle": "legenda",
|
||||||
|
"timed_thumbnails": "miniaturas temporizadas",
|
||||||
|
"undefined": "indefinido",
|
||||||
|
"unknown": "desconhecido",
|
||||||
|
"video": "vídeo",
|
||||||
|
"visual_impaired": "deficiência visual"
|
||||||
|
}
|
||||||
|
}
|
||||||
361
assets/i18n/ta.json
Normal file
361
assets/i18n/ta.json
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
{
|
||||||
|
"iso_languages": {
|
||||||
|
"ABKHAZIAN": "அப்காசியன்",
|
||||||
|
"AFAR": "அஃபர்",
|
||||||
|
"AFRIKAANS": "ஆப்ரிக்கான்ச்",
|
||||||
|
"AKAN": "அகான்",
|
||||||
|
"ALBANIAN": "அல்பேனியன்",
|
||||||
|
"AMHARIC": "அம்ஆரிக்",
|
||||||
|
"ARABIC": "அராபிக்",
|
||||||
|
"ARAGONESE": "அரகோன்ச்",
|
||||||
|
"ARMENIAN": "அர்மேனியன்",
|
||||||
|
"ASSAMESE": "அச்சாமி",
|
||||||
|
"AVARIC": "அவாரிக்",
|
||||||
|
"AVESTAN": "அவேச்டன்",
|
||||||
|
"AYMARA": "அய்மாரா",
|
||||||
|
"AZERBAIJANI": "அசெர்பெய்சானி",
|
||||||
|
"BAMBARA": "பம்பரா",
|
||||||
|
"BASHKIR": "பாச்கிர்",
|
||||||
|
"BASQUE": "பாச்க்",
|
||||||
|
"BELARUSIAN": "பெலாருசியன்",
|
||||||
|
"BENGALI": "பெங்காலி",
|
||||||
|
"BISLAMA": "பிச்லாமா",
|
||||||
|
"BOKMAL": "Bokmål",
|
||||||
|
"BOSNIAN": "போச்னியன்",
|
||||||
|
"BRETON": "ப்ரெடன்",
|
||||||
|
"BULGARIAN": "பல்கேரியன்",
|
||||||
|
"BURMESE": "பர்மீசி",
|
||||||
|
"CATALAN": "Catalan",
|
||||||
|
"CHAMORRO": "சாமோர்ரோ",
|
||||||
|
"CHECHEN": "செக்சன்",
|
||||||
|
"CHICHEWA": "Chichewa",
|
||||||
|
"CHINESE": "சைனீச்",
|
||||||
|
"CHURCH_SLAVIC": "Church Slavic",
|
||||||
|
"CHUVASH": "சுவாச்",
|
||||||
|
"CORNISH": "கோர்னிச்",
|
||||||
|
"CORSICAN": "கோர்சிகேன்",
|
||||||
|
"CREE": "சிரீ",
|
||||||
|
"CROATIAN": "குரேசியன்",
|
||||||
|
"CZECH": "செக்",
|
||||||
|
"DANISH": "டானிச்",
|
||||||
|
"DIVEHI": "Divehi",
|
||||||
|
"DUTCH": "Dutch",
|
||||||
|
"DZONGKHA": "ட்சொங்க்கா",
|
||||||
|
"ENGLISH": "ஆங்கிலம்",
|
||||||
|
"ESPERANTO": "எச்பெரான்டொ",
|
||||||
|
"ESTONIAN": "எச்டோனியன்",
|
||||||
|
"EWE": "இவ்",
|
||||||
|
"FAROESE": "ஃபரோச்",
|
||||||
|
"FIJIAN": "ஃபிசியன்",
|
||||||
|
"FILIPINO": "Filipino",
|
||||||
|
"FINNISH": "பின்னிச்",
|
||||||
|
"FRENCH": "பிரெஞ்சு",
|
||||||
|
"FULAH": "ஃபுல்லா",
|
||||||
|
"GALICIAN": "காலிசியன்",
|
||||||
|
"GANDA": "கான்டா",
|
||||||
|
"GEORGIAN": "சியார்சியன்",
|
||||||
|
"GERMAN": "செர்மன்",
|
||||||
|
"GREEK": "Greek",
|
||||||
|
"GUARANI": "குர்ரானி",
|
||||||
|
"GUJARATI": "குசராத்தி",
|
||||||
|
"HAITIAN": "Haitian",
|
||||||
|
"HAUSA": "ஔசா",
|
||||||
|
"HEBREW": "ஈப்ரு",
|
||||||
|
"HERERO": "இரீரோ",
|
||||||
|
"HINDI": "இந்தி",
|
||||||
|
"HIRI_MOTU": "இரி மோட்டு",
|
||||||
|
"HUNGARIAN": "அங்கேரியன்",
|
||||||
|
"ICELANDIC": "ஐச்லாண்டிக்",
|
||||||
|
"IDO": "ஐடூ",
|
||||||
|
"IGBO": "இக்போ",
|
||||||
|
"INDONESIAN": "இந்தோனேசியன்",
|
||||||
|
"INTERLINGUA": "Interlingua",
|
||||||
|
"INTERLINGUE": "Interlingue",
|
||||||
|
"INUKTITUT": "இனுடிடட்",
|
||||||
|
"INUPIAQ": "இனுபைக்யூ",
|
||||||
|
"IRISH": "ஐரிச்",
|
||||||
|
"ITALIAN": "இத்தாலியன்",
|
||||||
|
"JAPANESE": "சப்பானிய",
|
||||||
|
"JAVANESE": "சவானிச்",
|
||||||
|
"KALAALLISUT": "Kalaallisut",
|
||||||
|
"KANNADA": "கன்னடம்",
|
||||||
|
"KANURI": "கனுரி",
|
||||||
|
"KASHMIRI": "காச்மீரி",
|
||||||
|
"KAZAKH": "கசாக்ச்",
|
||||||
|
"KHMER": "Khmer",
|
||||||
|
"KIKUYU": "Kikuyu",
|
||||||
|
"KINYARWANDA": "கின்யார்வான்டா",
|
||||||
|
"KIRGHIZ": "Kirghiz",
|
||||||
|
"KOMI": "கோமி",
|
||||||
|
"KONGO": "காங்கோ",
|
||||||
|
"KOREAN": "கொரியன்",
|
||||||
|
"KUANYAMA": "Kuanyama",
|
||||||
|
"KURDISH": "குர்திச்",
|
||||||
|
"LAO": "லாவோ",
|
||||||
|
"LATIN": "லத்தீன்",
|
||||||
|
"LATVIAN": "லாட்வியன்",
|
||||||
|
"LIMBURGAN": "Limburgan",
|
||||||
|
"LINGALA": "லின்காலா",
|
||||||
|
"LITHUANIAN": "லிதுவேனியன்",
|
||||||
|
"LUBA_KATANGA": "லூபா-கடான்கா",
|
||||||
|
"LUXEMBOURGISH": "Luxembourgish",
|
||||||
|
"MACEDONIAN": "மேசடோனியன்",
|
||||||
|
"MALAGASY": "மலகாசி",
|
||||||
|
"MALAY": "மலாய்",
|
||||||
|
"MALAYALAM": "மலையாளம்",
|
||||||
|
"MALTESE": "மல்டீச்",
|
||||||
|
"MANX": "மான்ச்",
|
||||||
|
"MAORI": "மௌரி",
|
||||||
|
"MARATHI": "மராத்தி",
|
||||||
|
"MARSHALLESE": "மார்சலீசீ",
|
||||||
|
"MONGOLIAN": "மங்கோலியன்",
|
||||||
|
"NAURU": "நவூரு",
|
||||||
|
"NAVAJO": "Navajo",
|
||||||
|
"NDONGA": "நடோன்கா",
|
||||||
|
"NEPALI": "நேபாலி",
|
||||||
|
"NORTHERN_SAMI": "கிழக்கு சாமி",
|
||||||
|
"NORTH_NDEBELE": "North Ndebele",
|
||||||
|
"NORWEGIAN": "நார்வேசியன்",
|
||||||
|
"NORWEGIAN_NYNORSK": "Nynorsk",
|
||||||
|
"OCCITAN": "Occitan",
|
||||||
|
"OJIBWA": "ஒசிப்வா",
|
||||||
|
"ORIYA": "ஒரியா",
|
||||||
|
"OROMO": "ஒரோமோ",
|
||||||
|
"OSSETIAN": "Ossetian",
|
||||||
|
"PALI": "பாலி",
|
||||||
|
"PANJABI": "Panjabi",
|
||||||
|
"PERSIAN": "பெர்சியன்",
|
||||||
|
"POLISH": "போலிச்",
|
||||||
|
"PORTUGUESE": "போர்த்துக்கீசிய",
|
||||||
|
"PUSHTO": "Pushto",
|
||||||
|
"QUECHUA": "க்யுசோ",
|
||||||
|
"ROMANIAN": "Romanian",
|
||||||
|
"ROMANSH": "ரோமான்ச்ச்",
|
||||||
|
"RUNDI": "ருண்டி",
|
||||||
|
"RUSSIAN": "ரச்யன்",
|
||||||
|
"SAMOAN": "சாமோயன்",
|
||||||
|
"SANGO": "சான்ங்கோ",
|
||||||
|
"SANSKRIT": "சான்ச்கிரிட்",
|
||||||
|
"SARDINIAN": "சார்டினியன்",
|
||||||
|
"SCOTTISH_GAELIC": "Scottish Gaelic",
|
||||||
|
"SERBIAN": "செர்பியன்",
|
||||||
|
"SHONA": "சோனா",
|
||||||
|
"SICHUAN_YI": "Sichuan Yi",
|
||||||
|
"SINDHI": "சிந்தி",
|
||||||
|
"SINHALA": "Sinhala",
|
||||||
|
"SLOVAK": "சுலோவாக்",
|
||||||
|
"SLOVENIAN": "ச்லோவெனியன்",
|
||||||
|
"SOMALI": "சோமாலி",
|
||||||
|
"SOUTHERN_SOTHO": "Southern Sotho",
|
||||||
|
"SOUTH_NDEBELE": "South Ndebele",
|
||||||
|
"SPANISH": "Spanish",
|
||||||
|
"SUNDANESE": "சூடானீச்",
|
||||||
|
"SWAHILI": "ச்வாஇலி",
|
||||||
|
"SWATI": "ச்வாதி",
|
||||||
|
"SWEDISH": "சுவீடிச்",
|
||||||
|
"TAGALOG": "டங்லாக்",
|
||||||
|
"TAHITIAN": "தஇதியன்",
|
||||||
|
"TAJIK": "தாசிக்",
|
||||||
|
"TAMIL": "தமிழ்",
|
||||||
|
"TATAR": "டாட்டர்",
|
||||||
|
"TELUGU": "தெலுங்கு",
|
||||||
|
"THAI": "தாய்",
|
||||||
|
"TIBETAN": "திபெத்திய",
|
||||||
|
"TIGRINYA": "தைக்ரின்யா",
|
||||||
|
"TONGA": "Tonga",
|
||||||
|
"TSONGA": "ட்சாங்கோ",
|
||||||
|
"TSWANA": "ட்ச்வனா",
|
||||||
|
"TURKISH": "துருக்கி",
|
||||||
|
"TURKMEN": "டர்க்மென்",
|
||||||
|
"TWI": "டிவி",
|
||||||
|
"UIGHUR": "Uighur",
|
||||||
|
"UKRAINIAN": "உக்ரெனியன்",
|
||||||
|
"UNDEFINED": "undefined",
|
||||||
|
"URDU": "உருது",
|
||||||
|
"UZBEK": "உச்பெக்",
|
||||||
|
"VENDA": "வேண்டா",
|
||||||
|
"VIETNAMESE": "வியட்னாம்",
|
||||||
|
"VOLAPUK": "வோலாபுக்",
|
||||||
|
"WALLOON": "வாலூன்",
|
||||||
|
"WELSH": "வெல்ச்",
|
||||||
|
"WESTERN_FRISIAN": "மேற்கு ஃபிரிசியன்",
|
||||||
|
"WOLOF": "ஓலோஃப்",
|
||||||
|
"XHOSA": "சோசா",
|
||||||
|
"YIDDISH": "இட்டிச்",
|
||||||
|
"YORUBA": "யோருபா",
|
||||||
|
"ZHUANG": "Zhuang",
|
||||||
|
"ZULU": "சுலு"
|
||||||
|
},
|
||||||
|
"phrases": {
|
||||||
|
"5.0(side)": "5.0(side)",
|
||||||
|
"5.1(side)": "5.1(side)",
|
||||||
|
"6.1": "6.1",
|
||||||
|
"6ch": "6ch",
|
||||||
|
"7.1": "7.1",
|
||||||
|
"<New show>": "<புதிய தொடர்>",
|
||||||
|
"Add": "சேர்",
|
||||||
|
"Add Pattern": "வடிவத்தை சேர்",
|
||||||
|
"Apply": "பயன்படுத்து",
|
||||||
|
"Apply failed: {error}": "பயன்படுத்தல் தோல்வியடைந்தது: {error}",
|
||||||
|
"Are you sure to delete the following filename pattern?": "பின்வரும் கோப்பு பெயர் வடிவத்தை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete the following shifted season?": "பின்வரும் மாற்றிய சீசனை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete the following show?": "பின்வரும் தொடரை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete the following {track_type} track?": "பின்வரும் {track_type} ஸ்ட்ரீமை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Are you sure to delete this tag?": "இந்த குறிச்சொல்லை நீக்க விரும்புகிறீர்களா?",
|
||||||
|
"Audio Layout": "ஒலி அமைப்பு",
|
||||||
|
"Back": "பின்",
|
||||||
|
"Cancel": "ரத்து",
|
||||||
|
"Cannot add another stream with disposition flag 'default' or 'forced' set": "'default' அல்லது 'forced' disposition கொடி அமைந்த மற்றொரு ஸ்ட்ரீமை சேர்க்க முடியாது",
|
||||||
|
"Changes applied and file reloaded.": "மாற்றங்கள் பயன்படுத்தப்பட்டு கோப்பு மீளேற்றப்பட்டது.",
|
||||||
|
"Cleanup": "சுத்திகரிப்பு",
|
||||||
|
"Cleanup disabled.": "சுத்திகரிப்பு முடக்கப்பட்டது.",
|
||||||
|
"Cleanup enabled.": "சுத்திகரிப்பு இயக்கப்பட்டது.",
|
||||||
|
"Codec": "கோடெக்",
|
||||||
|
"Continuing edit session.": "திருத்த அமர்வு தொடர்கிறது.",
|
||||||
|
"Default": "இயல்புநிலை",
|
||||||
|
"Delete": "நீக்கு",
|
||||||
|
"Delete Show": "தொடரை நீக்கு",
|
||||||
|
"Deleted media tag {tag!r}.": "மீடியா குறிச்சொல் {tag!r} நீக்கப்பட்டது.",
|
||||||
|
"Differences": "வேறுபாடுகள்",
|
||||||
|
"Differences (file->db/output)": "வேறுபாடுகள் (கோப்பு->DB/வெளியீடு)",
|
||||||
|
"Discard": "கைவிடு",
|
||||||
|
"Discard pending metadata changes and quit?": "நிலுவையில் உள்ள மெட்டாடேட்டா மாற்றங்களை கைவிட்டு வெளியேறவா?",
|
||||||
|
"Discard pending metadata changes and reload the file state?": "நிலுவையில் உள்ள மெட்டாடேட்டா மாற்றங்களை கைவிட்டு கோப்பு நிலையை மீளேற்றவா?",
|
||||||
|
"Down": "கீழ்",
|
||||||
|
"Dry-run: would rewrite via temporary file {target_path}": "Dry-run: தற்காலிக கோப்பு {target_path} வழியாக மறுஎழுதப்படும்",
|
||||||
|
"Edit": "திருத்து",
|
||||||
|
"Edit Pattern": "வடிவத்தை திருத்து",
|
||||||
|
"Edit Show": "தொடரை திருத்து",
|
||||||
|
"Edit filename pattern": "கோப்பு பெயர் வடிவத்தை திருத்து",
|
||||||
|
"Edit shifted season": "மாற்றிய சீசனை திருத்து",
|
||||||
|
"Edit stream": "ஸ்ட்ரீமை திருத்து",
|
||||||
|
"Episode Offset": "அத்தியாய இடச்சரிவு",
|
||||||
|
"Episode offset": "அத்தியாய இடச்சரிவு",
|
||||||
|
"File": "கோப்பு",
|
||||||
|
"File patterns": "கோப்பு வடிவங்கள்",
|
||||||
|
"First Episode": "முதல் அத்தியாயம்",
|
||||||
|
"First episode": "முதல் அத்தியாயம்",
|
||||||
|
"Forced": "கட்டாயம்",
|
||||||
|
"Help": "உதவி",
|
||||||
|
"Help Screen": "உதவி திரை",
|
||||||
|
"ID": "அடையாளம்",
|
||||||
|
"Identify": "அடையாளம் காட்டு",
|
||||||
|
"Index": "சுட்டி",
|
||||||
|
"Index / Subindex": "சுட்டி / துணைச்சுட்டி",
|
||||||
|
"Index Episode Digits": "அத்தியாய சுட்டி இலக்கங்கள்",
|
||||||
|
"Index Season Digits": "சீசன் சுட்டி இலக்கங்கள்",
|
||||||
|
"Indicator Edisode Digits": "அத்தியாய குறியீட்டு இலக்கங்கள்",
|
||||||
|
"Indicator Season Digits": "சீசன் குறியீட்டு இலக்கங்கள்",
|
||||||
|
"Keep Editing": "திருத்தலை தொடரு",
|
||||||
|
"Keeping pending changes.": "நிலுவையில் உள்ள மாற்றங்கள் வைக்கப்படுகின்றன.",
|
||||||
|
"Key": "சாவி",
|
||||||
|
"Language": "மொழி",
|
||||||
|
"Last Episode": "கடைசி அத்தியாயம்",
|
||||||
|
"Last episode": "கடைசி அத்தியாயம்",
|
||||||
|
"Layout": "அமைப்பு",
|
||||||
|
"Media Tags": "மீடியா குறிச்சொற்கள்",
|
||||||
|
"More than one default audio stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட இயல்புநிலை ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one default audio stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட இயல்புநிலை ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one default subtitle stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one default subtitle stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one default video stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one default video stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட இயல்புநிலை வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one forced audio stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட கட்டாய ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one forced audio stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட கட்டாய ஒலி ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one forced subtitle stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட கட்டாய வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one forced subtitle stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட கட்டாய வசன ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"More than one forced video stream detected and no prompt set": "ஒருக்கும் மேற்பட்ட கட்டாய வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன, மேலும் எந்த prompt-வும் அமைக்கப்படவில்லை",
|
||||||
|
"More than one forced video stream detected! Please select stream": "ஒருக்கும் மேற்பட்ட கட்டாய வீடியோ ஸ்ட்ரீம்கள் கண்டறியப்பட்டன! ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்",
|
||||||
|
"Name": "பெயர்",
|
||||||
|
"New Pattern": "புதிய வடிவம்",
|
||||||
|
"New Show": "புதிய தொடர்",
|
||||||
|
"New filename pattern": "புதிய கோப்பு பெயர் வடிவம்",
|
||||||
|
"New shifted season": "புதிய மாற்றிய சீசன்",
|
||||||
|
"New stream": "புதிய ஸ்ட்ரீம்",
|
||||||
|
"No": "இல்லை",
|
||||||
|
"No changes to apply.": "பயன்படுத்த மாற்றங்கள் இல்லை.",
|
||||||
|
"No changes to revert.": "மீட்டெடுக்க மாற்றங்கள் இல்லை.",
|
||||||
|
"Normalization disabled.": "சீரமைப்பு முடக்கப்பட்டது.",
|
||||||
|
"Normalization enabled.": "சீரமைப்பு இயக்கப்பட்டது.",
|
||||||
|
"Normalize": "சீரமை",
|
||||||
|
"Notes": "குறிப்புகள்",
|
||||||
|
"Pattern": "வடிவம்",
|
||||||
|
"Planned Changes (file->edited output)": "திட்டமிட்ட மாற்றங்கள் (கோப்பு->திருத்திய வெளியீடு)",
|
||||||
|
"Quality": "தரம்",
|
||||||
|
"Quit": "வெளியேறு",
|
||||||
|
"Remove Pattern": "வடிவத்தை நீக்கு",
|
||||||
|
"Revert": "மீட்டு",
|
||||||
|
"Reverted pending changes.": "நிலுவையில் உள்ள மாற்றங்கள் மீட்டெடுக்கப்பட்டன.",
|
||||||
|
"Save": "சேமி",
|
||||||
|
"Season Offset": "சீசன் இடச்சரிவு",
|
||||||
|
"Select a stream first.": "முதலில் ஒரு ஸ்ட்ரீமைத் தேர்ந்தெடுக்கவும்.",
|
||||||
|
"Set Default": "இயல்புநிலையாக அமை",
|
||||||
|
"Set Forced": "கட்டாயமாக அமை",
|
||||||
|
"Settings Screen": "அமைப்புகள் திரை",
|
||||||
|
"Numbering Mapping": "மாற்றிய சீசன்கள்",
|
||||||
|
"Show": "தொடர்",
|
||||||
|
"Shows": "தொடர்கள்",
|
||||||
|
"Source Season": "மூல சீசன்",
|
||||||
|
"SrcIndex": "மூலச் சுட்டி",
|
||||||
|
"Status": "நிலை",
|
||||||
|
"Stay": "இரு",
|
||||||
|
"Stream dispositions": "ஸ்ட்ரீம் disposition-கள்",
|
||||||
|
"Stream tags": "ஸ்ட்ரீம் குறிச்சொற்கள்",
|
||||||
|
"Streams": "ஸ்ட்ரீம்கள்",
|
||||||
|
"SubIndex": "துணைச்சுட்டி",
|
||||||
|
"Substitute": "மாற்று",
|
||||||
|
"Substitute pattern": "வடிவத்தை மாற்று",
|
||||||
|
"Title": "தலைப்பு",
|
||||||
|
"Type": "வகை",
|
||||||
|
"Unable to update selected stream.": "தேர்ந்தெடுக்கப்பட்ட ஸ்ட்ரீமைப் புதுப்பிக்க முடியவில்லை.",
|
||||||
|
"Up": "மேல்",
|
||||||
|
"Update Pattern": "வடிவத்தை புதுப்பி",
|
||||||
|
"Updated media tag {tag!r}.": "மீடியா குறிச்சொல் {tag!r} புதுப்பிக்கப்பட்டது.",
|
||||||
|
"Updated stream #{index} ({track_type}).": "ஸ்ட்ரீம் #{index} ({track_type}) புதுப்பிக்கப்பட்டது.",
|
||||||
|
"Value": "மதிப்பு",
|
||||||
|
"Year": "ஆண்டு",
|
||||||
|
"Yes": "ஆம்",
|
||||||
|
"add media tag: key='{key}' value='{value}'": "மீடியா குறிச்சொல் சேர்: key='{key}' value='{value}'",
|
||||||
|
"add {track_type} track: index={index} lang={language}": "{track_type} ஸ்ட்ரீம் சேர்: index={index} lang={language}",
|
||||||
|
"attached_pic": "attached_pic",
|
||||||
|
"attachment": "இணைப்பு",
|
||||||
|
"audio": "ஒலி",
|
||||||
|
"captions": "உரைப்பதிவுகள்",
|
||||||
|
"change media tag: key='{key}' value='{value}'": "மீடியா குறிச்சொல் மாற்று: key='{key}' value='{value}'",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) disposition சேர்={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) key சேர்={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) key மாற்று={key} value={value}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) disposition நீக்கு={disposition}",
|
||||||
|
"change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}": "ஸ்ட்ரீம் #{index} ({track_type}:{sub_index}) key நீக்கு={key} value={value}",
|
||||||
|
"clean_effects": "ஒலி விளைவுகள் மட்டும்",
|
||||||
|
"comment": "கருத்துரை",
|
||||||
|
"default": "இயல்புநிலை",
|
||||||
|
"dependent": "சார்ந்த",
|
||||||
|
"descriptions": "விளக்கங்கள்",
|
||||||
|
"dub": "டப்",
|
||||||
|
"for pattern": "வடிவத்திற்கு",
|
||||||
|
"forced": "கட்டாயம்",
|
||||||
|
"from": "இருந்து",
|
||||||
|
"from pattern": "வடிவத்திலிருந்து",
|
||||||
|
"from show": "தொடரிலிருந்து",
|
||||||
|
"hearing_impaired": "கேள்வித்திறன் குறைபாடு",
|
||||||
|
"karaoke": "கரோக்கே",
|
||||||
|
"lyrics": "பாடல்வரிகள்",
|
||||||
|
"metadata": "மெட்டாடேட்டா",
|
||||||
|
"non_diegetic": "அல்லாத-டைஜெடிக்",
|
||||||
|
"original": "மூலம்",
|
||||||
|
"pattern #{id}": "வடிவு #{id}",
|
||||||
|
"remove media tag: key='{key}' value='{value}'": "மீடியா குறிச்சொல் நீக்கு: key='{key}' value='{value}'",
|
||||||
|
"remove stream #{index}": "ஸ்ட்ரீம் #{index} நீக்கு",
|
||||||
|
"show #{id}": "தொடர் #{id}",
|
||||||
|
"stereo": "ஸ்டீரியோ",
|
||||||
|
"still_image": "நிலைப்படம்",
|
||||||
|
"sub index": "துணைச்சுட்டி",
|
||||||
|
"subtitle": "வசனம்",
|
||||||
|
"timed_thumbnails": "நேர நிர்ணய சிறுபடங்கள்",
|
||||||
|
"undefined": "வரையறுக்கப்படாத",
|
||||||
|
"unknown": "தெரியாத",
|
||||||
|
"video": "வீடியோ",
|
||||||
|
"visual_impaired": "பார்வைத்திறன் குறைபாடு"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,513 +0,0 @@
|
|||||||
#! /usr/bin/python3
"""ffx: click-based CLI wrapper around ffmpeg/ffprobe for re-encoding media files."""

import os, sys, subprocess, json, click, time

from textual.app import App, ComposeResult
from textual.screen import Screen
from textual.widgets import Header, Footer, Placeholder


VERSION='0.1.0'

# Encoder defaults.
DEFAULT_VIDEO_ENCODER = 'vp9'
DEFAULT_QUALITY = 23       # CRF-style quality value for the VP9 encoder
DEFAULT_AV1_PRESET = 5     # speed/quality preset for the AV1 encoder

# Output naming defaults.
DEFAULT_LABEL='output'
DEFAULT_FILE_SUFFIX = 'webm'

# Audio bitrates in kbit/s. Kept as strings because they are passed
# verbatim as ffmpeg CLI tokens (see generateAudioTokens).
DEFAULT_STEREO_BANDWIDTH = "128"
DEFAULT_AC3_BANDWIDTH = "256"   # used for 5.1 layouts
DEFAULT_DTS_BANDWIDTH = "320"   # used for 6.1 layouts

# Sample-crop defaults in seconds (start offset and clip length).
DEFAULT_CROP_START = 60
DEFAULT_CROP_LENGTH = 180

# Log file name ffmpeg writes during two-pass encoding
# (presumably cleaned up after pass 2 — confirm with the encode command).
TEMP_FILE_NAME = "ffmpeg2pass-0.log"


# Per-stream statistics tags written by mkvmerge; generateClearTokens emits
# "-metadata ... <key>=" pairs to blank these, since they describe the
# *source* streams and would be stale after a re-encode.
MKVMERGE_METADATA_KEYS = ['BPS',
                          'NUMBER_OF_FRAMES',
                          'NUMBER_OF_BYTES',
                          '_STATISTICS_WRITING_APP',
                          '_STATISTICS_WRITING_DATE_UTC',
                          '_STATISTICS_TAGS']

# Container extensions recognized as candidate input files.
FILE_EXTENSION = ['mkv', 'mp4', 'avi', 'flv', 'webm']


# Base ffmpeg invocation prefix and the null-muxer tail
# (NULL_TOKENS presumably terminates the first pass of a 2-pass encode).
COMMAND_TOKENS = ['ffmpeg', '-y', '-i']
NULL_TOKENS = ['-f', 'null', '/dev/null']

# ffprobe 'codec_type' values used to classify streams.
STREAM_TYPE_VIDEO = 'video'
STREAM_TYPE_AUDIO = 'audio'
STREAM_TYPE_SUBTITLE = 'subtitle'

# Channel-layout identifiers as reported by ffprobe.
STREAM_LAYOUT_6_1 = '6.1'
STREAM_LAYOUT_5_1 = '5.1(side)'
STREAM_LAYOUT_STEREO = 'stereo'
STREAM_LAYOUT_6CH = '6ch'       # synthetic label for 6 channels w/o reported layout
class DashboardScreen(Screen):
    """Dashboard mode screen: clock header, placeholder body, footer.

    Registers itself in the app-wide context dict under the 'dashboard'
    key at construction time.
    """

    def __init__(self):
        super().__init__()
        # NOTE(review): relies on self.app being resolvable during __init__;
        # works when the screen is instantiated by a running Textual App.
        self.app.getContext()['dashboard'] = 'dashboard'

    def compose(self) -> ComposeResult:
        yield from (
            Header(show_clock=True),
            Placeholder("Dashboard Screen"),
            Footer(),
        )
|
|
||||||
class SettingsScreen(Screen):
    """Settings mode screen: placeholder body plus footer.

    NOTE(review): unlike DashboardScreen this renders no Header —
    presumably intentional, confirm with the UI design.
    """

    def __init__(self):
        # The previous implementation fetched the app context into an unused
        # local; the dead assignment was removed (getContext only returns
        # the context dict, so dropping the call changes no behavior).
        super().__init__()

    def compose(self) -> ComposeResult:
        yield Placeholder("Settings Screen")
        yield Footer()
||||||
|
|
||||||
class HelpScreen(Screen):
    """Help mode screen: placeholder body plus footer.

    NOTE(review): unlike DashboardScreen this renders no Header —
    presumably intentional, confirm with the UI design.
    """

    def __init__(self):
        # The previous implementation fetched the app context into an unused
        # local; the dead assignment was removed (getContext only returns
        # the context dict, so dropping the call changes no behavior).
        super().__init__()

    def compose(self) -> ComposeResult:
        yield Placeholder("Help Screen")
        yield Footer()
||||||
|
|
||||||
class ModesApp(App):
    """Top-level Textual application switching between dashboard/settings/help modes."""

    BINDINGS = [
        ("d", "switch_mode('dashboard')", "Dashboard"),
        ("s", "switch_mode('settings')", "Settings"),
        ("h", "switch_mode('help')", "Help"),
    ]

    MODES = {
        "dashboard": DashboardScreen,
        "settings": SettingsScreen,
        "help": HelpScreen,
    }

    def __init__(self, context=None):
        """Create the app.

        context: shared mutable state dict handed to screens via getContext().
        Defaults to a fresh dict per instance — the previous ``context={}``
        default was a shared mutable default argument, so all apps created
        without an explicit context silently shared (and mutated) one dict.
        """
        super().__init__()
        self.context = {} if context is None else context

    def on_mount(self) -> None:
        # Start in dashboard mode once the app is ready.
        self.switch_mode("dashboard")

    def getContext(self):
        """Return the shared context dict (live reference, not a copy)."""
        return self.context
|
|
||||||
|
|
||||||
|
|
||||||
def executeProcess(commandSequence):
    """Run *commandSequence* and return its captured stdout as bytes.

    stderr is captured and discarded, and a non-zero exit status is
    deliberately ignored: callers treat the tool invocation as
    best-effort and parse whatever stdout was produced (matching the
    original Popen/communicate behavior, but without leaving pipe
    handles open on the Popen object).
    """
    completed = subprocess.run(
        commandSequence,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    return completed.stdout
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#[{'index': 0, 'codec_name': 'vp9', 'codec_long_name': 'Google VP9', 'profile': 'Profile 0', 'codec_type': 'video', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'width': 1920, 'height': 1080, 'coded_width': 1920, 'coded_height': 1080, 'closed_captions': 0, 'film_grain': 0, 'has_b_frames': 0, 'sample_aspect_ratio': '1:1', 'display_aspect_ratio': '16:9', 'pix_fmt': 'yuv420p', 'level': -99, 'color_range': 'tv', 'chroma_location': 'left', 'field_order': 'progressive', 'refs': 1, 'r_frame_rate': '24000/1001', 'avg_frame_rate': '24000/1001', 'time_base': '1/1000', 'start_pts': 0, 'start_time': '0.000000', 'disposition': {'default': 1, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'BPS': '7974017', 'NUMBER_OF_FRAMES': '34382', 'NUMBER_OF_BYTES': '1429358655', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 'ENCODER': 'Lavc61.3.100 libvpx-vp9', 'DURATION': '00:23:54.016000000'}}]
|
|
||||||
#[{'index': 1, 'codec_name': 'opus', 'codec_long_name': 'Opus (Opus Interactive Audio Codec)', 'codec_type': 'audio', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'sample_fmt': 'fltp', 'sample_rate': '48000', 'channels': 2, 'channel_layout': 'stereo', 'bits_per_sample': 0, 'initial_padding': 312, 'r_frame_rate': '0/0', 'avg_frame_rate': '0/0', 'time_base': '1/1000', 'start_pts': -7, 'start_time': '-0.007000', 'extradata_size': 19, 'disposition': {'default': 1, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'language': 'jpn', 'title': 'Japanisch', 'BPS': '128000', 'NUMBER_OF_FRAMES': '61763', 'NUMBER_OF_BYTES': '22946145', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 'ENCODER': 'Lavc61.3.100 libopus', 'DURATION': '00:23:54.141000000'}}]
|
|
||||||
|
|
||||||
#[{'index': 2, 'codec_name': 'webvtt', 'codec_long_name': 'WebVTT subtitle', 'codec_type': 'subtitle', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'r_frame_rate': '0/0', 'avg_frame_rate': '0/0', 'time_base': '1/1000', 'start_pts': -7, 'start_time': '-0.007000', 'duration_ts': 1434141, 'duration': '1434.141000', 'disposition': {'default': 1, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'language': 'ger', 'title': 'Deutsch [Full]', 'BPS': '118', 'NUMBER_OF_FRAMES': '300', 'NUMBER_OF_BYTES': '21128', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 'ENCODER': 'Lavc61.3.100 webvtt', 'DURATION': '00:23:54.010000000'}}, {'index': 3, 'codec_name': 'webvtt', 'codec_long_name': 'WebVTT subtitle', 'codec_type': 'subtitle', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'r_frame_rate': '0/0', 'avg_frame_rate': '0/0', 'time_base': '1/1000', 'start_pts': -7, 'start_time': '-0.007000', 'duration_ts': 1434141, 'duration': '1434.141000', 'disposition': {'default': 0, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'language': 'eng', 'title': 'Englisch [Full]', 'BPS': '101', 'NUMBER_OF_FRAMES': '276', 'NUMBER_OF_BYTES': '16980', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 
'ENCODER': 'Lavc61.3.100 webvtt', 'DURATION': '00:23:53.230000000'}}]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getStreamDescriptor(filename):
    """Probe *filename* with ffprobe and return a flat list of stream descriptors.

    Each descriptor carries the absolute stream ``index``, a per-type
    ``sub_index`` (position within the streams of the same type), the
    stream ``type`` and the ``codec`` name.  Audio descriptors additionally
    carry ``channels`` and a best-effort ``layout``.

    Order of the result: all video streams, then all audio streams, then
    all subtitle streams (same as the original triplicated loops, which
    are deduplicated here into one type-driven loop using enumerate).
    """
    ffprobeOutput = executeProcess(["ffprobe",
                                    "-show_streams",
                                    "-of", "json",
                                    filename])
    streamData = json.loads(ffprobeOutput)['streams']

    descriptor = []
    for streamType in (STREAM_TYPE_VIDEO, STREAM_TYPE_AUDIO, STREAM_TYPE_SUBTITLE):
        typedStreams = [s for s in streamData if s['codec_type'] == streamType]
        for subIndex, d in enumerate(typedStreams):
            entry = {
                'index': d['index'],
                'sub_index': subIndex,
                'type': streamType,
                'codec': d['codec_name'],
            }
            if streamType == STREAM_TYPE_AUDIO:
                entry['channels'] = d['channels']
                # ffprobe omits channel_layout for some sources; fall back to
                # the synthetic '6ch' label for six channels, else 'undefined'.
                if 'channel_layout' in d:
                    entry['layout'] = d['channel_layout']
                elif d['channels'] == 6:
                    entry['layout'] = STREAM_LAYOUT_6CH
                else:
                    entry['layout'] = 'undefined'
            descriptor.append(entry)

    return descriptor
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def generateAV1Tokens(q, p):
    """Return ffmpeg tokens selecting libsvtav1 for the first video stream.

    q: CRF quality value; p: encoder preset. Both are interpolated into the
    svtav1-params string; pixel format is fixed to 10-bit 4:2:0.
    """
    svtParams = ":".join([
        f"crf={q}",
        f"preset={p}",
        "tune=0",
        "enable-overlays=1",
        "scd=1",
        "scm=0",
    ])
    return ['-c:v:0', 'libsvtav1',
            '-svtav1-params', svtParams,
            '-pix_fmt', 'yuv420p10le']
||||||
|
|
||||||
def generateVP9Pass1Tokens(q):
    """Return ffmpeg tokens for the analysis pass (pass 1) of a two-pass
    libvpx-vp9 encode at CRF quality *q*."""
    passOneOptions = {
        '-row-mt': '1',
        '-crf': str(q),
        '-pass': '1',
        '-speed': '4',            # fast settings: pass 1 only gathers stats
        '-frame-parallel': '0',
        '-g': '9999',
        '-aq-mode': '0',
    }
    tokens = ['-c:v:0', 'libvpx-vp9']
    for flag, value in passOneOptions.items():
        tokens += [flag, value]
    return tokens
||||||
|
|
||||||
def generateVP9Pass2Tokens(q):
    """Return ffmpeg tokens for the encoding pass (pass 2) of a two-pass
    libvpx-vp9 encode at CRF quality *q*."""
    passTwoOptions = {
        '-row-mt': '1',
        '-crf': str(q),
        '-pass': '2',
        '-frame-parallel': '0',
        '-g': '9999',
        '-aq-mode': '0',
        '-auto-alt-ref': '1',     # alt-ref frames only make sense in pass 2
        '-lag-in-frames': '25',
    }
    tokens = ['-c:v:0', 'libvpx-vp9']
    for flag, value in passTwoOptions.items():
        tokens += [flag, value]
    return tokens
||||||
|
|
||||||
|
|
||||||
def generateCropTokens(start, length):
    """Return ffmpeg seek/duration tokens extracting *length* seconds
    starting at *start* seconds."""
    tokens = []
    for flag, value in (('-ss', start), ('-t', length)):
        tokens.append(flag)
        tokens.append(str(value))
    return tokens
||||||
|
|
||||||
|
|
||||||
def generateDenoiseTokens(spatial=5, patch=7, research=7, hw=False):
    """Return ffmpeg video-filter tokens applying non-local-means denoising.

    spatial/patch/research map to the filter's s/p/r parameters;
    hw selects the OpenCL-accelerated variant of the filter.
    """
    if hw:
        baseFilter = 'nlmeans_opencl'
    else:
        baseFilter = 'nlmeans'
    return ['-vf', f"{baseFilter}=s={spatial}:p={patch}:r={research}"]
||||||
|
|
||||||
|
|
||||||
def generateOutputTokens(f, suffix, q=None):
    """Return the ffmpeg output-file tokens for base name *f*.

    When *q* is given, a "_q<q>" marker is appended to the base name so
    multi-quality runs produce distinct files. NOTE(review): the muxer is
    fixed to 'webm' regardless of *suffix* — confirm that is intended.
    """
    stem = f if q is None else f"{f}_q{q}"
    return ['-f', 'webm', f"{stem}.{suffix}"]
||||||
|
|
||||||
|
|
||||||
# preset = DEFAULT_AV1_PRESET
|
|
||||||
# presetTokens = [p for p in sys.argv if p.startswith('p=')]
|
|
||||||
# if presetTokens:
|
|
||||||
# preset = int(presetTokens[0].split('=')[1])
|
|
||||||
|
|
||||||
# cropStart = ''
|
|
||||||
# cropLength = ''
|
|
||||||
# cropTokens = [c for c in sys.argv if c.startswith('crop')]
|
|
||||||
# if cropTokens:
|
|
||||||
# if '=' in cropTokens[0]:
|
|
||||||
# cropString = cropTokens[0].split('=')[1]
|
|
||||||
# cropStart, cropLength = cropString.split(',')
|
|
||||||
# else:
|
|
||||||
# cropStart = 60
|
|
||||||
# cropLength = 180
|
|
||||||
#
|
|
||||||
# denoiseTokens = [d for d in sys.argv if d.startswith('denoise')]
|
|
||||||
#
|
|
||||||
|
|
||||||
# for aStream in audioStreams:
|
|
||||||
# if 'channel_layout' in aStream:
|
|
||||||
# print(f"audio stream: {aStream['channel_layout']}") #channel_layout
|
|
||||||
# else:
|
|
||||||
# print(f"unknown audio stream with {aStream['channels']} channels") #channel_layout
|
|
||||||
|
|
||||||
def generateAudioTokens(context, index, layout):
    """Return ffmpeg tokens re-encoding audio stream *index* with libopus.

    layout selects the channelmap filter and the bitrate (looked up in
    context['bitrates']); an unrecognized layout yields [] so the stream
    is left to ffmpeg's defaults.

    The original 5.1(side) and 6ch branches were byte-identical except
    for the layout constant, so they are merged here.
    """
    bitrates = context['bitrates']

    if layout == STREAM_LAYOUT_6_1:
        filterSpec = 'channelmap=channel_layout=6.1'
        bitrate = bitrates['dts']
    elif layout in (STREAM_LAYOUT_5_1, STREAM_LAYOUT_6CH):
        # Remap side channels to back channels for a standard 5.1 layout.
        filterSpec = "channelmap=FL-FL|FR-FR|FC-FC|LFE-LFE|SL-BL|SR-BR:5.1"
        bitrate = bitrates['ac3']
    elif layout == STREAM_LAYOUT_STEREO:
        filterSpec = None          # stereo needs no remapping
        bitrate = bitrates['stereo']
    else:
        return []

    tokens = [f"-c:a:{index}", 'libopus']
    if filterSpec is not None:
        tokens += [f"-filter:a:{index}", filterSpec]
    tokens += [f"-b:a:{index}", bitrate]
    return tokens
||||||
|
|
||||||
|
|
||||||
def generateClearTokens(streams):
    """Return '-metadata:s:<type-letter>:<sub_index> KEY=' token pairs that
    blank every mkvmerge statistics tag on every given stream descriptor."""
    return [
        token
        for stream in streams
        for metadataKey in MKVMERGE_METADATA_KEYS
        for token in (
            f"-metadata:s:{stream['type'][0]}:{stream['sub_index']}",
            f"{metadataKey}=",
        )
    ]
||||||
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
def ffx(ctx):
    """FFX command-line entry point.

    Initializes the shared click context object as an empty dict so that
    subcommands can stash state on ctx.obj. (A redundant trailing `pass`
    statement was removed.)
    """
    ctx.obj = {}
||||||
|
|
||||||
|
|
||||||
# Define a subcommand
|
|
||||||
@ffx.command()
def version():
    """Print the tool version string and exit."""
    click.echo(VERSION)
||||||
|
|
||||||
|
|
||||||
# Another subcommand
|
|
||||||
@ffx.command()
def help():
    """Print the banner and a usage summary.

    NOTE(review): the subcommand name shadows the `help` builtin at module
    level; kept because it defines the CLI command name. The usage line
    shows a positional-style invocation — confirm it matches the current
    option-based interface.
    """
    banner = f"ffx {VERSION}\n"
    usage = "Usage: ffx [input file] [output file] [vp9|av1] [q=[nn[,nn,...]]] [p=nn] [a=nnn[k]] [ac3=nnn[k]] [dts=nnn[k]] [crop]"
    click.echo(banner)
    click.echo(usage)
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.argument('filename', nargs=1)
def streams(filename):
    """List the streams of FILENAME, one codec per line.

    Audio streams additionally show their channel count in parentheses.
    The decorators were reordered so @ffx.command() is outermost, matching
    click convention and the other subcommands in this file (the original
    placed @click.argument above @ffx.command()).
    """
    for descriptor in getStreamDescriptor(filename):
        channelSuffix = f" ({descriptor['channels']})" if descriptor['type'] == 'audio' else ''
        click.echo(f"{descriptor['codec']}{channelSuffix}")
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('paths', nargs=-1)
@click.option('-l', '--label', type=str, default=DEFAULT_LABEL, help='Label to be used as filename prefix')
@click.option('-v', '--video-encoder', type=str, default=DEFAULT_VIDEO_ENCODER, help='Target video encoder (vp9 or av1) default: vp9')
@click.option('-q', '--quality', type=str, default=DEFAULT_QUALITY, help='Quality settings to be used with VP9 encoder (default: 23)')
# NOTE(review): default is DEFAULT_QUALITY (23) but the help text promises 5 —
# this should presumably be the AV1 preset default; confirm the constant exists
# in this revision before changing.
@click.option('-p', '--preset', type=str, default=DEFAULT_QUALITY, help='Quality preset to be used with AV1 encoder (default: 5)')
@click.option('-a', '--stereo-bitrate', type=int, default=DEFAULT_STEREO_BANDWIDTH, help='Bitrate in kbit/s to be used to encode stereo audio streams')
@click.option('-ac3', '--ac3-bitrate', type=int, default=DEFAULT_AC3_BANDWIDTH, help='Bitrate in kbit/s to be used to encode 5.1 audio streams')
@click.option('-dts', '--dts-bitrate', type=int, default=DEFAULT_DTS_BANDWIDTH, help='Bitrate in kbit/s to be used to encode 6.1 audio streams')
@click.option('-ds', '--default-subtitle', type=int, help='Index of default subtitle stream')
@click.option('-fa', '--forced-audio', type=int, help='Index of forced audio stream (including default audio stream tag)')
@click.option('-da', '--default-audio', type=int, help='Index of default audio stream')
@click.option("--crop", is_flag=False, flag_value="default", default="none")
@click.option("-c", "--clear-metadata", is_flag=True, default=False)
@click.option("-d", "--denoise", is_flag=True, default=False)
def convert(ctx, paths, label, video_encoder, quality, preset, stereo_bitrate, ac3_bitrate, dts_bitrate, crop, clear_metadata, default_subtitle, forced_audio, default_audio, denoise):
    """Batch conversion of audiovideo files in format suitable for web playback, e.g. jellyfin

    Files found under PATHS will be converted according to parameters.
    Filename extensions will be changed appropriately.
    Suffixes will be appended to the filename in case of multiple created files
    or if the filename has not changed."""

    # The former inline pipeline (~200 lines of commented-out dead code) has
    # been removed; conversion is now delegated to the textual ModesApp UI,
    # which receives the click context object as its working state.
    app = ModesApp(ctx.obj)
    app.run()

    click.echo(f"app result: {app.getContext()}")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Script entry point: dispatch to the click command group.
    ffx()
|
|
||||||
444
bin/ffx.py
444
bin/ffx.py
@@ -1,444 +0,0 @@
|
|||||||
#! /usr/bin/python3
|
|
||||||
|
|
||||||
import os, sys, subprocess, json, click, time
|
|
||||||
|
|
||||||
VERSION='0.1.0'
|
|
||||||
|
|
||||||
DEFAULT_VIDEO_ENCODER = 'vp9'
|
|
||||||
|
|
||||||
DEFAULT_QUALITY = 23
|
|
||||||
|
|
||||||
DEFAULT_AV1_PRESET = 5
|
|
||||||
|
|
||||||
DEFAULT_LABEL='output'
|
|
||||||
DEFAULT_FILE_SUFFIX = 'webm'
|
|
||||||
|
|
||||||
DEFAULT_STEREO_BANDWIDTH = "128"
|
|
||||||
DEFAULT_AC3_BANDWIDTH = "256"
|
|
||||||
DEFAULT_DTS_BANDWIDTH = "320"
|
|
||||||
|
|
||||||
DEFAULT_CROP_START = 60
|
|
||||||
DEFAULT_CROP_LENGTH = 180
|
|
||||||
|
|
||||||
TEMP_FILE_NAME = "ffmpeg2pass-0.log"
|
|
||||||
|
|
||||||
|
|
||||||
MKVMERGE_METADATA_KEYS = ['BPS',
|
|
||||||
'NUMBER_OF_FRAMES',
|
|
||||||
'NUMBER_OF_BYTES',
|
|
||||||
'_STATISTICS_WRITING_APP',
|
|
||||||
'_STATISTICS_WRITING_DATE_UTC',
|
|
||||||
'_STATISTICS_TAGS']
|
|
||||||
|
|
||||||
FILE_EXTENSION = ['mkv', 'mp4', 'avi', 'flv', 'webm']
|
|
||||||
|
|
||||||
|
|
||||||
COMMAND_TOKENS = ['ffmpeg', '-y', '-i']
|
|
||||||
NULL_TOKENS = ['-f', 'null', '/dev/null']
|
|
||||||
|
|
||||||
STREAM_TYPE_VIDEO = 'video'
|
|
||||||
STREAM_TYPE_AUDIO = 'audio'
|
|
||||||
STREAM_TYPE_SUBTITLE = 'subtitle'
|
|
||||||
|
|
||||||
STREAM_LAYOUT_6_1 = '6.1'
|
|
||||||
STREAM_LAYOUT_5_1 = '5.1(side)'
|
|
||||||
STREAM_LAYOUT_STEREO = 'stereo'
|
|
||||||
STREAM_LAYOUT_6CH = '6ch'
|
|
||||||
|
|
||||||
|
|
||||||
def executeProcess(commandSequence):
    """Run *commandSequence* as a subprocess and return its captured stdout bytes.

    stderr is captured as well but intentionally discarded, and non-zero exit
    codes are not raised here — callers only ever see the (possibly empty)
    stdout of the child process.
    """
    completed = subprocess.run(commandSequence,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    return completed.stdout
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#[{'index': 0, 'codec_name': 'vp9', 'codec_long_name': 'Google VP9', 'profile': 'Profile 0', 'codec_type': 'video', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'width': 1920, 'height': 1080, 'coded_width': 1920, 'coded_height': 1080, 'closed_captions': 0, 'film_grain': 0, 'has_b_frames': 0, 'sample_aspect_ratio': '1:1', 'display_aspect_ratio': '16:9', 'pix_fmt': 'yuv420p', 'level': -99, 'color_range': 'tv', 'chroma_location': 'left', 'field_order': 'progressive', 'refs': 1, 'r_frame_rate': '24000/1001', 'avg_frame_rate': '24000/1001', 'time_base': '1/1000', 'start_pts': 0, 'start_time': '0.000000', 'disposition': {'default': 1, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'BPS': '7974017', 'NUMBER_OF_FRAMES': '34382', 'NUMBER_OF_BYTES': '1429358655', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 'ENCODER': 'Lavc61.3.100 libvpx-vp9', 'DURATION': '00:23:54.016000000'}}]
|
|
||||||
#[{'index': 1, 'codec_name': 'opus', 'codec_long_name': 'Opus (Opus Interactive Audio Codec)', 'codec_type': 'audio', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'sample_fmt': 'fltp', 'sample_rate': '48000', 'channels': 2, 'channel_layout': 'stereo', 'bits_per_sample': 0, 'initial_padding': 312, 'r_frame_rate': '0/0', 'avg_frame_rate': '0/0', 'time_base': '1/1000', 'start_pts': -7, 'start_time': '-0.007000', 'extradata_size': 19, 'disposition': {'default': 1, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'language': 'jpn', 'title': 'Japanisch', 'BPS': '128000', 'NUMBER_OF_FRAMES': '61763', 'NUMBER_OF_BYTES': '22946145', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 'ENCODER': 'Lavc61.3.100 libopus', 'DURATION': '00:23:54.141000000'}}]
|
|
||||||
|
|
||||||
#[{'index': 2, 'codec_name': 'webvtt', 'codec_long_name': 'WebVTT subtitle', 'codec_type': 'subtitle', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'r_frame_rate': '0/0', 'avg_frame_rate': '0/0', 'time_base': '1/1000', 'start_pts': -7, 'start_time': '-0.007000', 'duration_ts': 1434141, 'duration': '1434.141000', 'disposition': {'default': 1, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'language': 'ger', 'title': 'Deutsch [Full]', 'BPS': '118', 'NUMBER_OF_FRAMES': '300', 'NUMBER_OF_BYTES': '21128', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 'ENCODER': 'Lavc61.3.100 webvtt', 'DURATION': '00:23:54.010000000'}}, {'index': 3, 'codec_name': 'webvtt', 'codec_long_name': 'WebVTT subtitle', 'codec_type': 'subtitle', 'codec_tag_string': '[0][0][0][0]', 'codec_tag': '0x0000', 'r_frame_rate': '0/0', 'avg_frame_rate': '0/0', 'time_base': '1/1000', 'start_pts': -7, 'start_time': '-0.007000', 'duration_ts': 1434141, 'duration': '1434.141000', 'disposition': {'default': 0, 'dub': 0, 'original': 0, 'comment': 0, 'lyrics': 0, 'karaoke': 0, 'forced': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'clean_effects': 0, 'attached_pic': 0, 'timed_thumbnails': 0, 'non_diegetic': 0, 'captions': 0, 'descriptions': 0, 'metadata': 0, 'dependent': 0, 'still_image': 0}, 'tags': {'language': 'eng', 'title': 'Englisch [Full]', 'BPS': '101', 'NUMBER_OF_FRAMES': '276', 'NUMBER_OF_BYTES': '16980', '_STATISTICS_WRITING_APP': "mkvmerge v63.0.0 ('Everything') 64-bit", '_STATISTICS_WRITING_DATE_UTC': '2023-10-07 13:59:46', '_STATISTICS_TAGS': 'BPS DURATION NUMBER_OF_FRAMES NUMBER_OF_BYTES', 
'ENCODER': 'Lavc61.3.100 webvtt', 'DURATION': '00:23:53.230000000'}}]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getStreamDescriptor(filename):
    """Probe *filename* with ffprobe and summarize its streams.

    Returns a list of dicts, grouped video-first, then audio, then subtitles.
    Each entry carries: 'index' (absolute stream index), 'sub_index'
    (position within its own type), 'type', 'codec'; audio entries also get
    'channels' and 'layout'.
    """
    probeOutput = executeProcess(["ffprobe",
                                  "-show_streams",
                                  "-of", "json",
                                  filename])

    streamData = json.loads(probeOutput)['streams']

    descriptor = []

    videoStreams = [s for s in streamData if s['codec_type'] == STREAM_TYPE_VIDEO]
    for subIndex, stream in enumerate(videoStreams):
        descriptor.append({
            'index': stream['index'],
            'sub_index': subIndex,
            'type': STREAM_TYPE_VIDEO,
            'codec': stream['codec_name'],
        })

    audioStreams = [s for s in streamData if s['codec_type'] == STREAM_TYPE_AUDIO]
    for subIndex, stream in enumerate(audioStreams):
        entry = {
            'index': stream['index'],
            'sub_index': subIndex,
            'type': STREAM_TYPE_AUDIO,
            'codec': stream['codec_name'],
            'channels': stream['channels'],
        }

        # Prefer the declared layout; guess "6ch" for bare 6-channel streams,
        # otherwise mark the layout as unknown.
        if 'channel_layout' in stream:
            entry['layout'] = stream['channel_layout']
        elif stream['channels'] == 6:
            entry['layout'] = STREAM_LAYOUT_6CH
        else:
            entry['layout'] = 'undefined'

        descriptor.append(entry)

    subtitleStreams = [s for s in streamData if s['codec_type'] == STREAM_TYPE_SUBTITLE]
    for subIndex, stream in enumerate(subtitleStreams):
        descriptor.append({
            'index': stream['index'],
            'sub_index': subIndex,
            'type': STREAM_TYPE_SUBTITLE,
            'codec': stream['codec_name'],
        })

    return descriptor
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def generateAV1Tokens(q, p):
    """ffmpeg arguments for a single-pass SVT-AV1 encode at crf *q*, preset *p*."""
    svtParams = f"crf={q}:preset={p}:tune=0:enable-overlays=1:scd=1:scm=0"
    tokens = ['-c:v:0', 'libsvtav1']
    tokens += ['-svtav1-params', svtParams]
    # 10-bit 4:2:0 output avoids banding on AV1.
    tokens += ['-pix_fmt', 'yuv420p10le']
    return tokens
|
|
||||||
|
|
||||||
def generateVP9Pass1Tokens(q):
    """ffmpeg arguments for the analysis pass (pass 1) of a two-pass VP9 encode at crf *q*."""
    optionPairs = (
        ('-c:v:0', 'libvpx-vp9'),
        ('-row-mt', '1'),
        ('-crf', str(q)),
        ('-pass', '1'),
        # Pass 1 only gathers statistics, so a fast speed setting is fine.
        ('-speed', '4'),
        ('-frame-parallel', '0'),
        ('-g', '9999'),
        ('-aq-mode', '0'),
    )
    return [token for pair in optionPairs for token in pair]
|
|
||||||
|
|
||||||
def generateVP9Pass2Tokens(q):
    """ffmpeg arguments for the encoding pass (pass 2) of a two-pass VP9 encode at crf *q*."""
    optionPairs = (
        ('-c:v:0', 'libvpx-vp9'),
        ('-row-mt', '1'),
        ('-crf', str(q)),
        ('-pass', '2'),
        ('-frame-parallel', '0'),
        ('-g', '9999'),
        ('-aq-mode', '0'),
        # Alternate reference frames + lookahead improve pass-2 quality.
        ('-auto-alt-ref', '1'),
        ('-lag-in-frames', '25'),
    )
    return [token for pair in optionPairs for token in pair]
|
|
||||||
|
|
||||||
|
|
||||||
def generateCropTokens(start, length):
    """Limit encoding to a sample window: seek to *start* seconds, encode *length* seconds."""
    return ['-ss', f"{start}", '-t', f"{length}"]
|
|
||||||
|
|
||||||
|
|
||||||
def generateDenoiseTokens(spatial=5, patch=7, research=7, hw=False):
    """Video-filter arguments applying non-local-means denoising.

    *spatial*, *patch* and *research* map to the filter's s/p/r options;
    when *hw* is true the OpenCL variant of the filter is selected.
    """
    if hw:
        selectedFilter = 'nlmeans_opencl'
    else:
        selectedFilter = 'nlmeans'
    return ['-vf', f"{selectedFilter}=s={spatial}:p={patch}:r={research}"]
|
|
||||||
|
|
||||||
|
|
||||||
def generateOutputTokens(f, suffix, q=None):
    """Output-format arguments plus the target filename.

    When *q* is given, a ``_q<q>`` tag is inserted before the suffix so
    multiple quality jobs do not overwrite each other.
    """
    qualityTag = '' if q is None else f"_q{q}"
    return ['-f', 'webm', f"{f}{qualityTag}.{suffix}"]
|
|
||||||
|
|
||||||
|
|
||||||
# preset = DEFAULT_AV1_PRESET
|
|
||||||
# presetTokens = [p for p in sys.argv if p.startswith('p=')]
|
|
||||||
# if presetTokens:
|
|
||||||
# preset = int(presetTokens[0].split('=')[1])
|
|
||||||
|
|
||||||
# cropStart = ''
|
|
||||||
# cropLength = ''
|
|
||||||
# cropTokens = [c for c in sys.argv if c.startswith('crop')]
|
|
||||||
# if cropTokens:
|
|
||||||
# if '=' in cropTokens[0]:
|
|
||||||
# cropString = cropTokens[0].split('=')[1]
|
|
||||||
# cropStart, cropLength = cropString.split(',')
|
|
||||||
# else:
|
|
||||||
# cropStart = 60
|
|
||||||
# cropLength = 180
|
|
||||||
#
|
|
||||||
# denoiseTokens = [d for d in sys.argv if d.startswith('denoise')]
|
|
||||||
#
|
|
||||||
|
|
||||||
# for aStream in audioStreams:
|
|
||||||
# if 'channel_layout' in aStream:
|
|
||||||
# print(f"audio stream: {aStream['channel_layout']}") #channel_layout
|
|
||||||
# else:
|
|
||||||
# print(f"unknown audio stream with {aStream['channels']} channels") #channel_layout
|
|
||||||
|
|
||||||
def generateAudioTokens(context, index, layout):
    """ffmpeg arguments re-encoding audio stream *index* to Opus.

    The channel *layout* selects a channelmap filter and the bitrate bucket
    from ``context['bitrates']``. Unknown layouts yield an empty token list
    so the stream falls through to ffmpeg defaults.

    The 5.1(side) and bare-6-channel branches were byte-identical duplicates
    and are consolidated into a single branch.
    """
    if layout == STREAM_LAYOUT_6_1:
        return [f"-c:a:{index}",
                'libopus',
                f"-filter:a:{index}",
                'channelmap=channel_layout=6.1',
                f"-b:a:{index}",
                context['bitrates']['dts']]

    # 5.1(side) and bare 6-channel streams get the same side->back remap,
    # since libopus only accepts the back-channel 5.1 layout.
    if layout in (STREAM_LAYOUT_5_1, STREAM_LAYOUT_6CH):
        return [f"-c:a:{index}",
                'libopus',
                f"-filter:a:{index}",
                "channelmap=FL-FL|FR-FR|FC-FC|LFE-LFE|SL-BL|SR-BR:5.1",
                f"-b:a:{index}",
                context['bitrates']['ac3']]

    if layout == STREAM_LAYOUT_STEREO:
        return [f"-c:a:{index}",
                'libopus',
                f"-b:a:{index}",
                context['bitrates']['stereo']]

    return []
|
|
||||||
|
|
||||||
|
|
||||||
def generateClearTokens(streams):
    """Build ffmpeg metadata-clearing arguments for every stream.

    For each stream descriptor, emit one ``-metadata:s:<type>:<sub_index> KEY=``
    pair per mkvmerge statistics key so stale tag values are blanked out.
    Returns a flat token list suitable for appending to an ffmpeg command.
    """
    return [
        token
        for descriptor in streams
        for key in MKVMERGE_METADATA_KEYS
        for token in (
            f"-metadata:s:{descriptor['type'][0]}:{descriptor['sub_index']}",
            f"{key}=",
        )
    ]
|
|
||||||
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
def ffx(ctx):
    """FFX command group; initializes the shared per-invocation context."""
    # Subcommands stash shared state (e.g. bitrates) in ctx.obj.
    # The trailing `pass` after this statement was dead code and is removed.
    ctx.obj = {}
|
|
||||||
|
|
||||||
|
|
||||||
# Define a subcommand
@ffx.command()
def version():
    """Print the FFX version string."""
    click.echo(VERSION)
|
|
||||||
|
|
||||||
|
|
||||||
# Another subcommand
# NOTE(review): the command name shadows the builtin `help`; kept because it is
# the public CLI subcommand name.
@ffx.command()
def help():
    """Print version and a usage summary for the legacy positional CLI."""
    click.echo(f"ffx {VERSION}\n")
    # Plain literal: the f-prefix was extraneous (no placeholders).
    click.echo("Usage: ffx [input file] [output file] [vp9|av1] [q=[nn[,nn,...]]] [p=nn] [a=nnn[k]] [ac3=nnn[k]] [dts=nnn[k]] [crop]")
|
|
||||||
|
|
||||||
|
|
||||||
# NOTE(review): @click.argument is applied outside @ffx.command(); click supports
# decorating Command objects, but the conventional order puts command() on top —
# confirm this ordering is intentional.
@click.argument('filename', nargs=1)
@ffx.command()
def streams(filename):
    """List the streams of FILENAME: codec name, plus channel count for audio."""
    for d in getStreamDescriptor(filename):
        # Audio rows get " (channels)" appended; other stream types print codec only.
        click.echo(f"{d['codec']}{' (' + str(d['channels']) + ')' if d['type'] == 'audio' else ''}")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ffx.command()
@click.pass_context
@click.argument('paths', nargs=-1)
@click.option('-l', '--label', type=str, default=DEFAULT_LABEL, help='Label to be used as filename prefix')
@click.option('-v', '--video-encoder', type=str, default=DEFAULT_VIDEO_ENCODER, help='Target video encoder (vp9 or av1) default: vp9')
@click.option('-q', '--quality', type=str, default=DEFAULT_QUALITY, help='Quality settings to be used with VP9 encoder (default: 23)')
# Fixed: default was DEFAULT_QUALITY (23) although the help text promises 5.
@click.option('-p', '--preset', type=str, default=DEFAULT_AV1_PRESET, help='Quality preset to be used with AV1 encoder (default: 5)')
@click.option('-a', '--stereo-bitrate', type=int, default=DEFAULT_STEREO_BANDWIDTH, help='Bitrate in kbit/s to be used to encode stereo audio streams')
@click.option('-ac3', '--ac3-bitrate', type=int, default=DEFAULT_AC3_BANDWIDTH, help='Bitrate in kbit/s to be used to encode 5.1 audio streams')
@click.option('-dts', '--dts-bitrate', type=int, default=DEFAULT_DTS_BANDWIDTH, help='Bitrate in kbit/s to be used to encode 6.1 audio streams')
@click.option('-ds', '--default-subtitle', type=int, help='Index of default subtitle stream')
@click.option('-fa', '--forced-audio', type=int, help='Index of forced audio stream (including default audio stream tag)')
@click.option('-da', '--default-audio', type=int, help='Index of default audio stream')
@click.option("--crop", is_flag=False, flag_value="default", default="none")
@click.option("-c", "--clear-metadata", is_flag=True, default=False)
@click.option("-d", "--denoise", is_flag=True, default=False)
def convert(ctx, paths, label, video_encoder, quality, preset, stereo_bitrate, ac3_bitrate, dts_bitrate, crop, clear_metadata, default_subtitle, forced_audio, default_audio, denoise):
    """Batch conversion of audiovideo files in format suitable for web playback, e.g. jellyfin

    Files found under PATHS will be converted according to parameters.
    Filename extensions will be changed appropriately.
    Suffixes will be appended to the filename in case of multiple created files
    or if the filename has not changed."""
    # Fixed: the '-fa/--forced-audio' option was declared but the function had
    # no matching `forced_audio` parameter, so click raised a TypeError on
    # every invocation. The parameter is now accepted (and currently unused,
    # like default_subtitle/default_audio).

    startTime = time.perf_counter()

    # Legacy positional interface: first path is the source, second the target.
    sourcePath = paths[0]
    targetFilename = paths[1]

    if not os.path.isfile(sourcePath):
        raise click.ClickException(f"There is no file with path {sourcePath}")

    click.echo(f"src: {sourcePath} tgt: {targetFilename}")

    click.echo(f"ve={video_encoder}")

    # Each numeric quality token spawns one encoding job.
    qualityTokens = quality.split(',')
    q_list = [q for q in qualityTokens if q.isnumeric()]
    click.echo(q_list)

    # Normalize bitrates to ffmpeg's "<n>k" form.
    ctx.obj['bitrates'] = {}
    ctx.obj['bitrates']['stereo'] = str(stereo_bitrate) if str(stereo_bitrate).endswith('k') else f"{stereo_bitrate}k"
    ctx.obj['bitrates']['ac3'] = str(ac3_bitrate) if str(ac3_bitrate).endswith('k') else f"{ac3_bitrate}k"
    ctx.obj['bitrates']['dts'] = str(dts_bitrate) if str(dts_bitrate).endswith('k') else f"{dts_bitrate}k"

    click.echo(f"a={ctx.obj['bitrates']['stereo']}")
    click.echo(f"ac3={ctx.obj['bitrates']['ac3']}")
    click.echo(f"dts={ctx.obj['bitrates']['dts']}")

    performCrop = (crop != 'none')

    if performCrop:
        # "--crop start,length" overrides; bare "--crop" uses the defaults.
        cropTokens = crop.split(',')
        if cropTokens and len(cropTokens) == 2:
            cropStart, cropLength = cropTokens
        else:
            cropStart = DEFAULT_CROP_START
            cropLength = DEFAULT_CROP_LENGTH

        click.echo(f"crop start={cropStart} length={cropLength}")

    click.echo(f"\nRunning {len(q_list)} jobs")

    streamDescriptor = getStreamDescriptor(sourcePath)

    commandTokens = COMMAND_TOKENS + [sourcePath]

    for q in q_list:

        click.echo(f"\nRunning job q={q}")

        mappingVideoTokens = ['-map', 'v:0']
        mappingTokens = mappingVideoTokens.copy()
        audioTokens = []

        # Map every audio stream and collect its re-encoding arguments.
        audioIndex = 0
        for audioStreamDescriptor in streamDescriptor:
            if audioStreamDescriptor['type'] == STREAM_TYPE_AUDIO:
                mappingTokens += ['-map', f"a:{audioIndex}"]
                audioTokens += generateAudioTokens(ctx.obj, audioIndex, audioStreamDescriptor['layout'])
                audioIndex += 1

        for s in range(len([d for d in streamDescriptor if d['type'] == STREAM_TYPE_SUBTITLE])):
            mappingTokens += ['-map', f"s:{s}"]

        if video_encoder == 'av1':

            # Single-pass AV1 encode.
            # Fixed: audioTokens was previously appended twice (before and
            # after the AV1 tokens), duplicating every audio argument.
            commandSequence = commandTokens + mappingTokens + generateAV1Tokens(q, preset) + audioTokens

            if clear_metadata:
                commandSequence += generateClearTokens(streamDescriptor)

            if performCrop:
                commandSequence += generateCropTokens(cropStart, cropLength)

            commandSequence += generateOutputTokens(targetFilename, DEFAULT_FILE_SUFFIX, q)

            click.echo(f"Command: {' '.join(commandSequence)}")

            executeProcess(commandSequence)

        if video_encoder == 'vp9':

            # Two-pass VP9: pass 1 analyzes (video only), pass 2 writes output.
            commandSequence1 = commandTokens + mappingVideoTokens + generateVP9Pass1Tokens(q)

            if performCrop:
                commandSequence1 += generateCropTokens(cropStart, cropLength)

            commandSequence1 += NULL_TOKENS

            click.echo(f"Command 1: {' '.join(commandSequence1)}")

            # Remove stale pass-1 statistics before re-running.
            if os.path.exists(TEMP_FILE_NAME):
                os.remove(TEMP_FILE_NAME)

            executeProcess(commandSequence1)

            commandSequence2 = commandTokens + mappingTokens

            if denoise:
                commandSequence2 += generateDenoiseTokens()

            commandSequence2 += generateVP9Pass2Tokens(q) + audioTokens

            if clear_metadata:
                commandSequence2 += generateClearTokens(streamDescriptor)

            if performCrop:
                commandSequence2 += generateCropTokens(cropStart, cropLength)

            commandSequence2 += generateOutputTokens(targetFilename, DEFAULT_FILE_SUFFIX, q)

            click.echo(f"Command 2: {' '.join(commandSequence2)}")

            executeProcess(commandSequence2)

    click.echo('\nDONE\n')

    endTime = time.perf_counter()
    click.echo(f"Time elapsed {endTime - startTime}")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Script entry point: dispatch to the click command group.
    ffx()
|
|
||||||
28
guidance/workflow/optional/lean-interface-iteration.md
Normal file
28
guidance/workflow/optional/lean-interface-iteration.md
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# Lean Interface Iteration
|
||||||
|
|
||||||
|
Rule set name: `lean-interface-iteration`
|
||||||
|
|
||||||
|
Rule set ID: `LII`
|
||||||
|
|
||||||
|
Status: optional, prompt-activated only
|
||||||
|
|
||||||
|
Trigger examples:
|
||||||
|
|
||||||
|
- `Apply the lean-interface-iteration rules.`
|
||||||
|
- `Apply LII rules.`
|
||||||
|
|
||||||
|
LII-0001: Apply this rule set only when it is explicitly requested in the prompt.
|
||||||
|
|
||||||
|
LII-0002: The target of work under this rule set is the iterated product state for the addressed iteration only.
|
||||||
|
|
||||||
|
LII-0003: Optimize the addressed interface toward the leanest and least complex model that still satisfies the iteration order.
|
||||||
|
|
||||||
|
LII-0004: Backward compatibility, legacy aliases, and compatibility shims are not required unless the prompt explicitly asks to preserve them.
|
||||||
|
|
||||||
|
LII-0005: Prefer one authoritative interface over multiple overlapping parameters, flags, or naming variants.
|
||||||
|
|
||||||
|
LII-0006: Remove or avoid transitional interface layers when they are not required by the addressed iteration order.
|
||||||
|
|
||||||
|
LII-0007: Update affected tests, guidance, requirements, and documentation so they describe the simplified interface model rather than a mixed legacy-and-new model.
|
||||||
|
|
||||||
|
LII-0008: Never change behavior, interfaces, or surrounding areas that are not addressed by the current iteration order.
|
||||||
56
guidance/workflow/optional/preparation-script-design.md
Normal file
56
guidance/workflow/optional/preparation-script-design.md
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
# Preparation Script Design
|
||||||
|
|
||||||
|
Rule set name: `preparation-script-design`
|
||||||
|
|
||||||
|
Rule set ID: `PSD`
|
||||||
|
|
||||||
|
Status: optional, prompt-activated only
|
||||||
|
|
||||||
|
Trigger examples:
|
||||||
|
|
||||||
|
- `Apply the preparation-script-design rules.`
|
||||||
|
- `Apply PSD rules.`
|
||||||
|
|
||||||
|
PSD-0001: Apply this rule set only when it is explicitly requested in the prompt.
|
||||||
|
|
||||||
|
PSD-0002: Use this rule set for scripts whose purpose is to prepare, verify, or expose a local development or automation environment rather than to perform product runtime behavior.
|
||||||
|
|
||||||
|
PSD-0003: Keep a preparation script focused on environment readiness, dependency installation, local helper exposure, and clear verification output; do not mix unrelated product logic into the script.
|
||||||
|
|
||||||
|
PSD-0004: Design the script to be idempotent so repeated runs converge on the same prepared state without unnecessary reinstallation or destructive side effects.
|
||||||
|
|
||||||
|
PSD-0005: Provide a verification-only mode such as `--check` that reports readiness without installing, modifying, or creating dependencies.
|
||||||
|
|
||||||
|
PSD-0006: Separate component checks from installation steps so the script can report what is missing before or after attempted remediation.
|
||||||
|
|
||||||
|
PSD-0007: Group required capabilities into clear purpose-oriented sections such as support toolchains, local package bundles, generated environment helpers, or other relevant readiness areas instead of presenting one undifferentiated dependency list.
|
||||||
|
|
||||||
|
PSD-0008: Prefer explicit per-component check helpers over opaque one-shot checks so failures remain traceable and easy to extend.
|
||||||
|
|
||||||
|
PSD-0009: Generate or update environment helper files only when they provide a stable, reusable way to expose repo-local or workspace-local tools, paths, or environment variables.
|
||||||
|
|
||||||
|
PSD-0010: Generated environment helper files shall be safe to source multiple times and should avoid duplicating path entries or clobbering unrelated user environment state.
|
||||||
|
|
||||||
|
PSD-0011: When a preparation flow seeds optional user-owned files such as config templates, do so non-destructively by creating them only when absent unless the prompt explicitly requests overwrite behavior.
|
||||||
|
|
||||||
|
PSD-0012: Report status in a concise scan-friendly line format of the shape `[status] Label: detail`, where the label names the checked component and the detail string stays short and specific.
|
||||||
|
|
||||||
|
PSD-0013: Prefer a small canonical status vocabulary in those report lines, with `ok` for satisfied checks, `warn` for non-blocking gaps, and a failure status such as `failed` for blocking or unsuccessful states.
|
||||||
|
|
||||||
|
PSD-0014: When a preparation script uses terminal colors in its status output, apply a consistent severity mapping so `ok` is green, `warn` is yellow, and all other status levels are red.
|
||||||
|
|
||||||
|
PSD-0015: In bracketed status markers such as `[ok]` or `[warn]`, keep the square brackets uncolored and apply the severity color only to the inner status text.
|
||||||
|
|
||||||
|
PSD-0016: Colorized status output shall degrade safely in non-terminal or non-color contexts so the script remains readable and automation-friendly without ANSI support.
|
||||||
|
|
||||||
|
PSD-0017: End with an explicit readiness conclusion that distinguishes between successful preparation, incomplete prerequisites, and failed installation attempts.
|
||||||
|
|
||||||
|
PSD-0018: Installation logic should use the narrowest supported platform-specific package-manager actions necessary for the declared scope and should fail clearly when no supported installation path is available.
|
||||||
|
|
||||||
|
PSD-0019: Treat repo-local helper tooling and local package installation boundaries explicitly rather than assuming global installs, especially when the prepared environment is intended to be reproducible.
|
||||||
|
|
||||||
|
PSD-0020: Keep the script suitable for both interactive local developer use and non-interactive automation checks by avoiding prompts during normal execution unless the prompt explicitly requires interactivity.
|
||||||
|
|
||||||
|
PSD-0021: When a script depends on generated helper files or adjacent validation helpers, update those supporting files only as needed to keep the preparation flow coherent and usable.
|
||||||
|
|
||||||
|
PSD-0022: Verify shell syntax after changes and, when feasible, run a dry readiness check so the resulting preparation flow is validated rather than only written.
|
||||||
54
pyproject.toml
Normal file
54
pyproject.toml
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
[project]
|
||||||
|
name = "ffx"
|
||||||
|
description = "FFX recoding and metadata managing tool"
|
||||||
|
version = "0.2.6"
|
||||||
|
license = {file = "LICENSE.md"}
|
||||||
|
dependencies = [
|
||||||
|
"requests",
|
||||||
|
"jinja2",
|
||||||
|
"click",
|
||||||
|
"textual",
|
||||||
|
"sqlalchemy",
|
||||||
|
]
|
||||||
|
readme = {file = "README.md", content-type = "text/markdown"}
|
||||||
|
authors = [
|
||||||
|
{name = "Marius", email = "javanaut@maveno.de"}
|
||||||
|
]
|
||||||
|
maintainers = [
|
||||||
|
{name = "Marius", email = "javanaut@maveno.de"}
|
||||||
|
]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Programming Language :: Python"
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://gitea.maveno.de/Javanaut/ffx"
|
||||||
|
Repository = "https://gitea.maveno.de/Javanaut/ffx.git"
|
||||||
|
Issues = "https://gitea.maveno.de/Javanaut/ffx/issues"
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
test = [
|
||||||
|
"pytest",
|
||||||
|
]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = [
|
||||||
|
"setuptools",
|
||||||
|
"wheel"
|
||||||
|
]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
ffx = "ffx.cli:ffx"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
python_files = ["test_*.py"]
|
||||||
|
norecursedirs = ["tests/legacy", "tests/support"]
|
||||||
|
addopts = "-ra"
|
||||||
|
markers = [
|
||||||
|
"integration: exercises the FFX bundle with real ffmpeg/ffprobe processes",
|
||||||
|
"pattern_management: covers requirements/pattern_management.md",
|
||||||
|
"subtrack_mapping: covers requirements/subtrack_mapping.md",
|
||||||
|
]
|
||||||
98
requirements/architecture.md
Normal file
98
requirements/architecture.md
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
# Architecture
|
||||||
|
|
||||||
|
## Architecture Goals
|
||||||
|
|
||||||
|
- Keep the tool small, local, and easy to reason about.
|
||||||
|
- Separate media inspection, stored normalization rules, and conversion execution clearly enough that users can inspect and adjust behavior.
|
||||||
|
- Favor explicit local state and deterministic rule application over opaque automation.
|
||||||
|
- Make external runtime dependencies and platform assumptions visible.
|
||||||
|
|
||||||
|
## System Context
|
||||||
|
|
||||||
|
- Primary actors:
|
||||||
|
- Local operator running the CLI.
|
||||||
|
- Local operator using the Textual TUI to inspect files and maintain rules.
|
||||||
|
- External systems:
|
||||||
|
- `ffprobe` for media introspection.
|
||||||
|
- `ffmpeg` for conversion and extraction.
|
||||||
|
- TMDB API for optional show and episode metadata.
|
||||||
|
- Local filesystem for source media, generated outputs, subtitles, logs, config, and database files.
|
||||||
|
- Data entering the system:
|
||||||
|
- Media container and stream metadata from source files.
|
||||||
|
- Regex patterns and per-show normalization rules entered in the TUI.
|
||||||
|
- Optional config values from `~/.local/etc/ffx.json`.
|
||||||
|
- Optional TMDB identifiers and CLI overrides.
|
||||||
|
- Optional external subtitle files.
|
||||||
|
- Data leaving the system:
|
||||||
|
- Normalized output media files.
|
||||||
|
- Extracted stream files from unmux operations.
|
||||||
|
- SQLite rows representing shows, patterns, tracks, tags, shifted seasons, and properties.
|
||||||
|
- Local log output and console messages.
|
||||||
|
|
||||||
|
## High-Level Building Blocks
|
||||||
|
|
||||||
|
- Frontend, CLI, API, or worker:
|
||||||
|
- A Click-based CLI in [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py), exposed as the `ffx` command and via `python -m ffx`, including lightweight maintenance wrappers for bundle setup, workstation preparation, and upgrade tasks.
|
||||||
|
- A Textual terminal UI rooted in [`src/ffx/ffx_app.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_app.py) with screens for shows, patterns, file inspection, tracks, tags, and shifted seasons.
|
||||||
|
- Core business logic:
|
||||||
|
- Descriptor objects model media files, shows, and tracks.
|
||||||
|
- Controllers encapsulate CRUD operations and workflow orchestration for shows, patterns, tags, tracks, season shifts, configuration, and conversion.
|
||||||
|
- `MediaDescriptorChangeSet` computes differences between a file and its stored target schema to drive metadata and disposition updates.
|
||||||
|
- File inspection caches combined `ffprobe` data and crop-detection results per source and sampling window within one process to avoid repeated subprocess work.
|
||||||
|
- Storage:
|
||||||
|
- SQLite via SQLAlchemy ORM, with schema rooted in shows, patterns, tracks, media tags, track tags, shifted seasons, and generic properties.
|
||||||
|
- Ordered schema migrations are loaded dynamically from per-version-step modules under [`src/ffx/model/migration/`](/home/osgw/.local/src/codex/ffx/src/ffx/model/migration/).
|
||||||
|
- A configuration JSON file supplies optional path, metadata-filtering, and filename-template settings.
|
||||||
|
- Integration adapters:
|
||||||
|
- Process execution wrapper for `ffmpeg`, `ffprobe`, `nice`, and `cpulimit`, with explicit disabled states for niceness and CPU limiting, support for both absolute `cpulimit` values and machine-wide percent input, and a combined `cpulimit -- nice -n ... <command>` execution shape when both limits are configured.
|
||||||
|
- HTTP adapter for TMDB via `requests`.
|
||||||
|
|
||||||
|
## Data And Interface Notes
|
||||||
|
|
||||||
|
- Key entities or records:
|
||||||
|
- `Show`: canonical TV show metadata plus digit-formatting rules, optional show-level notes, and an optional show-level encoding-quality fallback.
|
||||||
|
- `Pattern`: regex rule tying filenames to one show and one target media schema.
|
||||||
|
- `Track` and `TrackTag`: persisted target stream records, codec, dispositions, audio layout, and stream-level tags. Detailed source-to-target mapping rules live in `requirements/subtrack_mapping.md`.
|
||||||
|
- `MediaTag`: persisted container-level metadata for a pattern.
|
||||||
|
- `ShiftedSeason`: mapping from source numbering ranges to adjusted season and episode numbers, owned either by a show as fallback or by a pattern as override.
|
||||||
|
- `Property`: internal key-value storage currently used for database versioning.
|
||||||
|
- External interfaces:
|
||||||
|
- CLI commands for conversion, inspection, extraction, and crop detection.
|
||||||
|
- TUI workflows for rule authoring and rule maintenance.
|
||||||
|
- Environment variable `TMDB_API_KEY` for TMDB access.
|
||||||
|
- Config keys `databasePath`, `logDirectory`, and `outputFilenameTemplate`, plus optional metadata-filter rules.
|
||||||
|
- Validation rules:
|
||||||
|
- Only supported media-file extensions are accepted for conversion.
|
||||||
|
  - Stored database version must either already match the runtime-required version or have a supported sequential migration path to it.
|
||||||
|
- A normalized descriptor may have at most one default and one forced stream per relevant track type.
|
||||||
|
- Shifted-season ranges are intended not to overlap within the same owner scope and season, and runtime resolution prefers pattern-owned matches over show-owned matches.
|
||||||
|
- TMDB lookups require a show ID and season and episode numbers.
|
||||||
|
- Error-handling approach:
|
||||||
|
- User-facing operational failures are raised as `click.ClickException` or warnings.
|
||||||
|
- Ambiguous default and forced stream states trigger prompts unless `--no-prompt` is set, in which case the command fails fast.
|
||||||
|
- External-process failures and invalid media are surfaced through logs and command errors rather than retries, except for TMDB rate-limit retries.
|
||||||
|
|
||||||
|
## Deployment And Operations
|
||||||
|
|
||||||
|
- Runtime environment:
|
||||||
|
- Local Python environment with the package installed and `ffmpeg`, `ffprobe`, `nice`, and `cpulimit` available on `PATH`.
|
||||||
|
- Deployment shape:
|
||||||
|
- Single-process command execution on demand; no daemon, queue, or network service of its own.
|
||||||
|
- Secrets and configuration handling:
|
||||||
|
- TMDB secret is read from `TMDB_API_KEY`.
|
||||||
|
- User config is read from `~/.local/etc/ffx.json`.
|
||||||
|
- Database path may also be overridden per command via `--database-file`.
|
||||||
|
- Logging and monitoring approach:
|
||||||
|
- File and console logging configured per invocation.
|
||||||
|
- Default log file path is `~/.local/var/log/ffx.log`.
|
||||||
|
- No dedicated monitoring integration is present.
|
||||||
|
|
||||||
|
## Open Technical Questions
|
||||||
|
|
||||||
|
- Question: Should Linux-specific assumptions such as `/dev/null`, `nice`, `cpulimit`, and `~/.local` remain part of the supported-platform contract?
|
||||||
|
- Risk: Portability and operational behavior are underspecified for non-Linux environments.
|
||||||
|
- Next decision needed: Either document Linux-like systems as the official support boundary or refactor the process and path handling for broader portability.
|
||||||
|
|
||||||
|
- Question: Should placeholder TUI surfaces such as settings and help become part of the required product surface or stay explicitly out of scope?
|
||||||
|
- Risk: The UI appears broader than the actually finished feature set.
|
||||||
|
- Next decision needed: Either remove or complete placeholder screens and update requirements accordingly.
|
||||||
198
requirements/metadata_editor.md
Normal file
198
requirements/metadata_editor.md
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
# Metadata Editor
|
||||||
|
|
||||||
|
This file defines the requirements for a database-free interactive metadata
|
||||||
|
editor command derived from the current file-inspection UI.
|
||||||
|
|
||||||
|
Feasibility from the current codebase: yes, with a moderate refactor.
|
||||||
|
|
||||||
|
The strongest reusable pieces already exist:
|
||||||
|
|
||||||
|
- `ffprobe`-backed media probing through `FileProperties` and `MediaDescriptor`
|
||||||
|
- descriptor-level metadata and disposition mutation through `MediaDescriptor`
|
||||||
|
and `TrackDescriptor`
|
||||||
|
- diff and ffmpeg token generation through `MediaDescriptorChangeSet`
|
||||||
|
- stream-copy remux execution through `FfxController` with `VideoEncoder.COPY`
|
||||||
|
- reusable tag and track edit dialogs in the Textual UI
|
||||||
|
|
||||||
|
The main missing pieces are:
|
||||||
|
|
||||||
|
- a CLI bootstrap path that does not initialize SQLite
|
||||||
|
- a probe-only path that does not instantiate database-backed controllers
|
||||||
|
- a clean separation between original file state and editable draft state
|
||||||
|
- a safe temporary-output and replace workflow for writing changes back to the
|
||||||
|
same file path
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
- One new command: `ffx edit <file>`
|
||||||
|
- One-file interactive editing through a Textual screen derived from
|
||||||
|
`MediaDetailsScreen`
|
||||||
|
- Editing container-level metadata and per-stream metadata already visible in
|
||||||
|
the application
|
||||||
|
- Editing stream dispositions that are represented as metadata-like output
|
||||||
|
state, especially `default` and `forced`
|
||||||
|
- Writing the result back to the original file path through a temporary output
|
||||||
|
file and replace step
|
||||||
|
|
||||||
|
## Out Of Scope
|
||||||
|
|
||||||
|
- SQLite reads, writes, migrations, or pattern matching
|
||||||
|
- TMDB lookups, show selection, pattern selection, or shifted-season logic
|
||||||
|
- Batch editing multiple files in one command invocation
|
||||||
|
- Video or audio transcoding
|
||||||
|
- Container changes, filename changes, or rename workflows
|
||||||
|
- Stream add, stream delete, stream reorder, or stream substitution from
|
||||||
|
external files in the first release
|
||||||
|
- Editing technical stream identity such as codec, stream type, source index,
|
||||||
|
or audio layout in the first release
|
||||||
|
- Chapter editing
|
||||||
|
|
||||||
|
## Terms
|
||||||
|
|
||||||
|
- `baseline descriptor`: immutable in-memory representation of the file as last
|
||||||
|
probed from disk
|
||||||
|
- `draft descriptor`: mutable in-memory representation of the desired output
|
||||||
|
state
|
||||||
|
- `edit mode`: the database-free TUI mode used by `ffx edit`
|
||||||
|
- `planned changes`: user-visible summary of the differences between baseline
|
||||||
|
and draft plus any configured cleanup actions
|
||||||
|
- `temporary output file`: the write target used before replacing the original
|
||||||
|
file path
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- `METADATA_EDITOR-0001`: The system shall provide a command `ffx edit <file>`
|
||||||
|
that requires exactly one existing media file path and opens an interactive
|
||||||
|
Textual editor for that file.
|
||||||
|
- `METADATA_EDITOR-0002`: `ffx edit` shall not initialize SQLite, shall not
|
||||||
|
open the configured database file, shall not prompt for database migration,
|
||||||
|
and shall not instantiate any controller that depends on `context['database']`.
|
||||||
|
- `METADATA_EDITOR-0003`: `ffx edit` may still read configuration and logging
|
||||||
|
settings from `~/.local/etc/ffx.json`, but any global database option shall
|
||||||
|
have no effect on this command's behavior.
|
||||||
|
- `METADATA_EDITOR-0004`: Edit mode shall be derived from the current
|
||||||
|
`MediaDetailsScreen` behavior and layout where practical, but all DB-only UI
|
||||||
|
elements and actions such as show selection, pattern input, and pattern CRUD
|
||||||
|
actions shall be hidden, disabled, or replaced.
|
||||||
|
- `METADATA_EDITOR-0005`: Edit mode shall keep the baseline descriptor and the
|
||||||
|
draft descriptor as separate objects. Editing actions shall mutate only the
|
||||||
|
draft descriptor until the operator explicitly applies changes.
|
||||||
|
- `METADATA_EDITOR-0006`: The application shall keep raw metadata values
|
||||||
|
separate from rendered labels. Rich or Textual markup may be used for
|
||||||
|
presentation, but it shall never be stored in descriptor state, reused as
|
||||||
|
source data, or written into the media file.
|
||||||
|
- `METADATA_EDITOR-0007`: The planned-changes view shall compare the baseline
|
||||||
|
descriptor with the draft descriptor using `MediaDescriptorChangeSet` or an
|
||||||
|
  equivalent descriptor-diff mechanism; the comparison shall represent `baseline -> draft`, not the previous `file -> db` meaning.
|
||||||
|
- `METADATA_EDITOR-0008`: The editor shall support container-tag add, edit, and
|
||||||
|
delete operations on the draft descriptor.
|
||||||
|
- `METADATA_EDITOR-0009`: The editor shall support per-stream metadata edit
|
||||||
|
operations on the draft descriptor, including at least language, title, and
|
||||||
|
arbitrary stream tag key-value pairs.
|
||||||
|
- `METADATA_EDITOR-0010`: The editor shall support setting and clearing
|
||||||
|
`default` and `forced` dispositions in the draft descriptor, while enforcing
|
||||||
|
that there is at most one `default` and at most one `forced` stream per track
|
||||||
|
type.
|
||||||
|
- `METADATA_EDITOR-0011`: The first released editor scope shall treat technical
|
||||||
|
stream structure as immutable. A user shall not be able to change stream
|
||||||
|
count, output order, codec, track type, audio layout, or source-index
|
||||||
|
mapping through `ffx edit`.
|
||||||
|
- `METADATA_EDITOR-0012`: The track-edit UI used in edit mode shall therefore
|
||||||
|
expose only metadata fields and supported disposition fields. Structural
|
||||||
|
fields that are editable in pattern-authoring workflows shall be read-only or
|
||||||
|
absent in edit mode.
|
||||||
|
- `METADATA_EDITOR-0013`: The command shall write changes through an ffmpeg
|
||||||
|
stream-copy remux workflow only. No transcoding shall be performed as part of
|
||||||
|
`ffx edit`.
|
||||||
|
- `METADATA_EDITOR-0014`: Because ffmpeg cannot rewrite the source file in
|
||||||
|
place, `ffx edit` shall write to a temporary output file on the same
|
||||||
|
filesystem as the source file and shall replace the original path only after
|
||||||
|
ffmpeg reports success.
|
||||||
|
- `METADATA_EDITOR-0015`: The temporary output path shall preserve the original
|
||||||
|
container type and file extension. The feature shall not silently change the
|
||||||
|
container or extension during a metadata-only edit.
|
||||||
|
- `METADATA_EDITOR-0016`: If the rewrite step fails, the original file shall
|
||||||
|
remain untouched. The system shall not leave the user with a partially
|
||||||
|
replaced source file.
|
||||||
|
- `METADATA_EDITOR-0017`: After a successful replace, the application shall
|
||||||
|
reprobe the rewritten file, refresh the baseline descriptor from disk, reset
|
||||||
|
the draft state to that fresh baseline, and clear the dirty state.
|
||||||
|
- `METADATA_EDITOR-0018`: Edit mode shall track whether unsaved draft changes
|
||||||
|
exist and shall require confirmation before dismissing the screen or quitting
|
||||||
|
the app when such changes would be lost.
|
||||||
|
- `METADATA_EDITOR-0019`: Edit mode shall not inject conversion-only encoding
|
||||||
|
metadata such as encoder quality or preset markers.
|
||||||
|
- `METADATA_EDITOR-0020`: Signature-tag behavior shall be explicit for
|
||||||
|
metadata-only editing. The default behavior shall not add a misleading
|
||||||
|
recoding-style signature to a file that was only remuxed for metadata
|
||||||
|
updates.
|
||||||
|
- `METADATA_EDITOR-0021`: Configured metadata-removal rules from the local
|
||||||
|
configuration shall be surfaced clearly in the UI and in the planned-changes
|
||||||
|
view. If those rules are applied during save, the operator shall be able to
|
||||||
|
tell that the file will be cleaned in addition to any manual edits.
|
||||||
|
- `METADATA_EDITOR-0022`: The command shall provide an invocation-level way to
|
||||||
|
disable config-driven cleanup when the operator wants a pure manual metadata
|
||||||
|
edit without automatic tag removal.
|
||||||
|
- `METADATA_EDITOR-0023`: The existing global `--dry-run` behavior shall apply
|
||||||
|
to `ffx edit`. In dry-run mode the command shall not replace the original
|
||||||
|
file and shall expose the planned write operation clearly enough for the user
|
||||||
|
to understand what would happen.
|
||||||
|
|
||||||
|
## Acceptance
|
||||||
|
|
||||||
|
- `ffx edit /path/to/file.mkv` opens successfully on a workstation where the
|
||||||
|
configured database is missing, empty, incompatible, or intentionally
|
||||||
|
inaccessible.
|
||||||
|
- Opening a file in edit mode does not trigger database bootstrap or migration
|
||||||
|
prompts.
|
||||||
|
- A user can change a container tag, save, and see the rewritten file at the
|
||||||
|
same path with the updated metadata.
|
||||||
|
- A user can change a stream title or language, save, and see the rewritten
|
||||||
|
file at the same path with the updated stream metadata.
|
||||||
|
- A user can change `default` or `forced` on a track, save, and see the
|
||||||
|
rewritten file at the same path with the updated dispositions.
|
||||||
|
- The planned-changes view reflects manual edits relative to the original file
|
||||||
|
and, when enabled, any configured cleanup removals.
|
||||||
|
- No rendered Rich or Textual color markup appears in the saved file metadata.
|
||||||
|
- If ffmpeg fails while saving, the original file remains present and readable
|
||||||
|
at the original path.
|
||||||
|
- In dry-run mode, the original file remains untouched.
|
||||||
|
|
||||||
|
## Current Code Fit
|
||||||
|
|
||||||
|
- Good fit:
|
||||||
|
- `FfxController.runJob(...)` already has a `VideoEncoder.COPY` path that
|
||||||
|
can remux streams and apply metadata and disposition tokens.
|
||||||
|
- `MediaDescriptorChangeSet` already computes container-tag, stream-tag, and
|
||||||
|
disposition differences and can generate ffmpeg metadata tokens.
|
||||||
|
- `TagDetailsScreen` and `TrackDetailsScreen` already provide reusable edit
|
||||||
|
dialogs for draft state.
|
||||||
|
- `PatternDetailsScreen` already demonstrates add, edit, and delete flows for
|
||||||
|
tags and tracks in a draft-first UI.
|
||||||
|
- Refactor required:
|
||||||
|
- `ffx` CLI initialization currently creates a database context for all
|
||||||
|
non-lightweight commands, so `edit` needs its own DB-free bootstrap path.
|
||||||
|
- `FileProperties` currently instantiates `PatternController` eagerly, so
|
||||||
|
probing must be split from pattern matching or made lazy.
|
||||||
|
- `MediaDetailsScreen` currently assumes `command == 'inspect'` and mixes
|
||||||
|
file state with database-backed target-pattern state.
|
||||||
|
- `MediaDetailsScreen` currently mutates the probed source descriptor
|
||||||
|
directly. Edit mode needs an immutable baseline descriptor and a separate
|
||||||
|
mutable draft descriptor.
|
||||||
|
- `TrackDetailsScreen` currently exposes structural fields that are valid for
|
||||||
|
pattern authoring but too dangerous for metadata-only file editing.
|
||||||
|
|
||||||
|
## Risks
|
||||||
|
|
||||||
|
- Container-level metadata support differs across formats, so some requested tag
|
||||||
|
changes may not round-trip identically through ffmpeg for every supported
|
||||||
|
container.
|
||||||
|
- The existing metadata-removal implementation is conversion-oriented and may
|
||||||
|
remove tags more aggressively than a user expects from a manual editor unless
|
||||||
|
cleanup policy is made explicit.
|
||||||
|
- The current codebase lacks a dedicated descriptor clone API, so draft-state
|
||||||
|
separation should be implemented deliberately instead of via accidental shared
|
||||||
|
references.
|
||||||
|
- Replacing a file path with a temporary output changes inode identity, so any
|
||||||
|
future requirement around preserving timestamps, hard links, or extended
|
||||||
|
attributes would need additional explicit handling.
|
||||||
68
requirements/pattern_management.md
Normal file
68
requirements/pattern_management.md
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
# Pattern Management
|
||||||
|
|
||||||
|
This file defines the behavioral contract for managing shows, patterns, and
|
||||||
|
pattern-backed filename matching.
|
||||||
|
|
||||||
|
Primary source: actual tool code in `src/ffx/`.
|
||||||
|
Secondary source: operator intent captured in task discussion.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
- The show, pattern, and track hierarchy stored in SQLite.
|
||||||
|
- The role of a pattern as a reusable normalization definition for related media files.
|
||||||
|
- Filename-driven assignment of a scanned media file to one show through one matching pattern.
|
||||||
|
- Duplicate-match handling when more than one pattern matches the same filename.
|
||||||
|
|
||||||
|
## Terms
|
||||||
|
|
||||||
|
- `show`: logical series identity such as one TV show entry in the database.
|
||||||
|
- `pattern`: regex-backed normalization definition attached to one show.
|
||||||
|
- `track`: one persisted target-track definition attached to one pattern.
|
||||||
|
- `scanned media file`: one source file currently being inspected or converted.
|
||||||
|
- `duplicate pattern match`: a filename state where more than one stored pattern matches the same scanned media file.
|
||||||
|
- `pattern-backed target schema`: the combination of one pattern's stored media tags and stored track definitions.
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- `PATTERN_MANAGEMENT-0001`: The domain model shall treat a show as the parent entity for patterns that describe distinct release families or normalization schemas for that show. A show may temporarily exist without patterns during editing or initial TUI creation.
|
||||||
|
- `PATTERN_MANAGEMENT-0002`: Each persisted pattern shall belong to exactly one show.
|
||||||
|
- `PATTERN_MANAGEMENT-0003`: The domain model shall treat a pattern as the reusable normalization definition for a series of media files expected to share the same internal track layout and materially similar stream and container metadata.
|
||||||
|
- `PATTERN_MANAGEMENT-0004`: Each persisted track definition shall belong to exactly one pattern.
|
||||||
|
- `PATTERN_MANAGEMENT-0005`: A pattern may also carry pattern-level media tags. The pattern's media tags plus its track definitions together form the pattern-backed target schema.
|
||||||
|
- `PATTERN_MANAGEMENT-0006`: A scanned media file shall resolve to at most one pattern and therefore at most one show.
|
||||||
|
- `PATTERN_MANAGEMENT-0007`: If no pattern matches a filename, the file shall remain unmatched rather than being assigned implicitly.
|
||||||
|
- `PATTERN_MANAGEMENT-0008`: If more than one pattern matches the same filename, the system shall raise a duplicate pattern match error instead of silently selecting one.
|
||||||
|
- `PATTERN_MANAGEMENT-0009`: Duplicate-match detection shall apply regardless of whether the competing patterns belong to the same show or to different shows.
|
||||||
|
- `PATTERN_MANAGEMENT-0010`: Exact duplicate pattern definitions for the same show should not create multiple persisted pattern rows.
|
||||||
|
- `PATTERN_MANAGEMENT-0011`: A persisted pattern shall define one or more tracks. Creating or retaining a zero-track pattern in the database is invalid managed state and shall be prohibited.
|
||||||
|
- `PATTERN_MANAGEMENT-0012`: A show may exist without patterns as an intermediate editing state, for example when a user creates the show first in the TUI and adds patterns later.
|
||||||
|
- `PATTERN_MANAGEMENT-0013`: Operator-facing pattern management should expose the owning show, regex pattern, stored track set, and stored media-tag set so a user can reason about matching and normalization behavior.
|
||||||
|
- `PATTERN_MANAGEMENT-0014`: Matching semantics shall be deterministic and documented. Implicit "last matching pattern wins" behavior is not acceptable as released behavior.
|
||||||
|
|
||||||
|
## Acceptance
|
||||||
|
|
||||||
|
- A filename that matches exactly one pattern yields one matched pattern and one show identity.
|
||||||
|
- A filename that matches no pattern yields no matched pattern and an unmatched state.
|
||||||
|
- A filename that matches more than one pattern yields an explicit duplicate-match error.
|
||||||
|
- A pattern-backed target schema can be reconstructed from one pattern's stored media tags and stored track definitions.
|
||||||
|
- A show may be stored before any patterns are attached to it.
|
||||||
|
- A pattern cannot be stored or retained as a valid managed pattern unless at least one track is defined for it.
|
||||||
|
- Pattern-backed conversion never proceeds with two competing matching patterns for the same input filename.
|
||||||
|
|
||||||
|
## Current Code Fit
|
||||||
|
|
||||||
|
- `src/ffx/model/show.py` implements a one-to-many `Show -> Pattern` relationship.
|
||||||
|
- `src/ffx/model/pattern.py` implements `Pattern.show_id`, a one-to-many `Pattern -> Track` relationship, a one-to-many `Pattern -> MediaTag` relationship, and a unique `(show_id, pattern)` constraint for freshly created databases.
|
||||||
|
- `src/ffx/model/track.py` implements `Track.pattern_id`, so each persisted track belongs to one pattern.
|
||||||
|
- `src/ffx/model/pattern.py` reconstructs a pattern-backed target schema through `Pattern.getMediaDescriptor(...)`, combining stored media tags and stored tracks.
|
||||||
|
- `src/ffx/file_properties.py` assumes a scanned file resolves to at most one pattern, because it stores only one `self.__pattern` and derives one `show_id` from it.
|
||||||
|
- `src/ffx/pattern_controller.py` prevents exact duplicate `(show_id, pattern)` definitions during create and update flows, and it refreshes cached compiled regexes when stored pattern expressions change.
|
||||||
|
- `src/ffx/pattern_controller.py` now complies with duplicate-match safety. `matchFilename(...)` scans deterministically, returns exactly one match, returns `{}` for no match, and raises an explicit duplicate-pattern-match error when more than one pattern matches the same filename.
|
||||||
|
- The current persistence layer already aligns with the intended empty-show workflow because a show can exist without patterns.
|
||||||
|
- New pattern creation and schema replacement flows now require at least one track, and `TrackController.deleteTrack(...)` prevents deleting the last persisted track from a pattern.
|
||||||
|
- Trackless legacy rows can still exist in preexisting databases, but matching now rejects them explicitly instead of letting them participate silently.
|
||||||
|
|
||||||
|
## Risks
|
||||||
|
|
||||||
|
- The intended "release family" meaning of a pattern is a domain assumption, not something the code verifies automatically across all files matching that pattern.
|
||||||
|
- Preexisting databases created before the newer validation rules may still contain invalid rows, so upgrade and cleanup paths should continue to treat explicit validation failures as recoverable operator signals.
|
||||||
124
requirements/project.md
Normal file
124
requirements/project.md
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
## Purpose And Scope
|
||||||
|
|
||||||
|
- Project name: FFX
|
||||||
|
- User problem: TV episode files from mixed sources arrive with inconsistent codecs, stream metadata, subtitle layouts, season and episode numbering, and output filenames, which makes them awkward to archive and use in media-player applications.
|
||||||
|
- Target users: Individual operators curating a local TV media library on a workstation, especially users willing to define normalization rules per show.
|
||||||
|
- Success outcome: A user can inspect source files, define reusable show and pattern rules, and produce output files whose streams, metadata, and filenames follow a predictable schema for web playback and library import.
|
||||||
|
- Out of scope:
|
||||||
|
- Multi-user or hosted service workflows.
|
||||||
|
- General movie-library management.
|
||||||
|
- Distributed transcoding or remote job orchestration.
|
||||||
|
- Broad media-server administration beyond file preparation.
|
||||||
|
|
||||||
|
## Required Product
|
||||||
|
|
||||||
|
- Deliverable type: Installable Python command-line application with a Textual terminal UI for inspection and rule editing.
|
||||||
|
- Core capabilities:
|
||||||
|
- Maintain an SQLite-backed database of shows, filename-matching patterns, per-pattern stream layouts and metadata tags, and optional season-shift rules.
|
||||||
|
- Inspect existing media files through `ffprobe` and compare discovered stream metadata with stored normalization rules.
|
||||||
|
- Convert media files through `ffmpeg` into a normalized output layout, including video recoding, audio transcoding to Opus, metadata cleanup and rewrite, and controlled disposition flags.
|
||||||
|
- Build output filenames from detected or configured show, season, and episode information, optionally enriched from TMDB and a configurable Jinja-style filename template.
|
||||||
|
- Support auxiliary file operations such as subtitle import, unmuxing, crop detection, rename-only conversion runs, and direct in-place episode renaming.
|
||||||
|
- Supported environments:
|
||||||
|
- Local execution on a Python-capable workstation.
|
||||||
|
- Best-supported on Linux-like systems because the implementation assumes `~/.local`, `/dev/null`, `nice`, and `cpulimit`.
|
||||||
|
- Requires `ffmpeg`, `ffprobe`, and `cpulimit` on `PATH`.
|
||||||
|
- Operational owner: The local user running the tool and maintaining its config, database, and external tooling.
|
||||||
|
|
||||||
|
## Suggested User Stories
|
||||||
|
|
||||||
|
- As a library maintainer, I want to define show-specific matching rules once so that future source files can be normalized automatically.
|
||||||
|
- As an operator, I want to inspect a file before conversion so that I can compare its actual streams and tags against the stored target schema.
|
||||||
|
- As a user preparing web-playback files, I want to recode video and audio with a small set of predictable options so that results are compatible and consistently named.
|
||||||
|
- As a user dealing with nonstandard releases, I want CLI overrides for language, title, stream order, default and forced tracks, and season and episode data so that one-off fixes do not require database edits first.
|
||||||
|
- As a user importing anime or other shifted numbering schemes, I want season and episode offsets at the show level with optional pattern-specific overrides so that generated filenames align with TMDB and media-library expectations.
|
||||||
|
|
||||||
|
## Functional Requirements
|
||||||
|
|
||||||
|
- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `rename`, `unmux`, `cropdetect`, `setup`, `configure_workstation`, `upgrade`, `version`, and `help`.
|
||||||
|
- The system shall support a two-step local installation and preparation flow:
|
||||||
|
- `tools/setup.sh` is the bootstrap entrypoint for the first step and shall own bundle virtualenv creation, package installation, shell alias exposure, and optional Python test-package installation.
|
||||||
|
- `tools/configure_workstation.sh` is the bootstrap entrypoint for the second step and shall own workstation dependency checks and installation plus local config and directory seeding.
|
||||||
|
- After the bundle is installed, `ffx setup` and `ffx configure_workstation` shall remain aligned wrapper entrypoints for those same two steps.
|
||||||
|
- The CLI command `ffx setup` shall act as a wrapper for the first-step bundle-preparation flow in `tools/setup.sh`.
|
||||||
|
- The CLI command `ffx configure_workstation` shall act as a wrapper for the second-step preparation flow in `tools/configure_workstation.sh`.
|
||||||
|
- The system shall persist reusable normalization rules in SQLite for:
|
||||||
|
- shows and show formatting digits,
|
||||||
|
- optional show-level notes,
|
||||||
|
- optional show-level quality defaults,
|
||||||
|
- regex-based filename patterns,
|
||||||
|
- per-pattern media tags,
|
||||||
|
- per-pattern stream definitions,
|
||||||
|
- show-level and pattern-level shifted-season mappings,
|
||||||
|
- internal database version properties.
|
||||||
|
- The system shall apply supported, ordered database migrations automatically when opening an older local database file and shall fail fast when no supported migration path exists.
|
||||||
|
- Before applying a required database migration, the system shall show the current version, target version, required sequential steps, and whether each corresponding migration module is present, then require user confirmation.
|
||||||
|
- Before applying a confirmed file-backed database migration, the system shall create an in-place backup copy whose filename includes the covered version range.
|
||||||
|
- Detailed show, pattern, and duplicate-match management rules live in `requirements/pattern_management.md`.
|
||||||
|
- The system shall inspect source media using `ffprobe` and derive a structured description of container metadata and streams.
|
||||||
|
- The system shall optionally open a Textual UI to browse shows, inspect files, and create, edit, or delete shows, patterns, stream definitions, tags, and shifted-season rules.
|
||||||
|
- The system shall match filenames against stored regex patterns to decide whether an input file should inherit a target stream and metadata schema.
|
||||||
|
- The system shall convert supported input files (`mkv`, `mp4`, `avi`, `flv`, `webm`) with `ffmpeg`, supporting at least:
|
||||||
|
- VP9, AV1, and H.264 video encoding,
|
||||||
|
- Opus audio encoding with bitrate selection based on channel layout,
|
||||||
|
- metadata and disposition rewriting,
|
||||||
|
- optional crop detection and crop application,
|
||||||
|
- optional deinterlacing and denoising,
|
||||||
|
- optional subtitle import from external files,
|
||||||
|
- rename-only move mode.
|
||||||
|
- The system shall support optional TMDB lookups to resolve show names, years, and episode titles when a show ID, season, and episode are available.
|
||||||
|
- The system shall generate output filenames from show metadata, season and episode indices, and episode names using the configured filename template.
|
||||||
|
- The system shall allow CLI overrides for stream languages, stream titles, default and forced tracks, stream order, TMDB show and episode data, output directory, label prefix, and processing resource limits.
|
||||||
|
- The system shall resolve encoding quality by precedence `CLI override -> pattern -> show -> encoder default` and shall report the chosen value and source.
|
||||||
|
- The system shall resolve season shifting by precedence `pattern -> show -> identity default` and shall report the chosen mapping and source.
|
||||||
|
- Processing resource limit rules:
|
||||||
|
- `--nice` shall accept niceness values from `-20` through `19`; omitting the option shall disable niceness adjustment.
|
||||||
|
- `--cpu` shall accept either a positive absolute `cpulimit` value such as `200`, or a percentage suffixed with `%` such as `25%` to represent a share of present CPUs; omitting the option or using `0` shall disable CPU limiting.
|
||||||
|
- When both limits are configured, the process wrapper shall execute the target command through `cpulimit` around a `nice -n ...` invocation so both limits apply to the launched media command.
|
||||||
|
- The system shall support extracting streams into separate files via `unmux` and reporting suggested crop parameters via `cropdetect`.
|
||||||
|
- The system shall support in-place episode renaming via `rename`, requiring a `--prefix`, accepting optional `--season` and `--suffix` overrides, preserving the source extension, and supporting dry-run output without moving files.
|
||||||
|
- Crop detection shall use a configurable sampling window, defaulting to a 60-second seek and a 180-second analysis duration, and repeated crop-detection requests for the same source plus sampling window shall reuse cached results within one process.
|
||||||
|
- The system shall handle invalid input and system failures gracefully by logging warnings or raising `click` errors for missing files, invalid media, missing TMDB credentials, incompatible database versions, and ambiguous track dispositions when prompting is disabled.
|
||||||
|
|
||||||
|
## Quality Requirements
|
||||||
|
|
||||||
|
- The system should stay understandable as a small local tool: controllers, descriptors, models, and screens should remain separate enough for contributors to trace a workflow end to end.
|
||||||
|
- The system should produce predictable output for the same database rules, CLI overrides, and source files.
|
||||||
|
- The system should preserve a lightweight operational footprint: local SQLite state, local log file, no mandatory background services.
|
||||||
|
- The system should be testable through modern, automatically discovered tests and through the remaining legacy harness coverage during migration.
|
||||||
|
- The system should expose enough logging to diagnose failed probes, failed conversions, and rule mismatches without requiring a debugger.
|
||||||
|
|
||||||
|
## Constraints And Assumptions
|
||||||
|
|
||||||
|
- Technology constraints:
|
||||||
|
- Python package built with setuptools.
|
||||||
|
- Primary libraries: `click`, `textual`, `sqlalchemy`, `jinja2`, `requests`.
|
||||||
|
- Conversion and inspection rely on external executables rather than pure-Python media libraries.
|
||||||
|
- Hosting or infrastructure constraints:
|
||||||
|
- Intended for local execution, not server deployment.
|
||||||
|
- Stores default state in `~/.local/etc/ffx.json`, `~/.local/var/ffx/ffx.db`, and `~/.local/var/log/ffx.log`.
|
||||||
|
- Timeline constraints:
|
||||||
|
- The current implemented scope reflects a compact alpha release stream up to version `0.2.6`.
|
||||||
|
- Team capacity assumptions:
|
||||||
|
- Maintained as a small codebase where simple patterns and direct controller logic are preferred over framework-heavy abstractions.
|
||||||
|
- Third-party dependencies:
|
||||||
|
- `ffmpeg`, `ffprobe`, and `cpulimit`.
|
||||||
|
- TMDB API access through `TMDB_API_KEY` for metadata enrichment.
|
||||||
|
- Installation assumptions:
|
||||||
|
- The Python-side bundle install step and optional Python test extras are managed by `tools/setup.sh`, with `ffx setup` as the aligned wrapper after bootstrap.
|
||||||
|
- The workstation-preparation step is managed separately by `tools/configure_workstation.sh` or `ffx configure_workstation`.
|
||||||
|
|
||||||
|
## Acceptance Scope
|
||||||
|
|
||||||
|
- First release boundary:
|
||||||
|
- Local installation through `pip`.
|
||||||
|
- Working SQLite-backed rule storage.
|
||||||
|
- Functional CLI conversion and inspection workflows.
|
||||||
|
- Textual CRUD flows for shows, patterns, tags, tracks, and shifted seasons.
|
||||||
|
- TMDB-assisted filename generation, subtitle import, season shifting, database versioning, and configurable output filename templating.
|
||||||
|
- Excluded follow-up ideas:
|
||||||
|
- Completing placeholder screens such as settings and help.
|
||||||
|
- Hardening platform portability beyond Linux-like systems.
|
||||||
|
- Broader media types, richer release packaging, and production-grade background processing.
|
||||||
|
- Demonstration scenario:
|
||||||
|
- Inspect a TV episode file, define or update the matching show and pattern in the TUI, then run `ffx convert` so the result uses the stored stream schema, optional TMDB episode naming, and a normalized output filename.
|
||||||
177
requirements/shifted_seasons_handling.md
Normal file
177
requirements/shifted_seasons_handling.md
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
# Numbering Mapping Handling
|
||||||
|
|
||||||
|
This file defines the behavioral contract for mapping source season and episode
|
||||||
|
numbering to target season and episode numbering through stored shifted-season
|
||||||
|
rules.
|
||||||
|
|
||||||
|
Primary sources:
|
||||||
|
- `requirements/project.md`
|
||||||
|
- `requirements/architecture.md`
|
||||||
|
- actual tool code in `src/ffx/`
|
||||||
|
|
||||||
|
Secondary source:
|
||||||
|
- `SCRATCHPAD.md`, used only to clarify current hardening gaps and not as the
|
||||||
|
primary contract source.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
- Persisting shifted-season rules in SQLite.
|
||||||
|
- Allowing shifted-season rules to be attached either to a show or to a
|
||||||
|
specific pattern.
|
||||||
|
- Selecting at most one active shifted-season rule for one concrete source
|
||||||
|
season and episode tuple.
|
||||||
|
- Applying additive season and episode offsets to produce target numbering.
|
||||||
|
- Using shifted target numbering during `convert` for TMDB episode lookup and
|
||||||
|
generated season and episode filename tokens.
|
||||||
|
- Managing show-level default mappings and pattern-level override mappings from
|
||||||
|
the Textual editing workflows.
|
||||||
|
|
||||||
|
## Out Of Scope
|
||||||
|
|
||||||
|
- General filename parsing rules for detecting season and episode values.
|
||||||
|
- Standalone `rename` command behavior, which currently uses explicit rename
|
||||||
|
inputs rather than stored shifted-season rules.
|
||||||
|
- Stream or track mapping behavior unrelated to season and episode numbering.
|
||||||
|
|
||||||
|
## Terms
|
||||||
|
|
||||||
|
- `shifted-season rule`: one persisted row describing how one source-numbering
|
||||||
|
range maps to target numbering through additive offsets.
|
||||||
|
- `show-level shifted-season rule`: a rule attached directly to a show and used
|
||||||
|
as the fallback mapping layer for that show.
|
||||||
|
- `pattern-level shifted-season rule`: a rule attached directly to a pattern and
|
||||||
|
used as the override mapping layer for that pattern.
|
||||||
|
- `source numbering`: the season and episode values detected from the current
|
||||||
|
source file or supplied as source-side conversion inputs before shifting.
|
||||||
|
- `target numbering`: the season and episode values after one active
|
||||||
|
shifted-season rule has been applied.
|
||||||
|
- `original season`: the source-domain season number a shifted-season rule is
|
||||||
|
eligible to match.
|
||||||
|
- `episode range`: the optional source-domain episode interval covered by one
|
||||||
|
shifted-season rule.
|
||||||
|
- `open bound`: an unbounded start or end of the episode range. Current storage
|
||||||
|
uses `-1` as the internal sentinel for an open bound.
|
||||||
|
- `active shifted-season rule`: the single rule selected for one concrete input
|
||||||
|
after precedence resolution.
|
||||||
|
- `identity mapping`: the default `1:1` outcome where source numbering is used
|
||||||
|
unchanged.
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0001`: The domain model shall allow a
|
||||||
|
shifted-season rule to be owned by exactly one of:
|
||||||
|
- one show
|
||||||
|
- one pattern
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0002`: A single shifted-season rule shall not
|
||||||
|
belong to both a show and a pattern at the same time.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0003`: A shifted-season rule shall carry these
|
||||||
|
fields: `original_season`, `first_episode`, `last_episode`,
|
||||||
|
`season_offset`, and `episode_offset`.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0004`: `season_offset` and `episode_offset` shall
|
||||||
|
be additive signed integers applied to matched source numbering to produce
|
||||||
|
target numbering.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0005`: A shifted-season rule shall match a source
|
||||||
|
tuple only when:
|
||||||
|
- the source season equals `original_season`
|
||||||
|
- the source episode is greater than or equal to `first_episode` when the
|
||||||
|
lower bound is closed
|
||||||
|
- the source episode is less than or equal to `last_episode` when the upper
|
||||||
|
bound is closed
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0006`: An open lower or upper episode bound shall
|
||||||
|
represent an unbounded side of the covered source episode range.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0007`: If one shifted-season rule matches, target
|
||||||
|
numbering shall be:
|
||||||
|
- `target season = source season + season_offset`
|
||||||
|
- `target episode = source episode + episode_offset`
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0008`: If no shifted-season rule matches, source
|
||||||
|
numbering shall pass through unchanged.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0009`: Shifted-season handling shall operate in a
|
||||||
|
source-to-target numbering model. Stored rules map detected source numbering
|
||||||
|
to the target numbering used by conversion-facing metadata and output naming.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0010`: Pattern matching identifies the owning show
|
||||||
|
and optionally a more specific owning pattern. Resolution of the active
|
||||||
|
shifted-season rule shall use this precedence order:
|
||||||
|
- matching pattern-level rule
|
||||||
|
- matching show-level rule
|
||||||
|
- identity mapping
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0011`: At most one shifted-season rule may be
|
||||||
|
active for one concrete source season and episode tuple. Shifted-season rules
|
||||||
|
shall never stack or compose.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0012`: Within one owner scope, shifted-season rules
|
||||||
|
shall not overlap in their effective episode coverage for the same
|
||||||
|
`original_season`.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0013`: If a shifted-season rule uses two closed
|
||||||
|
episode bounds, `last_episode` shall be greater than or equal to
|
||||||
|
`first_episode`.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0014`: Shifted-season rule evaluation shall be
|
||||||
|
deterministic. Released behavior shall not depend on arbitrary database row
|
||||||
|
order when invalid overlapping rules exist.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0015`: A pattern-level rule is permitted to map to
|
||||||
|
zero offsets. Such a rule is a valid explicit override that beats show-level
|
||||||
|
fallback and produces identity mapping for its covered source range.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0016`: During `convert`, when show, season, and
|
||||||
|
episode values are available and stored shifting is active, the shifted target
|
||||||
|
numbering shall drive:
|
||||||
|
- TMDB episode lookup
|
||||||
|
- season and episode filename tokens such as `S01E02`
|
||||||
|
- generated episode basenames that include season and episode numbering
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0017`: When conversion is supplied explicit
|
||||||
|
target-domain season or episode values for TMDB naming, the system shall not
|
||||||
|
apply stored shifting on top of those already-targeted values.
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0018`: Operator-facing editing shall expose
|
||||||
|
shifted-season rule management in both of these places:
|
||||||
|
- show editing for show-level default mappings
|
||||||
|
- pattern editing for pattern-level override mappings
|
||||||
|
- `SHIFTED_SEASONS_HANDLING-0019`: User-facing shifted-season editing should
|
||||||
|
present open episode bounds as a natural empty-state input rather than forcing
|
||||||
|
operators to type the internal sentinel directly.
|
||||||
|
|
||||||
|
## Acceptance
|
||||||
|
|
||||||
|
- A show can exist with zero or more show-level shifted-season rules.
|
||||||
|
- A pattern can exist with zero or more pattern-level shifted-season rules.
|
||||||
|
- A shifted-season rule is stored against exactly one owner scope.
|
||||||
|
- A source tuple matching a pattern-level rule yields target numbering from that
|
||||||
|
rule even when a matching show-level rule also exists.
|
||||||
|
- A source tuple matching no pattern-level rule but matching a show-level rule
|
||||||
|
yields target numbering from the show-level rule.
|
||||||
|
- A source tuple matching neither scope yields identity mapping.
|
||||||
|
- A pattern-level zero-offset rule can explicitly override a nonzero show-level
|
||||||
|
rule for the same covered source range.
|
||||||
|
- Two shifted-season rules for the same owner scope and original season cannot
|
||||||
|
both be valid if they cover overlapping episode ranges.
|
||||||
|
- During `convert`, shifted numbering is what TMDB episode lookup and generated
|
||||||
|
season and episode tokens see when stored shifting is active.
|
||||||
|
- The TUI can display and maintain shifted-season rules from both the show and
|
||||||
|
pattern editing flows.
|
||||||
|
|
||||||
|
## Current Code Fit
|
||||||
|
|
||||||
|
- `src/ffx/model/show.py` and `src/ffx/model/pattern.py` now both expose
|
||||||
|
shifted-season relationships, and `src/ffx/model/shifted_season.py` stores
|
||||||
|
each rule against exactly one owner scope through `show_id` or `pattern_id`.
|
||||||
|
- `src/ffx/shifted_season_controller.py` now resolves mappings with
|
||||||
|
pattern-over-show precedence and applies at most one active rule for a source
|
||||||
|
tuple.
|
||||||
|
- `src/ffx/show_details_screen.py`,
|
||||||
|
`src/ffx/shifted_season_details_screen.py`, and
|
||||||
|
`src/ffx/shifted_season_delete_screen.py` provide reusable shifted-season
|
||||||
|
editing dialogs, and `src/ffx/pattern_details_screen.py` now exposes the
|
||||||
|
pattern-level override flow.
|
||||||
|
- `src/ffx/cli.py` now resolves shifted numbering during `convert` from:
|
||||||
|
pattern-level match, then show-level match, then identity mapping.
|
||||||
|
- `src/ffx/database.py` now migrates version-2 databases to version 3 by
|
||||||
|
preserving existing show-level rows and extending the schema for pattern-level
|
||||||
|
ownership.
|
||||||
|
|
||||||
|
## Risks
|
||||||
|
|
||||||
|
- The current CLI groups `--show`, `--season`, and `--episode` under one
|
||||||
|
override bucket used for TMDB-related behavior. Source-domain versus
|
||||||
|
target-domain semantics of each override must stay documented clearly so
|
||||||
|
stored shifting is neither skipped nor double-applied unexpectedly.
|
||||||
|
- Existing version-2 databases only contain show-owned shifted-season rows, so a
|
||||||
|
version-3 migration must preserve those rows as the show-level fallback layer.
|
||||||
|
- Current modern automated test coverage for shifted-season behavior is light,
|
||||||
|
so precedence, migration, and convert-time numbering behavior need focused
|
||||||
|
tests.
|
||||||
74
requirements/subtrack_mapping.md
Normal file
74
requirements/subtrack_mapping.md
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
# Subtrack Mapping
|
||||||
|
|
||||||
|
This file defines the behavioral contract for mapping input subtracks to output
|
||||||
|
subtracks during conversion.
|
||||||
|
|
||||||
|
Primary source: actual tool code in `src/ffx/`.
|
||||||
|
Secondary source: `tests/legacy/`, used only to clarify intent and reveal gaps.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
- Ensuring each target subtrack is created from the corresponding source-subtrack information, including stream-level metadata.
|
||||||
|
- Mapping input streams to output streams during conversion.
|
||||||
|
- Using persisted pattern-track definitions from the database as the target schema.
|
||||||
|
- Allowing omission and reordering of retained tracks.
|
||||||
|
- Keeping stream-level metadata attached to the correct source-derived logical track after remapping.
|
||||||
|
- Normalizing target output into ordered track groups: video, audio, subtitle, then special types such as fonts or images.
|
||||||
|
|
||||||
|
## Terms
|
||||||
|
|
||||||
|
- `source_index`: identity of the originating input stream from ffprobe or an imported source descriptor.
|
||||||
|
- `index`: final output-track order across all retained tracks.
|
||||||
|
- `sub_index`: per-type position within the retained tracks of one type, for example audio stream `0` or subtitle stream `1`.
|
||||||
|
- `target schema`: stored or constructed output-track definition that decides which tracks are kept, omitted, reordered, and rewritten.
|
||||||
|
- `separate source file`: additional file bound to one target track slot whose media payload replaces the regular source payload for that slot.
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- `SUBTRACK_MAPPING-0001`: The system shall represent source-stream identity separately from output order. `source_index`, `index`, and `sub_index` are distinct concepts and shall not be collapsed into one field.
|
||||||
|
- `SUBTRACK_MAPPING-0002`: The system shall derive `source_index` for probed tracks from the original ffprobe stream index and preserve that identity through conversion planning.
|
||||||
|
- `SUBTRACK_MAPPING-0003`: Pattern-backed track definitions stored in the database shall persist both target output order and originating source-stream identity.
|
||||||
|
- `SUBTRACK_MAPPING-0004`: When a filename matches a pattern, the pattern target schema shall be the source of truth for which source tracks are retained, which are omitted, and in what order retained tracks appear in the output.
|
||||||
|
- `SUBTRACK_MAPPING-0005`: A target track may refer only to an existing source track of the same type. Conversion shall fail fast when a target track refers to a nonexistent source stream or a source stream of a different type.
|
||||||
|
- `SUBTRACK_MAPPING-0006`: The ffmpeg mapping phase shall be generated from target output order while resolving each retained output track back to its originating source stream via `source_index`.
|
||||||
|
- `SUBTRACK_MAPPING-0007`: Reordering and omission shall preserve logical track identity. Stream-level metadata, titles, languages, and disposition decisions shall stay attached to the correct source-derived logical track after mapping.
|
||||||
|
- `SUBTRACK_MAPPING-0008`: The system shall support one-off CLI stream-order overrides without requiring prior database edits.
|
||||||
|
- `SUBTRACK_MAPPING-0009`: Operator-facing inspection and editing surfaces shall expose enough source-versus-target information to let a user reason about subtrack mapping decisions.
|
||||||
|
- `SUBTRACK_MAPPING-0010`: Test coverage for subtrack mapping shall assert source-derived identity, omission, and output order explicitly. Final track counts or final type sequences alone are insufficient proof of correct mapping.
|
||||||
|
- `SUBTRACK_MAPPING-0011`: Retained target tracks shall appear in ordered groups: video track or tracks first, then audio tracks, then subtitle tracks, then special types such as fonts or images. Within each group, the target schema shall define the order.
|
||||||
|
- `SUBTRACK_MAPPING-0012`: Track omission is valid when required by output compatibility, when needed to normalize source tracks into the required target group order and schema, or when explicitly requested by database rules or CLI options.
|
||||||
|
- `SUBTRACK_MAPPING-0013`: If source tracks do not already comply with the required target group order, conversion shall reorder retained tracks to match the target ordering contract without losing source-track identity or stream-level metadata lineage.
|
||||||
|
|
||||||
|
## Separate Additional Source Files
|
||||||
|
|
||||||
|
- `SUBTRACK_MAPPING-0014`: A separate source file may substitute the media payload of one target subtrack without changing that target track's intended output position.
|
||||||
|
- `SUBTRACK_MAPPING-0015`: When a separate source file is used, the target track shall remain bound to the corresponding logical source track for mapping, validation, and metadata lineage.
|
||||||
|
- `SUBTRACK_MAPPING-0016`: Metadata for a substituted target track shall be merged from the regular source track and the separate source file when available.
|
||||||
|
- `SUBTRACK_MAPPING-0017`: If the separate source file provides a metadata field that is also present on the regular source track, the separate source file value shall win in the target output.
|
||||||
|
- `SUBTRACK_MAPPING-0018`: If a metadata field is absent from the separate source file, the system shall fall back to the corresponding metadata from the regular source track or target schema rewrite rules.
|
||||||
|
|
||||||
|
## Acceptance
|
||||||
|
|
||||||
|
- Given a source media descriptor and a pattern-backed target schema, the planned output tracks can be listed in final output order and each retained track can still be traced to one originating source stream.
|
||||||
|
- Planned output order follows grouped target order: video, audio, subtitle, then special types.
|
||||||
|
- Tracks not referenced by the target schema are omitted from output mapping.
|
||||||
|
- Tracks may also be omitted when they are incompatible with the chosen output format or explicitly excluded by database or CLI rules.
|
||||||
|
- Two retained target tracks never originate from the same source stream unless duplication is implemented explicitly as a separate feature.
|
||||||
|
- If target-track metadata is rewritten after reordering, it is written onto the correct source-derived logical track rather than the track that merely occupies the same final output position.
|
||||||
|
- Invalid target-to-source references fail deterministically before the conversion job is launched.
|
||||||
|
- If a separate source file substitutes one target track, that track keeps its target slot and ordering while metadata is merged with separate-file values taking precedence when both sides provide the same field.
|
||||||
|
- A test proving subtrack mapping must assert at least one of: exact `source_index` to output-order mapping, omission of named source tracks, or preservation of per-track metadata after reorder.
|
||||||
|
|
||||||
|
## Test Notes
|
||||||
|
|
||||||
|
- `tests/legacy/scenario.py` names pattern behavior as `Filter/Reorder Tracks`.
|
||||||
|
- `tests/legacy/scenario_4.py` is the strongest end-to-end signal because it runs DB-backed conversion and reapplies source indices before asserting results.
|
||||||
|
- `tests/legacy/track_tag_combinator_2_0.py` and `tests/legacy/track_tag_combinator_3_4.py` sort result tracks by `source_index` before checking tags, which matches the intended identity model.
|
||||||
|
- Legacy permutation combinators define permutations but their assertion functions are stubs.
|
||||||
|
- Some legacy scenarios produce `AP` and `SP` selectors but do not execute them.
|
||||||
|
|
||||||
|
## Risks
|
||||||
|
|
||||||
|
- `src/ffx/media_descriptor.py` contains an explicit `rearrangeTrackDescriptors()` path whose current implementation appears defective and under-tested.
|
||||||
|
- Separate-source-file metadata precedence is only partly expressed in current implementation paths and should be covered directly in the rewritten test suite.
|
||||||
|
- Production code expresses the mapping contract more clearly than the legacy harness, so a rewrite should add direct logic-level tests for mapping and reorder planning.
|
||||||
144
requirements/tests.md
Normal file
144
requirements/tests.md
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
# Test Rewrite
|
||||||
|
|
||||||
|
This file captures the structure executed by `tests/legacy_runner.py` today and
|
||||||
|
defines the target shape for a complete rewrite.
|
||||||
|
|
||||||
|
Detailed product rules for source-to-target subtrack mapping live in
|
||||||
|
`requirements/subtrack_mapping.md`. This file describes only how tests cover
|
||||||
|
that area.
|
||||||
|
|
||||||
|
## Interpreter Requirement
|
||||||
|
|
||||||
|
- Agents shall run Python-side test commands with `~/.local/share/ffx.venv/bin/python`.
|
||||||
|
- This applies to the legacy harness, `unittest`, `pytest`, helper scripts, and `python -m ffx ...` test invocations.
|
||||||
|
- Agents shall not silently substitute `python`, `python3`, or another interpreter for Python-side test work.
|
||||||
|
- If `~/.local/share/ffx.venv/bin/python` is missing or not executable, agents shall stop and report the missing venv instead of continuing with Python-side test execution.
|
||||||
|
|
||||||
|
## Shell Environment Requirement
|
||||||
|
|
||||||
|
- Agents shall source `~/.bashrc` from an interactive Bash shell before running TMDB-dependent test commands or TMDB-dependent `python -m ffx ...` test invocations.
|
||||||
|
- Agents shall not source `~/.bashrc.d/interactive/77_tmdb.sh` directly for normal test work; `~/.bashrc` is the required entry point.
|
||||||
|
- In automation this means agents shall use an interactive Bash invocation such as `bash -ic 'source ~/.bashrc && ...'`, because a non-interactive `bash -lc` returns from `~/.bashrc` before the interactive fragments are loaded.
|
||||||
|
- If sourcing `~/.bashrc` still does not provide required shell environment such as `TMDB_API_KEY`, agents shall stop and report the missing environment instead of continuing with TMDB-dependent test execution.
|
||||||
|
|
||||||
|
## Current Harness
|
||||||
|
|
||||||
|
- Entrypoint: `~/.local/share/ffx.venv/bin/python tests/legacy_runner.py run`
|
||||||
|
- Runner style: custom Click CLI, not `pytest` or `unittest`
|
||||||
|
- Commands:
|
||||||
|
- `run`: discover scenario files, instantiate each scenario, run yielded jobs
|
||||||
|
- `dupe`: helper command that creates duplicate media fixtures; not part of the test run
|
||||||
|
- Filters: `--scenario`, `--variant`, `--limit`
|
||||||
|
- Shared context:
|
||||||
|
- builds one mutable dict for the whole run
|
||||||
|
- installs loggers and writes `ffx_test_report.log`
|
||||||
|
- creates `ConfigurationController` eagerly
|
||||||
|
- tracks only passed and failed counters
|
||||||
|
- Discovery:
|
||||||
|
- scenario files: `tests/legacy/scenario_*.py`
|
||||||
|
- combinators: `glob + importlib + inspect` by filename convention
|
||||||
|
- ordering: implicit glob order, no explicit sorting
|
||||||
|
- Skip behavior:
|
||||||
|
- Scenario 4 is skipped when `TMDB_API_KEY` is missing
|
||||||
|
- only `TMDB_API_KEY_NOT_PRESENT_EXCEPTION` is caught at scenario construction time
|
||||||
|
|
||||||
|
## Current Scenarios
|
||||||
|
|
||||||
|
- `1`: `tests/legacy/scenario_1.py`
|
||||||
|
- focus: basename generation without pattern lookup or TMDB
|
||||||
|
- inputs per job: `1`
|
||||||
|
- jobs: `140`
|
||||||
|
- expected failures: `0`
|
||||||
|
- execution: build one synthetic source file, run `~/.local/share/ffx.venv/bin/python -m ffx convert`, assert filename selectors only
|
||||||
|
- selectors executed: `B`, `L`, `I`
|
||||||
|
- selectors defined but not executed: `S`, `R`
|
||||||
|
- `2`: `tests/legacy/scenario_2.py`
|
||||||
|
- focus: conversion matrix over media layouts, dispositions, tags, and permutations
|
||||||
|
- inputs per job: `1`
|
||||||
|
- jobs: `8193`
|
||||||
|
- expected failures: `3267`
|
||||||
|
- execution: build one synthetic source file, run `~/.local/share/ffx.venv/bin/python -m ffx convert`, probe result with `FileProperties`, assert track layout and selected audio and subtitle metadata
|
||||||
|
- selectors executed: `M`, `AD`, `AT`, `SD`, `ST`
|
||||||
|
- selectors defined but not executed: `MT`, `AP`, `SP`, `J`
|
||||||
|
- `4`: `tests/legacy/scenario_4.py`
|
||||||
|
- focus: pattern-driven batch conversion with SQLite state and live TMDB naming
|
||||||
|
- inputs per job: `6`
|
||||||
|
- jobs: `768`
|
||||||
|
- expected failures: `336`
|
||||||
|
- execution: build six synthetic preset files, recreate temp SQLite DB, insert show and pattern, run one batch convert command via `~/.local/share/ffx.venv/bin/python`, query TMDB during assertions
|
||||||
|
- selectors executed: `M`, `AD`, `AT`, `SD`, `ST`
|
||||||
|
- selectors defined but not executed: `MT`, `AP`, `SP`, `J`
|
||||||
|
- notes:
|
||||||
|
- uses `MediaCombinator6` only
|
||||||
|
- issues live HTTP requests through `TmdbController` with no request cache
|
||||||
|
|
||||||
|
## Current Combinator Families
|
||||||
|
|
||||||
|
- scenario files discovered: `3`
|
||||||
|
- basename combinators discovered: `2`
|
||||||
|
- media combinators discovered: `8`
|
||||||
|
- media tag combinators discovered: `3`
|
||||||
|
- disposition combinator 2 variants: `4`
|
||||||
|
- disposition combinator 3 variants: `5`
|
||||||
|
- track tag combinator 2 variants: `4`
|
||||||
|
- track tag combinator 3 variants: `5`
|
||||||
|
- indicator variants: `7`
|
||||||
|
- label variants: `2`
|
||||||
|
- show variants: `3`
|
||||||
|
- release variants: `3`
|
||||||
|
- permutation 2 variants: `2`
|
||||||
|
- permutation 3 variants: `3`
|
||||||
|
|
||||||
|
## Current Totals
|
||||||
|
|
||||||
|
- full run without TMDB: `8333`
|
||||||
|
- full run with TMDB: `9101`
|
||||||
|
- Scenario 4 generated source files: `4608`
|
||||||
|
- Scenario 4 live TMDB episode queries: `4608`
|
||||||
|
|
||||||
|
## Current Behavior Areas
|
||||||
|
|
||||||
|
- output basename rules for label, season and episode indicator, show name, and release suffix combinations
|
||||||
|
- track layout normalization across the eight media combinator shapes from `VA` through `VAASSS`
|
||||||
|
- two-track and three-track disposition edge cases, including intentional failure cases
|
||||||
|
- two-track and three-track track-tag preservation checks, including checks that sort results by source identity
|
||||||
|
- container-level media tag handling
|
||||||
|
- pattern-backed conversion against a temporary SQLite database
|
||||||
|
- TMDB-assisted episode naming for batch conversion
|
||||||
|
|
||||||
|
## Structural Findings
|
||||||
|
|
||||||
|
- The suite is process-heavy: most jobs run `ffmpeg` to generate a fixture and then spawn the FFX CLI as a subprocess.
|
||||||
|
- The suite is integration-first and has almost no isolated unit-level coverage for pure logic.
|
||||||
|
- The base `Combinator` class is a placeholder and is not the real abstraction boundary used by the suite.
|
||||||
|
- Many combinator methods are placeholders: there are `25` `pass` statements across the current test modules.
|
||||||
|
- Several assertion families are never executed because scenario selector dispatch is incomplete.
|
||||||
|
- Scenario comments mention a Scenario 3, but no `scenario_3.py` exists.
|
||||||
|
- `tests/legacy/_basename_combinator_1.py` is effectively orphaned because discovery only matches `basename_combinator_*.py`.
|
||||||
|
- `tests/legacy/disposition_combinator_2_3 .py` contains an embedded space in the filename and is still part of discovery.
|
||||||
|
- Expected failures are validated only as subprocess return-code matches, not as specific error types or messages.
|
||||||
|
- The current suite depends on `ffmpeg`, `ffprobe`, SQLite, the local Python environment, and for Scenario 4 a live TMDB API key plus network access.
|
||||||
|
|
||||||
|
## Rewrite Target
|
||||||
|
|
||||||
|
- Replace the custom Click harness with a standard test runner, preferably `pytest`.
|
||||||
|
- Split the suite into explicit layers: unit, integration, and optional external-system tests.
|
||||||
|
- Keep unit tests as the default path and make them runnable without `ffmpeg`, `ffprobe`, TMDB, or a user config directory.
|
||||||
|
- Model discovery explicitly in code instead of relying on glob-plus-reflection naming conventions.
|
||||||
|
- Convert the current Cartesian-product combinators into readable parametrized cases grouped by behavior area.
|
||||||
|
- Preserve the current behavior areas, but represent them with targeted cases instead of thousands of opaque variant IDs.
|
||||||
|
- Make every assertion family explicit and executable; there must be no selector that is produced but never consumed.
|
||||||
|
- Replace live TMDB access with fixtures or mocks in normal runs; any live-contract test must be opt-in.
|
||||||
|
- Replace ad hoc subprocess return-code checks with assertions on typed exceptions, stderr content, or structured outputs.
|
||||||
|
- Provide small reusable media fixtures or fixture builders so only a narrow integration slice needs `ffmpeg`-generated media.
|
||||||
|
- Make database tests self-contained and fast through temporary databases and direct controller-level assertions.
|
||||||
|
- Make ordering, naming, and selection deterministic so a contributor can predict exactly what will run.
|
||||||
|
- Expose a small smoke suite for quick local runs and CI, plus a separately marked slower integration suite.
|
||||||
|
- Prefer domain-oriented test modules over combinator-family modules: basename, pattern matching, metadata rewrite, track ordering, TMDB naming, CLI smoke, and failure handling.
|
||||||
|
|
||||||
|
## Rewrite Acceptance
|
||||||
|
|
||||||
|
- A default local test run finishes quickly and without network access.
|
||||||
|
- A contributor can identify which behavior a failing test covers without decoding variant strings like `VAASSS-A:D10-S:T001`.
|
||||||
|
- All current intended failure behaviors remain covered, but each one is asserted directly and readably.
|
||||||
|
- The rewritten suite can be adopted by CI without requiring live TMDB credentials.
|
||||||
0
src/ffx/__init__.py
Normal file
0
src/ffx/__init__.py
Normal file
9
src/ffx/__main__.py
Normal file
9
src/ffx/__main__.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from .cli import ffx
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Console entry point: hand control to the ffx Click command group."""
    ffx()


if __name__ == "__main__":
    main()
|
||||||
220
src/ffx/_iso_language.py
Normal file
220
src/ffx/_iso_language.py
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
from enum import Enum
|
||||||
|
import difflib
|
||||||
|
|
||||||
|
|
||||||
|
class IsoLanguage(Enum):
    """ISO 639 language registry.

    Each member's value is a mapping with:
      - ``name``: English display name,
      - ``iso639_1``: two-letter ISO 639-1 code,
      - ``iso639_2``: list of three-letter ISO 639-2 codes
        (terminological first, bibliographic variant second where one exists).
    """

    ABKHAZIAN = {"name": "Abkhazian", "iso639_1": "ab", "iso639_2": ["abk"]}
    AFAR = {"name": "Afar", "iso639_1": "aa", "iso639_2": ["aar"]}
    AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]}
    AKAN = {"name": "Akan", "iso639_1": "ak", "iso639_2": ["aka"]}
    ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["sqi", "alb"]}
    AMHARIC = {"name": "Amharic", "iso639_1": "am", "iso639_2": ["amh"]}
    ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]}
    ARAGONESE = {"name": "Aragonese", "iso639_1": "an", "iso639_2": ["arg"]}
    ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["hye", "arm"]}
    ASSAMESE = {"name": "Assamese", "iso639_1": "as", "iso639_2": ["asm"]}
    AVARIC = {"name": "Avaric", "iso639_1": "av", "iso639_2": ["ava"]}
    AVESTAN = {"name": "Avestan", "iso639_1": "ae", "iso639_2": ["ave"]}
    AYMARA = {"name": "Aymara", "iso639_1": "ay", "iso639_2": ["aym"]}
    AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]}
    BAMBARA = {"name": "Bambara", "iso639_1": "bm", "iso639_2": ["bam"]}
    BASHKIR = {"name": "Bashkir", "iso639_1": "ba", "iso639_2": ["bak"]}
    BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["eus", "baq"]}
    BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]}
    BENGALI = {"name": "Bengali", "iso639_1": "bn", "iso639_2": ["ben"]}
    BISLAMA = {"name": "Bislama", "iso639_1": "bi", "iso639_2": ["bis"]}
    BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]}
    BOSNIAN = {"name": "Bosnian", "iso639_1": "bs", "iso639_2": ["bos"]}
    BRETON = {"name": "Breton", "iso639_1": "br", "iso639_2": ["bre"]}
    BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]}
    BURMESE = {"name": "Burmese", "iso639_1": "my", "iso639_2": ["mya", "bur"]}
    CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]}
    CHAMORRO = {"name": "Chamorro", "iso639_1": "ch", "iso639_2": ["cha"]}
    CHECHEN = {"name": "Chechen", "iso639_1": "ce", "iso639_2": ["che"]}
    CHICHEWA = {"name": "Chichewa", "iso639_1": "ny", "iso639_2": ["nya"]}
    CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]}
    CHURCH_SLAVIC = {"name": "Church Slavic", "iso639_1": "cu", "iso639_2": ["chu"]}
    CHUVASH = {"name": "Chuvash", "iso639_1": "cv", "iso639_2": ["chv"]}
    CORNISH = {"name": "Cornish", "iso639_1": "kw", "iso639_2": ["cor"]}
    CORSICAN = {"name": "Corsican", "iso639_1": "co", "iso639_2": ["cos"]}
    CREE = {"name": "Cree", "iso639_1": "cr", "iso639_2": ["cre"]}
    CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]}
    CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["ces", "cze"]}
    DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]}
    DIVEHI = {"name": "Divehi", "iso639_1": "dv", "iso639_2": ["div"]}
    DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]}
    DZONGKHA = {"name": "Dzongkha", "iso639_1": "dz", "iso639_2": ["dzo"]}
    ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]}
    ESPERANTO = {"name": "Esperanto", "iso639_1": "eo", "iso639_2": ["epo"]}
    ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]}
    EWE = {"name": "Ewe", "iso639_1": "ee", "iso639_2": ["ewe"]}
    FAROESE = {"name": "Faroese", "iso639_1": "fo", "iso639_2": ["fao"]}
    FIJIAN = {"name": "Fijian", "iso639_1": "fj", "iso639_2": ["fij"]}
    FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]}
    FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]}
    FULAH = {"name": "Fulah", "iso639_1": "ff", "iso639_2": ["ful"]}
    GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]}
    GANDA = {"name": "Ganda", "iso639_1": "lg", "iso639_2": ["lug"]}
    GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["kat", "geo"]}
    GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]}
    GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["ell", "gre"]}
    GUARANI = {"name": "Guarani", "iso639_1": "gn", "iso639_2": ["grn"]}
    GUJARATI = {"name": "Gujarati", "iso639_1": "gu", "iso639_2": ["guj"]}
    HAITIAN = {"name": "Haitian", "iso639_1": "ht", "iso639_2": ["hat"]}
    HAUSA = {"name": "Hausa", "iso639_1": "ha", "iso639_2": ["hau"]}
    HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]}
    HERERO = {"name": "Herero", "iso639_1": "hz", "iso639_2": ["her"]}
    HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]}
    HIRI_MOTU = {"name": "Hiri Motu", "iso639_1": "ho", "iso639_2": ["hmo"]}
    HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]}
    ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["isl", "ice"]}
    IDO = {"name": "Ido", "iso639_1": "io", "iso639_2": ["ido"]}
    IGBO = {"name": "Igbo", "iso639_1": "ig", "iso639_2": ["ibo"]}
    INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]}
    INTERLINGUA = {"name": "Interlingua", "iso639_1": "ia", "iso639_2": ["ina"]}
    INTERLINGUE = {"name": "Interlingue", "iso639_1": "ie", "iso639_2": ["ile"]}
    INUKTITUT = {"name": "Inuktitut", "iso639_1": "iu", "iso639_2": ["iku"]}
    INUPIAQ = {"name": "Inupiaq", "iso639_1": "ik", "iso639_2": ["ipk"]}
    IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]}
    ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]}
    JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]}
    JAVANESE = {"name": "Javanese", "iso639_1": "jv", "iso639_2": ["jav"]}
    KALAALLISUT = {"name": "Kalaallisut", "iso639_1": "kl", "iso639_2": ["kal"]}
    KANNADA = {"name": "Kannada", "iso639_1": "kn", "iso639_2": ["kan"]}
    KANURI = {"name": "Kanuri", "iso639_1": "kr", "iso639_2": ["kau"]}
    KASHMIRI = {"name": "Kashmiri", "iso639_1": "ks", "iso639_2": ["kas"]}
    KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]}
    KHMER = {"name": "Khmer", "iso639_1": "km", "iso639_2": ["khm"]}
    KIKUYU = {"name": "Kikuyu", "iso639_1": "ki", "iso639_2": ["kik"]}
    KINYARWANDA = {"name": "Kinyarwanda", "iso639_1": "rw", "iso639_2": ["kin"]}
    KIRGHIZ = {"name": "Kirghiz", "iso639_1": "ky", "iso639_2": ["kir"]}
    KOMI = {"name": "Komi", "iso639_1": "kv", "iso639_2": ["kom"]}
    KONGO = {"name": "Kongo", "iso639_1": "kg", "iso639_2": ["kon"]}
    KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]}
    KUANYAMA = {"name": "Kuanyama", "iso639_1": "kj", "iso639_2": ["kua"]}
    KURDISH = {"name": "Kurdish", "iso639_1": "ku", "iso639_2": ["kur"]}
    LAO = {"name": "Lao", "iso639_1": "lo", "iso639_2": ["lao"]}
    LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]}
    LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]}
    LIMBURGAN = {"name": "Limburgan", "iso639_1": "li", "iso639_2": ["lim"]}
    LINGALA = {"name": "Lingala", "iso639_1": "ln", "iso639_2": ["lin"]}
    LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]}
    LUBA_KATANGA = {"name": "Luba-Katanga", "iso639_1": "lu", "iso639_2": ["lub"]}
    LUXEMBOURGISH = {"name": "Luxembourgish", "iso639_1": "lb", "iso639_2": ["ltz"]}
    MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mkd", "mac"]}
    MALAGASY = {"name": "Malagasy", "iso639_1": "mg", "iso639_2": ["mlg"]}
    MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["msa", "may"]}
    MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]}
    MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]}
    MANX = {"name": "Manx", "iso639_1": "gv", "iso639_2": ["glv"]}
    MAORI = {"name": "Maori", "iso639_1": "mi", "iso639_2": ["mri", "mao"]}
    MARATHI = {"name": "Marathi", "iso639_1": "mr", "iso639_2": ["mar"]}
    MARSHALLESE = {"name": "Marshallese", "iso639_1": "mh", "iso639_2": ["mah"]}
    MONGOLIAN = {"name": "Mongolian", "iso639_1": "mn", "iso639_2": ["mon"]}
    NAURU = {"name": "Nauru", "iso639_1": "na", "iso639_2": ["nau"]}
    NAVAJO = {"name": "Navajo", "iso639_1": "nv", "iso639_2": ["nav"]}
    NDONGA = {"name": "Ndonga", "iso639_1": "ng", "iso639_2": ["ndo"]}
    NEPALI = {"name": "Nepali", "iso639_1": "ne", "iso639_2": ["nep"]}
    NORTH_NDEBELE = {"name": "North Ndebele", "iso639_1": "nd", "iso639_2": ["nde"]}
    NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]}
    NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]}
    NORWEGIAN_NYNORSK = {"name": "Nynorsk", "iso639_1": "nn", "iso639_2": ["nno"]}
    OCCITAN = {"name": "Occitan", "iso639_1": "oc", "iso639_2": ["oci"]}
    OJIBWA = {"name": "Ojibwa", "iso639_1": "oj", "iso639_2": ["oji"]}
    ORIYA = {"name": "Oriya", "iso639_1": "or", "iso639_2": ["ori"]}
    OROMO = {"name": "Oromo", "iso639_1": "om", "iso639_2": ["orm"]}
    OSSETIAN = {"name": "Ossetian", "iso639_1": "os", "iso639_2": ["oss"]}
    PALI = {"name": "Pali", "iso639_1": "pi", "iso639_2": ["pli"]}
    PANJABI = {"name": "Panjabi", "iso639_1": "pa", "iso639_2": ["pan"]}
    PERSIAN = {"name": "Persian", "iso639_1": "fa", "iso639_2": ["fas", "per"]}
    POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]}
    PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]}
    PUSHTO = {"name": "Pushto", "iso639_1": "ps", "iso639_2": ["pus"]}
    QUECHUA = {"name": "Quechua", "iso639_1": "qu", "iso639_2": ["que"]}
    ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["ron", "rum"]}
    ROMANSH = {"name": "Romansh", "iso639_1": "rm", "iso639_2": ["roh"]}
    RUNDI = {"name": "Rundi", "iso639_1": "rn", "iso639_2": ["run"]}
    RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]}
    SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]}
    SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]}
    SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]}
    SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]}
    SCOTTISH_GAELIC = {"name": "Scottish Gaelic", "iso639_1": "gd", "iso639_2": ["gla"]}
    SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]}
    SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]}
    SICHUAN_YI = {"name": "Sichuan Yi", "iso639_1": "ii", "iso639_2": ["iii"]}
    SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]}
    SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]}
    SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk", "slo"]}
    SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]}
    SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]}
    SOUTH_NDEBELE = {"name": "South Ndebele", "iso639_1": "nr", "iso639_2": ["nbl"]}
    SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]}
    SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]}
    SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]}
    SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]}
    SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]}
    SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]}
    TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]}
    TAHITIAN = {"name": "Tahitian", "iso639_1": "ty", "iso639_2": ["tah"]}
    TAJIK = {"name": "Tajik", "iso639_1": "tg", "iso639_2": ["tgk"]}
    TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]}
    TATAR = {"name": "Tatar", "iso639_1": "tt", "iso639_2": ["tat"]}
    TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]}
    THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]}
    TIBETAN = {"name": "Tibetan", "iso639_1": "bo", "iso639_2": ["bod", "tib"]}
    TIGRINYA = {"name": "Tigrinya", "iso639_1": "ti", "iso639_2": ["tir"]}
    TONGA = {"name": "Tonga", "iso639_1": "to", "iso639_2": ["ton"]}
    TSONGA = {"name": "Tsonga", "iso639_1": "ts", "iso639_2": ["tso"]}
    TSWANA = {"name": "Tswana", "iso639_1": "tn", "iso639_2": ["tsn"]}
    TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]}
    TURKMEN = {"name": "Turkmen", "iso639_1": "tk", "iso639_2": ["tuk"]}
    TWI = {"name": "Twi", "iso639_1": "tw", "iso639_2": ["twi"]}
    UIGHUR = {"name": "Uighur", "iso639_1": "ug", "iso639_2": ["uig"]}
    UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]}
    URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]}
    UZBEK = {"name": "Uzbek", "iso639_1": "uz", "iso639_2": ["uzb"]}
    VENDA = {"name": "Venda", "iso639_1": "ve", "iso639_2": ["ven"]}
    VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2": ["vie"]}
    VOLAPUK = {"name": "Volapük", "iso639_1": "vo", "iso639_2": ["vol"]}
    WALLOON = {"name": "Walloon", "iso639_1": "wa", "iso639_2": ["wln"]}
    WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["cym", "wel"]}
    WESTERN_FRISIAN = {"name": "Western Frisian", "iso639_1": "fy", "iso639_2": ["fry"]}
    WOLOF = {"name": "Wolof", "iso639_1": "wo", "iso639_2": ["wol"]}
    XHOSA = {"name": "Xhosa", "iso639_1": "xh", "iso639_2": ["xho"]}
    YIDDISH = {"name": "Yiddish", "iso639_1": "yi", "iso639_2": ["yid"]}
    YORUBA = {"name": "Yoruba", "iso639_1": "yo", "iso639_2": ["yor"]}
    ZHUANG = {"name": "Zhuang", "iso639_1": "za", "iso639_2": ["zha"]}
    ZULU = {"name": "Zulu", "iso639_1": "zu", "iso639_2": ["zul"]}

    # NOTE(review): shares the "tl" two-letter code with TAGALOG; ISO 639-1
    # has no code for Filipino, so this duplication looks deliberate.
    FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]}

    # Sentinel returned by every lookup that finds nothing.
    UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]}

    @staticmethod
    def find(label: str) -> "IsoLanguage":
        """Fuzzy-match ``label`` against the member display names.

        Returns the member whose name is the single closest match
        (per ``difflib.get_close_matches`` with its default cutoff),
        or ``UNDEFINED`` when nothing is similar enough.

        Robustness fix: the input is coerced with ``str()`` first, so a
        ``None`` or non-string argument no longer raises ``TypeError``
        from inside difflib.  The name->member map is also built once,
        avoiding the second full member scan of the old implementation.
        """
        membersByName = {language.value["name"]: language for language in IsoLanguage}
        closestMatches = difflib.get_close_matches(str(label), list(membersByName), n=1)
        if closestMatches:
            return membersByName[closestMatches[0]]
        return IsoLanguage.UNDEFINED

    @staticmethod
    def findThreeLetter(theeLetter: str) -> "IsoLanguage":
        """Exact lookup by an ISO 639-2 three-letter code (e.g. ``"eng"``, ``"fre"``).

        Returns ``UNDEFINED`` for unknown codes.  The parameter keeps its
        historical misspelling (``theeLetter``) so existing keyword
        callers stay compatible.
        """
        code = str(theeLetter)
        return next(
            (language for language in IsoLanguage if code in language.value["iso639_2"]),
            IsoLanguage.UNDEFINED,
        )

    def label(self) -> str:
        """Return the English display name of the language."""
        return str(self.value["name"])

    def twoLetter(self) -> str:
        """Return the ISO 639-1 two-letter code."""
        return str(self.value["iso639_1"])

    def threeLetter(self) -> str:
        """Return the primary (first-listed) ISO 639-2 three-letter code."""
        return str(self.value["iso639_2"][0])
|
||||||
71
src/ffx/audio_layout.py
Normal file
71
src/ffx/audio_layout.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from .track_type import TrackType
|
||||||
|
|
||||||
|
class AudioLayout(Enum):
    """Known audio channel layouts, keyed by ffprobe's ``channel_layout`` label.

    Each member's value maps ``label`` (the ffprobe layout string) to a
    stable integer ``index`` used elsewhere in the project.
    """

    LAYOUT_STEREO = {"label": "stereo", "index": 1}
    LAYOUT_5_1 = {"label": "5.1(side)", "index": 2}
    LAYOUT_6_1 = {"label": "6.1", "index": 3}
    LAYOUT_7_1 = {"label": "7.1", "index": 4}  # "7.1" is a standard ffmpeg layout name

    # Fallback for 6-channel streams whose exact layout is unreported.
    LAYOUT_6CH = {"label": "6ch", "index": 5}
    LAYOUT_5_0 = {"label": "5.0(side)", "index": 6}

    # Sentinel returned by every lookup that finds nothing.
    LAYOUT_UNDEFINED = {"label": "undefined", "index": 0}

    def label(self) -> str:
        """Return the ffprobe-style layout label as a string."""
        return str(self.value['label'])

    def index(self) -> int:
        """Return the stable integer id of this layout."""
        return int(self.value['index'])

    @staticmethod
    def fromLabel(label: str) -> "AudioLayout":
        """Look up a layout by its ffprobe label.

        Returns ``LAYOUT_UNDEFINED`` when the label is unknown.
        Bug fix: replaces a bare ``except:`` (which also swallowed
        KeyboardInterrupt/SystemExit) with a default-bearing lookup.
        """
        return next(
            (layout for layout in AudioLayout if layout.value['label'] == str(label)),
            AudioLayout.LAYOUT_UNDEFINED,
        )

    @staticmethod
    def fromIndex(index: int) -> "AudioLayout":
        """Look up a layout by its integer id (accepts int-convertible input).

        Returns ``LAYOUT_UNDEFINED`` for unknown ids and for values that
        cannot be converted to ``int``.  Bug fix: the previous bare
        ``except:`` also swallowed KeyboardInterrupt/SystemExit; only
        conversion failures are caught now.
        """
        try:
            targetIndex = int(index)
        except (TypeError, ValueError):
            return AudioLayout.LAYOUT_UNDEFINED
        return next(
            (layout for layout in AudioLayout if layout.value['index'] == targetIndex),
            AudioLayout.LAYOUT_UNDEFINED,
        )

    @staticmethod
    def identify(streamObj) -> "AudioLayout":
        """Classify an ffprobe audio-stream dict into an ``AudioLayout``.

        Resolution order: exact ``channel_layout`` label match, then a
        6-channel fallback to ``LAYOUT_6CH``, then ``LAYOUT_UNDEFINED``.

        Raises:
            Exception: if ``streamObj`` is not a dict describing an audio
                stream (kept as plain ``Exception`` so existing callers'
                handlers still match).
        """
        FFPROBE_LAYOUT_KEY = 'channel_layout'
        FFPROBE_CHANNELS_KEY = 'channels'
        FFPROBE_CODEC_TYPE_KEY = 'codec_type'

        if (not isinstance(streamObj, dict)
                or FFPROBE_CODEC_TYPE_KEY not in streamObj
                or streamObj[FFPROBE_CODEC_TYPE_KEY] != TrackType.AUDIO.label()):
            raise Exception('Not an ffprobe audio stream object')

        if FFPROBE_LAYOUT_KEY in streamObj:
            matchedLayout = next(
                (layout for layout in AudioLayout
                 if layout.label() == streamObj[FFPROBE_LAYOUT_KEY]),
                None,
            )
            if matchedLayout is not None:
                return matchedLayout

        # No recognized label: fall back on the raw channel count and
        # treat any 6-channel stream as the generic "6ch" layout.
        if (FFPROBE_CHANNELS_KEY in streamObj
                and int(streamObj[FFPROBE_CHANNELS_KEY]) == 6):
            return AudioLayout.LAYOUT_6CH

        return AudioLayout.LAYOUT_UNDEFINED
|
||||||
1529
src/ffx/cli.py
Executable file
1529
src/ffx/cli.py
Executable file
File diff suppressed because it is too large
Load Diff
200
src/ffx/configuration_controller.py
Normal file
200
src/ffx/configuration_controller.py
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
import os, json
|
||||||
|
|
||||||
|
from .constants import (
|
||||||
|
DEFAULT_SHOW_INDEX_EPISODE_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDEX_SEASON_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDICATOR_SEASON_DIGITS,
|
||||||
|
)
|
||||||
|
|
||||||
|
class ConfigurationController():
|
||||||
|
|
||||||
|
CONFIG_FILENAME = 'ffx.json'
|
||||||
|
DATABASE_FILENAME = 'ffx.db'
|
||||||
|
LOG_FILENAME = 'ffx.log'
|
||||||
|
|
||||||
|
DATABASE_PATH_CONFIG_KEY = 'databasePath'
|
||||||
|
LOG_DIRECTORY_CONFIG_KEY = 'logDirectory'
|
||||||
|
SUBTITLES_DIRECTORY_CONFIG_KEY = 'subtitlesDirectory'
|
||||||
|
LANGUAGE_CONFIG_KEY = 'language'
|
||||||
|
OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate'
|
||||||
|
DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY = 'defaultIndexSeasonDigits'
|
||||||
|
DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndexEpisodeDigits'
|
||||||
|
DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY = 'defaultIndicatorSeasonDigits'
|
||||||
|
DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndicatorEpisodeDigits'
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
|
||||||
|
self.__homeDir = os.path.expanduser("~")
|
||||||
|
self.__localVarDir = os.path.join(self.__homeDir, '.local', 'var')
|
||||||
|
self.__localEtcDir = os.path.join(self.__homeDir, '.local', 'etc')
|
||||||
|
|
||||||
|
self.__configurationData = {}
|
||||||
|
|
||||||
|
# .local/etc/ffx.json
|
||||||
|
self.__configFilePath = os.path.join(self.__localEtcDir, ConfigurationController.CONFIG_FILENAME)
|
||||||
|
if os.path.isfile(self.__configFilePath):
|
||||||
|
with open(self.__configFilePath, 'r') as configurationFile:
|
||||||
|
self.__configurationData = json.load(configurationFile)
|
||||||
|
|
||||||
|
if ConfigurationController.DATABASE_PATH_CONFIG_KEY in self.__configurationData.keys():
|
||||||
|
self.__databaseFilePath = self.__configurationData[ConfigurationController.DATABASE_PATH_CONFIG_KEY]
|
||||||
|
os.makedirs(os.path.dirname(self.__databaseFilePath), exist_ok=True)
|
||||||
|
else:
|
||||||
|
ffxVarDir = os.path.join(self.__localVarDir, 'ffx')
|
||||||
|
os.makedirs(ffxVarDir, exist_ok=True)
|
||||||
|
self.__databaseFilePath = os.path.join(ffxVarDir, ConfigurationController.DATABASE_FILENAME)
|
||||||
|
|
||||||
|
if ConfigurationController.LOG_DIRECTORY_CONFIG_KEY in self.__configurationData.keys():
|
||||||
|
self.__logDir = self.__configurationData[ConfigurationController.LOG_DIRECTORY_CONFIG_KEY]
|
||||||
|
else:
|
||||||
|
self.__logDir = os.path.join(self.__localVarDir, 'log')
|
||||||
|
os.makedirs(self.__logDir, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
def getHomeDirectory(self):
    """Return the user's home directory as resolved at construction time."""
    homeDirectory = self.__homeDir
    return homeDirectory
||||||
|
def getLogFilePath(self):
    """Full path of the log file inside the resolved log directory."""
    logDirectory = self.__logDir
    return os.path.join(logDirectory, ConfigurationController.LOG_FILENAME)
||||||
|
def getDatabaseFilePath(self):
    """Path of the SQLite database file resolved in __init__."""
    databaseFilePath = self.__databaseFilePath
    return databaseFilePath
||||||
|
def getSubtitlesDirectoryPath(self):
    """Configured subtitles directory with '~' expanded; '' when unset."""
    configuredDirectory = self.__configurationData.get(
        ConfigurationController.SUBTITLES_DIRECTORY_CONFIG_KEY,
        '',
    )
    if not configuredDirectory:
        return ''
    return os.path.expanduser(str(configuredDirectory))
||||||
|
def getLanguage(self):
    """Configured language value, whitespace-stripped; '' when unset."""
    languageValue = self.__configurationData.get(
        ConfigurationController.LANGUAGE_CONFIG_KEY, '')
    return str(languageValue).strip()
||||||
|
@classmethod
def getConfiguredIntegerValue(cls, configurationData: dict, configKey: str, defaultValue: int) -> int:
    """Read configKey as an int, falling back to defaultValue when the key
    is missing or its value cannot be converted."""
    rawValue = configurationData.get(configKey, defaultValue)
    try:
        return int(rawValue)
    except (TypeError, ValueError):
        # Non-numeric configuration entry: fall back instead of crashing.
        return int(defaultValue)
||||||
|
def getDefaultIndexSeasonDigits(self):
    """Digit count for the season component of the default index."""
    configKey = ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY
    return ConfigurationController.getConfiguredIntegerValue(
        self.__configurationData, configKey, DEFAULT_SHOW_INDEX_SEASON_DIGITS)

def getDefaultIndexEpisodeDigits(self):
    """Digit count for the episode component of the default index."""
    configKey = ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY
    return ConfigurationController.getConfiguredIntegerValue(
        self.__configurationData, configKey, DEFAULT_SHOW_INDEX_EPISODE_DIGITS)

def getDefaultIndicatorSeasonDigits(self):
    """Digit count for the season component of the default indicator."""
    configKey = ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY
    return ConfigurationController.getConfiguredIntegerValue(
        self.__configurationData, configKey, DEFAULT_SHOW_INDICATOR_SEASON_DIGITS)

def getDefaultIndicatorEpisodeDigits(self):
    """Digit count for the episode component of the default indicator."""
    configKey = ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY
    return ConfigurationController.getConfiguredIntegerValue(
        self.__configurationData, configKey, DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS)
||||||
|
def getData(self):
    """Expose the raw parsed configuration dict (shared reference, not a copy)."""
    configurationData = self.__configurationData
    return configurationData
|
|
||||||
|
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def addPattern(self, patternDescriptor):
|
||||||
|
#
|
||||||
|
# try:
|
||||||
|
#
|
||||||
|
# s = self.Session()
|
||||||
|
# q = s.query(Pattern).filter(Pattern.show_id == int(patternDescriptor['show_id']),
|
||||||
|
# Pattern.pattern == str(patternDescriptor['pattern']))
|
||||||
|
#
|
||||||
|
# if not q.count():
|
||||||
|
# pattern = Pattern(show_id = int(patternDescriptor['show_id']),
|
||||||
|
# pattern = str(patternDescriptor['pattern']))
|
||||||
|
# s.add(pattern)
|
||||||
|
# s.commit()
|
||||||
|
# return pattern.getId()
|
||||||
|
# else:
|
||||||
|
# return 0
|
||||||
|
#
|
||||||
|
# except Exception as ex:
|
||||||
|
# raise click.ClickException(f"PatternController.addPattern(): {repr(ex)}")
|
||||||
|
# finally:
|
||||||
|
# s.close()
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def updatePattern(self, patternId, patternDescriptor):
|
||||||
|
#
|
||||||
|
# try:
|
||||||
|
# s = self.Session()
|
||||||
|
# q = s.query(Pattern).filter(Pattern.id == int(patternId))
|
||||||
|
#
|
||||||
|
# if q.count():
|
||||||
|
#
|
||||||
|
# pattern = q.first()
|
||||||
|
#
|
||||||
|
# pattern.show_id = int(patternDescriptor['show_id'])
|
||||||
|
# pattern.pattern = str(patternDescriptor['pattern'])
|
||||||
|
#
|
||||||
|
# s.commit()
|
||||||
|
# return True
|
||||||
|
#
|
||||||
|
# else:
|
||||||
|
# return False
|
||||||
|
#
|
||||||
|
# except Exception as ex:
|
||||||
|
# raise click.ClickException(f"PatternController.updatePattern(): {repr(ex)}")
|
||||||
|
# finally:
|
||||||
|
# s.close()
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def findPattern(self, patternDescriptor):
|
||||||
|
#
|
||||||
|
# try:
|
||||||
|
# s = self.Session()
|
||||||
|
# q = s.query(Pattern).filter(Pattern.show_id == int(patternDescriptor['show_id']), Pattern.pattern == str(patternDescriptor['pattern']))
|
||||||
|
#
|
||||||
|
# if q.count():
|
||||||
|
# pattern = q.first()
|
||||||
|
# return int(pattern.id)
|
||||||
|
# else:
|
||||||
|
# return None
|
||||||
|
#
|
||||||
|
# except Exception as ex:
|
||||||
|
# raise click.ClickException(f"PatternController.findPattern(): {repr(ex)}")
|
||||||
|
# finally:
|
||||||
|
# s.close()
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def getPattern(self, patternId : int):
|
||||||
|
#
|
||||||
|
# if type(patternId) is not int:
|
||||||
|
# raise ValueError(f"PatternController.getPattern(): Argument patternId is required to be of type int")
|
||||||
|
#
|
||||||
|
# try:
|
||||||
|
# s = self.Session()
|
||||||
|
# q = s.query(Pattern).filter(Pattern.id == int(patternId))
|
||||||
|
#
|
||||||
|
# return q.first() if q.count() else None
|
||||||
|
#
|
||||||
|
# except Exception as ex:
|
||||||
|
# raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}")
|
||||||
|
# finally:
|
||||||
|
# s.close()
|
||||||
|
#
|
||||||
71
src/ffx/confirm_screen.py
Normal file
71
src/ffx/confirm_screen.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
from textual.containers import Grid
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Button, Footer, Header, Static
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
|
||||||
|
class ConfirmScreen(Screen):
    """Modal yes/no screen; dismisses with True on confirm, False otherwise."""

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    CSS = """

    Grid {
        grid-size: 4 7;
        grid-rows: 2 2 2 2 2 2 2;
        grid-columns: 1fr 1fr 1fr 1fr;
        height: 100%;
        width: 100%;
        min-width: 80;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Button {
        border: none;
    }

    .four {
        column-span: 4;
    }
    """

    def __init__(
        self,
        message: str,
        confirm_label: str = "Confirm",
        cancel_label: str = "Cancel",
    ):
        """message is shown verbatim; button labels are passed through t() for i18n."""
        super().__init__()
        self.__messageText = str(message)
        self.__confirmButtonLabel = str(t(confirm_label))
        self.__cancelButtonLabel = str(t(cancel_label))

    def compose(self):
        """Header, message grid with confirm/cancel buttons, footer."""
        yield Header()

        with Grid():
            # Row 1: the question itself, spanning all four columns.
            yield Static(self.__messageText, classes="four")

            # Row 2: spacer row.
            yield Static(" ", classes="four")

            # Row 3: the two action buttons.
            yield Button(self.__confirmButtonLabel, id="confirm_button")
            yield Button(self.__cancelButtonLabel, id="cancel_button")

        yield Footer()

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Map the pressed button id to the boolean dismissal result."""
        pressedId = event.button.id
        if pressedId == "confirm_button":
            self.dismiss(True)
        elif pressedId == "cancel_button":
            self.dismiss(False)

    def action_back(self):
        """Escape behaves like cancel."""
        self.dismiss(False)
|
||||||
30
src/ffx/constants.py
Normal file
30
src/ffx/constants.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Application and database schema versions.
VERSION = '0.2.6'
DATABASE_VERSION = 3

# Default encoder settings (CRF quality; SVT-AV1 preset).
DEFAULT_QUALITY = 32
DEFAULT_AV1_PRESET = 5

DEFAULT_VIDEO_ENCODER_LABEL = "vp9"
DEFAULT_CONTAINER_FORMAT = "webm"
DEFAULT_CONTAINER_EXTENSION = "webm"
SUPPORTED_INPUT_FILE_EXTENSIONS = ("mkv", "mp4", "avi", "flv", "webm")
FFMPEG_COMMAND_TOKENS = ("ffmpeg", "-y")
FFMPEG_NULL_OUTPUT_TOKENS = ("-f", "null", "/dev/null")

# Opus audio bandwidths per source layout (kbit/s, as ffmpeg token strings).
DEFAULT_STEREO_BANDWIDTH = "112"
DEFAULT_AC3_BANDWIDTH = "256"
DEFAULT_DTS_BANDWIDTH = "320"
DEFAULT_7_1_BANDWIDTH = "384"

# cropdetect sampling window (seconds).
DEFAULT_CROPDETECT_SEEK_SECONDS = 60
DEFAULT_CROPDETECT_DURATION_SECONDS = 180

# Sample-cut window (seconds).  The lowercase names predate the project's
# UPPER_SNAKE_CASE convention and are kept for backward compatibility;
# new code should prefer the aliases below.
DEFAULT_cut_start = 60
DEFAULT_cut_length = 180
DEFAULT_CUT_START = DEFAULT_cut_start
DEFAULT_CUT_LENGTH = DEFAULT_cut_length

# Digit counts for season/episode numbers in filename indices/indicators.
DEFAULT_SHOW_INDEX_SEASON_DIGITS = 2
DEFAULT_SHOW_INDEX_EPISODE_DIGITS = 2
DEFAULT_SHOW_INDICATOR_SEASON_DIGITS = 2
DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS = 2

# Jinja-style template used to render output filenames.
DEFAULT_OUTPUT_FILENAME_TEMPLATE = '{{ ffx_show_name }} - {{ ffx_index }}{{ ffx_index_separator }}{{ ffx_episode_name }}{{ ffx_indicator_separator }}{{ ffx_indicator }}'
||||||
225
src/ffx/database.py
Normal file
225
src/ffx/database.py
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
import os, shutil, click
|
||||||
|
|
||||||
|
from sqlalchemy import create_engine, inspect, text
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
# Import the full model package so SQLAlchemy registers every mapped class
|
||||||
|
# before metadata creation and the first ORM query.
|
||||||
|
import ffx.model
|
||||||
|
from ffx.model.show import Base
|
||||||
|
|
||||||
|
from ffx.model.property import Property
|
||||||
|
from ffx.model.migration import (
|
||||||
|
DatabaseVersionException,
|
||||||
|
getMigrationPlan,
|
||||||
|
migrateDatabase,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ffx.constants import DATABASE_VERSION
|
||||||
|
|
||||||
|
|
||||||
|
# Key under which the schema version is stored in the properties table.
DATABASE_VERSION_KEY = 'database_version'
# Table names declared by the ORM metadata; used to detect a fresh database.
EXPECTED_TABLE_NAMES = set(Base.metadata.tables.keys())
||||||
|
def databaseContext(databasePath: str = ''):
    """Build a database context dict for the given SQLite path.

    databasePath semantics:
      * None  -> in-memory database (':memory:')
      * ''    -> default ~/.local/var/ffx/ffx.db (directory created if missing)
      * other -> user-expanded, absolutized file path

    Returns a dict with keys 'path', 'url', 'engine' and 'session'
    (a sessionmaker bound to the engine).  As a side effect the schema is
    bootstrapped and migrated to the expected version.
    """
    if databasePath is None:
        # sqlite:///:memory:
        databasePath = ':memory:'
    elif not databasePath:
        homeDir = os.path.expanduser("~")
        ffxVarDir = os.path.join(homeDir, '.local', 'var', 'ffx')
        # exist_ok avoids the check-then-create race of the previous version.
        os.makedirs(ffxVarDir, exist_ok=True)
        databasePath = os.path.join(ffxVarDir, 'ffx.db')
    else:
        databasePath = os.path.expanduser(databasePath)

    if databasePath != ':memory:':
        databasePath = os.path.abspath(databasePath)

    # Local named 'context' so it no longer shadows this function's own name.
    context = {}
    context['path'] = databasePath
    context['url'] = f"sqlite:///{databasePath}"
    context['engine'] = create_engine(context['url'])
    context['session'] = sessionmaker(bind=context['engine'])

    bootstrapDatabaseIfNeeded(context)
    ensureDatabaseVersion(context)

    return context
||||||
|
|
||||||
|
|
||||||
|
def databaseNeedsBootstrap(databaseContext) -> bool:
    """True when at least one ORM-declared table is missing from the database."""
    presentTables = set(inspect(databaseContext['engine']).get_table_names())
    missingTables = EXPECTED_TABLE_NAMES - presentTables
    return bool(missingTables)
|
||||||
|
|
||||||
|
def bootstrapDatabaseIfNeeded(databaseContext):
    """Create all mapped tables unless the schema is already complete."""
    if databaseNeedsBootstrap(databaseContext):
        Base.metadata.create_all(databaseContext['engine'])
|
|
||||||
|
|
||||||
|
def ensureDatabaseVersion(databaseContext):
    """Bring the stored schema version in line with DATABASE_VERSION.

    Raises DatabaseVersionException when the stored version is newer than
    this build supports or when migration fails to reach the target.
    """

    currentDatabaseVersion = getDatabaseVersion(databaseContext)
    # A fresh database has no version row (getDatabaseVersion returns 0):
    # stamp it with the current version and stop.
    if not currentDatabaseVersion:
        setDatabaseVersion(databaseContext, DATABASE_VERSION)
        return

    # Database written by a newer release: refuse to touch it.
    if currentDatabaseVersion > DATABASE_VERSION:
        raise DatabaseVersionException(
            f"Current database version ({currentDatabaseVersion}) does not match required ({DATABASE_VERSION})"
        )

    if currentDatabaseVersion < DATABASE_VERSION:
        # Interactive confirmation + backup, then run the migration modules.
        promptForDatabaseMigration(databaseContext, currentDatabaseVersion, DATABASE_VERSION)
        migrateDatabase(databaseContext, currentDatabaseVersion, DATABASE_VERSION, setDatabaseVersion)
        currentDatabaseVersion = getDatabaseVersion(databaseContext)

    # Re-check after any migration; anything but the target is a hard error.
    if currentDatabaseVersion != DATABASE_VERSION:
        raise DatabaseVersionException(
            f"Current database version ({currentDatabaseVersion}) does not match required ({DATABASE_VERSION})"
        )

    # Idempotent ALTERs for columns added without a version bump.
    ensureCurrentSchemaCompatibility(databaseContext)
||||||
|
|
||||||
|
|
||||||
|
def ensureCurrentSchemaCompatibility(databaseContext):
    """Add late-introduced 'shows' columns (quality, notes) when absent."""
    engine = databaseContext['engine']
    existingColumns = {
        column['name'] for column in inspect(engine).get_columns('shows')
    }

    # Column name -> idempotent ALTER statement (insertion order preserved).
    requiredColumns = {
        'quality': "ALTER TABLE shows ADD COLUMN quality INTEGER DEFAULT 0",
        'notes': "ALTER TABLE shows ADD COLUMN notes TEXT DEFAULT ''",
    }
    pendingStatements = [
        statement
        for columnName, statement in requiredColumns.items()
        if columnName not in existingColumns
    ]

    if not pendingStatements:
        return

    with engine.begin() as connection:
        for statement in pendingStatements:
            connection.execute(text(statement))
|
|
||||||
|
|
||||||
|
def promptForDatabaseMigration(databaseContext, currentDatabaseVersion: int, targetDatabaseVersion: int):
    """Show the migration plan, confirm with the user, and take a backup.

    Raises DatabaseVersionException when a required migration module is
    missing, and click.ClickException when the user declines.
    """
    migrationPlan = getMigrationPlan(currentDatabaseVersion, targetDatabaseVersion)

    click.echo("Database migration required.")
    click.echo(f"Current version: {currentDatabaseVersion}")
    click.echo(f"Target version: {targetDatabaseVersion}")
    click.echo("Steps required:")

    # List every step and collect those whose migration module is absent.
    missingSteps = []
    for migrationStep in migrationPlan:
        moduleStatus = "present" if migrationStep.modulePresent else "missing"
        click.echo(
            f" {migrationStep.versionFrom} -> {migrationStep.versionTo}: "
            + f"{migrationStep.moduleName} [{moduleStatus}]"
        )
        if not migrationStep.modulePresent:
            missingSteps.append(migrationStep)

    # Any gap in the chain makes the whole migration impossible.
    if missingSteps:
        firstMissingStep = missingSteps[0]
        raise DatabaseVersionException(
            f"No migration path from database version "
            + f"{firstMissingStep.versionFrom} to {firstMissingStep.versionTo}"
        )

    if not click.confirm(
        "Create a backup and continue with database migration?",
        default=True,
    ):
        raise click.ClickException("Database migration aborted by user.")

    backupPath = backupDatabaseBeforeMigration(
        databaseContext,
        currentDatabaseVersion,
        targetDatabaseVersion,
    )
    click.echo(f"Database backup created: {backupPath}")
|
|
||||||
|
|
||||||
|
def backupDatabaseBeforeMigration(databaseContext, currentDatabaseVersion: int, targetDatabaseVersion: int) -> str:
    """Copy the SQLite file aside before migrating and return the backup path.

    Only file-backed databases can be backed up; ':memory:' is rejected.
    """
    databasePath = databaseContext.get('path', '')
    if not databasePath or databasePath == ':memory:':
        raise click.ClickException("Database migration backup requires a file-backed SQLite database.")

    if not os.path.isfile(databasePath):
        raise click.ClickException(f"Database file not found for backup: {databasePath}")

    # Uniquify: plain ".bak" first, then numbered variants.
    backupStem = f"{databasePath}.v{currentDatabaseVersion}-to-v{targetDatabaseVersion}"
    candidatePath = f"{backupStem}.bak"
    suffixNumber = 1
    while os.path.exists(candidatePath):
        candidatePath = f"{backupStem}.{suffixNumber}.bak"
        suffixNumber += 1

    # Flush/close pooled connections so the copy sees a consistent file.
    databaseContext['engine'].dispose()
    shutil.copy2(databasePath, candidatePath)

    return candidatePath
||||||
|
|
||||||
|
|
||||||
|
def getDatabaseVersion(databaseContext):
    """Return the stored schema version, or 0 when no version row exists.

    Raises click.ClickException on any database error.
    """
    # Bug fix: `s` must be pre-bound, otherwise a failure in Session()
    # made `finally: s.close()` raise NameError and mask the real error.
    s = None
    try:
        Session = databaseContext['session']
        s = Session()
        versionProperty = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY).first()

        return int(versionProperty.value) if versionProperty is not None else 0

    except Exception as ex:
        raise click.ClickException(f"getDatabaseVersion(): {repr(ex)}")
    finally:
        if s is not None:
            s.close()
||||||
|
|
||||||
|
|
||||||
|
def setDatabaseVersion(databaseContext, databaseVersion: int):
    """Insert or update the schema-version row in the properties table.

    Raises click.ClickException on any database error.
    """
    # Bug fix: pre-bind `s` so the finally block cannot raise NameError
    # when Session() itself fails.
    s = None
    try:
        Session = databaseContext['session']
        s = Session()

        q = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY)

        dbVersion = int(databaseVersion)

        versionProperty = q.first()
        if versionProperty:
            versionProperty.value = str(dbVersion)
        else:
            versionProperty = Property(key = DATABASE_VERSION_KEY,
                                       value = str(dbVersion))
            s.add(versionProperty)
        s.commit()

    except Exception as ex:
        raise click.ClickException(f"setDatabaseVersion(): {repr(ex)}")
    finally:
        if s is not None:
            s.close()
||||||
43
src/ffx/ffx_app.py
Normal file
43
src/ffx/ffx_app.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
from textual.app import App
|
||||||
|
|
||||||
|
from .i18n import set_current_language, t
|
||||||
|
from .shows_screen import ShowsScreen
|
||||||
|
from .inspect_details_screen import InspectDetailsScreen
|
||||||
|
from .media_edit_screen import MediaEditScreen
|
||||||
|
|
||||||
|
|
||||||
|
class FfxApp(App):
    """Textual application shell that routes to a screen based on context['command']."""

    TITLE = "FFX"

    BINDINGS = [
        ("q", "quit()", t("Quit")),
        ("h", "switch_mode('help')", t("Help")),
    ]

    def __init__(self, context = None):
        """context: optional dict of launch parameters ('command', 'language', ...)."""
        super().__init__()

        # Data 'input' variable.  Bug fix: the previous signature used a
        # mutable default (`context = {}`), which is shared across all
        # instances constructed without an argument.
        self.context = {} if context is None else context
        set_current_language(self.context.get("language"))

    def on_mount(self) -> None:
        """Push the screen matching the requested command, if any."""
        command = self.context.get('command')
        if command == 'shows':
            self.push_screen(ShowsScreen())
        elif command == 'inspect':
            self.push_screen(InspectDetailsScreen())
        elif command == 'edit':
            self.push_screen(MediaEditScreen())

    def getContext(self):
        """Data 'output' method"""
        return self.context
||||||
471
src/ffx/ffx_controller.py
Normal file
471
src/ffx/ffx_controller.py
Normal file
@@ -0,0 +1,471 @@
|
|||||||
|
import os, click
|
||||||
|
from logging import Logger
|
||||||
|
|
||||||
|
from ffx.media_descriptor_change_set import MediaDescriptorChangeSet
|
||||||
|
|
||||||
|
from ffx.media_descriptor import MediaDescriptor
|
||||||
|
from ffx.audio_layout import AudioLayout
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
from ffx.track_codec import TrackCodec
|
||||||
|
from ffx.video_encoder import VideoEncoder
|
||||||
|
from ffx.process import executeProcess
|
||||||
|
|
||||||
|
from ffx.constants import (
|
||||||
|
DEFAULT_CONTAINER_EXTENSION,
|
||||||
|
DEFAULT_CONTAINER_FORMAT,
|
||||||
|
DEFAULT_VIDEO_ENCODER_LABEL,
|
||||||
|
DEFAULT_cut_start,
|
||||||
|
DEFAULT_cut_length,
|
||||||
|
FFMPEG_COMMAND_TOKENS,
|
||||||
|
FFMPEG_NULL_OUTPUT_TOKENS,
|
||||||
|
SUPPORTED_INPUT_FILE_EXTENSIONS,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ffx.filter.quality_filter import QualityFilter
|
||||||
|
from ffx.filter.preset_filter import PresetFilter
|
||||||
|
from ffx.filter.crop_filter import CropFilter
|
||||||
|
|
||||||
|
from ffx.model.pattern import Pattern
|
||||||
|
|
||||||
|
|
||||||
|
class FfxController():
|
||||||
|
|
||||||
|
# Base ffmpeg invocation ("ffmpeg", "-y").
COMMAND_TOKENS = list(FFMPEG_COMMAND_TOKENS)
NULL_TOKENS = list(FFMPEG_NULL_OUTPUT_TOKENS) # -f null /dev/null

# Two-pass statistics file ffmpeg writes into the working directory.
TEMP_FILE_NAME = "ffmpeg2pass-0.log"

DEFAULT_VIDEO_ENCODER = DEFAULT_VIDEO_ENCODER_LABEL

DEFAULT_FILE_FORMAT = DEFAULT_CONTAINER_FORMAT
DEFAULT_FILE_EXTENSION = DEFAULT_CONTAINER_EXTENSION

INPUT_FILE_EXTENSIONS = list(SUPPORTED_INPUT_FILE_EXTENSIONS)

# Explicit 5.1 channel mapping (side surrounds SL/SR mapped to back BL/BR).
CHANNEL_MAP_5_1 = 'FL-FL|FR-FR|FC-FC|LFE-LFE|SL-BL|SR-BR:5.1'

# SIGNATURE_TAGS = {'RECODED_WITH': 'FFX'}
||||||
|
def __init__(self,
             context : dict,
             targetMediaDescriptor : MediaDescriptor,
             sourceMediaDescriptor : MediaDescriptor = None):
    """Capture the job context, media descriptors and the derived change set."""
    self.__context = context
    self.__logger: Logger = context['logger']

    self.__targetMediaDescriptor = targetMediaDescriptor
    self.__sourceMediaDescriptor = sourceMediaDescriptor

    # Precompute the source->target descriptor diff once for all jobs.
    self.__mdcs = MediaDescriptorChangeSet(
        context, targetMediaDescriptor, sourceMediaDescriptor)
||||||
|
|
||||||
|
|
||||||
|
def executeCommandSequence(self, commandSequence):
    """Run an external command, raising ClickException on a non-zero return code."""
    stdout, stderr, returnCode = executeProcess(commandSequence, context=self.__context)
    if returnCode:
        raise click.ClickException(f"Command resulted in error: rc={returnCode} error={stderr}")
    return stdout, stderr, returnCode
||||||
|
|
||||||
|
|
||||||
|
def generateAV1Tokens(self, quality, preset, subIndex : int = 0):
    """SVT-AV1 encode options for video stream #subIndex (10-bit 4:2:0)."""
    svtParams = f"crf={quality}:preset={preset}:tune=0:enable-overlays=1:scd=1:scm=0"
    return [
        f"-c:v:{int(subIndex)}", 'libsvtav1',
        '-svtav1-params', svtParams,
        '-pix_fmt', 'yuv420p10le',
    ]
||||||
|
|
||||||
|
|
||||||
|
# -c:v libx264 -preset slow -crf 17
|
||||||
|
def generateH264Tokens(self, quality, subIndex : int = 0):
    """x264 encode options (slow preset, CRF quality) for video stream #subIndex."""
    tokens = [f"-c:v:{int(subIndex)}", 'libx264']
    tokens += ["-preset", "slow"]
    tokens += ['-crf', str(quality)]
    return tokens
||||||
|
|
||||||
|
|
||||||
|
# -c:v:0 libvpx-vp9 -row-mt 1 -crf 32 -pass 1 -speed 4 -frame-parallel 0 -g 9999 -aq-mode 0
|
||||||
|
def generateVP9Pass1Tokens(self, quality, subIndex : int = 0):
    """libvpx-vp9 first-pass (analysis) options for video stream #subIndex."""
    passOneOptions = (
        ('-row-mt', '1'),
        ('-crf', str(quality)),
        ('-pass', '1'),
        ('-speed', '4'),
        ('-frame-parallel', '0'),
        ('-g', '9999'),
        ('-aq-mode', '0'),
    )
    tokens = [f"-c:v:{int(subIndex)}", 'libvpx-vp9']
    for optionFlag, optionValue in passOneOptions:
        tokens += [optionFlag, optionValue]
    return tokens
||||||
|
|
||||||
|
# -c:v:0 libvpx-vp9 -row-mt 1 -crf 32 -pass 2 -frame-parallel 0 -g 9999 -aq-mode 0 -auto-alt-ref 1 -lag-in-frames 25
|
||||||
|
def generateVP9Pass2Tokens(self, quality, subIndex : int = 0):
    """libvpx-vp9 second-pass (encode) options for video stream #subIndex."""
    passTwoOptions = (
        ('-row-mt', '1'),
        ('-crf', str(quality)),
        ('-pass', '2'),
        ('-frame-parallel', '0'),
        ('-g', '9999'),
        ('-aq-mode', '0'),
        ('-auto-alt-ref', '1'),
        ('-lag-in-frames', '25'),
    )
    tokens = [f"-c:v:{int(subIndex)}", 'libvpx-vp9']
    for optionFlag, optionValue in passTwoOptions:
        tokens += [optionFlag, optionValue]
    return tokens
||||||
|
|
||||||
|
def generateVideoCopyTokens(self, subIndex):
    """Stream-copy option for video stream #subIndex."""
    return [f"-c:v:{int(subIndex)}", 'copy']

def generateAudioCopyTokens(self, subIndex):
    """Stream-copy option for audio stream #subIndex."""
    return [f"-c:a:{int(subIndex)}", 'copy']

def generateSubtitleCopyTokens(self, subIndex):
    """Stream-copy option for subtitle stream #subIndex."""
    return [f"-c:s:{int(subIndex)}", 'copy']

def generateAttachmentCopyTokens(self, subIndex):
    """Stream-copy option for attachment stream #subIndex."""
    return [f"-c:t:{int(subIndex)}", 'copy']
||||||
|
|
||||||
|
def generateCopyTokens(self):
    """Emit per-track '-c:<type>:<n> copy' options for every target track."""
    copyTokens = []
    target = self.__targetMediaDescriptor

    for descriptor in target.getTrackDescriptors(trackType=TrackType.VIDEO):
        copyTokens += self.generateVideoCopyTokens(descriptor.getSubIndex())

    for descriptor in target.getTrackDescriptors(trackType=TrackType.AUDIO):
        copyTokens += self.generateAudioCopyTokens(descriptor.getSubIndex())

    for descriptor in target.getTrackDescriptors(trackType=TrackType.SUBTITLE):
        copyTokens += self.generateSubtitleCopyTokens(descriptor.getSubIndex())

    # Attachments are taken from the source descriptor when one is available,
    # otherwise from the target descriptor.
    source = self.__sourceMediaDescriptor
    attachmentSource = target if source is None else source
    for descriptor in attachmentSource.getTrackDescriptors(trackType=TrackType.ATTACHMENT):
        copyTokens += self.generateAttachmentCopyTokens(descriptor.getSubIndex())

    return copyTokens
||||||
|
|
||||||
|
|
||||||
|
def generateCropTokens(self):
    """Return '-ss <start> -t <length>' cut tokens from context or defaults.

    NOTE(review): despite the name, these are cut (seek/duration) tokens,
    not a crop filter.
    """
    context = self.__context
    hasExplicitCut = 'cut_start' in context.keys() and 'cut_length' in context.keys()
    if hasExplicitCut:
        startSeconds = int(context['cut_start'])
        lengthSeconds = int(context['cut_length'])
    else:
        startSeconds = DEFAULT_cut_start
        lengthSeconds = DEFAULT_cut_length

    return ['-ss', str(startSeconds), '-t', str(lengthSeconds)]
||||||
|
|
||||||
|
|
||||||
|
def generateOutputTokens(self, filePathBase, format = '', ext = ''):
    """Final output tokens: optional '-f <format>' followed by the output path."""
    self.__logger.debug(f"FfxController.generateOutputTokens(): base='{filePathBase}' format='{format}' ext='{ext}'")

    extensionSuffix = ('.' + str(ext)) if ext else ''
    outputFilePath = f"{filePathBase}{extensionSuffix}"
    if not format:
        return [outputFilePath]
    return ['-f', format, outputFilePath]
||||||
|
|
||||||
|
|
||||||
|
def generateEncodingMetadataTags(self, videoEncoder: VideoEncoder, quality, preset) -> dict:
    """Build container metadata tags that record the encode settings used."""
    metadataTags = {}

    qualityAwareEncoders = (VideoEncoder.AV1, VideoEncoder.H264, VideoEncoder.VP9)
    if videoEncoder in qualityAwareEncoders:
        metadataTags["ENCODING_QUALITY"] = str(quality)

    # Only the AV1 path records a preset.
    if videoEncoder == VideoEncoder.AV1:
        metadataTags["ENCODING_PRESET"] = str(preset)

    return metadataTags
||||||
|
|
||||||
|
|
||||||
|
def generateAudioEncodingTokens(self):
    """Generates ffmpeg options audio streams including channel remapping, codec and bitrate"""
    # Every matched layout is re-encoded to Opus; the bitrate comes from
    # context['bitrates'] keyed by source class ('stereo', 'ac3', 'dts').
    # Layouts not matched below get no tokens for that stream.

    audioTokens = []

    # targetAudioTrackDescriptors = [td for td in self.__targetMediaDescriptor.getAllTrackDescriptors() if td.getType() == TrackType.AUDIO]
    targetAudioTrackDescriptors = self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.AUDIO)

    trackSubIndex = 0
    for trackDescriptor in targetAudioTrackDescriptors:

        trackAudioLayout = trackDescriptor.getAudioLayout()

        # 6.1: explicit channel_layout filter, DTS-class bitrate.
        if trackAudioLayout == AudioLayout.LAYOUT_6_1:
            audioTokens += [f"-c:a:{trackSubIndex}",
                            'libopus',
                            f"-filter:a:{trackSubIndex}",
                            'channelmap=channel_layout=6.1',
                            f"-b:a:{trackSubIndex}",
                            self.__context['bitrates']['dts']]

        # 5.1: remap side surrounds to back surrounds (see CHANNEL_MAP_5_1).
        if trackAudioLayout == AudioLayout.LAYOUT_5_1:
            audioTokens += [f"-c:a:{trackSubIndex}",
                            'libopus',
                            f"-filter:a:{trackSubIndex}",
                            f"channelmap={FfxController.CHANNEL_MAP_5_1}",
                            f"-b:a:{trackSubIndex}",
                            self.__context['bitrates']['ac3']]

        # Stereo: plain Opus encode, no remapping.
        if trackAudioLayout == AudioLayout.LAYOUT_STEREO:
            audioTokens += [f"-c:a:{trackSubIndex}",
                            'libopus',
                            f"-b:a:{trackSubIndex}",
                            self.__context['bitrates']['stereo']]

        # Unnamed 6-channel input: treat like 5.1 (same map and bitrate).
        if trackAudioLayout == AudioLayout.LAYOUT_6CH:
            audioTokens += [f"-c:a:{trackSubIndex}",
                            'libopus',
                            f"-filter:a:{trackSubIndex}",
                            f"channelmap={FfxController.CHANNEL_MAP_5_1}",
                            f"-b:a:{trackSubIndex}",
                            self.__context['bitrates']['ac3']]

        # -ac 5 ?
        if trackAudioLayout == AudioLayout.LAYOUT_5_0:
            audioTokens += [f"-c:a:{trackSubIndex}",
                            'libopus',
                            f"-filter:a:{trackSubIndex}",
                            'channelmap=channel_layout=5.0',
                            f"-b:a:{trackSubIndex}",
                            self.__context['bitrates']['ac3']]

        trackSubIndex += 1
    return audioTokens
||||||
|
|
||||||
|
|
||||||
|
def runJob(self,
|
||||||
|
sourcePath,
|
||||||
|
targetPath,
|
||||||
|
targetFormat: str = '',
|
||||||
|
chainIteration: list = [],
|
||||||
|
cropArguments: dict = {},
|
||||||
|
currentPattern: Pattern = None,
|
||||||
|
currentShowDescriptor = None):
|
||||||
|
# quality: int = DEFAULT_QUALITY,
|
||||||
|
# preset: int = DEFAULT_AV1_PRESET):
|
||||||
|
|
||||||
|
|
||||||
|
videoEncoder: VideoEncoder = self.__context.get('video_encoder', VideoEncoder.VP9)
|
||||||
|
|
||||||
|
|
||||||
|
qualityFilters = [fy for fy in chainIteration if fy['identifier'] == 'quality']
|
||||||
|
presetFilters = [fy for fy in chainIteration if fy['identifier'] == 'preset']
|
||||||
|
|
||||||
|
cropFilters = [fy for fy in chainIteration if fy['identifier'] == 'crop']
|
||||||
|
denoiseFilters = [fy for fy in chainIteration if fy['identifier'] == 'nlmeans']
|
||||||
|
deinterlaceFilters = [fy for fy in chainIteration if fy['identifier'] == 'bwdif']
|
||||||
|
|
||||||
|
|
||||||
|
if qualityFilters and (quality := qualityFilters[0]['parameters']['quality']):
|
||||||
|
self.__logger.info(f"Setting quality {quality} from command line")
|
||||||
|
elif currentPattern is not None and (quality := currentPattern.quality):
|
||||||
|
self.__logger.info(f"Setting quality {quality} from pattern")
|
||||||
|
elif currentShowDescriptor is not None and (quality := currentShowDescriptor.getQuality()):
|
||||||
|
self.__logger.info(f"Setting quality {quality} from show")
|
||||||
|
else:
|
||||||
|
quality = (QualityFilter.DEFAULT_H264_QUALITY
|
||||||
|
if (videoEncoder == VideoEncoder.H264)
|
||||||
|
else QualityFilter.DEFAULT_VP9_QUALITY)
|
||||||
|
self.__logger.info(f"Setting quality {quality} from default")
|
||||||
|
|
||||||
|
|
||||||
|
preset = presetFilters[0]['parameters']['preset'] if presetFilters else PresetFilter.DEFAULT_PRESET
|
||||||
|
self.__context['encoding_metadata_tags'] = self.generateEncodingMetadataTags(
|
||||||
|
videoEncoder,
|
||||||
|
quality,
|
||||||
|
preset,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
filterParamTokens = []
|
||||||
|
|
||||||
|
if cropArguments:
|
||||||
|
|
||||||
|
cropParams = (f"crop="
|
||||||
|
+ f"{cropArguments[CropFilter.OUTPUT_WIDTH_KEY]}"
|
||||||
|
+ f":{cropArguments[CropFilter.OUTPUT_HEIGHT_KEY]}"
|
||||||
|
+ f":{cropArguments[CropFilter.OFFSET_X_KEY]}"
|
||||||
|
+ f":{cropArguments[CropFilter.OFFSET_Y_KEY]}")
|
||||||
|
|
||||||
|
filterParamTokens.append(cropParams)
|
||||||
|
|
||||||
|
filterParamTokens.extend(denoiseFilters[0]['tokens'] if denoiseFilters else [])
|
||||||
|
filterParamTokens.extend(deinterlaceFilters[0]['tokens'] if deinterlaceFilters else [])
|
||||||
|
|
||||||
|
deinterlaceFilters
|
||||||
|
|
||||||
|
filterTokens = ['-vf', ', '.join(filterParamTokens)] if filterParamTokens else []
|
||||||
|
|
||||||
|
|
||||||
|
commandTokens = FfxController.COMMAND_TOKENS + ['-i', sourcePath]
|
||||||
|
|
||||||
|
if videoEncoder == VideoEncoder.COPY:
|
||||||
|
|
||||||
|
commandSequence = (commandTokens
|
||||||
|
+ self.__targetMediaDescriptor.getImportFileTokens()
|
||||||
|
+ self.__targetMediaDescriptor.getInputMappingTokens(sourceMediaDescriptor = self.__sourceMediaDescriptor)
|
||||||
|
+ self.__mdcs.generateDispositionTokens())
|
||||||
|
|
||||||
|
commandSequence += self.__mdcs.generateMetadataTokens()
|
||||||
|
commandSequence += self.generateCopyTokens()
|
||||||
|
|
||||||
|
if self.__context['perform_cut']:
|
||||||
|
commandSequence += self.generateCropTokens()
|
||||||
|
|
||||||
|
commandSequence += self.generateOutputTokens(targetPath,
|
||||||
|
targetFormat)
|
||||||
|
|
||||||
|
self.__logger.debug("FfxController.runJob(): Running command sequence")
|
||||||
|
|
||||||
|
if not self.__context['dry_run']:
|
||||||
|
self.executeCommandSequence(commandSequence)
|
||||||
|
return
|
||||||
|
|
||||||
|
if videoEncoder == VideoEncoder.AV1:
|
||||||
|
|
||||||
|
commandSequence = (commandTokens
|
||||||
|
+ self.__targetMediaDescriptor.getImportFileTokens()
|
||||||
|
+ self.__targetMediaDescriptor.getInputMappingTokens(sourceMediaDescriptor = self.__sourceMediaDescriptor)
|
||||||
|
+ self.__mdcs.generateDispositionTokens())
|
||||||
|
|
||||||
|
# Optional tokens
|
||||||
|
commandSequence += self.__mdcs.generateMetadataTokens()
|
||||||
|
commandSequence += filterTokens
|
||||||
|
|
||||||
|
for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
|
||||||
|
#HINT: Attached thumbnails are not supported by .webm container format
|
||||||
|
if td.getCodec != TrackCodec.PNG:
|
||||||
|
commandSequence += self.generateAV1Tokens(int(quality), int(preset))
|
||||||
|
|
||||||
|
commandSequence += self.generateAudioEncodingTokens()
|
||||||
|
|
||||||
|
if self.__context['perform_cut']:
|
||||||
|
commandSequence += self.generateCropTokens()
|
||||||
|
|
||||||
|
commandSequence += self.generateOutputTokens(targetPath,
|
||||||
|
targetFormat)
|
||||||
|
|
||||||
|
self.__logger.debug(f"FfxController.runJob(): Running command sequence")
|
||||||
|
|
||||||
|
if not self.__context['dry_run']:
|
||||||
|
self.executeCommandSequence(commandSequence)
|
||||||
|
|
||||||
|
|
||||||
|
if videoEncoder == VideoEncoder.H264:
|
||||||
|
|
||||||
|
commandSequence = (commandTokens
|
||||||
|
+ self.__targetMediaDescriptor.getImportFileTokens()
|
||||||
|
+ self.__targetMediaDescriptor.getInputMappingTokens(sourceMediaDescriptor = self.__sourceMediaDescriptor)
|
||||||
|
+ self.__mdcs.generateDispositionTokens())
|
||||||
|
|
||||||
|
# Optional tokens
|
||||||
|
commandSequence += self.__mdcs.generateMetadataTokens()
|
||||||
|
commandSequence += filterTokens
|
||||||
|
|
||||||
|
for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
|
||||||
|
#HINT: Attached thumbnails are not supported by .webm container format
|
||||||
|
if td.getCodec != TrackCodec.PNG:
|
||||||
|
commandSequence += self.generateH264Tokens(int(quality))
|
||||||
|
|
||||||
|
commandSequence += self.generateAudioEncodingTokens()
|
||||||
|
|
||||||
|
if self.__context['perform_cut']:
|
||||||
|
commandSequence += self.generateCropTokens()
|
||||||
|
|
||||||
|
commandSequence += self.generateOutputTokens(targetPath,
|
||||||
|
targetFormat)
|
||||||
|
|
||||||
|
self.__logger.debug(f"FfxController.runJob(): Running command sequence")
|
||||||
|
|
||||||
|
if not self.__context['dry_run']:
|
||||||
|
self.executeCommandSequence(commandSequence)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if videoEncoder == VideoEncoder.VP9:
|
||||||
|
|
||||||
|
commandSequence1 = (commandTokens
|
||||||
|
+ self.__targetMediaDescriptor.getInputMappingTokens(only_video=True))
|
||||||
|
|
||||||
|
# Optional tokens
|
||||||
|
#NOTE: Filters and so needs to run on the first pass as well, as here
|
||||||
|
# the required bitrate for the second run is determined and recorded
|
||||||
|
# TODO: Results seems to be slightly better with first pass omitted,
|
||||||
|
# Confirm or find better filter settings for 2-pass
|
||||||
|
# commandSequence1 += self.__context['denoiser'].generatefilterTokens()
|
||||||
|
|
||||||
|
for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
|
||||||
|
#HINT: Attached thumbnails are not supported by .webm container format
|
||||||
|
if td.getCodec != TrackCodec.PNG:
|
||||||
|
commandSequence1 += self.generateVP9Pass1Tokens(int(quality))
|
||||||
|
|
||||||
|
if self.__context['perform_cut']:
|
||||||
|
commandSequence1 += self.generateCropTokens()
|
||||||
|
|
||||||
|
commandSequence1 += FfxController.NULL_TOKENS
|
||||||
|
|
||||||
|
if os.path.exists(FfxController.TEMP_FILE_NAME):
|
||||||
|
os.remove(FfxController.TEMP_FILE_NAME)
|
||||||
|
|
||||||
|
self.__logger.debug(f"FfxController.runJob(): Running command sequence 1")
|
||||||
|
|
||||||
|
if not self.__context['dry_run']:
|
||||||
|
self.executeCommandSequence(commandSequence1)
|
||||||
|
|
||||||
|
commandSequence2 = (commandTokens
|
||||||
|
+ self.__targetMediaDescriptor.getImportFileTokens()
|
||||||
|
+ self.__targetMediaDescriptor.getInputMappingTokens(sourceMediaDescriptor = self.__sourceMediaDescriptor)
|
||||||
|
+ self.__mdcs.generateDispositionTokens())
|
||||||
|
|
||||||
|
# Optional tokens
|
||||||
|
commandSequence2 += self.__mdcs.generateMetadataTokens()
|
||||||
|
commandSequence2 += filterTokens
|
||||||
|
|
||||||
|
for td in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO):
|
||||||
|
#HINT: Attached thumbnails are not supported by .webm container format
|
||||||
|
if td.getCodec != TrackCodec.PNG:
|
||||||
|
commandSequence2 += self.generateVP9Pass2Tokens(int(quality))
|
||||||
|
|
||||||
|
commandSequence2 += self.generateAudioEncodingTokens()
|
||||||
|
|
||||||
|
if self.__context['perform_cut']:
|
||||||
|
commandSequence2 += self.generateCropTokens()
|
||||||
|
|
||||||
|
commandSequence2 += self.generateOutputTokens(targetPath,
|
||||||
|
targetFormat)
|
||||||
|
|
||||||
|
self.__logger.debug(f"FfxController.runJob(): Running command sequence 2")
|
||||||
|
|
||||||
|
if not self.__context['dry_run']:
|
||||||
|
self.executeCommandSequence(commandSequence2)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def createEmptyFile(self,
|
||||||
|
path: str = 'empty.mkv',
|
||||||
|
sizeX: int = 1280,
|
||||||
|
sizeY: int = 720,
|
||||||
|
rate: int = 25,
|
||||||
|
length: int = 10):
|
||||||
|
|
||||||
|
commandTokens = FfxController.COMMAND_TOKENS
|
||||||
|
|
||||||
|
commandTokens += ['-f',
|
||||||
|
'lavfi',
|
||||||
|
'-i',
|
||||||
|
f"color=size={sizeX}x{sizeY}:rate={rate}:color=black",
|
||||||
|
'-f',
|
||||||
|
'lavfi',
|
||||||
|
'-i',
|
||||||
|
'anullsrc=channel_layout=stereo:sample_rate=44100',
|
||||||
|
'-t',
|
||||||
|
str(length),
|
||||||
|
path]
|
||||||
|
|
||||||
|
self.executeCommandSequence(commandTokens)
|
||||||
300
src/ffx/file_properties.py
Normal file
300
src/ffx/file_properties.py
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
import os, re, json
|
||||||
|
|
||||||
|
from .constants import (
|
||||||
|
DEFAULT_CROPDETECT_DURATION_SECONDS,
|
||||||
|
DEFAULT_CROPDETECT_SEEK_SECONDS,
|
||||||
|
FFMPEG_COMMAND_TOKENS,
|
||||||
|
FFMPEG_NULL_OUTPUT_TOKENS,
|
||||||
|
)
|
||||||
|
from .media_descriptor import MediaDescriptor
|
||||||
|
from .pattern_controller import PatternController
|
||||||
|
|
||||||
|
from ffx.filter.crop_filter import CropFilter
|
||||||
|
|
||||||
|
from .process import executeProcess
|
||||||
|
|
||||||
|
from ffx.model.pattern import Pattern
|
||||||
|
|
||||||
|
|
||||||
|
class FileProperties():
|
||||||
|
_cropdetect_cache: dict[tuple[str, int, int, int, int], dict[str, str]] = {}
|
||||||
|
|
||||||
|
FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm']
|
||||||
|
FFPROBE_COMMAND_TOKENS = ["ffprobe", "-hide_banner", "-show_format", "-show_streams", "-of", "json"]
|
||||||
|
|
||||||
|
SE_INDICATOR_PATTERN = '([sS][0-9]+[eE][0-9]+)'
|
||||||
|
SEASON_EPISODE_INDICATOR_MATCH = '[sS]([0-9]+)[eE]([0-9]+)'
|
||||||
|
EPISODE_INDICATOR_MATCH = '[eE]([0-9]+)'
|
||||||
|
|
||||||
|
CROPDETECT_PATTERN = 'crop=[0-9]+:[0-9]+:[0-9]+:[0-9]+$'
|
||||||
|
|
||||||
|
DEFAULT_INDEX_DIGITS = 3
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def extractSeasonEpisodeValues(cls, sourceText: str) -> tuple[int | None, int] | None:
|
||||||
|
seasonEpisodeMatch = re.search(cls.SEASON_EPISODE_INDICATOR_MATCH, str(sourceText))
|
||||||
|
if seasonEpisodeMatch is not None:
|
||||||
|
return int(seasonEpisodeMatch.group(1)), int(seasonEpisodeMatch.group(2))
|
||||||
|
|
||||||
|
episodeMatch = re.search(cls.EPISODE_INDICATOR_MATCH, str(sourceText))
|
||||||
|
if episodeMatch is not None:
|
||||||
|
return None, int(episodeMatch.group(1))
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def __init__(self, context, sourcePath):
|
||||||
|
|
||||||
|
self.context = context
|
||||||
|
|
||||||
|
self.__logger = context['logger']
|
||||||
|
|
||||||
|
# Separate basedir, basename and extension for current source file
|
||||||
|
self.__sourcePath = sourcePath
|
||||||
|
|
||||||
|
self.__sourceDirectory = os.path.dirname(self.__sourcePath)
|
||||||
|
self.__sourceFilename = os.path.basename(self.__sourcePath)
|
||||||
|
|
||||||
|
sourcePathTokens = self.__sourceFilename.split('.')
|
||||||
|
|
||||||
|
if sourcePathTokens[-1] in FileProperties.FILE_EXTENSIONS:
|
||||||
|
self.__sourceFileBasename = '.'.join(sourcePathTokens[:-1])
|
||||||
|
self.__sourceFilenameExtension = sourcePathTokens[-1]
|
||||||
|
else:
|
||||||
|
self.__sourceFileBasename = self.__sourceFilename
|
||||||
|
self.__sourceFilenameExtension = ''
|
||||||
|
|
||||||
|
self.__usePattern = bool(self.context.get('use_pattern', True))
|
||||||
|
self.__pc = (
|
||||||
|
PatternController(context)
|
||||||
|
if self.__usePattern and 'database' in self.context
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Checking if database contains matching pattern
|
||||||
|
matchResult = (
|
||||||
|
self.__pc.matchFilename(self.__sourceFilename)
|
||||||
|
if self.__pc is not None
|
||||||
|
else {}
|
||||||
|
)
|
||||||
|
|
||||||
|
self.__logger.debug(f"FileProperties.__init__(): Match result: {matchResult}")
|
||||||
|
|
||||||
|
self.__pattern: Pattern = matchResult['pattern'] if matchResult else None
|
||||||
|
|
||||||
|
if matchResult:
|
||||||
|
databaseMatchedGroups = matchResult['match'].groups()
|
||||||
|
self.__logger.debug(f"FileProperties.__init__(): Matched groups: {databaseMatchedGroups}")
|
||||||
|
|
||||||
|
indicatorSource = databaseMatchedGroups[0]
|
||||||
|
else:
|
||||||
|
self.__logger.debug(f"FileProperties.__init__(): Checking file name for indicator {self.__sourceFilename}")
|
||||||
|
indicatorSource = self.__sourceFilename
|
||||||
|
|
||||||
|
seasonEpisodeValues = self.extractSeasonEpisodeValues(indicatorSource)
|
||||||
|
if seasonEpisodeValues is None:
|
||||||
|
self.__season = -1
|
||||||
|
self.__episode = -1
|
||||||
|
else:
|
||||||
|
sourceSeason, sourceEpisode = seasonEpisodeValues
|
||||||
|
self.__season = -1 if sourceSeason is None else int(sourceSeason)
|
||||||
|
self.__episode = int(sourceEpisode)
|
||||||
|
|
||||||
|
self.__ffprobeData = None
|
||||||
|
|
||||||
|
def _getCropdetectWindow(self):
|
||||||
|
cropdetectContext = self.context.get('cropdetect', {})
|
||||||
|
|
||||||
|
seekSeconds = int(cropdetectContext.get('seek_seconds', DEFAULT_CROPDETECT_SEEK_SECONDS))
|
||||||
|
durationSeconds = int(cropdetectContext.get('duration_seconds', DEFAULT_CROPDETECT_DURATION_SECONDS))
|
||||||
|
|
||||||
|
if seekSeconds < 0:
|
||||||
|
raise ValueError("Crop detection seek seconds must be zero or greater.")
|
||||||
|
if durationSeconds <= 0:
|
||||||
|
raise ValueError("Crop detection duration seconds must be greater than zero.")
|
||||||
|
|
||||||
|
return seekSeconds, durationSeconds
|
||||||
|
|
||||||
|
def _getCropdetectCacheKey(self):
|
||||||
|
sourceStat = os.stat(self.__sourcePath)
|
||||||
|
seekSeconds, durationSeconds = self._getCropdetectWindow()
|
||||||
|
|
||||||
|
return (
|
||||||
|
os.path.abspath(self.__sourcePath),
|
||||||
|
sourceStat.st_mtime_ns,
|
||||||
|
sourceStat.st_size,
|
||||||
|
seekSeconds,
|
||||||
|
durationSeconds,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _clear_cropdetect_cache(cls):
|
||||||
|
cls._cropdetect_cache.clear()
|
||||||
|
|
||||||
|
def _getFfprobeData(self):
|
||||||
|
if self.__ffprobeData is not None:
|
||||||
|
return self.__ffprobeData
|
||||||
|
|
||||||
|
ffprobeOutput, ffprobeError, returnCode = executeProcess(
|
||||||
|
FileProperties.FFPROBE_COMMAND_TOKENS + [self.__sourcePath]
|
||||||
|
)
|
||||||
|
|
||||||
|
if 'Invalid data found when processing input' in ffprobeError:
|
||||||
|
raise Exception(f"File {self.__sourcePath} does not contain valid stream data")
|
||||||
|
|
||||||
|
if returnCode != 0:
|
||||||
|
raise Exception(f"ffprobe returned with error {returnCode}")
|
||||||
|
|
||||||
|
self.__ffprobeData = json.loads(ffprobeOutput)
|
||||||
|
return self.__ffprobeData
|
||||||
|
|
||||||
|
|
||||||
|
def getFormatData(self):
|
||||||
|
"""
|
||||||
|
"format": {
|
||||||
|
"filename": "Downloads/nagatoro_s02/nagatoro_s01e02.mkv",
|
||||||
|
"nb_streams": 18,
|
||||||
|
"nb_programs": 0,
|
||||||
|
"nb_stream_groups": 0,
|
||||||
|
"format_name": "matroska,webm",
|
||||||
|
"format_long_name": "Matroska / WebM",
|
||||||
|
"start_time": "0.000000",
|
||||||
|
"duration": "1420.063000",
|
||||||
|
"size": "1489169824",
|
||||||
|
"bit_rate": "8389316",
|
||||||
|
"probe_score": 100,
|
||||||
|
"tags": {
|
||||||
|
"PUBLISHER": "Crunchyroll",
|
||||||
|
"ENCODER": "Lavf58.29.100"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
return self._getFfprobeData()['format']
|
||||||
|
|
||||||
|
|
||||||
|
def getStreamData(self):
|
||||||
|
"""Returns ffprobe stream data as array with elements according to the following example
|
||||||
|
{
|
||||||
|
"index": 4,
|
||||||
|
"codec_name": "hdmv_pgs_subtitle",
|
||||||
|
"codec_long_name": "HDMV Presentation Graphic Stream subtitles",
|
||||||
|
"codec_type": "subtitle",
|
||||||
|
"codec_tag_string": "[0][0][0][0]",
|
||||||
|
"codec_tag": "0x0000",
|
||||||
|
"r_frame_rate": "0/0",
|
||||||
|
"avg_frame_rate": "0/0",
|
||||||
|
"time_base": "1/1000",
|
||||||
|
"start_pts": 0,
|
||||||
|
"start_time": "0.000000",
|
||||||
|
"duration_ts": 1421035,
|
||||||
|
"duration": "1421.035000",
|
||||||
|
"disposition": {
|
||||||
|
"default": 1,
|
||||||
|
"dub": 0,
|
||||||
|
"original": 0,
|
||||||
|
"comment": 0,
|
||||||
|
"lyrics": 0,
|
||||||
|
"karaoke": 0,
|
||||||
|
"forced": 0,
|
||||||
|
"hearing_impaired": 0,
|
||||||
|
"visual_impaired": 0,
|
||||||
|
"clean_effects": 0,
|
||||||
|
"attached_pic": 0,
|
||||||
|
"timed_thumbnails": 0,
|
||||||
|
"non_diegetic": 0,
|
||||||
|
"captions": 0,
|
||||||
|
"descriptions": 0,
|
||||||
|
"metadata": 0,
|
||||||
|
"dependent": 0,
|
||||||
|
"still_image": 0
|
||||||
|
},
|
||||||
|
"tags": {
|
||||||
|
"language": "ger",
|
||||||
|
"title": "German Full"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
return self._getFfprobeData()['streams']
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def findCropArguments(self):
|
||||||
|
""""""
|
||||||
|
|
||||||
|
cacheKey = self._getCropdetectCacheKey()
|
||||||
|
cachedCropArguments = FileProperties._cropdetect_cache.get(cacheKey)
|
||||||
|
if cachedCropArguments is not None:
|
||||||
|
self.__logger.debug(
|
||||||
|
"FileProperties.findCropArguments(): Reusing cached cropdetect result for %s",
|
||||||
|
self.__sourcePath,
|
||||||
|
)
|
||||||
|
return dict(cachedCropArguments)
|
||||||
|
|
||||||
|
seekSeconds, durationSeconds = self._getCropdetectWindow()
|
||||||
|
|
||||||
|
cropdetectCommand = (
|
||||||
|
list(FFMPEG_COMMAND_TOKENS)
|
||||||
|
+ ["-ss", str(seekSeconds), "-i", self.__sourcePath, "-t", str(durationSeconds), "-vf", "cropdetect"]
|
||||||
|
+ list(FFMPEG_NULL_OUTPUT_TOKENS)
|
||||||
|
)
|
||||||
|
_ffmpegOutput, ffmpegError, returnCode = executeProcess(cropdetectCommand, context=self.context)
|
||||||
|
|
||||||
|
errorLines = ffmpegError.split('\n')
|
||||||
|
|
||||||
|
crops = {}
|
||||||
|
for el in errorLines:
|
||||||
|
|
||||||
|
cropdetect_match = re.search(FileProperties.CROPDETECT_PATTERN, el)
|
||||||
|
|
||||||
|
if cropdetect_match is not None:
|
||||||
|
cropParam = str(cropdetect_match.group(0))
|
||||||
|
|
||||||
|
crops[cropParam] = crops.get(cropParam, 0) + 1
|
||||||
|
|
||||||
|
if crops:
|
||||||
|
cropString = max(crops.items(), key=lambda item: (item[1], item[0]))[0]
|
||||||
|
|
||||||
|
cropTokens = cropString.split('=')
|
||||||
|
cropValueTokens = cropTokens[1]
|
||||||
|
cropValues = cropValueTokens.split(':')
|
||||||
|
|
||||||
|
cropArguments = {
|
||||||
|
CropFilter.OUTPUT_WIDTH_KEY: cropValues[0],
|
||||||
|
CropFilter.OUTPUT_HEIGHT_KEY: cropValues[1],
|
||||||
|
CropFilter.OFFSET_X_KEY: cropValues[2],
|
||||||
|
CropFilter.OFFSET_Y_KEY: cropValues[3]
|
||||||
|
}
|
||||||
|
FileProperties._cropdetect_cache[cacheKey] = dict(cropArguments)
|
||||||
|
return cropArguments
|
||||||
|
|
||||||
|
if returnCode != 0:
|
||||||
|
raise Exception(f"ffmpeg cropdetect returned with error {returnCode}")
|
||||||
|
|
||||||
|
FileProperties._cropdetect_cache[cacheKey] = {}
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def getMediaDescriptor(self):
|
||||||
|
return MediaDescriptor.fromFfprobe(self.context, self.getFormatData(), self.getStreamData())
|
||||||
|
|
||||||
|
|
||||||
|
def getShowId(self) -> int:
|
||||||
|
"""Result is -1 if the filename did not match anything in database"""
|
||||||
|
return self.__pattern.getShowId() if self.__pattern is not None else -1
|
||||||
|
|
||||||
|
def getPattern(self) -> Pattern:
|
||||||
|
"""Result is None if the filename did not match anything in database"""
|
||||||
|
return self.__pattern
|
||||||
|
|
||||||
|
|
||||||
|
def getSeason(self) -> int:
|
||||||
|
return int(self.__season)
|
||||||
|
|
||||||
|
def getEpisode(self) -> int:
|
||||||
|
return int(self.__episode)
|
||||||
|
|
||||||
|
|
||||||
|
def getFilename(self):
|
||||||
|
return self.__sourceFilename
|
||||||
|
|
||||||
|
def getFileBasename(self):
|
||||||
|
return self.__sourceFileBasename
|
||||||
0
src/ffx/filter/__init__.py
Normal file
0
src/ffx/filter/__init__.py
Normal file
51
src/ffx/filter/crop_filter.py
Normal file
51
src/ffx/filter/crop_filter.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
import itertools
|
||||||
|
|
||||||
|
from .filter import Filter
|
||||||
|
|
||||||
|
|
||||||
|
class CropFilter(Filter):
    """Video crop filter producing a single ffmpeg ``crop=w:h:x:y`` token."""

    IDENTIFIER = 'crop'

    OUTPUT_WIDTH_KEY = 'output_width'
    OUTPUT_HEIGHT_KEY = 'output_height'
    OFFSET_X_KEY = 'x_offset'
    OFFSET_Y_KEY = 'y_offset'

    def __init__(self, **kwargs):
        # Every geometry argument defaults to zero when not supplied.
        self.__outputWidth = int(kwargs.get(CropFilter.OUTPUT_WIDTH_KEY, 0))
        self.__outputHeight = int(kwargs.get(CropFilter.OUTPUT_HEIGHT_KEY, 0))
        self.__offsetX = int(kwargs.get(CropFilter.OFFSET_X_KEY, 0))
        self.__offsetY = int(kwargs.get(CropFilter.OFFSET_Y_KEY, 0))

        super().__init__(self)

    def setArguments(self, **kwargs):
        """Overwrite all four geometry values at once."""
        self.__outputWidth = int(kwargs.get(CropFilter.OUTPUT_WIDTH_KEY))
        self.__outputHeight = int(kwargs.get(CropFilter.OUTPUT_HEIGHT_KEY))
        self.__offsetX = int(kwargs.get(CropFilter.OFFSET_X_KEY))
        self.__offsetY = int(kwargs.get(CropFilter.OFFSET_Y_KEY))

    def getPayload(self):
        """Describe this crop as the payload dict consumed by the pipeline."""
        geometry = (self.__outputWidth, self.__outputHeight,
                    self.__offsetX, self.__offsetY)
        width, height, offsetX, offsetY = geometry

        return {'identifier': CropFilter.IDENTIFIER,
                'parameters': {CropFilter.OUTPUT_WIDTH_KEY: width,
                               CropFilter.OUTPUT_HEIGHT_KEY: height,
                               CropFilter.OFFSET_X_KEY: offsetX,
                               CropFilter.OFFSET_Y_KEY: offsetY},
                'suffices': [],
                'variant': "C{}-{}-{}-{}".format(*geometry),
                'tokens': ["crop={}:{}:{}:{}".format(*geometry)]}

    def getYield(self):
        """Yield the single payload (a crop has exactly one variant)."""
        yield self.getPayload()
|
||||||
140
src/ffx/filter/deinterlace_filter.py
Normal file
140
src/ffx/filter/deinterlace_filter.py
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import itertools
|
||||||
|
|
||||||
|
from .filter import Filter
|
||||||
|
|
||||||
|
|
||||||
|
class DeinterlaceFilter(Filter):
    """Deinterlacing filter based on ffmpeg's ``bwdif``.

    Always runs bwdif in mode 1 (one output frame per field); no tunable
    parameters are exposed, so there is exactly one payload variant.
    """
    # NOTE: the original file carried ~120 lines of commented-out
    # NlmeansFilter template code here; removed as dead code.

    IDENTIFIER = 'bwdif'

    def __init__(self, **kwargs):
        # No configurable parameters yet; **kwargs is accepted only for
        # interface parity with the other Filter subclasses.
        super().__init__(self)

    def getPayload(self):
        """Build the (parameterless) payload dict for this filter."""
        payload = {'identifier': DeinterlaceFilter.IDENTIFIER,
                   'parameters': {},
                   'suffices': [],
                   'variant': 'DEINT',
                   'tokens': ['bwdif=mode=1']}

        return payload

    def getYield(self):
        """Yield the single payload variant."""
        yield self.getPayload()
|
||||||
17
src/ffx/filter/filter.py
Normal file
17
src/ffx/filter/filter.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import itertools
|
||||||
|
|
||||||
|
|
||||||
|
class Filter():
    """Base class for ffmpeg filters.

    NOTE: ``filterChain`` is a class attribute, so every constructed filter
    (across all subclasses) registers itself into one shared, process-wide
    chain; this appears to be the intended registry design.
    """

    filterChain: list = []

    def __init__(self, filter):
        # Register the concrete filter instance in the shared chain.
        self.filterChain.append(filter)

    def getFilterChain(self):
        """Return the shared filter chain."""
        return self.filterChain

    def getChainYield(self):
        """Yield tuples combining one payload variant from each registered
        filter (cartesian product across the whole chain)."""
        perFilterYields = [registered.getYield() for registered in self.filterChain]
        yield from itertools.product(*perFilterYields)
|
||||||
162
src/ffx/filter/nlmeans_filter.py
Normal file
162
src/ffx/filter/nlmeans_filter.py
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
import itertools
|
||||||
|
|
||||||
|
from .filter import Filter
|
||||||
|
|
||||||
|
|
||||||
|
class NlmeansFilter(Filter):
|
||||||
|
|
||||||
|
IDENTIFIER = 'nlmeans'
|
||||||
|
|
||||||
|
DEFAULT_STRENGTH: float = 2.8
|
||||||
|
DEFAULT_PATCH_SIZE: int = 13
|
||||||
|
DEFAULT_CHROMA_PATCH_SIZE: int = 9
|
||||||
|
DEFAULT_RESEARCH_WINDOW: int = 23
|
||||||
|
DEFAULT_CHROMA_RESEARCH_WINDOW: int= 17
|
||||||
|
|
||||||
|
STRENGTH_KEY = 'strength'
|
||||||
|
PATCH_SIZE_KEY = 'patch_size'
|
||||||
|
CHROMA_PATCH_SIZE_KEY = 'chroma_patch_size'
|
||||||
|
RESEARCH_WINDOW_KEY = 'research_window'
|
||||||
|
CHROMA_RESEARCH_WINDOW_KEY = 'chroma_research_window'
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
|
||||||
|
self.__useHardware = kwargs.get('use_hardware', False)
|
||||||
|
|
||||||
|
self.__strengthList = []
|
||||||
|
strength = kwargs.get(NlmeansFilter.STRENGTH_KEY, '')
|
||||||
|
if strength:
|
||||||
|
strengthTokens = strength.split(',')
|
||||||
|
for st in strengthTokens:
|
||||||
|
try:
|
||||||
|
strengthValue = float(st)
|
||||||
|
except:
|
||||||
|
raise ValueError('NlmeansFilter: Strength value has to be of type float')
|
||||||
|
if strengthValue < 1.0 or strengthValue > 30.0:
|
||||||
|
raise ValueError('NlmeansFilter: Strength value has to be between 1.0 and 30.0')
|
||||||
|
self.__strengthList.append(strengthValue)
|
||||||
|
else:
|
||||||
|
self.__strengthList = [NlmeansFilter.DEFAULT_STRENGTH]
|
||||||
|
|
||||||
|
self.__patchSizeList = []
|
||||||
|
patchSize = kwargs.get(NlmeansFilter.PATCH_SIZE_KEY, '')
|
||||||
|
if patchSize:
|
||||||
|
patchSizeTokens = patchSize.split(',')
|
||||||
|
for pst in patchSizeTokens:
|
||||||
|
try:
|
||||||
|
patchSizeValue = int(pst)
|
||||||
|
except:
|
||||||
|
raise ValueError('NlmeansFilter: Patch size value has to be of type int')
|
||||||
|
if patchSizeValue < 0 or patchSizeValue > 99:
|
||||||
|
raise ValueError('NlmeansFilter: Patch size value has to be between 0 and 99')
|
||||||
|
if patchSizeValue % 2 == 0:
|
||||||
|
raise ValueError('NlmeansFilter: Patch size value has to an odd number')
|
||||||
|
self.__patchSizeList.append(patchSizeValue)
|
||||||
|
else:
|
||||||
|
self.__patchSizeList = [NlmeansFilter.DEFAULT_PATCH_SIZE]
|
||||||
|
|
||||||
|
self.__chromaPatchSizeList = []
|
||||||
|
chromaPatchSize = kwargs.get(NlmeansFilter.CHROMA_PATCH_SIZE_KEY, '')
|
||||||
|
if chromaPatchSize:
|
||||||
|
chromaPatchSizeTokens = chromaPatchSize.split(',')
|
||||||
|
for cpst in chromaPatchSizeTokens:
|
||||||
|
try:
|
||||||
|
chromaPatchSizeValue = int(pst)
|
||||||
|
except:
|
||||||
|
raise ValueError('NlmeansFilter: Chroma patch size value has to be of type int')
|
||||||
|
if chromaPatchSizeValue < 0 or chromaPatchSizeValue > 99:
|
||||||
|
raise ValueError('NlmeansFilter: Chroma patch value has to be between 0 and 99')
|
||||||
|
if chromaPatchSizeValue % 2 == 0:
|
||||||
|
raise ValueError('NlmeansFilter: Chroma patch value has to an odd number')
|
||||||
|
self.__chromaPatchSizeList.append(chromaPatchSizeValue)
|
||||||
|
else:
|
||||||
|
self.__chromaPatchSizeList = [NlmeansFilter.DEFAULT_CHROMA_PATCH_SIZE]
|
||||||
|
|
||||||
|
self.__researchWindowList = []
|
||||||
|
researchWindow = kwargs.get(NlmeansFilter.RESEARCH_WINDOW_KEY, '')
|
||||||
|
if researchWindow:
|
||||||
|
researchWindowTokens = researchWindow.split(',')
|
||||||
|
for rwt in researchWindowTokens:
|
||||||
|
try:
|
||||||
|
researchWindowValue = int(rwt)
|
||||||
|
except:
|
||||||
|
raise ValueError('NlmeansFilter: Research window value has to be of type int')
|
||||||
|
if researchWindowValue < 0 or researchWindowValue > 99:
|
||||||
|
raise ValueError('NlmeansFilter: Research window value has to be between 0 and 99')
|
||||||
|
if researchWindowValue % 2 == 0:
|
||||||
|
raise ValueError('NlmeansFilter: Research window value has to an odd number')
|
||||||
|
self.__researchWindowList.append(researchWindowValue)
|
||||||
|
else:
|
||||||
|
self.__researchWindowList = [NlmeansFilter.DEFAULT_RESEARCH_WINDOW]
|
||||||
|
|
||||||
|
self.__chromaResearchWindowList = []
|
||||||
|
chromaResearchWindow = kwargs.get(NlmeansFilter.CHROMA_RESEARCH_WINDOW_KEY, '')
|
||||||
|
if chromaResearchWindow:
|
||||||
|
chromaResearchWindowTokens = chromaResearchWindow.split(',')
|
||||||
|
for crwt in chromaResearchWindowTokens:
|
||||||
|
try:
|
||||||
|
chromaResearchWindowValue = int(crwt)
|
||||||
|
except:
|
||||||
|
raise ValueError('NlmeansFilter: Chroma research window value has to be of type int')
|
||||||
|
if chromaResearchWindowValue < 0 or chromaResearchWindowValue > 99:
|
||||||
|
raise ValueError('NlmeansFilter: Chroma research window value has to be between 0 and 99')
|
||||||
|
if chromaResearchWindowValue % 2 == 0:
|
||||||
|
raise ValueError('NlmeansFilter: Chroma research window value has to an odd number')
|
||||||
|
self.__chromaResearchWindowList.append(chromaResearchWindowValue)
|
||||||
|
else:
|
||||||
|
self.__chromaResearchWindowList = [NlmeansFilter.DEFAULT_CHROMA_RESEARCH_WINDOW]
|
||||||
|
|
||||||
|
super().__init__(self)
|
||||||
|
|
||||||
|
|
||||||
|
def getPayload(self, iteration):
    """Build the payload dict for one parameter combination.

    *iteration* is the 5-tuple produced by getYield():
    (strength, patch size, chroma patch size, research window,
    chroma research window).
    """

    strength, patchSize, chromaPatchSize, researchWindow, chromaResearchWindow = iteration

    # Only parameters with more than one configured candidate contribute
    # a filename suffix, so single-value runs keep short names.
    suffices = [
        f"{prefix}{value}"
        for prefix, value, candidates in (
            ("ds", strength, self.__strengthList),
            ("dp", patchSize, self.__patchSizeList),
            ("dpc", chromaPatchSize, self.__chromaPatchSizeList),
            ("dr", researchWindow, self.__researchWindowList),
            ("drc", chromaResearchWindow, self.__chromaResearchWindowList),
        )
        if len(candidates) > 1
    ]

    # Hardware (OpenCL) and software variants share the same options.
    filterName = 'nlmeans_opencl' if self.__useHardware else 'nlmeans'

    token = (f"{filterName}=s={strength}"
             f":p={patchSize}"
             f":pc={chromaPatchSize}"
             f":r={researchWindow}"
             f":rc={chromaResearchWindow}")

    return {'identifier': NlmeansFilter.IDENTIFIER,
            'parameters': {
                'strength': strength,
                'patch_size': patchSize,
                'chroma_patch_size': chromaPatchSize,
                'research_window': researchWindow,
                'chroma_research_window': chromaResearchWindow
            },
            'suffices': suffices,
            'variant': (f"DS{strength}-DP{patchSize}-DPC{chromaPatchSize}"
                        f"-DR{researchWindow}-DRC{chromaResearchWindow}"),
            'tokens': [token]}
|
||||||
|
|
||||||
|
|
||||||
|
def getYield(self):
    """Yield one payload per combination (Cartesian product) of all
    configured parameter lists."""
    parameterLists = (self.__strengthList,
                      self.__patchSizeList,
                      self.__chromaPatchSizeList,
                      self.__researchWindowList,
                      self.__chromaResearchWindowList)
    for combination in itertools.product(*parameterLists):
        yield self.getPayload(combination)
|
||||||
54
src/ffx/filter/preset_filter.py
Normal file
54
src/ffx/filter/preset_filter.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
import itertools
|
||||||
|
|
||||||
|
from .filter import Filter
|
||||||
|
|
||||||
|
|
||||||
|
class PresetFilter(Filter):
    """Filter expanding a comma separated list of encoder presets into
    per-preset encode payloads."""

    IDENTIFIER = 'preset'

    DEFAULT_PRESET = 5

    PRESET_KEY = 'preset'

    def __init__(self, **kwargs):
        """Parse the 'preset' keyword argument (comma separated ints,
        each in 0..13) into the internal preset list; an empty/missing
        value falls back to DEFAULT_PRESET.

        Raises:
            ValueError: on non-integer or out-of-range preset values.
        """
        self.__presetsList = []
        presets = str(kwargs.get(PresetFilter.PRESET_KEY, ''))
        if presets:
            for token in presets.split(','):
                try:
                    presetValue = int(token)
                except ValueError as error:
                    # Narrowed from a bare 'except:' so unrelated errors
                    # (e.g. KeyboardInterrupt) are no longer swallowed;
                    # chain the cause for easier debugging.
                    raise ValueError('PresetFilter: Preset value has to be of type int') from error
                if presetValue < 0 or presetValue > 13:
                    raise ValueError('PresetFilter: Preset value has to be between 0 and 13')
                self.__presetsList.append(presetValue)
        else:
            self.__presetsList = [PresetFilter.DEFAULT_PRESET]

        super().__init__(self)

    def getPayload(self, preset):
        """Return the payload dict for a single preset value."""
        suffices = []
        # Only emit a filename suffix when several presets compete.
        if len(self.__presetsList) > 1:
            suffices += [f"p{preset}"]

        payload = {'identifier': PresetFilter.IDENTIFIER,
                   'parameters': {
                       'preset': preset
                   },
                   'suffices': suffices,
                   'variant': f"P{preset}",
                   'tokens': []}
        return payload

    def getYield(self):
        """Yield one payload per configured preset."""
        for preset in self.__presetsList:
            yield self.getPayload(preset)
|
||||||
62
src/ffx/filter/quality_filter.py
Normal file
62
src/ffx/filter/quality_filter.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from .filter import Filter
|
||||||
|
|
||||||
|
from ffx.video_encoder import VideoEncoder
|
||||||
|
|
||||||
|
|
||||||
|
class QualityFilter(Filter):
    """Filter expanding a comma separated list of encoder quality values
    into per-quality encode payloads."""

    IDENTIFIER = 'quality'

    DEFAULT_VP9_QUALITY = 32
    DEFAULT_H264_QUALITY = 17

    QUALITY_KEY = 'quality'

    def __init__(self, **kwargs):
        """Parse the 'quality' keyword argument (comma separated ints,
        each in 0..63); an empty/missing value yields [None], meaning
        "let the encoder choose".

        Raises:
            ValueError: on non-integer or out-of-range quality values.
        """
        # NOTE(review): the context is fetched but not used below —
        # presumably intended for picking VP9 vs H264 defaults; confirm
        # before removing (the call also asserts a click context exists).
        context = click.get_current_context().obj

        self.__qualitiesList = []
        qualities = kwargs.get(QualityFilter.QUALITY_KEY, '')
        if qualities:
            for token in qualities.split(','):
                try:
                    qualityValue = int(token)
                except ValueError as error:
                    # Narrowed from a bare 'except:' so unrelated errors
                    # are no longer swallowed; chain the cause.
                    raise ValueError('QualityFilter: Quality value has to be of type int') from error
                if qualityValue < 0 or qualityValue > 63:
                    raise ValueError('QualityFilter: Quality value has to be between 0 and 63')
                self.__qualitiesList.append(qualityValue)
        else:
            # None stands for "encoder default quality".
            self.__qualitiesList = [None]

        super().__init__(self)

    def getPayload(self, quality):
        """Return the payload dict for a single quality value."""
        suffices = []
        # Only emit a filename suffix when several qualities compete.
        if len(self.__qualitiesList) > 1:
            suffices += [f"q{quality}"]

        payload = {'identifier': QualityFilter.IDENTIFIER,
                   'parameters': {
                       'quality': quality
                   },
                   'suffices': suffices,
                   'variant': f"Q{quality}",
                   'tokens': []}
        return payload

    def getYield(self):
        """Yield one payload per configured quality value."""
        for quality in self.__qualitiesList:
            yield self.getPayload(quality)
|
||||||
6
src/ffx/filter/scale_filter.py
Normal file
6
src/ffx/filter/scale_filter.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
from .filter import Filter
|
||||||
|
|
||||||
|
class ScaleFilter(Filter):
    """Placeholder scale filter; currently only wires up the base class."""

    def __init__(self):
        super().__init__(self)
|
||||||
23
src/ffx/help_screen.py
Normal file
23
src/ffx/help_screen.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from textual.app import ComposeResult
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Footer, Placeholder
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
class HelpScreen(Screen):
    """Placeholder screen shown for the in-app help."""

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    def __init__(self):
        super().__init__()
        # NOTE(review): the context is fetched but not stored or used —
        # presumably a stub for upcoming help content; confirm.
        context = self.app.getContext()

    def compose(self) -> ComposeResult:
        # Row 1
        yield Placeholder(t("Help Screen"))
        yield Footer()

    def action_back(self):
        go_back_or_exit(self)
|
||||||
259
src/ffx/helper.py
Normal file
259
src/ffx/helper.py
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
from jinja2 import Environment, Undefined
|
||||||
|
from .constants import DEFAULT_OUTPUT_FILENAME_TEMPLATE
|
||||||
|
from .configuration_controller import ConfigurationController
|
||||||
|
from .logging_utils import get_ffx_logger
|
||||||
|
from .show_descriptor import ShowDescriptor
|
||||||
|
|
||||||
|
|
||||||
|
class EmptyStringUndefined(Undefined):
    """Jinja2 Undefined variant that renders unknown template variables
    as an empty string instead of a placeholder."""

    def __str__(self):
        return ''
|
||||||
|
|
||||||
|
|
||||||
|
DIFF_ADDED_KEY = 'added'
|
||||||
|
DIFF_REMOVED_KEY = 'removed'
|
||||||
|
DIFF_CHANGED_KEY = 'changed'
|
||||||
|
DIFF_UNCHANGED_KEY = 'unchanged'
|
||||||
|
|
||||||
|
FILENAME_FILTER_TRANSLATION = str.maketrans(
|
||||||
|
{
|
||||||
|
"/": "-",
|
||||||
|
":": ";",
|
||||||
|
"*": "",
|
||||||
|
"'": "",
|
||||||
|
"?": "#",
|
||||||
|
"♥": "",
|
||||||
|
"’": "",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
TMDB_FILLER_MARKERS = (" (*)", "(*)")
|
||||||
|
TMDB_EPISODE_RANGE_SUFFIX_REGEX = re.compile(r"\(([0-9]+)[-/]([0-9]+)\)$")
|
||||||
|
TMDB_EPISODE_PART_SUFFIX_REGEX = re.compile(r"\(([0-9]+)\)$")
|
||||||
|
RICH_COLOR_REGEX = re.compile(r"\[[a-z_]+\](.+)\[/[a-z_]+\]")
|
||||||
|
|
||||||
|
|
||||||
|
def dictDiff(a: dict, b: dict, ignoreKeys=(), removeKeys=()):
    """Compute a structured diff between two dicts.

    ignoreKeys: keys filtered from both sides before calculating the diff
    removeKeys: keys dropped from *b* only, forcing them to appear removed

    Returns a dict with DIFF_REMOVED_KEY / DIFF_ADDED_KEY /
    DIFF_CHANGED_KEY / DIFF_UNCHANGED_KEY entries; empty when the
    filtered inputs are identical. Kept from the original: unchanged
    keys are only reported when at least one key was removed.
    """
    # Fixed: mutable default arguments ([]) replaced with immutable
    # tuples; both parameters are only used for membership tests.
    a_filtered = {k: v for k, v in a.items() if k not in ignoreKeys}
    b_filtered = {k: v for k, v in b.items()
                  if k not in ignoreKeys and k not in removeKeys}

    a_only = {k: v for k, v in a_filtered.items() if k not in b_filtered}
    b_only = {k: v for k, v in b_filtered.items() if k not in a_filtered}

    common = set(a_filtered) & set(b_filtered)
    changed = {k: b_filtered[k] for k in common if a_filtered[k] != b_filtered[k]}
    unchanged = {k: b_filtered[k] for k in common if a_filtered[k] == b_filtered[k]}

    diffResult = {}
    if a_only:
        diffResult[DIFF_REMOVED_KEY] = a_only
        diffResult[DIFF_UNCHANGED_KEY] = unchanged
    if b_only:
        diffResult[DIFF_ADDED_KEY] = b_only
    if changed:
        diffResult[DIFF_CHANGED_KEY] = changed

    return diffResult
|
||||||
|
|
||||||
|
|
||||||
|
def dictKeysDiff(a: dict, b: dict):
    """Key-level diff of two dicts: reports which keys were added or
    removed and which shared keys changed value. An empty result means
    the dicts are equal."""
    keys_a, keys_b = set(a.keys()), set(b.keys())
    removed = keys_a - keys_b
    added = keys_b - keys_a
    shared = keys_a & keys_b
    changed = {key for key in shared if a[key] != b[key]}

    diffResult = {}
    if removed:
        diffResult[DIFF_REMOVED_KEY] = removed
        # Mirrors the original behavior: b's full key set is reported as
        # "unchanged" only when something was removed.
        diffResult[DIFF_UNCHANGED_KEY] = keys_b
    if added:
        diffResult[DIFF_ADDED_KEY] = added
    if changed:
        diffResult[DIFF_CHANGED_KEY] = changed

    return diffResult
|
||||||
|
|
||||||
|
|
||||||
|
def dictCache(element: dict, cache: list = []):
    """Return (index, cache): the position of an entry in *cache* whose
    keys and values match *element* (per dictKeysDiff), or -1 after
    appending *element* as a new entry.

    NOTE(review): the mutable default argument makes the cache persist
    across calls that omit *cache*. That looks like it may be relied on
    as a module-level memo — confirm before changing; it is a classic
    Python pitfall otherwise.
    """
    for position, cachedElement in enumerate(cache):
        if not dictKeysDiff(cachedElement, element):
            return position, cache
    cache.append(element)
    return -1, cache
|
||||||
|
|
||||||
|
|
||||||
|
def setDiff(a: set, b: set) -> dict:
    """Compute a structured diff between two sets.

    Returns a dict with DIFF_REMOVED_KEY / DIFF_UNCHANGED_KEY (only
    reported when something was removed, kept from the original) and
    DIFF_ADDED_KEY entries; empty when the sets are equal.
    """
    # Fixed the return annotation: the function returns a dict, not a set.
    removed = a - b
    added = b - a
    retained = a & b

    diffResult = {}
    if removed:
        diffResult[DIFF_REMOVED_KEY] = removed
        diffResult[DIFF_UNCHANGED_KEY] = retained
    if added:
        diffResult[DIFF_ADDED_KEY] = added

    return diffResult
|
||||||
|
|
||||||
|
|
||||||
|
def permutateList(inputList: list, permutation: list) -> list:
    """Reorder *inputList* according to *permutation*.

    *permutation* holds source indices: output position i receives
    inputList[permutation[i]].

    Examples (from the original stub's comments):
        [0, 1, 2] on ABC -> ABC
        [0, 2, 1] on ABC -> ACB
        [1, 2, 0] on ABC -> BCA

    Raises:
        ValueError: when the two lists differ in length.
        IndexError: when an index is out of range.
    """
    # Implements the previously unimplemented stub per its documented
    # examples; the original returned None (pass).
    if len(permutation) != len(inputList):
        raise ValueError('permutateList: permutation length has to match input length')
    return [inputList[index] for index in permutation]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def filterFilename(fileName: str) -> str:
    """Replace or remove characters from TMDB responses that are
    problematic when used in filenames, and strip surrounding
    whitespace."""
    sanitized = str(fileName).translate(FILENAME_FILTER_TRANSLATION)
    return sanitized.strip()
|
||||||
|
|
||||||
|
def substituteTmdbFilename(fileName: str) -> str:
    """Rewrite TMDB-specific name suffixes into readable German forms.

    Removes filler markers and converts trailing "(a-b)" / "(n)" episode
    suffixes into "Teil a-b" / "Teil n". When chaining with
    filterFilename, call this function first — the latter destroys some
    of the patterns matched here.
    """
    result = str(fileName)

    for fillerMarker in TMDB_FILLER_MARKERS:
        result = result.replace(fillerMarker, '')

    rangeMatch = TMDB_EPISODE_RANGE_SUFFIX_REGEX.search(result)
    if rangeMatch is not None:
        firstPart, lastPart = rangeMatch.groups()
        return TMDB_EPISODE_RANGE_SUFFIX_REGEX.sub(
            f"Teil {firstPart}-{lastPart}",
            result,
            count=1,
        )

    partMatch = TMDB_EPISODE_PART_SUFFIX_REGEX.search(result)
    if partMatch is not None:
        return TMDB_EPISODE_PART_SUFFIX_REGEX.sub(
            f"Teil {partMatch.group(1)}",
            result,
            count=1,
        )

    return result
|
||||||
|
|
||||||
|
|
||||||
|
def getEpisodeFileBasename(showName,
                           episodeName,
                           season,
                           episode,
                           indexSeasonDigits=None,
                           indexEpisodeDigits=None,
                           indicatorSeasonDigits=None,
                           indicatorEpisodeDigits=None,
                           context=None):
    """Render the output file basename for an episode through the
    configured Jinja2 filename template.

    The *Digits arguments control zero padding of the plain index
    ("0042") and of the SxxEyy indicator ("S01E0042"); a value of None
    falls back to the show defaults, a value of 0 disables that
    component. Examples from the original docstring: One Piece and
    Boruto use index/indicator episode digits 4; Three-Body 2;
    Dragonball 3 (all with indexSeasonDigits 0, indicatorSeasonDigits 2).
    """
    hasConfig = context is not None and 'config' in context.keys()
    cc: ConfigurationController = context['config'] if hasConfig else None
    configData = cc.getData() if cc is not None else {}
    outputFilenameTemplate = configData.get(ConfigurationController.OUTPUT_FILENAME_TEMPLATE_KEY,
                                            DEFAULT_OUTPUT_FILENAME_TEMPLATE)
    defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context)

    if indexSeasonDigits is None:
        indexSeasonDigits = defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY]
    if indexEpisodeDigits is None:
        indexEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY]
    if indicatorSeasonDigits is None:
        indicatorSeasonDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY]
    if indicatorEpisodeDigits is None:
        indicatorEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY]

    # NOTE(review): the logger is resolved but never used below —
    # presumably kept for future diagnostics; confirm before removing.
    if context is not None and 'logger' in context.keys():
        logger = context['logger']
    else:
        logger = get_ffx_logger()

    # A digit width of 0 disables the respective component entirely.
    indexSeparator = ' ' if indexSeasonDigits or indexEpisodeDigits else ''
    seasonIndex = '{num:{fill}{width}}'.format(num=season, fill='0', width=indexSeasonDigits) if indexSeasonDigits else ''
    episodeIndex = '{num:{fill}{width}}'.format(num=episode, fill='0', width=indexEpisodeDigits) if indexEpisodeDigits else ''

    indicatorSeparator = ' - ' if indicatorSeasonDigits or indicatorEpisodeDigits else ''
    seasonIndicator = 'S{num:{fill}{width}}'.format(num=season, fill='0', width=indicatorSeasonDigits) if indicatorSeasonDigits else ''
    episodeIndicator = 'E{num:{fill}{width}}'.format(num=episode, fill='0', width=indicatorEpisodeDigits) if indicatorEpisodeDigits else ''

    jinjaKwargs = {
        'ffx_show_name': showName,
        'ffx_index_separator': indexSeparator,
        'ffx_season_index': str(seasonIndex),
        'ffx_episode_index': str(episodeIndex),
        'ffx_index': str(seasonIndex) + str(episodeIndex),
        'ffx_episode_name': episodeName,
        'ffx_indicator_separator': indicatorSeparator,
        'ffx_season_indicator': str(seasonIndicator),
        'ffx_episode_indicator': str(episodeIndicator),
        'ffx_indicator': str(seasonIndicator) + str(episodeIndicator)
    }

    # Unknown template variables render as '' via EmptyStringUndefined.
    jinjaEnv = Environment(undefined=EmptyStringUndefined)
    return jinjaEnv.from_string(outputFilenameTemplate).render(**jinjaKwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def formatRichColor(text: str, color: str = None):
    """Wrap *text* in Rich color markup; pass it through unchanged when
    no color is given."""
    return text if color is None else f"[{color}]{text}[/{color}]"
|
||||||
|
|
||||||
|
def removeRichColor(text: str):
    """Strip a [color]...[/color] Rich markup wrapper from *text*; input
    without such markup is returned unchanged (the original object, not
    a str copy)."""
    markupMatch = RICH_COLOR_REGEX.search(str(text))
    return text if markupMatch is None else str(markupMatch.group(1))
|
||||||
158
src/ffx/i18n.py
Normal file
158
src/ffx/i18n.py
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_LANGUAGE = "de"
|
||||||
|
SOURCE_LANGUAGE = "en"
|
||||||
|
SUPPORTED_LANGUAGES = {
|
||||||
|
"de": "Deutsch",
|
||||||
|
"en": "English",
|
||||||
|
"fr": "Français",
|
||||||
|
"ja": "日本語",
|
||||||
|
"nb": "Norsk bokmål",
|
||||||
|
"eo": "Esperanto",
|
||||||
|
"ta": "தமிழ்",
|
||||||
|
"pt": "Português",
|
||||||
|
"es": "Español",
|
||||||
|
}
|
||||||
|
LANGUAGE_ALIASES = {
|
||||||
|
"deu": "de",
|
||||||
|
"ger": "de",
|
||||||
|
"english": "en",
|
||||||
|
"eng": "en",
|
||||||
|
"fra": "fr",
|
||||||
|
"fre": "fr",
|
||||||
|
"french": "fr",
|
||||||
|
"jpn": "ja",
|
||||||
|
"japanese": "ja",
|
||||||
|
"nor": "nb",
|
||||||
|
"nob": "nb",
|
||||||
|
"no": "nb",
|
||||||
|
"nn": "nb",
|
||||||
|
"bokmal": "nb",
|
||||||
|
"norwegian": "nb",
|
||||||
|
"epo": "eo",
|
||||||
|
"esperanto": "eo",
|
||||||
|
"tam": "ta",
|
||||||
|
"tamil": "ta",
|
||||||
|
"por": "pt",
|
||||||
|
"portuguese": "pt",
|
||||||
|
"spa": "es",
|
||||||
|
"spanish": "es",
|
||||||
|
}
|
||||||
|
|
||||||
|
_catalog_cache: dict[str, dict] = {}
|
||||||
|
_current_language = DEFAULT_LANGUAGE
|
||||||
|
|
||||||
|
|
||||||
|
def _assets_directory() -> Path:
    """Locate <repository root>/assets/i18n relative to this module file."""
    repository_root = Path(__file__).resolve().parents[2]
    return repository_root / "assets" / "i18n"
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_language_code(value: str | None) -> str | None:
    """Map a raw language value (locale strings like 'de_DE.UTF-8', ISO
    codes, or known aliases) onto a supported two-letter code.

    Returns None for empty input or unrecognized languages.
    """
    if value is None:
        return None

    cleaned = str(value).strip().replace("-", "_")
    if not cleaned:
        return None

    # 'de_DE.UTF-8' -> 'de_DE' -> 'de'
    base_language = cleaned.split(".")[0].split("_")[0].lower()
    if base_language in SUPPORTED_LANGUAGES:
        return base_language

    return LANGUAGE_ALIASES.get(base_language)
|
||||||
|
|
||||||
|
|
||||||
|
def detect_system_language(env: dict[str, str] | None = None) -> str | None:
    """Derive the UI language from POSIX locale variables, honouring the
    usual LC_ALL > LC_MESSAGES > LANG precedence; None when undetectable."""
    environment = env or os.environ
    for variable in ("LC_ALL", "LC_MESSAGES", "LANG"):
        code = normalize_language_code(environment.get(variable))
        if code:
            return code
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_default_config_path(home_directory: str | None = None) -> Path:
|
||||||
|
base_home = Path(home_directory or os.path.expanduser("~"))
|
||||||
|
return base_home / ".local" / "etc" / "ffx.json"
|
||||||
|
|
||||||
|
|
||||||
|
def read_configured_language(
    config_path: str | os.PathLike | None = None,
    *,
    home_directory: str | None = None,
) -> str | None:
    """Read the 'language' entry from the ffx JSON config file.

    Returns None when the file is missing, unreadable, malformed, or
    holds no recognizable language code.
    """
    if config_path is not None:
        resolved_path = Path(config_path)
    else:
        resolved_path = get_default_config_path(home_directory)

    if not resolved_path.is_file():
        return None

    try:
        config_data = json.loads(resolved_path.read_text(encoding="utf-8"))
    except (OSError, ValueError, TypeError):
        # Unreadable or invalid config: treat as "no configured language".
        return None

    return normalize_language_code(config_data.get("language"))
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_application_language(
    *,
    cli_language: str | None = None,
    config_language: str | None = None,
    system_language: str | None = None,
    env: dict[str, str] | None = None,
) -> str:
    """Pick the UI language with precedence CLI > config > system
    locale, falling back to DEFAULT_LANGUAGE."""
    candidates = (
        cli_language,
        config_language,
        system_language or detect_system_language(env),
    )
    for candidate in candidates:
        normalized = normalize_language_code(candidate)
        if normalized:
            return normalized

    return DEFAULT_LANGUAGE
|
||||||
|
|
||||||
|
|
||||||
|
def set_current_language(language_code: str | None) -> str:
    """Switch the process-wide UI language, falling back to
    DEFAULT_LANGUAGE for unknown codes; returns the language now active."""
    global _current_language
    normalized = normalize_language_code(language_code)
    _current_language = normalized or DEFAULT_LANGUAGE
    return _current_language
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_language() -> str:
    """Return the currently active process-wide UI language code."""
    return _current_language
|
||||||
|
|
||||||
|
|
||||||
|
def _load_catalog(language_code: str) -> dict:
    """Load (and memoize) the JSON translation catalog for a language.

    Unknown codes fall back to DEFAULT_LANGUAGE; missing catalog files
    yield an empty {"phrases": {}, "iso_languages": {}} structure.
    """
    normalized = normalize_language_code(language_code) or DEFAULT_LANGUAGE
    catalog = _catalog_cache.get(normalized)
    if catalog is None:
        catalog_path = _assets_directory() / f"{normalized}.json"
        if catalog_path.is_file():
            catalog = json.loads(catalog_path.read_text(encoding="utf-8"))
        else:
            catalog = {"phrases": {}, "iso_languages": {}}
        _catalog_cache[normalized] = catalog
    return catalog
|
||||||
|
|
||||||
|
|
||||||
|
def _lookup_phrase(language_code: str, source_text: str) -> str | None:
    """Look up *source_text* in a language's phrase catalog; None on miss."""
    catalog = _load_catalog(language_code)
    return catalog.get("phrases", {}).get(source_text)
|
||||||
|
|
||||||
|
|
||||||
|
def t(source_text: str, **kwargs) -> str:
    """Translate *source_text* into the current language, falling back
    to the English (source) catalog and finally to the text itself;
    keyword arguments are interpolated via str.format."""
    translated = None
    for language in (get_current_language(), SOURCE_LANGUAGE):
        translated = _lookup_phrase(language, source_text)
        if translated:
            break
    if not translated:
        translated = source_text
    return translated.format(**kwargs) if kwargs else translated
|
||||||
|
|
||||||
|
|
||||||
|
def translate_iso_language(member_name: str, fallback: str) -> str:
    """Translate an ISO language enum member name via the catalogs,
    checking the active language first, then English; otherwise return
    *fallback* (always as str)."""
    for language_code in (get_current_language(), SOURCE_LANGUAGE):
        iso_translations = _load_catalog(language_code).get("iso_languages", {})
        if member_name in iso_translations:
            return str(iso_translations[member_name])
    return str(fallback)
|
||||||
575
src/ffx/inspect_details_screen.py
Normal file
575
src/ffx/inspect_details_screen.py
Normal file
@@ -0,0 +1,575 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
import click
|
||||||
|
from rich.text import Text
|
||||||
|
|
||||||
|
from textual.containers import Grid
|
||||||
|
from textual.widgets import Button, Footer, Header, Input, Static
|
||||||
|
from textual.widgets._data_table import CellDoesNotExist
|
||||||
|
|
||||||
|
from ffx.file_properties import FileProperties
|
||||||
|
from ffx.media_descriptor_change_set import MediaDescriptorChangeSet
|
||||||
|
from ffx.show_descriptor import ShowDescriptor
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .media_workflow_screen_base import MediaWorkflowScreenBase
|
||||||
|
from .pattern_details_screen import PatternDetailsScreen
|
||||||
|
from .screen_support import (
|
||||||
|
add_auto_table_column,
|
||||||
|
build_screen_controllers,
|
||||||
|
go_back_or_exit,
|
||||||
|
localized_column_width,
|
||||||
|
update_table_column_label,
|
||||||
|
)
|
||||||
|
from .show_details_screen import ShowDetailsScreen
|
||||||
|
|
||||||
|
|
||||||
|
class InspectDetailsScreen(MediaWorkflowScreenBase):
|
||||||
|
|
||||||
|
GRID_COLUMN_LABEL_MIN = 12
|
||||||
|
GRID_COLUMN_2 = 20
|
||||||
|
GRID_COLUMN_3 = 40
|
||||||
|
GRID_COLUMN_4 = "4fr"
|
||||||
|
GRID_COLUMN_5 = 10
|
||||||
|
GRID_COLUMN_6 = "5fr"
|
||||||
|
|
||||||
|
CSS = f"""
|
||||||
|
|
||||||
|
Grid {{
|
||||||
|
grid-size: 6 11;
|
||||||
|
grid-rows: 9 2 2 2 2 8 2 2 2 8 8;
|
||||||
|
grid-columns: {GRID_COLUMN_LABEL_MIN} {GRID_COLUMN_2} {GRID_COLUMN_3} {GRID_COLUMN_4} {GRID_COLUMN_5} {GRID_COLUMN_6};
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
min-width: 120;
|
||||||
|
padding: 1;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: auto;
|
||||||
|
}}
|
||||||
|
|
||||||
|
DataTable .datatable--cursor {{
|
||||||
|
background: darkorange;
|
||||||
|
color: black;
|
||||||
|
}}
|
||||||
|
|
||||||
|
DataTable .datatable--header {{
|
||||||
|
background: steelblue;
|
||||||
|
color: white;
|
||||||
|
}}
|
||||||
|
|
||||||
|
Input {{
|
||||||
|
border: none;
|
||||||
|
}}
|
||||||
|
Button {{
|
||||||
|
border: none;
|
||||||
|
}}
|
||||||
|
|
||||||
|
DataTable {{
|
||||||
|
min-height: 24;
|
||||||
|
width: 100%;
|
||||||
|
}}
|
||||||
|
|
||||||
|
.two {{
|
||||||
|
column-span: 2;
|
||||||
|
}}
|
||||||
|
.three {{
|
||||||
|
column-span: 3;
|
||||||
|
}}
|
||||||
|
.four {{
|
||||||
|
column-span: 4;
|
||||||
|
}}
|
||||||
|
.five {{
|
||||||
|
column-span: 5;
|
||||||
|
}}
|
||||||
|
|
||||||
|
#differences-table {{
|
||||||
|
row-span: 10;
|
||||||
|
}}
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _grid_columns_spec(cls, label_column_width: int | None = None) -> str:
|
||||||
|
return " ".join(
|
||||||
|
[
|
||||||
|
str(
|
||||||
|
cls.GRID_COLUMN_LABEL_MIN
|
||||||
|
if label_column_width is None
|
||||||
|
else int(label_column_width)
|
||||||
|
),
|
||||||
|
str(cls.GRID_COLUMN_2),
|
||||||
|
str(cls.GRID_COLUMN_3),
|
||||||
|
str(cls.GRID_COLUMN_4),
|
||||||
|
str(cls.GRID_COLUMN_5),
|
||||||
|
str(cls.GRID_COLUMN_6),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
COMMAND_NAME = "inspect"
|
||||||
|
DIFFERENCES_COLUMN_LABEL = "Differences (file->db/output)"
|
||||||
|
|
||||||
|
BINDINGS = [
|
||||||
|
("escape", "back", t("Back")),
|
||||||
|
("q", "app.quit", t("Quit")),
|
||||||
|
("n", "new_pattern", t("New Pattern")),
|
||||||
|
("u", "update_pattern", t("Update Pattern")),
|
||||||
|
("e", "edit_pattern", t("Edit Pattern")),
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self):
    """Prepare sort/selection state, build the shared screen controllers
    and load the current media file's properties."""
    # Sort/selection state is initialized before super().__init__()
    # (ordering kept from the original — the base class may touch it).
    self._showRowData: dict[object, ShowDescriptor | None] = {}
    self._showSortColumnKey = None
    self._showSortReverse = False
    self._showColumnLabels: dict[object, str] = {}
    super().__init__()

    controllers = build_screen_controllers(
        self.context,
        pattern=True,
        show=True,
        track=True,
        tag=True,
    )
    self._pc = controllers["pattern"]
    self._sc = controllers["show"]
    self._tc = controllers["track"]
    self._tac = controllers["tag"]

    self.reloadProperties(reset_draft=True)
|
||||||
|
|
||||||
|
def compose(self):
    """Assemble the screen layout: show picker, pattern input, media
    tags, stream table, plus the differences table on the right."""
    self._build_media_tags_table()
    self._build_tracks_table()
    self._build_differences_table()

    yield Header()

    with Grid(id="main_grid"):

        self.showsTable = self._build_shows_table()

        # Row 1: show picker and the differences table (spans 10 rows).
        yield Static(t("Show"))
        yield self.showsTable
        yield Static(" ")
        yield self.differencesTable

        # Row 2: spacer
        yield Static(" ", classes="five")

        # Row 3: pattern substitution button
        yield Static(" ")
        yield Button(t("Substitute"), id="pattern_button")
        yield Static(" ", classes="three")

        # Row 4: pattern input
        yield Static(t("Pattern"))
        yield Input(type="text", id="pattern_input", classes="three")
        yield Static(" ")

        # Row 5: spacer
        yield Static(" ", classes="five")

        # Row 6: media tags
        yield Static(t("Media Tags"))
        yield self.mediaTagsTable
        yield Static(" ", classes="two")

        # Row 7: spacer
        yield Static(" ", classes="five")

        # Row 8: track flag buttons
        yield Static(" ")
        yield Button(t("Set Default"), id="select_default_button")
        yield Button(t("Set Forced"), id="select_forced_button")
        yield Static(" ", classes="two")

        # Row 9: stream table
        yield Static(t("Streams"))
        yield self.tracksTable
        yield Static(" ")

    yield Footer()
|
||||||
|
|
||||||
|
def _update_grid_layout(self) -> None:
    """Widen the left label column to fit the longest translated label,
    then reapply the grid-columns spec."""
    leftColumnWidth = max(
        localized_column_width(t(label), self.GRID_COLUMN_LABEL_MIN)
        for label in ("Show", "Pattern", "Media Tags", "Streams")
    )
    grid = self.query_one("#main_grid", Grid)
    grid.styles.grid_columns = self._grid_columns_spec(leftColumnWidth)
|
||||||
|
|
||||||
|
def action_back(self):
    """Key binding handler: leave this screen (or quit the app when it
    is the last one)."""
    go_back_or_exit(self)
|
||||||
|
|
||||||
|
def _build_shows_table(self):
    """Create and configure the shows DataTable (ID / Name / Year) and
    remember its column keys and base labels for sorting."""
    from textual.widgets import DataTable

    table = DataTable(classes="three")
    idLabel, nameLabel, yearLabel = t("ID"), t("Name"), t("Year")
    self._showColumnKeyId = add_auto_table_column(table, idLabel)
    self._showColumnKeyName = add_auto_table_column(table, nameLabel)
    self._showColumnKeyYear = add_auto_table_column(table, yearLabel)
    self._showColumnLabels = {
        self._showColumnKeyId: idLabel,
        self._showColumnKeyName: nameLabel,
        self._showColumnKeyYear: yearLabel,
    }
    table.cursor_type = "row"
    return table
|
||||||
|
|
||||||
|
def _get_selected_show_row_key(self):
    """Return the row key under the shows-table cursor, or None when the
    cursor points at no valid cell (e.g. empty table)."""
    try:
        cursor = self.showsTable.cursor_coordinate
        row_key, _column_key = self.showsTable.coordinate_to_cell_key(cursor)
    except CellDoesNotExist:
        return None
    return row_key
|
||||||
|
|
||||||
|
def _move_show_cursor_to_row_key(self, row_key):
    """Best effort: move the shows-table cursor to *row_key*; silently
    does nothing for None or unknown/stale keys."""
    if row_key is None:
        return

    try:
        target_row = int(self.showsTable.get_row_index(row_key))
    except Exception:
        # Broad on purpose: a stale row key must never crash the UI.
        return

    self.showsTable.move_cursor(row=target_row)
|
||||||
|
|
||||||
|
def _sort_key_for_show_column(self, column_key):
    """Return the sort-key function for a shows-table column, or None
    for unknown columns (natural sort order)."""
    def numeric(value):
        # Non-numeric cells sort before all real values.
        text = str(value).strip()
        return int(text) if text.isdigit() else -1

    if column_key in (self._showColumnKeyId, self._showColumnKeyYear):
        return numeric
    if column_key == self._showColumnKeyName:
        return lambda value: str(value).casefold()
    return None
|
||||||
|
|
||||||
|
def _update_show_header_labels(self):
    """Refresh shows-table column headers, appending a direction arrow
    to the currently sorted column."""
    if not hasattr(self, "showsTable"):
        return

    for column_key, base_label in self._showColumnLabels.items():
        if self.showsTable.columns.get(column_key) is None:
            continue

        label_text = base_label
        if column_key == self._showSortColumnKey:
            arrow = "▾" if self._showSortReverse else "▴"
            label_text = f"{base_label} {arrow}"

        update_table_column_label(self.showsTable, column_key, Text(label_text))
|
||||||
|
|
||||||
|
def _apply_show_sort(self, *, preserve_row_key=None):
    """Sort the shows table by the active sort column (if any), keep the
    cursor on *preserve_row_key*, and refresh the header arrows."""
    if self._showSortColumnKey is not None:
        self.showsTable.sort(
            self._showSortColumnKey,
            key=self._sort_key_for_show_column(self._showSortColumnKey),
            reverse=self._showSortReverse,
        )
        self._move_show_cursor_to_row_key(preserve_row_key)
    self._update_show_header_labels()
|
||||||
|
|
||||||
|
def on_mount(self):
    """Populate the screen when it is mounted.

    Fills the shows table (with a "<New show>" placeholder when no pattern
    exists yet), applies the default name sort, preselects the current
    pattern's show, seeds the pattern input, and renders the initial
    tag/track/difference views.
    """
    self._update_grid_layout()

    # No stored pattern yet: offer a placeholder row for creating a new show.
    if self._currentPattern is None:
        self._add_show_row(None)

    for show in self._sc.getAllShows():
        self._add_show_row(show.getDescriptor(self.context))

    # Default ordering: by show name.
    self._showSortColumnKey = self._showColumnKeyName
    self._apply_show_sort()

    if self._currentPattern is not None:
        showIdentifier = self._currentPattern.getShowId()
        showRowIndex = self.getRowIndexFromShowId(showIdentifier)
        if showRowIndex is not None:
            self.showsTable.move_cursor(row=showRowIndex)

        self.query_one("#pattern_input", Input).value = self._currentPattern.getPattern()
    else:
        # Prefill the input with the raw media filename and highlight it so the
        # user knows it still has to be turned into a proper pattern.
        self.query_one("#pattern_input", Input).value = self._mediaFilename
        self.highlightPattern(True)

    self.updateMediaTags()
    self.updateTracks()
    self.updateDifferences()
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch toolbar button presses: pattern templating and track default/forced toggles."""
    if event.button.id == "pattern_button":
        pattern = self.query_one("#pattern_input", Input).value
        patternMatch = re.search(FileProperties.SE_INDICATOR_PATTERN, pattern)
        if patternMatch:
            # Substitute the matched season/episode text with the indicator
            # pattern string itself, turning a concrete filename into a template.
            # NOTE(review): this replaces group(1), not the full match
            # (group(0)) — confirm the regex's first group covers the whole
            # indicator, otherwise only part of it is templated.
            self.query_one("#pattern_input", Input).value = pattern.replace(
                patternMatch.group(1),
                FileProperties.SE_INDICATOR_PATTERN,
            )

    if event.button.id == "select_default_button":
        if self.setSelectedTrackDefault():
            self.updateTracks()
            self.updateDifferences()

    if event.button.id == "select_forced_button":
        if self.setSelectedTrackForced():
            self.updateTracks()
            self.updateDifferences()
def on_data_table_header_selected(self, event) -> None:
    """Switch or toggle the shows-table sort order when a column header is clicked.

    Clicking the active sort column flips its direction; clicking a different
    column makes it the ascending sort column. The previously selected row is
    kept under the cursor.
    """
    if event.data_table is not self.showsTable:
        return

    remembered_row = self._get_selected_show_row_key()

    same_column = self._showSortColumnKey == event.column_key
    self._showSortReverse = (not self._showSortReverse) if same_column else False
    self._showSortColumnKey = event.column_key

    self._apply_show_sort(preserve_row_key=remembered_row)
def removeShow(self, showId: int = -1):
    """Remove the first shows-table row matching *showId*.

    The default showId of -1 targets the "<New show>" placeholder row (the
    entry whose descriptor is None). Removes at most one row.
    """
    for row_key, descriptor in list(self._showRowData.items()):
        is_placeholder_match = showId == -1 and descriptor is None
        is_show_match = descriptor is not None and descriptor.getId() == showId
        if is_placeholder_match or is_show_match:
            self.showsTable.remove_row(row_key)
            self._showRowData.pop(row_key, None)
            return
def getRowIndexFromShowId(self, showId: int = -1) -> int | None:
|
||||||
|
for row_key, show_descriptor in self._showRowData.items():
|
||||||
|
if (
|
||||||
|
(showId == -1 and show_descriptor is None)
|
||||||
|
or (
|
||||||
|
show_descriptor is not None
|
||||||
|
and show_descriptor.getId() == showId
|
||||||
|
)
|
||||||
|
):
|
||||||
|
return int(self.showsTable.get_row_index(row_key))
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _add_show_row(self, show_descriptor: ShowDescriptor | None):
    """Append a shows-table row for *show_descriptor* and return its row key.

    None adds the translated "<New show>" placeholder row; otherwise the
    descriptor's id, name and year are rendered as strings. The descriptor is
    recorded under the returned row key for later lookup.
    """
    if show_descriptor is None:
        cells = (" ", t("<New show>"), " ")
    else:
        cells = (
            str(show_descriptor.getId()),
            str(show_descriptor.getName()),
            str(show_descriptor.getYear()),
        )
    row_key = self.showsTable.add_row(*cells)
    self._showRowData[row_key] = show_descriptor
    return row_key
def highlightPattern(self, state: bool):
    """Tint the pattern input red while it still needs the user's attention."""
    background = "red" if state else None
    self.query_one("#pattern_input", Input).styles.background = background
def getSelectedShowDescriptor(self) -> ShowDescriptor | None:
    """Return the ShowDescriptor under the table cursor.

    Returns None for the placeholder row, when no cell is selected, or when
    the table/cursor state is unavailable.
    """
    try:
        cursor = self.showsTable.cursor_coordinate
        row_key, _column_key = self.showsTable.coordinate_to_cell_key(cursor)
        if row_key is not None:
            return self._showRowData.get(row_key)
    except (CellDoesNotExist, AttributeError):
        # Empty table or missing widget state — treat as "nothing selected".
        return None
    return None
def getPatternObjFromInput(self):
    """Build a {"show_id", "pattern"} dict from the current selection and input.

    Returns an empty dict when no show is selected or the widget state is
    unavailable (best-effort: any failure yields {}).
    """
    try:
        show_id = self.getSelectedShowDescriptor().getId()
        pattern_text = str(self.query_one("#pattern_input", Input).value)
    except Exception:
        return {}
    return {"show_id": show_id, "pattern": pattern_text}
def handle_new_pattern(self, showDescriptor: ShowDescriptor):
    """Create and persist a new pattern for *showDescriptor* from the current input.

    Ensures the show has a table row (replacing the "<New show>" placeholder),
    moves the cursor onto it, then saves the pattern together with the source
    media's filtered tags and track descriptors and refreshes the UI.

    Raises:
        TypeError: when *showDescriptor* is not exactly a ShowDescriptor.
    """
    if type(showDescriptor) is not ShowDescriptor:
        raise TypeError(
            "InspectDetailsScreen.handle_new_pattern(): Argument 'showDescriptor' has to be of type ShowDescriptor"
        )

    # Drop the "<New show>" placeholder row (removeShow's default -1 targets it).
    self.removeShow()

    showRowIndex = self.getRowIndexFromShowId(showDescriptor.getId())
    if showRowIndex is None:
        row_key = self._add_show_row(showDescriptor)
        self._apply_show_sort(preserve_row_key=row_key)

    # Re-resolve the index: sorting may have moved the freshly added row.
    showRowIndex = self.getRowIndexFromShowId(showDescriptor.getId())
    if showRowIndex is not None:
        self.showsTable.move_cursor(row=showRowIndex)

    patternObj = self.getPatternObjFromInput()
    if patternObj:
        # Collect the media tags to store, skipping globally ignored/removed keys.
        mediaTags = {}
        for tagKey, tagValue in self._sourceMediaDescriptor.getTags().items():
            if (
                tagKey not in self._ignoreGlobalKeys
                and tagKey not in self._removeGlobalKeys
            ):
                mediaTags[tagKey] = tagValue

        patternId = self._pc.savePatternSchema(
            patternObj,
            trackDescriptors=self._sourceMediaDescriptor.getTrackDescriptors(),
            mediaTags=mediaTags,
        )
        if patternId:
            # Pattern saved: reload and clear the "needs attention" highlight.
            self.reloadProperties(reset_draft=True)
            self.updateMediaTags()
            self.updateTracks()
            self.updateDifferences()
            self.highlightPattern(False)
def action_new_pattern(self):
    """Create a pattern for the selected show, prompting for show details when
    the placeholder row is selected."""
    descriptor = self.getSelectedShowDescriptor()
    if descriptor is not None:
        self.handle_new_pattern(descriptor)
    else:
        # Placeholder row: collect the new show's details first, then continue
        # in handle_new_pattern via the screen-dismiss callback.
        self.app.push_screen(ShowDetailsScreen(), self.handle_new_pattern)
def action_update_pattern(self):
    """Synchronize the stored pattern with the current input and pending diffs.

    When only the pattern text changed, updates it and returns the update
    result. Otherwise replays the recorded media-tag, track, track-tag and
    disposition differences against the database, then refreshes the UI.
    No-op when there is no current pattern.
    """
    if self._currentPattern is not None:
        patternObj = self.getPatternObjFromInput()
        if (
            patternObj
            and self._currentPattern.getPattern() != patternObj["pattern"]
        ):
            updated = self._pc.updatePattern(
                self._currentPattern.getId(),
                patternObj,
            )
            if updated:
                self.reloadProperties(reset_draft=True)
                self.updateMediaTags()
                self.updateTracks()
                self.updateDifferences()
            # Text change is the whole update — skip the diff replay below.
            return updated

        self.reloadProperties(reset_draft=True)

        # --- media-level tag differences ---------------------------------
        tagDifferences = self._mediaChangeSetObj.get(MediaDescriptorChangeSet.TAGS_KEY, {})
        # Keys listed as "added" are deleted from the pattern's stored tags.
        for addedTagKey in tagDifferences.get(DIFF_ADDED_KEY, {}).keys():
            self._tac.deleteMediaTagByKey(self._currentPattern.getId(), addedTagKey)

        # Keys listed as "removed" are written back from the source media.
        for removedTagKey in tagDifferences.get(DIFF_REMOVED_KEY, {}).keys():
            currentTags = self._sourceMediaDescriptor.getTags()
            self._tac.updateMediaTag(
                self._currentPattern.getId(),
                removedTagKey,
                currentTags[removedTagKey],
            )

        # Changed keys are overwritten with the source media's value.
        for changedTagKey in tagDifferences.get(DIFF_CHANGED_KEY, {}).keys():
            currentTags = self._sourceMediaDescriptor.getTags()
            self._tac.updateMediaTag(
                self._currentPattern.getId(),
                changedTagKey,
                currentTags[changedTagKey],
            )

        # --- track-level differences -------------------------------------
        trackDifferences = self._mediaChangeSetObj.get(MediaDescriptorChangeSet.TRACKS_KEY, {})

        for trackDescriptor in trackDifferences.get(DIFF_ADDED_KEY, {}).values():
            self._tc.addTrack(trackDescriptor, patternId=self._currentPattern.getId())

        for trackDescriptor in trackDifferences.get(DIFF_REMOVED_KEY, {}).values():
            self._tc.deleteTrack(trackDescriptor.getId())

        for trackIndex, trackDiff in trackDifferences.get(DIFF_CHANGED_KEY, {}).items():
            # Resolve the stored (target) track matching this source track index.
            targetTracks = [
                track
                for track in self._targetMediaDescriptor.getTrackDescriptors()
                if track.getIndex() == trackIndex
            ]
            targetTrackId = targetTracks[0].getId() if targetTracks else None
            targetTrackIndex = targetTracks[0].getIndex() if targetTracks else None

            # Per-track tag differences.
            # NOTE(review): tag updates/deletes proceed even when targetTrackId
            # is None (no matching stored track) — confirm the controllers
            # tolerate a None id.
            tagsDiff = trackDiff.get(TrackDescriptor.TAGS_KEY, {})
            for tagKey, tagValue in tagsDiff.get(DIFF_ADDED_KEY, {}).items():
                self._tac.updateTrackTag(targetTrackId, tagKey, tagValue)
            for tagKey in tagsDiff.get(DIFF_REMOVED_KEY, {}).keys():
                self._tac.deleteTrackTagByKey(targetTrackId, tagKey)
            for tagKey, tagValue in tagsDiff.get(DIFF_CHANGED_KEY, {}).items():
                self._tac.updateTrackTag(targetTrackId, tagKey, tagValue)

            # Per-track disposition differences: "added" → enable, "removed" → disable.
            dispositionDiff = trackDiff.get(TrackDescriptor.DISPOSITION_SET_KEY, {})
            for changedDisposition in dispositionDiff.get(DIFF_ADDED_KEY, set()):
                if targetTrackIndex is not None:
                    self._tc.setDispositionState(
                        self._currentPattern.getId(),
                        targetTrackIndex,
                        changedDisposition,
                        True,
                    )
            for changedDisposition in dispositionDiff.get(DIFF_REMOVED_KEY, set()):
                if targetTrackIndex is not None:
                    self._tc.setDispositionState(
                        self._currentPattern.getId(),
                        targetTrackIndex,
                        changedDisposition,
                        False,
                    )

        # Refresh the UI with the now-synchronized pattern.
        self.reloadProperties(reset_draft=True)
        self.updateMediaTags()
        self.updateTracks()
        self.updateDifferences()
def action_edit_pattern(self):
    """Open the pattern-details screen for the pattern described by the current input.

    Raises click.ClickException when the input's pattern cannot be resolved to
    a stored pattern id. Does nothing when the input holds no pattern text.
    """
    patternObj = self.getPatternObjFromInput()
    if not patternObj.get("pattern"):
        return

    selectedPatternId = self._pc.findPattern(patternObj)
    if selectedPatternId is None:
        raise click.ClickException(
            "InspectDetailsScreen.action_edit_pattern(): Pattern to edit has no id"
        )

    details_screen = PatternDetailsScreen(
        patternId=selectedPatternId,
        showId=self.getSelectedShowDescriptor().getId(),
    )
    self.app.push_screen(details_screen, self.handle_edit_pattern)
def handle_edit_pattern(self, screenResult):
    """Refresh the screen after the pattern-details dialog closed with a result.

    A falsy *screenResult* (dialog cancelled) leaves everything untouched.
    """
    if not screenResult:
        return

    self.reloadProperties(reset_draft=True)
    pattern = self._currentPattern
    if pattern is not None:
        self.query_one("#pattern_input", Input).value = pattern.getPattern()
    self.updateMediaTags()
    self.updateTracks()
    self.updateDifferences()
226
src/ffx/iso_language.py
Normal file
226
src/ffx/iso_language.py
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
from enum import Enum
|
||||||
|
import difflib
|
||||||
|
|
||||||
|
from .i18n import translate_iso_language
|
||||||
|
|
||||||
|
|
||||||
|
class IsoLanguage(Enum):
    """ISO 639 language registry.

    Each member's value holds the English name, the ISO 639-1 two-letter code
    and the ISO 639-2 three-letter code(s) (bibliographic variants included
    where they differ from the terminological code).
    """

    ABKHAZIAN = {"name": "Abkhazian", "iso639_1": "ab", "iso639_2": ["abk"]}
    AFAR = {"name": "Afar", "iso639_1": "aa", "iso639_2": ["aar"]}
    AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]}
    AKAN = {"name": "Akan", "iso639_1": "ak", "iso639_2": ["aka"]}
    ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["sqi", "alb"]}
    AMHARIC = {"name": "Amharic", "iso639_1": "am", "iso639_2": ["amh"]}
    ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]}
    ARAGONESE = {"name": "Aragonese", "iso639_1": "an", "iso639_2": ["arg"]}
    ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["hye", "arm"]}
    ASSAMESE = {"name": "Assamese", "iso639_1": "as", "iso639_2": ["asm"]}
    AVARIC = {"name": "Avaric", "iso639_1": "av", "iso639_2": ["ava"]}
    AVESTAN = {"name": "Avestan", "iso639_1": "ae", "iso639_2": ["ave"]}
    AYMARA = {"name": "Aymara", "iso639_1": "ay", "iso639_2": ["aym"]}
    AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]}
    BAMBARA = {"name": "Bambara", "iso639_1": "bm", "iso639_2": ["bam"]}
    BASHKIR = {"name": "Bashkir", "iso639_1": "ba", "iso639_2": ["bak"]}
    BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["eus", "baq"]}
    BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]}
    BENGALI = {"name": "Bengali", "iso639_1": "bn", "iso639_2": ["ben"]}
    BISLAMA = {"name": "Bislama", "iso639_1": "bi", "iso639_2": ["bis"]}
    BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]}
    BOSNIAN = {"name": "Bosnian", "iso639_1": "bs", "iso639_2": ["bos"]}
    BRETON = {"name": "Breton", "iso639_1": "br", "iso639_2": ["bre"]}
    BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]}
    BURMESE = {"name": "Burmese", "iso639_1": "my", "iso639_2": ["mya", "bur"]}
    CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]}
    CHAMORRO = {"name": "Chamorro", "iso639_1": "ch", "iso639_2": ["cha"]}
    CHECHEN = {"name": "Chechen", "iso639_1": "ce", "iso639_2": ["che"]}
    CHICHEWA = {"name": "Chichewa", "iso639_1": "ny", "iso639_2": ["nya"]}
    CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]}
    CHURCH_SLAVIC = {"name": "Church Slavic", "iso639_1": "cu", "iso639_2": ["chu"]}
    CHUVASH = {"name": "Chuvash", "iso639_1": "cv", "iso639_2": ["chv"]}
    CORNISH = {"name": "Cornish", "iso639_1": "kw", "iso639_2": ["cor"]}
    CORSICAN = {"name": "Corsican", "iso639_1": "co", "iso639_2": ["cos"]}
    CREE = {"name": "Cree", "iso639_1": "cr", "iso639_2": ["cre"]}
    CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]}
    CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["ces", "cze"]}
    DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]}
    DIVEHI = {"name": "Divehi", "iso639_1": "dv", "iso639_2": ["div"]}
    DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]}
    DZONGKHA = {"name": "Dzongkha", "iso639_1": "dz", "iso639_2": ["dzo"]}
    ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]}
    ESPERANTO = {"name": "Esperanto", "iso639_1": "eo", "iso639_2": ["epo"]}
    ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]}
    EWE = {"name": "Ewe", "iso639_1": "ee", "iso639_2": ["ewe"]}
    FAROESE = {"name": "Faroese", "iso639_1": "fo", "iso639_2": ["fao"]}
    FIJIAN = {"name": "Fijian", "iso639_1": "fj", "iso639_2": ["fij"]}
    FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]}
    FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]}
    FULAH = {"name": "Fulah", "iso639_1": "ff", "iso639_2": ["ful"]}
    GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]}
    GANDA = {"name": "Ganda", "iso639_1": "lg", "iso639_2": ["lug"]}
    GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["kat", "geo"]}
    GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]}
    GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["ell", "gre"]}
    GUARANI = {"name": "Guarani", "iso639_1": "gn", "iso639_2": ["grn"]}
    GUJARATI = {"name": "Gujarati", "iso639_1": "gu", "iso639_2": ["guj"]}
    HAITIAN = {"name": "Haitian", "iso639_1": "ht", "iso639_2": ["hat"]}
    HAUSA = {"name": "Hausa", "iso639_1": "ha", "iso639_2": ["hau"]}
    HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]}
    HERERO = {"name": "Herero", "iso639_1": "hz", "iso639_2": ["her"]}
    HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]}
    HIRI_MOTU = {"name": "Hiri Motu", "iso639_1": "ho", "iso639_2": ["hmo"]}
    HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]}
    ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["isl", "ice"]}
    IDO = {"name": "Ido", "iso639_1": "io", "iso639_2": ["ido"]}
    IGBO = {"name": "Igbo", "iso639_1": "ig", "iso639_2": ["ibo"]}
    INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]}
    INTERLINGUA = {"name": "Interlingua", "iso639_1": "ia", "iso639_2": ["ina"]}
    INTERLINGUE = {"name": "Interlingue", "iso639_1": "ie", "iso639_2": ["ile"]}
    INUKTITUT = {"name": "Inuktitut", "iso639_1": "iu", "iso639_2": ["iku"]}
    INUPIAQ = {"name": "Inupiaq", "iso639_1": "ik", "iso639_2": ["ipk"]}
    IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]}
    ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]}
    JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]}
    JAVANESE = {"name": "Javanese", "iso639_1": "jv", "iso639_2": ["jav"]}
    KALAALLISUT = {"name": "Kalaallisut", "iso639_1": "kl", "iso639_2": ["kal"]}
    KANNADA = {"name": "Kannada", "iso639_1": "kn", "iso639_2": ["kan"]}
    KANURI = {"name": "Kanuri", "iso639_1": "kr", "iso639_2": ["kau"]}
    KASHMIRI = {"name": "Kashmiri", "iso639_1": "ks", "iso639_2": ["kas"]}
    KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]}
    KHMER = {"name": "Khmer", "iso639_1": "km", "iso639_2": ["khm"]}
    KIKUYU = {"name": "Kikuyu", "iso639_1": "ki", "iso639_2": ["kik"]}
    KINYARWANDA = {"name": "Kinyarwanda", "iso639_1": "rw", "iso639_2": ["kin"]}
    KIRGHIZ = {"name": "Kirghiz", "iso639_1": "ky", "iso639_2": ["kir"]}
    KOMI = {"name": "Komi", "iso639_1": "kv", "iso639_2": ["kom"]}
    KONGO = {"name": "Kongo", "iso639_1": "kg", "iso639_2": ["kon"]}
    KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]}
    KUANYAMA = {"name": "Kuanyama", "iso639_1": "kj", "iso639_2": ["kua"]}
    KURDISH = {"name": "Kurdish", "iso639_1": "ku", "iso639_2": ["kur"]}
    LAO = {"name": "Lao", "iso639_1": "lo", "iso639_2": ["lao"]}
    LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]}
    LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]}
    LIMBURGAN = {"name": "Limburgan", "iso639_1": "li", "iso639_2": ["lim"]}
    LINGALA = {"name": "Lingala", "iso639_1": "ln", "iso639_2": ["lin"]}
    LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]}
    LUBA_KATANGA = {"name": "Luba-Katanga", "iso639_1": "lu", "iso639_2": ["lub"]}
    LUXEMBOURGISH = {"name": "Luxembourgish", "iso639_1": "lb", "iso639_2": ["ltz"]}
    MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mkd", "mac"]}
    MALAGASY = {"name": "Malagasy", "iso639_1": "mg", "iso639_2": ["mlg"]}
    MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["msa", "may"]}
    MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]}
    MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]}
    MANX = {"name": "Manx", "iso639_1": "gv", "iso639_2": ["glv"]}
    MAORI = {"name": "Maori", "iso639_1": "mi", "iso639_2": ["mri", "mao"]}
    MARATHI = {"name": "Marathi", "iso639_1": "mr", "iso639_2": ["mar"]}
    MARSHALLESE = {"name": "Marshallese", "iso639_1": "mh", "iso639_2": ["mah"]}
    MONGOLIAN = {"name": "Mongolian", "iso639_1": "mn", "iso639_2": ["mon"]}
    NAURU = {"name": "Nauru", "iso639_1": "na", "iso639_2": ["nau"]}
    NAVAJO = {"name": "Navajo", "iso639_1": "nv", "iso639_2": ["nav"]}
    NDONGA = {"name": "Ndonga", "iso639_1": "ng", "iso639_2": ["ndo"]}
    NEPALI = {"name": "Nepali", "iso639_1": "ne", "iso639_2": ["nep"]}
    NORTH_NDEBELE = {"name": "North Ndebele", "iso639_1": "nd", "iso639_2": ["nde"]}
    NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]}
    NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]}
    NORWEGIAN_NYNORSK = {"name": "Nynorsk", "iso639_1": "nn", "iso639_2": ["nno"]}
    OCCITAN = {"name": "Occitan", "iso639_1": "oc", "iso639_2": ["oci"]}
    OJIBWA = {"name": "Ojibwa", "iso639_1": "oj", "iso639_2": ["oji"]}
    ORIYA = {"name": "Oriya", "iso639_1": "or", "iso639_2": ["ori"]}
    OROMO = {"name": "Oromo", "iso639_1": "om", "iso639_2": ["orm"]}
    OSSETIAN = {"name": "Ossetian", "iso639_1": "os", "iso639_2": ["oss"]}
    PALI = {"name": "Pali", "iso639_1": "pi", "iso639_2": ["pli"]}
    PANJABI = {"name": "Panjabi", "iso639_1": "pa", "iso639_2": ["pan"]}
    PERSIAN = {"name": "Persian", "iso639_1": "fa", "iso639_2": ["fas", "per"]}
    POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]}
    PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]}
    PUSHTO = {"name": "Pushto", "iso639_1": "ps", "iso639_2": ["pus"]}
    QUECHUA = {"name": "Quechua", "iso639_1": "qu", "iso639_2": ["que"]}
    ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["ron", "rum"]}
    ROMANSH = {"name": "Romansh", "iso639_1": "rm", "iso639_2": ["roh"]}
    RUNDI = {"name": "Rundi", "iso639_1": "rn", "iso639_2": ["run"]}
    RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]}
    SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]}
    SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]}
    SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]}
    SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]}
    SCOTTISH_GAELIC = {"name": "Scottish Gaelic", "iso639_1": "gd", "iso639_2": ["gla"]}
    SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]}
    SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]}
    SICHUAN_YI = {"name": "Sichuan Yi", "iso639_1": "ii", "iso639_2": ["iii"]}
    SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]}
    SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]}
    SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk", "slo"]}
    SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]}
    SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]}
    SOUTH_NDEBELE = {"name": "South Ndebele", "iso639_1": "nr", "iso639_2": ["nbl"]}
    SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]}
    SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]}
    SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]}
    SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]}
    SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]}
    SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]}
    TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]}
    TAHITIAN = {"name": "Tahitian", "iso639_1": "ty", "iso639_2": ["tah"]}
    TAJIK = {"name": "Tajik", "iso639_1": "tg", "iso639_2": ["tgk"]}
    TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]}
    TATAR = {"name": "Tatar", "iso639_1": "tt", "iso639_2": ["tat"]}
    TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]}
    THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]}
    TIBETAN = {"name": "Tibetan", "iso639_1": "bo", "iso639_2": ["bod", "tib"]}
    TIGRINYA = {"name": "Tigrinya", "iso639_1": "ti", "iso639_2": ["tir"]}
    TONGA = {"name": "Tonga", "iso639_1": "to", "iso639_2": ["ton"]}
    TSONGA = {"name": "Tsonga", "iso639_1": "ts", "iso639_2": ["tso"]}
    TSWANA = {"name": "Tswana", "iso639_1": "tn", "iso639_2": ["tsn"]}
    TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]}
    TURKMEN = {"name": "Turkmen", "iso639_1": "tk", "iso639_2": ["tuk"]}
    TWI = {"name": "Twi", "iso639_1": "tw", "iso639_2": ["twi"]}
    UIGHUR = {"name": "Uighur", "iso639_1": "ug", "iso639_2": ["uig"]}
    UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]}
    URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]}
    UZBEK = {"name": "Uzbek", "iso639_1": "uz", "iso639_2": ["uzb"]}
    VENDA = {"name": "Venda", "iso639_1": "ve", "iso639_2": ["ven"]}
    VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2": ["vie"]}
    VOLAPUK = {"name": "Volapük", "iso639_1": "vo", "iso639_2": ["vol"]}
    WALLOON = {"name": "Walloon", "iso639_1": "wa", "iso639_2": ["wln"]}
    WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["cym", "wel"]}
    WESTERN_FRISIAN = {"name": "Western Frisian", "iso639_1": "fy", "iso639_2": ["fry"]}
    WOLOF = {"name": "Wolof", "iso639_1": "wo", "iso639_2": ["wol"]}
    XHOSA = {"name": "Xhosa", "iso639_1": "xh", "iso639_2": ["xho"]}
    YIDDISH = {"name": "Yiddish", "iso639_1": "yi", "iso639_2": ["yid"]}
    YORUBA = {"name": "Yoruba", "iso639_1": "yo", "iso639_2": ["yor"]}
    ZHUANG = {"name": "Zhuang", "iso639_1": "za", "iso639_2": ["zha"]}
    ZULU = {"name": "Zulu", "iso639_1": "zu", "iso639_2": ["zul"]}

    # NOTE(review): shares iso639_1 "tl" with TAGALOG — presumably intentional
    # (ISO 639-1 has no separate Filipino code); confirm lookups by two-letter
    # code resolve the intended member.
    FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]}

    # Fallback member for unknown/unspecified languages ("und" is the ISO 639-2
    # "undetermined" code; "xx" is a project-local placeholder).
    UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]}


    @staticmethod
    def find(label : str):
        """Fuzzy-match *label* against the English and translated language names.

        Returns the closest matching IsoLanguage member, or
        IsoLanguage.UNDEFINED when nothing is close enough.
        """
        # Map every known spelling (English name and its translation) to its member.
        candidate_map = {}
        for language in IsoLanguage:
            candidate_map[language.value["name"]] = language
            candidate_map[translate_iso_language(language.name, language.value["name"])] = language

        closestMatches = difflib.get_close_matches(label, list(candidate_map.keys()), n=1)

        if closestMatches:
            return candidate_map.get(closestMatches[0], IsoLanguage.UNDEFINED)
        else:
            return IsoLanguage.UNDEFINED

    @staticmethod
    def findThreeLetter(theeLetter : str):
        """Return the member whose ISO 639-2 codes include the given three-letter
        code, or IsoLanguage.UNDEFINED. (NOTE: parameter name has a typo —
        kept for interface compatibility.)"""
        foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value["iso639_2"]]
        return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED


    def label(self):
        """Return the translated display name of this language."""
        return str(translate_iso_language(self.name, self.value["name"]))

    def twoLetter(self):
        """Return the ISO 639-1 two-letter code."""
        return str(self.value["iso639_1"])

    def threeLetter(self):
        """Return the primary ISO 639-2 three-letter code."""
        return str(self.value["iso639_2"][0])
68
src/ffx/logging_utils.py
Normal file
68
src/ffx/logging_utils.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
# Name of the shared application logger.
FFX_LOGGER_NAME = "FFX"
# Handler names used to find and reuse existing handlers on reconfiguration.
CONSOLE_HANDLER_NAME = "ffx-console"
FILE_HANDLER_NAME = "ffx-file"
|
|
||||||
|
def get_ffx_logger(name: str = FFX_LOGGER_NAME) -> logging.Logger:
    """Return the named application logger at DEBUG level.

    A NullHandler is attached on first use so the logger stays quiet until
    configure_ffx_logger() installs real handlers.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:
        logger.addHandler(logging.NullHandler())
    return logger
|
|
||||||
|
def configure_ffx_logger(
    log_file_path: str,
    file_level: int,
    console_level: int,
    name: str = FFX_LOGGER_NAME,
) -> logging.Logger:
    """Install (or update) the console and file handlers of the ffx logger.

    Existing handlers are found by name and reused; the file handler is
    replaced when *log_file_path* points somewhere new. Propagation to the
    root logger is disabled. Returns the configured logger.
    """
    logger = get_ffx_logger(name)
    logger.propagate = False

    # Real handlers are about to be installed — drop the placeholder.
    for handler in list(logger.handlers):
        if isinstance(handler, logging.NullHandler):
            logger.removeHandler(handler)

    def _named_handler(handler_name):
        # Locate a previously installed handler by its assigned name.
        for handler in logger.handlers:
            if handler.get_name() == handler_name:
                return handler
        return None

    console_handler = _named_handler(CONSOLE_HANDLER_NAME)
    if console_handler is None:
        console_handler = logging.StreamHandler()
        console_handler.set_name(CONSOLE_HANDLER_NAME)
        logger.addHandler(console_handler)
    console_handler.setLevel(console_level)
    console_handler.setFormatter(logging.Formatter("%(message)s"))

    normalized_log_path = os.path.abspath(log_file_path)
    file_handler = _named_handler(FILE_HANDLER_NAME)
    if (
        file_handler is not None
        and os.path.abspath(file_handler.baseFilename) != normalized_log_path
    ):
        # Log destination changed: discard the stale handler and start fresh.
        logger.removeHandler(file_handler)
        file_handler.close()
        file_handler = None

    if file_handler is None:
        file_handler = logging.FileHandler(normalized_log_path)
        file_handler.set_name(FILE_HANDLER_NAME)
        logger.addHandler(file_handler)
    file_handler.setLevel(file_level)
    file_handler.setFormatter(
        logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    )

    return logger
||||||
47
src/ffx/media_controller.py
Normal file
47
src/ffx/media_controller.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.model.pattern import Pattern
|
||||||
|
from ffx.media_descriptor import MediaDescriptor
|
||||||
|
|
||||||
|
from ffx.tag_controller import TagController
|
||||||
|
from ffx.track_controller import TrackController
|
||||||
|
|
||||||
|
class MediaController():
    """Persists a MediaDescriptor's tags and tracks against a stored Pattern."""

    def __init__(self, context):
        """Keep the application context and set up the tag/track sub-controllers."""
        self.context = context
        self.Session = self.context['database']['session'] # convenience

        self.__logger = context['logger']

        self.__tc = TrackController(context = context)
        self.__tac = TagController(context = context)

    def setPatternMediaDescriptor(self, mediaDescriptor: MediaDescriptor, patternId: int):
        """Copy *mediaDescriptor*'s tags and tracks onto the pattern *patternId*.

        Returns True when the pattern exists and the data was committed,
        False when no such pattern is stored.

        Raises:
            click.ClickException: on any conversion/database/controller error
                (the error is logged first).
        """
        s = None
        try:
            pid = int(patternId)

            s = self.Session()
            pattern = s.query(Pattern).filter(Pattern.id == pid).first()

            if pattern is not None:
                # Fix: iterate key/value pairs — iterating the tags dict
                # directly yields keys only and broke the tuple unpacking.
                for mediaTagKey, mediaTagValue in mediaDescriptor.getTags().items():
                    self.__tac.updateMediaTag(pid, mediaTagKey, mediaTagValue)
                for trackDescriptor in mediaDescriptor.getTrackDescriptors():
                    self.__tc.addTrack(trackDescriptor, patternId = pid)

                s.commit()
                return True
            else:
                return False

        except Exception as ex:
            self.__logger.error(f"MediaController.setPatternMediaDescriptor(): {repr(ex)}")
            raise click.ClickException(f"MediaController.setPatternMediaDescriptor(): {repr(ex)}")
        finally:
            # Fix: guard close() — `s` was unbound (NameError in finally) when
            # int(patternId) raised before the session was created.
            if s is not None:
                s.close()
579
src/ffx/media_descriptor.py
Normal file
579
src/ffx/media_descriptor.py
Normal file
@@ -0,0 +1,579 @@
|
|||||||
|
import os, re, click
|
||||||
|
|
||||||
|
from typing import List, Self
|
||||||
|
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
from ffx.iso_language import IsoLanguage
|
||||||
|
|
||||||
|
from ffx.track_disposition import TrackDisposition
|
||||||
|
from ffx.track_codec import TrackCodec
|
||||||
|
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
from ffx.logging_utils import get_ffx_logger
|
||||||
|
|
||||||
|
|
||||||
|
class MediaDescriptor:
    """This class represents the structural content of a media file including streams and metadata"""

    # kwargs key for the shared application context dict.
    CONTEXT_KEY = "context"

    # kwargs keys for media-level tags and track collections.
    TAGS_KEY = "tags"
    TRACKS_KEY = "tracks"

    TRACK_DESCRIPTOR_LIST_KEY = "track_descriptors"
    ATTACHMENT_DESCRIPTOR_LIST_KEY = "attachment_descriptors"
    CLEAR_TAGS_FLAG_KEY = "clear_tags"

    # Keys as they appear in ffprobe JSON output.
    FFPROBE_DISPOSITION_KEY = "disposition"
    FFPROBE_TAGS_KEY = "tags"
    FFPROBE_CODEC_TYPE_KEY = "codec_type"

    #407 remove as well
    EXCLUDED_MEDIA_TAGS = ["creation_time"]

    # Filename patterns for external subtitle files, appended to a prefix:
    # e.g. "S01E02_3_eng_DEF" (season, episode, stream index, 3-letter language,
    # optional uppercase disposition indicators) and the season-less variant.
    SEASON_EPISODE_STREAM_LANGUAGE_DISPOSITIONS_MATCH = '[sS]([0-9]+)[eE]([0-9]+)_([0-9]+)_([a-z]{3})(?:_([A-Z]{3}))*'
    STREAM_LANGUAGE_DISPOSITIONS_MATCH = '([0-9]+)_([a-z]{3})(?:_([A-Z]{3}))*'

    # External subtitle files are expected in WebVTT format.
    SUBTITLE_FILE_EXTENSION = 'vtt'
|
def __init__(self, **kwargs):
    """Create a descriptor from optional kwargs: context (dict), tags (dict),
    track_descriptors (list of TrackDescriptor).

    Raises:
        TypeError: when a supplied kwarg has the wrong type.
    """
    if MediaDescriptor.CONTEXT_KEY in kwargs:
        if type(kwargs[MediaDescriptor.CONTEXT_KEY]) is not dict:
            raise TypeError(
                f"MediaDescriptor.__init__(): Argument {MediaDescriptor.CONTEXT_KEY} is required to be of type dict"
            )
        self.__context = kwargs[MediaDescriptor.CONTEXT_KEY]
        self.__logger = self.__context['logger']
    else:
        # No context given: fall back to the package logger.
        self.__context = {}
        self.__logger = get_ffx_logger()

    if MediaDescriptor.TAGS_KEY in kwargs:
        if type(kwargs[MediaDescriptor.TAGS_KEY]) is not dict:
            raise TypeError(
                f"MediaDescriptor.__init__(): Argument {MediaDescriptor.TAGS_KEY} is required to be of type dict"
            )
        self.__mediaTags = kwargs[MediaDescriptor.TAGS_KEY]
    else:
        self.__mediaTags = {}

    if MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY in kwargs:
        if (
            type(kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY]) is not list
        ):  # Use List typehint for TrackDescriptor as well if it works
            raise TypeError(
                f"MediaDescriptor.__init__(): Argument {MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY} is required to be of type list"
            )
        for d in kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY]:
            if type(d) is not TrackDescriptor:
                # BUG FIX: the message previously blamed "TrackDesciptor.__init__()"
                # (misspelled and the wrong class) for a MediaDescriptor argument error.
                raise TypeError(
                    f"MediaDescriptor.__init__(): All elements of argument list {MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY} are required to be of type TrackDescriptor"
                )
        self.__trackDescriptors: List[TrackDescriptor] = kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY]
    else:
        self.__trackDescriptors: List[TrackDescriptor] = []
|
def setTrackLanguage(self, language: str, index: int, trackType: TrackType = None):
    """Set the ISO 639 language of the track at *index* in the (optionally
    type-filtered) track list.

    Skips the update with a warning when the language code is unknown or the
    index is out of range.
    """
    trackLanguage = IsoLanguage.findThreeLetter(language)
    if trackLanguage == IsoLanguage.UNDEFINED:
        self.__logger.warning('MediaDescriptor.setTrackLanguage(): Parameter language does not contain a registered '
                              + f"ISO 639 3-letter language code, skipping to set language for"
                              + str('' if trackType is None else trackType.label()) + f"track {index}")
        # BUG FIX: the warning promises to skip, but the original fell through
        # and assigned the UNDEFINED language anyway.
        return

    trackList = self.getTrackDescriptors(trackType=trackType)

    if index < 0 or index > len(trackList) - 1:
        self.__logger.warning(f"MediaDescriptor.setTrackLanguage(): Parameter index ({index}) is "
                              + f"out of range of {'' if trackType is None else trackType.label()}track list")
        # BUG FIX: the original continued and raised IndexError on trackList[index]
        # right after warning about the out-of-range index.
        return

    td: TrackDescriptor = trackList[index]
    td.setLanguage(trackLanguage)

    return
|
def setTrackTitle(self, title: str, index: int, trackType: TrackType = None):
    """Assign *title* to the track at *index* of the (optionally type-filtered)
    track list; abort the CLI run on an out-of-range index."""
    trackList = self.getTrackDescriptors(trackType=trackType)

    withinRange = 0 <= index <= len(trackList) - 1
    if not withinRange:
        self.__logger.error(f"MediaDescriptor.setTrackTitle(): Parameter index ({index}) is "
                            + f"out of range of {'' if trackType is None else trackType.label()}track list")
        raise click.Abort()

    trackList[index].setTitle(title)
|
def setDefaultSubTrack(self, trackType: TrackType, subIndex: int):
    """Mark exactly the *trackType* track with sub index *subIndex* as DEFAULT,
    clearing the flag on all its siblings of the same type."""
    wanted = int(subIndex)
    for descriptor in self.getTrackDescriptors():
        if descriptor.getType() == trackType:
            descriptor.setDispositionFlag(
                TrackDisposition.DEFAULT, descriptor.getSubIndex() == wanted
            )
|
def setForcedSubTrack(self, trackType: TrackType, subIndex: int):
    """Mark exactly the *trackType* track with sub index *subIndex* as FORCED,
    clearing the flag on all its siblings of the same type."""
    wanted = int(subIndex)
    for descriptor in self.getTrackDescriptors():
        if descriptor.getType() == trackType:
            descriptor.setDispositionFlag(
                TrackDisposition.FORCED, descriptor.getSubIndex() == wanted
            )
|
def checkConfiguration(self):
    """Validate the track layout.

    Raises:
        ValueError: when more than one track of a type carries the DEFAULT or
            FORCED disposition, or when two tracks share the same source stream.
    """
    videoTracks = self.getVideoTracks()
    audioTracks = self.getAudioTracks()
    subtitleTracks = self.getSubtitleTracks()

    def countFlagged(tracks, flag):
        # Number of tracks carrying the given disposition flag.
        return sum(1 for t in tracks if t.getDispositionFlag(flag))

    if countFlagged(videoTracks, TrackDisposition.DEFAULT) > 1:
        raise ValueError('More than one default video track')
    if countFlagged(audioTracks, TrackDisposition.DEFAULT) > 1:
        raise ValueError('More than one default audio track')
    if countFlagged(subtitleTracks, TrackDisposition.DEFAULT) > 1:
        raise ValueError('More than one default subtitle track')

    if countFlagged(videoTracks, TrackDisposition.FORCED) > 1:
        raise ValueError('More than one forced video track')
    if countFlagged(audioTracks, TrackDisposition.FORCED) > 1:
        raise ValueError('More than one forced audio track')
    if countFlagged(subtitleTracks, TrackDisposition.FORCED) > 1:
        raise ValueError('More than one forced subtitle track')

    # Every output track must originate from a distinct source stream.
    trackDescriptors = videoTracks + audioTracks + subtitleTracks
    if len({t.getSourceIndex() for t in trackDescriptors}) < len(trackDescriptors):
        raise ValueError('Multiple streams originating from the same source stream')
|
def applyOverrides(self, overrides: dict):
    """Apply user-supplied overrides: per-track languages and titles,
    forced/default dispositions per track type, and stream order."""
    for trackIndex, language in overrides.get('languages', {}).items():
        self.setTrackLanguage(language, trackIndex)

    for trackIndex, title in overrides.get('titles', {}).items():
        self.setTrackTitle(title, trackIndex)

    # For each track type: a 'forced_*' override wins and also becomes the
    # default; otherwise a 'default_*' override only sets the default flag.
    for trackType, forcedKey, defaultKey in (
        (TrackType.VIDEO, 'forced_video', 'default_video'),
        (TrackType.AUDIO, 'forced_audio', 'default_audio'),
        (TrackType.SUBTITLE, 'forced_subtitle', 'default_subtitle'),
    ):
        if forcedKey in overrides:
            sti = int(overrides[forcedKey])
            self.setForcedSubTrack(trackType, sti)
            self.setDefaultSubTrack(trackType, sti)
        elif defaultKey in overrides:
            self.setDefaultSubTrack(trackType, int(overrides[defaultKey]))

    if 'stream_order' in overrides:
        self.rearrangeTrackDescriptors(overrides['stream_order'])
|
def applySourceIndices(self, sourceMediaDescriptor: Self):
    """Copy the source index of every track from *sourceMediaDescriptor*,
    position by position.

    Raises:
        ValueError: when the two descriptors have different track counts.
    """
    sourceTrackDescriptors = sourceMediaDescriptor.getTrackDescriptors()

    if len(sourceTrackDescriptors) != len(self.__trackDescriptors):
        raise ValueError('MediaDescriptor.applySourceIndices (): Number of track descriptors does not match')

    for ownDescriptor, sourceDescriptor in zip(self.__trackDescriptors, sourceTrackDescriptors):
        ownDescriptor.setSourceIndex(sourceDescriptor.getSourceIndex())
|
def rearrangeTrackDescriptors(self, newOrder: List[int]):
    """Reorder tracks so that position i holds the track formerly at
    newOrder[i], then renumber sub indices and overall indices.

    Raises:
        ValueError: when *newOrder* does not cover every track exactly once in length.
    """
    if len(newOrder) != len(self.__trackDescriptors):
        raise ValueError('Length of list with reordered indices does not match number of track descriptors')
    self.__trackDescriptors = [self.__trackDescriptors[oldIndex] for oldIndex in newOrder]
    self.reindexSubIndices()
    self.reindexIndices()
|
@classmethod
def fromFfprobe(cls, context, formatData, streamData):
    """Build a MediaDescriptor from ffprobe 'format' and 'streams' JSON objects.

    Streams of unknown codec type are ignored; each kept stream receives a
    running per-type sub index.
    """
    kwargs = {MediaDescriptor.CONTEXT_KEY: context}

    if MediaDescriptor.FFPROBE_TAGS_KEY in formatData.keys():
        kwargs[MediaDescriptor.TAGS_KEY] = formatData[MediaDescriptor.FFPROBE_TAGS_KEY]

    descriptors = []
    # TODO: possibly obsolete
    subIndexCounters = {}

    for streamObj in streamData:
        ffprobeCodecType = streamObj[MediaDescriptor.FFPROBE_CODEC_TYPE_KEY]
        trackType = TrackType.fromLabel(ffprobeCodecType)
        if trackType == TrackType.UNKNOWN:
            continue

        subIndex = subIndexCounters.setdefault(trackType, 0)
        descriptors.append(TrackDescriptor.fromFfprobe(streamObj, subIndex=subIndex))
        subIndexCounters[trackType] = subIndex + 1

    kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY] = descriptors
    return cls(**kwargs)
|
def getTags(self):
    """Return the media-level tag dict (the internal object, not a copy)."""
    return self.__mediaTags
|
def sortSubIndices(
|
||||||
|
self, descriptors: List[TrackDescriptor]
|
||||||
|
) -> List[TrackDescriptor]:
|
||||||
|
subIndex = 0
|
||||||
|
for d in descriptors:
|
||||||
|
d.setSubIndex(subIndex)
|
||||||
|
subIndex += 1
|
||||||
|
return descriptors
|
||||||
|
|
||||||
|
def reindexSubIndices(self, trackDescriptors: list = []):
|
||||||
|
tdList = trackDescriptors if trackDescriptors else self.__trackDescriptors
|
||||||
|
subIndexCounter = {}
|
||||||
|
for td in tdList:
|
||||||
|
trackType = td.getType()
|
||||||
|
if trackType not in subIndexCounter.keys():
|
||||||
|
subIndexCounter[trackType] = 0
|
||||||
|
td.setSubIndex(subIndexCounter[trackType])
|
||||||
|
subIndexCounter[trackType] += 1
|
||||||
|
|
||||||
|
def sortIndices(
|
||||||
|
self, descriptors: List[TrackDescriptor]
|
||||||
|
) -> List[TrackDescriptor]:
|
||||||
|
index = 0
|
||||||
|
for d in descriptors:
|
||||||
|
d.setIndex(index)
|
||||||
|
index += 1
|
||||||
|
return descriptors
|
||||||
|
|
||||||
|
def reindexIndices(self, trackDescriptors: list = []):
|
||||||
|
tdList = trackDescriptors if trackDescriptors else self.__trackDescriptors
|
||||||
|
for trackIndex in range(len(tdList)):
|
||||||
|
tdList[trackIndex].setIndex(trackIndex)
|
||||||
|
|
||||||
|
|
||||||
|
# def getAllTrackDescriptors(self):
|
||||||
|
# """Returns all track descriptors sorted by type: video, audio then subtitles"""
|
||||||
|
# return self.getVideoTracks() + self.getAudioTracks() + self.getSubtitleTracks()
|
||||||
|
|
||||||
|
|
||||||
|
def getTrackDescriptors(self,
                        trackType: TrackType = None) -> List[TrackDescriptor]:
    """Return all track descriptors, or only those of *trackType*.

    Note: with trackType=None the internal list itself is returned (not a
    copy); with a type filter a fresh list is built.
    """
    if trackType is None:
        return self.__trackDescriptors
    return [td for td in self.__trackDescriptors if td.getType() == trackType]
|
||||||
|
def getVideoTracks(self) -> List[TrackDescriptor]:
    """Return the video track descriptors."""
    return self.getTrackDescriptors(trackType=TrackType.VIDEO)
|
def getAudioTracks(self) -> List[TrackDescriptor]:
    """Return the audio track descriptors."""
    return self.getTrackDescriptors(trackType=TrackType.AUDIO)
|
def getSubtitleTracks(self) -> List[TrackDescriptor]:
    """Return the subtitle track descriptors."""
    return self.getTrackDescriptors(trackType=TrackType.SUBTITLE)
|
def getAttachmentTracks(self) -> List[TrackDescriptor]:
    """Return the attachment track descriptors (fonts, thumbnails, ...)."""
    return self.getTrackDescriptors(trackType=TrackType.ATTACHMENT)
|
||||||
|
def getImportFileTokens(self, use_sub_index: bool = True):
    """Generate ffmpeg import options for external stream files"""
    # NOTE(review): use_sub_index is currently unused here — confirm whether
    # it is kept only for signature symmetry with getInputMappingTokens().
    importFileTokens = []

    for td in self.__trackDescriptors:
        importedFilePath = td.getExternalSourceFilePath()
        if not importedFilePath:
            continue

        self.__logger.info(f"Substituting subtitle stream #{td.getIndex()} "
                           + f"({td.getType().label()}:{td.getSubIndex()}) "
                           + f"with import from file {td.getExternalSourceFilePath()}")
        importFileTokens.extend(["-i", importedFilePath])

    return importFileTokens
||||||
|
|
||||||
|
def getInputMappingTokens(self,
                          use_sub_index: bool = True,
                          only_video: bool = False,
                          sourceMediaDescriptor: Self = None):
    """Tracks must be reordered for source index order

    Builds the ffmpeg `-map` token list for all tracks (plus TTF font
    attachments). PNG tracks, ATTACHMENT tracks and bitmap subtitle codecs
    (PGS/VOBSUB) are skipped; externally imported streams are mapped from
    their own numbered input file.
    """

    inputMappingTokens = []

    # Iterate tracks in output order; resolve each track's origin stream.
    sortedTrackDescriptors = sorted(self.__trackDescriptors, key=lambda d: d.getIndex())
    sourceTrackDescriptorsByIndex = {
        td.getIndex(): td
        for td in (
            sourceMediaDescriptor.getTrackDescriptors()
            if sourceMediaDescriptor is not None
            else sortedTrackDescriptors
        )
    }

    # raise click.ClickException(' '.join([f"\nindex={td.getIndex()} subIndex={td.getSubIndex()} srcIndex={td.getSourceIndex()} type={td.getType().label()}" for td in self.__trackDescriptors]))

    # filePointer numbers the extra `-i` inputs produced by getImportFileTokens().
    filePointer = 1
    for trackIndex in range(len(sortedTrackDescriptors)):

        td: TrackDescriptor = sortedTrackDescriptors[trackIndex]

        #HINT: Attached thumbnails are not supported by .webm container format
        if td.getCodec() != TrackCodec.PNG:

            sourceTrackDescriptor = sourceTrackDescriptorsByIndex.get(td.getSourceIndex())
            if sourceTrackDescriptor is None:
                raise ValueError(f"No source track descriptor found for source index {td.getSourceIndex()}")

            # Overall index and per-type sub index of the originating stream.
            stdi = sourceTrackDescriptor.getIndex()
            stdsi = sourceTrackDescriptor.getSubIndex()

            trackType = td.getType()
            trackCodec = td.getCodec()

            if (trackType != TrackType.ATTACHMENT
                    and (trackType == TrackType.VIDEO or not only_video)):

                importedFilePath = td.getExternalSourceFilePath()

                if use_sub_index:

                    if importedFilePath:
                        # External file: map the first stream of this type from
                        # the dedicated extra input, then advance the pointer.
                        inputMappingTokens += [
                            "-map",
                            f"{filePointer}:{trackType.indicator()}:0",
                        ]
                        filePointer += 1

                    else:
                        # Bitmap subtitle codecs cannot be carried over.
                        if not trackCodec in [TrackCodec.PGS, TrackCodec.VOBSUB]:
                            inputMappingTokens += [
                                "-map",
                                f"0:{trackType.indicator()}:{stdsi}",
                            ]

                else:
                    if not trackCodec in [TrackCodec.PGS, TrackCodec.VOBSUB]:
                        inputMappingTokens += ["-map", f"0:{stdi}"]

    # Map TTF font attachments (from the source descriptor when provided).
    if sourceMediaDescriptor:
        fontDescriptors = [ftd for ftd in sourceMediaDescriptor.getAttachmentTracks()
                           if ftd.getCodec() == TrackCodec.TTF]
    else:
        fontDescriptors = [ftd for ftd in self.__trackDescriptors
                           if ftd.getType() == TrackType.ATTACHMENT
                           and ftd.getCodec() == TrackCodec.TTF]

    for ad in sorted(fontDescriptors, key=lambda d: d.getIndex()):
        inputMappingTokens += ["-map", f"0:{ad.getIndex()}"]

    return inputMappingTokens
|
||||||
|
def searchSubtitleFiles(self, searchDirectory, prefix):
    """Scan *searchDirectory* for external subtitle files named after *prefix*.

    Two filename contracts are recognized (both with the .vtt extension):
    "<prefix>_S<season>E<episode>_<index>_<lang>[_<DISP>]*" and the
    season-less "<prefix>_<index>_<lang>[_<DISP>]*". Returns a list of dicts
    with keys path, index, language, disposition_set (plus season/episode for
    the first form).
    """

    sesld_match = re.compile(f"{prefix}_{MediaDescriptor.SEASON_EPISODE_STREAM_LANGUAGE_DISPOSITIONS_MATCH}")
    sld_match = re.compile(f"{prefix}_{MediaDescriptor.STREAM_LANGUAGE_DISPOSITIONS_MATCH}")

    subtitleFileDescriptors = []

    for subtitleFilename in os.listdir(searchDirectory):
        if subtitleFilename.startswith(prefix) and subtitleFilename.endswith(
            "." + MediaDescriptor.SUBTITLE_FILE_EXTENSION
        ):

            sesld_result = sesld_match.search(subtitleFilename)
            # Only try the season-less pattern when the season/episode one failed.
            sld_result = None if not sesld_result is None else sld_match.search(subtitleFilename)

            if not sesld_result is None:

                subtitleFilePath = os.path.join(searchDirectory, subtitleFilename)
                if os.path.isfile(subtitleFilePath):

                    subtitleFileDescriptor = {}
                    subtitleFileDescriptor["path"] = subtitleFilePath
                    subtitleFileDescriptor["season"] = int(sesld_result.group(1))
                    subtitleFileDescriptor["episode"] = int(sesld_result.group(2))
                    subtitleFileDescriptor["index"] = int(sesld_result.group(3))
                    subtitleFileDescriptor["language"] = sesld_result.group(4)

                    # Groups beyond the 4th hold optional disposition indicators.
                    # NOTE(review): a repeated regex group captures only its last
                    # match, so at most one disposition per file can be picked up
                    # here — confirm whether multiple dispositions are expected.
                    dispSet = set()
                    dispCaptGroups = sesld_result.groups()
                    numCaptGroups = len(dispCaptGroups)
                    if numCaptGroups > 4:
                        for groupIndex in range(numCaptGroups - 4):
                            disp = TrackDisposition.fromIndicator(dispCaptGroups[groupIndex + 4])
                            if disp is not None:
                                dispSet.add(disp)
                    subtitleFileDescriptor["disposition_set"] = dispSet

                    subtitleFileDescriptors.append(subtitleFileDescriptor)

            if not sld_result is None:

                subtitleFilePath = os.path.join(searchDirectory, subtitleFilename)
                if os.path.isfile(subtitleFilePath):

                    subtitleFileDescriptor = {}
                    subtitleFileDescriptor["path"] = subtitleFilePath
                    subtitleFileDescriptor["index"] = int(sld_result.group(1))
                    subtitleFileDescriptor["language"] = sld_result.group(2)

                    # Same disposition extraction, offset by the two groups of
                    # the season-less pattern (index, language).
                    dispSet = set()
                    dispCaptGroups = sld_result.groups()
                    numCaptGroups = len(dispCaptGroups)
                    if numCaptGroups > 2:
                        for groupIndex in range(numCaptGroups - 2):
                            disp = TrackDisposition.fromIndicator(dispCaptGroups[groupIndex + 2])
                            if disp is not None:
                                dispSet.add(disp)
                    subtitleFileDescriptor["disposition_set"] = dispSet

                    subtitleFileDescriptors.append(subtitleFileDescriptor)

    self.__logger.debug(f"searchSubtitleFiles(): Available subtitle files {subtitleFileDescriptors}")

    return subtitleFileDescriptors
||||||
|
|
||||||
|
def importSubtitles(
    self,
    searchDirectory,
    prefix,
    season: int = -1,
    episode: int = -1,
    preserve_dispositions: bool = False,
):
    """Attach matching external subtitle files to this descriptor's subtitle
    tracks (by stream index), taking over language and — unless
    *preserve_dispositions* is set — disposition flags from the filename.

    season/episode of -1 disables the season/episode filter.
    """
    self.__logger.debug(f"importSubtitles(): Season: {season} Episode: {episode}")

    availableFileSubtitleDescriptors = self.searchSubtitleFiles(searchDirectory, prefix)
    self.__logger.debug(f"importSubtitles(): availableFileSubtitleDescriptors: {availableFileSubtitleDescriptors}")

    subtitleTracks = self.getSubtitleTracks()
    self.__logger.debug(f"importSubtitles(): subtitleTracks: {[s.getIndex() for s in subtitleTracks]}")

    def _episodeMatches(descriptor):
        # No season/episode filter requested -> every file matches.
        if season == -1 and episode == -1:
            return True
        return (descriptor.get("season") == int(season)
                and descriptor.get("episode") == int(episode))

    if availableFileSubtitleDescriptors:
        matchingSubtitleFileDescriptors = sorted(
            (d for d in availableFileSubtitleDescriptors if _episodeMatches(d)),
            key=lambda d: d["index"],
        )
    else:
        matchingSubtitleFileDescriptors = []

    self.__logger.debug(f"importSubtitles(): matchingSubtitleFileDescriptors: {matchingSubtitleFileDescriptors}")

    for msfd in matchingSubtitleFileDescriptors:
        candidates = [s for s in subtitleTracks if s.getIndex() == msfd["index"]]
        if not candidates:
            continue

        self.__logger.debug(f"importSubtitles(): Found matching subtitle file {msfd['path']}")
        matchingTrack = candidates[0]
        matchingTrack.setExternalSourceFilePath(msfd["path"])

        # Prefer metadata coming from the external single-track source when
        # it is provided explicitly by the filename contract.
        matchingTrack.getTags()["language"] = msfd["language"]
        if msfd["disposition_set"] and not preserve_dispositions:
            matchingTrack.setDispositionSet(msfd["disposition_set"])
|
||||||
|
def getConfiguration(self, label: str = ''):
    """Yield a human-readable configuration dump: a header line with the
    media tags followed by one line per track."""
    header = label if label else 'MediaDescriptor ' + str(id(self))
    mediaTagText = ' '.join(str(k) + '=' + str(v) for k, v in self.__mediaTags.items())
    yield f"--- {header} {mediaTagText}"

    for td in self.getTrackDescriptors():
        dispositionText = '|'.join(d.indicator() for d in td.getDispositionSet())
        trackTagText = ' '.join(str(k) + '=' + str(v) for k, v in td.getTags().items())
        yield (f"{td.getIndex()}:{td.getType().indicator()}:{td.getSubIndex()} "
               + dispositionText + ' ' + trackTagText)
||||||
|
def clone(self, context: dict | None = None):
    """Return a copy of this descriptor: the tag dict is shallow-copied and
    every track descriptor is cloned. An explicit *context* overrides the
    cloned descriptor's context."""
    effectiveContext = context if context is not None else self.__context
    kwargs = {
        MediaDescriptor.TAGS_KEY: dict(self.__mediaTags),
        MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY: [
            trackDescriptor.clone(context=effectiveContext)
            for trackDescriptor in self.__trackDescriptors
        ],
    }

    if context is not None:
        kwargs[MediaDescriptor.CONTEXT_KEY] = context
    elif self.__context:
        kwargs[MediaDescriptor.CONTEXT_KEY] = self.__context

    return MediaDescriptor(**kwargs)
||||||
403
src/ffx/media_descriptor_change_set.py
Normal file
403
src/ffx/media_descriptor_change_set.py
Normal file
@@ -0,0 +1,403 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.iso_language import IsoLanguage
|
||||||
|
from ffx.media_descriptor import MediaDescriptor
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
|
||||||
|
from ffx.helper import dictDiff, setDiff, DIFF_ADDED_KEY, DIFF_CHANGED_KEY, DIFF_REMOVED_KEY, DIFF_UNCHANGED_KEY
|
||||||
|
|
||||||
|
from ffx.track_codec import TrackCodec
|
||||||
|
from ffx.track_disposition import TrackDisposition
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
|
||||||
|
|
||||||
|
class MediaDescriptorChangeSet():
    """Computes the difference between a target and a source MediaDescriptor:
    media tags, per-track tags and disposition sets, added/removed tracks."""

    # Keys of the internal change-set object.
    TAGS_KEY = "tags"
    TRACKS_KEY = "tracks"
    DISPOSITION_SET_KEY = "disposition_set"

    TRACK_DESCRIPTOR_KEY = "track_descriptor"
||||||
|
|
||||||
|
def __init__(self, context,
             targetMediaDescriptor: MediaDescriptor = None,
             sourceMediaDescriptor: MediaDescriptor = None):
    """Build the change set between *sourceMediaDescriptor* (current file
    state) and *targetMediaDescriptor* (desired state).

    Configuration under context['config'] 'metadata' controls which tag keys
    are removed or ignored; context flags 'apply_metadata_cleanup' and
    'apply_metadata_normalization' (both default True) toggle cleanup and
    normalization.
    """
    self.__context = context
    self.__logger = context['logger']

    self.__configurationData = self.__context['config'].getData()

    metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {}
    applyCleanup = bool(self.__context.get('apply_metadata_cleanup', True))
    self.__applyMetadataNormalization = bool(
        self.__context.get("apply_metadata_normalization", True)
    )

    # Tag keys to stamp, remove or ignore, globally and per stream.
    self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {}
    self.__removeGlobalKeys = (
        metadataConfiguration['remove']
        if applyCleanup and 'remove' in metadataConfiguration.keys()
        else []
    )
    self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else []
    self.__removeTrackKeys = (
        metadataConfiguration['streams']['remove']
        if (
            applyCleanup
            and 'streams' in metadataConfiguration.keys()
            and 'remove' in metadataConfiguration['streams'].keys()
        )
        else []
    )
    self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore']
                              if 'streams' in metadataConfiguration.keys()
                              and 'ignore' in metadataConfiguration['streams'].keys() else [])

    # Track lists of both sides, plus index-keyed lookups.
    self.__targetTrackDescriptors = targetMediaDescriptor.getTrackDescriptors() if targetMediaDescriptor is not None else []
    self.__sourceTrackDescriptors = sourceMediaDescriptor.getTrackDescriptors() if sourceMediaDescriptor is not None else []
    self.__targetTrackDescriptorsByIndex = {
        trackDescriptor.getIndex(): trackDescriptor
        for trackDescriptor in self.__targetTrackDescriptors
    }
    self.__sourceTrackDescriptorsByIndex = {
        trackDescriptor.getIndex(): trackDescriptor
        for trackDescriptor in self.__sourceTrackDescriptors
    }

    targetMediaTags = targetMediaDescriptor.getTags() if targetMediaDescriptor is not None else {}
    sourceMediaTags = sourceMediaDescriptor.getTags() if sourceMediaDescriptor is not None else {}

    self.__changeSetObj = {}

    #if targetMediaDescriptor is not None:

    #!!#
    # Media-level tag diff (source -> target), honoring ignore/remove lists.
    tagsDiff = dictDiff(sourceMediaTags,
                        targetMediaTags,
                        ignoreKeys=self.__ignoreGlobalKeys,
                        removeKeys=self.__removeGlobalKeys)

    if tagsDiff:
        self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY] = tagsDiff

    self.__numTargetTracks = len(self.__targetTrackDescriptors)

    # Current track configuration (of file)
    self.__numSourceTracks = len(self.__sourceTrackDescriptors)

    trackCompareResult = {}

    # A target track whose source index matches no source track is "added";
    # otherwise its diff against the matching source track is recorded.
    for targetTrackDescriptor in self.__targetTrackDescriptors:
        sourceTrackDescriptor = self.__sourceTrackDescriptorsByIndex.get(
            targetTrackDescriptor.getSourceIndex()
        )

        if sourceTrackDescriptor is None:
            if DIFF_ADDED_KEY not in trackCompareResult.keys():
                trackCompareResult[DIFF_ADDED_KEY] = {}
            trackCompareResult[DIFF_ADDED_KEY][targetTrackDescriptor.getIndex()] = targetTrackDescriptor
            continue

        trackDiff = self.compareTracks(targetTrackDescriptor, sourceTrackDescriptor)
        if trackDiff:
            if DIFF_CHANGED_KEY not in trackCompareResult.keys():
                trackCompareResult[DIFF_CHANGED_KEY] = {}
            trackCompareResult[DIFF_CHANGED_KEY][targetTrackDescriptor.getIndex()] = trackDiff

    # A source track referenced by no target track is "removed".
    targetSourceIndices = {
        targetTrackDescriptor.getSourceIndex()
        for targetTrackDescriptor in self.__targetTrackDescriptors
    }
    for sourceTrackDescriptor in self.__sourceTrackDescriptors:
        if sourceTrackDescriptor.getIndex() not in targetSourceIndices:
            if DIFF_REMOVED_KEY not in trackCompareResult.keys():
                trackCompareResult[DIFF_REMOVED_KEY] = {}
            trackCompareResult[DIFF_REMOVED_KEY][sourceTrackDescriptor.getIndex()] = sourceTrackDescriptor

    if trackCompareResult:
        self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY] = trackCompareResult
|
||||||
|
def compareTracks(self,
                  targetTrackDescriptor: TrackDescriptor = None,
                  sourceTrackDescriptor: TrackDescriptor = None):
    """Diff one target track against its source track.

    Returns a dict with optional TAGS_KEY (tag diff, target tags normalized
    first) and DISPOSITION_SET_KEY (disposition set diff); empty when the
    tracks agree.
    """
    sourceTrackTags = sourceTrackDescriptor.getTags() if sourceTrackDescriptor is not None else {}

    if targetTrackDescriptor is not None:
        targetTrackTags = self.normalizeTrackTags(
            targetTrackDescriptor.getTags(),
            trackDescriptor=targetTrackDescriptor,
            fallbackTrackTags=sourceTrackTags,
        )
    else:
        targetTrackTags = {}

    trackCompareResult = {}

    tagsDiffResult = dictDiff(sourceTrackTags,
                              targetTrackTags,
                              ignoreKeys=self.__ignoreTrackKeys,
                              removeKeys=self.__removeTrackKeys)
    if tagsDiffResult:
        trackCompareResult[MediaDescriptorChangeSet.TAGS_KEY] = tagsDiffResult

    sourceDispositionSet = sourceTrackDescriptor.getDispositionSet() if sourceTrackDescriptor is not None else set()
    targetDispositionSet = targetTrackDescriptor.getDispositionSet() if targetTrackDescriptor is not None else set()

    dispositionDiffResult = setDiff(sourceDispositionSet, targetDispositionSet)
    if dispositionDiffResult:
        trackCompareResult[MediaDescriptorChangeSet.DISPOSITION_SET_KEY] = dispositionDiffResult

    return trackCompareResult
|
|
||||||
|
def normalizeTrackTagValue(self, tagKey, tagValue):
    """Return *tagValue* normalized for comparison/output.

    Only the "language" tag is normalized (to its three-letter ISO code),
    and only when metadata normalization is enabled; every other value is
    passed through unchanged.
    """
    if tagKey != "language" or not self.__applyMetadataNormalization:
        return tagValue

    if isinstance(tagValue, IsoLanguage):
        return tagValue.threeLetter()

    resolvedLanguage = IsoLanguage.findThreeLetter(str(tagValue))
    # Unrecognized language strings are left as-is.
    return resolvedLanguage.threeLetter() if resolvedLanguage != IsoLanguage.UNDEFINED else tagValue
|
||||||
|
|
||||||
|
def resolveTrackLanguage(self, tagValue):
    """Map *tagValue* to an IsoLanguage instance, or None when unknown."""
    if isinstance(tagValue, IsoLanguage):
        return tagValue

    candidate = IsoLanguage.findThreeLetter(str(tagValue))
    if candidate == IsoLanguage.UNDEFINED:
        return None
    return candidate
|
||||||
|
|
||||||
|
def normalizeTrackTags(
    self,
    trackTags: dict,
    trackDescriptor: TrackDescriptor = None,
    fallbackTrackTags: dict = None,
):
    """Return a normalized copy of *trackTags*.

    Each value runs through normalizeTrackTagValue(). Additionally, when
    normalization is on and the track is a subtitle with no title on either
    the normalized or fallback side, a title is synthesized from the
    resolved language's label.
    """
    normalizedTrackTags = {
        tagKey: self.normalizeTrackTagValue(tagKey, tagValue)
        for tagKey, tagValue in trackTags.items()
    }

    if (
        self.__applyMetadataNormalization
        and trackDescriptor is not None
        and trackDescriptor.getType() == TrackType.SUBTITLE
    ):
        trackTitle = str(normalizedTrackTags.get("title", "")).strip()
        fallbackTitle = str((fallbackTrackTags or {}).get("title", "")).strip()
        trackLanguage = self.resolveTrackLanguage(normalizedTrackTags.get("language"))
        # Only synthesize when neither side supplies any title at all.
        if not trackTitle and not fallbackTitle and trackLanguage is not None:
            normalizedTrackTags["title"] = trackLanguage.label()

    return normalizedTrackTags
|
||||||
|
|
||||||
|
|
||||||
|
def generateDispositionTokens(self):
    """Build the ffmpeg disposition arguments for every target track.

    Example: -disposition:s:0 default -disposition:s:1 0

    A disposition argument is emitted explicitly for every target track:
    either the '+'-joined set of desired flags, or '0' to clear all flags
    on tracks that should have none.
    """
    dispositionTokens = []

    for targetTrackDescriptor in self.__targetTrackDescriptors:

        targetDispositions = targetTrackDescriptor.getDispositionSet()
        streamIndicator = targetTrackDescriptor.getType().indicator()
        subIndex = targetTrackDescriptor.getSubIndex()

        if targetDispositions:
            dispositionTokens += [f"-disposition:{streamIndicator}:{subIndex}",
                                  '+'.join([d.label() for d in targetDispositions])]
        else:
            # Explicit reset: without this, pre-existing flags would survive.
            dispositionTokens += [f"-disposition:{streamIndicator}:{subIndex}", '0']

    return dispositionTokens
|
||||||
|
|
||||||
|
|
||||||
|
def generateMetadataTokens(self):
    """Build the ffmpeg '-metadata' arguments from the change set.

    Covers, in order: global (container) tag additions/changes/removals,
    tags of newly added tracks, per-track tag changes/removals for changed
    tracks, extra tags from the 'encoding_metadata_tags' context entry,
    and finally the configured removal keys.
    """
    metadataTokens = []

    if MediaDescriptorChangeSet.TAGS_KEY in self.__changeSetObj.keys():

        addedMediaTags = (self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_ADDED_KEY]
                          if DIFF_ADDED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
        removedMediaTags = (self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_REMOVED_KEY]
                            if DIFF_REMOVED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})
        changedMediaTags = (self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY][DIFF_CHANGED_KEY]
                            if DIFF_CHANGED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TAGS_KEY].keys() else {})

        outputMediaTags = addedMediaTags | changedMediaTags

        # Signature tags are merged in unless explicitly suppressed.
        if (not 'no_signature' in self.__context.keys()
                or not self.__context['no_signature']):
            outputMediaTags = outputMediaTags | self.__signatureTags

        # outputMediaTags = {k:v for k,v in outputMediaTags.items() if k not in self.__removeGlobalKeys}

        for tagKey, tagValue in outputMediaTags.items():
            metadataTokens += [f"-metadata:g",
                               f"{tagKey}={tagValue}"]

        # NOTE(review): changed tags are also part of outputMediaTags above,
        # so they are emitted twice; with ffmpeg the last value wins, which
        # makes the changed value override a colliding signature tag —
        # confirm whether that override is intentional.
        for tagKey, tagValue in changedMediaTags.items():
            metadataTokens += [f"-metadata:g",
                               f"{tagKey}={tagValue}"]

        # Removed tags are cleared by assigning an empty value.
        for removeKey in removedMediaTags.keys():
            metadataTokens += [f"-metadata:g",
                               f"{removeKey}="]

    if MediaDescriptorChangeSet.TRACKS_KEY in self.__changeSetObj.keys():

        if DIFF_ADDED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
            addedTracks: dict = self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_ADDED_KEY]
            trackDescriptor: TrackDescriptor
            for trackDescriptor in addedTracks.values():
                # New tracks carry all their (normalized) tags, except keys
                # configured for removal.
                for tagKey, tagValue in self.normalizeTrackTags(
                    trackDescriptor.getTags(),
                    trackDescriptor=trackDescriptor,
                ).items():
                    if not tagKey in self.__removeTrackKeys:
                        metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                           + f":{trackDescriptor.getSubIndex()}",
                                           f"{tagKey}={tagValue}"]

        if DIFF_CHANGED_KEY in self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY].keys():
            changedTracks: dict = self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_CHANGED_KEY]
            trackDiffObj: dict
            for trackIndex, trackDiffObj in changedTracks.items():

                if MediaDescriptorChangeSet.TAGS_KEY in trackDiffObj.keys():

                    tagsDiffObj = trackDiffObj[MediaDescriptorChangeSet.TAGS_KEY]

                    addedTrackTags = tagsDiffObj[DIFF_ADDED_KEY] if DIFF_ADDED_KEY in tagsDiffObj.keys() else {}
                    changedTrackTags = tagsDiffObj[DIFF_CHANGED_KEY] if DIFF_CHANGED_KEY in tagsDiffObj.keys() else {}
                    unchangedTrackTags = tagsDiffObj[DIFF_UNCHANGED_KEY] if DIFF_UNCHANGED_KEY in tagsDiffObj.keys() else {}
                    removedTrackTags = tagsDiffObj[DIFF_REMOVED_KEY] if DIFF_REMOVED_KEY in tagsDiffObj.keys() else {}

                    outputTrackTags = addedTrackTags | changedTrackTags

                    trackDescriptor = self.__targetTrackDescriptorsByIndex[trackIndex]

                    # Unchanged/removed tags serve as the fallback source for
                    # synthesized subtitle titles during normalization.
                    for tagKey, tagValue in self.normalizeTrackTags(
                        outputTrackTags,
                        trackDescriptor=trackDescriptor,
                        fallbackTrackTags=unchangedTrackTags | removedTrackTags,
                    ).items():
                        metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                           + f":{trackDescriptor.getSubIndex()}",
                                           f"{tagKey}={tagValue}"]

                    if trackDescriptor.getExternalSourceFilePath():
                        # When a single-track external file substitutes the
                        # media payload, keep metadata from the regular
                        # source track unless the external/target side
                        # overrides it explicitly.
                        preservedTrackTags = (
                            {
                                tagKey: tagValue
                                for tagKey, tagValue in removedTrackTags.items()
                                if tagKey not in self.__removeTrackKeys
                            }
                            | unchangedTrackTags
                        )
                        for tagKey, tagValue in self.normalizeTrackTags(
                            preservedTrackTags,
                            trackDescriptor=trackDescriptor,
                        ).items():
                            metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                               + f":{trackDescriptor.getSubIndex()}",
                                               f"{tagKey}={tagValue}"]
                    else:
                        # No external substitution: removed tags really are
                        # cleared on the output stream.
                        for removeKey in removedTrackTags.keys():
                            metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}"
                                               + f":{trackDescriptor.getSubIndex()}",
                                               f"{removeKey}="]

    for tagKey, tagValue in self.__context.get('encoding_metadata_tags', {}).items():
        metadataTokens += [f"-metadata:g", f"{tagKey}={tagValue}"]

    metadataTokens += self.generateConfiguredRemovalMetadataTokens()

    return metadataTokens
|
||||||
|
|
||||||
|
|
||||||
|
def getChangeSetObj(self):
    """Return the raw change-set dict computed for this comparison."""
    return self.__changeSetObj
|
||||||
|
|
||||||
|
def generateConfiguredRemovalMetadataTokens(self):
    """Emit '-metadata ... key=' pairs for every configured removal key."""
    tokens = []

    # Globally configured keys are cleared on the container level.
    for removeKey in self.__removeGlobalKeys:
        tokens.extend(["-metadata:g", f"{removeKey}="])

    # Track-level keys are cleared on every target stream.
    for trackDescriptor in self.__targetTrackDescriptors:
        streamSelector = (
            f"-metadata:s:{trackDescriptor.getType().indicator()}:{trackDescriptor.getSubIndex()}"
        )
        for removeKey in self.__removeTrackKeys:
            tokens.extend([streamSelector, f"{removeKey}="])

    return tokens
|
||||||
1
src/ffx/media_details_screen.py
Normal file
1
src/ffx/media_details_screen.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
from .inspect_details_screen import InspectDetailsScreen as MediaDetailsScreen
|
||||||
428
src/ffx/media_edit_screen.py
Normal file
428
src/ffx/media_edit_screen.py
Normal file
@@ -0,0 +1,428 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
from textual.containers import Grid
|
||||||
|
from textual.widgets import Button, Footer, Header, Static
|
||||||
|
|
||||||
|
from ffx.metadata_editor import apply_metadata_edits
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .confirm_screen import ConfirmScreen
|
||||||
|
from .media_workflow_screen_base import MediaWorkflowScreenBase
|
||||||
|
from .screen_support import localized_column_width
|
||||||
|
from .tag_delete_screen import TagDeleteScreen
|
||||||
|
from .tag_details_screen import TagDetailsScreen
|
||||||
|
from .track_details_screen import TrackDetailsScreen
|
||||||
|
|
||||||
|
|
||||||
|
class MediaEditScreen(MediaWorkflowScreenBase):
    """Interactive Textual screen for editing media metadata in place."""

    # Grid column sizing; integers are cell widths, "Nfr" are fractional units.
    GRID_COLUMN_LABEL_MIN = 12
    GRID_COLUMN_2 = 20
    GRID_COLUMN_3 = 25
    GRID_COLUMN_4 = "4fr"
    GRID_COLUMN_5 = 12
    GRID_COLUMN_6 = "5fr"

    # NOTE(review): grid-rows below lists 11 values for a 10-row grid
    # (grid-size: 6 10) — confirm whether the extra row height is intended.
    CSS = f"""

    Grid {{
        grid-size: 6 10;
        grid-rows: 2 2 2 8 2 2 8 2 8 2 2;
        grid-columns: {GRID_COLUMN_LABEL_MIN} {GRID_COLUMN_2} {GRID_COLUMN_3} {GRID_COLUMN_4} {GRID_COLUMN_5} {GRID_COLUMN_6};
        height: 100%;
        width: 100%;
        min-width: 120;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }}

    DataTable .datatable--cursor {{
        background: darkorange;
        color: black;
    }}

    DataTable .datatable--header {{
        background: steelblue;
        color: white;
    }}

    Input {{
        border: none;
    }}
    Button {{
        border: none;
    }}

    DataTable {{
        min-height: 24;
        width: 100%;
    }}

    .two {{
        column-span: 2;
    }}
    .three {{
        column-span: 3;
    }}
    .four {{
        column-span: 4;
    }}
    .five {{
        column-span: 5;
    }}

    #differences-table {{
        row-span: 10;
    }}

    #file_label {{
        width: 100%;
    }}
    """
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _grid_columns_spec(cls, label_column_width: int | None = None) -> str:
|
||||||
|
return " ".join(
|
||||||
|
[
|
||||||
|
str(
|
||||||
|
cls.GRID_COLUMN_LABEL_MIN
|
||||||
|
if label_column_width is None
|
||||||
|
else int(label_column_width)
|
||||||
|
),
|
||||||
|
str(cls.GRID_COLUMN_2),
|
||||||
|
str(cls.GRID_COLUMN_3),
|
||||||
|
str(cls.GRID_COLUMN_4),
|
||||||
|
str(cls.GRID_COLUMN_5),
|
||||||
|
str(cls.GRID_COLUMN_6),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
    # Only the 'edit' CLI command may open this screen (checked in base init).
    COMMAND_NAME = "edit"
    # Edit mode diffs the editable draft against the on-disk baseline.
    EDIT_MODE = True
    DIFFERENCES_COLUMN_LABEL = "Planned Changes (file->edited output)"

    # Keyboard bindings: (key, action name, localized description).
    BINDINGS = [
        ("escape", "back", t("Back")),
        ("q", "quit_screen", t("Quit")),
        ("a", "apply_changes", t("Apply")),
        ("r", "revert_changes", t("Revert")),
    ]
|
||||||
|
|
||||||
|
    def compose(self):
        """Build the widget tree: header, 6x10 grid of labels/buttons/tables,
        and footer. Widget order inside the Grid determines cell placement."""
        self._build_media_tags_table()
        self._build_tracks_table()
        self._build_differences_table()

        yield Header()

        with Grid(id="main_grid"):

            # Row 1
            yield Static(t("File"))
            yield Static(self._mediaFilename, id="file_label", classes="three", markup=False)
            yield Static(" ")
            # Differences table spans all rows of the last column (see CSS).
            yield self.differencesTable

            # Row 2
            yield Static(" ")
            yield Button(t("Cleanup"), id="cleanup_toggle_button")
            yield Button(t("Normalize"), id="normalize_toggle_button")
            yield Static(" ", classes="two")

            # Row 3
            yield Static(t("Media Tags"))
            yield Button(t("Add"), id="button_add_tag")
            yield Button(t("Edit"), id="button_edit_tag")
            yield Button(t("Delete"), id="button_delete_tag")
            yield Static(" ")

            # Row 4
            yield Static(" ")
            yield self.mediaTagsTable
            yield Static(" ")

            # Row 5
            yield Static("", classes="five")

            # Row 6
            yield Static(t("Streams"))
            yield Button(t("Edit"), id="button_edit_track")
            yield Button(t("Set Default"), id="select_default_button")
            yield Button(t("Set Forced"), id="select_forced_button")
            yield Static(" ")

            # Row 7
            yield Static(" ")
            yield self.tracksTable
            yield Static(" ")

            # Row 8
            yield Static("", classes="five")

            # Row 9
            yield Static(" ")
            yield Button(t("Apply"), id="apply_button")
            yield Button(t("Revert"), id="revert_button")
            yield Button(t("Quit"), id="quit_button")
            yield Static(" ")

        yield Footer()
|
||||||
|
|
||||||
|
    def on_mount(self):
        """Populate all widgets once the screen is attached to the DOM."""
        self._update_grid_layout()
        self.updateMediaTags()
        self.updateTracks()
        self.updateDifferences()
        self.updateToggleButtons()
|
||||||
|
|
||||||
|
def _update_grid_layout(self) -> None:
|
||||||
|
leftColumnWidth = max(
|
||||||
|
localized_column_width(t("File"), self.GRID_COLUMN_LABEL_MIN),
|
||||||
|
localized_column_width(t("Media Tags"), self.GRID_COLUMN_LABEL_MIN),
|
||||||
|
localized_column_width(t("Streams"), self.GRID_COLUMN_LABEL_MIN),
|
||||||
|
)
|
||||||
|
grid = self.query_one("#main_grid", Grid)
|
||||||
|
grid.styles.grid_columns = self._grid_columns_spec(leftColumnWidth)
|
||||||
|
|
||||||
|
    def action_back(self):
        """Keyboard 'escape' binding: same confirmation flow as quitting."""
        self.action_quit_screen()
|
||||||
|
|
||||||
|
def setMessage(self, message: str):
|
||||||
|
self._messageText = str(message)
|
||||||
|
if self._messageText:
|
||||||
|
self.notify(self._messageText)
|
||||||
|
|
||||||
|
def updateToggleButtons(self):
|
||||||
|
self._set_toggle_button_state(
|
||||||
|
"#cleanup_toggle_button",
|
||||||
|
t("Cleanup"),
|
||||||
|
self._applyCleanup,
|
||||||
|
)
|
||||||
|
self._set_toggle_button_state(
|
||||||
|
"#normalize_toggle_button",
|
||||||
|
t("Normalize"),
|
||||||
|
self._applyNormalization,
|
||||||
|
)
|
||||||
|
|
||||||
|
    def _set_toggle_button_state(self, selector: str, label: str, enabled: bool):
        """Best-effort styling of one toggle button.

        A missing widget (e.g. before the screen is fully composed) is
        treated as a no-op rather than crashing the UI.
        """
        try:
            button = self.query_one(selector, Button)
        except Exception:
            return

        button.label = label
        # Orange-on-black marks an enabled toggle; white-on-black disabled.
        button.styles.color = "black" if enabled else "white"
        button.styles.background = "darkorange" if enabled else "black"
|
||||||
|
|
||||||
|
    def refreshAfterDraftChange(self):
        """Refresh every view that depends on the draft descriptor."""
        self.updateMediaTags()
        self.updateTracks()
        self.updateDifferences()
|
||||||
|
|
||||||
|
def on_button_pressed(self, event: Button.Pressed) -> None:
|
||||||
|
if event.button.id == "select_default_button":
|
||||||
|
if self.setSelectedTrackDefault():
|
||||||
|
self.refreshAfterDraftChange()
|
||||||
|
|
||||||
|
if event.button.id == "select_forced_button":
|
||||||
|
if self.setSelectedTrackForced():
|
||||||
|
self.refreshAfterDraftChange()
|
||||||
|
|
||||||
|
if event.button.id == "button_add_tag":
|
||||||
|
self.app.push_screen(TagDetailsScreen(), self.handle_update_media_tag)
|
||||||
|
|
||||||
|
if event.button.id == "button_edit_tag":
|
||||||
|
selectedTag = self.getSelectedMediaTag()
|
||||||
|
if selectedTag is not None:
|
||||||
|
self.app.push_screen(
|
||||||
|
TagDetailsScreen(key=selectedTag[0], value=selectedTag[1]),
|
||||||
|
self.handle_update_media_tag,
|
||||||
|
)
|
||||||
|
|
||||||
|
if event.button.id == "button_delete_tag":
|
||||||
|
selectedTag = self.getSelectedMediaTag()
|
||||||
|
if selectedTag is not None:
|
||||||
|
self.app.push_screen(
|
||||||
|
TagDeleteScreen(key=selectedTag[0], value=selectedTag[1]),
|
||||||
|
self.handle_delete_media_tag,
|
||||||
|
)
|
||||||
|
|
||||||
|
if event.button.id == "button_edit_track":
|
||||||
|
self.action_edit_selected_track()
|
||||||
|
|
||||||
|
if event.button.id == "cleanup_toggle_button":
|
||||||
|
self.action_toggle_cleanup()
|
||||||
|
|
||||||
|
if event.button.id == "normalize_toggle_button":
|
||||||
|
self.action_toggle_normalization()
|
||||||
|
|
||||||
|
if event.button.id == "apply_button":
|
||||||
|
self.action_apply_changes()
|
||||||
|
|
||||||
|
if event.button.id == "revert_button":
|
||||||
|
self.action_revert_changes()
|
||||||
|
|
||||||
|
if event.button.id == "quit_button":
|
||||||
|
self.action_quit_screen()
|
||||||
|
|
||||||
|
    def action_edit_selected_track(self):
        """Open the track details editor for the currently selected stream."""
        selectedTrack = self.getSelectedTrackDescriptor()
        if selectedTrack is None:
            self.setMessage(t("Select a stream first."))
            return

        self.app.push_screen(
            TrackDetailsScreen(
                trackDescriptor=selectedTrack,
                patternLabel=os.path.basename(self._mediaFilename),
                siblingTrackDescriptors=self._sourceMediaDescriptor.getTrackDescriptors(),
                # Metadata-only editing: the media payload itself is untouched.
                metadata_only=True,
            ),
            self.handle_edit_track,
        )
|
||||||
|
|
||||||
|
def action_toggle_cleanup(self):
|
||||||
|
self.setApplyCleanup(not self._applyCleanup)
|
||||||
|
self.updateToggleButtons()
|
||||||
|
self.updateMediaTags()
|
||||||
|
self.updateDifferences()
|
||||||
|
self.setMessage(
|
||||||
|
t("Cleanup enabled.") if self._applyCleanup else t("Cleanup disabled.")
|
||||||
|
)
|
||||||
|
|
||||||
|
def action_toggle_normalization(self):
|
||||||
|
self.setApplyNormalization(not self._applyNormalization)
|
||||||
|
self.updateToggleButtons()
|
||||||
|
self.updateDifferences()
|
||||||
|
self.setMessage(
|
||||||
|
t("Normalization enabled.")
|
||||||
|
if self._applyNormalization
|
||||||
|
else t("Normalization disabled.")
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle_update_media_tag(self, tag):
|
||||||
|
if tag is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._sourceMediaDescriptor.getTags()[str(tag[0])] = str(tag[1])
|
||||||
|
self.setMessage(t("Updated media tag {tag!r}.", tag=tag[0]))
|
||||||
|
self.refreshAfterDraftChange()
|
||||||
|
|
||||||
|
def handle_delete_media_tag(self, tag):
|
||||||
|
if tag is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._sourceMediaDescriptor.getTags().pop(str(tag[0]), None)
|
||||||
|
self.setMessage(t("Deleted media tag {tag!r}.", tag=tag[0]))
|
||||||
|
self.refreshAfterDraftChange()
|
||||||
|
|
||||||
|
    def handle_edit_track(self, trackDescriptor: TrackDescriptor):
        """Dialog callback: merge an edited track back into the draft.

        The edited track replaces the draft track with the same
        (index, subIndex) pair; if no such track exists the edit is refused.
        """
        if trackDescriptor is None:
            return

        updatedTracks = []
        replaced = False
        for currentTrack in self._sourceMediaDescriptor.getTrackDescriptors():
            if (
                currentTrack.getIndex() == trackDescriptor.getIndex()
                and currentTrack.getSubIndex() == trackDescriptor.getSubIndex()
            ):
                updatedTracks.append(trackDescriptor)
                replaced = True
            else:
                updatedTracks.append(currentTrack)

        if not replaced:
            self.setMessage(t("Unable to update selected stream."))
            return

        # Re-clone the draft so downstream change-set computation sees a
        # fresh object, then install the updated track list on the clone.
        self._sourceMediaDescriptor = self._sourceMediaDescriptor.clone(context=self.context)
        self._sourceMediaDescriptor.getTrackDescriptors().clear()
        self._sourceMediaDescriptor.getTrackDescriptors().extend(updatedTracks)
        self.setMessage(
            t(
                "Updated stream #{index} ({track_type}).",
                index=trackDescriptor.getIndex(),
                track_type=t(trackDescriptor.getType().label()),
            )
        )
        self.refreshAfterDraftChange()
|
||||||
|
|
||||||
|
    def action_apply_changes(self):
        """Write the pending metadata edits back to the media file."""
        if not self.hasPendingChanges():
            self.setMessage(t("No changes to apply."))
            return

        try:
            applyResult = apply_metadata_edits(
                self.context,
                self._mediaFilename,
                self._baselineMediaDescriptor,
                self._sourceMediaDescriptor,
            )
        except Exception as ex:
            # Surface the failure in the UI but keep the screen alive.
            self.context["logger"].exception(
                "Failed to apply metadata edits for %s",
                self._mediaFilename,
            )
            self.setMessage(t("Apply failed: {error}", error=ex))
            return

        # Dry-run mode: report what would happen, change nothing.
        if applyResult.get("dry_run", False):
            self.setMessage(
                t(
                    "Dry-run: would rewrite via temporary file {target_path}",
                    target_path=applyResult["target_path"],
                )
            )
            return

        # Success: re-read the file so baseline and draft are in sync again.
        self.reloadProperties(reset_draft=True)
        self.refreshAfterDraftChange()
        self.setMessage(t("Changes applied and file reloaded."))
|
||||||
|
|
||||||
|
def action_revert_changes(self):
|
||||||
|
if not self.hasPendingChanges():
|
||||||
|
self.setMessage(t("No changes to revert."))
|
||||||
|
return
|
||||||
|
|
||||||
|
self.app.push_screen(
|
||||||
|
ConfirmScreen(
|
||||||
|
t("Discard pending metadata changes and reload the file state?"),
|
||||||
|
confirm_label=t("Discard"),
|
||||||
|
cancel_label=t("Keep Editing"),
|
||||||
|
),
|
||||||
|
self.handle_revert_confirmation,
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle_revert_confirmation(self, confirmed):
|
||||||
|
if not confirmed:
|
||||||
|
self.setMessage(t("Keeping pending changes."))
|
||||||
|
return
|
||||||
|
|
||||||
|
self.reloadProperties(reset_draft=True)
|
||||||
|
self.refreshAfterDraftChange()
|
||||||
|
self.setMessage(t("Reverted pending changes."))
|
||||||
|
|
||||||
|
def action_quit_screen(self):
|
||||||
|
if self.hasPendingChanges():
|
||||||
|
self.app.push_screen(
|
||||||
|
ConfirmScreen(
|
||||||
|
t("Discard pending metadata changes and quit?"),
|
||||||
|
confirm_label=t("Discard"),
|
||||||
|
cancel_label=t("Stay"),
|
||||||
|
),
|
||||||
|
self.handle_quit_confirmation,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
self.app.exit()
|
||||||
|
|
||||||
|
def handle_quit_confirmation(self, confirmed):
|
||||||
|
if confirmed:
|
||||||
|
self.app.exit()
|
||||||
|
else:
|
||||||
|
self.setMessage(t("Continuing edit session."))
|
||||||
377
src/ffx/media_workflow_screen_base.py
Normal file
377
src/ffx/media_workflow_screen_base.py
Normal file
@@ -0,0 +1,377 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import DataTable
|
||||||
|
from textual.widgets._data_table import CellDoesNotExist
|
||||||
|
|
||||||
|
from ffx.audio_layout import AudioLayout
|
||||||
|
from ffx.file_properties import FileProperties
|
||||||
|
from ffx.helper import DIFF_ADDED_KEY, DIFF_CHANGED_KEY, DIFF_REMOVED_KEY
|
||||||
|
from ffx.media_descriptor_change_set import MediaDescriptorChangeSet
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
from ffx.track_disposition import TrackDisposition
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import add_auto_table_column, build_screen_bootstrap, populate_tag_table
|
||||||
|
|
||||||
|
|
||||||
|
class MediaWorkflowScreenBase(Screen):
    """Shared state and widget plumbing for the media inspect/edit screens."""

    # Tracks-table column labels (translated via t() when columns are built).
    TRACKS_TABLE_INDEX_COLUMN_LABEL = "Index"
    TRACKS_TABLE_TYPE_COLUMN_LABEL = "Type"
    TRACKS_TABLE_SUB_INDEX_COLUMN_LABEL = "SubIndex"
    TRACKS_TABLE_CODEC_COLUMN_LABEL = "Codec"
    TRACKS_TABLE_LAYOUT_COLUMN_LABEL = "Layout"
    TRACKS_TABLE_LANGUAGE_COLUMN_LABEL = "Language"
    TRACKS_TABLE_TITLE_COLUMN_LABEL = "Title"
    TRACKS_TABLE_DEFAULT_COLUMN_LABEL = "Default"
    TRACKS_TABLE_FORCED_COLUMN_LABEL = "Forced"

    DIFFERENCES_COLUMN_LABEL = "Differences"
    # Subclasses set the CLI command they serve and whether they edit in place.
    COMMAND_NAME = ""
    EDIT_MODE = False
|
||||||
|
|
||||||
|
    def __init__(self):
        """Validate the CLI context and load the media file's descriptors.

        Raises click.ClickException when the active command does not match
        COMMAND_NAME, when no 'filename' argument was given, or when the
        file does not exist.
        """
        super().__init__()

        bootstrap = build_screen_bootstrap(self.app.getContext())
        self.context = bootstrap.context

        self._applyCleanup = False
        self._applyNormalization = bool(self.context.get("apply_metadata_normalization", True))
        self._removeGlobalKeys = []
        self._ignoreGlobalKeys = []
        self._apply_bootstrap_settings(bootstrap)

        command = self.context.get("command")
        if command != self.COMMAND_NAME:
            raise click.ClickException(
                f"{type(self).__name__}.__init__(): Can only perform command '{self.COMMAND_NAME}'"
            )

        arguments = self.context.get("arguments", {})
        self._mediaFilename = arguments.get("filename", "")
        if not self._mediaFilename:
            raise click.ClickException(
                f"{type(self).__name__}.__init__(): Argument 'filename' is required"
            )
        if not os.path.isfile(self._mediaFilename):
            raise click.ClickException(
                f"{type(self).__name__}.__init__(): Media file {self._mediaFilename} does not exist"
            )

        # Descriptor state: baseline = on-disk probe, source = editable
        # draft, target = desired end state (pattern-driven in inspect mode).
        self._baselineMediaDescriptor = None
        self._sourceMediaDescriptor = None
        self._targetMediaDescriptor = None
        self._currentPattern = None
        self._mediaChangeSetObj = {}
        self._messageText = ""
        # Row-key -> data mappings used to resolve table selections.
        self._trackRowData: dict[object, TrackDescriptor] = {}
        self._sourceMediaTagRowData: dict[object, tuple[str, str]] = {}

        self.reloadProperties(reset_draft=True)
|
||||||
|
|
||||||
|
    def _apply_bootstrap_settings(self, bootstrap) -> None:
        """Copy cleanup-related settings from a screen bootstrap object."""
        self._applyCleanup = bootstrap.apply_cleanup
        self._removeGlobalKeys = bootstrap.remove_global_keys
        self._ignoreGlobalKeys = bootstrap.ignore_global_keys
|
||||||
|
|
||||||
|
    def refreshCleanupSettings(self) -> None:
        """Re-derive cleanup settings from the (possibly changed) context."""
        self._apply_bootstrap_settings(build_screen_bootstrap(self.context))
|
||||||
|
|
||||||
|
    def setApplyCleanup(self, enabled: bool) -> None:
        """Persist the cleanup flag into the context and re-read settings."""
        self.context["apply_metadata_cleanup"] = bool(enabled)
        self.refreshCleanupSettings()
|
||||||
|
|
||||||
|
    def refreshNormalizationSettings(self) -> None:
        """Re-read the normalization flag from the context (default: on)."""
        self._applyNormalization = bool(
            self.context.get("apply_metadata_normalization", True)
        )
|
||||||
|
|
||||||
|
    def setApplyNormalization(self, enabled: bool) -> None:
        """Persist the normalization flag into the context and re-read it."""
        self.context["apply_metadata_normalization"] = bool(enabled)
        self.refreshNormalizationSettings()
|
||||||
|
|
||||||
|
def _build_media_tags_table(self):
|
||||||
|
self.mediaTagsTable = DataTable(classes="three")
|
||||||
|
add_auto_table_column(self.mediaTagsTable, t("Key"))
|
||||||
|
add_auto_table_column(self.mediaTagsTable, t("Value"))
|
||||||
|
self.mediaTagsTable.cursor_type = "row"
|
||||||
|
|
||||||
|
def _build_tracks_table(self):
|
||||||
|
self.tracksTable = DataTable(classes="three")
|
||||||
|
self._configure_tracks_table_columns()
|
||||||
|
self.tracksTable.cursor_type = "row"
|
||||||
|
|
||||||
|
def _configure_tracks_table_columns(self):
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_INDEX_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_TYPE_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_SUB_INDEX_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_CODEC_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_LAYOUT_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_LANGUAGE_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_TITLE_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_DEFAULT_COLUMN_LABEL))
|
||||||
|
add_auto_table_column(self.tracksTable, t(self.TRACKS_TABLE_FORCED_COLUMN_LABEL))
|
||||||
|
|
||||||
|
def _build_differences_table(self):
|
||||||
|
self.differencesTable = DataTable(id="differences-table")
|
||||||
|
add_auto_table_column(self.differencesTable, t(self.DIFFERENCES_COLUMN_LABEL))
|
||||||
|
self.differencesTable.cursor_type = "row"
|
||||||
|
|
||||||
|
def reloadProperties(self, reset_draft: bool = True):
    """Re-probe the media file and rebuild the descriptor set.

    Edit mode: the fresh probe becomes the baseline; a new editable draft
    is cloned from it unless an existing draft is kept (reset_draft=False),
    and the draft itself serves as the diff target.
    View mode: the probe is the source and the matched pattern (if any)
    supplies the target.
    """
    self._mediaFileProperties = FileProperties(self.context, self._mediaFilename)
    probedMediaDescriptor = self._mediaFileProperties.getMediaDescriptor()

    if self.EDIT_MODE:
        self._baselineMediaDescriptor = probedMediaDescriptor
        if reset_draft or self._sourceMediaDescriptor is None:
            # Start (or restart) editing from a private copy of the probe.
            self._sourceMediaDescriptor = probedMediaDescriptor.clone(context=self.context)
        self._targetMediaDescriptor = self._sourceMediaDescriptor
        self._currentPattern = None
    else:
        self._baselineMediaDescriptor = probedMediaDescriptor
        self._sourceMediaDescriptor = probedMediaDescriptor
        self._currentPattern = self._mediaFileProperties.getPattern()
        self._targetMediaDescriptor = (
            self._currentPattern.getMediaDescriptor(self.context)
            if self._currentPattern is not None
            else None
        )

    self.rebuildChangeSet()
|
||||||
|
|
||||||
|
def rebuildChangeSet(self):
    """Recompute the change-set object; an invalid comparison yields an empty set."""
    try:
        if self.EDIT_MODE:
            # Diff the editable draft against the probed baseline.
            changeSet = MediaDescriptorChangeSet(
                self.context,
                self._sourceMediaDescriptor,
                self._baselineMediaDescriptor,
            )
        elif self._targetMediaDescriptor is None:
            # No pattern target to compare against.
            self._mediaChangeSetObj = {}
            return
        else:
            # Diff the pattern target against the probed source.
            changeSet = MediaDescriptorChangeSet(
                self.context,
                self._targetMediaDescriptor,
                self._sourceMediaDescriptor,
            )
        self._mediaChangeSetObj = changeSet.getChangeSetObj()
    except ValueError:
        self._mediaChangeSetObj = {}
|
||||||
|
|
||||||
|
def hasPendingChanges(self) -> bool:
    """Return True when the current change set contains any entries."""
    return len(self._mediaChangeSetObj) > 0
|
||||||
|
|
||||||
|
def updateMediaTags(self):
    """Repopulate the media-tag table from the source descriptor's tags."""
    source_tags = self._sourceMediaDescriptor.getTags()
    self._sourceMediaTagRowData = populate_tag_table(
        self.mediaTagsTable,
        source_tags,
        ignore_keys=self._ignoreGlobalKeys,
        remove_keys=self._removeGlobalKeys,
    )
|
||||||
|
|
||||||
|
def updateTracks(self):
    """Rebuild the tracks table from the source descriptor's track list.

    Each track gets a per-type running sub-index; audio tracks with a
    defined layout show the layout label, everything else a blank cell.
    """
    self.tracksTable.clear(columns=True)
    self._configure_tracks_table_columns()
    self._trackRowData = {}

    perTypeCount = {}
    for descriptor in self._sourceMediaDescriptor.getTrackDescriptors():
        trackType = descriptor.getType()
        subIndex = perTypeCount.setdefault(trackType, 0)

        dispositions = descriptor.getDispositionSet()
        layout = descriptor.getAudioLayout()
        if trackType == TrackType.AUDIO and layout != AudioLayout.LAYOUT_UNDEFINED:
            layoutLabel = t(layout.label())
        else:
            layoutLabel = " "

        cells = (
            descriptor.getIndex(),
            t(trackType.label()),
            subIndex,
            descriptor.getCodec().label(),
            layoutLabel,
            descriptor.getLanguage().label(),
            descriptor.getTitle(),
            t("Yes") if TrackDisposition.DEFAULT in dispositions else t("No"),
            t("Yes") if TrackDisposition.FORCED in dispositions else t("No"),
        )

        rowKey = self.tracksTable.add_row(*(str(cell) for cell in cells))
        self._trackRowData[rowKey] = descriptor
        perTypeCount[trackType] = subIndex + 1
|
||||||
|
|
||||||
|
def updateDifferences(self):
    """Refresh the change set and render every difference as one table row."""
    self.rebuildChangeSet()
    self.differencesTable.clear()

    # In view mode there is nothing to diff without a matched pattern.
    if not self.EDIT_MODE and self._currentPattern is None:
        return

    # Track-level rows are labelled with data from the descriptor the
    # change set was computed toward (draft in edit mode, pattern otherwise).
    targetDescriptor = (
        self._sourceMediaDescriptor
        if self.EDIT_MODE
        else self._targetMediaDescriptor
    )
    targetTrackDescriptorsByIndex = {
        trackDescriptor.getIndex(): trackDescriptor
        for trackDescriptor in (
            targetDescriptor.getTrackDescriptors()
            if targetDescriptor is not None
            else []
        )
    }

    # --- container (media-level) tag differences ---
    tagDifferences = self._mediaChangeSetObj.get(MediaDescriptorChangeSet.TAGS_KEY, {})
    for tagKey, tagValue in tagDifferences.get(DIFF_ADDED_KEY, {}).items():
        if tagKey not in self._ignoreGlobalKeys:
            self.differencesTable.add_row(
                t("add media tag: key='{key}' value='{value}'", key=tagKey, value=tagValue)
            )

    for tagKey, tagValue in tagDifferences.get(DIFF_REMOVED_KEY, {}).items():
        if tagKey in self._ignoreGlobalKeys:
            continue
        # In view mode, keys on the global-removal list are expected to
        # disappear, so their removal is not reported as a difference.
        if not self.EDIT_MODE and tagKey in self._removeGlobalKeys:
            continue
        self.differencesTable.add_row(
            t("remove media tag: key='{key}' value='{value}'", key=tagKey, value=tagValue)
        )

    for tagKey, tagValue in tagDifferences.get(DIFF_CHANGED_KEY, {}).items():
        if tagKey not in self._ignoreGlobalKeys:
            self.differencesTable.add_row(
                t("change media tag: key='{key}' value='{value}'", key=tagKey, value=tagValue)
            )

    # --- per-track differences ---
    trackDifferences = self._mediaChangeSetObj.get(MediaDescriptorChangeSet.TRACKS_KEY, {})

    for trackDescriptor in trackDifferences.get(DIFF_ADDED_KEY, {}).values():
        self.differencesTable.add_row(
            t(
                "add {track_type} track: index={index} lang={language}",
                track_type=t(trackDescriptor.getType().label()),
                index=trackDescriptor.getIndex(),
                language=trackDescriptor.getLanguage().threeLetter(),
            )
        )

    for trackIndex in trackDifferences.get(DIFF_REMOVED_KEY, {}).keys():
        self.differencesTable.add_row(t("remove stream #{index}", index=trackIndex))

    for trackIndex, trackDiffObj in trackDifferences.get(DIFF_CHANGED_KEY, {}).items():
        targetTrackDescriptor = targetTrackDescriptorsByIndex.get(trackIndex)
        if targetTrackDescriptor is None:
            # Changed track has no counterpart in the target; cannot label it.
            continue

        # Tag additions/removals/changes within this track.
        tagsDiff = trackDiffObj.get(MediaDescriptorChangeSet.TAGS_KEY, {})
        for tagKey, tagValue in tagsDiff.get(DIFF_REMOVED_KEY, {}).items():
            self.differencesTable.add_row(
                t(
                    "change stream #{index} ({track_type}:{sub_index}) remove key={key} value={value}",
                    index=targetTrackDescriptor.getIndex(),
                    track_type=t(targetTrackDescriptor.getType().label()),
                    sub_index=targetTrackDescriptor.getSubIndex(),
                    key=tagKey,
                    value=tagValue,
                )
            )
        for tagKey, tagValue in tagsDiff.get(DIFF_ADDED_KEY, {}).items():
            self.differencesTable.add_row(
                t(
                    "change stream #{index} ({track_type}:{sub_index}) add key={key} value={value}",
                    index=targetTrackDescriptor.getIndex(),
                    track_type=t(targetTrackDescriptor.getType().label()),
                    sub_index=targetTrackDescriptor.getSubIndex(),
                    key=tagKey,
                    value=tagValue,
                )
            )
        for tagKey, tagValue in tagsDiff.get(DIFF_CHANGED_KEY, {}).items():
            self.differencesTable.add_row(
                t(
                    "change stream #{index} ({track_type}:{sub_index}) change key={key} value={value}",
                    index=targetTrackDescriptor.getIndex(),
                    track_type=t(targetTrackDescriptor.getType().label()),
                    sub_index=targetTrackDescriptor.getSubIndex(),
                    key=tagKey,
                    value=tagValue,
                )
            )

        # Disposition flag additions/removals within this track.
        dispositionDiff = trackDiffObj.get(MediaDescriptorChangeSet.DISPOSITION_SET_KEY, {})
        for addedDisposition in dispositionDiff.get(DIFF_ADDED_KEY, set()):
            self.differencesTable.add_row(
                t(
                    "change stream #{index} ({track_type}:{sub_index}) add disposition={disposition}",
                    index=targetTrackDescriptor.getIndex(),
                    track_type=t(targetTrackDescriptor.getType().label()),
                    sub_index=targetTrackDescriptor.getSubIndex(),
                    disposition=t(addedDisposition.label()),
                )
            )
        for removedDisposition in dispositionDiff.get(DIFF_REMOVED_KEY, set()):
            self.differencesTable.add_row(
                t(
                    "change stream #{index} ({track_type}:{sub_index}) remove disposition={disposition}",
                    index=targetTrackDescriptor.getIndex(),
                    track_type=t(targetTrackDescriptor.getType().label()),
                    sub_index=targetTrackDescriptor.getSubIndex(),
                    disposition=t(removedDisposition.label()),
                )
            )
|
||||||
|
|
||||||
|
def getSelectedMediaTag(self):
    """Return the media tag bound to the cursor row, or None if none is selected."""
    try:
        cursor = self.mediaTagsTable.cursor_coordinate
        row_key, _column_key = self.mediaTagsTable.coordinate_to_cell_key(cursor)
    except CellDoesNotExist:
        # Cursor outside the populated table (e.g. empty table).
        return None
    if row_key is None:
        return None
    return self._sourceMediaTagRowData.get(row_key)
|
||||||
|
|
||||||
|
def getSelectedTrackDescriptor(self):
    """Return the track descriptor bound to the cursor row, or None."""
    try:
        cursor = self.tracksTable.cursor_coordinate
        row_key, _column_key = self.tracksTable.coordinate_to_cell_key(cursor)
    except CellDoesNotExist:
        # Cursor outside the populated table (e.g. empty table).
        return None
    if row_key is None:
        return None
    return self._trackRowData.get(row_key)
|
||||||
|
|
||||||
|
def setSelectedTrackDefault(self):
    """Make the selected track the default within its type.

    Returns False when no track is selected, True otherwise.
    """
    descriptor = self.getSelectedTrackDescriptor()
    if descriptor is None:
        return False

    self._sourceMediaDescriptor.setDefaultSubTrack(
        descriptor.getType(),
        descriptor.getSubIndex(),
    )
    return True
|
||||||
|
|
||||||
|
def setSelectedTrackForced(self):
    """Flag the selected track as forced within its type.

    Returns False when no track is selected, True otherwise.
    """
    descriptor = self.getSelectedTrackDescriptor()
    if descriptor is None:
        return False

    self._sourceMediaDescriptor.setForcedSubTrack(
        descriptor.getType(),
        descriptor.getSubIndex(),
    )
    return True
|
||||||
89
src/ffx/metadata_editor.py
Normal file
89
src/ffx/metadata_editor.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from .constants import (
|
||||||
|
DEFAULT_AC3_BANDWIDTH,
|
||||||
|
DEFAULT_DTS_BANDWIDTH,
|
||||||
|
DEFAULT_STEREO_BANDWIDTH,
|
||||||
|
)
|
||||||
|
from .ffx_controller import FfxController
|
||||||
|
from .media_descriptor import MediaDescriptor
|
||||||
|
from .video_encoder import VideoEncoder
|
||||||
|
|
||||||
|
|
||||||
|
def create_temporary_output_path(source_path: str) -> str:
    """Reserve a unique sibling path next to *source_path* for edited output.

    mkstemp is used only to obtain a collision-free name in the source's
    directory; the file it creates is immediately removed so the encoder
    can create the output itself. The returned path therefore does not
    exist when this function returns.
    """
    directory = os.path.dirname(os.path.abspath(source_path)) or "."
    stem, extension = os.path.splitext(os.path.basename(source_path))

    handle, reserved_path = tempfile.mkstemp(
        prefix=f".{stem}.ffx-edit-",
        suffix=extension if extension else ".tmp",
        dir=directory,
    )
    os.close(handle)
    os.unlink(reserved_path)

    return reserved_path
|
||||||
|
|
||||||
|
|
||||||
|
def build_metadata_edit_context(context: dict) -> dict:
    """Derive a context for a metadata-only edit pass.

    The copy forces stream-copy video, disables cutting, clears any
    encoder-added metadata tags, and fills missing keys with defaults;
    the input *context* is never mutated.
    """
    edit_context = dict(context)
    edit_context["video_encoder"] = VideoEncoder.COPY
    edit_context["perform_cut"] = False
    edit_context["no_signature"] = bool(edit_context.get("no_signature", True))
    edit_context["resource_limits"] = dict(edit_context.get("resource_limits", {}))

    default_bitrates = {
        "stereo": f"{DEFAULT_STEREO_BANDWIDTH}k",
        "ac3": f"{DEFAULT_AC3_BANDWIDTH}k",
        "dts": f"{DEFAULT_DTS_BANDWIDTH}k",
    }
    edit_context["bitrates"] = dict(edit_context.get("bitrates", default_bitrates))

    edit_context["encoding_metadata_tags"] = {}
    return edit_context
|
||||||
|
|
||||||
|
|
||||||
|
def apply_metadata_edits(
    context: dict,
    source_path: str,
    baseline_descriptor: MediaDescriptor,
    draft_descriptor: MediaDescriptor,
) -> dict[str, object]:
    """Run a metadata-only job into a temporary file, then replace the source.

    Returns a dict with 'applied', 'dry_run' and 'target_path'. On any
    failure the partial output is removed and the exception re-raised.
    """
    temporaryOutputPath = create_temporary_output_path(source_path)
    editContext = build_metadata_edit_context(context)
    controller = FfxController(editContext, draft_descriptor, baseline_descriptor)

    try:
        controller.runJob(
            source_path,
            temporaryOutputPath,
            targetFormat="",
            chainIteration=[],
        )

        if editContext.get("dry_run", False):
            # NOTE(review): the finally clause below deletes the temporary
            # file, so after a dry run 'target_path' points at a path that
            # no longer exists — confirm callers only display it.
            return {
                "applied": False,
                "dry_run": True,
                "target_path": temporaryOutputPath,
            }

        # Same directory as the source (guaranteed by
        # create_temporary_output_path), so this rename replaces in place.
        os.replace(temporaryOutputPath, source_path)
        return {
            "applied": True,
            "dry_run": False,
            "target_path": source_path,
        }
    except Exception:
        # Best-effort cleanup of the partial output before propagating.
        if os.path.exists(temporaryOutputPath):
            os.remove(temporaryOutputPath)
        raise
    finally:
        # A dry run must not leave the reserved temporary file behind.
        if editContext.get("dry_run", False) and os.path.exists(temporaryOutputPath):
            os.remove(temporaryOutputPath)
|
||||||
20
src/ffx/model/__init__.py
Normal file
20
src/ffx/model/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
"""Load ORM model modules so SQLAlchemy relationship strings can resolve."""
|
||||||
|
|
||||||
|
from .show import Base, Show
|
||||||
|
from .pattern import Pattern
|
||||||
|
from .track import Track
|
||||||
|
from .track_tag import TrackTag
|
||||||
|
from .media_tag import MediaTag
|
||||||
|
from .shifted_season import ShiftedSeason
|
||||||
|
from .property import Property
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'Base',
|
||||||
|
'Show',
|
||||||
|
'Pattern',
|
||||||
|
'Track',
|
||||||
|
'TrackTag',
|
||||||
|
'MediaTag',
|
||||||
|
'ShiftedSeason',
|
||||||
|
'Property',
|
||||||
|
]
|
||||||
28
src/ffx/model/media_tag.py
Normal file
28
src/ffx/model/media_tag.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# from typing import List
|
||||||
|
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Enum
|
||||||
|
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
||||||
|
|
||||||
|
from .show import Base
|
||||||
|
|
||||||
|
|
||||||
|
class MediaTag(Base):
    """One key/value metadata tag belonging to a Pattern.

    relationship(argument, opt1, opt2, ...): argument is the target entity
    (string name or mapped class); back_populates names the corresponding
    attribute on the other side.
    See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
    """

    __tablename__ = 'media_tags'

    # v1.x declarative column style
    id = Column(Integer, primary_key=True)

    key = Column(String)
    value = Column(String)

    # Owning pattern; rows are removed when their pattern is deleted (CASCADE).
    pattern_id = Column(Integer, ForeignKey('patterns.id', ondelete="CASCADE"))
    pattern = relationship('Pattern', back_populates='media_tags')
|
||||||
82
src/ffx/model/migration/__init__.py
Normal file
82
src/ffx/model/migration/__init__.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import importlib
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseVersionException(Exception):
    """Raised when the database schema cannot be migrated to the required version.

    The message passed to the constructor follows standard Exception
    semantics (available via ``str(exc)`` / ``exc.args``); the former
    pass-through ``__init__`` was redundant and has been removed.
    """
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class MigrationStep:
    """Immutable description of one single-version schema upgrade."""

    versionFrom: int     # schema version this step starts from
    versionTo: int       # schema version after the step runs
    moduleName: str      # dotted module expected to implement the step
    modulePresent: bool  # whether that module was importable at plan time
|
||||||
|
|
||||||
|
|
||||||
|
def getMigrationStepModuleName(versionFrom: int, versionTo: int) -> str:
    """Return the dotted module name implementing the given migration step."""
    return "ffx.model.migration.step_{}_{}".format(int(versionFrom), int(versionTo))
|
||||||
|
|
||||||
|
|
||||||
|
def migrationStepModuleExists(versionFrom: int, versionTo: int) -> bool:
    """Check whether the step module is importable, without importing it."""
    moduleName = getMigrationStepModuleName(versionFrom, versionTo)
    try:
        spec = importlib.util.find_spec(moduleName)
    except ModuleNotFoundError:
        # A missing parent package also means the step is unavailable.
        return False
    return spec is not None
|
||||||
|
|
||||||
|
|
||||||
|
def getMigrationPlan(currentVersion: int, targetVersion: int) -> list[MigrationStep]:
    """Enumerate the single-version steps needed to reach targetVersion.

    Returns an empty list when currentVersion >= targetVersion.
    """
    start = int(currentVersion)
    end = int(targetVersion)
    return [
        MigrationStep(
            versionFrom=version,
            versionTo=version + 1,
            moduleName=getMigrationStepModuleName(version, version + 1),
            modulePresent=migrationStepModuleExists(version, version + 1),
        )
        for version in range(start, end)
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def loadMigrationStep(versionFrom: int, versionTo: int):
    """Import the step module and return its applyMigration callable.

    Raises DatabaseVersionException when the module or its entry point is
    missing; unrelated import failures propagate unchanged.
    """
    moduleName = getMigrationStepModuleName(versionFrom, versionTo)
    try:
        stepModule = importlib.import_module(moduleName)
    except ModuleNotFoundError as ex:
        if ex.name != moduleName:
            # Some other module failed to import; not a missing step.
            raise
        raise DatabaseVersionException(
            f"No migration path from database version {versionFrom} to {versionTo}"
        ) from ex

    applyCallable = getattr(stepModule, "applyMigration", None)
    if applyCallable is None:
        raise DatabaseVersionException(
            f"Migration module {moduleName} does not define applyMigration()"
        )

    return applyCallable
|
||||||
|
|
||||||
|
|
||||||
|
def migrateDatabase(databaseContext, currentVersion: int, targetVersion: int, setDatabaseVersion):
    """Apply each planned migration step in order, recording the version after each."""
    for plannedStep in getMigrationPlan(currentVersion, targetVersion):
        applyStep = loadMigrationStep(plannedStep.versionFrom, plannedStep.versionTo)
        applyStep(databaseContext)
        # Persist progress immediately so a failed later step can resume here.
        setDatabaseVersion(databaseContext, plannedStep.versionTo)
|
||||||
84
src/ffx/model/migration/step_2_3.py
Normal file
84
src/ffx/model/migration/step_2_3.py
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
from sqlalchemy import inspect, text
|
||||||
|
|
||||||
|
|
||||||
|
def applyMigration(databaseContext):
    """Migrate the schema from version 2 to 3.

    Two independent, individually-skippable sub-steps:
    - shifted_seasons gains an optional pattern_id owner. SQLite cannot add
      a foreign key in place, so the table is rebuilt and swapped.
    - shows gains 'quality' and 'notes' columns.
    """
    engine = databaseContext['engine']
    inspector = inspect(engine)
    # Snapshot existing columns so each sub-step is skipped if already applied.
    shiftedSeasonColumns = {
        column['name']
        for column in inspector.get_columns('shifted_seasons')
    }
    showColumns = {
        column['name']
        for column in inspector.get_columns('shows')
    }

    with engine.begin() as connection:
        if 'pattern_id' not in shiftedSeasonColumns:
            # Disable FK enforcement while the table is swapped out.
            connection.execute(text("PRAGMA foreign_keys=OFF"))
            connection.execute(
                text(
                    """
                    CREATE TABLE shifted_seasons_v3 (
                        id INTEGER PRIMARY KEY,
                        show_id INTEGER,
                        pattern_id INTEGER,
                        original_season INTEGER,
                        first_episode INTEGER DEFAULT -1,
                        last_episode INTEGER DEFAULT -1,
                        season_offset INTEGER DEFAULT 0,
                        episode_offset INTEGER DEFAULT 0,
                        FOREIGN KEY(show_id) REFERENCES shows(id) ON DELETE CASCADE,
                        FOREIGN KEY(pattern_id) REFERENCES patterns(id) ON DELETE CASCADE,
                        CHECK (
                            (show_id IS NOT NULL AND pattern_id IS NULL)
                            OR (show_id IS NULL AND pattern_id IS NOT NULL)
                        )
                    )
                    """
                )
            )
            # Copy existing rows; pattern_id is NULL for all legacy rows.
            connection.execute(
                text(
                    """
                    INSERT INTO shifted_seasons_v3 (
                        id,
                        show_id,
                        pattern_id,
                        original_season,
                        first_episode,
                        last_episode,
                        season_offset,
                        episode_offset
                    )
                    SELECT
                        id,
                        show_id,
                        NULL,
                        original_season,
                        first_episode,
                        last_episode,
                        season_offset,
                        episode_offset
                    FROM shifted_seasons
                    """
                )
            )
            connection.execute(text("DROP TABLE shifted_seasons"))
            connection.execute(text("ALTER TABLE shifted_seasons_v3 RENAME TO shifted_seasons"))
            # Recreate the lookup indexes lost with the old table.
            connection.execute(
                text("CREATE INDEX ix_shifted_seasons_show_id ON shifted_seasons(show_id)")
            )
            connection.execute(
                text("CREATE INDEX ix_shifted_seasons_pattern_id ON shifted_seasons(pattern_id)")
            )
            connection.execute(text("PRAGMA foreign_keys=ON"))

        if 'quality' not in showColumns:
            connection.execute(
                text("ALTER TABLE shows ADD COLUMN quality INTEGER DEFAULT 0")
            )
        if 'notes' not in showColumns:
            connection.execute(
                text("ALTER TABLE shows ADD COLUMN notes TEXT DEFAULT ''")
            )
|
||||||
84
src/ffx/model/pattern.py
Normal file
84
src/ffx/model/pattern.py
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from sqlalchemy import Column, Integer, String, Text, ForeignKey, UniqueConstraint
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from .show import Base, Show
|
||||||
|
|
||||||
|
from ffx.media_descriptor import MediaDescriptor
|
||||||
|
from ffx.show_descriptor import ShowDescriptor
|
||||||
|
|
||||||
|
|
||||||
|
class Pattern(Base):
    """A filename pattern owned by a Show, carrying its own track/tag template.

    Fixes in this revision: the duplicate ``getId`` definition (the second,
    identical definition silently shadowed the first) has been removed, and
    membership tests use idiomatic ``in``.
    """

    __tablename__ = 'patterns'
    __table_args__ = (
        UniqueConstraint('show_id', 'pattern', name='uq_patterns_show_id_pattern'),
    )

    # v1.x declarative column style
    id = Column(Integer, primary_key=True)
    pattern = Column(String)

    # v2.0
    # id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # pattern: Mapped[str] = mapped_column(String, nullable=False)

    # Owning show; pattern rows are removed with their show (CASCADE).
    show_id = Column(Integer, ForeignKey('shows.id', ondelete="CASCADE"))
    show = relationship(Show, back_populates='patterns', lazy='joined')

    # v2.0
    # show_id: Mapped[int] = mapped_column(ForeignKey("shows.id", ondelete="CASCADE"))
    # show: Mapped["Show"] = relationship(back_populates="patterns")

    tracks = relationship('Track', back_populates='pattern', cascade="all, delete", lazy='joined')

    media_tags = relationship('MediaTag', back_populates='pattern', cascade="all, delete", lazy='joined')
    shifted_seasons = relationship('ShiftedSeason', back_populates='pattern', cascade="all, delete", lazy='joined')

    quality = Column(Integer, default=0)

    notes = Column(Text, default='')

    def getId(self):
        """Return the primary key as a plain int."""
        return int(self.id)

    def getShowId(self):
        """Return the owning show's id as a plain int."""
        return int(self.show_id)

    def getShowDescriptor(self, context) -> ShowDescriptor:
        """Build the owning show's descriptor for the given context."""
        return self.show.getDescriptor(context)

    def getPattern(self):
        """Return the raw pattern string."""
        return str(self.pattern)

    def getTags(self):
        """Return the media tags as a plain {key: value} string dict."""
        return {str(tag.key): str(tag.value) for tag in self.media_tags}

    def getMediaDescriptor(self, context):
        """Assemble a MediaDescriptor from this pattern's tags and tracks.

        Sub-indices are assigned per track type, in declaration order.
        """
        kwargs = {
            MediaDescriptor.CONTEXT_KEY: context,
            MediaDescriptor.TAGS_KEY: self.getTags(),
            MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY: [],
        }

        # Set ordered sub-indices per track type.
        subIndexCounter = {}
        for track in self.tracks:
            trackType = track.getType()
            subIndex = subIndexCounter.get(trackType, 0)
            kwargs[MediaDescriptor.TRACK_DESCRIPTOR_LIST_KEY].append(
                track.getDescriptor(context, subIndex=subIndex)
            )
            subIndexCounter[trackType] = subIndex + 1

        return MediaDescriptor(**kwargs)
|
||||||
16
src/ffx/model/property.py
Normal file
16
src/ffx/model/property.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# from typing import List
|
||||||
|
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Enum
|
||||||
|
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
||||||
|
|
||||||
|
from .show import Base
|
||||||
|
|
||||||
|
|
||||||
|
class Property(Base):
    """Generic key/value row; the meaning of each key is defined by its callers."""

    __tablename__ = 'properties'

    # v1.x declarative column style
    id = Column(Integer, primary_key=True)

    key = Column(String)
    value = Column(String)
|
||||||
89
src/ffx/model/shifted_season.py
Normal file
89
src/ffx/model/shifted_season.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from sqlalchemy import CheckConstraint, Column, ForeignKey, Index, Integer
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from .show import Base, Show
|
||||||
|
|
||||||
|
|
||||||
|
class ShiftedSeason(Base):
    """Season/episode renumbering rule owned by exactly one Show OR one Pattern.

    The CHECK constraint enforces the exclusive-owner rule at the database
    level; the two indexes speed up per-owner lookups.
    """

    __tablename__ = 'shifted_seasons'
    __table_args__ = (
        CheckConstraint(
            "(show_id IS NOT NULL AND pattern_id IS NULL) OR (show_id IS NULL AND pattern_id IS NOT NULL)",
            name="ck_shifted_seasons_single_owner",
        ),
        Index("ix_shifted_seasons_show_id", "show_id"),
        Index("ix_shifted_seasons_pattern_id", "pattern_id"),
    )

    # v1.x declarative column style
    id = Column(Integer, primary_key=True)

    # v2.0
    # id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # pattern: Mapped[str] = mapped_column(String, nullable=False)

    # v1.x — both owners nullable; exactly one is set (see CHECK above).
    show_id = Column(Integer, ForeignKey('shows.id', ondelete="CASCADE"), nullable=True)
    show = relationship(Show, back_populates='shifted_seasons', lazy='joined')

    pattern_id = Column(Integer, ForeignKey('patterns.id', ondelete="CASCADE"), nullable=True)
    pattern = relationship('Pattern', back_populates='shifted_seasons', lazy='joined')

    # v2.0
    # show_id: Mapped[int] = mapped_column(ForeignKey("shows.id", ondelete="CASCADE"))
    # show: Mapped["Show"] = relationship(back_populates="patterns")

    original_season = Column(Integer)

    # -1 presumably means "no episode bound" — TODO confirm against callers.
    first_episode = Column(Integer, default = -1)
    last_episode = Column(Integer, default = -1)

    season_offset = Column(Integer, default = 0)
    episode_offset = Column(Integer, default = 0)

    def getId(self):
        """Primary key of this rule."""
        return self.id

    def getShowId(self):
        """Owning show id, or None when owned by a pattern."""
        return self.show_id

    def getPatternId(self):
        """Owning pattern id, or None when owned by a show."""
        return self.pattern_id

    def getOriginalSeason(self):
        """Season number the rule applies to."""
        return self.original_season

    def getFirstEpisode(self):
        return self.first_episode

    def getLastEpisode(self):
        return self.last_episode

    def getSeasonOffset(self):
        return self.season_offset

    def getEpisodeOffset(self):
        return self.episode_offset

    def getObj(self):
        """Return the rule as a plain dict whose keys mirror the column names."""
        shiftedSeasonObj = {}

        shiftedSeasonObj['show_id'] = self.getShowId()
        shiftedSeasonObj['pattern_id'] = self.getPatternId()
        shiftedSeasonObj['original_season'] = self.getOriginalSeason()
        shiftedSeasonObj['first_episode'] = self.getFirstEpisode()
        shiftedSeasonObj['last_episode'] = self.getLastEpisode()
        shiftedSeasonObj['season_offset'] = self.getSeasonOffset()
        shiftedSeasonObj['episode_offset'] = self.getEpisodeOffset()

        return shiftedSeasonObj
|
||||||
66
src/ffx/model/show.py
Normal file
66
src/ffx/model/show.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
# from typing import List
|
||||||
|
from sqlalchemy import create_engine, Column, Integer, String, Text, ForeignKey
|
||||||
|
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
||||||
|
|
||||||
|
from ffx.show_descriptor import ShowDescriptor
|
||||||
|
|
||||||
|
Base = declarative_base()
|
||||||
|
|
||||||
|
|
||||||
|
class Show(Base):
    """A show row; root of the pattern / shifted-season object graph.

    relationship(argument, opt1, opt2, ...): argument is the target entity
    (string name or mapped class); back_populates names the corresponding
    attribute on the other side.
    See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
    """

    __tablename__ = 'shows'

    # v1.x declarative column style
    id = Column(Integer, primary_key=True)

    name = Column(String)
    year = Column(Integer)

    # v2.0
    # id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # name: Mapped[str] = mapped_column(String, nullable=False)
    # year: Mapped[int] = mapped_column(Integer, nullable=False)

    # v1.x
    #patterns = relationship('Pattern', back_populates='show', cascade="all, delete", passive_deletes=True)
    patterns = relationship('Pattern', back_populates='show', cascade="all, delete")
    # patterns = relationship('Pattern', back_populates='show', cascade="all")

    # v2.0
    # patterns: Mapped[List["Pattern"]] = relationship(back_populates="show", cascade="all, delete")

    shifted_seasons = relationship('ShiftedSeason', back_populates='show', cascade="all, delete")

    # Digit-width settings default to the ShowDescriptor constants.
    index_season_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS)
    index_episode_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS)
    indicator_season_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS)
    indicator_episode_digits = Column(Integer, default=ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS)
    quality = Column(Integer, default=0)
    notes = Column(Text, default='')

    def getDescriptor(self, context):
        """Build a ShowDescriptor snapshot of this row.

        NULL quality/notes (rows predating those columns) fall back to 0 / ''.
        """
        kwargs = {}
        kwargs[ShowDescriptor.CONTEXT_KEY] = context
        kwargs[ShowDescriptor.ID_KEY] = int(self.id)
        kwargs[ShowDescriptor.NAME_KEY] = str(self.name)
        kwargs[ShowDescriptor.YEAR_KEY] = int(self.year)
        kwargs[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] = int(self.index_season_digits)
        kwargs[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] = int(self.index_episode_digits)
        kwargs[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] = int(self.indicator_season_digits)
        kwargs[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] = int(self.indicator_episode_digits)
        kwargs[ShowDescriptor.QUALITY_KEY] = int(self.quality or 0)
        kwargs[ShowDescriptor.NOTES_KEY] = str(self.notes or '')

        return ShowDescriptor(**kwargs)
|
||||||
216
src/ffx/model/track.py
Normal file
216
src/ffx/model/track.py
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
# from typing import List
|
||||||
|
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
|
||||||
|
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
||||||
|
|
||||||
|
from .show import Base
|
||||||
|
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
|
||||||
|
from ffx.iso_language import IsoLanguage
|
||||||
|
|
||||||
|
from ffx.track_disposition import TrackDisposition
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
|
||||||
|
from ffx.audio_layout import AudioLayout
|
||||||
|
from ffx.track_codec import TrackCodec
|
||||||
|
|
||||||
|
|
||||||
|
class Track(Base):
|
||||||
|
"""
|
||||||
|
relationship(argument, opt1, opt2, ...)
|
||||||
|
argument is string of class or Mapped class of the target entity
|
||||||
|
backref creates a bi-directional corresponding relationship (back_populates preferred)
|
||||||
|
back_populates points to the corresponding relationship (the actual class attribute identifier)
|
||||||
|
|
||||||
|
See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = 'tracks'
|
||||||
|
|
||||||
|
# v1.x
|
||||||
|
id = Column(Integer, primary_key=True, autoincrement = True)
|
||||||
|
|
||||||
|
# P=pattern_id+sub_index+track_type
|
||||||
|
track_type = Column(Integer) # TrackType
|
||||||
|
|
||||||
|
index = Column(Integer)
|
||||||
|
source_index = Column(Integer)
|
||||||
|
|
||||||
|
# v1.x
|
||||||
|
pattern_id = Column(Integer, ForeignKey('patterns.id', ondelete="CASCADE"))
|
||||||
|
pattern = relationship('Pattern', back_populates='tracks')
|
||||||
|
|
||||||
|
track_tags = relationship('TrackTag', back_populates='track', cascade="all, delete", lazy="joined")
|
||||||
|
|
||||||
|
disposition_flags = Column(Integer)
|
||||||
|
|
||||||
|
codec_name = Column(String)
|
||||||
|
audio_layout = Column(Integer)
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
|
||||||
|
trackType = kwargs.pop('track_type', None)
|
||||||
|
if trackType is not None:
|
||||||
|
self.track_type = int(trackType)
|
||||||
|
|
||||||
|
dispositionSet = kwargs.pop(TrackDescriptor.DISPOSITION_SET_KEY, set())
|
||||||
|
self.disposition_flags = int(TrackDisposition.toFlags(dispositionSet))
|
||||||
|
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fromFfprobeStreamObj(cls, streamObj, patternId):
|
||||||
|
"""{
|
||||||
|
'index': 4,
|
||||||
|
'codec_name': 'hdmv_pgs_subtitle',
|
||||||
|
'codec_long_name': 'HDMV Presentation Graphic Stream subtitles',
|
||||||
|
'codec_type': 'subtitle',
|
||||||
|
'codec_tag_string': '[0][0][0][0]',
|
||||||
|
'codec_tag': '0x0000',
|
||||||
|
'r_frame_rate': '0/0',
|
||||||
|
'avg_frame_rate': '0/0',
|
||||||
|
'time_base': '1/1000',
|
||||||
|
'start_pts': 0,
|
||||||
|
'start_time': '0.000000',
|
||||||
|
'duration_ts': 1421035,
|
||||||
|
'duration': '1421.035000',
|
||||||
|
'disposition': {
|
||||||
|
'default': 1,
|
||||||
|
'dub': 0,
|
||||||
|
'original': 0,
|
||||||
|
'comment': 0,
|
||||||
|
'lyrics': 0,
|
||||||
|
'karaoke': 0,
|
||||||
|
'forced': 0,
|
||||||
|
'hearing_impaired': 0,
|
||||||
|
'visual_impaired': 0,
|
||||||
|
'clean_effects': 0,
|
||||||
|
'attached_pic': 0,
|
||||||
|
'timed_thumbnails': 0,
|
||||||
|
'non_diegetic': 0,
|
||||||
|
'captions': 0,
|
||||||
|
'descriptions': 0,
|
||||||
|
'metadata': 0,
|
||||||
|
'dependent': 0,
|
||||||
|
'still_image': 0
|
||||||
|
},
|
||||||
|
'tags': {
|
||||||
|
'language': 'ger',
|
||||||
|
'title': 'German Full'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# v1.x
|
||||||
|
id = Column(Integer, primary_key=True, autoincrement = True)
|
||||||
|
|
||||||
|
# P=pattern_id+sub_index+track_type
|
||||||
|
track_type = Column(Integer) # TrackType
|
||||||
|
sub_index = Column(Integer)
|
||||||
|
|
||||||
|
# v1.x
|
||||||
|
pattern_id = Column(Integer, ForeignKey('patterns.id', ondelete='CASCADE'))
|
||||||
|
pattern = relationship('Pattern', back_populates='tracks')
|
||||||
|
|
||||||
|
|
||||||
|
language = Column(String) # IsoLanguage threeLetter
|
||||||
|
title = Column(String)
|
||||||
|
|
||||||
|
|
||||||
|
track_tags = relationship('TrackTag', back_populates='track', cascade='all, delete')
|
||||||
|
|
||||||
|
|
||||||
|
disposition_flags = Column(Integer)
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
trackType = streamObj[TrackDescriptor.FFPROBE_CODEC_TYPE_KEY]
|
||||||
|
|
||||||
|
if trackType in [t.label() for t in TrackType]:
|
||||||
|
|
||||||
|
return cls(pattern_id = patternId,
|
||||||
|
track_type = trackType,
|
||||||
|
codec_name = streamObj[TrackDescriptor.FFPROBE_CODEC_NAME_KEY],
|
||||||
|
disposition_flags = sum([2**t.index() for (k,v) in streamObj[TrackDescriptor.FFPROBE_DISPOSITION_KEY].items()
|
||||||
|
if v and (t := TrackDisposition.find(k)) is not None]),
|
||||||
|
audio_layout = AudioLayout.identify(streamObj))
|
||||||
|
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def getId(self):
|
||||||
|
return int(self.id)
|
||||||
|
|
||||||
|
def getPatternId(self):
|
||||||
|
return int(self.pattern_id)
|
||||||
|
|
||||||
|
def getType(self):
|
||||||
|
return TrackType.fromIndex(self.track_type)
|
||||||
|
|
||||||
|
def getCodec(self) -> TrackCodec:
|
||||||
|
return TrackCodec.identify(self.codec_name)
|
||||||
|
|
||||||
|
def getIndex(self):
|
||||||
|
return int(self.index) if self.index is not None else -1
|
||||||
|
|
||||||
|
def getSourceIndex(self):
|
||||||
|
return int(self.source_index) if self.source_index is not None else -1
|
||||||
|
|
||||||
|
def getLanguage(self):
|
||||||
|
tags = {t.key:t.value for t in self.track_tags}
|
||||||
|
return IsoLanguage.findThreeLetter(tags['language']) if 'language' in tags.keys() else IsoLanguage.UNDEFINED
|
||||||
|
|
||||||
|
def getTitle(self):
|
||||||
|
tags = {t.key:t.value for t in self.track_tags}
|
||||||
|
return tags['title'] if 'title' in tags.keys() else ''
|
||||||
|
|
||||||
|
def getDispositionSet(self):
|
||||||
|
return TrackDisposition.toSet(self.disposition_flags)
|
||||||
|
|
||||||
|
def getAudioLayout(self):
|
||||||
|
return AudioLayout.fromIndex(self.audio_layout)
|
||||||
|
|
||||||
|
def getTags(self):
|
||||||
|
return {str(t.key):str(t.value) for t in self.track_tags}
|
||||||
|
|
||||||
|
|
||||||
|
def setDisposition(self, disposition : TrackDisposition):
|
||||||
|
self.disposition_flags = self.disposition_flags | int(2**disposition.index())
|
||||||
|
|
||||||
|
def resetDisposition(self, disposition : TrackDisposition):
|
||||||
|
self.disposition_flags = self.disposition_flags & sum([2**d.index() for d in TrackDisposition if d != disposition])
|
||||||
|
|
||||||
|
def getDisposition(self, disposition : TrackDisposition):
|
||||||
|
return bool(self.disposition_flags & 2**disposition.index())
|
||||||
|
|
||||||
|
|
||||||
|
def getDescriptor(self, context = None, subIndex : int = -1) -> TrackDescriptor:
|
||||||
|
|
||||||
|
kwargs = {}
|
||||||
|
|
||||||
|
if not context is None:
|
||||||
|
kwargs[TrackDescriptor.CONTEXT_KEY] = context
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.ID_KEY] = self.getId()
|
||||||
|
kwargs[TrackDescriptor.PATTERN_ID_KEY] = self.getPatternId()
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.INDEX_KEY] = self.getIndex()
|
||||||
|
kwargs[TrackDescriptor.SOURCE_INDEX_KEY] = self.getSourceIndex()
|
||||||
|
|
||||||
|
if subIndex > -1:
|
||||||
|
kwargs[TrackDescriptor.SUB_INDEX_KEY] = subIndex
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.TRACK_TYPE_KEY] = self.getType()
|
||||||
|
kwargs[TrackDescriptor.CODEC_KEY] = self.getCodec()
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = self.getDispositionSet()
|
||||||
|
kwargs[TrackDescriptor.TAGS_KEY] = self.getTags()
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = self.getAudioLayout()
|
||||||
|
|
||||||
|
return TrackDescriptor(**kwargs)
|
||||||
28
src/ffx/model/track_tag.py
Normal file
28
src/ffx/model/track_tag.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# from typing import List
|
||||||
|
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Enum
|
||||||
|
from sqlalchemy.orm import relationship, declarative_base, sessionmaker
|
||||||
|
|
||||||
|
from .show import Base
|
||||||
|
|
||||||
|
|
||||||
|
class TrackTag(Base):
|
||||||
|
"""
|
||||||
|
relationship(argument, opt1, opt2, ...)
|
||||||
|
argument is string of class or Mapped class of the target entity
|
||||||
|
backref creates a bi-directional corresponding relationship (back_populates preferred)
|
||||||
|
back_populates points to the corresponding relationship (the actual class attribute identifier)
|
||||||
|
|
||||||
|
See: https://docs.sqlalchemy.org/en/(14|20)/orm/basic_relationships.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = 'track_tags'
|
||||||
|
|
||||||
|
# v1.x
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
|
||||||
|
key = Column(String)
|
||||||
|
value = Column(String)
|
||||||
|
|
||||||
|
# v1.x
|
||||||
|
track_id = Column(Integer, ForeignKey('tracks.id', ondelete="CASCADE"))
|
||||||
|
track = relationship('Track', back_populates='track_tags')
|
||||||
411
src/ffx/pattern_controller.py
Normal file
411
src/ffx/pattern_controller.py
Normal file
@@ -0,0 +1,411 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.model.media_tag import MediaTag
|
||||||
|
from ffx.model.pattern import Pattern
|
||||||
|
from ffx.model.track import Track
|
||||||
|
from ffx.model.track_tag import TrackTag
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
from ffx.track_disposition import TrackDisposition
|
||||||
|
|
||||||
|
|
||||||
|
class DuplicatePatternMatchError(click.ClickException):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidPatternSchemaError(click.ClickException):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class PatternController:
|
||||||
|
_compiled_regex_cache: dict[str, re.Pattern] = {}
|
||||||
|
|
||||||
|
def __init__(self, context):
|
||||||
|
|
||||||
|
self.context = context
|
||||||
|
self.Session = self.context["database"]["session"]
|
||||||
|
|
||||||
|
self.__configurationData = self.context["config"].getData()
|
||||||
|
|
||||||
|
metadataConfiguration = (
|
||||||
|
self.__configurationData["metadata"]
|
||||||
|
if "metadata" in self.__configurationData.keys()
|
||||||
|
else {}
|
||||||
|
)
|
||||||
|
|
||||||
|
self.__removeTrackKeys = (
|
||||||
|
metadataConfiguration["streams"]["remove"]
|
||||||
|
if "streams" in metadataConfiguration.keys()
|
||||||
|
and "remove" in metadataConfiguration["streams"].keys()
|
||||||
|
else []
|
||||||
|
)
|
||||||
|
self.__ignoreTrackKeys = (
|
||||||
|
metadataConfiguration["streams"]["ignore"]
|
||||||
|
if "streams" in metadataConfiguration.keys()
|
||||||
|
and "ignore" in metadataConfiguration["streams"].keys()
|
||||||
|
else []
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _clear_regex_cache(cls):
|
||||||
|
cls._compiled_regex_cache.clear()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _compile_pattern_expression(cls, pattern_id: int, expression: str) -> re.Pattern:
|
||||||
|
expression_text = str(expression)
|
||||||
|
compiled = cls._compiled_regex_cache.get(expression_text)
|
||||||
|
if compiled is None:
|
||||||
|
try:
|
||||||
|
compiled = re.compile(expression_text)
|
||||||
|
except re.error as ex:
|
||||||
|
raise click.ClickException(
|
||||||
|
f"Pattern #{pattern_id} contains an invalid regex {expression_text!r}: {ex}"
|
||||||
|
)
|
||||||
|
cls._compiled_regex_cache[expression_text] = compiled
|
||||||
|
return compiled
|
||||||
|
|
||||||
|
def _coerce_pattern_fields(self, patternObj):
|
||||||
|
return {
|
||||||
|
"show_id": int(patternObj["show_id"]),
|
||||||
|
"pattern": str(patternObj["pattern"]),
|
||||||
|
"quality": int(patternObj.get("quality", 0) or 0),
|
||||||
|
"notes": str(patternObj.get("notes", "")),
|
||||||
|
}
|
||||||
|
|
||||||
|
def _coerce_media_tags(self, mediaTags):
|
||||||
|
return {
|
||||||
|
str(tagKey): str(tagValue)
|
||||||
|
for tagKey, tagValue in (mediaTags or {}).items()
|
||||||
|
}
|
||||||
|
|
||||||
|
def _normalize_track_descriptors(self, trackDescriptors):
|
||||||
|
if trackDescriptors is None:
|
||||||
|
raise InvalidPatternSchemaError(
|
||||||
|
"Patterns must define at least one track before they can be stored."
|
||||||
|
)
|
||||||
|
|
||||||
|
normalized_descriptors = []
|
||||||
|
for trackDescriptor in trackDescriptors:
|
||||||
|
if type(trackDescriptor) is not TrackDescriptor:
|
||||||
|
raise TypeError(
|
||||||
|
"PatternController: All track descriptors are required to be of type TrackDescriptor"
|
||||||
|
)
|
||||||
|
normalized_descriptors.append(trackDescriptor)
|
||||||
|
|
||||||
|
if not normalized_descriptors:
|
||||||
|
raise InvalidPatternSchemaError(
|
||||||
|
"Patterns must define at least one track before they can be stored."
|
||||||
|
)
|
||||||
|
|
||||||
|
normalized_descriptors = sorted(
|
||||||
|
normalized_descriptors, key=lambda descriptor: descriptor.getIndex()
|
||||||
|
)
|
||||||
|
|
||||||
|
index_set = {descriptor.getIndex() for descriptor in normalized_descriptors}
|
||||||
|
expected_indexes = set(range(len(normalized_descriptors)))
|
||||||
|
if index_set != expected_indexes:
|
||||||
|
raise click.ClickException(
|
||||||
|
"Pattern tracks must use a contiguous zero-based index order."
|
||||||
|
)
|
||||||
|
|
||||||
|
return normalized_descriptors
|
||||||
|
|
||||||
|
def _ensure_unique_pattern_definition(
|
||||||
|
self,
|
||||||
|
session,
|
||||||
|
show_id: int,
|
||||||
|
pattern_expression: str,
|
||||||
|
exclude_pattern_id: int | None = None,
|
||||||
|
):
|
||||||
|
query = session.query(Pattern).filter(
|
||||||
|
Pattern.show_id == show_id,
|
||||||
|
Pattern.pattern == pattern_expression,
|
||||||
|
)
|
||||||
|
if exclude_pattern_id is not None:
|
||||||
|
query = query.filter(Pattern.id != int(exclude_pattern_id))
|
||||||
|
|
||||||
|
existing_pattern = query.first()
|
||||||
|
if existing_pattern is not None:
|
||||||
|
raise click.ClickException(
|
||||||
|
f"Pattern {pattern_expression!r} already exists for show #{show_id}."
|
||||||
|
)
|
||||||
|
|
||||||
|
def _build_track_row(self, trackDescriptor: TrackDescriptor) -> Track:
|
||||||
|
track = Track(
|
||||||
|
track_type=int(trackDescriptor.getType().index()),
|
||||||
|
codec_name=str(trackDescriptor.getCodec().identifier()),
|
||||||
|
index=int(trackDescriptor.getIndex()),
|
||||||
|
source_index=int(trackDescriptor.getSourceIndex()),
|
||||||
|
disposition_flags=int(
|
||||||
|
TrackDisposition.toFlags(trackDescriptor.getDispositionSet())
|
||||||
|
),
|
||||||
|
audio_layout=trackDescriptor.getAudioLayout().index(),
|
||||||
|
)
|
||||||
|
|
||||||
|
for tagKey, tagValue in trackDescriptor.getTags().items():
|
||||||
|
if tagKey in self.__ignoreTrackKeys or tagKey in self.__removeTrackKeys:
|
||||||
|
continue
|
||||||
|
track.track_tags.append(TrackTag(key=str(tagKey), value=str(tagValue)))
|
||||||
|
|
||||||
|
return track
|
||||||
|
|
||||||
|
def _replace_pattern_schema(
|
||||||
|
self,
|
||||||
|
session,
|
||||||
|
pattern: Pattern,
|
||||||
|
mediaTags: dict[str, str],
|
||||||
|
trackDescriptors: list[TrackDescriptor],
|
||||||
|
):
|
||||||
|
for mediaTag in list(pattern.media_tags):
|
||||||
|
session.delete(mediaTag)
|
||||||
|
for track in list(pattern.tracks):
|
||||||
|
session.delete(track)
|
||||||
|
session.flush()
|
||||||
|
|
||||||
|
for tagKey, tagValue in mediaTags.items():
|
||||||
|
pattern.media_tags.append(MediaTag(key=str(tagKey), value=str(tagValue)))
|
||||||
|
|
||||||
|
for trackDescriptor in trackDescriptors:
|
||||||
|
pattern.tracks.append(self._build_track_row(trackDescriptor))
|
||||||
|
|
||||||
|
def _validate_persisted_pattern(self, pattern: Pattern):
|
||||||
|
if not pattern.tracks:
|
||||||
|
raise InvalidPatternSchemaError(
|
||||||
|
f"Pattern #{pattern.getId()} ({pattern.getPattern()!r}) is invalid because it has no tracks."
|
||||||
|
)
|
||||||
|
|
||||||
|
def savePatternSchema(
|
||||||
|
self,
|
||||||
|
patternObj,
|
||||||
|
trackDescriptors,
|
||||||
|
mediaTags=None,
|
||||||
|
patternId: int | None = None,
|
||||||
|
) -> int:
|
||||||
|
fields = self._coerce_pattern_fields(patternObj)
|
||||||
|
normalized_tracks = self._normalize_track_descriptors(trackDescriptors)
|
||||||
|
normalized_tags = self._coerce_media_tags(mediaTags)
|
||||||
|
session = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
self._ensure_unique_pattern_definition(
|
||||||
|
session,
|
||||||
|
fields["show_id"],
|
||||||
|
fields["pattern"],
|
||||||
|
exclude_pattern_id=patternId,
|
||||||
|
)
|
||||||
|
|
||||||
|
if patternId is None:
|
||||||
|
pattern = Pattern(
|
||||||
|
show_id=fields["show_id"],
|
||||||
|
pattern=fields["pattern"],
|
||||||
|
quality=fields["quality"],
|
||||||
|
notes=fields["notes"],
|
||||||
|
)
|
||||||
|
session.add(pattern)
|
||||||
|
session.flush()
|
||||||
|
else:
|
||||||
|
pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first()
|
||||||
|
if pattern is None:
|
||||||
|
raise click.ClickException(
|
||||||
|
f"PatternController.savePatternSchema(): Pattern #{patternId} not found"
|
||||||
|
)
|
||||||
|
pattern.show_id = fields["show_id"]
|
||||||
|
pattern.pattern = fields["pattern"]
|
||||||
|
pattern.quality = fields["quality"]
|
||||||
|
pattern.notes = fields["notes"]
|
||||||
|
|
||||||
|
self._replace_pattern_schema(
|
||||||
|
session,
|
||||||
|
pattern,
|
||||||
|
normalized_tags,
|
||||||
|
normalized_tracks,
|
||||||
|
)
|
||||||
|
|
||||||
|
session.commit()
|
||||||
|
self._clear_regex_cache()
|
||||||
|
return pattern.getId()
|
||||||
|
|
||||||
|
except click.ClickException:
|
||||||
|
raise
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(
|
||||||
|
f"PatternController.savePatternSchema(): {repr(ex)}"
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def addPattern(self, patternObj, trackDescriptors=None, mediaTags=None):
|
||||||
|
return self.savePatternSchema(
|
||||||
|
patternObj,
|
||||||
|
trackDescriptors=trackDescriptors,
|
||||||
|
mediaTags=mediaTags,
|
||||||
|
)
|
||||||
|
|
||||||
|
def updatePattern(self, patternId, patternObj):
|
||||||
|
|
||||||
|
fields = self._coerce_pattern_fields(patternObj)
|
||||||
|
session = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first()
|
||||||
|
|
||||||
|
if pattern is not None:
|
||||||
|
self._ensure_unique_pattern_definition(
|
||||||
|
session,
|
||||||
|
fields["show_id"],
|
||||||
|
fields["pattern"],
|
||||||
|
exclude_pattern_id=patternId,
|
||||||
|
)
|
||||||
|
self._validate_persisted_pattern(pattern)
|
||||||
|
|
||||||
|
pattern.show_id = fields["show_id"]
|
||||||
|
pattern.pattern = fields["pattern"]
|
||||||
|
pattern.quality = fields["quality"]
|
||||||
|
pattern.notes = fields["notes"]
|
||||||
|
|
||||||
|
session.commit()
|
||||||
|
self._clear_regex_cache()
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
except click.ClickException:
|
||||||
|
raise
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"PatternController.updatePattern(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def findPattern(self, patternObj):
|
||||||
|
session = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
pattern = (
|
||||||
|
session.query(Pattern)
|
||||||
|
.filter(
|
||||||
|
Pattern.show_id == int(patternObj["show_id"]),
|
||||||
|
Pattern.pattern == str(patternObj["pattern"]),
|
||||||
|
)
|
||||||
|
.first()
|
||||||
|
)
|
||||||
|
|
||||||
|
if pattern is not None:
|
||||||
|
return int(pattern.id)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"PatternController.findPattern(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def getPatternsForShow(self, showId: int) -> list[Pattern]:
|
||||||
|
|
||||||
|
if type(showId) is not int:
|
||||||
|
raise ValueError(
|
||||||
|
"PatternController.getPatternsForShow(): Argument showId is required to be of type int"
|
||||||
|
)
|
||||||
|
|
||||||
|
session = None
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
return (
|
||||||
|
session.query(Pattern)
|
||||||
|
.filter(Pattern.show_id == int(showId))
|
||||||
|
.order_by(Pattern.id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"PatternController.getPatternsForShow(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def getPattern(self, patternId: int):
|
||||||
|
|
||||||
|
if type(patternId) is not int:
|
||||||
|
raise ValueError(
|
||||||
|
"PatternController.getPattern(): Argument patternId is required to be of type int"
|
||||||
|
)
|
||||||
|
|
||||||
|
session = None
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
return session.query(Pattern).filter(Pattern.id == int(patternId)).first()
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def deletePattern(self, patternId):
|
||||||
|
session = None
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first()
|
||||||
|
|
||||||
|
if pattern is not None:
|
||||||
|
session.delete(pattern)
|
||||||
|
session.commit()
|
||||||
|
self._clear_regex_cache()
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"PatternController.deletePattern(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def matchFilename(self, filename: str) -> dict:
|
||||||
|
"""Return {'match': regex match, 'pattern': Pattern} or {} when unmatched."""
|
||||||
|
session = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = self.Session()
|
||||||
|
matches = []
|
||||||
|
query = session.query(Pattern).order_by(Pattern.show_id, Pattern.id)
|
||||||
|
|
||||||
|
for pattern in query.all():
|
||||||
|
compiled = self._compile_pattern_expression(
|
||||||
|
pattern.getId(),
|
||||||
|
pattern.getPattern(),
|
||||||
|
)
|
||||||
|
patternMatch = compiled.search(str(filename))
|
||||||
|
if patternMatch is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self._validate_persisted_pattern(pattern)
|
||||||
|
matches.append({"match": patternMatch, "pattern": pattern})
|
||||||
|
|
||||||
|
if not matches:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
if len(matches) > 1:
|
||||||
|
duplicateDescriptions = ", ".join(
|
||||||
|
[
|
||||||
|
f"show #{match['pattern'].getShowId()} pattern #{match['pattern'].getId()} {match['pattern'].getPattern()!r}"
|
||||||
|
for match in matches
|
||||||
|
]
|
||||||
|
)
|
||||||
|
raise DuplicatePatternMatchError(
|
||||||
|
f"Filename {filename!r} matched more than one pattern: {duplicateDescriptions}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return matches[0]
|
||||||
|
|
||||||
|
except click.ClickException:
|
||||||
|
raise
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"PatternController.matchFilename(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
if session is not None:
|
||||||
|
session.close()
|
||||||
128
src/ffx/pattern_delete_screen.py
Normal file
128
src/ffx/pattern_delete_screen.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .show_controller import ShowController
|
||||||
|
from .pattern_controller import PatternController
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
from ffx.model.pattern import Pattern
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
class PatternDeleteScreen(Screen):
|
||||||
|
|
||||||
|
BINDINGS = [
|
||||||
|
("escape", "back", t("Back")),
|
||||||
|
]
|
||||||
|
|
||||||
|
CSS = """
|
||||||
|
|
||||||
|
Grid {
|
||||||
|
grid-size: 2;
|
||||||
|
grid-rows: 2 auto;
|
||||||
|
grid-columns: 18 5fr;
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
min-width: 90;
|
||||||
|
padding: 1;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
Input {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
Button {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
#toplabel {
|
||||||
|
height: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.two {
|
||||||
|
column-span: 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
.box {
|
||||||
|
height: 100%;
|
||||||
|
border: solid green;
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, patternId = None, showId = None):
|
||||||
|
super().__init__()
|
||||||
|
|
||||||
|
self.context = self.app.getContext()
|
||||||
|
self.Session = self.context['database']['session'] # convenience
|
||||||
|
|
||||||
|
self.__pc = PatternController(context = self.context)
|
||||||
|
self.__sc = ShowController(context = self.context)
|
||||||
|
|
||||||
|
self.__patternId = patternId
|
||||||
|
self.__pattern: Pattern = self.__pc.getPattern(patternId) if patternId is not None else {}
|
||||||
|
self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else {}
|
||||||
|
|
||||||
|
|
||||||
|
def on_mount(self):
|
||||||
|
if self.__showDescriptor:
|
||||||
|
self.query_one("#showlabel", Static).update(f"{self.__showDescriptor.getId()} - {self.__showDescriptor.getName()} ({self.__showDescriptor.getYear()})")
|
||||||
|
if not self.__pattern is None:
|
||||||
|
self.query_one("#patternlabel", Static).update(str(self.__pattern.pattern))
|
||||||
|
|
||||||
|
|
||||||
|
def compose(self):
|
||||||
|
|
||||||
|
yield Header()
|
||||||
|
|
||||||
|
with Grid():
|
||||||
|
# Row 1
|
||||||
|
yield Static(t("Are you sure to delete the following filename pattern?"), id="toplabel", classes="two")
|
||||||
|
|
||||||
|
# Row 2
|
||||||
|
yield Static("", classes="two")
|
||||||
|
|
||||||
|
# Row 3
|
||||||
|
yield Static(t("Pattern"))
|
||||||
|
yield Static("", id="patternlabel")
|
||||||
|
|
||||||
|
# Row 4
|
||||||
|
yield Static("", classes="two")
|
||||||
|
|
||||||
|
# Row 5
|
||||||
|
yield Static(t("from show"))
|
||||||
|
yield Static("", id="showlabel")
|
||||||
|
|
||||||
|
# Row 6
|
||||||
|
yield Static("", classes="two")
|
||||||
|
|
||||||
|
# Row 7
|
||||||
|
yield Button(t("Delete"), id="delete_button")
|
||||||
|
yield Button(t("Cancel"), id="cancel_button")
|
||||||
|
|
||||||
|
yield Footer()
|
||||||
|
|
||||||
|
|
||||||
|
# Event handler for button press
|
||||||
|
def on_button_pressed(self, event: Button.Pressed) -> None:
|
||||||
|
|
||||||
|
if event.button.id == "delete_button":
|
||||||
|
|
||||||
|
if self.__patternId is None:
|
||||||
|
raise click.ClickException('PatternDeleteScreen.on_button_pressed(): pattern id is undefined')
|
||||||
|
|
||||||
|
if self.__pc.deletePattern(self.__patternId):
|
||||||
|
self.dismiss(self.__pattern)
|
||||||
|
|
||||||
|
else:
|
||||||
|
#TODO: Meldung
|
||||||
|
self.app.pop_screen()
|
||||||
|
|
||||||
|
if event.button.id == "cancel_button":
|
||||||
|
self.app.pop_screen()
|
||||||
|
|
||||||
|
def action_back(self):
|
||||||
|
go_back_or_exit(self)
|
||||||
776
src/ffx/pattern_details_screen.py
Normal file
776
src/ffx/pattern_details_screen.py
Normal file
@@ -0,0 +1,776 @@
|
|||||||
|
import click, re
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button, Input, DataTable, TextArea
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from ffx.model.pattern import Pattern
|
||||||
|
|
||||||
|
from .track_details_screen import TrackDetailsScreen
|
||||||
|
from .track_delete_screen import TrackDeleteScreen
|
||||||
|
from .shifted_season_delete_screen import ShiftedSeasonDeleteScreen
|
||||||
|
from .shifted_season_details_screen import ShiftedSeasonDetailsScreen
|
||||||
|
|
||||||
|
from .tag_details_screen import TagDetailsScreen
|
||||||
|
from .tag_delete_screen import TagDeleteScreen
|
||||||
|
from .screen_support import (
|
||||||
|
add_auto_table_column,
|
||||||
|
build_screen_bootstrap,
|
||||||
|
build_screen_controllers,
|
||||||
|
go_back_or_exit,
|
||||||
|
populate_tag_table,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
|
||||||
|
from ffx.track_disposition import TrackDisposition
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
|
||||||
|
from textual.widgets._data_table import CellDoesNotExist
|
||||||
|
|
||||||
|
from ffx.file_properties import FileProperties
|
||||||
|
from ffx.iso_language import IsoLanguage
|
||||||
|
from ffx.audio_layout import AudioLayout
|
||||||
|
from ffx.model.shifted_season import ShiftedSeason
|
||||||
|
from .i18n import t
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
class PatternDetailsScreen(Screen):
|
||||||
|
|
||||||
|
BINDINGS = [
|
||||||
|
("escape", "back", t("Back")),
|
||||||
|
]
|
||||||
|
|
||||||
|
CSS = """
|
||||||
|
|
||||||
|
Grid {
|
||||||
|
grid-size: 7 20;
|
||||||
|
grid-rows: 2 2 2 2 2 2 6 2 2 8 2 2 8 2 2 8 2 2 2 2;
|
||||||
|
grid-columns: 18 1fr 1fr 1fr 1fr 1fr 1fr;
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
min-width: 140;
|
||||||
|
padding: 1;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
Input {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
Button {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataTable {
|
||||||
|
min-height: 6;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataTable .datatable--cursor {
|
||||||
|
background: darkorange;
|
||||||
|
color: black;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataTable .datatable--header {
|
||||||
|
background: steelblue;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
#toplabel {
|
||||||
|
height: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.three {
|
||||||
|
column-span: 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.four {
|
||||||
|
column-span: 4;
|
||||||
|
}
|
||||||
|
.five {
|
||||||
|
column-span: 5;
|
||||||
|
}
|
||||||
|
.six {
|
||||||
|
column-span: 6;
|
||||||
|
}
|
||||||
|
.seven {
|
||||||
|
column-span: 7;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.four_box {
|
||||||
|
min-height: 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.box {
|
||||||
|
height: 100%;
|
||||||
|
border: solid green;
|
||||||
|
}
|
||||||
|
|
||||||
|
.yellow {
|
||||||
|
tint: yellow 40%;
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
    def __init__(self, patternId = None, showId = None):
        """Screen for creating or editing a filename pattern.

        Args:
            patternId: id of an existing pattern to edit, or None to create
                a new (draft) pattern.
            showId: id of the show the pattern belongs to, or None.
        """
        super().__init__()

        # Shared bootstrap bundles the app context and the configured
        # metadata key filter lists.
        bootstrap = build_screen_bootstrap(self.app.getContext())
        self.context = bootstrap.context

        self.__removeGlobalKeys = bootstrap.remove_global_keys
        self.__ignoreGlobalKeys = bootstrap.ignore_global_keys

        # Controllers this screen needs: pattern/show/track/tag/shifted-season.
        controllers = build_screen_controllers(
            self.context,
            pattern=True,
            show=True,
            track=True,
            tag=True,
            shifted_season=True,
        )
        self.__pc = controllers['pattern']
        self.__sc = controllers['show']
        self.__tc = controllers['track']
        self.__tac = controllers['tag']
        self.__ssc = controllers['shifted_season']

        # __pattern stays None while creating; drafts hold unsaved tracks/tags
        # until the pattern is persisted.
        self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else None
        self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None
        self.__draftTracks : List[TrackDescriptor] = []
        self.__draftTags : dict[str, str] = {}
        # Row-key -> data mappings so table cursor selections can be resolved
        # back to the underlying objects.
        self.__trackRowData: dict[object, TrackDescriptor] = {}
        self.__tagRowData: dict[object, tuple[str, str]] = {}
        self.__shiftedSeasonRowData: dict[object, dict[str, int | None]] = {}
|
||||||
|
|
||||||
|
|
||||||
|
    def updateTracks(self):
        """Rebuild the streams table from the current track descriptors."""
        self.tracksTable.clear()
        self.__trackRowData = {}

        tracks = self.getCurrentTrackDescriptors()

        # Per-type running counter, shown as the displayed sub-index.
        typeCounter = {}

        td: TrackDescriptor
        for td in tracks:

            # Attachments are not listed in the table.
            if (trackType := td.getType()) != TrackType.ATTACHMENT:

                if not trackType in typeCounter.keys():
                    typeCounter[trackType] = 0

                dispoSet = td.getDispositionSet()

                trackLanguage = td.getLanguage()
                audioLayout = td.getAudioLayout()

                # Undefined layout/language values render as blank cells;
                # the layout column only applies to audio tracks.
                row = (td.getIndex(),
                       t(trackType.label()),
                       typeCounter[trackType],
                       td.getCodec().label(),
                       t(audioLayout.label()) if trackType == TrackType.AUDIO
                       and audioLayout != AudioLayout.LAYOUT_UNDEFINED else ' ',
                       trackLanguage.label() if trackLanguage != IsoLanguage.UNDEFINED else ' ',
                       td.getTitle(),
                       t('Yes') if TrackDisposition.DEFAULT in dispoSet else t('No'),
                       t('Yes') if TrackDisposition.FORCED in dispoSet else t('No'),
                       td.getSourceIndex())

                # Remember the descriptor per row so selections resolve back.
                row_key = self.tracksTable.add_row(*map(str, row))
                self.__trackRowData[row_key] = td

                typeCounter[trackType] += 1
|
||||||
|
|
||||||
|
|
||||||
|
def getCurrentTrackDescriptors(self) -> List[TrackDescriptor]:
|
||||||
|
if self.__pattern is not None:
|
||||||
|
return self.__tc.findSiblingDescriptors(self.__pattern.getId())
|
||||||
|
return list(self.__draftTracks)
|
||||||
|
|
||||||
|
|
||||||
|
def normalizeDraftTracks(self):
|
||||||
|
|
||||||
|
typeCounter = {}
|
||||||
|
|
||||||
|
for index, trackDescriptor in enumerate(self.__draftTracks):
|
||||||
|
trackDescriptor.setIndex(index)
|
||||||
|
|
||||||
|
trackType = trackDescriptor.getType()
|
||||||
|
subIndex = typeCounter.get(trackType, 0)
|
||||||
|
trackDescriptor.setSubIndex(subIndex)
|
||||||
|
typeCounter[trackType] = subIndex + 1
|
||||||
|
|
||||||
|
if trackDescriptor.getSourceIndex() < 0:
|
||||||
|
trackDescriptor.setSourceIndex(index)
|
||||||
|
|
||||||
|
|
||||||
|
def swapTracks(self, trackIndex1: int, trackIndex2: int):
|
||||||
|
|
||||||
|
ti1 = int(trackIndex1)
|
||||||
|
ti2 = int(trackIndex2)
|
||||||
|
|
||||||
|
if self.__pattern is None:
|
||||||
|
numSiblings = len(self.__draftTracks)
|
||||||
|
|
||||||
|
if ti1 < 0 or ti1 >= numSiblings:
|
||||||
|
raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex1 ({ti1}) is out of range ({numSiblings})")
|
||||||
|
|
||||||
|
if ti2 < 0 or ti2 >= numSiblings:
|
||||||
|
raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex2 ({ti2}) is out of range ({numSiblings})")
|
||||||
|
|
||||||
|
self.__draftTracks[ti1], self.__draftTracks[ti2] = self.__draftTracks[ti2], self.__draftTracks[ti1]
|
||||||
|
self.normalizeDraftTracks()
|
||||||
|
self.updateTracks()
|
||||||
|
return
|
||||||
|
|
||||||
|
siblingDescriptors: List[TrackDescriptor] = self.__tc.findSiblingDescriptors(self.__pattern.getId())
|
||||||
|
|
||||||
|
numSiblings = len(siblingDescriptors)
|
||||||
|
|
||||||
|
if ti1 < 0 or ti1 >= numSiblings:
|
||||||
|
raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex1 ({ti1}) is out of range ({numSiblings})")
|
||||||
|
|
||||||
|
if ti2 < 0 or ti2 >= numSiblings:
|
||||||
|
raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex2 ({ti2}) is out of range ({numSiblings})")
|
||||||
|
|
||||||
|
sibling1 = siblingDescriptors[trackIndex1]
|
||||||
|
sibling2 = siblingDescriptors[trackIndex2]
|
||||||
|
|
||||||
|
# raise click.ClickException(f"siblings id1={sibling1.getId()} id2={sibling2.getId()}")
|
||||||
|
|
||||||
|
subIndex2 = sibling2.getSubIndex()
|
||||||
|
|
||||||
|
sibling2.setIndex(sibling1.getIndex())
|
||||||
|
sibling2.setSubIndex(sibling1.getSubIndex())
|
||||||
|
|
||||||
|
sibling1.setIndex(trackIndex2)
|
||||||
|
sibling1.setSubIndex(subIndex2)
|
||||||
|
|
||||||
|
if not self.__tc.updateTrack(sibling1.getId(), sibling1):
|
||||||
|
raise click.ClickException('Update sibling1 failed')
|
||||||
|
if not self.__tc.updateTrack(sibling2.getId(), sibling2):
|
||||||
|
raise click.ClickException('Update sibling2 failed')
|
||||||
|
|
||||||
|
self.updateTracks()
|
||||||
|
|
||||||
|
|
||||||
|
def updateTags(self):
|
||||||
|
tags = (
|
||||||
|
self.__tac.findAllMediaTags(self.__pattern.getId())
|
||||||
|
if self.__pattern is not None
|
||||||
|
else self.__draftTags
|
||||||
|
)
|
||||||
|
|
||||||
|
self.__tagRowData = populate_tag_table(
|
||||||
|
self.tagsTable,
|
||||||
|
tags,
|
||||||
|
ignore_keys=self.__ignoreGlobalKeys,
|
||||||
|
remove_keys=self.__removeGlobalKeys,
|
||||||
|
)
|
||||||
|
|
||||||
|
    def updateShiftedSeasons(self):
        """Rebuild the numbering-mapping table from persisted shifted seasons.

        Draft (unsaved) patterns cannot have shifted seasons yet, so the
        table is simply cleared in that case.
        """
        self.shiftedSeasonsTable.clear()
        self.__shiftedSeasonRowData = {}

        if self.__pattern is None:
            return

        shiftedSeason: ShiftedSeason
        for shiftedSeason in self.__ssc.getShiftedSeasonSiblings(patternId=self.__pattern.getId()):
            shiftedSeasonObj = shiftedSeason.getObj()
            # Keep the record id with the row data so edit/delete can find it.
            shiftedSeasonObj['id'] = shiftedSeason.getId()

            # -1 encodes "unbounded"; render it as an empty cell.
            firstEpisode = shiftedSeasonObj['first_episode']
            firstEpisodeStr = str(firstEpisode) if firstEpisode != -1 else ''

            lastEpisode = shiftedSeasonObj['last_episode']
            lastEpisodeStr = str(lastEpisode) if lastEpisode != -1 else ''

            row = (
                shiftedSeasonObj['original_season'],
                firstEpisodeStr,
                lastEpisodeStr,
                shiftedSeasonObj['season_offset'],
                shiftedSeasonObj['episode_offset'],
            )

            row_key = self.shiftedSeasonsTable.add_row(*map(str, row))
            self.__shiftedSeasonRowData[row_key] = shiftedSeasonObj
|
||||||
|
|
||||||
|
def getSelectedShiftedSeasonObjFromInput(self):
|
||||||
|
|
||||||
|
shiftedSeasonObj = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
row_key, col_key = self.shiftedSeasonsTable.coordinate_to_cell_key(
|
||||||
|
self.shiftedSeasonsTable.cursor_coordinate
|
||||||
|
)
|
||||||
|
|
||||||
|
if row_key is not None:
|
||||||
|
shiftedSeasonObj = dict(self.__shiftedSeasonRowData.get(row_key, {}))
|
||||||
|
|
||||||
|
except CellDoesNotExist:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return shiftedSeasonObj
|
||||||
|
|
||||||
|
|
||||||
|
    def on_mount(self):
        """Populate the widgets once the screen is mounted."""
        if not self.__showDescriptor is None:
            self.query_one("#showlabel", Static).update(f"{self.__showDescriptor.getId()} - {self.__showDescriptor.getName()} ({self.__showDescriptor.getYear()})")

        # Pre-fill the inputs only when editing an existing pattern.
        if self.__pattern is not None:

            self.query_one("#pattern_input", Input).value = str(self.__pattern.getPattern())

        if self.__pattern and self.__pattern.quality:
            self.query_one("#quality_input", Input).value = str(self.__pattern.quality)

        if self.__pattern and self.__pattern.notes:
            self.query_one("#notes_textarea", TextArea).text = str(self.__pattern.notes)

        # Fill the three data tables.
        self.updateTags()
        self.updateTracks()
        self.updateShiftedSeasons()
|
||||||
|
|
||||||
|
    def compose(self):
        """Build the widget tree: three data tables inside a 7x20 grid.

        Rows follow the CSS grid definition; spacer Statics fill unused
        cells so the grid columns stay aligned.
        """

        self.tagsTable = DataTable(classes="seven")

        # Define the columns with headers
        self.column_key_tag_key = add_auto_table_column(self.tagsTable, t("Key"))
        self.column_key_tag_value = add_auto_table_column(self.tagsTable, t("Value"))

        self.tagsTable.cursor_type = 'row'


        self.tracksTable = DataTable(id="tracks_table", classes="seven")

        self.column_key_track_index = add_auto_table_column(self.tracksTable, t("Index"))
        self.column_key_track_type = add_auto_table_column(self.tracksTable, t("Type"))
        self.column_key_track_sub_index = add_auto_table_column(self.tracksTable, t("SubIndex"))
        self.column_key_track_codec = add_auto_table_column(self.tracksTable, t("Codec"))
        self.column_key_track_audio_layout = add_auto_table_column(self.tracksTable, t("Layout"))
        self.column_key_track_language = add_auto_table_column(self.tracksTable, t("Language"))
        self.column_key_track_title = add_auto_table_column(self.tracksTable, t("Title"))
        self.column_key_track_default = add_auto_table_column(self.tracksTable, t("Default"))
        self.column_key_track_forced = add_auto_table_column(self.tracksTable, t("Forced"))
        self.column_key_track_source_index = add_auto_table_column(self.tracksTable, t("SrcIndex"))

        self.tracksTable.cursor_type = 'row'

        self.shiftedSeasonsTable = DataTable(classes="seven")

        self.column_key_original_season = add_auto_table_column(self.shiftedSeasonsTable, t("Source Season"))
        self.column_key_first_episode = add_auto_table_column(self.shiftedSeasonsTable, t("First Episode"))
        self.column_key_last_episode = add_auto_table_column(self.shiftedSeasonsTable, t("Last Episode"))
        self.column_key_season_offset = add_auto_table_column(self.shiftedSeasonsTable, t("Season Offset"))
        self.column_key_episode_offset = add_auto_table_column(self.shiftedSeasonsTable, t("Episode Offset"))

        self.shiftedSeasonsTable.cursor_type = 'row'


        yield Header()

        with Grid():

            # Row 1: title label plus the pattern input.
            yield Static(t("Edit filename pattern") if self.__pattern is not None else t("New filename pattern"), id="toplabel")
            yield Input(type="text", id="pattern_input", classes="six")

            # Row 2: originating show plus the substitute-pattern action.
            yield Static(t("from show"))
            yield Static("", id="showlabel", classes="five")
            yield Button(t("Substitute pattern"), id="pattern_button")

            # Row 3: spacer.
            yield Static(" ", classes="seven")


            # Row 4: quality input.
            yield Static(t("Quality"))
            yield Input(type="integer", id="quality_input")
            yield Static(' ', classes="five")


            # Row 5: spacer.
            yield Static(" ", classes="seven")


            # Row 6: notes label.
            yield Static(t("Notes"))
            yield Static(" ", classes="six")

            # Row 7: notes editor.
            yield TextArea(id="notes_textarea", classes="four_box seven")


            # Row 8: spacer.
            yield Static(" ", classes="seven")

            # Row 9: shifted-season actions; only available once the
            # pattern has been saved (drafts have no id to attach to).
            yield Static(t("Numbering Mapping"))
            if self.__pattern is not None:
                yield Button(t("Add"), id="button_add_shifted_season")
                yield Button(t("Edit"), id="button_edit_shifted_season")
                yield Button(t("Delete"), id="button_delete_shifted_season")
            else:
                yield Static(" ")
                yield Static(" ")
                yield Static(" ")

            yield Static(" ")
            yield Static(" ")
            yield Static(" ")

            # Row 10: shifted seasons table.
            yield self.shiftedSeasonsTable

            # Row 11: spacer.
            yield Static(" ", classes="seven")

            # Row 12: media tag actions.
            yield Static(t("Media Tags"))
            yield Button(t("Add"), id="button_add_tag")
            yield Button(t("Edit"), id="button_edit_tag")
            yield Button(t("Delete"), id="button_delete_tag")

            yield Static(" ")
            yield Static(" ")
            yield Static(" ")

            # Row 13: tags table.
            yield self.tagsTable

            # Row 14: spacer.
            yield Static(" ", classes="seven")

            # Row 15: stream actions including reorder (Up/Down).
            yield Static(t("Streams"))
            yield Button(t("Add"), id="button_add_track")
            yield Button(t("Edit"), id="button_edit_track")
            yield Button(t("Delete"), id="button_delete_track")

            yield Static(" ")
            yield Button(t("Up"), id="button_track_up")
            yield Button(t("Down"), id="button_track_down")

            # Row 16: tracks table.
            yield self.tracksTable

            # Row 17: spacer.
            yield Static(" ", classes="seven")

            # Row 18: spacer.
            yield Static(" ", classes="seven")

            # Row 19: save/cancel.
            yield Button(t("Save"), id="save_button")
            yield Button(t("Cancel"), id="cancel_button")
            yield Static(" ", classes="five")

            # Row 20: spacer.
            yield Static(" ", classes="seven")

        yield Footer()
|
||||||
|
|
||||||
|
|
||||||
|
def getPatternFromInput(self):
|
||||||
|
return str(self.query_one("#pattern_input", Input).value)
|
||||||
|
|
||||||
|
def getQualityFromInput(self):
|
||||||
|
try:
|
||||||
|
return int(self.query_one("#quality_input", Input).value)
|
||||||
|
except ValueError:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def getNotesFromInput(self):
|
||||||
|
return str(self.query_one("#notes_textarea", TextArea).text)
|
||||||
|
|
||||||
|
|
||||||
|
def getSelectedTrackDescriptor(self):
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
row_key, col_key = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate)
|
||||||
|
|
||||||
|
if row_key is not None:
|
||||||
|
return self.__trackRowData.get(row_key)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
except CellDoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def getSelectedTag(self):
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
# Fetch the currently selected row when 'Enter' is pressed
|
||||||
|
#selected_row_index = self.table.cursor_row
|
||||||
|
row_key, col_key = self.tagsTable.coordinate_to_cell_key(self.tagsTable.cursor_coordinate)
|
||||||
|
|
||||||
|
if row_key is not None:
|
||||||
|
return self.__tagRowData.get(row_key)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
except CellDoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Dispatch every button press on this screen by button id."""
        # Check if the button pressed is the one we are interested in
        if event.button.id == "save_button":

            # Collect the form values into a plain descriptor dict.
            patternDescriptor = {}
            patternDescriptor['show_id'] = self.__showDescriptor.getId()
            patternDescriptor['pattern'] = self.getPatternFromInput()
            patternDescriptor['quality'] = self.getQualityFromInput()
            patternDescriptor['notes'] = self.getNotesFromInput()

            if self.__pattern is not None:

                # Editing: update in place and hand the result to the caller.
                if self.__pc.updatePattern(self.__pattern.getId(), patternDescriptor):
                    self.dismiss(patternDescriptor)
                else:
                    #TODO: Meldung
                    self.app.pop_screen()

            else:
                # Creating: persist the pattern together with the drafted
                # tracks and tags in one call.
                patternId = self.__pc.savePatternSchema(
                    patternDescriptor,
                    trackDescriptors=self.__draftTracks,
                    mediaTags=self.__draftTags,
                )
                if patternId:
                    self.dismiss(patternDescriptor)
                else:
                    #TODO: Meldung
                    self.app.pop_screen()


        if event.button.id == "cancel_button":
            self.app.pop_screen()

        # Shifted seasons can only be managed on a saved pattern.
        if event.button.id == "button_add_shifted_season":
            if self.__pattern is not None:
                self.app.push_screen(
                    ShiftedSeasonDetailsScreen(patternId=self.__pattern.getId()),
                    self.handle_update_shifted_season,
                )

        if event.button.id == "button_edit_shifted_season":
            selectedShiftedSeasonObj = self.getSelectedShiftedSeasonObjFromInput()
            # Only rows backed by a persisted record carry an 'id'.
            if 'id' in selectedShiftedSeasonObj.keys():
                self.app.push_screen(
                    ShiftedSeasonDetailsScreen(
                        patternId=self.__pattern.getId(),
                        shiftedSeasonId=selectedShiftedSeasonObj['id'],
                    ),
                    self.handle_update_shifted_season,
                )

        if event.button.id == "button_delete_shifted_season":
            selectedShiftedSeasonObj = self.getSelectedShiftedSeasonObjFromInput()
            if 'id' in selectedShiftedSeasonObj.keys():
                self.app.push_screen(
                    ShiftedSeasonDeleteScreen(
                        patternId=self.__pattern.getId(),
                        shiftedSeasonId=selectedShiftedSeasonObj['id'],
                    ),
                    self.handle_delete_shifted_season,
                )


        numTracks = len(self.getCurrentTrackDescriptors())

        if event.button.id == "button_add_track":
            # New tracks are appended: the next index equals the current count.
            self.app.push_screen(
                TrackDetailsScreen(
                    patternId=self.__pattern.getId() if self.__pattern is not None else None,
                    patternLabel=self.getPatternFromInput(),
                    siblingTrackDescriptors=self.getCurrentTrackDescriptors(),
                    index=numTracks,
                ),
                self.handle_add_track,
            )

        # Edit/delete need a selected row to operate on.
        selectedTrack = self.getSelectedTrackDescriptor()
        if selectedTrack is not None:
            if event.button.id == "button_edit_track":
                self.app.push_screen(
                    TrackDetailsScreen(
                        trackDescriptor=selectedTrack,
                        patternId=self.__pattern.getId() if self.__pattern is not None else None,
                        patternLabel=self.getPatternFromInput(),
                        siblingTrackDescriptors=self.getCurrentTrackDescriptors(),
                    ),
                    self.handle_edit_track,
                )
            if event.button.id == "button_delete_track":
                self.app.push_screen(
                    TrackDeleteScreen(trackDescriptor = selectedTrack),
                    self.handle_delete_track,
                )


        if event.button.id == "button_add_tag":
            self.app.push_screen(TagDetailsScreen(), self.handle_update_tag)

        if event.button.id == "button_edit_tag":
            selectedTag = self.getSelectedTag()
            if selectedTag is not None:
                tagKey, tagValue = selectedTag
                self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag)

        if event.button.id == "button_delete_tag":
            selectedTag = self.getSelectedTag()
            if selectedTag is not None:
                tagKey, tagValue = selectedTag
                self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag)


        if event.button.id == "pattern_button":
            # Replace the concrete season/episode indicator found in the
            # input with the generic indicator pattern.
            pattern = self.query_one("#pattern_input", Input).value

            patternMatch = re.search(FileProperties.SE_INDICATOR_PATTERN, pattern)

            if patternMatch:
                self.query_one("#pattern_input", Input).value = pattern.replace(patternMatch.group(1),
                                                                                FileProperties.SE_INDICATOR_PATTERN)


        if event.button.id == "button_track_up":
            # Swap the selected track with the one directly above it.
            selectedTrackDescriptor = self.getSelectedTrackDescriptor()
            if selectedTrackDescriptor is not None:
                selectedTrackIndex = selectedTrackDescriptor.getIndex()

                if selectedTrackIndex > 0 and selectedTrackIndex < self.tracksTable.row_count:
                    correspondingTrackIndex = selectedTrackIndex - 1
                    self.swapTracks(selectedTrackIndex, correspondingTrackIndex)


        if event.button.id == "button_track_down":
            # Swap the selected track with the one directly below it.
            selectedTrackDescriptor = self.getSelectedTrackDescriptor()
            if selectedTrackDescriptor is not None:
                selectedTrackIndex = selectedTrackDescriptor.getIndex()

                if selectedTrackIndex >= 0 and selectedTrackIndex < (self.tracksTable.row_count - 1):
                    correspondingTrackIndex = selectedTrackIndex + 1
                    self.swapTracks(selectedTrackIndex, correspondingTrackIndex)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_add_track(self, trackDescriptor : TrackDescriptor):
|
||||||
|
if trackDescriptor is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.__pattern is not None:
|
||||||
|
self.__tc.addTrack(trackDescriptor, patternId=self.__pattern.getId())
|
||||||
|
else:
|
||||||
|
self.__draftTracks.append(trackDescriptor)
|
||||||
|
self.normalizeDraftTracks()
|
||||||
|
|
||||||
|
self.updateTracks()
|
||||||
|
|
||||||
|
|
||||||
|
def handle_edit_track(self, trackDescriptor : TrackDescriptor):
|
||||||
|
if trackDescriptor is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.__pattern is not None:
|
||||||
|
if not self.__tc.updateTrack(trackDescriptor.getId(), trackDescriptor):
|
||||||
|
raise click.ClickException("PatternDetailsScreen.handle_edit_track(): track update failed")
|
||||||
|
else:
|
||||||
|
selectedTrack = self.getSelectedTrackDescriptor()
|
||||||
|
for index, currentTrack in enumerate(self.__draftTracks):
|
||||||
|
if (selectedTrack is not None
|
||||||
|
and currentTrack.getIndex() == selectedTrack.getIndex()
|
||||||
|
and currentTrack.getSubIndex() == selectedTrack.getSubIndex()):
|
||||||
|
self.__draftTracks[index] = trackDescriptor
|
||||||
|
break
|
||||||
|
self.normalizeDraftTracks()
|
||||||
|
|
||||||
|
self.updateTracks()
|
||||||
|
|
||||||
|
|
||||||
|
    def handle_delete_track(self, trackDescriptor : TrackDescriptor):
        """Callback from TrackDeleteScreen: remove the confirmed track."""
        if trackDescriptor is None:
            return

        if self.__pattern is not None:
            # Re-resolve the persisted track by pattern id and index before
            # deleting it.
            track = self.__tc.getTrack(trackDescriptor.getPatternId(), trackDescriptor.getIndex())

            if track is None:
                raise click.ClickException(
                    f"Track is none: patternId={trackDescriptor.getPatternId()} type={trackDescriptor.getType()} subIndex={trackDescriptor.getSubIndex()}"
                )

            self.__tc.deleteTrack(track.getId())
        else:
            # Draft mode: drop the matching entry and renumber the rest.
            self.__draftTracks = [
                currentTrack
                for currentTrack in self.__draftTracks
                if not (
                    currentTrack.getIndex() == trackDescriptor.getIndex()
                    and currentTrack.getSubIndex() == trackDescriptor.getSubIndex()
                )
            ]
            self.normalizeDraftTracks()

        self.updateTracks()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def handle_update_tag(self, tag):
|
||||||
|
if tag is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.__pattern is None:
|
||||||
|
self.__draftTags[str(tag[0])] = str(tag[1])
|
||||||
|
else:
|
||||||
|
if self.__tac.updateMediaTag(self.__pattern.getId(), tag[0], tag[1]) is None:
|
||||||
|
raise click.ClickException("PatternDetailsScreen.handle_update_tag(): tag update failed")
|
||||||
|
|
||||||
|
self.updateTags()
|
||||||
|
|
||||||
|
def handle_delete_tag(self, tag):
|
||||||
|
if tag is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.__pattern is None:
|
||||||
|
self.__draftTags.pop(str(tag[0]), None)
|
||||||
|
self.updateTags()
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.__tac.deleteMediaTagByKey(self.__pattern.getId(), tag[0]):
|
||||||
|
self.updateTags()
|
||||||
|
else:
|
||||||
|
raise click.ClickException('tag delete failed')
|
||||||
|
|
||||||
|
    def handle_update_shifted_season(self, screenResult):
        """Callback from ShiftedSeasonDetailsScreen: refresh the mapping table."""
        self.updateShiftedSeasons()
|
||||||
|
|
||||||
|
    def action_back(self):
        """Handle the Escape binding: leave this screen (or exit the app)."""
        go_back_or_exit(self)
|
||||||
|
|
||||||
|
    def handle_delete_shifted_season(self, screenResult):
        """Callback from ShiftedSeasonDeleteScreen: refresh the mapping table."""
        self.updateShiftedSeasons()
|
||||||
169
src/ffx/process.py
Normal file
169
src/ffx/process.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
import os
|
||||||
|
import shlex
|
||||||
|
import subprocess
|
||||||
|
from typing import Iterable, List
|
||||||
|
|
||||||
|
from .logging_utils import get_ffx_logger
|
||||||
|
|
||||||
|
# Shell-convention return codes reported when execution fails early
# (mirroring timeout(1) and "command not found" shells).
COMMAND_TIMED_OUT_RETURN_CODE = 124
COMMAND_NOT_FOUND_RETURN_CODE = 127
# Valid niceness range for nice(1); 99 is the "not configured" sentinel.
MIN_NICENESS = -20
MAX_NICENESS = 19
DISABLED_NICENESS_SENTINEL = 99
# CPU limit settings: 0 disables the limit; percentages must be 1-100.
DISABLED_CPU_PERCENT_SENTINEL = 0
MIN_CPU_PERCENT = 1
MAX_CPU_PERCENT = 100
|
||||||
|
|
||||||
|
|
||||||
|
def formatCommandSequence(commandSequence: Iterable[str]) -> str:
    """Return the command sequence as a single shell-quoted string."""
    tokens = [str(token) for token in commandSequence]
    return shlex.join(tokens)
|
||||||
|
|
||||||
|
|
||||||
|
def normalizeNiceness(niceness) -> int | None:
    """Validate a configured niceness value.

    Returns None when the value is unset or equals the disable sentinel;
    otherwise returns the int value.

    Raises:
        ValueError: if the value lies outside the nice(1) range.
    """
    if niceness is None:
        return None

    value = int(niceness)
    if value == DISABLED_NICENESS_SENTINEL:
        return None

    if not (MIN_NICENESS <= value <= MAX_NICENESS):
        raise ValueError(
            f"Niceness must be between {MIN_NICENESS} and {MAX_NICENESS}, "
            + f"or {DISABLED_NICENESS_SENTINEL} to disable."
        )

    return value
|
||||||
|
|
||||||
|
|
||||||
|
def getPresentCpuCount() -> int:
    """Return the number of CPUs usable by this process (at least 1).

    Prefers the scheduler affinity mask where the platform supports it,
    falling back to os.cpu_count().
    """
    affinityFn = getattr(os, 'sched_getaffinity', None)
    if affinityFn is not None:
        usableCpus = affinityFn(0)
        if usableCpus:
            return len(usableCpus)

    total = os.cpu_count()
    return total if total and total > 0 else 1
|
||||||
|
|
||||||
|
|
||||||
|
def normalizeCpuPercent(cpuPercent) -> int | None:
    """Normalize a CPU limit setting to an absolute cpulimit value.

    Accepts None (disabled), the 0 sentinel (disabled), a positive absolute
    value such as 200, or a machine-wide percentage such as '25%' which is
    scaled by the number of present CPUs.

    Raises:
        ValueError: for out-of-range percentages or non-positive absolutes.
    """
    if cpuPercent is None:
        return None

    text = str(cpuPercent).strip()

    if text.endswith('%'):
        # Percentage form: validate the bare number, then scale by CPU count.
        percentValue = int(text[:-1].strip())
        if percentValue == DISABLED_CPU_PERCENT_SENTINEL:
            return None

        if not (MIN_CPU_PERCENT <= percentValue <= MAX_CPU_PERCENT):
            raise ValueError(
                f"CPU percentage must be between {MIN_CPU_PERCENT}% and {MAX_CPU_PERCENT}%, "
                + f"or {DISABLED_CPU_PERCENT_SENTINEL} to disable."
            )

        return percentValue * getPresentCpuCount()

    # Absolute form: any positive integer is accepted as-is.
    absoluteValue = int(text)
    if absoluteValue == DISABLED_CPU_PERCENT_SENTINEL:
        return None

    if absoluteValue < MIN_CPU_PERCENT:
        raise ValueError(
            "CPU limit must be a positive absolute value such as 200, "
            + f"a percentage such as 25%, or {DISABLED_CPU_PERCENT_SENTINEL} to disable."
        )

    return absoluteValue
|
||||||
|
|
||||||
|
|
||||||
|
def getWrappedCommandSequence(commandSequence: List[str], context: dict = None) -> List[str]:
    """Wrap a command with optional nice/cpulimit resource controls.

    niceness: -20 to 19, disabled when unset
    cpu limit: positive absolute cpulimit value, or a machine-wide percentage

    When both limits are configured, cpulimit wraps a nice-adjusted command:
        cpulimit -l <cpu> -- nice -n <niceness> <command>
    """
    limits = (context or {}).get('resource_limits', {})
    niceness = normalizeNiceness(limits.get('niceness'))
    # 'cpu_limit' is the primary key; 'cpu_percent' is the legacy fallback.
    cpuLimit = normalizeCpuPercent(limits.get('cpu_limit', limits.get('cpu_percent')))

    wrapped = [str(token) for token in commandSequence]

    if niceness is not None:
        wrapped = ['nice', '-n', str(niceness), *wrapped]
    if cpuLimit is not None:
        wrapped = ['cpulimit', '-l', str(cpuLimit), '--', *wrapped]

    return wrapped
|
||||||
|
|
||||||
|
|
||||||
|
def getProcessTimeoutSeconds(context: dict = None, timeoutSeconds: float = None):
    """Resolve the effective process timeout.

    An explicit timeoutSeconds argument wins over the context's
    resource_limits.timeout_seconds setting. Returns a positive float, or
    None when no positive timeout is configured.
    """
    effective = timeoutSeconds
    if effective is None:
        effective = (context or {}).get('resource_limits', {}).get('timeout_seconds')

    if effective is None:
        return None

    effective = float(effective)
    # Zero or negative timeouts mean "no timeout".
    return effective if effective > 0 else None
|
||||||
|
|
||||||
|
|
||||||
|
def executeProcess(
    commandSequence: List[str],
    directory: str = None,
    context: dict = None,
    timeoutSeconds: float = None,
):
    """Run a command with optional resource limits and timeout.

    The command is wrapped with nice/cpulimit according to the context's
    resource_limits and executed via subprocess.run with captured output.

    Returns:
        (stdout, stderr, returncode). A missing executable yields return
        code 127, a timeout yields 124, mirroring shell conventions.
    """
    # Prefer a logger supplied by the caller's context.
    logger = context['logger'] if context is not None and 'logger' in context else get_ffx_logger()
    wrappedCommandSequence = getWrappedCommandSequence(commandSequence, context=context)
    timeoutSeconds = getProcessTimeoutSeconds(context=context, timeoutSeconds=timeoutSeconds)

    logger.debug(
        "executeProcess() cwd=%s timeout=%s command=%s",
        directory or '.',
        timeoutSeconds if timeoutSeconds is not None else 'none',
        formatCommandSequence(wrappedCommandSequence),
    )

    try:
        completed = subprocess.run(
            wrappedCommandSequence,
            capture_output=True,
            text=True,
            cwd=directory,
            timeout=timeoutSeconds,
            check=False,
        )
    except FileNotFoundError as ex:
        # Executable (or cwd) missing: report like shell "command not found".
        error = (
            "Command not found while running "
            + f"{formatCommandSequence(wrappedCommandSequence)}: {ex.filename or ex}"
        )
        logger.error(error)
        return '', error, COMMAND_NOT_FOUND_RETURN_CODE
    except subprocess.TimeoutExpired as ex:
        # Preserve whatever output the process produced before the timeout.
        stdout = ex.stdout or ''
        stderr = ex.stderr or ''
        error = (
            f"Command timed out after {timeoutSeconds} seconds while running "
            + formatCommandSequence(wrappedCommandSequence)
        )
        if stderr:
            error = f"{error}\n{stderr}"
        logger.error(error)
        return stdout, error, COMMAND_TIMED_OUT_RETURN_CODE

    if completed.returncode != 0:
        logger.warning(
            "executeProcess() rc=%s command=%s",
            completed.returncode,
            formatCommandSequence(wrappedCommandSequence),
        )

    return completed.stdout, completed.stderr, completed.returncode
|
||||||
154
src/ffx/screen_support.py
Normal file
154
src/ffx/screen_support.py
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Mapping
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from rich.cells import cell_len
|
||||||
|
from rich.measure import measure_renderables
|
||||||
|
from rich.text import Text
|
||||||
|
|
||||||
|
from .helper import formatRichColor
|
||||||
|
from .pattern_controller import PatternController
|
||||||
|
from .show_controller import ShowController
|
||||||
|
from .shifted_season_controller import ShiftedSeasonController
|
||||||
|
from .tag_controller import TagController
|
||||||
|
from .tmdb_controller import TmdbController
|
||||||
|
from .track_controller import TrackController
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ScreenBootstrap:
    """Immutable bundle of startup data shared by the editor screens.

    Produced by build_screen_bootstrap() from the application context; the
    remove_* lists are intentionally empty when metadata cleanup is disabled.
    """

    context: dict  # full application context passed in by the caller
    configuration_data: dict  # result of context['config'].getData()
    signature_tags: dict  # metadata 'signature' section of the configuration
    apply_cleanup: bool  # context['apply_metadata_cleanup'] (defaults to True)
    remove_global_keys: list  # metadata 'remove' keys ([] when cleanup disabled)
    ignore_global_keys: list  # metadata 'ignore' keys
    remove_track_keys: list  # metadata['streams'] 'remove' keys ([] when cleanup disabled)
    ignore_track_keys: list  # metadata['streams'] 'ignore' keys
|
||||||
|
|
||||||
|
|
||||||
|
def build_screen_bootstrap(context: dict) -> ScreenBootstrap:
    """Collect the configuration slices every editor screen needs at startup.

    Reads the 'metadata' (and nested 'streams') sections of the configuration
    and honours context['apply_metadata_cleanup']: when cleanup is disabled the
    removal key lists are returned empty so nothing gets stripped.
    """
    config_data = context['config'].getData()
    metadata_cfg = config_data.get('metadata', {})
    stream_cfg = metadata_cfg.get('streams', {})
    cleanup_enabled = bool(context.get('apply_metadata_cleanup', True))

    def _removals(section):
        # Removal lists only apply while metadata cleanup is enabled.
        return section.get('remove', []) if cleanup_enabled else []

    return ScreenBootstrap(
        context=context,
        configuration_data=config_data,
        signature_tags=metadata_cfg.get('signature', {}),
        apply_cleanup=cleanup_enabled,
        remove_global_keys=_removals(metadata_cfg),
        ignore_global_keys=metadata_cfg.get('ignore', []),
        remove_track_keys=_removals(stream_cfg),
        ignore_track_keys=stream_cfg.get('ignore', []),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def build_screen_controllers(
    context: dict,
    *,
    pattern: bool = False,
    show: bool = False,
    track: bool = False,
    tag: bool = False,
    tmdb: bool = False,
    shifted_season: bool = False,
) -> dict[str, object]:
    """Instantiate only the requested controllers, keyed by their role name.

    Each keyword flag enables one controller; nothing is constructed for
    disabled flags, so an all-default call returns an empty dict.
    """
    # Factories are lazy so that no controller is built unless requested.
    factories = (
        ('pattern', pattern, lambda: PatternController(context=context)),
        ('show', show, lambda: ShowController(context=context)),
        ('track', track, lambda: TrackController(context=context)),
        ('tag', tag, lambda: TagController(context=context)),
        ('tmdb', tmdb, lambda: TmdbController()),
        ('shifted_season', shifted_season, lambda: ShiftedSeasonController(context=context)),
    )
    return {name: make() for name, wanted, make in factories if wanted}
|
||||||
|
|
||||||
|
|
||||||
|
def populate_tag_table(
    table,
    tags: Mapping[str, object],
    *,
    ignore_keys: list[str],
    remove_keys: list[str],
) -> dict[object, tuple[str, str]]:
    """Render display rows while keeping raw tag data addressable by row key."""
    table.clear()

    rows: dict[object, tuple[str, str]] = {}
    for tag_name, tag_content in tags.items():
        name_text = str(tag_name)
        content_text = str(tag_content)

        # 'remove' takes precedence over 'ignore' when a key is in both lists.
        if name_text in remove_keys:
            row_color = "red"
        elif name_text in ignore_keys:
            row_color = "blue"
        else:
            row_color = None

        handle = table.add_row(
            str(formatRichColor(name_text, row_color)),
            str(formatRichColor(content_text, row_color)),
        )
        rows[handle] = (name_text, content_text)

    return rows
|
||||||
|
|
||||||
|
|
||||||
|
def localized_column_width(label: str, minimum: int, *, padding: int = 2) -> int:
    """Ensure translated table headers fit within their visible column width.

    Takes the maximum of the requested minimum, the character count, and the
    terminal cell width (wide CJK glyphs occupy two cells), each plus padding.
    """
    text = str(label)
    pad = int(padding)
    candidates = (
        int(minimum),
        len(text) + pad,
        int(cell_len(text)) + pad,
    )
    return max(candidates)
|
||||||
|
|
||||||
|
|
||||||
|
def add_auto_table_column(table, label, *, key=None, default=None):
    """Add a DataTable column that sizes itself from header and cell content.

    Thin seam over table.add_column() so all screens create auto-width
    columns through a single code path.
    """
    column_key = table.add_column(label, key=key, default=default)
    return column_key
|
||||||
|
|
||||||
|
|
||||||
|
def update_table_column_label(table, column_key, label) -> None:
    """Update a column label and keep auto-width columns in sync with it."""
    column = table.columns.get(column_key)
    if column is None:
        # Unknown column key: nothing to update.
        return

    # Plain strings may carry Rich markup; renderables pass through untouched.
    rendered = Text.from_markup(label) if isinstance(label, str) else label
    column.label = rendered

    if column.auto_width:
        # Re-measure the new label so an auto-width column never clips it.
        console = table.app.console
        measured_width = measure_renderables(
            console,
            console.options,
            [rendered],
        ).maximum
        column.content_width = max(column.content_width, measured_width)

    table.refresh()
|
||||||
|
|
||||||
|
|
||||||
|
def go_back_or_exit(screen) -> None:
    """Pop the current screen when possible, otherwise exit the app."""
    app = screen.app
    # A stack of two or fewer (base screen + this one) leaves nothing to
    # return to, so the whole application shuts down instead.
    stack = getattr(app, "screen_stack", ())
    if len(stack) <= 2:
        app.exit()
    else:
        app.pop_screen()
|
||||||
23
src/ffx/settings_screen.py
Normal file
23
src/ffx/settings_screen.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from textual.app import ComposeResult
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Footer, Placeholder
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
|
||||||
|
class SettingsScreen(Screen):
    """Placeholder settings screen; ESC returns to the previous screen."""

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    def __init__(self):
        super().__init__()
        # Fix: the original assigned the context to a throwaway local
        # (`context = ...`), which was dead code.  Store it on the instance
        # like the other screens (e.g. ShiftedSeasonDeleteScreen) do.
        self.context = self.app.getContext()

    def compose(self) -> ComposeResult:
        """Lay out the (currently placeholder) settings UI."""
        # Row 1
        yield Placeholder(t("Settings Screen"))
        yield Footer()

    def action_back(self):
        """Handle the 'escape' binding."""
        go_back_or_exit(self)
|
||||||
450
src/ffx/shifted_season_controller.py
Normal file
450
src/ffx/shifted_season_controller.py
Normal file
@@ -0,0 +1,450 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.model.shifted_season import ShiftedSeason
|
||||||
|
|
||||||
|
|
||||||
|
class EpisodeOrderException(Exception):
    """Raised when a rule's last_episode precedes its first_episode."""
|
||||||
|
|
||||||
|
|
||||||
|
class RangeOverlapException(Exception):
    """Raised when a rule's episode range overlaps an existing sibling rule."""
|
||||||
|
|
||||||
|
|
||||||
|
class ShiftedSeasonOwnerException(Exception):
    """Raised when a rule is not owned by exactly one of show or pattern."""
|
||||||
|
|
||||||
|
|
||||||
|
class ShiftedSeasonController:
    """CRUD and lookup logic for season/episode shift rules.

    A ShiftedSeason rule is owned by exactly one of a show or a pattern and
    maps an original (season, episode) onto a shifted pair via two offsets.
    The sentinel value -1 means "open ended" for the first/last episode
    bounds.  Domain errors are surfaced as click.ClickException so the CLI
    layer can report them directly.

    Fix vs. previous revision: resolveShiftSeason()'s generic error message
    wrongly named shiftSeason(); it now reports the correct method.
    """

    def __init__(self, context):
        """Keep the application context and the session factory at hand."""
        self.context = context
        self.Session = self.context['database']['session']  # convenience

    def _resolve_owner(self, showId=None, patternId=None):
        """Validate that exactly one owner id was given and normalize it.

        Returns a dict with 'show_id', 'pattern_id' (exactly one non-None)
        and a human-readable 'label' for error messages.
        """
        hasShow = showId is not None
        hasPattern = patternId is not None

        # Both set or both missing is invalid: a rule has exactly one owner.
        if hasShow == hasPattern:
            raise ShiftedSeasonOwnerException(
                "ShiftedSeason rules require exactly one owner: either showId or patternId."
            )

        if hasShow:
            # type() (not isinstance) deliberately rejects bool and int subclasses.
            if type(showId) is not int:
                raise ValueError(
                    "ShiftedSeasonController: Argument showId is required to be of type int"
                )
            return {
                'show_id': int(showId),
                'pattern_id': None,
                'label': f"show #{int(showId)}",
            }

        if type(patternId) is not int:
            raise ValueError(
                "ShiftedSeasonController: Argument patternId is required to be of type int"
            )
        return {
            'show_id': None,
            'pattern_id': int(patternId),
            'label': f"pattern #{int(patternId)}",
        }

    def _apply_owner_filter(self, query, owner):
        """Restrict a ShiftedSeason query to the rules of one owner."""
        if owner['pattern_id'] is not None:
            return query.filter(ShiftedSeason.pattern_id == owner['pattern_id'])
        return query.filter(ShiftedSeason.show_id == owner['show_id'])

    def _normalize_shifted_season_fields(self, shiftedSeasonObj: dict):
        """Coerce the rule fields to int and validate episode ordering.

        Raises EpisodeOrderException when both bounds are concrete (!= -1)
        and last_episode < first_episode; KeyError/ValueError propagate for
        missing or non-numeric fields.
        """
        if type(shiftedSeasonObj) is not dict:
            raise ValueError(
                "ShiftedSeasonController: Argument shiftedSeasonObj is required to be of type dict"
            )

        fields = {
            'original_season': int(shiftedSeasonObj['original_season']),
            'first_episode': int(shiftedSeasonObj['first_episode']),
            'last_episode': int(shiftedSeasonObj['last_episode']),
            'season_offset': int(shiftedSeasonObj['season_offset']),
            'episode_offset': int(shiftedSeasonObj['episode_offset']),
        }

        firstEpisode = fields['first_episode']
        lastEpisode = fields['last_episode']
        if firstEpisode != -1 and lastEpisode != -1 and lastEpisode < firstEpisode:
            raise EpisodeOrderException(
                "ShiftedSeason last_episode must be greater than or equal to first_episode."
            )

        return fields

    def _ranges_overlap(self, firstEpisodeA, lastEpisodeA, firstEpisodeB, lastEpisodeB):
        """Return True when two episode ranges intersect (-1 = unbounded)."""
        startA = float('-inf') if int(firstEpisodeA) == -1 else int(firstEpisodeA)
        endA = float('inf') if int(lastEpisodeA) == -1 else int(lastEpisodeA)
        startB = float('-inf') if int(firstEpisodeB) == -1 else int(firstEpisodeB)
        endB = float('inf') if int(lastEpisodeB) == -1 else int(lastEpisodeB)
        return startA <= endB and startB <= endA

    def _ordered_query(self, session, owner):
        """Owner-scoped query ordered deterministically for matching/listing."""
        q = self._apply_owner_filter(session.query(ShiftedSeason), owner)
        return q.order_by(
            ShiftedSeason.original_season.asc(),
            ShiftedSeason.first_episode.asc(),
            ShiftedSeason.last_episode.asc(),
            ShiftedSeason.id.asc(),
        )

    def _find_matching_rule(self, session, owner, season: int, episode: int):
        """Return the first rule of the owner covering (season, episode), or None."""
        for shiftedSeasonEntry in self._ordered_query(session, owner).all():
            if (
                season == shiftedSeasonEntry.getOriginalSeason()
                and (
                    shiftedSeasonEntry.getFirstEpisode() == -1
                    or episode >= shiftedSeasonEntry.getFirstEpisode()
                )
                and (
                    shiftedSeasonEntry.getLastEpisode() == -1
                    or episode <= shiftedSeasonEntry.getLastEpisode()
                )
            ):
                return shiftedSeasonEntry
        return None

    def checkShiftedSeason(
        self,
        showId: int | None = None,
        shiftedSeasonObj: dict | None = None,
        shiftedSeasonId: int = 0,
        patternId: int | None = None,
    ):
        """
        Check whether a shifted-season rule is valid within one owner scope.

        Returns False when the candidate rule overlaps any sibling rule on
        the same original season (excluding shiftedSeasonId itself, so a
        rule under edit does not collide with its own stored row).
        """
        session = None
        try:
            owner = self._resolve_owner(showId=showId, patternId=patternId)
            fields = self._normalize_shifted_season_fields(shiftedSeasonObj)
            session = self.Session()

            q = self._ordered_query(session, owner)
            if shiftedSeasonId:
                q = q.filter(ShiftedSeason.id != int(shiftedSeasonId))

            for siblingShiftedSeason in q.all():
                # Rules on different original seasons can never collide.
                if fields['original_season'] != siblingShiftedSeason.getOriginalSeason():
                    continue

                if self._ranges_overlap(
                    fields['first_episode'],
                    fields['last_episode'],
                    siblingShiftedSeason.getFirstEpisode(),
                    siblingShiftedSeason.getLastEpisode(),
                ):
                    return False

            return True

        except (EpisodeOrderException, ShiftedSeasonOwnerException) as ex:
            raise click.ClickException(str(ex))
        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.checkShiftedSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def addShiftedSeason(
        self,
        showId: int | None = None,
        shiftedSeasonObj: dict | None = None,
        patternId: int | None = None,
    ):
        """Persist a new rule after overlap validation; return its id."""
        session = None
        try:
            owner = self._resolve_owner(showId=showId, patternId=patternId)
            fields = self._normalize_shifted_season_fields(shiftedSeasonObj)

            if not self.checkShiftedSeason(
                showId=owner['show_id'],
                patternId=owner['pattern_id'],
                shiftedSeasonObj=fields,
            ):
                raise RangeOverlapException(
                    f"ShiftedSeason rule overlaps with an existing rule for {owner['label']}."
                )

            session = self.Session()
            shiftedSeason = ShiftedSeason(
                show_id=owner['show_id'],
                pattern_id=owner['pattern_id'],
                original_season=fields['original_season'],
                first_episode=fields['first_episode'],
                last_episode=fields['last_episode'],
                season_offset=fields['season_offset'],
                episode_offset=fields['episode_offset'],
            )
            session.add(shiftedSeason)
            session.commit()
            return shiftedSeason.getId()

        except (EpisodeOrderException, RangeOverlapException, ShiftedSeasonOwnerException) as ex:
            raise click.ClickException(str(ex))
        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.addShiftedSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def updateShiftedSeason(self, shiftedSeasonId: int, shiftedSeasonObj: dict):
        """Update an existing rule in place; return True, or False if not found.

        The owner is taken from the stored row, and the row itself is
        excluded from the overlap check via shiftedSeasonId.
        """
        if type(shiftedSeasonId) is not int:
            raise ValueError(
                "ShiftedSeasonController.updateShiftedSeason(): Argument shiftedSeasonId is required to be of type int"
            )

        session = None
        try:
            fields = self._normalize_shifted_season_fields(shiftedSeasonObj)
            session = self.Session()

            shiftedSeason = (
                session.query(ShiftedSeason)
                .filter(ShiftedSeason.id == int(shiftedSeasonId))
                .first()
            )

            if shiftedSeason is None:
                return False

            owner = self._resolve_owner(
                showId=shiftedSeason.getShowId(),
                patternId=shiftedSeason.getPatternId(),
            )
            if not self.checkShiftedSeason(
                showId=owner['show_id'],
                patternId=owner['pattern_id'],
                shiftedSeasonObj=fields,
                shiftedSeasonId=shiftedSeasonId,
            ):
                raise RangeOverlapException(
                    f"ShiftedSeason rule overlaps with an existing rule for {owner['label']}."
                )

            shiftedSeason.original_season = fields['original_season']
            shiftedSeason.first_episode = fields['first_episode']
            shiftedSeason.last_episode = fields['last_episode']
            shiftedSeason.season_offset = fields['season_offset']
            shiftedSeason.episode_offset = fields['episode_offset']

            session.commit()
            return True

        except (EpisodeOrderException, RangeOverlapException, ShiftedSeasonOwnerException) as ex:
            raise click.ClickException(str(ex))
        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.updateShiftedSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def findShiftedSeason(
        self,
        showId: int | None = None,
        originalSeason: int | None = None,
        firstEpisode: int | None = None,
        lastEpisode: int | None = None,
        patternId: int | None = None,
    ):
        """Return the id of the exactly-matching rule for an owner, or None."""
        if type(originalSeason) is not int:
            raise ValueError(
                "ShiftedSeasonController.findShiftedSeason(): Argument originalSeason is required to be of type int"
            )

        if type(firstEpisode) is not int:
            raise ValueError(
                "ShiftedSeasonController.findShiftedSeason(): Argument firstEpisode is required to be of type int"
            )

        if type(lastEpisode) is not int:
            raise ValueError(
                "ShiftedSeasonController.findShiftedSeason(): Argument lastEpisode is required to be of type int"
            )

        session = None
        try:
            owner = self._resolve_owner(showId=showId, patternId=patternId)
            session = self.Session()
            shiftedSeason = (
                self._apply_owner_filter(session.query(ShiftedSeason), owner)
                .filter(
                    ShiftedSeason.original_season == int(originalSeason),
                    ShiftedSeason.first_episode == int(firstEpisode),
                    ShiftedSeason.last_episode == int(lastEpisode),
                )
                .first()
            )

            return shiftedSeason.getId() if shiftedSeason is not None else None

        except ShiftedSeasonOwnerException as ex:
            raise click.ClickException(str(ex))
        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.findShiftedSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def getShiftedSeasonSiblings(
        self,
        showId: int | None = None,
        patternId: int | None = None,
    ):
        """Return all rules belonging to one owner, deterministically ordered."""
        session = None
        try:
            owner = self._resolve_owner(showId=showId, patternId=patternId)
            session = self.Session()
            return self._ordered_query(session, owner).all()

        except ShiftedSeasonOwnerException as ex:
            raise click.ClickException(str(ex))
        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.getShiftedSeasonSiblings(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def getShiftedSeason(self, shiftedSeasonId: int):
        """Fetch a single rule by primary key; returns None when missing."""
        if type(shiftedSeasonId) is not int:
            raise ValueError(
                "ShiftedSeasonController.getShiftedSeason(): Argument shiftedSeasonId is required to be of type int"
            )

        session = None
        try:
            session = self.Session()
            return (
                session.query(ShiftedSeason)
                .filter(ShiftedSeason.id == int(shiftedSeasonId))
                .first()
            )

        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.getShiftedSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def deleteShiftedSeason(self, shiftedSeasonId):
        """Delete a rule by id; True when a row was removed, False otherwise."""
        if type(shiftedSeasonId) is not int:
            raise ValueError(
                "ShiftedSeasonController.deleteShiftedSeason(): Argument shiftedSeasonId is required to be of type int"
            )

        session = None
        try:
            session = self.Session()
            shiftedSeason = (
                session.query(ShiftedSeason)
                .filter(ShiftedSeason.id == int(shiftedSeasonId))
                .first()
            )

            if shiftedSeason is not None:
                session.delete(shiftedSeason)
                session.commit()
                return True
            return False

        except Exception as ex:
            raise click.ClickException(
                f"ShiftedSeasonController.deleteShiftedSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()

    def shiftSeason(self, showId, season, episode, patternId=None):
        """Apply the matching shift rule and log when something changed.

        Returns (shiftedSeason, shiftedEpisode); -1 inputs pass through
        untouched (unrecognized season/episode).
        """
        if season == -1 or episode == -1:
            return season, episode

        shiftedSeason, shiftedEpisode, sourceLabel = self.resolveShiftSeason(
            showId,
            season,
            episode,
            patternId=patternId,
        )

        if shiftedSeason != season or shiftedEpisode != episode:
            self.context['logger'].info(
                f"Setting season shift {season}/{episode} -> {shiftedSeason}/{shiftedEpisode} from {sourceLabel}"
            )

        return shiftedSeason, shiftedEpisode

    def resolveShiftSeason(self, showId, season, episode, patternId=None):
        """Resolve the shifted (season, episode, source) for one input pair.

        Pattern-owned rules take precedence over show-owned rules; the
        third element names the rule's origin ("pattern", "show",
        "default" when no rule matched, or "unrecognized" for -1 input).
        """
        if season == -1 or episode == -1:
            return season, episode, "unrecognized"

        session = None
        try:
            session = self.Session()
            activeShift = None

            # Pattern rules are consulted first ...
            if patternId is not None:
                activeShift = self._find_matching_rule(
                    session,
                    self._resolve_owner(patternId=patternId),
                    season=int(season),
                    episode=int(episode),
                )

            # ... and show rules only when no pattern rule matched.
            if activeShift is None and showId is not None and showId != -1:
                activeShift = self._find_matching_rule(
                    session,
                    self._resolve_owner(showId=showId),
                    season=int(season),
                    episode=int(episode),
                )

            if activeShift is None:
                shiftedSeason = season
                shiftedEpisode = episode
                sourceLabel = "default"
            else:
                shiftedSeason = season + activeShift.getSeasonOffset()
                shiftedEpisode = episode + activeShift.getEpisodeOffset()
                sourceLabel = (
                    "pattern"
                    if activeShift.getPatternId() is not None
                    else "show"
                )
            return shiftedSeason, shiftedEpisode, sourceLabel

        except ShiftedSeasonOwnerException as ex:
            raise click.ClickException(str(ex))
        except Exception as ex:
            # Fix: previously reported "shiftSeason()" — wrong method name.
            raise click.ClickException(
                f"ShiftedSeasonController.resolveShiftSeason(): {repr(ex)}"
            )
        finally:
            if session is not None:
                session.close()
|
||||||
152
src/ffx/shifted_season_delete_screen.py
Normal file
152
src/ffx/shifted_season_delete_screen.py
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .shifted_season_controller import ShiftedSeasonController
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
from ffx.model.shifted_season import ShiftedSeason
|
||||||
|
|
||||||
|
|
||||||
|
# Result screen: dismissed with the deleted shifted-season id (int).
|
||||||
|
class ShiftedSeasonDeleteScreen(Screen):
    """Confirmation dialog for deleting one shifted-season rule.

    On successful deletion the screen is dismissed with the deleted rule's
    id; on failure or cancel the screen is popped without a result.
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    CSS = """

    Grid {
        grid-size: 2;
        grid-rows: 2 auto;
        grid-columns: 18 5fr;
        height: 100%;
        width: 100%;
        min-width: 90;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, showId = None, patternId = None, shiftedSeasonId = None):
        # Exactly one of showId/patternId identifies the rule's owner;
        # shiftedSeasonId is the rule to delete.
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        self.__ssc = ShiftedSeasonController(context = self.context)

        self._showId = showId
        self._patternId = patternId
        self.__shiftedSeasonId = shiftedSeasonId

    def on_mount(self):
        # Load the rule and fill the read-only labels with its current values.
        shiftedSeason: ShiftedSeason = self.__ssc.getShiftedSeason(self.__shiftedSeasonId)

        # Pattern ownership takes precedence in the displayed label.
        ownerLabel = (
            t("pattern #{id}", id=self._patternId)
            if self._patternId is not None
            else t("show #{id}", id=self._showId)
        )
        self.query_one("#static_owner", Static).update(ownerLabel)
        self.query_one("#static_original_season", Static).update(str(shiftedSeason.getOriginalSeason()))
        self.query_one("#static_first_episode", Static).update(str(shiftedSeason.getFirstEpisode()))
        self.query_one("#static_last_episode", Static).update(str(shiftedSeason.getLastEpisode()))
        self.query_one("#static_season_offset", Static).update(str(shiftedSeason.getSeasonOffset()))
        self.query_one("#static_episode_offset", Static).update(str(shiftedSeason.getEpisodeOffset()))

    def compose(self):
        # Static read-only summary of the rule plus Delete/Cancel buttons.
        yield Header()

        with Grid():
            # Row 1
            yield Static(t("Are you sure to delete the following shifted season?"), id="toplabel", classes="two")

            # Row 2
            yield Static(" ", classes="two")

            # Row 3
            yield Static(t("from"))
            yield Static(" ", id="static_owner")

            # Row 4
            yield Static(" ", classes="two")

            # Row 5
            yield Static(t("Source Season"))
            yield Static(" ", id="static_original_season")

            # Row 6
            yield Static(t("First episode"))
            yield Static(" ", id="static_first_episode")

            # Row 7
            yield Static(t("Last episode"))
            yield Static(" ", id="static_last_episode")

            # Row 8
            yield Static(t("Season Offset"))
            yield Static(" ", id="static_season_offset")

            # Row 9
            yield Static(t("Episode offset"))
            yield Static(" ", id="static_episode_offset")

            # Row 10
            yield Static(" ", classes="two")

            # Row 11
            yield Button(t("Delete"), id="delete_button")
            yield Button(t("Cancel"), id="cancel_button")

        yield Footer()

    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        if event.button.id == "delete_button":

            if self.__shiftedSeasonId is None:
                raise click.ClickException('ShiftedSeasonDeleteScreen.on_button_pressed(): shifted season id is undefined')

            if self.__ssc.deleteShiftedSeason(self.__shiftedSeasonId):
                # Success: hand the deleted id back to the calling screen.
                self.dismiss(self.__shiftedSeasonId)

            else:
                # TODO: show a user-facing message here
                self.app.pop_screen()

        if event.button.id == "cancel_button":
            self.app.pop_screen()

    def action_back(self):
        # Bound to 'escape'.
        go_back_or_exit(self)
|
||||||
252
src/ffx/shifted_season_details_screen.py
Normal file
252
src/ffx/shifted_season_details_screen.py
Normal file
@@ -0,0 +1,252 @@
|
|||||||
|
from typing import List
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button, Input
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .shifted_season_controller import ShiftedSeasonController
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
from ffx.model.shifted_season import ShiftedSeason
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
class ShiftedSeasonDetailsScreen(Screen):
|
||||||
|
|
||||||
|
BINDINGS = [
|
||||||
|
("escape", "back", t("Back")),
|
||||||
|
]
|
||||||
|
|
||||||
|
CSS = """
|
||||||
|
|
||||||
|
Grid {
|
||||||
|
grid-size: 3 10;
|
||||||
|
grid-rows: 2 2 2 2 2 2 2 2 2 2;
|
||||||
|
grid-columns: 20 1fr 1fr;
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
min-width: 80;
|
||||||
|
padding: 1;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
Input {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
Button {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataTable {
|
||||||
|
min-height: 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataTable .datatable--cursor {
|
||||||
|
background: darkorange;
|
||||||
|
color: black;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataTable .datatable--header {
|
||||||
|
background: steelblue;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
#toplabel {
|
||||||
|
height: 1;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.two {
|
||||||
|
column-span: 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.three {
|
||||||
|
column-span: 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.four {
|
||||||
|
column-span: 4;
|
||||||
|
}
|
||||||
|
.five {
|
||||||
|
column-span: 5;
|
||||||
|
}
|
||||||
|
.six {
|
||||||
|
column-span: 6;
|
||||||
|
}
|
||||||
|
.seven {
|
||||||
|
column-span: 7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.box {
|
||||||
|
height: 100%;
|
||||||
|
border: solid green;
|
||||||
|
}
|
||||||
|
|
||||||
|
.yellow {
|
||||||
|
tint: yellow 40%;
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, showId = None, patternId = None, shiftedSeasonId = None):
|
||||||
|
super().__init__()
|
||||||
|
|
||||||
|
self.context = self.app.getContext()
|
||||||
|
self.Session = self.context['database']['session'] # convenience
|
||||||
|
|
||||||
|
self.__ssc = ShiftedSeasonController(context = self.context)
|
||||||
|
|
||||||
|
self.__showId = showId
|
||||||
|
self.__patternId = patternId
|
||||||
|
self.__shiftedSeasonId = shiftedSeasonId
|
||||||
|
|
||||||
|
def _owner_kwargs(self):
|
||||||
|
if self.__patternId is not None:
|
||||||
|
return {'patternId': self.__patternId}
|
||||||
|
return {'showId': self.__showId}
|
||||||
|
|
||||||
|
def on_mount(self):
|
||||||
|
|
||||||
|
if self.__shiftedSeasonId is not None:
|
||||||
|
shiftedSeason: ShiftedSeason = self.__ssc.getShiftedSeason(self.__shiftedSeasonId)
|
||||||
|
|
||||||
|
originalSeason = shiftedSeason.getOriginalSeason()
|
||||||
|
self.query_one("#input_original_season", Input).value = str(originalSeason)
|
||||||
|
|
||||||
|
firstEpisode = shiftedSeason.getFirstEpisode()
|
||||||
|
self.query_one("#input_first_episode", Input).value = str(firstEpisode) if firstEpisode != -1 else ''
|
||||||
|
|
||||||
|
lastEpisode = shiftedSeason.getLastEpisode()
|
||||||
|
self.query_one("#input_last_episode", Input).value = str(lastEpisode) if lastEpisode != -1 else ''
|
||||||
|
|
||||||
|
seasonOffset = shiftedSeason.getSeasonOffset()
|
||||||
|
self.query_one("#input_season_offset", Input).value = str(seasonOffset) if seasonOffset else ''
|
||||||
|
|
||||||
|
episodeOffset = shiftedSeason.getEpisodeOffset()
|
||||||
|
self.query_one("#input_episode_offset", Input).value = str(episodeOffset) if episodeOffset else ''
|
||||||
|
|
||||||
|
|
||||||
|
def compose(self):
    """Build the UI: header, a 3-column form grid, footer.

    Widgets are placed in the Grid by yield order, so the row comments
    below also document the layout — keep the order intact.
    """

    yield Header()

    with Grid():

        # Row 1: title reflects edit vs. create mode.
        yield Static(
            t("Edit shifted season") if self.__shiftedSeasonId is not None else t("New shifted season"),
            id="toplabel",
            classes="three",
        )

        # Row 2: spacer.
        yield Static(" ", classes="three")

        # Row 3: mandatory source season.
        yield Static(t("Source Season"))
        yield Input(id="input_original_season", classes="two")

        # Row 4: optional first episode (-1 sentinel when blank).
        yield Static(t("First Episode"))
        yield Input(id="input_first_episode", classes="two")

        # Row 5: optional last episode (-1 sentinel when blank).
        yield Static(t("Last Episode"))
        yield Input(id="input_last_episode", classes="two")

        # Row 6: optional season offset (0 when blank).
        yield Static(t("Season Offset"))
        yield Input(id="input_season_offset", classes="two")

        # Row 7: optional episode offset (0 when blank).
        yield Static(t("Episode offset"))
        yield Input(id="input_episode_offset", classes="two")

        # Row 8: spacer.
        yield Static(" ", classes="three")

        # Row 9: action buttons.
        yield Button(t("Save"), id="save_button")
        yield Button(t("Cancel"), id="cancel_button")
        yield Static(" ")

        # Row 10: spacer.
        yield Static(" ", classes="three")

    yield Footer()
|
||||||
|
|
||||||
|
|
||||||
|
def getShiftedSeasonObjFromInput(self):
    """Collect the form fields into a shifted-season dict.

    Returns:
        dict with keys original_season, first_episode, last_episode,
        season_offset, episode_offset — or None when the mandatory source
        season is missing or not a valid integer. Optional fields fall
        back to their sentinels (-1 for episode bounds, 0 for offsets).
    """

    def optional_int(widget_id, default):
        # Optional integer field: empty or unparsable input maps to default.
        try:
            return int(self.query_one(widget_id, Input).value)
        except ValueError:
            return default

    # BUGFIX: the mandatory field was previously converted with a bare
    # int() — non-numeric input raised an uncaught ValueError instead of
    # signalling "invalid form" like the empty-field case did.
    try:
        originalSeason = int(self.query_one("#input_original_season", Input).value)
    except ValueError:
        return None

    return {
        'original_season': originalSeason,
        'first_episode': optional_int("#input_first_episode", -1),
        'last_episode': optional_int("#input_last_episode", -1),
        'season_offset': optional_int("#input_season_offset", 0),
        'episode_offset': optional_int("#input_episode_offset", 0),
    }
|
||||||
|
|
||||||
|
def action_back(self):
    """Handle the escape binding: leave this screen (or exit if it is the last one)."""
    go_back_or_exit(self)
|
||||||
|
|
||||||
|
|
||||||
|
# Event handler for button press
|
||||||
|
# Event handler for button press
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Save or cancel the shifted-season form.

    On save, the collected object is validated via checkShiftedSeason()
    before the season is updated (edit mode) or added (create mode); on
    success the screen dismisses with (shiftedSeasonId, shiftedSeasonObj).
    """

    # Check if the button pressed is the one we are interested in
    if event.button.id == "save_button":

        shiftedSeasonObj = self.getShiftedSeasonObjFromInput()

        # None means the mandatory source-season field is missing.
        if shiftedSeasonObj is not None:

            if self.__shiftedSeasonId is not None:

                # Edit mode: pass the season's own id so the check can
                # exclude it from the sibling comparison.
                if self.__ssc.checkShiftedSeason(
                    shiftedSeasonObj=shiftedSeasonObj,
                    shiftedSeasonId=self.__shiftedSeasonId,
                    **self._owner_kwargs(),
                ):
                    if self.__ssc.updateShiftedSeason(self.__shiftedSeasonId, shiftedSeasonObj):
                        self.dismiss((self.__shiftedSeasonId, shiftedSeasonObj))
                    else:
                        # TODO: user-facing message when the update fails
                        self.app.pop_screen()
                # NOTE(review): a failed check silently keeps the screen
                # open with no feedback — presumably intentional; confirm.

            else:
                # Create mode: validate, insert, then dismiss with the new id.
                if self.__ssc.checkShiftedSeason(
                    shiftedSeasonObj=shiftedSeasonObj,
                    **self._owner_kwargs(),
                ):
                    self.__shiftedSeasonId = self.__ssc.addShiftedSeason(
                        shiftedSeasonObj=shiftedSeasonObj,
                        **self._owner_kwargs(),
                    )
                    self.dismiss((self.__shiftedSeasonId, shiftedSeasonObj))

    if event.button.id == "cancel_button":
        self.app.pop_screen()
|
||||||
128
src/ffx/show_controller.py
Normal file
128
src/ffx/show_controller.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.model.show import Show
|
||||||
|
from ffx.show_descriptor import ShowDescriptor
|
||||||
|
|
||||||
|
|
||||||
|
class ShowController():
    """Database access layer for Show entities.

    Each public method opens a short-lived session, performs one query or
    transaction, and always closes the session. Database errors are
    re-raised as click.ClickException so the CLI reports them cleanly.

    BUGFIX (all methods): the session is now created *before* the try
    block. Previously `s = self.Session()` sat inside the try, so a
    failing session factory left `s` unbound and the `finally: s.close()`
    raised a NameError that masked the original error.
    """

    def __init__(self, context):
        """Store the shared application context and the session factory."""
        self.context = context
        self.Session = self.context['database']['session']  # convenience

    def getShowDescriptor(self, showId):
        """Return the ShowDescriptor of the show with id showId, or None."""
        s = self.Session()
        try:
            show = s.query(Show).filter(Show.id == showId).first()
            if show is not None:
                return show.getDescriptor(self.context)
            return None
        except Exception as ex:
            raise click.ClickException(f"ShowController.getShowDescriptor(): {repr(ex)}")
        finally:
            s.close()

    def getShow(self, showId):
        """Return the Show row with id showId, or None when it does not exist."""
        s = self.Session()
        try:
            return s.query(Show).filter(Show.id == showId).first()
        except Exception as ex:
            raise click.ClickException(f"ShowController.getShow(): {repr(ex)}")
        finally:
            s.close()

    def getAllShows(self):
        """Return every Show row in the database."""
        s = self.Session()
        try:
            return s.query(Show).all()
        except Exception as ex:
            raise click.ClickException(f"ShowController.getAllShows(): {repr(ex)}")
        finally:
            s.close()

    def updateShow(self, showDescriptor: ShowDescriptor):
        """Insert or update the show described by showDescriptor.

        Returns:
            True when a row was inserted or modified, False when the
            existing row already matched the descriptor.
        """
        s = self.Session()
        try:
            currentShow = s.query(Show).filter(Show.id == showDescriptor.getId()).first()

            if currentShow is None:
                # No row yet: insert a fresh one from the descriptor.
                show = Show(id=int(showDescriptor.getId()),
                            name=str(showDescriptor.getName()),
                            year=int(showDescriptor.getYear()),
                            index_season_digits=showDescriptor.getIndexSeasonDigits(),
                            index_episode_digits=showDescriptor.getIndexEpisodeDigits(),
                            indicator_season_digits=showDescriptor.getIndicatorSeasonDigits(),
                            indicator_episode_digits=showDescriptor.getIndicatorEpisodeDigits(),
                            quality=showDescriptor.getQuality(),
                            notes=showDescriptor.getNotes())
                s.add(show)
                s.commit()
                return True

            # Row exists: copy over every field that differs; commit only
            # when something actually changed.
            changed = False
            updates = (
                ('name', str(showDescriptor.getName())),
                ('year', int(showDescriptor.getYear())),
                ('index_season_digits', int(showDescriptor.getIndexSeasonDigits())),
                ('index_episode_digits', int(showDescriptor.getIndexEpisodeDigits())),
                ('indicator_season_digits', int(showDescriptor.getIndicatorSeasonDigits())),
                ('indicator_episode_digits', int(showDescriptor.getIndicatorEpisodeDigits())),
            )
            for attr, value in updates:
                if getattr(currentShow, attr) != value:
                    setattr(currentShow, attr, value)
                    changed = True

            # quality/notes may be NULL in the database; normalise before comparing.
            if int(currentShow.quality or 0) != int(showDescriptor.getQuality()):
                currentShow.quality = int(showDescriptor.getQuality())
                changed = True
            if str(currentShow.notes or '') != str(showDescriptor.getNotes()):
                currentShow.notes = str(showDescriptor.getNotes())
                changed = True

            if changed:
                s.commit()
            return changed

        except Exception as ex:
            raise click.ClickException(f"ShowController.updateShow(): {repr(ex)}")
        finally:
            s.close()

    def deleteShow(self, show_id):
        """Delete the show with id show_id.

        Returns:
            True when a row was removed, False when no such show exists.
        """
        s = self.Session()
        try:
            show = s.query(Show).filter(Show.id == int(show_id)).first()
            if show is not None:
                # Delete via the session (not query.delete()) so ORM-level
                # cascades fire, see https://stackoverflow.com/a/19245058
                s.delete(show)
                s.commit()
                return True
            return False
        except Exception as ex:
            raise click.ClickException(f"ShowController.deleteShow(): {repr(ex)}")
        finally:
            s.close()
|
||||||
112
src/ffx/show_delete_screen.py
Normal file
112
src/ffx/show_delete_screen.py
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .show_controller import ShowController
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
class ShowDeleteScreen(Screen):
    """Confirmation dialog asking whether a show should be deleted.

    On confirmed deletion the screen dismisses with the deleted show's
    descriptor; otherwise the screen is simply popped.
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    CSS = """
    Grid {
        grid-size: 2;
        grid-rows: 2 auto;
        grid-columns: 18 4fr;
        height: 100%;
        width: 100%;
        min-width: 80;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, showId = None):
        """Look up the descriptor of the show to delete.

        BUGFIX: the fallback used to be {} — which is not None, so the
        `is None` guards below let it through and `{}.getId()` would raise
        AttributeError. Use None, matching ShowDetailsScreen.
        """
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session']  # convenience

        self.__sc = ShowController(context=self.context)

        self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None

    def on_mount(self):
        """Show the identifying label of the show about to be deleted."""
        if self.__showDescriptor is not None:
            self.query_one("#showlabel", Static).update(
                f"{self.__showDescriptor.getId()} - {self.__showDescriptor.getName()} ({self.__showDescriptor.getYear()})"
            )

    def compose(self):
        """Header, confirmation grid and footer. Yield order fixes the layout."""
        yield Header()

        with Grid():
            # Row 1: question.
            yield Static(t("Are you sure to delete the following show?"), id="toplabel", classes="two")

            # Row 2: spacer.
            yield Static("", classes="two")

            # Row 3: filled by on_mount() with the show's id/name/year.
            yield Static("", id="showlabel")
            yield Static("")

            # Rows 4-5: spacers.
            yield Static("", classes="two")
            yield Static("", classes="two")

            # Row 6: action buttons.
            yield Button(t("Delete"), id="delete_button")
            yield Button(t("Cancel"), id="cancel_button")

        yield Footer()

    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Delete on confirmation, otherwise just close the screen."""
        if event.button.id == "delete_button":
            if self.__showDescriptor is not None:
                if self.__sc.deleteShow(self.__showDescriptor.getId()):
                    self.dismiss(self.__showDescriptor)
                else:
                    # TODO: user-facing message when deletion fails
                    self.app.pop_screen()

        if event.button.id == "cancel_button":
            self.app.pop_screen()

    def action_back(self):
        """Escape binding: go back or exit the application."""
        go_back_or_exit(self)
|
||||||
165
src/ffx/show_descriptor.py
Normal file
165
src/ffx/show_descriptor.py
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
from .configuration_controller import ConfigurationController
|
||||||
|
from .constants import (
|
||||||
|
DEFAULT_SHOW_INDEX_EPISODE_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDEX_SEASON_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS,
|
||||||
|
DEFAULT_SHOW_INDICATOR_SEASON_DIGITS,
|
||||||
|
)
|
||||||
|
from .logging_utils import get_ffx_logger
|
||||||
|
|
||||||
|
|
||||||
|
class ShowDescriptor():
    """Value object describing one show: identity, digit-length settings,
    quality and free-form notes.

    (The previous docstring — "structural content of a media file" — was a
    copy-paste from another class and did not describe this one.)

    All values are supplied as keyword arguments and type-checked in
    __init__; digit-length fields fall back to configured defaults.
    """

    # Optional kwarg carrying the application context (dict).
    CONTEXT_KEY = 'context'

    ID_KEY = 'id'
    NAME_KEY = 'name'
    YEAR_KEY = 'year'

    INDEX_SEASON_DIGITS_KEY = 'index_season_digits'
    INDEX_EPISODE_DIGITS_KEY = 'index_episode_digits'
    INDICATOR_SEASON_DIGITS_KEY = 'indicator_season_digits'
    INDICATOR_EPISODE_DIGITS_KEY = 'indicator_episode_digits'
    QUALITY_KEY = 'quality'
    NOTES_KEY = 'notes'

    DEFAULT_INDEX_SEASON_DIGITS = DEFAULT_SHOW_INDEX_SEASON_DIGITS
    DEFAULT_INDEX_EPISODE_DIGITS = DEFAULT_SHOW_INDEX_EPISODE_DIGITS
    DEFAULT_INDICATOR_SEASON_DIGITS = DEFAULT_SHOW_INDICATOR_SEASON_DIGITS
    DEFAULT_INDICATOR_EPISODE_DIGITS = DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS

    @classmethod
    def getDefaultDigitLengths(cls, context: dict | None = None) -> dict[str, int]:
        """Return the configured digit lengths, falling back to class defaults."""
        configurationData = {}

        if context is not None:
            configController = context.get('config')
            # Duck-typed: anything exposing getData() counts as a config source.
            if configController is not None and hasattr(configController, 'getData'):
                configurationData = configController.getData()

        # (result key, configuration key, fallback) for each digit field.
        fields = (
            (cls.INDEX_SEASON_DIGITS_KEY,
             ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY,
             cls.DEFAULT_INDEX_SEASON_DIGITS),
            (cls.INDEX_EPISODE_DIGITS_KEY,
             ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY,
             cls.DEFAULT_INDEX_EPISODE_DIGITS),
            (cls.INDICATOR_SEASON_DIGITS_KEY,
             ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY,
             cls.DEFAULT_INDICATOR_SEASON_DIGITS),
            (cls.INDICATOR_EPISODE_DIGITS_KEY,
             ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY,
             cls.DEFAULT_INDICATOR_EPISODE_DIGITS),
        )
        return {
            resultKey: ConfigurationController.getConfiguredIntegerValue(
                configurationData, configKey, fallback)
            for resultKey, configKey, fallback in fields
        }

    def __init__(self, **kwargs):
        """Validate and store the supplied attributes.

        Raises:
            TypeError: when a supplied kwarg does not have the expected type.
        """

        def take(key, expectedType, default):
            # Validated kwarg access: absent -> default, wrong type -> TypeError.
            # The exact-type check (not isinstance) matches the original
            # behaviour, e.g. bool is rejected where int is expected.
            if key not in kwargs:
                return default
            if type(kwargs[key]) is not expectedType:
                raise TypeError(
                    f"ShowDescriptor.__init__(): Argument {key} is required to be of type {expectedType.__name__}"
                )
            return kwargs[key]

        if ShowDescriptor.CONTEXT_KEY in kwargs:
            self.__context = take(ShowDescriptor.CONTEXT_KEY, dict, None)
            self.__logger = self.__context['logger']
        else:
            self.__context = {}
            self.__logger = get_ffx_logger()

        self.__showId = take(ShowDescriptor.ID_KEY, int, -1)
        self.__showName = take(ShowDescriptor.NAME_KEY, str, '')
        self.__showYear = take(ShowDescriptor.YEAR_KEY, int, -1)

        defaultDigitLengths = self.getDefaultDigitLengths(self.__context)

        self.__indexSeasonDigits = take(
            ShowDescriptor.INDEX_SEASON_DIGITS_KEY, int,
            defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY])
        self.__indexEpisodeDigits = take(
            ShowDescriptor.INDEX_EPISODE_DIGITS_KEY, int,
            defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY])
        self.__indicatorSeasonDigits = take(
            ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY, int,
            defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY])
        self.__indicatorEpisodeDigits = take(
            ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY, int,
            defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY])

        self.__quality = take(ShowDescriptor.QUALITY_KEY, int, 0)
        self.__notes = take(ShowDescriptor.NOTES_KEY, str, '')

    def getId(self):
        """Database id of the show (-1 when not persisted)."""
        return self.__showId

    def getName(self):
        """Show name ('' when unset)."""
        return self.__showName

    def getYear(self):
        """Release year (-1 when unset)."""
        return self.__showYear

    def getIndexSeasonDigits(self):
        return self.__indexSeasonDigits

    def getIndexEpisodeDigits(self):
        return self.__indexEpisodeDigits

    def getIndicatorSeasonDigits(self):
        return self.__indicatorSeasonDigits

    def getIndicatorEpisodeDigits(self):
        return self.__indicatorEpisodeDigits

    def getQuality(self):
        return self.__quality

    def getNotes(self):
        return self.__notes

    def getFilenamePrefix(self):
        """Return the '<name> (<year>)' prefix used when building file names."""
        return f"{self.__showName} ({self.__showYear})"
|
||||||
536
src/ffx/show_details_screen.py
Normal file
536
src/ffx/show_details_screen.py
Normal file
@@ -0,0 +1,536 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button, DataTable, Input, TextArea
|
||||||
|
from textual.containers import Grid
|
||||||
|
from textual.widgets._data_table import CellDoesNotExist
|
||||||
|
|
||||||
|
from .pattern_details_screen import PatternDetailsScreen
|
||||||
|
from .pattern_delete_screen import PatternDeleteScreen
|
||||||
|
|
||||||
|
from .show_descriptor import ShowDescriptor
|
||||||
|
|
||||||
|
from .shifted_season_details_screen import ShiftedSeasonDetailsScreen
|
||||||
|
from .shifted_season_delete_screen import ShiftedSeasonDeleteScreen
|
||||||
|
|
||||||
|
from ffx.model.shifted_season import ShiftedSeason
|
||||||
|
|
||||||
|
from .helper import filterFilename
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import (
|
||||||
|
add_auto_table_column,
|
||||||
|
build_screen_bootstrap,
|
||||||
|
build_screen_controllers,
|
||||||
|
go_back_or_exit,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
class ShowDetailsScreen(Screen):
    """Screen for viewing/editing one show plus its patterns and shifted seasons."""

    # Textual CSS: a 5x19 grid form plus two DataTables (patterns and
    # shifted seasons). .two/.three/.four/.five set column spans.
    CSS = """

    Grid {
        grid-size: 5 19;
        grid-rows: 2 2 2 2 2 2 6 2 2 2 2 2 2 2 9 2 9 2 2;
        grid-columns: 25 20 20 20 1fr;
        height: 100%;
        width: 100%;
        min-width: 110;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }

    DataTable {
        column-span: 2;
        min-height: 8;
        width: 100%;
    }

    DataTable .datatable--cursor {
        background: darkorange;
        color: black;
    }

    DataTable .datatable--header {
        background: steelblue;
        color: white;
    }

    #toplabel {
        height: 1;
    }


    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }
    .four {
        column-span: 4;
    }
    .five {
        column-span: 5;
    }

    .box {
        height: 100%;
        border: solid green;
    }

    .note_box {
        min-height: 6;
    }
    """

    # Key bindings shown in the footer; handlers are action_* methods below.
    BINDINGS = [
        ("escape", "back", t("Back")),
        ("a", "add_pattern", t("Add Pattern")),
        ("e", "edit_pattern", t("Edit Pattern")),
        ("r", "remove_pattern", t("Remove Pattern")),
    ]
||||||
|
|
||||||
|
def __init__(self, showId = None):
    """Initialise controllers and, in edit mode, load the show descriptor."""
    super().__init__()

    bootstrap = build_screen_bootstrap(self.app.getContext())
    self.context = bootstrap.context

    controllers = build_screen_controllers(
        self.context,
        pattern=True,
        show=True,
        tmdb=True,
        shifted_season=True,
    )

    # Controller shortcuts.
    self.__sc = controllers['show']
    self.__pc = controllers['pattern']
    self.__tc = controllers['tmdb']
    self.__ssc = controllers['shifted_season']

    # None -> "new show" mode; otherwise load the descriptor for editing.
    self.__showDescriptor = None if showId is None else self.__sc.getShowDescriptor(showId)

    # Per-row payloads backing the two data tables, keyed by row key.
    self.__patternRowData: dict[object, dict[str, object]] = {}
    self.__shiftedSeasonRowData: dict[object, dict[str, int | None]] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def _add_pattern_row(self, *, pattern_id: int | None, pattern_text: str):
    """Append one pattern row and remember its payload; return the row key."""
    text = str(pattern_text)
    key = self.patternTable.add_row(text)

    descriptor = self.__showDescriptor
    self.__patternRowData[key] = {
        'id': pattern_id,
        'show_id': None if descriptor is None else descriptor.getId(),
        'pattern': text,
    }
    return key
|
||||||
|
|
||||||
|
|
||||||
|
def _add_shifted_season_row(self, shifted_season_obj: dict[str, int | None]):
    """Append one shifted-season row; -1 episode bounds render as blank cells.

    Returns the new row key; a copy of the dict is kept as row payload.
    """
    firstEpisode = shifted_season_obj['first_episode']
    lastEpisode = shifted_season_obj['last_episode']

    cells = [
        str(shifted_season_obj['original_season']),
        '' if firstEpisode == -1 else str(firstEpisode),
        '' if lastEpisode == -1 else str(lastEpisode),
        str(shifted_season_obj['season_offset']),
        str(shifted_season_obj['episode_offset']),
    ]

    row_key = self.shiftedSeasonsTable.add_row(*cells)
    self.__shiftedSeasonRowData[row_key] = dict(shifted_season_obj)
    return row_key
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def updateShiftedSeasons(self):
    """Rebuild the shifted-seasons table from the database."""
    self.shiftedSeasonsTable.clear()
    self.__shiftedSeasonRowData = {}

    # Nothing to list in "new show" mode.
    if self.__showDescriptor is None:
        return

    showId = int(self.__showDescriptor.getId())

    shiftedSeason: ShiftedSeason
    for shiftedSeason in self.__ssc.getShiftedSeasonSiblings(showId=showId):
        seasonObj = shiftedSeason.getObj()
        seasonObj['id'] = shiftedSeason.getId()
        self._add_shifted_season_row(seasonObj)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def on_mount(self):
    """Populate the form and both tables once the widgets exist.

    Edit mode (descriptor present): fill every field from the descriptor
    and load the pattern/shifted-season tables. Create mode: only seed
    the digit-length inputs with the configured defaults.
    """

    if self.__showDescriptor is not None:

        showId = int(self.__showDescriptor.getId())

        self.query_one("#id_static", Static).update(str(showId))
        self.query_one("#name_input", Input).value = str(self.__showDescriptor.getName())
        self.query_one("#year_input", Input).value = str(self.__showDescriptor.getYear())

        self.query_one("#index_season_digits_input", Input).value = str(self.__showDescriptor.getIndexSeasonDigits())
        self.query_one("#index_episode_digits_input", Input).value = str(self.__showDescriptor.getIndexEpisodeDigits())
        self.query_one("#indicator_season_digits_input", Input).value = str(self.__showDescriptor.getIndicatorSeasonDigits())
        self.query_one("#indicator_episode_digits_input", Input).value = str(self.__showDescriptor.getIndicatorEpisodeDigits())
        # Quality/notes may be unset (falsy); leave the widgets empty then.
        if self.__showDescriptor.getQuality():
            self.query_one("#quality_input", Input).value = str(self.__showDescriptor.getQuality())
        if self.__showDescriptor.getNotes():
            self.query_one("#notes_textarea", TextArea).text = str(self.__showDescriptor.getNotes())

        # Fill the patterns table from the database.
        for pattern in self.__pc.getPatternsForShow(showId):
            self._add_pattern_row(
                pattern_id=pattern.getId(),
                pattern_text=pattern.getPattern(),
            )

        self.updateShiftedSeasons()

    else:
        # New show: pre-fill only the digit-length inputs with defaults.
        defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(self.context)

        self.query_one("#index_season_digits_input", Input).value = str(
            defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY]
        )
        self.query_one("#index_episode_digits_input", Input).value = str(
            defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY]
        )
        self.query_one("#indicator_season_digits_input", Input).value = str(
            defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY]
        )
        self.query_one("#indicator_episode_digits_input", Input).value = str(
            defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY]
        )
|
||||||
|
|
||||||
|
|
||||||
|
def getSelectedPatternDescriptor(self):
    """Return a copy of the payload of the pattern row under the cursor.

    Returns {} when the table is empty or no row is selected.
    """
    try:
        row_key, _col_key = self.patternTable.coordinate_to_cell_key(
            self.patternTable.cursor_coordinate
        )
    except CellDoesNotExist:
        # Empty table / cursor outside the table: nothing selected.
        return {}

    if row_key is None:
        return {}
    return dict(self.__patternRowData.get(row_key, {}))
|
||||||
|
|
||||||
|
|
||||||
|
def getSelectedShiftedSeasonObjFromInput(self):
    """Return a copy of the payload of the shifted-season row under the cursor.

    Returns {} when the table is empty or no row is selected.
    """
    try:
        row_key, _col_key = self.shiftedSeasonsTable.coordinate_to_cell_key(
            self.shiftedSeasonsTable.cursor_coordinate
        )
    except CellDoesNotExist:
        # Empty table / cursor outside the table: nothing selected.
        return {}

    if row_key is None:
        return {}
    return dict(self.__shiftedSeasonRowData.get(row_key, {}))
|
||||||
|
|
||||||
|
|
||||||
|
def action_add_pattern(self):
    """Key binding 'a': open the pattern dialog (only for an existing show)."""
    if self.__showDescriptor is None:
        return
    self.app.push_screen(
        PatternDetailsScreen(showId=self.__showDescriptor.getId()),
        self.handle_add_pattern,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def handle_add_pattern(self, screenResult):
    """Callback from PatternDetailsScreen: insert the new pattern row."""
    if screenResult is None:
        # Dialog was cancelled.
        return

    self._add_pattern_row(
        pattern_id=self.__pc.findPattern(screenResult),
        pattern_text=screenResult['pattern'],
    )
|
||||||
|
|
||||||
|
|
||||||
|
def action_edit_pattern(self):
    """Key binding 'e': open the pattern dialog for the selected pattern."""
    selected = self.getSelectedPatternDescriptor()
    if not selected:
        return

    selectedPatternId = selected.get('id')
    if selectedPatternId is None:
        raise click.ClickException("ShowDetailsScreen.action_edit_pattern(): Pattern to edit has no id")

    self.app.push_screen(
        PatternDetailsScreen(patternId=selectedPatternId, showId=self.__showDescriptor.getId()),
        self.handle_edit_pattern,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def handle_edit_pattern(self, screenResult):
    """Callback from PatternDetailsScreen: write the edited pattern back
    into the table cell and the row payload.

    screenResult is None when the dialog was cancelled.
    """
    # BUGFIX: a cancelled dialog used to raise TypeError on
    # screenResult['pattern']; treat None as a no-op, consistent with
    # handle_add_pattern.
    if screenResult is None:
        return

    try:
        row_key, col_key = self.patternTable.coordinate_to_cell_key(self.patternTable.cursor_coordinate)
        self.patternTable.update_cell(row_key, self.column_key_pattern, screenResult['pattern'])
        if row_key in self.__patternRowData:
            self.__patternRowData[row_key]['pattern'] = str(screenResult['pattern'])

    except CellDoesNotExist:
        # Selection vanished while the dialog was open; nothing to update.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
def action_remove_pattern(self):
    """Key binding 'r': open the delete dialog for the selected pattern."""
    selected = self.getSelectedPatternDescriptor()
    if not selected:
        return

    selectedPatternId = selected.get('id')
    if selectedPatternId is None:
        raise click.ClickException("ShowDetailsScreen.action_remove_pattern(): Pattern to remove has no id")

    self.app.push_screen(
        PatternDeleteScreen(patternId=selectedPatternId, showId=self.__showDescriptor.getId()),
        self.handle_remove_pattern,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def handle_remove_pattern(self, pattern):
    """Callback from PatternDeleteScreen: drop the row under the cursor
    from the pattern table and from the cached row data.

    Silently does nothing when no table cell is selected.
    """
    table = self.patternTable
    try:
        removed_row_key, _unused_col = table.coordinate_to_cell_key(table.cursor_coordinate)
        table.remove_row(removed_row_key)
        self.__patternRowData.pop(removed_row_key, None)
    except CellDoesNotExist:
        pass
|
||||||
|
|
||||||
|
|
||||||
|
def compose(self):
    """Build the show-details form.

    Creates the pattern table and the shifted-seasons (numbering mapping)
    table, then lays out the form rows in a Grid: identity fields,
    notes, digit-format fields, the two tables, and Save/Cancel.
    """
    # Create the DataTable widget
    self.patternTable = DataTable(classes="five")

    # Define the columns with headers
    self.column_key_pattern = add_auto_table_column(self.patternTable, t("Pattern"))
    self.patternTable.cursor_type = 'row'

    self.shiftedSeasonsTable = DataTable(classes="five")
    self.column_key_original_season = add_auto_table_column(self.shiftedSeasonsTable, t("Source Season"))
    self.column_key_first_episode = add_auto_table_column(self.shiftedSeasonsTable, t("First Episode"))
    self.column_key_last_episode = add_auto_table_column(self.shiftedSeasonsTable, t("Last Episode"))
    self.column_key_season_offset = add_auto_table_column(self.shiftedSeasonsTable, t("Season Offset"))
    self.column_key_episode_offset = add_auto_table_column(self.shiftedSeasonsTable, t("Episode Offset"))
    self.shiftedSeasonsTable.cursor_type = 'row'

    yield Header()

    with Grid():

        # Row 1
        yield Static(t("Show") if self.__showDescriptor is not None else t("New Show"), id="toplabel")
        yield Button(t("Identify"), id="identify_button")
        yield Static(" ", classes="three")

        # Row 2 -- the id is editable only for a new show
        yield Static(t("ID"))
        if self.__showDescriptor is not None:
            yield Static("", id="id_static", classes="four")
        else:
            yield Input(type="integer", id="id_input", classes="four")

        # Row 3
        yield Static(t("Name"))
        yield Input(type="text", id="name_input", classes="four")

        # Row 4
        yield Static(t("Year"))
        yield Input(type="integer", id="year_input", classes="four")

        # Row 5
        yield Static(t("Quality"))
        yield Input(type="integer", id="quality_input", classes="four")

        # Row 6
        yield Static(t("Notes"))
        yield Static(" ", classes="four")

        # Row 7
        yield TextArea(id="notes_textarea", classes="five note_box")

        # Row 8
        yield Static(" ", classes="five")

        # Row 9
        yield Static(t("Index Season Digits"))
        yield Input(type="integer", id="index_season_digits_input", classes="four")

        # Row 10
        yield Static(t("Index Episode Digits"))
        yield Input(type="integer", id="index_episode_digits_input", classes="four")

        # Row 11
        yield Static(t("Indicator Season Digits"))
        yield Input(type="integer", id="indicator_season_digits_input", classes="four")

        # Row 12
        # Fixed typo: label previously read "Indicator Edisode Digits".
        # NOTE(review): if a translation catalog keys on the old string,
        # add the corrected key there as well.
        yield Static(t("Indicator Episode Digits"))
        yield Input(type="integer", id="indicator_episode_digits_input", classes="four")

        # Row 13
        yield Static(" ", classes="five")

        # Row 14 -- season/episode renumbering controls, only for an existing show
        yield Static(t("Numbering Mapping"))

        if self.__showDescriptor is not None:
            yield Button(t("Add"), id="button_add_shifted_season")
            yield Button(t("Edit"), id="button_edit_shifted_season")
            yield Button(t("Delete"), id="button_delete_shifted_season")
        else:
            yield Static(" ")
            yield Static(" ")
            yield Static(" ")

        yield Static(" ")

        # Row 15
        yield self.shiftedSeasonsTable

        # Row 16
        yield Static(t("File patterns"), classes="five")

        # Row 17
        yield self.patternTable

        # Row 18
        yield Static(" ", classes="five")

        # Row 19
        yield Button(t("Save"), id="save_button")
        yield Button(t("Cancel"), id="cancel_button")

    yield Footer()
|
||||||
|
|
||||||
|
|
||||||
|
def getShowDescriptorFromInput(self) -> ShowDescriptor:
    """Collect the form values into a new ShowDescriptor.

    Returns:
        A ShowDescriptor built from the inputs, or None when the
        mandatory show id cannot be parsed as an integer. All other
        fields are optional: values that fail to convert are skipped.
    """
    kwargs = {ShowDescriptor.CONTEXT_KEY: self.context}

    # The id is mandatory: for an existing show it comes from the
    # descriptor, for a new show from the id input field.
    try:
        if self.__showDescriptor:
            kwargs[ShowDescriptor.ID_KEY] = int(self.__showDescriptor.getId())
        else:
            kwargs[ShowDescriptor.ID_KEY] = int(self.query_one("#id_input", Input).value)
    except ValueError:
        return None

    def _collect(key, widget_id, convert):
        # Optional field: store the converted input value, silently
        # skipping it when conversion fails (e.g. empty year field).
        try:
            kwargs[key] = convert(self.query_one(widget_id, Input).value)
        except ValueError:
            pass

    _collect(ShowDescriptor.NAME_KEY, "#name_input", str)
    _collect(ShowDescriptor.YEAR_KEY, "#year_input", int)
    _collect(ShowDescriptor.INDEX_SEASON_DIGITS_KEY, "#index_season_digits_input", int)
    _collect(ShowDescriptor.INDEX_EPISODE_DIGITS_KEY, "#index_episode_digits_input", int)
    _collect(ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY, "#indicator_season_digits_input", int)
    _collect(ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY, "#indicator_episode_digits_input", int)
    _collect(ShowDescriptor.QUALITY_KEY, "#quality_input", int)

    kwargs[ShowDescriptor.NOTES_KEY] = str(self.query_one("#notes_textarea", TextArea).text)

    return ShowDescriptor(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
# Event handler for button press
|
||||||
|
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch all button presses on this screen by button id.

    Save dismisses with the new descriptor on success; Cancel pops the
    screen; Identify fills name/year from the lookup service; the three
    shifted-season buttons open their respective sub-screens.
    """
    if event.button.id == "save_button":

        showDescriptor = self.getShowDescriptorFromInput()

        if not showDescriptor is None:
            if self.__sc.updateShow(showDescriptor):
                # Success: hand the saved descriptor back to the caller screen.
                self.dismiss(showDescriptor)
            else:
                # TODO: show an error message to the user (original note: "Meldung").
                # On failure the screen is popped without a result.
                self.app.pop_screen()

    if event.button.id == "cancel_button":
        self.app.pop_screen()

    if event.button.id == "identify_button":
        # Look up the show's canonical name/year by its id and prefill the form.
        showDescriptor = self.getShowDescriptorFromInput()
        if not showDescriptor is None:
            showName, showYear = self.__tc.getShowNameAndYear(showDescriptor.getId())

            # filterFilename sanitizes the looked-up name for filesystem use.
            self.query_one("#name_input", Input).value = filterFilename(showName)
            self.query_one("#year_input", Input).value = str(showYear)

    if event.button.id == "button_add_shifted_season":
        # Numbering-mapping buttons exist only for an already-saved show.
        if not self.__showDescriptor is None:
            self.app.push_screen(ShiftedSeasonDetailsScreen(showId = self.__showDescriptor.getId()), self.handle_update_shifted_season)

    if event.button.id == "button_edit_shifted_season":
        # NOTE(review): assumes getSelectedShiftedSeasonObjFromInput()
        # never returns None -- confirm, otherwise .keys() raises here.
        selectedShiftedSeasonObj = self.getSelectedShiftedSeasonObjFromInput()
        if 'id' in selectedShiftedSeasonObj.keys():
            self.app.push_screen(ShiftedSeasonDetailsScreen(showId = self.__showDescriptor.getId(), shiftedSeasonId=selectedShiftedSeasonObj['id']), self.handle_update_shifted_season)

    if event.button.id == "button_delete_shifted_season":
        # Same assumption as above regarding a possible None result.
        selectedShiftedSeasonObj = self.getSelectedShiftedSeasonObjFromInput()
        if 'id' in selectedShiftedSeasonObj.keys():
            self.app.push_screen(ShiftedSeasonDeleteScreen(showId = self.__showDescriptor.getId(), shiftedSeasonId=selectedShiftedSeasonObj['id']), self.handle_delete_shifted_season)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_update_shifted_season(self, screenResult):
    """Callback from ShiftedSeasonDetailsScreen: reload the shifted-seasons table."""
    self.updateShiftedSeasons()
|
||||||
|
|
||||||
|
def handle_delete_shifted_season(self, screenResult):
    """Callback from ShiftedSeasonDeleteScreen: reload the shifted-seasons table."""
    self.updateShiftedSeasons()
|
||||||
|
|
||||||
|
def action_back(self):
    """Key binding (escape): leave this screen, or exit when it is the root."""
    go_back_or_exit(self)
|
||||||
281
src/ffx/shows_screen.py
Normal file
281
src/ffx/shows_screen.py
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, DataTable
|
||||||
|
from textual.containers import Grid
|
||||||
|
from rich.text import Text
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .show_controller import ShowController
|
||||||
|
from .screen_support import add_auto_table_column, go_back_or_exit, update_table_column_label
|
||||||
|
|
||||||
|
from .show_details_screen import ShowDetailsScreen
|
||||||
|
from .show_delete_screen import ShowDeleteScreen
|
||||||
|
|
||||||
|
from ffx.show_descriptor import ShowDescriptor
|
||||||
|
|
||||||
|
from textual.widgets._data_table import CellDoesNotExist
|
||||||
|
|
||||||
|
|
||||||
|
class ShowsScreen(Screen):
    """Top-level screen listing all shows in a sortable DataTable.

    Each table row is backed by a ShowDescriptor held in
    self.__showRowData. Clicking a column header sorts by that column
    (toggling direction on a repeated click) while keeping the cursor on
    the previously selected row. Key bindings open the new/edit/delete
    sub-screens.
    """

    CSS = """

    Grid {
        grid-size: 1;
        grid-rows: 2 auto;
        height: 100%;
        width: 100%;
        min-width: 80;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    DataTable .datatable--cursor {
        background: darkorange;
        color: black;
    }

    DataTable .datatable--header {
        background: steelblue;
        color: white;
    }

    #top {
        height: 1;
    }

    #two {
        column-span: 2;
        row-span: 2;
        tint: magenta 40%;
    }

    .box {
        height: 100%;
        border: solid green;
    }

    DataTable {
        width: 100%;
    }
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
        ("e", "edit_show", t("Edit Show")),
        ("n", "new_show", t("New Show")),
        ("d", "delete_show", t("Delete Show")),
    ]


    def __init__(self):
        super().__init__()

        self.context = self.app.getContext()
        self.Session = self.context['database']['session'] # convenience

        self.__sc = ShowController(context = self.context)
        # Maps DataTable row key -> the ShowDescriptor backing that row.
        self.__showRowData: dict[object, ShowDescriptor] = {}
        # Current sort state: column key (None = unsorted) and direction.
        self.__sortColumnKey = None
        self.__sortReverse = False
        # Maps column key -> the plain (arrow-less) header label.
        self.__columnLabels: dict[object, str] = {}


    def _add_show_row(self, show_descriptor: ShowDescriptor):
        """Append a table row for show_descriptor; remember and return its row key."""
        row_key = self.table.add_row(
            str(show_descriptor.getId()),
            str(show_descriptor.getName()),
            str(show_descriptor.getYear()),
        )
        self.__showRowData[row_key] = show_descriptor
        return row_key

    def _get_selected_row_key(self):
        """Return the row key under the cursor, or None when the table is empty."""
        try:
            row_key, _ = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)
            return row_key
        except CellDoesNotExist:
            return None

    def _move_cursor_to_row_key(self, row_key):
        """Best-effort: put the cursor back on row_key (used after sorting)."""
        if row_key is None:
            return
        try:
            row_index = int(self.table.get_row_index(row_key))
        except Exception:
            # Row may no longer exist; keep the current cursor position.
            return
        self.table.move_cursor(row=row_index)

    def _sort_key_for_column(self, column_key):
        """Return the sort-key function for a column.

        Numeric compare for id/year, case-insensitive compare for name,
        None (natural string order) for anything else.
        """
        if column_key == self.column_key_id:
            return lambda value: int(value)
        if column_key == self.column_key_year:
            return lambda value: int(value)
        if column_key == self.column_key_name:
            return lambda value: str(value).casefold()
        return None

    def _update_header_labels(self):
        """Redraw all header labels, appending a direction arrow on the sorted column."""
        if not hasattr(self, "table"):
            # compose() has not run yet; nothing to update.
            return

        arrow_up = "▴"
        arrow_down = "▾"

        for column_key, base_label in self.__columnLabels.items():
            column = self.table.columns.get(column_key)
            if column is None:
                continue

            label_text = base_label
            if column_key == self.__sortColumnKey:
                label_text = f"{base_label} {arrow_down if self.__sortReverse else arrow_up}"

            update_table_column_label(self.table, column_key, Text(label_text))

    def _apply_sort(self, *, preserve_row_key=None):
        """Sort the table by the current column/direction and refresh headers.

        Keeps the cursor on preserve_row_key when given. Does nothing but
        refresh the headers when no sort column is set.
        """
        if self.__sortColumnKey is None:
            self._update_header_labels()
            return

        self.table.sort(
            self.__sortColumnKey,
            key=self._sort_key_for_column(self.__sortColumnKey),
            reverse=self.__sortReverse,
        )
        self._move_cursor_to_row_key(preserve_row_key)
        self._update_header_labels()


    def getSelectedShowId(self):
        """Return the id of the show under the cursor, or None when nothing is selected."""
        try:
            # Fetch the currently selected row when 'Enter' is pressed
            #selected_row_index = self.table.cursor_row
            row_key, col_key = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)

            if row_key is not None:
                selected_show = self.__showRowData.get(row_key)
                return selected_show.getId() if selected_show is not None else None

        except CellDoesNotExist:
            return None

    def action_back(self):
        """Key binding (escape): leave this screen, or exit when it is the root."""
        go_back_or_exit(self)

    def on_data_table_header_selected(self, event: DataTable.HeaderSelected) -> None:
        """Header click: toggle direction on the same column, otherwise
        switch to the clicked column with ascending order."""
        if event.data_table is not self.table:
            return

        selected_row_key = self._get_selected_row_key()

        if self.__sortColumnKey == event.column_key:
            self.__sortReverse = not self.__sortReverse
        else:
            self.__sortColumnKey = event.column_key
            self.__sortReverse = False

        self._apply_sort(preserve_row_key=selected_row_key)


    def action_new_show(self):
        """Key binding: open the details screen for a new show."""
        self.app.push_screen(ShowDetailsScreen(), self.handle_new_screen)

    def handle_new_screen(self, screenResult):
        """Callback from ShowDetailsScreen: insert the newly created show and re-sort."""
        if isinstance(screenResult, ShowDescriptor):
            row_key = self._add_show_row(screenResult)
            self._apply_sort(preserve_row_key=row_key)


    def action_edit_show(self):
        """Key binding: open the details screen for the selected show."""
        selectedShowId = self.getSelectedShowId()

        if selectedShowId is not None:
            self.app.push_screen(ShowDetailsScreen(showId = selectedShowId), self.handle_edit_screen)


    def handle_edit_screen(self, showDescriptor: ShowDescriptor):
        """Callback from ShowDetailsScreen: refresh the edited row in place and re-sort."""
        try:
            row_key, col_key = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)

            self.table.update_cell(row_key, self.column_key_name, showDescriptor.getName())
            self.table.update_cell(row_key, self.column_key_year, showDescriptor.getYear())
            self.__showRowData[row_key] = showDescriptor
            self._apply_sort(preserve_row_key=row_key)

        except CellDoesNotExist:
            pass


    def action_delete_show(self):
        """Key binding: open the delete-confirmation screen for the selected show."""
        selectedShowId = self.getSelectedShowId()

        if selectedShowId is not None:
            self.app.push_screen(ShowDeleteScreen(showId = selectedShowId), self.handle_delete_show)


    def handle_delete_show(self, showDescriptor: ShowDescriptor):
        """Callback from ShowDeleteScreen: drop the deleted row from table and cache."""
        try:
            row_key, col_key = self.table.coordinate_to_cell_key(self.table.cursor_coordinate)
            self.table.remove_row(row_key)
            self.__showRowData.pop(row_key, None)

        except CellDoesNotExist:
            pass


    def on_mount(self) -> None:
        """Populate the table from the database; default sort is by name ascending."""
        for show in self.__sc.getAllShows():
            self._add_show_row(show.getDescriptor(self.context))

        self.__sortColumnKey = self.column_key_name
        self._apply_sort()


    def compose(self):
        """Build the widget tree: header, a grid with title + shows table, footer."""
        # Create the DataTable widget
        self.table = DataTable()

        # Define the columns with headers
        idLabel = t("ID")
        nameLabel = t("Name")
        yearLabel = t("Year")
        self.column_key_id = add_auto_table_column(self.table, idLabel)
        self.column_key_name = add_auto_table_column(self.table, nameLabel)
        self.column_key_year = add_auto_table_column(self.table, yearLabel)
        self.__columnLabels = {
            self.column_key_id: idLabel,
            self.column_key_name: nameLabel,
            self.column_key_year: yearLabel,
        }

        self.table.cursor_type = 'row'

        yield Header()

        with Grid():
            # Row 1
            yield Static(t("Shows"), markup=False)

            # Row 2
            yield self.table

        f = Footer()
        # NOTE(review): "yolo" looks like leftover debug text -- confirm
        # whether the footer description should be removed or localized.
        f.description = "yolo"

        yield f
|
||||||
202
src/ffx/tag_controller.py
Normal file
202
src/ffx/tag_controller.py
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.model.track import Track
|
||||||
|
|
||||||
|
from ffx.model.media_tag import MediaTag
|
||||||
|
from ffx.model.track_tag import TrackTag
|
||||||
|
|
||||||
|
|
||||||
|
class TagController():
|
||||||
|
|
||||||
|
def __init__(self, context):
|
||||||
|
|
||||||
|
self.context = context
|
||||||
|
self.Session = self.context['database']['session'] # convenience
|
||||||
|
|
||||||
|
|
||||||
|
def updateMediaTag(self, patternId, tagKey, tagValue):
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
|
||||||
|
q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId),
|
||||||
|
MediaTag.key == str(tagKey))
|
||||||
|
tag = q.first()
|
||||||
|
if tag:
|
||||||
|
tag.value = str(tagValue)
|
||||||
|
else:
|
||||||
|
tag = MediaTag(pattern_id = int(patternId),
|
||||||
|
key = str(tagKey),
|
||||||
|
value = str(tagValue))
|
||||||
|
s.add(tag)
|
||||||
|
s.commit()
|
||||||
|
|
||||||
|
return int(tag.id)
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.updateTrackTag(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
def updateTrackTag(self, trackId, tagKey, tagValue):
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
|
||||||
|
q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId),
|
||||||
|
TrackTag.key == str(tagKey))
|
||||||
|
tag = q.first()
|
||||||
|
if tag:
|
||||||
|
tag.value = str(tagValue)
|
||||||
|
else:
|
||||||
|
tag = TrackTag(track_id = int(trackId),
|
||||||
|
key = str(tagKey),
|
||||||
|
value = str(tagValue))
|
||||||
|
s.add(tag)
|
||||||
|
s.commit()
|
||||||
|
|
||||||
|
return int(tag.id)
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.updateTrackTag(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
def deleteMediaTagByKey(self, patternId, tagKey):
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
|
||||||
|
tag = s.query(MediaTag).filter(
|
||||||
|
MediaTag.pattern_id == int(patternId),
|
||||||
|
MediaTag.key == str(tagKey),
|
||||||
|
).first()
|
||||||
|
if tag is not None:
|
||||||
|
s.delete(tag)
|
||||||
|
s.commit()
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.deleteMediaTagByKey(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
def deleteTrackTagByKey(self, trackId, tagKey):
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
|
||||||
|
q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId),
|
||||||
|
TrackTag.key == str(tagKey))
|
||||||
|
tag = q.first()
|
||||||
|
if tag:
|
||||||
|
s.delete(tag)
|
||||||
|
s.commit()
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.deleteTrackTagByKey(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
def findAllMediaTags(self, patternId) -> dict:
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
|
||||||
|
tags = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId)).all()
|
||||||
|
return {t.key:t.value for t in tags}
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.findAllMediaTags(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
|
||||||
|
def findAllTrackTags(self, trackId) -> dict:
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
|
||||||
|
tags = s.query(TrackTag).filter(TrackTag.track_id == int(trackId)).all()
|
||||||
|
return {t.key:t.value for t in tags}
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.findAllTracks(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
|
||||||
|
def findMediaTag(self, trackId : int, trackKey : str) -> MediaTag:
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
return s.query(Track).filter(MediaTag.track_id == int(trackId), MediaTag.key == str(trackKey)).first()
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.findMediaTag(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
def findTrackTag(self, trackId : int, tagKey : str) -> TrackTag:
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
return s.query(TrackTag).filter(
|
||||||
|
TrackTag.track_id == int(trackId),
|
||||||
|
TrackTag.key == str(tagKey),
|
||||||
|
).first()
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.findTrackTag(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def deleteMediaTag(self, tagId) -> bool:
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
tag = s.query(MediaTag).filter(MediaTag.id == int(tagId)).first()
|
||||||
|
|
||||||
|
if tag is not None:
|
||||||
|
|
||||||
|
s.delete(tag)
|
||||||
|
|
||||||
|
s.commit()
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.deleteMediaTag(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
|
||||||
|
def deleteTrackTag(self, tagId : int) -> bool:
|
||||||
|
|
||||||
|
if type(tagId) is not int:
|
||||||
|
raise TypeError('TagController.deleteTrackTag(): Argument tagId is required to be of type int')
|
||||||
|
|
||||||
|
try:
|
||||||
|
s = self.Session()
|
||||||
|
tag = s.query(TrackTag).filter(TrackTag.id == int(tagId)).first()
|
||||||
|
|
||||||
|
if tag is not None:
|
||||||
|
|
||||||
|
s.delete(tag)
|
||||||
|
|
||||||
|
s.commit()
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
raise click.ClickException(f"TagController.deleteTrackTag(): {repr(ex)}")
|
||||||
|
finally:
|
||||||
|
s.close()
|
||||||
110
src/ffx/tag_delete_screen.py
Normal file
110
src/ffx/tag_delete_screen.py
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
# Screen[dict[int, str, int]]
class TagDeleteScreen(Screen):
    """Confirmation dialog for deleting a single tag.

    Shows the tag's key and value read-only; Delete dismisses with the
    (key, value) tuple so the caller can perform the actual deletion,
    Cancel (or escape) pops the screen without a result.
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    CSS = """

    Grid {
        grid-size: 4 9;
        grid-rows: 2 2 2 2 2 2 2 2 2;
        grid-columns: 18 1fr 1fr 1fr;
        height: 100%;
        width: 100%;
        min-width: 90;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }
    .four {
        column-span: 4;
    }
    .five {
        column-span: 5;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, key=None, value=None):
        # key/value: the tag to display for confirmation.
        super().__init__()
        self.__key = key
        self.__value = value


    def on_mount(self):
        """Fill the read-only key/value labels once widgets exist."""
        self.query_one("#keylabel", Static).update(str(self.__key))
        self.query_one("#valuelabel", Static).update(str(self.__value))


    def compose(self):
        """Build the confirmation dialog: question, key/value rows, Delete/Cancel."""
        yield Header()

        with Grid():

            # Row 1
            yield Static(t("Are you sure to delete this tag?"), id="toplabel", classes="five")

            # Row 2
            yield Static(t("Key"))
            yield Static(" ", id="keylabel", classes="four")

            # Row 3
            yield Static(t("Value"))
            yield Static(" ", id="valuelabel", classes="four")

            # Row 4
            yield Static(" ", classes="five")

            # Row 5
            yield Button(t("Delete"), id="delete_button")
            yield Button(t("Cancel"), id="cancel_button")

        yield Footer()


    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Delete dismisses with the (key, value) tuple; Cancel pops the screen."""
        if event.button.id == "delete_button":

            tag = (self.__key, self.__value)
            self.dismiss(tag)

        if event.button.id == "cancel_button":
            self.app.pop_screen()

    def action_back(self):
        """Key binding (escape): leave this screen, or exit when it is the root."""
        go_back_or_exit(self)
|
||||||
146
src/ffx/tag_details_screen.py
Normal file
146
src/ffx/tag_details_screen.py
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button, Input
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
# Screen[dict[int, str, int]]
class TagDetailsScreen(Screen):
    """Input dialog for creating or editing a single tag.

    Prefills the key/value inputs when constructed with existing values.
    Save dismisses with the (key, value) tuple; Cancel (or escape) pops
    the screen without a result.
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    CSS = """

    Grid {
        grid-size: 5 20;
        grid-rows: 2 2 2 2 2 3 2 2 2 2 2 6 2 2 6 2 2 2 2 6;
        grid-columns: 18 1fr 1fr 1fr 5fr;
        height: 100%;
        width: 100%;
        min-width: 100;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    SelectionList {
        border: none;
        min-height: 6;
    }
    Select {
        border: none;
    }

    DataTable {
        min-height: 6;
    }

    DataTable .datatable--cursor {
        background: darkorange;
        color: black;
    }

    DataTable .datatable--header {
        background: steelblue;
        color: white;
    }

    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }

    .four {
        column-span: 4;
    }
    .five {
        column-span: 5;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, key=None, value=None):
        # key/value: initial contents for the inputs (None = empty form).
        super().__init__()
        self.__key = key
        self.__value = value


    def on_mount(self):
        """Prefill the key/value inputs with the constructor arguments, if given."""
        if self.__key is not None:
            self.query_one("#key_input", Input).value = str(self.__key)

        if self.__value is not None:
            self.query_one("#value_input", Input).value = str(self.__value)


    def compose(self):
        """Build the edit form: key/value inputs, Save/Cancel, message area."""
        yield Header()

        with Grid():

            # Row 1
            yield Static(t("Key"))
            yield Input(id="key_input", classes="four")

            # Row 2
            yield Static(t("Value"))
            yield Input(id="value_input", classes="four")

            # Row 3
            yield Static(" ", classes="five")

            # Row 4
            yield Button(t("Save"), id="save_button")
            yield Button(t("Cancel"), id="cancel_button")

            # Row 5
            yield Static(" ", classes="five")

            # Row 6
            yield Static(" ", classes="five", id="messagestatic")

        yield Footer(id="footer")


    def getTagFromInput(self):
        """Return the current form contents as a (key, value) tuple of strings."""
        tagKey = self.query_one("#key_input", Input).value
        tagValue = self.query_one("#value_input", Input).value

        return (tagKey, tagValue)

    def action_back(self):
        """Key binding (escape): leave this screen, or exit when it is the root."""
        go_back_or_exit(self)


    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Save dismisses with the (key, value) tuple; Cancel pops the screen."""
        # Check if the button pressed is the one we are interested in
        if event.button.id == "save_button":
            self.dismiss(self.getTagFromInput())

        if event.button.id == "cancel_button":
            self.app.pop_screen()
|
||||||
135
src/ffx/tmdb_controller.py
Normal file
135
src/ffx/tmdb_controller.py
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
import os, requests, time
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from .logging_utils import get_ffx_logger
|
||||||
|
|
||||||
|
|
||||||
|
class TMDB_REQUEST_EXCEPTION(Exception):
    """Raised when a TMDB API call returns an error payload.

    The exception message carries the TMDB status code and status message.
    """

    def __init__(self, statusCode, statusMessage):
        super().__init__(
            f"TMDB query failed with status code {statusCode}: {statusMessage}"
        )
|
||||||
|
|
||||||
|
class TMDB_API_KEY_NOT_PRESENT_EXCEPTION(Exception):
    """Raised when the TMDB_API_KEY environment variable is not set."""

    _MESSAGE = 'TMDB api key is not available, please set environment variable TMDB_API_KEY'

    def __str__(self):
        return self._MESSAGE
|
||||||
|
|
||||||
|
class TMDB_EXCESSIVE_USAGE_EXCEPTION(Exception):
    """Raised after the rate-limit retry budget has been exhausted."""

    def __str__(self):
        return 'Rate limit was triggered too often'
|
||||||
|
|
||||||
|
|
||||||
|
class TmdbController():
    """Minimal client for the TMDB v3 REST API.

    Handles API-key lookup, rate-limit (HTTP 429) retries and error-payload
    detection for the few TV endpoints this project needs.
    """

    DEFAULT_LANGUAGE = 'de-DE'

    # Back off this long (seconds) after an HTTP 429, at most RETRIES times.
    RATE_LIMIT_WAIT_SECONDS = 10
    RATE_LIMIT_RETRIES = 3

    # Abort a stalled HTTP request instead of blocking the caller forever.
    REQUEST_TIMEOUT_SECONDS = 30

    def __init__(self, context = None):
        """Set up logger and API key.

        Args:
            context: optional application context dict providing 'logger';
                when None a default ffx logger is created.

        Raises:
            TMDB_API_KEY_NOT_PRESENT_EXCEPTION: if TMDB_API_KEY is unset.
        """
        self.__context = context

        if context is None:
            self.__logger = get_ffx_logger()
        else:
            self.__logger = context['logger']

        self.__tmdbApiKey = os.environ.get('TMDB_API_KEY', None)
        if self.__tmdbApiKey is None:
            # Raise an instance (not the bare class) for consistency with
            # the other raise sites in this module.
            raise TMDB_API_KEY_NOT_PRESENT_EXCEPTION()

        self.tmdbLanguage = TmdbController.DEFAULT_LANGUAGE

    def getTmdbRequest(self, tmdbUrl):
        """GET *tmdbUrl* and return the decoded JSON payload.

        Retries up to RATE_LIMIT_RETRIES times on HTTP 429, sleeping
        RATE_LIMIT_WAIT_SECONDS between attempts.

        Raises:
            TMDB_EXCESSIVE_USAGE_EXCEPTION: when the retry budget runs out.
            TMDB_REQUEST_EXCEPTION: when TMDB returns an error payload.
        """
        retries = TmdbController.RATE_LIMIT_RETRIES
        while True:
            # Explicit timeout: requests has no default and would otherwise
            # hang indefinitely on a dead connection.
            response = requests.get(tmdbUrl, timeout=TmdbController.REQUEST_TIMEOUT_SECONDS)
            if response.status_code == 429:
                if not retries:
                    raise TMDB_EXCESSIVE_USAGE_EXCEPTION()
                self.__logger.warning('TMDB Rate limit (status_code 429)')
                time.sleep(TmdbController.RATE_LIMIT_WAIT_SECONDS)
                retries -= 1
            else:
                jsonResult = response.json()
                # TMDB signals errors with success=False plus status fields.
                if 'success' in jsonResult and not jsonResult['success']:
                    raise TMDB_REQUEST_EXCEPTION(jsonResult['status_code'], jsonResult['status_message'])
                return jsonResult

    def queryShow(self, showId):
        """Fetch the TMDB "TV series details" object for *showId*.

        Notable first-level keys of the response: name, original_name,
        first_air_date / last_air_date (YYYY-MM-DD), number_of_seasons,
        number_of_episodes, genres, seasons, networks, status, overview,
        vote_average, vote_count, in_production, ...
        """
        urlParams = f"?language={self.tmdbLanguage}&api_key={self.__tmdbApiKey}"
        tmdbUrl = f"https://api.themoviedb.org/3/tv/{showId}{urlParams}"
        return self.getTmdbRequest(tmdbUrl)

    def getShowNameAndYear(self, showId: int):
        """Return (name, first-air year) for the show with id *showId*.

        NOTE(review): assumes 'first_air_date' is present and well-formed
        in the TMDB response — strptime raises ValueError otherwise.
        """
        showResult = self.queryShow(int(showId))
        firstAirDate = datetime.strptime(showResult['first_air_date'], '%Y-%m-%d')
        return str(showResult['name']), int(firstAirDate.year)

    def queryEpisode(self, showId, season, episode):
        """Fetch the TMDB "TV episode details" object.

        Notable first-level keys of the response: air_date (YYYY-MM-DD),
        name, overview, episode_number, season_number, runtime, still_path,
        vote_average, vote_count, crew, guest_stars, production_code, ...
        """
        urlParams = f"?language={self.tmdbLanguage}&api_key={self.__tmdbApiKey}"
        tmdbUrl = f"https://api.themoviedb.org/3/tv/{showId}/season/{season}/episode/{episode}{urlParams}"
        return self.getTmdbRequest(tmdbUrl)
|
||||||
58
src/ffx/track_codec.py
Normal file
58
src/ffx/track_codec.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class TrackCodec(Enum):
    """Known stream codecs, each mapped to its ffmpeg identifier, ffmpeg
    output format, file extension and human-readable label.

    'format' / 'extension' may be None where no sensible value exists.
    """

    # video
    H265 = {'identifier': 'hevc', 'format': 'h265', 'extension': 'h265', 'label': 'H.265'}
    H264 = {'identifier': 'h264', 'format': 'h264', 'extension': 'h264', 'label': 'H.264'}
    MPEG4 = {'identifier': 'mpeg4', 'format': 'm4v', 'extension': 'm4v', 'label': 'MPEG-4'}
    MPEG2 = {'identifier': 'mpeg2video', 'format': 'mpeg2video', 'extension': 'mpg', 'label': 'MPEG-2'}

    # audio
    AAC = {'identifier': 'aac', 'format': None, 'extension': 'aac', 'label': 'AAC'}
    AC3 = {'identifier': 'ac3', 'format': 'ac3', 'extension': 'ac3', 'label': 'AC3'}
    EAC3 = {'identifier': 'eac3', 'format': 'eac3', 'extension': 'eac3', 'label': 'EAC3'}
    DTS = {'identifier': 'dts', 'format': 'dts', 'extension': 'dts', 'label': 'DTS'}
    MP3 = {'identifier': 'mp3', 'format': 'mp3', 'extension': 'mp3', 'label': 'MP3'}

    # subtitles / attachments
    SRT = {'identifier': 'subrip', 'format': 'srt', 'extension': 'srt', 'label': 'SRT'}
    ASS = {'identifier': 'ass', 'format': 'ass', 'extension': 'ass', 'label': 'ASS'}
    TTF = {'identifier': 'ttf', 'format': None, 'extension': 'ttf', 'label': 'TTF'}
    PGS = {'identifier': 'hdmv_pgs_subtitle', 'format': 'sup', 'extension': 'sup', 'label': 'PGS'}
    VOBSUB = {'identifier': 'dvd_subtitle', 'format': None, 'extension': 'mkv', 'label': 'VobSub'}

    # images
    PNG = {'identifier': 'png', 'format': None, 'extension': 'png', 'label': 'PNG'}

    UNKNOWN = {'identifier': 'unknown', 'format': None, 'extension': None, 'label': 'UNKNOWN'}

    def identifier(self):
        """Returns the ffmpeg codec identifier"""
        return str(self.value['identifier'])

    def label(self):
        """Returns the human-readable codec name"""
        return str(self.value['label'])

    def format(self):
        """Returns the ffmpeg format name (None when not applicable)"""
        return self.value['format']

    def extension(self):
        """Returns the corresponding file extension"""
        return str(self.value['extension'])

    @staticmethod
    def identify(identifier: str):
        """Return the codec matching an ffmpeg identifier, UNKNOWN otherwise."""
        for codec in TrackCodec:
            if codec.value['identifier'] == str(identifier):
                return codec
        return TrackCodec.UNKNOWN

    @staticmethod
    def fromLabel(label: str):
        """Return the codec matching a human-readable label, UNKNOWN otherwise.

        Bug fix: this previously compared against 'identifier' (copy-paste
        from identify()), so lookups by label always returned UNKNOWN.
        """
        for codec in TrackCodec:
            if codec.value['label'] == str(label):
                return codec
        return TrackCodec.UNKNOWN
|
||||||
278
src/ffx/track_controller.py
Normal file
278
src/ffx/track_controller.py
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from ffx.model.track import Track
|
||||||
|
|
||||||
|
from .track_type import TrackType
|
||||||
|
|
||||||
|
from .track_disposition import TrackDisposition
|
||||||
|
|
||||||
|
from .track_type import TrackType
|
||||||
|
|
||||||
|
from ffx.model.track_tag import TrackTag
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
|
||||||
|
|
||||||
|
class TrackController():
    """CRUD operations for Track rows and their TrackTag children."""

    def __init__(self, context):
        """Keep references to the app context, the session factory and the
        metadata filter lists from the configuration.

        Args:
            context: application context providing 'database' (with a
                'session' factory) and 'config'.
        """
        self.context = context
        self.Session = self.context['database']['session']  # convenience

        self.__configurationData = self.context['config'].getData()

        metadataConfiguration = self.__configurationData.get('metadata', {})
        streamsConfiguration = metadataConfiguration.get('streams', {})

        self.__signatureTags = metadataConfiguration.get('signature', {})
        self.__removeGlobalKeys = metadataConfiguration.get('remove', [])
        self.__ignoreGlobalKeys = metadataConfiguration.get('ignore', [])
        self.__removeTrackKeys = streamsConfiguration.get('remove', [])
        self.__ignoreTrackKeys = streamsConfiguration.get('ignore', [])

    def addTrack(self, trackDescriptor : TrackDescriptor, patternId = None):
        """Persist a new Track (plus its filtered tags) for a pattern.

        Args:
            trackDescriptor: the descriptor to persist.
            patternId: optional override in case the descriptor has not set
                its pattern id.

        Raises:
            click.ClickException: wrapping any database error.
        """
        # option to override pattern id in case track descriptor has not set it
        patId = int(trackDescriptor.getPatternId() if patternId is None else patternId)

        # Create the session before the try block; otherwise a failing
        # Session() would leave `s` unbound and the finally clause would
        # raise NameError, masking the real error.
        s = self.Session()
        try:
            track = Track(pattern_id = patId,
                          track_type = int(trackDescriptor.getType().index()),
                          codec_name = str(trackDescriptor.getCodec().identifier()),
                          index = int(trackDescriptor.getIndex()),
                          source_index = int(trackDescriptor.getSourceIndex()),
                          disposition_flags = int(TrackDisposition.toFlags(trackDescriptor.getDispositionSet())),
                          audio_layout = trackDescriptor.getAudioLayout().index())

            s.add(track)
            s.commit()

            for k, v in trackDescriptor.getTags().items():
                # Filter tags that make no sense to preserve
                if k not in self.__ignoreTrackKeys and k not in self.__removeTrackKeys:
                    s.add(TrackTag(track_id=track.id, key=k, value=v))
                    s.commit()

        except Exception as ex:
            raise click.ClickException(f"TrackController.addTrack(): {repr(ex)}")
        finally:
            s.close()

    def updateTrack(self, trackId, trackDescriptor : TrackDescriptor):
        """Update an existing Track (and sync its tags) from a descriptor.

        Returns:
            bool: True when the track existed and was updated, else False.

        Raises:
            TypeError: when trackDescriptor has the wrong type.
            click.ClickException: wrapping any database error.
        """
        if type(trackDescriptor) is not TrackDescriptor:
            raise TypeError('TrackController.updateTrack(): Argument trackDescriptor is required to be of type TrackDescriptor')

        s = self.Session()
        try:
            track = s.query(Track).filter(Track.id == int(trackId)).first()

            if track is None:
                return False

            track.index = int(trackDescriptor.getIndex())
            track.track_type = int(trackDescriptor.getType().index())
            track.codec_name = str(trackDescriptor.getCodec().identifier())
            track.audio_layout = int(trackDescriptor.getAudioLayout().index())
            track.disposition_flags = int(TrackDisposition.toFlags(trackDescriptor.getDispositionSet()))

            descriptorTags = trackDescriptor.getTags()
            tagKeysInDescriptor = set(descriptorTags.keys())
            dbTagsByKey = {t.key: t for t in track.track_tags}
            tagKeysInDb = set(dbTagsByKey.keys())

            for k in tagKeysInDescriptor & tagKeysInDb:    # to update
                dbTagsByKey[k].value = descriptorTags[k]
            for k in tagKeysInDescriptor - tagKeysInDb:    # to add
                s.add(TrackTag(track_id=track.id, key=k, value=descriptorTags[k]))
            for k in tagKeysInDb - tagKeysInDescriptor:    # to remove
                s.delete(dbTagsByKey[k])

            s.commit()
            return True

        except Exception as ex:
            raise click.ClickException(f"TrackController.updateTrack(): {repr(ex)}")
        finally:
            s.close()

    def findTracks(self, patternId):
        """Return all tracks of a pattern, sorted by their output index."""
        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId))
            return sorted(q.all(), key=lambda t: t.getIndex())

        except Exception as ex:
            raise click.ClickException(f"TrackController.findTracks(): {repr(ex)}")
        finally:
            s.close()

    def findSiblingDescriptors(self, patternId):
        """Finds all stored tracks related to a pattern, packs them in
        descriptors while assigning per-type sub indices, and returns the
        list of descriptors."""
        siblingDescriptors = []
        subIndexCounter = {}

        st: Track
        for st in self.findTracks(patternId):
            trackType = st.getType()
            # sub index counts tracks of the same type, 0-based
            subIndex = subIndexCounter.get(trackType, 0)
            siblingDescriptors.append(st.getDescriptor(subIndex=subIndex))
            subIndexCounter[trackType] = subIndex + 1

        return siblingDescriptors

    # TODO: could be merged into findTracks() via an optional type filter
    def __findTracksOfType(self, patternId, trackType, caller):
        """Shared query behind the three by-type finders below."""
        s = self.Session()
        try:
            q = s.query(Track).filter(Track.pattern_id == int(patternId),
                                      Track.track_type == trackType.index())
            return q.all()

        except Exception as ex:
            raise click.ClickException(f"TrackController.{caller}(): {repr(ex)}")
        finally:
            s.close()

    def findVideoTracks(self, patternId):
        """Return all video tracks of a pattern (unsorted)."""
        return self.__findTracksOfType(patternId, TrackType.VIDEO, 'findVideoTracks')

    def findAudioTracks(self, patternId):
        """Return all audio tracks of a pattern (unsorted)."""
        return self.__findTracksOfType(patternId, TrackType.AUDIO, 'findAudioTracks')

    def findSubtitleTracks(self, patternId):
        """Return all subtitle tracks of a pattern (unsorted)."""
        return self.__findTracksOfType(patternId, TrackType.SUBTITLE, 'findSubtitleTracks')

    def getTrack(self, patternId : int, index: int) -> Track:
        """Return the track of *patternId* at output *index*, or None."""
        s = self.Session()
        try:
            return s.query(Track).filter(
                Track.pattern_id == int(patternId),
                Track.index == int(index),
            ).first()

        except Exception as ex:
            raise click.ClickException(f"TrackController.getTrack(): {repr(ex)}")
        finally:
            s.close()

    def setDispositionState(self, patternId: int, index: int, disposition : TrackDisposition, state : bool):
        """Set (state=True) or clear (state=False) one disposition flag.

        Returns:
            bool: True when the track exists, False otherwise.

        Raises:
            TypeError: on wrongly typed arguments.
            click.ClickException: wrapping any database error.
        """
        if type(patternId) is not int:
            raise TypeError('TrackController.setTrackDisposition(): Argument patternId is required to be of type int')
        if type(index) is not int:
            raise TypeError('TrackController.setTrackDisposition(): Argument index is required to be of type int')
        if type(disposition) is not TrackDisposition:
            raise TypeError('TrackController.setTrackDisposition(): Argument disposition is required to be of type TrackDisposition')
        if type(state) is not bool:
            raise TypeError('TrackController.setTrackDisposition(): Argument state is required to be of type bool')

        s = self.Session()
        try:
            track = s.query(Track).filter(Track.pattern_id == patternId, Track.index == index).first()

            if track is None:
                return False

            if state:
                track.setDisposition(disposition)
            else:
                track.resetDisposition(disposition)

            s.commit()
            return True

        except Exception as ex:
            # error message previously claimed "updateTrack" (copy/paste)
            raise click.ClickException(f"TrackController.setDispositionState(): {repr(ex)}")
        finally:
            s.close()

    def deleteTrack(self, trackId):
        """Delete a track and close the index gap among its siblings.

        Refuses to delete the last remaining track of a pattern.

        Returns:
            bool: True on deletion, False when trackId is unknown.

        Raises:
            click.ClickException: for the last-track case or any db error.
        """
        s = self.Session()
        try:
            track = s.query(Track).filter(Track.id == int(trackId)).first()

            if track is None:
                return False

            patternId = int(track.pattern_id)

            siblingTracks = (s.query(Track)
                             .filter(Track.pattern_id == patternId)
                             .order_by(Track.index)
                             .all())

            if len(siblingTracks) <= 1:
                raise click.ClickException(
                    f"Cannot delete the last track from pattern #{patternId}. Patterns must define at least one track."
                )

            # delete the victim, re-number the survivors contiguously
            newIndex = 0
            for sibling in siblingTracks:
                if sibling.id == int(trackId):
                    s.delete(sibling)
                else:
                    sibling.index = newIndex
                    newIndex += 1

            s.commit()
            return True

        except click.ClickException:
            # don't re-wrap our own user-facing message in repr() noise
            raise
        except Exception as ex:
            raise click.ClickException(f"TrackController.deleteTrack(): {repr(ex)}")
        finally:
            s.close()
|
||||||
|
|
||||||
|
|
||||||
|
# def setDefaultSubTrack(self, trackType, subIndex):
|
||||||
|
# pass
|
||||||
|
#
|
||||||
|
# def setForcedSubTrack(self, trackType, subIndex):
|
||||||
|
# pass
|
||||||
134
src/ffx/track_delete_screen.py
Normal file
134
src/ffx/track_delete_screen.py
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button
|
||||||
|
from textual.containers import Grid
|
||||||
|
|
||||||
|
from ffx.track_descriptor import TrackDescriptor
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import go_back_or_exit
|
||||||
|
|
||||||
|
|
||||||
|
# Screen[dict[int, str, int]]
|
||||||
|
class TrackDeleteScreen(Screen):
    """Confirmation dialog shown before a track is removed from a pattern.

    Dismisses with the track's descriptor when deletion is confirmed;
    Cancel/escape leave the screen without a result.
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    CSS = """

    Grid {
        grid-size: 4 9;
        grid-rows: 2 2 2 2 2 2 2 2 2;
        grid-columns: 18 1fr 1fr 1fr;
        height: 100%;
        width: 100%;
        min-width: 90;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }
    .four {
        column-span: 4;
    }

    .box {
        height: 100%;
        border: solid green;
    }
    """

    def __init__(self, trackDescriptor : TrackDescriptor):
        """Keep the descriptor of the track that is up for deletion."""
        super().__init__()

        if type(trackDescriptor) is not TrackDescriptor:
            raise click.ClickException('TrackDeleteScreen.init(): trackDescriptor is required to be of type TrackDescriptor')

        self.__descriptor = trackDescriptor

    def on_mount(self):
        """Fill the info labels with the track's data once mounted."""
        labels = (
            ("#subindexlabel", self.__descriptor.getSubIndex()),
            ("#patternlabel", self.__descriptor.getPatternId()),
            ("#languagelabel", self.__descriptor.getLanguage().label()),
            ("#titlelabel", self.__descriptor.getTitle()),
        )
        for selector, content in labels:
            self.query_one(selector, Static).update(str(content))

    def compose(self):
        """Yield header, the 9-row confirmation grid, and footer."""
        yield Header()

        with Grid():

            # Row 1: the confirmation question
            yield Static(
                t(
                    "Are you sure to delete the following {track_type} track?",
                    track_type=t(self.__descriptor.getType().label()),
                ),
                id="toplabel",
                classes="four",
            )

            # Row 2: sub index
            yield Static(t("sub index"))
            yield Static(" ", id="subindexlabel", classes="three")

            # Row 3: owning pattern
            yield Static(t("from pattern"))
            yield Static(" ", id="patternlabel", classes="three")

            # Row 4: spacer
            yield Static(" ", classes="four")

            # Row 5: language
            yield Static(t("Language"))
            yield Static(" ", id="languagelabel", classes="three")

            # Row 6: title
            yield Static(t("Title"))
            yield Static(" ", id="titlelabel", classes="three")

            # Rows 7 and 8: spacers
            yield Static(" ", classes="four")
            yield Static(" ", classes="four")

            # Row 9: the action buttons
            yield Button(t("Delete"), id="delete_button")
            yield Button(t("Cancel"), id="cancel_button")

        yield Footer()

    # Event handler for button press
    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Dismiss with the descriptor on Delete, just pop on Cancel."""
        if event.button.id == "delete_button":
            self.dismiss(self.__descriptor)
        elif event.button.id == "cancel_button":
            self.app.pop_screen()

    def action_back(self):
        """Leave the dialog without deleting anything."""
        go_back_or_exit(self)
|
||||||
367
src/ffx/track_descriptor.py
Normal file
367
src/ffx/track_descriptor.py
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
from typing import Self
|
||||||
|
|
||||||
|
from .iso_language import IsoLanguage
|
||||||
|
from .track_type import TrackType
|
||||||
|
from .audio_layout import AudioLayout
|
||||||
|
from .track_disposition import TrackDisposition
|
||||||
|
from .track_codec import TrackCodec
|
||||||
|
from .logging_utils import get_ffx_logger
|
||||||
|
|
||||||
|
# from .helper import dictDiff, setDiff
|
||||||
|
|
||||||
|
|
||||||
|
class TrackDescriptor:
|
||||||
|
|
||||||
|
CONTEXT_KEY = "context"
|
||||||
|
|
||||||
|
ID_KEY = "id"
|
||||||
|
INDEX_KEY = "index"
|
||||||
|
SOURCE_INDEX_KEY = "source_index"
|
||||||
|
SUB_INDEX_KEY = "sub_index"
|
||||||
|
PATTERN_ID_KEY = "pattern_id"
|
||||||
|
EXTERNAL_SOURCE_FILE_PATH_KEY = "external_source_file"
|
||||||
|
|
||||||
|
DISPOSITION_SET_KEY = "disposition_set"
|
||||||
|
TAGS_KEY = "tags"
|
||||||
|
|
||||||
|
TRACK_TYPE_KEY = "track_type"
|
||||||
|
CODEC_KEY = "codec_name"
|
||||||
|
AUDIO_LAYOUT_KEY = "audio_layout"
|
||||||
|
|
||||||
|
FFPROBE_INDEX_KEY = "index"
|
||||||
|
FFPROBE_DISPOSITION_KEY = "disposition"
|
||||||
|
FFPROBE_TAGS_KEY = "tags"
|
||||||
|
FFPROBE_CODEC_TYPE_KEY = "codec_type"
|
||||||
|
FFPROBE_CODEC_KEY = "codec_name"
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
|
||||||
|
if TrackDescriptor.CONTEXT_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.CONTEXT_KEY]) is not dict:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDescriptor.__init__(): Argument {TrackDescriptor.CONTEXT_KEY} is required to be of type dict"
|
||||||
|
)
|
||||||
|
self.__context = kwargs[TrackDescriptor.CONTEXT_KEY]
|
||||||
|
self.__logger = self.__context['logger']
|
||||||
|
else:
|
||||||
|
self.__context = {}
|
||||||
|
self.__logger = get_ffx_logger()
|
||||||
|
|
||||||
|
if TrackDescriptor.ID_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.ID_KEY]) is not int:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.ID_KEY} is required to be of type int"
|
||||||
|
)
|
||||||
|
self.__trackId = kwargs[TrackDescriptor.ID_KEY]
|
||||||
|
else:
|
||||||
|
self.__trackId = -1
|
||||||
|
|
||||||
|
if TrackDescriptor.PATTERN_ID_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.PATTERN_ID_KEY]) is not int:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.PATTERN_ID_KEY} is required to be of type int"
|
||||||
|
)
|
||||||
|
self.__patternId = kwargs[TrackDescriptor.PATTERN_ID_KEY]
|
||||||
|
else:
|
||||||
|
self.__patternId = -1
|
||||||
|
|
||||||
|
if TrackDescriptor.EXTERNAL_SOURCE_FILE_PATH_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.EXTERNAL_SOURCE_FILE_PATH_KEY]) is not str:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.EXTERNAL_SOURCE_FILE_PATH_KEY} is required to be of type str"
|
||||||
|
)
|
||||||
|
self.__externalSourceFilePath = kwargs[TrackDescriptor.EXTERNAL_SOURCE_FILE_PATH_KEY]
|
||||||
|
else:
|
||||||
|
self.__externalSourceFilePath = ''
|
||||||
|
|
||||||
|
if TrackDescriptor.INDEX_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.INDEX_KEY]) is not int:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.INDEX_KEY} is required to be of type int"
|
||||||
|
)
|
||||||
|
self.__index = kwargs[TrackDescriptor.INDEX_KEY]
|
||||||
|
else:
|
||||||
|
self.__index = -1
|
||||||
|
|
||||||
|
if (
|
||||||
|
TrackDescriptor.SOURCE_INDEX_KEY in kwargs.keys()
|
||||||
|
and type(kwargs[TrackDescriptor.SOURCE_INDEX_KEY]) is int
|
||||||
|
):
|
||||||
|
self.__sourceIndex = kwargs[TrackDescriptor.SOURCE_INDEX_KEY]
|
||||||
|
else:
|
||||||
|
self.__sourceIndex = self.__index
|
||||||
|
|
||||||
|
if TrackDescriptor.SUB_INDEX_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.SUB_INDEX_KEY]) is not int:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.SUB_INDEX_KEY} is required to be of type int"
|
||||||
|
)
|
||||||
|
self.__subIndex = kwargs[TrackDescriptor.SUB_INDEX_KEY]
|
||||||
|
else:
|
||||||
|
self.__subIndex = -1
|
||||||
|
|
||||||
|
if TrackDescriptor.TRACK_TYPE_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.TRACK_TYPE_KEY]) is not TrackType:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.TRACK_TYPE_KEY} is required to be of type TrackType"
|
||||||
|
)
|
||||||
|
self.__trackType = kwargs[TrackDescriptor.TRACK_TYPE_KEY]
|
||||||
|
else:
|
||||||
|
self.__trackType = TrackType.UNKNOWN
|
||||||
|
|
||||||
|
if TrackDescriptor.CODEC_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.CODEC_KEY]) is not TrackCodec:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.CODEC_KEY} is required to be of type TrackCodec"
|
||||||
|
)
|
||||||
|
self.__trackCodec = kwargs[TrackDescriptor.CODEC_KEY]
|
||||||
|
else:
|
||||||
|
self.__trackCodec = TrackCodec.UNKNOWN
|
||||||
|
|
||||||
|
if TrackDescriptor.TAGS_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.TAGS_KEY]) is not dict:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.TAGS_KEY} is required to be of type dict"
|
||||||
|
)
|
||||||
|
self.__trackTags = kwargs[TrackDescriptor.TAGS_KEY]
|
||||||
|
else:
|
||||||
|
self.__trackTags = {}
|
||||||
|
|
||||||
|
if TrackDescriptor.DISPOSITION_SET_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.DISPOSITION_SET_KEY]) is not set:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.DISPOSITION_SET_KEY} is required to be of type set"
|
||||||
|
)
|
||||||
|
for d in kwargs[TrackDescriptor.DISPOSITION_SET_KEY]:
|
||||||
|
if type(d) is not TrackDisposition:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): All elements of argument set {TrackDescriptor.DISPOSITION_SET_KEY} is required to be of type TrackDisposition"
|
||||||
|
)
|
||||||
|
self.__dispositionSet = kwargs[TrackDescriptor.DISPOSITION_SET_KEY]
|
||||||
|
else:
|
||||||
|
self.__dispositionSet = set()
|
||||||
|
|
||||||
|
if TrackDescriptor.AUDIO_LAYOUT_KEY in kwargs.keys():
|
||||||
|
if type(kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY]) is not AudioLayout:
|
||||||
|
raise TypeError(
|
||||||
|
f"TrackDesciptor.__init__(): Argument {TrackDescriptor.AUDIO_LAYOUT_KEY} is required to be of type AudioLayout"
|
||||||
|
)
|
||||||
|
self.__audioLayout = kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY]
|
||||||
|
else:
|
||||||
|
self.__audioLayout = AudioLayout.LAYOUT_UNDEFINED
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fromFfprobe(cls, streamObj, subIndex: int = -1):
|
||||||
|
"""Processes ffprobe stream data as array with elements according to the following example
|
||||||
|
{
|
||||||
|
"index": 4,
|
||||||
|
"codec_name": "hdmv_pgs_subtitle",
|
||||||
|
"codec_long_name": "HDMV Presentation Graphic Stream subtitles",
|
||||||
|
"codec_type": "subtitle",
|
||||||
|
"codec_tag_string": "[0][0][0][0]",
|
||||||
|
"codec_tag": "0x0000",
|
||||||
|
"r_frame_rate": "0/0",
|
||||||
|
"avg_frame_rate": "0/0",
|
||||||
|
"time_base": "1/1000",
|
||||||
|
"start_pts": 0,
|
||||||
|
"start_time": "0.000000",
|
||||||
|
"duration_ts": 1421035,
|
||||||
|
"duration": "1421.035000",
|
||||||
|
"disposition": {
|
||||||
|
"default": 1,
|
||||||
|
"dub": 0,
|
||||||
|
"original": 0,
|
||||||
|
"comment": 0,
|
||||||
|
"lyrics": 0,
|
||||||
|
"karaoke": 0,
|
||||||
|
"forced": 0,
|
||||||
|
"hearing_impaired": 0,
|
||||||
|
"visual_impaired": 0,
|
||||||
|
"clean_effects": 0,
|
||||||
|
"attached_pic": 0,
|
||||||
|
"timed_thumbnails": 0,
|
||||||
|
"non_diegetic": 0,
|
||||||
|
"captions": 0,
|
||||||
|
"descriptions": 0,
|
||||||
|
"metadata": 0,
|
||||||
|
"dependent": 0,
|
||||||
|
"still_image": 0
|
||||||
|
},
|
||||||
|
"tags": {
|
||||||
|
"language": "ger",
|
||||||
|
"title": "German Full"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
trackType = (
|
||||||
|
TrackType.fromLabel(streamObj["codec_type"])
|
||||||
|
if "codec_type" in streamObj.keys()
|
||||||
|
else TrackType.UNKNOWN
|
||||||
|
)
|
||||||
|
|
||||||
|
if trackType != TrackType.UNKNOWN:
|
||||||
|
|
||||||
|
kwargs = {}
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.INDEX_KEY] = (
|
||||||
|
int(streamObj[TrackDescriptor.FFPROBE_INDEX_KEY])
|
||||||
|
if TrackDescriptor.FFPROBE_INDEX_KEY in streamObj.keys()
|
||||||
|
else -1
|
||||||
|
)
|
||||||
|
kwargs[TrackDescriptor.SOURCE_INDEX_KEY] = kwargs[TrackDescriptor.INDEX_KEY]
|
||||||
|
kwargs[TrackDescriptor.SUB_INDEX_KEY] = subIndex
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.TRACK_TYPE_KEY] = trackType
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.CODEC_KEY] = TrackCodec.identify(streamObj[TrackDescriptor.FFPROBE_CODEC_KEY])
|
||||||
|
|
||||||
|
kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = (
|
||||||
|
{
|
||||||
|
t
|
||||||
|
for d in (
|
||||||
|
k
|
||||||
|
for (k, v) in streamObj[
|
||||||
|
TrackDescriptor.FFPROBE_DISPOSITION_KEY
|
||||||
|
].items()
|
||||||
|
if v
|
||||||
|
)
|
||||||
|
if (t := TrackDisposition.find(d)) is not None
|
||||||
|
}
|
||||||
|
if TrackDescriptor.FFPROBE_DISPOSITION_KEY in streamObj.keys()
|
||||||
|
else set()
|
||||||
|
)
|
||||||
|
kwargs[TrackDescriptor.TAGS_KEY] = (
|
||||||
|
streamObj[TrackDescriptor.FFPROBE_TAGS_KEY]
|
||||||
|
if TrackDescriptor.FFPROBE_TAGS_KEY in streamObj.keys()
|
||||||
|
else {}
|
||||||
|
)
|
||||||
|
kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = (
|
||||||
|
AudioLayout.identify(streamObj)
|
||||||
|
if trackType == TrackType.AUDIO
|
||||||
|
else AudioLayout.LAYOUT_UNDEFINED
|
||||||
|
)
|
||||||
|
|
||||||
|
return cls(**kwargs)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def getId(self):
    """Return the database id of this track."""
    trackId = self.__trackId
    return trackId


def getPatternId(self):
    """Return the id of the pattern this track belongs to."""
    patternId = self.__patternId
    return patternId


def getIndex(self):
    """Return the track's stream index."""
    index = self.__index
    return index


def setIndex(self, index):
    """Overwrite the track's stream index."""
    self.__index = index


def getSourceIndex(self):
    """Return the index of the originating stream in the source file."""
    sourceIndex = self.__sourceIndex
    return sourceIndex


def setSourceIndex(self, sourceIndex: int):
    """Overwrite the source stream index (coerced to int)."""
    self.__sourceIndex = int(sourceIndex)


def getSubIndex(self):
    """Return the track's sub-index within its stream type."""
    subIndex = self.__subIndex
    return subIndex


def setSubIndex(self, subIndex):
    """Overwrite the track's sub-index."""
    self.__subIndex = subIndex


def getType(self):
    """Return the track's type."""
    trackType = self.__trackType
    return trackType
|
||||||
|
|
||||||
|
def getCodec(self) -> TrackCodec:
    """Return the codec identified for this track."""
    codec = self.__trackCodec
    return codec
|
||||||
|
|
||||||
|
def getLanguage(self):
    """Resolve the 'language' tag to an IsoLanguage, UNDEFINED when absent."""
    try:
        languageCode = self.__trackTags["language"]
    except KeyError:
        return IsoLanguage.UNDEFINED
    return IsoLanguage.findThreeLetter(languageCode)
|
||||||
|
|
||||||
|
def setLanguage(self, language: IsoLanguage):
    """Set the track's language tag.

    Raises:
        TypeError: when *language* is not an IsoLanguage member.

    NOTE(review): this stores the IsoLanguage member itself under the
    'language' key, while getLanguage() passes that value to
    IsoLanguage.findThreeLetter() (which is fed a three-letter string when
    tags come from ffprobe) — confirm findThreeLetter() accepts enum
    members, or consider storing the three-letter code instead.
    """
    if not type(language) is IsoLanguage:
        raise TypeError('language has to be of type IsoLanguage')
    self.__trackTags["language"] = language
|
||||||
|
|
||||||
|
def getTitle(self):
    """Return the 'title' tag as a string, or '' when it is absent."""
    try:
        return str(self.__trackTags["title"])
    except KeyError:
        return ""


def setTitle(self, title: str):
    """Store *title* (coerced to str) in the tag dictionary."""
    self.__trackTags["title"] = str(title)
|
||||||
|
|
||||||
|
|
||||||
|
def getAudioLayout(self):
    """Return the audio layout recorded for this track."""
    layout = self.__audioLayout
    return layout


def getTags(self):
    """Return the (mutable) tag dictionary of this track."""
    tags = self.__trackTags
    return tags


def getDispositionSet(self):
    """Return the (mutable) set of disposition flags."""
    dispositions = self.__dispositionSet
    return dispositions


def setDispositionSet(self, dispositionSet: set):
    """Replace the disposition set wholesale."""
    self.__dispositionSet = dispositionSet
|
||||||
|
|
||||||
|
def getDispositionFlag(self, disposition: TrackDisposition) -> bool:
    """Return True when *disposition* is currently set on this track."""
    return disposition in self.__dispositionSet


def setDispositionFlag(self, disposition: TrackDisposition, state: bool):
    """Add (*state* truthy) or remove (*state* falsy) a disposition flag."""
    mutate = self.__dispositionSet.add if state else self.__dispositionSet.discard
    mutate(disposition)
|
||||||
|
|
||||||
|
# def compare(self, vsTrackDescriptor: Self):
|
||||||
|
#
|
||||||
|
# compareResult = {}
|
||||||
|
#
|
||||||
|
# tagsDiffResult = dictKeysDiff(vsTrackDescriptor.getTags(), self.getTags())
|
||||||
|
#
|
||||||
|
# if tagsDiffResult:
|
||||||
|
# compareResult[TrackDescriptor.TAGS_KEY] = tagsDiffResult
|
||||||
|
#
|
||||||
|
# vsDispositions = vsTrackDescriptor.getDispositionSet()
|
||||||
|
# dispositions = self.getDispositionSet()
|
||||||
|
#
|
||||||
|
# dispositionDiffResult = setDiff(vsDispositions, dispositions)
|
||||||
|
#
|
||||||
|
# if dispositionDiffResult:
|
||||||
|
# compareResult[TrackDescriptor.DISPOSITION_SET_KEY] = dispositionDiffResult
|
||||||
|
#
|
||||||
|
# return compareResult
|
||||||
|
|
||||||
|
def setExternalSourceFilePath(self, filePath: str):
    """Remember the path of the external file this track originates from."""
    self.__externalSourceFilePath = str(filePath)


def getExternalSourceFilePath(self):
    """Return the recorded external source file path."""
    externalPath = self.__externalSourceFilePath
    return externalPath
|
||||||
|
|
||||||
|
def clone(self, context: dict | None = None):
    """Return a new TrackDescriptor carrying a copy of this one's state.

    The tag dictionary and disposition set are shallow-copied so the clone
    can be edited independently; enum-like fields are shared. When *context*
    is given it overrides the stored context; otherwise the stored context
    is propagated only when it is truthy.
    """
    kwargs = {
        TrackDescriptor.ID_KEY: int(self.__trackId),
        TrackDescriptor.PATTERN_ID_KEY: int(self.__patternId),
        TrackDescriptor.EXTERNAL_SOURCE_FILE_PATH_KEY: str(self.__externalSourceFilePath),
        TrackDescriptor.INDEX_KEY: int(self.__index),
        TrackDescriptor.SOURCE_INDEX_KEY: int(self.__sourceIndex),
        TrackDescriptor.SUB_INDEX_KEY: int(self.__subIndex),
        TrackDescriptor.TRACK_TYPE_KEY: self.__trackType,
        TrackDescriptor.CODEC_KEY: self.__trackCodec,
        TrackDescriptor.TAGS_KEY: dict(self.__trackTags),
        TrackDescriptor.DISPOSITION_SET_KEY: set(self.__dispositionSet),
        TrackDescriptor.AUDIO_LAYOUT_KEY: self.__audioLayout,
    }

    # Explicit override wins; otherwise propagate a truthy stored context.
    effectiveContext = context if context is not None else (self.__context or None)
    if effectiveContext is not None:
        kwargs[TrackDescriptor.CONTEXT_KEY] = effectiveContext

    return TrackDescriptor(**kwargs)
|
||||||
552
src/ffx/track_details_screen.py
Normal file
552
src/ffx/track_details_screen.py
Normal file
@@ -0,0 +1,552 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from textual.screen import Screen
|
||||||
|
from textual.widgets import Header, Footer, Static, Button, SelectionList, Select, DataTable, Input
|
||||||
|
from textual.containers import Grid
|
||||||
|
from textual.widgets._data_table import CellDoesNotExist
|
||||||
|
|
||||||
|
from .audio_layout import AudioLayout
|
||||||
|
from .iso_language import IsoLanguage
|
||||||
|
from .tag_delete_screen import TagDeleteScreen
|
||||||
|
from .tag_details_screen import TagDetailsScreen
|
||||||
|
from .track_codec import TrackCodec
|
||||||
|
from .track_descriptor import TrackDescriptor
|
||||||
|
from .track_disposition import TrackDisposition
|
||||||
|
from .track_type import TrackType
|
||||||
|
from .i18n import t
|
||||||
|
from .screen_support import add_auto_table_column, build_screen_bootstrap, go_back_or_exit, populate_tag_table
|
||||||
|
|
||||||
|
|
||||||
|
class TrackDetailsScreen(Screen):
    """Modal screen to create or edit one stream (track) descriptor.

    On save the screen validates disposition uniqueness against sibling
    tracks and dismisses with the assembled TrackDescriptor; Escape or
    Cancel leaves without saving.
    """

    BINDINGS = [
        ("escape", "back", t("Back")),
    ]

    # Screen-scoped stylesheet: a 5-column / 24-row grid, borderless input
    # widgets and orange/steelblue highlighting for the tag table.
    CSS = """

    Grid {
        grid-size: 5 24;
        grid-rows: 2 2 2 2 2 3 3 2 2 3 2 2 2 2 2 6 2 2 6 2 2 2;
        grid-columns: 18 1fr 1fr 1fr 4fr;
        height: 100%;
        width: 100%;
        min-width: 115;
        padding: 1;
        overflow-x: auto;
        overflow-y: auto;
    }

    Input {
        border: none;
    }
    Button {
        border: none;
    }
    SelectionList {
        border: none;
        min-height: 6;
    }
    Select {
        border: none;
    }

    DataTable {
        min-height: 6;
        width: 100%;
    }

    DataTable .datatable--cursor {
        background: darkorange;
        color: black;
    }

    DataTable .datatable--header {
        background: steelblue;
        color: white;
    }

    #toplabel {
        height: 1;
    }

    .two {
        column-span: 2;
    }
    .three {
        column-span: 3;
    }

    .four {
        column-span: 4;
    }
    .five {
        column-span: 5;
    }

    .box {
        height: 100%;
        border: solid green;
    }

    .yellow {
        tint: yellow 40%;
    }
    """
|
||||||
|
|
||||||
|
def __init__(
    self,
    trackDescriptor: TrackDescriptor = None,
    patternId=None,
    patternLabel: str = "",
    siblingTrackDescriptors=None,
    trackType: TrackType = None,
    index=None,
    subIndex=None,
    metadata_only: bool = False,
):
    """Initialise the screen for a new track (trackDescriptor is None) or
    for editing an existing one.

    Args:
        trackDescriptor: Existing descriptor to edit, or None to create one.
        patternId: Owning pattern id; falls back to the descriptor's pattern
            id when that is not -1, else -1.
        patternLabel: Human-readable pattern label shown in the header row.
        siblingTrackDescriptors: Other tracks of the same pattern, used for
            sub-index assignment and default/forced collision checks on save.
        trackType: Initial type for a new track.
        index: Stream index for a new track.
        subIndex: Stream sub-index for a new track.
        metadata_only: When True, the type and audio-layout selectors are
            disabled so only metadata can be changed.
    """
    super().__init__()

    bootstrap = build_screen_bootstrap(self.app.getContext())
    self.context = bootstrap.context

    self.__removeTrackKeys = bootstrap.remove_track_keys
    self.__ignoreTrackKeys = bootstrap.ignore_track_keys
    # table row key -> (tag key, tag value); kept in sync by updateTags()
    self.__tagRowData: dict[object, tuple[str, str]] = {}

    self.__isNew = trackDescriptor is None
    self.__trackDescriptor = trackDescriptor
    self.__patternId = (
        int(patternId)
        if patternId is not None
        else (
            int(trackDescriptor.getPatternId())
            if trackDescriptor is not None and trackDescriptor.getPatternId() != -1
            else -1
        )
    )
    self.__patternLabel = str(patternLabel)
    self.__siblingTrackDescriptors = list(siblingTrackDescriptors or [])
    self.__metadataOnly = bool(metadata_only)

    if self.__isNew:
        self.__trackType = trackType
        self.__trackCodec = TrackCodec.UNKNOWN
        self.__audioLayout = AudioLayout.LAYOUT_UNDEFINED
        self.__index = index
        self.__subIndex = subIndex
        self.__draftTrackTags = {}
        initial_language = IsoLanguage.UNDEFINED
        initial_title = ""
    else:
        self.__trackType = trackDescriptor.getType()
        self.__trackCodec = trackDescriptor.getCodec()
        self.__audioLayout = trackDescriptor.getAudioLayout()
        self.__index = trackDescriptor.getIndex()
        self.__subIndex = trackDescriptor.getSubIndex()
        # language/title are edited via dedicated widgets, so they are kept
        # out of the free-form tag table draft.
        self.__draftTrackTags = {
            key: value
            for key, value in trackDescriptor.getTags().items()
            if key not in ("language", "title")
        }
        initial_language = trackDescriptor.getLanguage()
        initial_title = trackDescriptor.getTitle()

    # The title is auto-derived from the selected language until the user
    # types a title that differs from the auto value.
    self.__titleAutoManaged = (
        initial_language == IsoLanguage.UNDEFINED and not str(initial_title).strip()
    )
    # Guard flag so programmatic title writes do not count as user edits.
    self.__suppressTitleChanged = False
    self.__lastAutoTitle = ""
|
||||||
|
|
||||||
|
def _descriptor_refs_same_track(self, descriptor: TrackDescriptor) -> bool:
    """Return True when *descriptor* denotes the track this screen edits.

    Prefers database ids; falls back to (pattern id, index, sub-index)
    for tracks that were never persisted.
    """
    own = self.__trackDescriptor
    if own is None:
        return False
    if descriptor.getId() != -1 and own.getId() != -1:
        return descriptor.getId() == own.getId()
    samePattern = descriptor.getPatternId() == own.getPatternId()
    sameIndex = descriptor.getIndex() == own.getIndex()
    sameSubIndex = descriptor.getSubIndex() == own.getSubIndex()
    return samePattern and sameIndex and sameSubIndex
|
||||||
|
|
||||||
|
def updateTags(self):
    """Refresh the tag table from the draft tags and remember the mapping
    of table row keys to (tag key, tag value) pairs for getSelectedTag()."""
    self.__tagRowData = populate_tag_table(
        self.trackTagsTable,
        self.__draftTrackTags,
        ignore_keys=self.__ignoreTrackKeys,
        remove_keys=self.__removeTrackKeys,
    )
|
||||||
|
|
||||||
|
@staticmethod
def build_language_options():
    """Return (label, language) Select options, sorted by display label."""
    selectable = (
        language for language in IsoLanguage if language != IsoLanguage.UNDEFINED
    )
    ordered = sorted(selectable, key=lambda entry: entry.label().casefold())
    return [(entry.label(), entry) for entry in ordered]
|
||||||
|
|
||||||
|
@staticmethod
def language_select_value(language):
    """Map IsoLanguage.UNDEFINED to Select.NULL; pass anything else through."""
    if language == IsoLanguage.UNDEFINED:
        return Select.NULL
    return language
|
||||||
|
|
||||||
|
def _apply_auto_title_for_language(self, language: IsoLanguage):
    """Write the language's label into the title input programmatically.

    The suppress flag keeps on_input_changed from treating this write as a
    user edit (which would end auto-management of the title).
    """
    titleInput = self.query_one("#title_input", Input)
    autoTitle = "" if language == IsoLanguage.UNDEFINED else language.label()
    self.__suppressTitleChanged = True
    titleInput.value = autoTitle
    self.__suppressTitleChanged = False
    self.__lastAutoTitle = autoTitle
|
||||||
|
def _handle_language_selection_changed(self, language):
    """Keep the auto-managed title in sync with the language selection."""
    if not self.__titleAutoManaged:
        return
    normalized = language if isinstance(language, IsoLanguage) else IsoLanguage.UNDEFINED
    self._apply_auto_title_for_language(normalized)
|
||||||
|
|
||||||
|
def _handle_title_input_changed(self, titleValue: str):
    """Stop auto-managing the title once the user types something that is
    not the auto-derived value for the currently selected language."""
    # Ignore programmatic writes and do nothing once auto-management ended.
    if self.__suppressTitleChanged or not self.__titleAutoManaged:
        return

    language = self.query_one("#language_select", Select).value
    if not isinstance(language, IsoLanguage):
        language = IsoLanguage.UNDEFINED

    expectedAutoTitle = "" if language == IsoLanguage.UNDEFINED else language.label()
    if str(titleValue) != expectedAutoTitle:
        self.__titleAutoManaged = False
|
||||||
|
|
||||||
|
def on_mount(self):
    """Populate the widgets from the descriptor / constructor arguments."""

    self.query_one("#index_label", Static).update(
        str(self.__index) if self.__index is not None else "-"
    )
    self.query_one("#subindex_label", Static).update(
        str(self.__subIndex) if self.__subIndex is not None else "-"
    )
    self.query_one("#pattern_label", Static).update(self.__patternLabel)

    if self.__trackType is not None:
        self.query_one("#type_select", Select).value = self.__trackType

    self.query_one("#audio_layout_select", Select).value = self.__audioLayout

    # One selectable entry per known disposition, pre-checked when the
    # descriptor already carries that flag.
    for disposition in TrackDisposition:

        dispositionIsSet = (
            self.__trackDescriptor is not None
            and disposition in self.__trackDescriptor.getDispositionSet()
        )

        dispositionOption = (
            t(disposition.label()),
            disposition.index(),
            dispositionIsSet,
        )
        self.query_one("#dispositions_selection_list", SelectionList).add_option(
            dispositionOption
        )

    if self.__trackDescriptor is not None:
        self.query_one("#language_select", Select).value = self.language_select_value(
            self.__trackDescriptor.getLanguage()
        )
        self.query_one("#title_input", Input).value = self.__trackDescriptor.getTitle()
        self.updateTags()

    if self.__metadataOnly:
        # Metadata-only mode: the stream structure (type/layout) is fixed.
        self.query_one("#type_select", Select).disabled = True
        self.query_one("#audio_layout_select", Select).disabled = True
|
||||||
|
|
||||||
|
def on_select_changed(self, event: Select.Changed) -> None:
    """Route language-selector changes to the auto-title logic."""
    if event.select.id != "language_select":
        return
    self._handle_language_selection_changed(event.value)
|
||||||
|
|
||||||
|
def on_input_changed(self, event: Input.Changed) -> None:
    """Route title-input changes to the auto-title detection."""
    if event.input.id != "title_input":
        return
    self._handle_title_input_changed(event.value)
|
||||||
|
|
||||||
|
def compose(self):
    """Build the widget tree: a 5x24 grid of labels, selectors, the tag
    table, the disposition list and the save/cancel buttons."""

    self.trackTagsTable = DataTable(classes="five")

    # Column keys are kept so callers can address cells later if needed.
    self.column_key_track_tag_key = add_auto_table_column(self.trackTagsTable, t("Key"))
    self.column_key_track_tag_value = add_auto_table_column(self.trackTagsTable, t("Value"))

    # Whole-row selection: getSelectedTag() resolves the cursor to a row key.
    self.trackTagsTable.cursor_type = "row"

    yield Header()

    with Grid():

        # Row 1: screen mode headline
        yield Static(
            t("New stream") if self.__isNew else t("Edit stream"),
            id="toplabel",
            classes="five",
        )

        # Row 2: owning pattern (markup=False: label may contain regex chars)
        yield Static(t("for pattern"))
        yield Static("", id="pattern_label", classes="four", markup=False)

        # Row 3: spacer
        yield Static(" ", classes="five")

        # Row 4: read-only index / sub-index display
        yield Static(t("Index / Subindex"))
        yield Static("", id="index_label", classes="two")
        yield Static("", id="subindex_label", classes="two")

        # Row 5: spacer
        yield Static(" ", classes="five")

        # Row 6: stream type selector
        yield Static(t("Type"))
        yield Select(
            [(t(trackType.label()), trackType) for trackType in TrackType],
            classes="four",
            id="type_select",
        )

        # Row 7: audio layout selector (only meaningful for audio streams)
        yield Static(t("Audio Layout"))
        yield Select(
            [(t(layout.label()), layout) for layout in AudioLayout],
            classes="four",
            id="audio_layout_select",
        )

        # Row 8: spacer
        yield Static(" ", classes="five")

        # Row 9: spacer
        yield Static(" ", classes="five")

        # Row 10: language selector
        yield Static(t("Language"))
        yield Select(
            self.build_language_options(),
            prompt=t("Select"),
            classes="four",
            id="language_select",
        )

        # Row 11: spacer
        yield Static(" ", classes="five")

        # Row 12: title input (auto-filled from language until user edits)
        yield Static(t("Title"))
        yield Input(id="title_input", classes="four")

        # Row 13: spacer
        yield Static(" ", classes="five")

        # Row 14: spacer
        yield Static(" ", classes="five")

        # Row 15: tag table toolbar
        yield Static(t("Stream tags"))
        yield Static(" ")
        yield Button(t("Add"), id="button_add_stream_tag")
        yield Button(t("Edit"), id="button_edit_stream_tag")
        yield Button(t("Delete"), id="button_delete_stream_tag")

        # Row 16: tag table
        yield self.trackTagsTable

        # Row 17: spacer
        yield Static(" ", classes="five")

        # Row 18: disposition list heading
        yield Static(t("Stream dispositions"), classes="five")

        # Row 19: disposition checkboxes (values are disposition indices)
        yield SelectionList[int](
            classes="five",
            id="dispositions_selection_list",
        )

        # Row 20: spacer
        yield Static(" ", classes="five")

        # Row 21: spacer
        yield Static(" ", classes="five")

        # Row 22: confirm / abort
        yield Button(t("Save"), id="save_button")
        yield Button(t("Cancel"), id="cancel_button")

        # Row 23: spacer
        yield Static(" ", classes="five")

        # Row 24: validation message area
        yield Static(" ", classes="five", id="messagestatic")

    yield Footer(id="footer")
|
||||||
|
|
||||||
|
def getTrackDescriptorFromInput(self):
    """Assemble and return a TrackDescriptor from the current widget state.

    A fresh descriptor is constructed; the descriptor passed to __init__ is
    only consulted for its persistent id and its source index.
    """

    kwargs = {}
    kwargs[TrackDescriptor.CONTEXT_KEY] = self.context

    # Preserve the database identity when editing an already-persisted track.
    if self.__trackDescriptor is not None and self.__trackDescriptor.getId() != -1:
        kwargs[TrackDescriptor.ID_KEY] = self.__trackDescriptor.getId()

    if self.__patternId != -1:
        kwargs[TrackDescriptor.PATTERN_ID_KEY] = int(self.__patternId)

    # NOTE(review): assumes self.__index is never None here — confirm all
    # callers supply an index when opening the screen for a new track.
    kwargs[TrackDescriptor.INDEX_KEY] = int(self.__index)
    kwargs[TrackDescriptor.SOURCE_INDEX_KEY] = (
        int(self.__trackDescriptor.getSourceIndex())
        if self.__trackDescriptor is not None
        else int(self.__index)
    )
    if self.__subIndex is not None and int(self.__subIndex) >= 0:
        kwargs[TrackDescriptor.SUB_INDEX_KEY] = int(self.__subIndex)

    # No valid selection in the type widget maps to UNKNOWN.
    selectedTrackType = self.query_one("#type_select", Select).value
    if not isinstance(selectedTrackType, TrackType):
        selectedTrackType = TrackType.UNKNOWN
    kwargs[TrackDescriptor.TRACK_TYPE_KEY] = selectedTrackType
    kwargs[TrackDescriptor.CODEC_KEY] = self.__trackCodec

    # An audio layout is only recorded for audio streams.
    if selectedTrackType == TrackType.AUDIO:
        selectedAudioLayout = self.query_one("#audio_layout_select", Select).value
        kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = (
            selectedAudioLayout
            if isinstance(selectedAudioLayout, AudioLayout)
            else AudioLayout.LAYOUT_UNDEFINED
        )
    else:
        kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.LAYOUT_UNDEFINED

    # Merge the draft free-form tags with the dedicated language/title widgets.
    trackTags = dict(self.__draftTrackTags)

    language = self.query_one("#language_select", Select).value
    if isinstance(language, IsoLanguage):
        trackTags["language"] = language.threeLetter()

    title = self.query_one("#title_input", Input).value
    if title:
        trackTags["title"] = title

    kwargs[TrackDescriptor.TAGS_KEY] = trackTags

    # SelectionList values are disposition indices: rebuild the integer bit
    # field and decode it back into a set of TrackDisposition members.
    dispositionFlags = sum(
        [2 ** flag for flag in self.query_one("#dispositions_selection_list", SelectionList).selected]
    )
    kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = TrackDisposition.toSet(
        dispositionFlags
    )

    return TrackDescriptor(**kwargs)
|
||||||
|
|
||||||
|
def action_back(self):
    """Escape binding: leave this screen (or exit at the screen-stack root)."""
    go_back_or_exit(self)
|
||||||
|
|
||||||
|
def getSelectedTag(self):
    """Return the (key, value) pair of the tag row under the cursor.

    Returns None when the table is empty or the cursor does not resolve to
    a known row.
    """
    table = self.trackTagsTable
    try:
        row_key, _column_key = table.coordinate_to_cell_key(table.cursor_coordinate)
    except CellDoesNotExist:
        return None
    if row_key is None:
        return None
    return self.__tagRowData.get(row_key)
|
||||||
|
|
||||||
|
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch all button actions on this screen."""

    if event.button.id == "save_button":
        trackDescriptor = self.getTrackDescriptorFromInput()

        # Siblings excluding the track currently being edited ...
        siblingTrackList = [
            descriptor
            for descriptor in self.__siblingTrackDescriptors
            if not self._descriptor_refs_same_track(descriptor)
        ]
        # ... restricted to the same stream type as the edited track.
        siblingTrackList = [
            descriptor
            for descriptor in siblingTrackList
            if descriptor.getType() == trackDescriptor.getType()
        ]

        numDefaultTracks = len(
            [
                descriptor
                for descriptor in siblingTrackList
                if TrackDisposition.DEFAULT in descriptor.getDispositionSet()
            ]
        )
        numForcedTracks = len(
            [
                descriptor
                for descriptor in siblingTrackList
                if TrackDisposition.FORCED in descriptor.getDispositionSet()
            ]
        )

        # New tracks are appended after their same-type siblings.
        if self.__isNew:
            trackDescriptor.setSubIndex(len(siblingTrackList))
        elif self.__subIndex is not None and int(self.__subIndex) >= 0:
            trackDescriptor.setSubIndex(int(self.__subIndex))

        # Enforce at most one 'default' and one 'forced' track per type.
        if (
            TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet()
            and numDefaultTracks
        ) or (
            TrackDisposition.FORCED in trackDescriptor.getDispositionSet()
            and numForcedTracks
        ):

            self.query_one("#messagestatic", Static).update(
                t(
                    "Cannot add another stream with disposition flag 'default' or 'forced' set"
                )
            )
        else:
            self.query_one("#messagestatic", Static).update(" ")
            self.dismiss(trackDescriptor)

    if event.button.id == "cancel_button":
        self.app.pop_screen()

    if event.button.id == "button_add_stream_tag":
        self.app.push_screen(TagDetailsScreen(), self.handle_update_tag)

    if event.button.id == "button_edit_stream_tag":
        selectedTag = self.getSelectedTag()
        if selectedTag is not None:
            self.app.push_screen(
                TagDetailsScreen(key=selectedTag[0], value=selectedTag[1]),
                self.handle_update_tag,
            )

    if event.button.id == "button_delete_stream_tag":
        selectedTag = self.getSelectedTag()
        if selectedTag is not None:
            self.app.push_screen(
                TagDeleteScreen(key=selectedTag[0], value=selectedTag[1]),
                self.handle_delete_tag,
            )
|
||||||
|
|
||||||
|
def handle_update_tag(self, tag):
    """Callback from TagDetailsScreen: store the edited (key, value) pair."""
    if tag is None:
        return
    tagKey, tagValue = str(tag[0]), str(tag[1])
    self.__draftTrackTags[tagKey] = tagValue
    self.updateTags()
|
||||||
|
|
||||||
|
def handle_delete_tag(self, trackTag):
    """Callback from TagDeleteScreen: drop the tag if still present."""
    if trackTag is None:
        return
    doomedKey = str(trackTag[0])
    self.__draftTrackTags.pop(doomedKey, None)
    self.updateTags()
|
||||||
76
src/ffx/track_disposition.py
Normal file
76
src/ffx/track_disposition.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import difflib, click
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class TrackDisposition(Enum):
    """ffprobe/ffmpeg stream disposition flags.

    Each member carries the ffprobe disposition name, a stable bit index
    (used by toFlags()/toSet()) and a three-letter short code.
    """

    DEFAULT = {"name": "default", "index": 0, "indicator": "DEF"}
    FORCED = {"name": "forced", "index": 1, "indicator": "FOR"}

    DUB = {"name": "dub", "index": 2, "indicator": "DUB"}
    ORIGINAL = {"name": "original", "index": 3, "indicator": "ORG"}
    COMMENT = {"name": "comment", "index": 4, "indicator": "COM"}
    LYRICS = {"name": "lyrics", "index": 5, "indicator": "LYR"}
    KARAOKE = {"name": "karaoke", "index": 6, "indicator": "KAR"}
    HEARING_IMPAIRED = {"name": "hearing_impaired", "index": 7, "indicator": "HIM"}
    VISUAL_IMPAIRED = {"name": "visual_impaired", "index": 8, "indicator": "VIM"}
    CLEAN_EFFECTS = {"name": "clean_effects", "index": 9, "indicator": "CLE"}
    ATTACHED_PIC = {"name": "attached_pic", "index": 10, "indicator": "ATP"}
    TIMED_THUMBNAILS = {"name": "timed_thumbnails", "index": 11, "indicator": "TTH"}
    NON_DIEGETICS = {"name": "non_diegetic", "index": 12, "indicator": "NOD"}
    CAPTIONS = {"name": "captions", "index": 13, "indicator": "CAP"}
    DESCRIPTIONS = {"name": "descriptions", "index": 14, "indicator": "DES"}
    METADATA = {"name": "metadata", "index": 15, "indicator": "MED"}
    DEPENDENT = {"name": "dependent", "index": 16, "indicator": "DEP"}
    STILL_IMAGE = {"name": "still_image", "index": 17, "indicator": "STI"}

    def label(self):
        """Return the ffprobe disposition name."""
        return str(self.value['name'])

    def index(self):
        """Return the bit position used by toFlags()/toSet()."""
        return int(self.value['index'])

    def indicator(self):
        """Return the three-letter short code."""
        return str(self.value['indicator'])

    @staticmethod
    def toFlags(dispositionSet):
        """Flags stored in integer bits (2**index)"""
        if type(dispositionSet) is not set:
            raise click.ClickException('TrackDisposition.toFlags(): Argument is not of type set')
        flags = 0
        for member in dispositionSet:
            if type(member) is not TrackDisposition:
                raise click.ClickException('TrackDisposition.toFlags(): Element not of type TrackDisposition')
            # Set membership guarantees unique indices, so OR == addition.
            flags |= 1 << member.index()
        return flags

    @staticmethod
    def toSet(flags):
        """Inverse of toFlags(): decode an integer bit field into a set."""
        return {member for member in TrackDisposition if flags & (1 << member.index())}

    @staticmethod
    def find(label):
        """Return the member whose name equals *label*, or None."""
        wanted = str(label)
        return next((member for member in TrackDisposition if member.label() == wanted), None)

    @staticmethod
    def fromIndicator(indicator: str):
        """Return the member whose short code equals *indicator*, or None."""
        wanted = str(indicator)
        return next((member for member in TrackDisposition if member.indicator() == wanted), None)
|
||||||
39
src/ffx/track_type.py
Normal file
39
src/ffx/track_type.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
class TrackType(Enum):
    """Kinds of streams a media container can hold."""

    VIDEO = {'label': 'video', 'index': 1}
    AUDIO = {'label': 'audio', 'index': 2}
    SUBTITLE = {'label': 'subtitle', 'index': 3}
    ATTACHMENT = {'label': 'attachment', 'index': 4}

    UNKNOWN = {'label': 'unknown', 'index': 0}

    def label(self):
        """Returns the stream type as string"""
        return str(self.value['label'])

    def indicator(self):
        """Returns the stream type as single letter"""
        return self.label()[0]

    def index(self):
        """Returns the stream type index"""
        return int(self.value['index'])

    @staticmethod
    def fromLabel(label : str):
        """Return the member matching *label*, UNKNOWN when unmatched."""
        wanted = str(label)
        return next((member for member in TrackType if member.value['label'] == wanted), TrackType.UNKNOWN)

    @staticmethod
    def fromIndex(index : int):
        """Return the member matching *index*, UNKNOWN when unmatched."""
        wanted = int(index)
        return next((member for member in TrackType if member.value['index'] == wanted), TrackType.UNKNOWN)
|
||||||
34
src/ffx/video_encoder.py
Normal file
34
src/ffx/video_encoder.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
class VideoEncoder(Enum):
    """Video encoders selectable for conversion ('copy' passes through)."""

    AV1 = {'label': 'av1', 'index': 1}
    VP9 = {'label': 'vp9', 'index': 2}
    H264 = {'label': 'h264', 'index': 3}
    COPY = {'label': 'copy', 'index': 4}

    UNDEFINED = {'label': 'undefined', 'index': 0}

    def label(self):
        """Returns the stream type as string"""
        return str(self.value['label'])

    def index(self):
        """Returns the stream type index"""
        return int(self.value['index'])

    @staticmethod
    def fromLabel(label : str):
        """Return the member matching *label*, UNDEFINED when unmatched."""
        wanted = str(label)
        return next((member for member in VideoEncoder if member.value['label'] == wanted), VideoEncoder.UNDEFINED)

    @staticmethod
    def fromIndex(index : int):
        """Return the member matching *index*, UNDEFINED when unmatched."""
        wanted = int(index)
        return next((member for member in VideoEncoder if member.value['index'] == wanted), VideoEncoder.UNDEFINED)
|
||||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# Repo-root tests package for legacy and future test code.
|
||||||
1
tests/integration/__init__.py
Normal file
1
tests/integration/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
1
tests/integration/pattern_management/__init__.py
Normal file
1
tests/integration/pattern_management/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
@@ -0,0 +1,138 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from tests.support.ffx_bundle import (
|
||||||
|
PatternTrackSpec,
|
||||||
|
SourceTrackSpec,
|
||||||
|
add_show,
|
||||||
|
build_controller_context,
|
||||||
|
create_source_fixture,
|
||||||
|
dispose_controller_context,
|
||||||
|
expected_output_path,
|
||||||
|
run_ffx_convert,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ffx.pattern_controller import PatternController
|
||||||
|
from ffx.track_type import TrackType
|
||||||
|
|
||||||
|
# pytest is optional: the module must stay importable (and runnable via
# plain unittest) in environments without pytest installed.
try:
    import pytest
except ImportError:  # pragma: no cover - unittest-only environments
    pytest = None

if pytest is not None:
    # Enables selection via 'pytest -m integration' / '-m pattern_management'.
    pytestmark = [pytest.mark.integration, pytest.mark.pattern_management]
|
||||||
|
|
||||||
|
|
||||||
|
class PatternManagementCliTests(unittest.TestCase):
    """Integration tests for CLI behaviour around stored filename patterns."""

    def setUp(self):
        # Fresh sandbox per test: isolated working dir, HOME and database file.
        self.tempdir = tempfile.TemporaryDirectory()
        self.workdir = Path(self.tempdir.name)
        self.home_dir = self.workdir / "home"
        self.home_dir.mkdir()
        self.database_path = self.workdir / "test.db"
||||||
|
def tearDown(self):
    # Removes the whole sandbox, including the database file.
    self.tempdir.cleanup()
|
||||||
|
|
||||||
|
def prepare_duplicate_matching_patterns(self):
    """Persist two patterns whose regexes both match 'dup_*.mkv' filenames."""
    context = build_controller_context(self.database_path)
    try:
        add_show(context, show_id=1)
        add_show(context, show_id=2)

        controller = PatternController(context)
        track_descriptors = [
            PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO)
        ]

        def to_track_descriptor(spec: PatternTrackSpec):
            # Imported lazily — presumably to avoid import cycles; confirm.
            from ffx.track_descriptor import TrackDescriptor

            kwargs = {
                TrackDescriptor.INDEX_KEY: spec.index,
                TrackDescriptor.SOURCE_INDEX_KEY: spec.source_index,
                TrackDescriptor.TRACK_TYPE_KEY: spec.track_type,
                TrackDescriptor.TAGS_KEY: dict(spec.tags),
                TrackDescriptor.DISPOSITION_SET_KEY: set(spec.dispositions),
            }
            return TrackDescriptor(**kwargs)

        # Specific episode pattern for show 1 ...
        controller.savePatternSchema(
            {"show_id": 1, "pattern": r"^dup_(s[0-9]+e[0-9]+)\.mkv$"},
            [to_track_descriptor(track_descriptors[0])],
        )
        # ... and a catch-all for show 2, so any 'dup_*' name matches both.
        controller.savePatternSchema(
            {"show_id": 2, "pattern": r"^dup_.*$"},
            [to_track_descriptor(track_descriptors[0])],
        )
    finally:
        dispose_controller_context(context)
|
||||||
|
|
||||||
|
def test_convert_fails_when_filename_matches_more_than_one_pattern(self):
|
||||||
|
self.prepare_duplicate_matching_patterns()
|
||||||
|
source_filename = "dup_s01e01.mkv"
|
||||||
|
source_path = create_source_fixture(
|
||||||
|
self.workdir,
|
||||||
|
source_filename,
|
||||||
|
[
|
||||||
|
SourceTrackSpec(TrackType.VIDEO, identity="video-0"),
|
||||||
|
SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
completed = run_ffx_convert(
|
||||||
|
self.workdir,
|
||||||
|
self.home_dir,
|
||||||
|
self.database_path,
|
||||||
|
"--video-encoder",
|
||||||
|
"copy",
|
||||||
|
"--no-tmdb",
|
||||||
|
"--no-prompt",
|
||||||
|
"--no-signature",
|
||||||
|
str(source_path),
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertNotEqual(completed.returncode, 0)
|
||||||
|
error_output = f"{completed.stdout}\n{completed.stderr}"
|
||||||
|
self.assertIn("matched more than one pattern", error_output)
|
||||||
|
self.assertFalse(expected_output_path(self.workdir, source_filename).exists())
|
||||||
|
|
||||||
|
def test_convert_can_ignore_duplicate_matches_when_no_pattern_is_requested(self):
|
||||||
|
self.prepare_duplicate_matching_patterns()
|
||||||
|
source_filename = "dup_s01e01.mkv"
|
||||||
|
source_path = create_source_fixture(
|
||||||
|
self.workdir,
|
||||||
|
source_filename,
|
||||||
|
[
|
||||||
|
SourceTrackSpec(TrackType.VIDEO, identity="video-0"),
|
||||||
|
SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
completed = run_ffx_convert(
|
||||||
|
self.workdir,
|
||||||
|
self.home_dir,
|
||||||
|
self.database_path,
|
||||||
|
"--video-encoder",
|
||||||
|
"copy",
|
||||||
|
"--no-pattern",
|
||||||
|
"--no-tmdb",
|
||||||
|
"--no-prompt",
|
||||||
|
"--no-signature",
|
||||||
|
str(source_path),
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
0,
|
||||||
|
completed.returncode,
|
||||||
|
f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}",
|
||||||
|
)
|
||||||
|
self.assertTrue(expected_output_path(self.workdir, source_filename).exists())
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Allow running this test module directly with the stdlib runner,
    # without requiring pytest.
    unittest.main()
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user