diff --git a/.claude/rules/plugin-development.md b/.claude/rules/plugin-development.md index ce45628..adcfc8d 100644 --- a/.claude/rules/plugin-development.md +++ b/.claude/rules/plugin-development.md @@ -37,22 +37,25 @@ class MyPlugin: ## Hook System -**Hook enum** (`reeln.plugins.hooks.Hook`) — 13 lifecycle hooks: +**Hook enum** (`reeln.plugins.hooks.Hook`) — 16 lifecycle hooks: | Hook | Emitted when | |------|-------------| | `PRE_RENDER` | Before a render operation starts | -| `POST_RENDER` | After a render completes | +| `POST_RENDER` | After a render completes (fast-track publish) | | `ON_CLIP_AVAILABLE` | A new clip file is ready | | `ON_EVENT_CREATED` | A new event is created | | `ON_EVENT_TAGGED` | An event is tagged/categorized | | `ON_GAME_INIT` | `reeln game init` sets up a new game | | `ON_GAME_READY` | After all `ON_GAME_INIT` handlers complete — plugins read shared context from init phase | | `ON_GAME_FINISH` | `reeln game finish` finalizes a game | +| `ON_POST_GAME_FINISH` | After all `ON_GAME_FINISH` handlers complete | | `ON_HIGHLIGHTS_MERGED` | Segment highlights are merged into a reel | | `ON_SEGMENT_START` | A new segment begins | | `ON_SEGMENT_COMPLETE` | A segment finishes | | `ON_FRAMES_EXTRACTED` | Frames extracted from a clip for smart zoom analysis | +| `ON_QUEUE` | A render result is added to the queue (`--queue` flag) | +| `ON_PUBLISH` | A queued item is published to an external target | | `ON_ERROR` | An error occurs during any operation | **HookContext** — frozen dataclass passed to every handler: @@ -92,6 +95,7 @@ Plugins can implement typed protocols for specific capabilities (`reeln.plugins. 
| `MetadataEnricher` | `enrich(event_data) -> dict` | Enrich event metadata | | `Notifier` | `notify(message, *, metadata) -> None` | Send notifications | | `Generator` | `generate(context) -> GeneratorResult` | Generate media assets | +| `Authenticator` | `auth_check() -> list[AuthCheckResult]`, `auth_refresh() -> list[AuthCheckResult]` | Test credentials and refresh tokens | ## Config Schema diff --git a/CHANGELOG.md b/CHANGELOG.md index aa44d01..f90026d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/). +## [0.0.38] - 2026-04-07 + +### Added +- `reeln queue` command group for staged render-then-publish workflow: list, show, edit, publish, publish-all, remove, targets +- `--queue` / `-q` flag on `render short` and `render apply` — renders but queues for review instead of publishing immediately +- Per-target publish tracking — publish to YouTube, review, then selectively push to Instagram/TikTok without re-rendering +- `ON_QUEUE` and `ON_PUBLISH` lifecycle hooks for plugin integration with the queue workflow +- Centralized metadata generation (`core/metadata.py`) — auto-generates title and description from game/event context +- `QueueItem` stores config profile name — `queue publish` loads the same plugin settings used at queue time +- Queue persistence via `render_queue.json` (per-game directory) with advisory central index for cross-game listing +- `QueueError` exception class for queue operation errors +- Team logo overlay on goal shorts — resolves logo from `TeamProfile.logo_path`, scales to 80% of box height, right-aligned with text clipping. 
Supports all four filter chain paths (simple pad/crop, smart pad, speed segments, speed segments + smart pad) +- `reeln plugins auth` command — test authentication for plugins (`--json` for machine output, `--refresh` to force reauthentication) +- `reeln plugins uninstall` command — uninstall a plugin and remove from config (`--force` to skip confirmation, `--dry-run` to preview) +- `Authenticator` capability protocol — plugins implement `auth_check()` and `auth_refresh()` for credential verification and token renewal +- `AuthCheckResult`, `AuthStatus`, `PluginAuthReport` models (`reeln/models/auth.py`) for structured auth status reporting + ## [0.0.37] - 2026-04-03 ### Added diff --git a/README.md b/README.md index cb20cd8..797db60 100644 --- a/README.md +++ b/README.md @@ -58,6 +58,8 @@ reeln doctor # checks ffmpeg, codecs, config, permissions, plugins - **Render profiles** — save and reuse rendering settings, chain them with iterations - **Smart zoom** — AI-powered tracking that follows the action (via plugin) - **Player overlays** — roster-aware goal overlays with jersey number lookup +- **Render queue** — stage renders for review, then selectively publish to YouTube, Instagram, TikTok +- **Team logo overlays** — automatic logo placement on goal shorts from team profiles - **Plugin architecture** — lifecycle hooks for YouTube, Instagram, cloud uploads, and more - **Flexible configuration** — JSON config, XDG paths, env var overrides, named profiles - **Cross-platform** — macOS, Linux, Windows @@ -85,6 +87,8 @@ reeln game finish See the [examples](examples/) for detailed walkthroughs of every workflow. +> Looking for a GUI? [reeln dock](https://github.com/StreamnDad/reeln-dock) is a cross-platform desktop companion for reeln — visual render profiles, clip review, and game management without touching the terminal. Coming soon. 
+ ## Supported sports | Sport | Segment name | Count | Example directories | @@ -128,6 +132,18 @@ See the [examples](examples/) for detailed walkthroughs of every workflow. | `reeln render apply` | Apply a render profile (full-frame, no crop) | | `reeln render reel` | Assemble rendered shorts into a reel | +### Queue + +| Command | Description | +|---|---| +| `reeln queue list` | List queued render items | +| `reeln queue show ` | Show detailed queue item info | +| `reeln queue edit ` | Edit title/description before publishing | +| `reeln queue publish ` | Publish to one or all targets (`--target `) | +| `reeln queue publish-all` | Publish all rendered items | +| `reeln queue remove ` | Soft-delete queue item | +| `reeln queue targets` | List available publish targets | + ### Configuration | Command | Description | @@ -147,6 +163,8 @@ See the [examples](examples/) for detailed walkthroughs of every workflow. | `reeln plugins list` | List installed plugins | | `reeln plugins enable ` | Enable a plugin | | `reeln plugins disable ` | Disable a plugin | +| `reeln plugins uninstall ` | Uninstall a plugin | +| `reeln plugins auth` | Test plugin authentication | ## Configuration @@ -161,6 +179,20 @@ reeln uses a layered JSON config system: reeln config show ``` +## Smart zoom — AI-powered action tracking + +`reeln render short --smart` uses the [OpenAI plugin](https://github.com/StreamnDad/reeln-plugin-openai) to analyse extracted frames and track the action — dynamically cropping and panning the camera to follow the play in your 9:16 short. Read [What happened when I let AI edit my youth hockey videos](https://streamn-dad.medium.com/what-happened-when-i-let-ai-edit-my-youth-hockey-videos-d7ece1883905) for the full story. + +Add `--debug` to see exactly what the AI sees: annotated frames with crosshair tracking, crop regions, the full zoom path, and every ffmpeg filter chain. 
+ +[Live debug example](https://streamn.dad/examples/reeln-debug/) — real game clip with 16-frame smart zoom tracking. + +

+ + Smart zoom debug — annotated frame showing crosshair tracking and crop region + +

+ ## Documentation - [Full documentation](https://reeln-cli.readthedocs.io) — install, guides, CLI reference diff --git a/assets/debug-preview.png b/assets/debug-preview.png new file mode 100644 index 0000000..ee185c8 Binary files /dev/null and b/assets/debug-preview.png differ diff --git a/docs/cli/index.md b/docs/cli/index.md index 4d943bb..f448168 100644 --- a/docs/cli/index.md +++ b/docs/cli/index.md @@ -20,6 +20,7 @@ reeln provides a hierarchical command structure organized by domain. | `reeln render` | Video rendering: short, preview, reel | Available | {doc}`render` | | `reeln media` | Media management: prune | Available | {doc}`media` | | `reeln plugins` | Plugin management: list, enable, disable | Available | {doc}`plugins` | +| `reeln queue` | Render queue: list, show, edit, publish, remove, targets | Available | {doc}`queue` | ## Global options diff --git a/docs/cli/plugins.md b/docs/cli/plugins.md index b23f7b1..5c252bf 100644 --- a/docs/cli/plugins.md +++ b/docs/cli/plugins.md @@ -142,6 +142,64 @@ reeln plugins disable Adds the plugin to the `plugins.disabled` list and removes it from `plugins.enabled` in your config file. +### `reeln plugins uninstall` + +Uninstall a plugin and remove it from config. + +```bash +reeln plugins uninstall +reeln plugins uninstall --force +reeln plugins uninstall --dry-run +``` + +| Argument | Description | +|---|---| +| `NAME` | Plugin name to uninstall | + +| Option | Description | +|---|---| +| `--force`, `-f` | Skip confirmation prompt | +| `--dry-run` | Preview the uninstall command without executing | +| `--installer` | Force a specific installer (`pip` or `uv`) | + +Prompts for confirmation before uninstalling. Removes the plugin from `plugins.enabled` and adds it to `plugins.disabled` in your config. + +### `reeln plugins auth` + +Test authentication for plugins, or force reauthentication. 
+ +```bash +reeln plugins auth +reeln plugins auth google +reeln plugins auth google --refresh +reeln plugins auth --json +``` + +| Argument | Description | +|---|---| +| `NAME` | Plugin name (empty = test all plugins with auth support) | + +| Option | Description | +|---|---| +| `--refresh`, `-r` | Force reauthentication (requires a plugin name) | +| `--json` | Output as JSON | +| `--profile` | Named config profile | +| `--config` | Explicit config file path | + +Checks each plugin that implements the `Authenticator` protocol. Reports per-service status: + +| Status | Badge | Meaning | +|---|---|---| +| `ok` | `authenticated` | Credentials are valid | +| `warn` | `warning` | Credentials work but with caveats (e.g., missing scopes) | +| `expired` | `expired` | Token has expired | +| `not_configured` | `not configured` | No credentials found | +| `fail` | `failed` | Authentication check failed | + +With `--refresh`, the plugin's `auth_refresh()` method is called to force token renewal (e.g., OAuth refresh flow). + +Exits with code 1 if any result is `fail` or `expired`. + ## Plugin registry reeln maintains a remote plugin registry that lists available plugins, their packages, and capabilities. The registry is fetched from GitHub and cached locally for 1 hour. 
@@ -189,6 +247,9 @@ reeln exposes lifecycle hooks that plugins can subscribe to: | `ON_HIGHLIGHTS_MERGED` | After game highlights are merged | | `ON_SEGMENT_START` | Before segment file I/O begins | | `ON_SEGMENT_COMPLETE` | After segment merge and state update | +| `ON_FRAMES_EXTRACTED` | After frames are extracted for smart zoom analysis | +| `ON_QUEUE` | After a render result is added to the queue (`--queue` flag) | +| `ON_PUBLISH` | After a queued item is published to an external target | | `ON_ERROR` | When an error occurs in core operations | Hooks receive a `HookContext` with three fields: @@ -212,6 +273,7 @@ Plugins can implement typed capability interfaces: - **Uploader** — upload rendered media to external services (YouTube, social media, cloud storage) - **MetadataEnricher** — enrich event metadata with additional information (LLM descriptions, statistics) - **Notifier** — send notifications when events occur (Slack, Discord, email) +- **Authenticator** — test credentials and refresh tokens (`auth_check()` returns `list[AuthCheckResult]`, `auth_refresh()` forces token renewal) ## Orchestration pipeline diff --git a/docs/cli/queue.md b/docs/cli/queue.md new file mode 100644 index 0000000..e898972 --- /dev/null +++ b/docs/cli/queue.md @@ -0,0 +1,173 @@ +# reeln queue + +Render queue management for staged render-then-publish workflows. + +## Overview + +The render queue decouples rendering from publishing. Instead of `POST_RENDER` +plugin hooks firing immediately after a render, the `--queue` flag on +`render short` or `render apply` saves the rendered output to a queue for +review. You can then edit metadata (title, description), selectively publish +to specific platforms, and track per-target publish status. + +Queue files are stored per-game as `render_queue.json` alongside `game.json`. + +## Commands + +### `reeln queue list` + +List queued render items. 
+ +```bash +reeln queue list [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--game-dir`, `-g` | Game directory (default: cwd) | +| `--all`, `-a` | List across all games (uses central index) | +| `--status`, `-s` | Filter by status: rendered, published, partial, failed | + +Removed items are hidden by default. + +### `reeln queue show` + +Show detailed info for a queue item. + +```bash +reeln queue show [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--game-dir`, `-g` | Game directory (default: cwd) | + +Displays output path, duration, file size, game context, player info, render +profile, publish targets with status and URLs. + +ID supports prefix matching (e.g., `abc` matches `abc123def456`). + +### `reeln queue edit` + +Edit title or description before publishing. + +```bash +reeln queue edit [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--title`, `-t` | New title | +| `--description`, `-d` | New description | +| `--game-dir`, `-g` | Game directory (default: cwd) | + +At least one of `--title` or `--description` is required. + +### `reeln queue publish` + +Publish a queue item to one or all targets. + +```bash +reeln queue publish [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--target`, `-t` | Publish to specific target only (e.g., `google`, `meta`) | +| `--game-dir`, `-g` | Game directory (default: cwd) | +| `--profile` | Override config profile (default: profile stored at queue time) | +| `--config` | Explicit config file path | + +Without `--target`, publishes to all pending targets. Each target is tracked +independently — you can publish to YouTube first, review, then push to +Instagram later. + +The config profile stored in the queue item is used by default, ensuring the +same plugin settings (API keys, channel IDs, etc.) apply. Use `--profile` to +override. + +### `reeln queue publish-all` + +Publish all rendered items in the queue. 
+ +```bash +reeln queue publish-all [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--game-dir`, `-g` | Game directory (default: cwd) | +| `--profile` | Named config profile | +| `--config` | Explicit config file path | + +Only items with status `rendered` are published. Items already published, +failed, or removed are skipped. + +### `reeln queue remove` + +Soft-delete a queue item. + +```bash +reeln queue remove [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--game-dir`, `-g` | Game directory (default: cwd) | + +Marks the item as removed. Does not delete the rendered file. + +### `reeln queue targets` + +List available publish targets from loaded uploader plugins. + +```bash +reeln queue targets [OPTIONS] +``` + +| Option | Description | +|---|---| +| `--profile` | Named config profile | +| `--config` | Explicit config file path | + +Targets are discovered from installed plugins that implement the `Uploader` +capability protocol. + +## Status lifecycle + +Queue items progress through these statuses: + +| Status | Meaning | +|---|---| +| `rendered` | Render complete, not yet published | +| `publishing` | Publish in progress | +| `published` | All targets published successfully | +| `partial` | Some targets published, others pending or failed | +| `failed` | All target publishes failed | +| `removed` | Soft-deleted | + +Each publish target has its own status: `pending`, `published`, `failed`, or +`skipped`. 
+
+## Examples
+
+```bash
+# Render and queue
+reeln render short clip.mkv --queue --profile tournament-stream
+
+# Review what's queued
+reeln queue list
+reeln queue show abc123
+
+# Fix the title
+reeln queue edit abc123 --title "Smith Goal - North vs South"
+
+# Publish to YouTube first
+reeln queue publish abc123 --target google
+
+# Review the YouTube upload, then push to Instagram
+reeln queue publish abc123 --target meta
+
+# See all available targets
+reeln queue targets
+```
diff --git a/docs/cli/render.md b/docs/cli/render.md
index dd400c3..508b7d3 100644
--- a/docs/cli/render.md
+++ b/docs/cli/render.md
@@ -298,3 +298,32 @@ Merge raw replays first (existing `game segment`), then render the merged landsc
 reeln game segment 1
 reeln render short period-1/period-1_2026-02-26.mkv
 ```
+
+### Workflow C: Queue, review, and publish
+
+Render clips and queue them for selective publishing across platforms:
+
+```bash
+# Render and queue instead of publishing immediately
+reeln render short clip.mkv --queue
+reeln render apply clip.mkv -r slowmo --queue
+
+# Review queued items
+reeln queue list
+reeln queue show <id>
+
+# Edit metadata before publishing
+reeln queue edit <id> --title "Custom Title" --description "Updated description"
+
+# Publish to specific targets
+reeln queue publish <id> --target google
+reeln queue publish <id> --target meta
+
+# Or publish all pending items
+reeln queue publish-all
+```
+
+The `--queue` flag is supported on both `render short` and `render apply`. When used,
+the render completes but output is queued for review instead of triggering `POST_RENDER`
+plugin hooks. The `ON_QUEUE` hook fires instead, allowing plugins to prepare metadata
+without uploading. Each queue item tracks per-target publish status independently.
diff --git a/docs/guide/overlay-templates.md b/docs/guide/overlay-templates.md index 2400b52..7f7bfc6 100644 --- a/docs/guide/overlay-templates.md +++ b/docs/guide/overlay-templates.md @@ -18,7 +18,7 @@ ASS (Advanced SubStation Alpha) is a subtitle format that ffmpeg can render dire reeln ships with two ASS templates: -- **`goal_overlay`** — a lower-third banner showing scorer name, up to two assists, and team name +- **`goal_overlay`** — a lower-third banner showing scorer name, up to two assists, team name, and team logo - **`branding`** — a top-of-frame "reeln" watermark Reference them with the `builtin:` prefix: @@ -56,8 +56,21 @@ The overlay context builder populates these variables for ASS templates: | `ass_name_outline_color` | Name outline color | `&H00000000` | | `goal_overlay_*_x` / `*_y` | Pixel coordinates for each element | `83` | +| `goal_overlay_text_right` | Right edge for text clipping (accommodates logo) | `1800` | + Plus all base context variables: `home_team`, `away_team`, `date`, `sport`, `player`, `event_type`, etc. +### Team logo overlay + +When a `TeamProfile` has a `logo_path` set, the goal overlay automatically includes the team logo: + +- Logo is scaled to 80% of the overlay box height +- Positioned right-aligned with a margin inside the box +- ASS text lines are clipped via `goal_overlay_text_right` so they don't overlap the logo +- Font sizes adapt to the reduced text area + +The logo is composited via ffmpeg's `overlay` filter as a second input, using `-loop 1` for the static image. This works across all four filter chain paths: simple pad/crop, smart pad, speed segments, and speed segments + smart pad. + ### Writing custom ASS templates You can write your own `.ass` file using any `{{variable}}` from the context. 
Place it anywhere and reference it by path: diff --git a/docs/index.md b/docs/index.md index 759184e..102fd92 100644 --- a/docs/index.md +++ b/docs/index.md @@ -53,6 +53,7 @@ cli/render cli/media cli/config cli/plugins +cli/queue ``` ## Examples diff --git a/reeln/__init__.py b/reeln/__init__.py index 8ecf366..1feac2c 100644 --- a/reeln/__init__.py +++ b/reeln/__init__.py @@ -2,4 +2,4 @@ from __future__ import annotations -__version__ = "0.0.37" +__version__ = "0.0.38" diff --git a/reeln/cli.py b/reeln/cli.py index c9ff1ec..05043ac 100644 --- a/reeln/cli.py +++ b/reeln/cli.py @@ -7,7 +7,7 @@ import typer from reeln import __version__ -from reeln.commands import config_cmd, game, hooks_cmd, media, plugins_cmd, render +from reeln.commands import config_cmd, game, hooks_cmd, media, plugins_cmd, queue_cmd, render from reeln.core.log import setup_logging app = typer.Typer( @@ -24,6 +24,7 @@ app.add_typer(config_cmd.app, name="config") app.add_typer(plugins_cmd.app, name="plugins") app.add_typer(hooks_cmd.app, name="hooks") +app.add_typer(queue_cmd.app, name="queue") def _version_callback(value: bool) -> None: diff --git a/reeln/commands/plugins_cmd.py b/reeln/commands/plugins_cmd.py index 06dc127..0e0a50f 100644 --- a/reeln/commands/plugins_cmd.py +++ b/reeln/commands/plugins_cmd.py @@ -1,7 +1,9 @@ -"""Plugin management commands: list, enable, disable, search, info, install, update, inputs.""" +"""Plugin management commands: list, enable, disable, search, info, install, update, auth, inputs.""" from __future__ import annotations +from pathlib import Path + import typer from reeln.commands.style import bold, error, label, success, warn @@ -16,6 +18,7 @@ update_all_plugins, update_plugin, ) +from reeln.models.auth import AuthStatus, PluginAuthReport, plugin_auth_report_to_dict from reeln.models.plugin import PluginStatus from reeln.plugins.loader import ( discover_plugins, @@ -55,9 +58,11 @@ def _version_str(status: PluginStatus) -> str: @app.command(name="list") def 
list_plugins( refresh: bool = typer.Option(False, "--refresh", help="Force registry refresh."), + profile: str | None = typer.Option(None, "--profile", help="Named config profile."), + config_path: Path | None = typer.Option(None, "--config", help="Explicit config file path."), ) -> None: """List installed and enabled plugins with version info.""" - config = load_config() + config = load_config(path=config_path, profile=profile) plugins = discover_plugins() try: @@ -373,6 +378,105 @@ def uninstall( raise typer.Exit(1) +# --------------------------------------------------------------------------- +# plugins auth +# --------------------------------------------------------------------------- + + +@app.command() +def auth( + name: str = typer.Argument("", help="Plugin name (empty = all plugins with auth)."), + refresh: bool = typer.Option(False, "--refresh", "-r", help="Force reauthentication."), + json_output: bool = typer.Option(False, "--json", help="Output as JSON."), + profile: str | None = typer.Option(None, "--profile", help="Named config profile."), + config_path: Path | None = typer.Option(None, "--config", help="Explicit config file path."), +) -> None: + """Test authentication for plugins, or force reauthentication.""" + import json as json_mod + + from reeln.plugins.loader import ( + activate_plugins, + collect_auth_checks, + refresh_auth, + ) + + config = load_config(path=config_path, profile=profile) + loaded = activate_plugins(config.plugins) + + if refresh: + if not name: + typer.echo(error("--refresh requires a plugin name."), err=True) + raise typer.Exit(1) + report = refresh_auth(loaded, name) + if report is None: + typer.echo(error(f"Plugin '{name}' not found or does not support auth."), err=True) + raise typer.Exit(1) + reports = [report] + else: + reports = collect_auth_checks(loaded, name_filter=name) + + if not reports: + if name: + typer.echo(f"Plugin '{name}' not found or does not support auth.") + else: + typer.echo("No plugins with 
authentication support found.") + raise typer.Exit(1) + + if json_output: + data = {"plugins": [plugin_auth_report_to_dict(r) for r in reports]} + typer.echo(json_mod.dumps(data, indent=2)) + _exit_on_failure(reports) + return + + _render_auth_human(reports) + _exit_on_failure(reports) + + +def _auth_status_badge(status: AuthStatus) -> str: + """Colored badge for auth status.""" + if status == AuthStatus.OK: + return success("authenticated") + if status == AuthStatus.WARN: + return warn("warning") + if status == AuthStatus.EXPIRED: + return warn("expired") + if status == AuthStatus.NOT_CONFIGURED: + return label("not configured") + return error("failed") + + +def _render_auth_human(reports: list[PluginAuthReport]) -> None: + """Render auth reports for terminal output.""" + for report in reports: + typer.echo(f"\n {bold(report.plugin_name)}") + for r in report.results: + badge = _auth_status_badge(r.status) + typer.echo(f" {r.service} {badge}") + if r.identity: + typer.echo(f" {label('Identity:')} {r.identity}") + if r.expires_at: + typer.echo(f" {label('Expires:')} {r.expires_at}") + if r.scopes: + typer.echo(f" {label('Scopes:')} {', '.join(r.scopes)}") + if r.required_scopes: + missing = set(r.required_scopes) - set(r.scopes) + if missing: + typer.echo(f" {warn('Missing:')} {', '.join(sorted(missing))}") + if r.message and r.status != AuthStatus.OK: + typer.echo(f" {r.message}") + if r.hint: + typer.echo(f" {label('Hint:')} {r.hint}") + typer.echo() + + +def _exit_on_failure(reports: list[PluginAuthReport]) -> None: + """Raise ``typer.Exit(1)`` if any result is FAIL or EXPIRED.""" + for report in reports: + for r in report.results: + if r.status in (AuthStatus.FAIL, AuthStatus.EXPIRED): + raise typer.Exit(1) + + # --------------------------------------------------------------------------- # plugins inputs # --------------------------------------------------------------------------- diff --git a/reeln/commands/queue_cmd.py b/reeln/commands/queue_cmd.py new file 
mode 100644 index 0000000..a92f210 --- /dev/null +++ b/reeln/commands/queue_cmd.py @@ -0,0 +1,307 @@ +"""Queue management commands: list, show, edit, publish, remove, targets.""" + +from __future__ import annotations + +from pathlib import Path + +import typer + +from reeln.commands.style import bold, error, label, success, warn +from reeln.core.errors import QueueError +from reeln.models.queue import PublishStatus, QueueStatus + +app = typer.Typer(no_args_is_help=True, help="Render queue management commands.") + + +# --------------------------------------------------------------------------- +# Formatting helpers +# --------------------------------------------------------------------------- + + +def _status_badge(status: QueueStatus) -> str: + """Colored badge for queue item status.""" + if status is QueueStatus.PUBLISHED: + return success("published") + if status is QueueStatus.PARTIAL: + return warn("partial") + if status is QueueStatus.FAILED: + return error("failed") + if status is QueueStatus.PUBLISHING: + return warn("publishing") + if status is QueueStatus.REMOVED: + return label("removed") + return label("rendered") + + +def _publish_badge(status: PublishStatus) -> str: + """Colored badge for publish target status.""" + if status is PublishStatus.PUBLISHED: + return success("published") + if status is PublishStatus.FAILED: + return error("failed") + if status is PublishStatus.SKIPPED: + return label("skipped") + return label("pending") + + +def _short_id(item_id: str) -> str: + """Display a short version of the ID.""" + return bold(item_id[:8]) + + +# --------------------------------------------------------------------------- +# queue list +# --------------------------------------------------------------------------- + + +@app.command(name="list") +def list_queue( + game_dir: Path | None = typer.Option(None, "--game-dir", "-g", help="Game directory."), + all_games: bool = typer.Option(False, "--all", "-a", help="List across all games."), + status: str | 
None = typer.Option(None, "--status", "-s", help="Filter by status."), +) -> None: + """List queued render items.""" + from reeln.core.queue import load_queue, load_queue_index + + dirs: list[Path] = [] + if all_games: + index = load_queue_index() + dirs = [Path(d) for d in index] + elif game_dir: + dirs = [game_dir] + else: + dirs = [Path.cwd()] + + status_filter: QueueStatus | None = None + if status: + try: + status_filter = QueueStatus(status) + except ValueError as exc: + typer.echo(error(f"Unknown status: {status}"), err=True) + raise typer.Exit(code=1) from exc + + total = 0 + for d in dirs: + queue = load_queue(d) + items = list(queue.items) + if status_filter: + items = [i for i in items if i.status is status_filter] + items = [i for i in items if i.status is not QueueStatus.REMOVED] + + if not items: + continue + + if all_games: + typer.echo(f"\n{bold(str(d))}") + + for item in items: + badge = _status_badge(item.status) + title = item.title or "(untitled)" + typer.echo(f" {_short_id(item.id)} {badge} {title}") + total += 1 + + if total == 0: + typer.echo("No queue items found.") + + +# --------------------------------------------------------------------------- +# queue show +# --------------------------------------------------------------------------- + + +@app.command() +def show( + item_id: str = typer.Argument(..., help="Queue item ID (or prefix)."), + game_dir: Path | None = typer.Option(None, "--game-dir", "-g", help="Game directory."), +) -> None: + """Show detailed info for a queue item.""" + from reeln.core.queue import get_queue_item + + d = game_dir or Path.cwd() + item = get_queue_item(d, item_id) + if item is None: + typer.echo(error(f"Queue item '{item_id}' not found."), err=True) + raise typer.Exit(code=1) + + typer.echo(f" {bold('ID:')} {item.id}") + typer.echo(f" {bold('Status:')} {_status_badge(item.status)}") + typer.echo(f" {bold('Title:')} {item.title or '(untitled)'}") + typer.echo(f" {bold('Description:')} {item.description or 
'(none)'}") + typer.echo(f" {bold('Output:')} {item.output}") + + if item.duration_seconds is not None: + typer.echo(f" {bold('Duration:')} {item.duration_seconds:.1f}s") + if item.file_size_bytes is not None: + size_mb = item.file_size_bytes / (1024 * 1024) + typer.echo(f" {bold('File size:')} {size_mb:.1f} MB") + + if item.home_team or item.away_team: + typer.echo(f" {bold('Game:')} {item.home_team} vs {item.away_team}") + if item.player: + typer.echo(f" {bold('Player:')} {item.player}") + if item.assists: + typer.echo(f" {bold('Assists:')} {item.assists}") + if item.render_profile: + typer.echo(f" {bold('Profile:')} {item.render_profile}") + if item.crop_mode: + typer.echo(f" {bold('Crop mode:')} {item.crop_mode}") + typer.echo(f" {bold('Queued at:')} {item.queued_at}") + + if item.publish_targets: + typer.echo(f"\n {bold('Publish targets:')}") + for t in item.publish_targets: + badge = _publish_badge(t.status) + url_part = f" {t.url}" if t.url else "" + err_part = f" {error(t.error)}" if t.error else "" + typer.echo(f" {t.target}: {badge}{url_part}{err_part}") + + +# --------------------------------------------------------------------------- +# queue edit +# --------------------------------------------------------------------------- + + +@app.command() +def edit( + item_id: str = typer.Argument(..., help="Queue item ID (or prefix)."), + title: str | None = typer.Option(None, "--title", "-t", help="New title."), + description: str | None = typer.Option(None, "--description", "-d", help="New description."), + game_dir: Path | None = typer.Option(None, "--game-dir", "-g", help="Game directory."), +) -> None: + """Edit title or description of a queue item.""" + from reeln.core.queue import update_queue_item + + if title is None and description is None: + typer.echo(error("Provide --title and/or --description to edit."), err=True) + raise typer.Exit(code=1) + + d = game_dir or Path.cwd() + try: + updated = update_queue_item(d, item_id, title=title, 
description=description) + except QueueError as exc: + typer.echo(error(str(exc)), err=True) + raise typer.Exit(code=1) from exc + + typer.echo(f"Updated {_short_id(updated.id)}: {updated.title}") + + +# --------------------------------------------------------------------------- +# queue publish +# --------------------------------------------------------------------------- + + +@app.command() +def publish( + item_id: str = typer.Argument(..., help="Queue item ID (or prefix)."), + target: str | None = typer.Option(None, "--target", "-t", help="Publish to specific target only."), + game_dir: Path | None = typer.Option(None, "--game-dir", "-g", help="Game directory."), + profile: str | None = typer.Option(None, "--profile", help="Named config profile."), + config: Path | None = typer.Option(None, "--config", help="Explicit config file path."), +) -> None: + """Publish a queue item to one or all targets.""" + from reeln.core.config import load_config + from reeln.core.queue import get_queue_item, publish_queue_item + from reeln.plugins.loader import activate_plugins + + d = game_dir or Path.cwd() + + # Use stored config_profile from queue item unless CLI --profile overrides + effective_profile = profile + if effective_profile is None: + item = get_queue_item(d, item_id) + if item is not None and item.config_profile: + effective_profile = item.config_profile + + cfg = load_config(path=config, profile=effective_profile) + plugins = activate_plugins(cfg.plugins) + + try: + published = publish_queue_item(d, item_id, plugins, target=target) + except QueueError as exc: + typer.echo(error(str(exc)), err=True) + raise typer.Exit(code=1) from exc + + for t in published.publish_targets: + if t.status is PublishStatus.PUBLISHED: + typer.echo(f" {success('✓')} Published to {bold(t.target)}: {t.url}") + elif t.status is PublishStatus.FAILED: + typer.echo(f" {error('✗')} Failed {bold(t.target)}: {t.error}") + + +# 
--------------------------------------------------------------------------- +# queue publish-all +# --------------------------------------------------------------------------- + + +@app.command(name="publish-all") +def publish_all_cmd( + game_dir: Path | None = typer.Option(None, "--game-dir", "-g", help="Game directory."), + profile: str | None = typer.Option(None, "--profile", help="Named config profile."), + config: Path | None = typer.Option(None, "--config", help="Explicit config file path."), +) -> None: + """Publish all rendered items in the queue.""" + from reeln.core.config import load_config + from reeln.core.queue import publish_all + from reeln.plugins.loader import activate_plugins + + d = game_dir or Path.cwd() + cfg = load_config(path=config, profile=profile) + plugins = activate_plugins(cfg.plugins) + + results = publish_all(d, plugins) + if not results: + typer.echo("No items to publish.") + return + + for item in results: + typer.echo(f" {_short_id(item.id)} {_status_badge(item.status)} {item.title}") + + +# --------------------------------------------------------------------------- +# queue remove +# --------------------------------------------------------------------------- + + +@app.command() +def remove( + item_id: str = typer.Argument(..., help="Queue item ID (or prefix)."), + game_dir: Path | None = typer.Option(None, "--game-dir", "-g", help="Game directory."), +) -> None: + """Remove a queue item (soft-delete).""" + from reeln.core.queue import remove_from_queue + + d = game_dir or Path.cwd() + try: + removed = remove_from_queue(d, item_id) + except QueueError as exc: + typer.echo(error(str(exc)), err=True) + raise typer.Exit(code=1) from exc + + typer.echo(f"Removed {_short_id(removed.id)}: {removed.title}") + + +# --------------------------------------------------------------------------- +# queue targets +# --------------------------------------------------------------------------- + + +@app.command() +def targets( + profile: str | None 
= typer.Option(None, "--profile", help="Named config profile."), + config: Path | None = typer.Option(None, "--config", help="Explicit config file path."), +) -> None: + """List available publish targets from loaded plugins.""" + from reeln.core.config import load_config + from reeln.core.queue import discover_targets + from reeln.plugins.loader import activate_plugins + + cfg = load_config(path=config, profile=profile) + plugins = activate_plugins(cfg.plugins) + target_list = discover_targets(plugins) + + if not target_list: + typer.echo("No publish targets available. Install an uploader plugin.") + return + + for t in target_list: + typer.echo(f" {bold(t)}") diff --git a/reeln/commands/render.py b/reeln/commands/render.py index 8ac7e97..b8b94af 100644 --- a/reeln/commands/render.py +++ b/reeln/commands/render.py @@ -172,12 +172,13 @@ def _resolve_player_numbers( game_dir: Path | None, config_output_dir: Path | None, clip: Path | None, -) -> tuple[str, str | None, str | None]: - """Resolve --player-numbers to (scorer_display, assists_csv, scoring_team_name). +) -> tuple[str, str | None, str | None, Path | None]: + """Resolve --player-numbers to (scorer_display, assists_csv, scoring_team_name, logo_path). Loads game state, determines scoring team, loads roster, and looks up numbers. Returns the scorer display string, a comma-separated assists string (or None), - and the scoring team name (or None). + the scoring team name (or None), and the team logo path (or None if not set + or file does not exist). """ from reeln.core.highlights import load_game_state from reeln.core.teams import load_roster, load_team_profile, lookup_players, resolve_scoring_team @@ -232,7 +233,15 @@ def _resolve_player_numbers( scorer, assist_list = lookup_players(roster, numbers, team_name) assists_csv = ", ".join(assist_list) if assist_list else None - return (scorer, assists_csv, team_name) + + # 6. 
Resolve logo path + logo: Path | None = None + if team_profile.logo_path: + candidate = Path(team_profile.logo_path) + if candidate.is_file(): + logo = candidate + + return (scorer, assists_csv, team_name, logo) def _do_short( @@ -265,6 +274,7 @@ def _do_short( event_type: str | None = None, no_branding: bool = False, plugin_input: list[str] | None = None, + queue: bool = False, ) -> None: """Shared implementation for short and preview commands.""" from reeln.core.ffmpeg import discover_ffmpeg @@ -288,8 +298,9 @@ def _do_short( # Resolve --player-numbers before anything else _scoring_team_name: str | None = None + _logo_path: Path | None = None if player_numbers is not None: - scorer, assists_from_roster, _scoring_team_name = _resolve_player_numbers( + scorer, assists_from_roster, _scoring_team_name, _logo_path = _resolve_player_numbers( player_numbers, event_type, game_dir, config.paths.output_dir, clip ) # Explicit --player/--assists take precedence over roster lookup @@ -370,6 +381,7 @@ def _do_short( audio_codec=config.video.audio_codec, audio_bitrate=config.video.audio_bitrate, smart_zoom_frames=resolved_zoom_frames, + logo=_logo_path, ) # Apply render profile overlay if specified @@ -419,6 +431,7 @@ def _do_short( duration=dur, event_metadata=event_meta, scoring_team=_scoring_team_name, + has_logo=_logo_path is not None, ) subtitle_dir = (output or _default_output(clip, "_short")).parent @@ -461,6 +474,7 @@ def _do_short( speed_segments=short_config.speed_segments, smart_zoom_frames=short_config.smart_zoom_frames, branding=branding_subtitle, + logo=short_config.logo, ) # Smart zoom: extract frames before iterate or single render @@ -544,6 +558,7 @@ def _do_short( audio_codec=short_config.audio_codec, audio_bitrate=short_config.audio_bitrate, smart_zoom_frames=short_config.smart_zoom_frames, + logo=short_config.logo, ) source_fps = extracted_frames.fps if extracted_frames is not None else 30.0 @@ -602,6 +617,8 @@ def _do_short( game_event=render_game_event, 
player=player, assists=assists, + queue=queue, + config_profile=profile or "", ) except ReelnError as exc: typer.echo(f"Error: {exc}", err=True) @@ -683,21 +700,55 @@ def _do_short( typer.echo(f"Error: {exc}", err=True) raise typer.Exit(code=1) from exc - _post_data: dict[str, Any] = {"plan": plan, "result": result} - if render_game_info is not None: - _post_data["game_info"] = render_game_info - if render_game_event is not None: - _post_data["game_event"] = render_game_event - if player is not None: - _post_data["player"] = player - if assists is not None: - _post_data["assists"] = assists - if _plugin_inputs: - _post_data["plugin_inputs"] = _plugin_inputs - _get_reg().emit( - _RHook.POST_RENDER, - _RHookCtx(hook=_RHook.POST_RENDER, data=_post_data), - ) + if queue and not is_preview: + # Queue for review instead of publishing immediately + from reeln.core.queue import add_to_queue, discover_targets + + resolved_qd = game_dir or _find_game_dir(config.paths.output_dir, clip) + if resolved_qd is None: + resolved_qd = Path.cwd() + loaded_plugins = activate_plugins(config.plugins) + targets = discover_targets(loaded_plugins) + queue_item = add_to_queue( + resolved_qd, + result, + game_info=render_game_info, + game_event=render_game_event, + player=player or "", + assists=assists or "", + plugin_inputs=_plugin_inputs or None, + render_profile=render_profile_name or "", + format_str=f"{short_config.width}x{short_config.height}", + crop_mode=short_config.crop_mode.value, + event_id=event_id or "", + available_targets=targets, + config_profile=profile or "", + ) + _get_reg().emit( + _RHook.ON_QUEUE, + _RHookCtx( + hook=_RHook.ON_QUEUE, + data={"queue_item": queue_item, "game_info": render_game_info, "game_event": render_game_event}, + ), + ) + typer.echo(f"Queued: {queue_item.id} — {queue_item.title}") + else: + # Fast-track: emit POST_RENDER for immediate plugin upload + _post_data: dict[str, Any] = {"plan": plan, "result": result} + if render_game_info is not None: + 
_post_data["game_info"] = render_game_info + if render_game_event is not None: + _post_data["game_event"] = render_game_event + if player is not None: + _post_data["player"] = player + if assists is not None: + _post_data["assists"] = assists + if _plugin_inputs: + _post_data["plugin_inputs"] = _plugin_inputs + _get_reg().emit( + _RHook.POST_RENDER, + _RHookCtx(hook=_RHook.POST_RENDER, data=_post_data), + ) if result.duration_seconds is not None: typer.echo(f"Duration: {result.duration_seconds:.1f}s") @@ -817,6 +868,7 @@ def short( no_branding: bool = typer.Option(False, "--no-branding", help="Disable branding overlay."), dry_run: bool = typer.Option(False, "--dry-run", help="Show plan without executing."), plugin_input: list[str] = typer.Option([], "--plugin-input", "-I", help="Plugin input as KEY=VALUE (repeatable)."), + queue_flag: bool = typer.Option(False, "--queue", "-q", help="Queue for review instead of publishing immediately."), ) -> None: """Render a 9:16 short from a clip.""" _do_short( @@ -848,6 +900,7 @@ def short( event_type=event_type, no_branding=no_branding, plugin_input=plugin_input, + queue=queue_flag, ) @@ -1030,6 +1083,7 @@ def apply_profile( iterate: bool = typer.Option(False, "--iterate", help="Multi-iteration mode using event type config."), debug_flag: bool = typer.Option(False, "--debug", help="Write debug artifacts to game debug directory."), dry_run: bool = typer.Option(False, "--dry-run", help="Show plan without executing."), + queue_flag: bool = typer.Option(False, "--queue", "-q", help="Queue for review instead of publishing immediately."), ) -> None: """Apply a named render profile to a clip (full-frame, no crop/scale).""" from reeln.core.profiles import ( @@ -1050,7 +1104,7 @@ def apply_profile( # Resolve --player-numbers before anything else _scoring_team_name: str | None = None if player_numbers_str is not None: - scorer, assists_from_roster, _scoring_team_name = _resolve_player_numbers( + scorer, assists_from_roster, 
_scoring_team_name, _ = _resolve_player_numbers( player_numbers_str, event_type, game_dir, config.paths.output_dir, clip ) if player_name is None: @@ -1109,6 +1163,8 @@ def apply_profile( game_event=apply_game_event, player=player_name, assists=assists_str, + queue=queue_flag, + config_profile=profile or "", ) except ReelnError as exc: typer.echo(f"Error: {exc}", err=True) @@ -1189,19 +1245,46 @@ def apply_profile( typer.echo(f"Error: {exc}", err=True) raise typer.Exit(code=1) from exc - _apply_post: dict[str, Any] = {"plan": plan, "result": result} - if apply_game_info is not None: - _apply_post["game_info"] = apply_game_info - if apply_game_event is not None: - _apply_post["game_event"] = apply_game_event - if player_name is not None: - _apply_post["player"] = player_name - if assists_str is not None: - _apply_post["assists"] = assists_str - _apply_get_reg().emit( - _ApplyHook.POST_RENDER, - _ApplyHookCtx(hook=_ApplyHook.POST_RENDER, data=_apply_post), - ) + if queue_flag: + from reeln.core.queue import add_to_queue, discover_targets + + resolved_apply_qd = resolved_game_dir or Path.cwd() + loaded_apply_plugins = activate_plugins(config.plugins) + apply_targets = discover_targets(loaded_apply_plugins) + queue_item = add_to_queue( + resolved_apply_qd, + result, + game_info=apply_game_info, + game_event=apply_game_event, + player=player_name or "", + assists=assists_str or "", + render_profile=render_profile, + event_id=event or "", + available_targets=apply_targets, + config_profile=profile or "", + ) + _apply_get_reg().emit( + _ApplyHook.ON_QUEUE, + _ApplyHookCtx( + hook=_ApplyHook.ON_QUEUE, + data={"queue_item": queue_item, "game_info": apply_game_info, "game_event": apply_game_event}, + ), + ) + typer.echo(f"Queued: {queue_item.id} — {queue_item.title}") + else: + _apply_post: dict[str, Any] = {"plan": plan, "result": result} + if apply_game_info is not None: + _apply_post["game_info"] = apply_game_info + if apply_game_event is not None: + 
_apply_post["game_event"] = apply_game_event + if player_name is not None: + _apply_post["player"] = player_name + if assists_str is not None: + _apply_post["assists"] = assists_str + _apply_get_reg().emit( + _ApplyHook.POST_RENDER, + _ApplyHookCtx(hook=_ApplyHook.POST_RENDER, data=_apply_post), + ) if result.duration_seconds is not None: typer.echo(f"Duration: {result.duration_seconds:.1f}s") diff --git a/reeln/core/errors.py b/reeln/core/errors.py index ddf0815..65ad350 100644 --- a/reeln/core/errors.py +++ b/reeln/core/errors.py @@ -42,6 +42,10 @@ class MediaError(ReelnError): """Media file operation error.""" +class QueueError(ReelnError): + """Queue operation error.""" + + class PromptAborted(ReelnError): """User cancelled an interactive prompt.""" diff --git a/reeln/core/ffmpeg.py b/reeln/core/ffmpeg.py index 64accbf..253bdd2 100644 --- a/reeln/core/ffmpeg.py +++ b/reeln/core/ffmpeg.py @@ -547,6 +547,7 @@ def build_short_command(ffmpeg_path: Path, plan: RenderPlan) -> list[str]: """Build an ffmpeg command for short-form rendering with filter chains. Uses ``-filter_complex`` for the video filter chain and ``-af`` for audio. + Supports multiple inputs (e.g. a logo image as the second input). """ cmd = [ str(ffmpeg_path), @@ -556,12 +557,20 @@ def build_short_command(ffmpeg_path: Path, plan: RenderPlan) -> list[str]: "-i", str(plan.inputs[0]), ] + # Additional inputs (e.g. logo image). Static images need -loop 1 + # so they persist for the full duration of the overlay. + for extra_input in plan.inputs[1:]: + cmd.extend(["-loop", "1", "-i", str(extra_input)]) if plan.filter_complex: cmd.extend(["-filter_complex", plan.filter_complex]) # When audio is embedded in filter_complex (speed_segments), add # explicit stream mapping so ffmpeg picks the correct output pads. 
if "[vfinal]" in plan.filter_complex and "[afinal]" in plan.filter_complex: cmd.extend(["-map", "[vfinal]", "-map", "[afinal]"]) + elif len(plan.inputs) > 1 and "[afinal]" not in (plan.filter_complex or ""): + # Logo image has no audio — map audio explicitly from video input + # to prevent ffmpeg from trying to auto-select from the image. + cmd.extend(["-map", "0:a?"]) if plan.audio_filter: cmd.extend(["-af", plan.audio_filter]) cmd.extend( diff --git a/reeln/core/iterations.py b/reeln/core/iterations.py index fb4a3e1..7510afc 100644 --- a/reeln/core/iterations.py +++ b/reeln/core/iterations.py @@ -56,6 +56,8 @@ def render_iterations( game_event: object | None = None, player: str | None = None, assists: str | None = None, + queue: bool = False, + config_profile: str = "", ) -> tuple[IterationResult, list[str]]: """Render *clip* through multiple profiles and concatenate the results. @@ -132,6 +134,7 @@ def render_iterations( base_ctx, duration=iter_dur, event_metadata=event_metadata, + has_logo=short_config is not None and short_config.logo is not None, ) # Resolve subtitle template @@ -210,7 +213,7 @@ def render_iterations( messages.append(f"Concatenated {len(temp_outputs)} iterations") messages.append(f"Output: {output}") - # Emit POST_RENDER once for the final concatenated output + # Emit hooks for the final concatenated output from reeln.plugins.hooks import Hook from reeln.plugins.hooks import HookContext as PluginContext from reeln.plugins.registry import get_registry @@ -226,21 +229,56 @@ def render_iterations( duration_seconds=final_duration, file_size_bytes=file_size, ) - # Use the last iteration's plan for filter_complex detection - final_plan = plan - hook_data: dict[str, Any] = {"plan": final_plan, "result": final_result} - if game_info is not None: - hook_data["game_info"] = game_info - if game_event is not None: - hook_data["game_event"] = game_event - if player is not None: - hook_data["player"] = player - if assists is not None: - 
hook_data["assists"] = assists - get_registry().emit( - Hook.POST_RENDER, - PluginContext(hook=Hook.POST_RENDER, data=hook_data), - ) + + if queue: + # Queue for review instead of publishing immediately + from reeln.core.queue import add_to_queue, discover_targets + from reeln.models.game import GameEvent as _GE + from reeln.models.game import GameInfo as _GI + from reeln.plugins.loader import activate_plugins as _activate + + _gi = game_info if isinstance(game_info, _GI) else None + _ge = game_event if isinstance(game_event, _GE) else None + loaded = _activate(config.plugins) + targets = discover_targets(loaded) + game_dir = output.parent + queue_item = add_to_queue( + game_dir, + final_result, + game_info=_gi, + game_event=_ge, + player=player or "", + assists=assists or "", + render_profile=",".join(profile_names), + format_str=f"{short_config.width}x{short_config.height}" if short_config else "", + crop_mode=short_config.crop_mode.value if short_config else "", + available_targets=targets, + config_profile=config_profile, + ) + get_registry().emit( + Hook.ON_QUEUE, + PluginContext( + hook=Hook.ON_QUEUE, + data={"queue_item": queue_item, "game_info": _gi, "game_event": _ge}, + ), + ) + messages.append(f"Queued: {queue_item.id} — {queue_item.title}") + else: + # Fast-track: emit POST_RENDER for immediate plugin upload + final_plan = plan + hook_data: dict[str, Any] = {"plan": final_plan, "result": final_result} + if game_info is not None: + hook_data["game_info"] = game_info + if game_event is not None: + hook_data["game_event"] = game_event + if player is not None: + hook_data["player"] = player + if assists is not None: + hook_data["assists"] = assists + get_registry().emit( + Hook.POST_RENDER, + PluginContext(hook=Hook.POST_RENDER, data=hook_data), + ) messages.append("Iteration rendering complete") result = IterationResult( diff --git a/reeln/core/metadata.py b/reeln/core/metadata.py new file mode 100644 index 0000000..f17426d --- /dev/null +++ 
b/reeln/core/metadata.py @@ -0,0 +1,112 @@ +"""Centralized metadata generation for publish workflows.""" + +from __future__ import annotations + +from typing import Any + +from reeln.models.game import GameEvent, GameInfo + + +def generate_title( + game_info: GameInfo | None = None, + game_event: GameEvent | None = None, + player: str = "", + assists: str = "", +) -> str: + """Generate a publish title from game and event context. + + Format: ``{player} {event_type} - {home_team} vs {away_team}`` + Falls back gracefully when fields are missing. + """ + parts: list[str] = [] + + # Player + event type + event_type = game_event.event_type if game_event else "" + effective_player = player or (game_event.player if game_event else "") + if effective_player: + label = event_type.title() if event_type else "Highlight" + parts.append(f"{effective_player} {label}") + elif event_type: + parts.append(event_type.title()) + + # Teams + if game_info: + parts.append(f"{game_info.home_team} vs {game_info.away_team}") + + return " - ".join(parts) if parts else "Highlight" + + +def generate_description( + game_info: GameInfo | None = None, + game_event: GameEvent | None = None, + player: str = "", + assists: str = "", +) -> str: + """Generate a publish description from game and event context.""" + lines: list[str] = [] + + if game_info: + matchup = f"{game_info.home_team} vs {game_info.away_team}" + if game_info.date: + matchup += f" ({game_info.date})" + lines.append(matchup) + + context_parts: list[str] = [] + if game_info.sport: + context_parts.append(game_info.sport.title()) + if game_info.level: + context_parts.append(game_info.level) + if game_info.tournament: + context_parts.append(game_info.tournament) + if context_parts: + lines.append(" | ".join(context_parts)) + + effective_assists = assists + if not effective_assists and game_event: + effective_assists = game_event.metadata.get("assists", "") + if effective_assists: + lines.append(f"Assists: {effective_assists}") + + 
return "\n".join(lines) + + +def build_publish_metadata( + *, + title: str = "", + description: str = "", + game_info: GameInfo | None = None, + game_event: GameEvent | None = None, + player: str = "", + assists: str = "", + plugin_inputs: dict[str, Any] | None = None, +) -> dict[str, Any]: + """Build the metadata dict that uploader plugins receive.""" + meta: dict[str, Any] = { + "title": title, + "description": description, + } + + if game_info: + meta["home_team"] = game_info.home_team + meta["away_team"] = game_info.away_team + meta["date"] = game_info.date + meta["sport"] = game_info.sport + if game_info.level: + meta["level"] = game_info.level + if game_info.tournament: + meta["tournament"] = game_info.tournament + + if game_event: + meta["event_type"] = game_event.event_type + meta["event_id"] = game_event.id + if game_event.metadata: + meta["event_metadata"] = dict(game_event.metadata) + + if player: + meta["player"] = player + if assists: + meta["assists"] = assists + if plugin_inputs: + meta["plugin_inputs"] = dict(plugin_inputs) + + return meta diff --git a/reeln/core/overlay.py b/reeln/core/overlay.py index 6f4521a..1055b89 100644 --- a/reeln/core/overlay.py +++ b/reeln/core/overlay.py @@ -84,6 +84,7 @@ def build_overlay_context( away_colors: list[str] | None = None, y_offset: int = 0, scoring_team: str | None = None, + has_logo: bool = False, ) -> TemplateContext: """Enrich a template context with overlay-specific variables. @@ -109,11 +110,18 @@ def build_overlay_context( assist_end = format_ass_time(end_time if has_assists else 0.0) box_end = format_ass_time(end_time) - # Font sizing + # Font sizing — reduce max_chars when a logo is present so text + # stays within the clipped region and doesn't run under the logo. 
+ scorer_max_chars = 18 if has_logo else 24 + assist_max_chars = 22 if has_logo else 30 scorer_base = 46 if has_assists else 54 scorer_min = 32 if has_assists else 38 - goal_scorer_fs = str(overlay_font_size(scorer_text, base=scorer_base, min_size=scorer_min, max_chars=24)) - goal_assist_fs = str(overlay_font_size(f"{assist_1} {assist_2}".strip(), base=20, min_size=16, max_chars=30)) + goal_scorer_fs = str(overlay_font_size( + scorer_text, base=scorer_base, min_size=scorer_min, max_chars=scorer_max_chars, + )) + goal_assist_fs = str(overlay_font_size( + f"{assist_1} {assist_2}".strip(), base=20, min_size=16, max_chars=assist_max_chars, + )) # Colors primary_rgb = _DEFAULT_PRIMARY @@ -151,6 +159,12 @@ def build_overlay_context( goal_scorer_team = team_name team_level = level + # Logo reserve — when a logo is present, clip text rendering to the + # left portion of the box so it doesn't overlap the logo image. + # 200 ASS-units reserved for logo + padding on the right side. + _LOGO_RESERVE = 200 + text_right = str(3 + 1914 - _LOGO_RESERVE) if has_logo else "1920" + # Layout coordinates (ported from old CLI) variables: dict[str, str] = { "box_end": box_end, @@ -171,6 +185,7 @@ def build_overlay_context( "ass_name_color": ass_name_color, "ass_team_text_color": ass_team_text_color, "ass_name_outline_color": ass_name_outline_color, + "goal_overlay_text_right": text_right, "goal_overlay_border_x": "0", "goal_overlay_border_y": str(817 + y_offset), "goal_overlay_border_w": "1920", diff --git a/reeln/core/queue.py b/reeln/core/queue.py new file mode 100644 index 0000000..83582f2 --- /dev/null +++ b/reeln/core/queue.py @@ -0,0 +1,564 @@ +"""Queue business logic — load, save, add, update, remove, publish.""" + +from __future__ import annotations + +import contextlib +import json +import logging +import tempfile +import uuid +from dataclasses import replace +from datetime import UTC, datetime +from pathlib import Path +from typing import Any + +from reeln.core.errors import 
QueueError
+from reeln.core.log import get_logger
+from reeln.core.metadata import (
+    build_publish_metadata,
+    generate_description,
+    generate_title,
+)
+from reeln.models.game import GameEvent, GameInfo
+from reeln.models.queue import (
+    PublishStatus,
+    PublishTargetResult,
+    QueueItem,
+    QueueStatus,
+    RenderQueue,
+    dict_to_render_queue,
+    render_queue_to_dict,
+)
+from reeln.models.render_plan import RenderResult
+
+log: logging.Logger = get_logger(__name__)
+
+_QUEUE_FILE = "render_queue.json"
+_INDEX_FILE = "queue_index.json"
+
+
+# ---------------------------------------------------------------------------
+# Load / Save
+# ---------------------------------------------------------------------------
+
+
+def load_queue(game_dir: Path) -> RenderQueue:
+    """Load the render queue from *game_dir*, returning empty if absent."""
+    queue_file = game_dir / _QUEUE_FILE
+    if not queue_file.is_file():
+        return RenderQueue()
+    try:
+        raw = json.loads(queue_file.read_text(encoding="utf-8"))
+        return dict_to_render_queue(raw)
+    except (json.JSONDecodeError, KeyError, TypeError, ValueError) as exc:
+        raise QueueError(f"Invalid queue file {queue_file}: {exc}") from exc
+
+
+def save_queue(queue: RenderQueue, game_dir: Path) -> Path:
+    """Atomically write the render queue to *game_dir*."""
+    queue_file = game_dir / _QUEUE_FILE
+    queue_file.parent.mkdir(parents=True, exist_ok=True)
+
+    content = json.dumps(render_queue_to_dict(queue), indent=2) + "\n"
+
+    tmp_fd, tmp_name = tempfile.mkstemp(
+        suffix=".tmp", dir=queue_file.parent, text=True
+    )
+    try:
+        with open(tmp_fd, "w", encoding="utf-8") as tmp:  # UTF-8 to match load_queue's read
+            tmp.write(content)
+            tmp.flush()
+        Path(tmp_name).replace(queue_file)
+    except BaseException:
+        Path(tmp_name).unlink(missing_ok=True)
+        raise
+
+    log.debug("Queue saved to %s", queue_file)
+    return queue_file
+
+
+# ---------------------------------------------------------------------------
+# Queue index (advisory cross-game listing)
+# 
---------------------------------------------------------------------------
+
+
+def update_queue_index(game_dir: Path) -> None:
+    """Add *game_dir* to the central queue index."""
+    from reeln.core.config import data_dir
+
+    index_dir = data_dir()
+    index_dir.mkdir(parents=True, exist_ok=True)
+    index_file = index_dir / _INDEX_FILE
+
+    index: dict[str, list[str]] = {"queues": []}
+    if index_file.is_file():
+        with contextlib.suppress(json.JSONDecodeError, ValueError):
+            index = json.loads(index_file.read_text(encoding="utf-8"))
+
+    game_str = str(game_dir)
+    if game_str not in index.get("queues", []):
+        index.setdefault("queues", []).append(game_str)
+        tmp_fd, tmp_name = tempfile.mkstemp(
+            suffix=".tmp", dir=index_dir, text=True
+        )
+        try:
+            with open(tmp_fd, "w", encoding="utf-8") as tmp:  # UTF-8 to match load_queue_index's read
+                tmp.write(json.dumps(index, indent=2) + "\n")
+                tmp.flush()
+            Path(tmp_name).replace(index_file)
+        except BaseException:
+            Path(tmp_name).unlink(missing_ok=True)
+            raise
+
+
+def load_queue_index() -> list[str]:
+    """Load the central queue index, returning game directory paths."""
+    from reeln.core.config import data_dir
+
+    index_file = data_dir() / _INDEX_FILE
+    if not index_file.is_file():
+        return []
+    try:
+        raw = json.loads(index_file.read_text(encoding="utf-8"))
+        return list(raw.get("queues", []))
+    except (json.JSONDecodeError, ValueError):
+        return []
+
+
+# ---------------------------------------------------------------------------
+# Add / Update / Remove
+# ---------------------------------------------------------------------------
+
+
+def _generate_id() -> str:
+    """Generate a short unique ID for a queue item."""
+    return uuid.uuid4().hex[:12]
+
+
+def _now_iso() -> str:
+    """Return the current UTC time in ISO 8601 format."""
+    return datetime.now(tz=UTC).isoformat()
+
+
+def add_to_queue(
+    game_dir: Path,
+    result: RenderResult,
+    *,
+    game_info: GameInfo | None = None,
+    game_event: GameEvent | None = None,
+    player: str = "",
+    assists: str = "",
+    plugin_inputs: dict[str, 
Any] | None = None, + render_profile: str = "", + format_str: str = "", + crop_mode: str = "", + event_id: str = "", + available_targets: list[str] | None = None, + config_profile: str = "", +) -> QueueItem: + """Create a queue item from a render result and save to disk.""" + title = generate_title(game_info, game_event, player, assists) + description = generate_description(game_info, game_event, player, assists) + + targets = tuple( + PublishTargetResult(target=t) for t in (available_targets or []) + ) + + item = QueueItem( + id=_generate_id(), + output=str(result.output), + game_dir=str(game_dir), + status=QueueStatus.RENDERED, + queued_at=_now_iso(), + duration_seconds=result.duration_seconds, + file_size_bytes=result.file_size_bytes, + format=format_str, + crop_mode=crop_mode, + render_profile=render_profile, + event_id=event_id, + home_team=game_info.home_team if game_info else "", + away_team=game_info.away_team if game_info else "", + date=game_info.date if game_info else "", + sport=game_info.sport if game_info else "", + level=game_info.level if game_info else "", + tournament=game_info.tournament if game_info else "", + event_type=game_event.event_type if game_event else "", + player=player or (game_event.player if game_event else ""), + assists=assists, + title=title, + description=description, + publish_targets=targets, + config_profile=config_profile, + plugin_inputs=dict(plugin_inputs) if plugin_inputs else {}, + ) + + queue = load_queue(game_dir) + queue = RenderQueue(version=queue.version, items=(*queue.items, item)) + save_queue(queue, game_dir) + update_queue_index(game_dir) + + log.info("Added queue item %s: %s", item.id, title) + return item + + +def _find_item(queue: RenderQueue, item_id: str) -> tuple[int, QueueItem]: + """Find an item by exact ID or prefix match. 
Raises QueueError if not found.""" + # Exact match first + for idx, item in enumerate(queue.items): + if item.id == item_id: + return idx, item + # Prefix match + matches: list[tuple[int, QueueItem]] = [] + for idx, item in enumerate(queue.items): + if item.id.startswith(item_id): + matches.append((idx, item)) + if len(matches) == 1: + return matches[0] + if len(matches) > 1: + ids = ", ".join(m[1].id for m in matches) + raise QueueError(f"Ambiguous ID prefix '{item_id}' matches: {ids}") + raise QueueError(f"Queue item '{item_id}' not found") + + +def get_queue_item(game_dir: Path, item_id: str) -> QueueItem | None: + """Look up a queue item by ID or prefix. Returns None if not found.""" + queue = load_queue(game_dir) + try: + _, item = _find_item(queue, item_id) + return item + except QueueError: + return None + + +def update_queue_item( + game_dir: Path, + item_id: str, + *, + title: str | None = None, + description: str | None = None, +) -> QueueItem: + """Update editable fields on a queue item. 
Returns the updated item.""" + queue = load_queue(game_dir) + idx, item = _find_item(queue, item_id) + + updates: dict[str, Any] = {} + if title is not None: + updates["title"] = title + if description is not None: + updates["description"] = description + + if not updates: + return item + + updated = replace(item, **updates) + items = list(queue.items) + items[idx] = updated + save_queue(RenderQueue(version=queue.version, items=tuple(items)), game_dir) + return updated + + +def remove_from_queue(game_dir: Path, item_id: str) -> QueueItem: + """Soft-delete a queue item by marking it as REMOVED.""" + queue = load_queue(game_dir) + idx, item = _find_item(queue, item_id) + removed = replace(item, status=QueueStatus.REMOVED) + items = list(queue.items) + items[idx] = removed + save_queue(RenderQueue(version=queue.version, items=tuple(items)), game_dir) + log.info("Removed queue item %s", item.id) + return removed + + +# --------------------------------------------------------------------------- +# Publish +# --------------------------------------------------------------------------- + + +def discover_targets(plugins: dict[str, object]) -> list[str]: + """Return names of plugins that handle uploads. + + Checks for both the ``Uploader`` protocol (``upload()`` method) and + plugins registered for ``POST_RENDER`` (``on_post_render()`` method), + since existing plugins handle uploads inside their POST_RENDER handlers. 
+ """ + targets: list[str] = [] + for name, plugin in plugins.items(): + if callable(getattr(plugin, "upload", None)) or callable(getattr(plugin, "on_post_render", None)): + targets.append(name) + return sorted(targets) + + +def _is_publish_target(plugin: object) -> bool: + """Check if a plugin can handle publishing (upload protocol or POST_RENDER hook).""" + return callable(getattr(plugin, "upload", None)) or callable( + getattr(plugin, "on_post_render", None) + ) + + +def publish_queue_item( + game_dir: Path, + item_id: str, + plugins: dict[str, object], + *, + target: str | None = None, +) -> QueueItem: + """Publish a queue item to one or all targets. + + When *target* is ``None``, publishes to all pending targets. + When *target* is a string, publishes to that single target. + + Supports two plugin patterns: + - **Uploader protocol**: plugins with an ``upload()`` method are called directly + - **POST_RENDER hook**: plugins with ``on_post_render()`` are triggered via + ``POST_RENDER`` hook emission (existing plugin pattern) + """ + from reeln.core.throttle import upload_lock + from reeln.models.render_plan import RenderPlan + from reeln.models.render_plan import RenderResult as _RR + from reeln.plugins.hooks import Hook, HookContext + from reeln.plugins.registry import get_registry + + queue = load_queue(game_dir) + idx, item = _find_item(queue, item_id) + + if item.status is QueueStatus.REMOVED: + raise QueueError(f"Cannot publish removed item '{item.id}'") + + output_path = Path(item.output) + if not output_path.is_file(): + raise QueueError(f"Output file not found: {item.output}") + + # Build metadata for uploaders + game_info = _reconstruct_game_info(item) + game_event = _reconstruct_game_event(item) + metadata = build_publish_metadata( + title=item.title, + description=item.description, + game_info=game_info, + game_event=game_event, + player=item.player, + assists=item.assists, + plugin_inputs=item.plugin_inputs or None, + ) + + # Determine which targets 
to publish to + targets_to_publish: list[str] = [] + if target is not None: + if target not in plugins or not _is_publish_target(plugins[target]): + raise QueueError(f"Unknown or non-uploader target: '{target}'") + targets_to_publish = [target] + else: + # Check stored publish_targets first + for ptr in item.publish_targets: + if ( + ptr.status is PublishStatus.PENDING + and ptr.target in plugins + and _is_publish_target(plugins[ptr.target]) + ): + targets_to_publish.append(ptr.target) + # Fall back to discovering from loaded plugins if no stored targets + if not targets_to_publish and not item.publish_targets: + targets_to_publish = discover_targets(plugins) + + if not targets_to_publish: + raise QueueError("No pending publish targets") + + # Mark as publishing + item = replace(item, status=QueueStatus.PUBLISHING) + items = list(queue.items) + items[idx] = item + save_queue(RenderQueue(version=queue.version, items=tuple(items)), game_dir) + + # Separate targets by publish mechanism + uploader_targets = [t for t in targets_to_publish if callable(getattr(plugins[t], "upload", None))] + hook_targets = [t for t in targets_to_publish if t not in uploader_targets] + + updated_targets = list(item.publish_targets) + + # Publish via Uploader protocol (direct call per target) + for target_name in uploader_targets: + plugin = plugins[target_name] + target_idx = _find_target_idx(updated_targets, target_name) + + try: + with upload_lock(): + url: str = plugin.upload(output_path, metadata=metadata) # type: ignore[attr-defined] + result_ptr = PublishTargetResult( + target=target_name, + status=PublishStatus.PUBLISHED, + url=url, + published_at=_now_iso(), + ) + log.info("Published %s to %s: %s", item.id, target_name, url) + except Exception as exc: + result_ptr = PublishTargetResult( + target=target_name, + status=PublishStatus.FAILED, + error=str(exc), + ) + log.warning("Failed to publish %s to %s: %s", item.id, target_name, exc) + + if target_idx is not None: + 
updated_targets[target_idx] = result_ptr + else: + updated_targets.append(result_ptr) + + # Publish via POST_RENDER hook (broadcast to all registered handlers) + if hook_targets: + post_render_result = _RR( + output=output_path, + duration_seconds=item.duration_seconds, + file_size_bytes=item.file_size_bytes, + ) + post_render_plan = RenderPlan(inputs=[output_path], output=output_path) + hook_data: dict[str, Any] = { + "plan": post_render_plan, + "result": post_render_result, + } + if game_info is not None: + hook_data["game_info"] = game_info + if game_event is not None: + hook_data["game_event"] = game_event + if item.player: + hook_data["player"] = item.player + if item.assists: + hook_data["assists"] = item.assists + if item.plugin_inputs: + hook_data["plugin_inputs"] = item.plugin_inputs + # Include publish metadata so plugins can use edited title/description + hook_data["publish_metadata"] = metadata + + try: + get_registry().emit( + Hook.POST_RENDER, + HookContext(hook=Hook.POST_RENDER, data=hook_data), + ) + now = _now_iso() + for target_name in hook_targets: + target_idx = _find_target_idx(updated_targets, target_name) + result_ptr = PublishTargetResult( + target=target_name, + status=PublishStatus.PUBLISHED, + published_at=now, + ) + if target_idx is not None: + updated_targets[target_idx] = result_ptr + else: + updated_targets.append(result_ptr) + log.info("Published %s via POST_RENDER to %s", item.id, hook_targets) + except Exception as exc: + for target_name in hook_targets: + target_idx = _find_target_idx(updated_targets, target_name) + result_ptr = PublishTargetResult( + target=target_name, + status=PublishStatus.FAILED, + error=str(exc), + ) + if target_idx is not None: + updated_targets[target_idx] = result_ptr + else: + updated_targets.append(result_ptr) + log.warning("POST_RENDER publish failed for %s: %s", item.id, exc) + + # Emit ON_PUBLISH for tracking + for target_name in targets_to_publish: + published_ptr: PublishTargetResult | None = next( 
+ (t for t in updated_targets if t.target == target_name), None + ) + if published_ptr is not None: # pragma: no branch + get_registry().emit( + Hook.ON_PUBLISH, + HookContext( + hook=Hook.ON_PUBLISH, + data={ + "queue_item_id": item.id, + "target": target_name, + "status": published_ptr.status.value, + "url": published_ptr.url, + "error": published_ptr.error, + "metadata": metadata, + }, + ), + ) + + # Determine overall status + all_statuses = {t.status for t in updated_targets} + if all(s is PublishStatus.PUBLISHED for s in all_statuses): + overall = QueueStatus.PUBLISHED + elif PublishStatus.PUBLISHED in all_statuses: + overall = QueueStatus.PARTIAL + elif all(s is PublishStatus.FAILED for s in all_statuses): + overall = QueueStatus.FAILED + else: + overall = QueueStatus.PARTIAL + + item = replace( + item, + status=overall, + publish_targets=tuple(updated_targets), + ) + queue = load_queue(game_dir) + items_list = list(queue.items) + for i, it in enumerate(items_list): # pragma: no branch + if it.id == item.id: + items_list[i] = item + break + save_queue(RenderQueue(version=queue.version, items=tuple(items_list)), game_dir) + + return item + + +def publish_all( + game_dir: Path, + plugins: dict[str, object], +) -> list[QueueItem]: + """Publish all rendered (unpublished) items in the queue.""" + queue = load_queue(game_dir) + results: list[QueueItem] = [] + for item in queue.items: + if item.status is QueueStatus.RENDERED: + published = publish_queue_item(game_dir, item.id, plugins) + results.append(published) + return results + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _find_target_idx( + targets: list[PublishTargetResult], target_name: str +) -> int | None: + """Find the index of a target in the list, or None.""" + for i, t in enumerate(targets): # pragma: no branch + if t.target == target_name: + return i + return None + + 
+def _reconstruct_game_info(item: QueueItem) -> GameInfo | None: + """Reconstruct a GameInfo from snapshotted queue item fields.""" + if not item.home_team and not item.away_team: + return None + return GameInfo( + date=item.date, + home_team=item.home_team, + away_team=item.away_team, + sport=item.sport, + level=item.level, + tournament=item.tournament, + ) + + +def _reconstruct_game_event(item: QueueItem) -> GameEvent | None: + """Reconstruct a GameEvent from snapshotted queue item fields.""" + if not item.event_id: + return None + return GameEvent( + id=item.event_id, + clip="", + segment_number=0, + event_type=item.event_type, + player=item.player, + ) diff --git a/reeln/core/shorts.py b/reeln/core/shorts.py index fc76556..fda7bbf 100644 --- a/reeln/core/shorts.py +++ b/reeln/core/shorts.py @@ -286,6 +286,100 @@ def build_speed_segments_filters( return ";".join(video_parts), ";".join(audio_parts) +# --------------------------------------------------------------------------- +# Logo overlay +# --------------------------------------------------------------------------- + +# ASS overlay box constants (PlayRes 1920x1080) +_ASS_RES_X = 1920 +_ASS_RES_Y = 1080 +_BOX_X = 3 +_BOX_Y = 820 +_BOX_W = 1914 +_BOX_H_ASSISTS = 142 +_BOX_H_NO_ASSISTS = 135 +_LOGO_MARGIN = 10 # margin from box edges in ASS units + + +def build_logo_overlay_filter( + *, + target_width: int, + target_height: int, + y_offset: int = 0, + has_assists: bool = True, +) -> tuple[str, str]: + """Build ffmpeg scale and overlay filters for a team logo. + + The logo is positioned on the right side of the overlay box, vertically + centred. Coordinates are computed by mapping the ASS 1920x1080 box + layout into the actual output pixel space. + + Returns ``(logo_scale_filter, logo_overlay_filter)``. 
+ """ + x_scale = target_width / _ASS_RES_X + y_scale = target_height / _ASS_RES_Y + box_h_ass = _BOX_H_ASSISTS if has_assists else _BOX_H_NO_ASSISTS + box_y_out = (_BOX_Y + y_offset) * y_scale + box_w_out = _BOX_W * x_scale + box_h_out = box_h_ass * y_scale + box_x_out = _BOX_X * x_scale + + # Scale logo to 80 % of box height, preserving aspect ratio. + logo_max_h = int(box_h_out * 0.8) + logo_max_h += logo_max_h % 2 # ensure even for ffmpeg + + scale_filter = f"scale=-1:{logo_max_h}:flags=lanczos" + + margin_out = int(_LOGO_MARGIN * x_scale) + # overlay expressions: ``w`` = overlay (logo) width, ``h`` = overlay height + logo_x = int(box_x_out + box_w_out) - margin_out + logo_cy = int(box_y_out + box_h_out / 2) + overlay_filter = f"overlay=x={logo_x}-w:y={logo_cy}-h/2:format=auto:shortest=1" + + return scale_filter, overlay_filter + + +def _wrap_filter_with_logo( + filter_complex: str, + logo_scale: str, + logo_overlay: str, +) -> str: + """Append logo overlay to an existing filter_complex string. + + Handles three graph shapes: + + 1. **Stream-label graph with** ``[vfinal]``: rename ``[vfinal]`` → + ``[_prelogo]`` (last occurrence only), overlay produces ``[vfinal]``. + 2. **Multi-stream graph** (contains ``[0:v]`` and ``;``): label the + final output ``[_prelogo]``, append overlay. + 3. **Simple comma-joined chain**: wrap with ``[0:v]`` prefix and labels. + """ + logo_part = f"[1:v]{logo_scale}[_logo]" + + if "[vfinal]" in filter_complex: + idx = filter_complex.rfind("[vfinal]") + before = filter_complex[:idx] + after = filter_complex[idx + len("[vfinal]"):] + return ( + f"{before}[_prelogo]{after};" + f"{logo_part};" + f"[_prelogo][_logo]{logo_overlay}[vfinal]" + ) + if "[0:v]" in filter_complex and ";" in filter_complex: + # Multi-stream graph (e.g. smart pad): label the tail, then overlay. 
+ return ( + f"{filter_complex}[_prelogo];" + f"{logo_part};" + f"[_prelogo][_logo]{logo_overlay}" + ) + # Simple comma-joined chain + return ( + f"[0:v]{filter_complex}[_prelogo];" + f"{logo_part};" + f"[_prelogo][_logo]{logo_overlay}" + ) + + # --------------------------------------------------------------------------- # Filter chain assembly # --------------------------------------------------------------------------- @@ -399,6 +493,12 @@ def build_filter_chain( post_filters=post_filters or None, source_fps=source_fps, ) + if config.logo is not None: + logo_scale, logo_overlay = build_logo_overlay_filter( + target_width=config.width, + target_height=config.height, + ) + filter_complex = _wrap_filter_with_logo(filter_complex, logo_scale, logo_overlay) return filter_complex, audio_filter if effective_crop == CropMode.PAD: @@ -435,6 +535,14 @@ def build_filter_chain( filter_complex = ",".join(filters) + # 9. Logo overlay (after subtitle/branding, as a second input) + if config.logo is not None: + logo_scale, logo_overlay = build_logo_overlay_filter( + target_width=config.width, + target_height=config.height, + ) + filter_complex = _wrap_filter_with_logo(filter_complex, logo_scale, logo_overlay) + # Audio filter audio_filter = None if config.speed != 1.0: @@ -564,7 +672,14 @@ def _build_speed_segments_chain( audio_graph = audio_segs.replace("[_asrc]", "[0:a]", 1) audio_graph = audio_graph.replace("[_aout]", "[afinal]") - return f"{video_graph};{audio_graph}", None + fc = f"{video_graph};{audio_graph}" + if config.logo is not None: + logo_scale, logo_overlay = build_logo_overlay_filter( + target_width=config.width, + target_height=config.height, + ) + fc = _wrap_filter_with_logo(fc, logo_scale, logo_overlay) + return fc, None # Crop or pad (static) if effective_crop == CropMode.PAD: @@ -617,6 +732,12 @@ def _build_speed_segments_chain( audio_graph = audio_graph.replace("[_aout]", "[afinal]") filter_complex = f"{video_graph};{audio_graph}" + if config.logo is not 
None: + logo_scale, logo_overlay = build_logo_overlay_filter( + target_width=config.width, + target_height=config.height, + ) + filter_complex = _wrap_filter_with_logo(filter_complex, logo_scale, logo_overlay) return filter_complex, None @@ -634,8 +755,11 @@ def plan_short( """Create a RenderPlan for a short-form render.""" validate_short_config(config) filter_complex, audio_filter = build_filter_chain(config, zoom_path=zoom_path, source_fps=source_fps) + inputs: list[Path] = [config.input] + if config.logo is not None: + inputs.append(config.logo) return RenderPlan( - inputs=[config.input], + inputs=inputs, output=config.output, codec=config.codec, preset=config.preset, @@ -681,10 +805,14 @@ def plan_preview(config: ShortConfig) -> RenderPlan: audio_codec=config.audio_codec, audio_bitrate=config.audio_bitrate, branding=config.branding, + logo=config.logo, ) filter_complex, audio_filter = build_filter_chain(preview) + inputs: list[Path] = [preview.input] + if preview.logo is not None: + inputs.append(preview.logo) return RenderPlan( - inputs=[preview.input], + inputs=inputs, output=preview.output, codec=preview.codec, preset=preview.preset, diff --git a/reeln/data/templates/goal_overlay.ass b/reeln/data/templates/goal_overlay.ass index 07326d4..c316310 100644 --- a/reeln/data/templates/goal_overlay.ass +++ b/reeln/data/templates/goal_overlay.ass @@ -18,7 +18,7 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text ; Overlay block box (x=10,y=820,w=1900,h=220) with 6px border Dialogue: 0,0:00:00.00,{{box_end}},BoxBorder,,0,0,0,,{\p1\pos({{goal_overlay_border_x}},{{goal_overlay_border_y}})\bord0\shad0}m 0 0 l {{goal_overlay_border_w}} 0 l {{goal_overlay_border_w}} {{goal_overlay_border_h}} l 0 {{goal_overlay_border_h}} Dialogue: 1,0:00:00.00,{{box_end}},BoxFill,,0,0,0,,{\p1\pos({{goal_overlay_box_x}},{{goal_overlay_box_y}})\bord0\shad0}m 0 0 l {{goal_overlay_box_w}} 0 l {{goal_overlay_box_w}} {{goal_overlay_box_h}} l 0 
{{goal_overlay_box_h}} -Dialogue: 2,0:00:00.00,{{box_end}},TeamLine,,0,0,0,,{\an7\pos({{goal_overlay_team_x}},{{goal_overlay_team_y}})}{{goal_scorer_team}} - {{team_level}} -Dialogue: 3,{{scorer_start}},{{scorer_end}},NameLine,,0,0,0,,{\an7\pos({{goal_overlay_scorer_x}},{{goal_overlay_scorer_y}})\fs{{goal_scorer_fs}}}{{goal_scorer_text}} -Dialogue: 4,{{assist_start}},{{assist_end}},AssistLine,,0,0,0,,{\an7\pos({{goal_overlay_assist_1_x}},{{goal_overlay_assist_1_y}})\fs{{goal_assist_fs}}}{{goal_assist_1}} -Dialogue: 5,{{assist_start}},{{assist_end}},AssistLine,,0,0,0,,{\an7\pos({{goal_overlay_assist_2_x}},{{goal_overlay_assist_2_y}})\fs{{goal_assist_fs}}}{{goal_assist_2}} +Dialogue: 2,0:00:00.00,{{box_end}},TeamLine,,0,0,0,,{\an7\pos({{goal_overlay_team_x}},{{goal_overlay_team_y}})\clip(0,0,{{goal_overlay_text_right}},1080)}{{goal_scorer_team}} - {{team_level}} +Dialogue: 3,{{scorer_start}},{{scorer_end}},NameLine,,0,0,0,,{\an7\pos({{goal_overlay_scorer_x}},{{goal_overlay_scorer_y}})\fs{{goal_scorer_fs}}\clip(0,0,{{goal_overlay_text_right}},1080)}{{goal_scorer_text}} +Dialogue: 4,{{assist_start}},{{assist_end}},AssistLine,,0,0,0,,{\an7\pos({{goal_overlay_assist_1_x}},{{goal_overlay_assist_1_y}})\fs{{goal_assist_fs}}\clip(0,0,{{goal_overlay_text_right}},1080)}{{goal_assist_1}} +Dialogue: 5,{{assist_start}},{{assist_end}},AssistLine,,0,0,0,,{\an7\pos({{goal_overlay_assist_2_x}},{{goal_overlay_assist_2_y}})\fs{{goal_assist_fs}}\clip(0,0,{{goal_overlay_text_right}},1080)}{{goal_assist_2}} diff --git a/reeln/models/auth.py b/reeln/models/auth.py new file mode 100644 index 0000000..7fa70db --- /dev/null +++ b/reeln/models/auth.py @@ -0,0 +1,64 @@ +"""Data structures for plugin authentication check results.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import Enum + + +class AuthStatus(Enum): + """Outcome status for a single auth check.""" + + OK = "ok" + WARN = "warn" + FAIL = "fail" + EXPIRED = "expired" + NOT_CONFIGURED 
= "not_configured" + + +@dataclass(frozen=True) +class AuthCheckResult: + """Result of a single authentication check. + + Each plugin may return multiple of these (e.g., Meta returns one + per service: Facebook Page, Instagram, Threads). + """ + + service: str + status: AuthStatus + message: str + identity: str = "" + expires_at: str = "" + scopes: list[str] = field(default_factory=list) + required_scopes: list[str] = field(default_factory=list) + hint: str = "" + + +@dataclass(frozen=True) +class PluginAuthReport: + """Aggregated auth report from a single plugin.""" + + plugin_name: str + results: list[AuthCheckResult] = field(default_factory=list) + + +def auth_check_result_to_dict(result: AuthCheckResult) -> dict[str, object]: + """Serialize an ``AuthCheckResult`` to a JSON-compatible dict.""" + return { + "service": result.service, + "status": result.status.value, + "message": result.message, + "identity": result.identity, + "expires_at": result.expires_at, + "scopes": list(result.scopes), + "required_scopes": list(result.required_scopes), + "hint": result.hint, + } + + +def plugin_auth_report_to_dict(report: PluginAuthReport) -> dict[str, object]: + """Serialize a ``PluginAuthReport`` to a JSON-compatible dict.""" + return { + "name": report.plugin_name, + "results": [auth_check_result_to_dict(r) for r in report.results], + } diff --git a/reeln/models/queue.py b/reeln/models/queue.py new file mode 100644 index 0000000..3d67999 --- /dev/null +++ b/reeln/models/queue.py @@ -0,0 +1,200 @@ +"""Queue data models for staged render-then-publish workflow.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import Enum +from typing import Any + + +class QueueStatus(Enum): + """Lifecycle status of a queue item.""" + + RENDERED = "rendered" + PUBLISHING = "publishing" + PUBLISHED = "published" + PARTIAL = "partial" + FAILED = "failed" + REMOVED = "removed" + + +class PublishStatus(Enum): + """Status of a single publish 
target.""" + + PENDING = "pending" + PUBLISHED = "published" + FAILED = "failed" + SKIPPED = "skipped" + + +@dataclass(frozen=True) +class PublishTargetResult: + """Outcome of publishing to a single target.""" + + target: str + status: PublishStatus = PublishStatus.PENDING + url: str = "" + error: str = "" + published_at: str = "" + + +@dataclass(frozen=True) +class QueueItem: + """A rendered clip queued for review and selective publishing.""" + + id: str + output: str + game_dir: str + status: QueueStatus + queued_at: str + + # Render metadata (snapshotted at queue time) + duration_seconds: float | None = None + file_size_bytes: int | None = None + format: str = "" + crop_mode: str = "" + render_profile: str = "" + event_id: str = "" + + # Game context (snapshotted from GameInfo/GameEvent) + home_team: str = "" + away_team: str = "" + date: str = "" + sport: str = "" + level: str = "" + tournament: str = "" + event_type: str = "" + player: str = "" + assists: str = "" + + # Editable publish metadata + title: str = "" + description: str = "" + + # Per-target publish tracking + publish_targets: tuple[PublishTargetResult, ...] = () + + # Config profile used at queue time (loaded at publish time) + config_profile: str = "" + + # Plugin inputs passed through + plugin_inputs: dict[str, Any] = field(default_factory=dict) + + +@dataclass(frozen=True) +class RenderQueue: + """Container for queue items persisted to render_queue.json.""" + + version: int = 1 + items: tuple[QueueItem, ...] 
= () + + +# --------------------------------------------------------------------------- +# Serialization helpers +# --------------------------------------------------------------------------- + + +def publish_target_result_to_dict(ptr: PublishTargetResult) -> dict[str, Any]: + """Serialize a ``PublishTargetResult`` to a JSON-compatible dict.""" + return { + "target": ptr.target, + "status": ptr.status.value, + "url": ptr.url, + "error": ptr.error, + "published_at": ptr.published_at, + } + + +def dict_to_publish_target_result(data: dict[str, Any]) -> PublishTargetResult: + """Deserialize a dict into a ``PublishTargetResult``.""" + return PublishTargetResult( + target=str(data["target"]), + status=PublishStatus(data.get("status", "pending")), + url=str(data.get("url", "")), + error=str(data.get("error", "")), + published_at=str(data.get("published_at", "")), + ) + + +def queue_item_to_dict(item: QueueItem) -> dict[str, Any]: + """Serialize a ``QueueItem`` to a JSON-compatible dict.""" + return { + "id": item.id, + "output": item.output, + "game_dir": item.game_dir, + "status": item.status.value, + "queued_at": item.queued_at, + "duration_seconds": item.duration_seconds, + "file_size_bytes": item.file_size_bytes, + "format": item.format, + "crop_mode": item.crop_mode, + "render_profile": item.render_profile, + "event_id": item.event_id, + "home_team": item.home_team, + "away_team": item.away_team, + "date": item.date, + "sport": item.sport, + "level": item.level, + "tournament": item.tournament, + "event_type": item.event_type, + "player": item.player, + "assists": item.assists, + "title": item.title, + "description": item.description, + "publish_targets": [ + publish_target_result_to_dict(t) for t in item.publish_targets + ], + "config_profile": item.config_profile, + "plugin_inputs": dict(item.plugin_inputs), + } + + +def dict_to_queue_item(data: dict[str, Any]) -> QueueItem: + """Deserialize a dict into a ``QueueItem``.""" + targets_raw = 
data.get("publish_targets", []) + return QueueItem( + id=str(data["id"]), + output=str(data["output"]), + game_dir=str(data["game_dir"]), + status=QueueStatus(data.get("status", "rendered")), + queued_at=str(data.get("queued_at", "")), + duration_seconds=data.get("duration_seconds"), + file_size_bytes=data.get("file_size_bytes"), + format=str(data.get("format", "")), + crop_mode=str(data.get("crop_mode", "")), + render_profile=str(data.get("render_profile", "")), + event_id=str(data.get("event_id", "")), + home_team=str(data.get("home_team", "")), + away_team=str(data.get("away_team", "")), + date=str(data.get("date", "")), + sport=str(data.get("sport", "")), + level=str(data.get("level", "")), + tournament=str(data.get("tournament", "")), + event_type=str(data.get("event_type", "")), + player=str(data.get("player", "")), + assists=str(data.get("assists", "")), + title=str(data.get("title", "")), + description=str(data.get("description", "")), + publish_targets=tuple( + dict_to_publish_target_result(t) for t in targets_raw + ), + config_profile=str(data.get("config_profile", "")), + plugin_inputs=dict(data.get("plugin_inputs", {})), + ) + + +def render_queue_to_dict(queue: RenderQueue) -> dict[str, Any]: + """Serialize a ``RenderQueue`` to a JSON-compatible dict.""" + return { + "version": queue.version, + "items": [queue_item_to_dict(item) for item in queue.items], + } + + +def dict_to_render_queue(data: dict[str, Any]) -> RenderQueue: + """Deserialize a dict into a ``RenderQueue``.""" + items_raw = data.get("items", []) + return RenderQueue( + version=int(data.get("version", 1)), + items=tuple(dict_to_queue_item(item) for item in items_raw), + ) diff --git a/reeln/models/short.py b/reeln/models/short.py index 499f99e..f95634e 100644 --- a/reeln/models/short.py +++ b/reeln/models/short.py @@ -77,3 +77,4 @@ class ShortConfig: speed_segments: tuple[SpeedSegment, ...] 
| None = None smart_zoom_frames: int = 5 branding: Path | None = None + logo: Path | None = None diff --git a/reeln/plugins/__init__.py b/reeln/plugins/__init__.py index ccc8218..ad4c591 100644 --- a/reeln/plugins/__init__.py +++ b/reeln/plugins/__init__.py @@ -2,15 +2,19 @@ from __future__ import annotations +from reeln.models.auth import AuthCheckResult, AuthStatus, PluginAuthReport from reeln.models.plugin import GeneratorResult from reeln.models.plugin_input import InputField, PluginInputSchema -from reeln.plugins.capabilities import Generator, MetadataEnricher, Notifier, Uploader +from reeln.plugins.capabilities import Authenticator, Generator, MetadataEnricher, Notifier, Uploader from reeln.plugins.hooks import Hook, HookContext, HookHandler from reeln.plugins.inputs import InputCollector, get_input_collector, reset_input_collector from reeln.plugins.loader import activate_plugins from reeln.plugins.registry import HookRegistry, get_registry, reset_registry __all__ = [ + "AuthCheckResult", + "AuthStatus", + "Authenticator", "Generator", "GeneratorResult", "Hook", @@ -21,6 +25,7 @@ "InputField", "MetadataEnricher", "Notifier", + "PluginAuthReport", "PluginInputSchema", "Uploader", "activate_plugins", diff --git a/reeln/plugins/capabilities.py b/reeln/plugins/capabilities.py index 4d18dd5..d2b165a 100644 --- a/reeln/plugins/capabilities.py +++ b/reeln/plugins/capabilities.py @@ -5,6 +5,7 @@ from pathlib import Path from typing import Any, Protocol +from reeln.models.auth import AuthCheckResult from reeln.models.plugin import GeneratorResult @@ -46,3 +47,17 @@ class Generator(Protocol): def generate( # pragma: no cover self, context: dict[str, Any] ) -> GeneratorResult: ... + + +class Authenticator(Protocol): + """Protocol for plugins that support auth testing and reauthentication.""" + + name: str + + def auth_check( # pragma: no cover + self, + ) -> list[AuthCheckResult]: ... + + def auth_refresh( # pragma: no cover + self, + ) -> list[AuthCheckResult]: ... 
diff --git a/reeln/plugins/hooks.py b/reeln/plugins/hooks.py index 26c80bc..27f9844 100644 --- a/reeln/plugins/hooks.py +++ b/reeln/plugins/hooks.py @@ -23,6 +23,8 @@ class Hook(Enum): ON_SEGMENT_START = "on_segment_start" ON_SEGMENT_COMPLETE = "on_segment_complete" ON_FRAMES_EXTRACTED = "on_frames_extracted" + ON_QUEUE = "on_queue" + ON_PUBLISH = "on_publish" ON_ERROR = "on_error" diff --git a/reeln/plugins/loader.py b/reeln/plugins/loader.py index 4adfda6..b2b97dd 100644 --- a/reeln/plugins/loader.py +++ b/reeln/plugins/loader.py @@ -8,6 +8,7 @@ from reeln.core.errors import PluginError from reeln.core.log import get_logger +from reeln.models.auth import AuthCheckResult, AuthStatus, PluginAuthReport from reeln.models.config import PluginsConfig from reeln.models.doctor import DoctorCheck from reeln.models.plugin import PluginInfo @@ -40,6 +41,7 @@ def set_enforce_hooks_override(*, disable: bool) -> None: ("uploader", "upload"), ("notifier", "notify"), ("doctor", "doctor_checks"), + ("authenticator", "auth_check"), ] @@ -326,3 +328,80 @@ def collect_doctor_checks(loaded_plugins: dict[str, object]) -> list[DoctorCheck exc_info=True, ) return checks + + +def collect_auth_checks( + loaded_plugins: dict[str, object], + *, + name_filter: str = "", +) -> list[PluginAuthReport]: + """Collect auth check results from loaded plugins. + + Calls ``auth_check()`` on each plugin that exposes it. + When *name_filter* is non-empty, only that plugin is checked. 
+ """ + reports: list[PluginAuthReport] = [] + for pname, plugin in loaded_plugins.items(): + if name_filter and pname != name_filter: + continue + fn = getattr(plugin, "auth_check", None) + if not callable(fn): + continue + try: + results = fn() + reports.append(PluginAuthReport(plugin_name=pname, results=results)) + except Exception: + log.warning( + "Plugin %s auth_check() failed, skipping", + pname, + exc_info=True, + ) + reports.append( + PluginAuthReport( + plugin_name=pname, + results=[ + AuthCheckResult( + service=pname, + status=AuthStatus.FAIL, + message="auth_check() raised an exception", + ) + ], + ) + ) + return reports + + +def refresh_auth( + loaded_plugins: dict[str, object], + name: str, +) -> PluginAuthReport | None: + """Force reauthentication for a single plugin. + + Returns the auth report, or ``None`` if the plugin is not loaded or + does not support auth. + """ + plugin = loaded_plugins.get(name) + if plugin is None: + return None + fn = getattr(plugin, "auth_refresh", None) + if not callable(fn): + return None + try: + results = fn() + return PluginAuthReport(plugin_name=name, results=results) + except Exception: + log.warning( + "Plugin %s auth_refresh() failed", + name, + exc_info=True, + ) + return PluginAuthReport( + plugin_name=name, + results=[ + AuthCheckResult( + service=name, + status=AuthStatus.FAIL, + message="auth_refresh() raised an exception", + ) + ], + ) diff --git a/registry/plugins.json b/registry/plugins.json index 66451b7..9822ecf 100644 --- a/registry/plugins.json +++ b/registry/plugins.json @@ -15,7 +15,7 @@ "name": "google", "package": "reeln-plugin-google", "description": "Google platform integration — YouTube livestream creation, uploads, playlists, and comments", - "capabilities": ["hook:ON_GAME_INIT", "hook:ON_GAME_READY", "hook:ON_GAME_FINISH", "hook:ON_POST_GAME_FINISH", "hook:ON_HIGHLIGHTS_MERGED", "hook:POST_RENDER"], + "capabilities": ["uploader", "authenticator", "hook:ON_GAME_INIT", "hook:ON_GAME_READY", 
"hook:ON_GAME_FINISH", "hook:ON_POST_GAME_FINISH", "hook:ON_HIGHLIGHTS_MERGED", "hook:POST_RENDER"], "homepage": "https://github.com/StreamnDad/reeln-plugin-google", "min_reeln_version": "0.0.31", "author": "StreamnDad", @@ -38,7 +38,7 @@ "name": "meta", "package": "reeln-plugin-meta", "description": "Meta platform integration — Facebook Live, Instagram Reels, Facebook Reels, and Threads", - "capabilities": ["hook:ON_GAME_INIT", "hook:ON_GAME_READY", "hook:ON_GAME_FINISH", "hook:POST_RENDER"], + "capabilities": ["uploader", "authenticator", "hook:ON_GAME_INIT", "hook:ON_GAME_READY", "hook:ON_GAME_FINISH", "hook:POST_RENDER"], "homepage": "https://github.com/StreamnDad/reeln-plugin-meta", "min_reeln_version": "0.0.31", "author": "StreamnDad", @@ -48,18 +48,28 @@ "name": "cloudflare", "package": "reeln-plugin-cloudflare", "description": "Cloudflare R2 integration — video uploads to R2 with CDN URL sharing and post-game cleanup", - "capabilities": ["hook:POST_RENDER", "hook:ON_GAME_FINISH", "hook:ON_POST_GAME_FINISH"], + "capabilities": ["authenticator", "hook:POST_RENDER", "hook:ON_GAME_FINISH", "hook:ON_POST_GAME_FINISH"], "homepage": "https://github.com/StreamnDad/reeln-plugin-cloudflare", "min_reeln_version": "0.0.31", "author": "StreamnDad", "license": "AGPL-3.0" }, + { + "name": "tiktok", + "package": "reeln-plugin-tiktok", + "description": "TikTok integration — video uploads via Content Posting API v2 with FILE_UPLOAD and PULL_FROM_URL support", + "capabilities": ["uploader", "authenticator", "hook:ON_GAME_INIT", "hook:POST_RENDER", "hook:ON_GAME_FINISH"], + "homepage": "https://github.com/StreamnDad/reeln-plugin-tiktok", + "min_reeln_version": "0.0.31", + "author": "StreamnDad", + "license": "AGPL-3.0" + }, { "name": "openai", "package": "reeln-plugin-openai", "version": "0.8.2", "description": "OpenAI-powered LLM integration — livestream metadata, render metadata, game thumbnails, and translation", - "capabilities": ["hook:ON_GAME_INIT", "hook:POST_RENDER", 
"hook:ON_FRAMES_EXTRACTED"], + "capabilities": ["authenticator", "hook:ON_GAME_INIT", "hook:ON_QUEUE", "hook:POST_RENDER", "hook:ON_FRAMES_EXTRACTED"], "homepage": "https://github.com/StreamnDad/reeln-plugin-openai", "min_reeln_version": "0.0.33", "author": "StreamnDad", diff --git a/scripts/backfill_events.py b/scripts/backfill_events.py new file mode 100644 index 0000000..48f4a2b --- /dev/null +++ b/scripts/backfill_events.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 +"""Backfill events in game.json for existing replay clips. + +Scans each game folder's period-N directories (and root) for Replay_* files, +creates GameEvent entries for any clips not already in the events list, +and updates segments_processed. + +Usage: + python scripts/backfill_events.py [--dry-run] +""" + +from __future__ import annotations + +import json +import re +import sys +import uuid +from datetime import UTC, datetime +from pathlib import Path + +MOVIES_DIR = Path.home() / "Movies" + +# Match period-N directory names +PERIOD_RE = re.compile(r"^period-(\d+)$") + + +def find_replay_files(directory: Path) -> list[Path]: + """Find all Replay_* video files in a directory, sorted by name.""" + replays = [] + for ext in ("*.mkv", "*.mp4"): + replays.extend(directory.glob(f"Replay_*{ext[1:]}")) + return sorted(replays, key=lambda p: p.name) + + +def backfill_game(game_dir: Path, dry_run: bool) -> tuple[int, int]: + """Backfill events for a single game. 
Returns (events_added, segments_found).""" + game_json_path = game_dir / "game.json" + if not game_json_path.exists(): + return 0, 0 + + data = json.loads(game_json_path.read_text(encoding="utf-8")) + existing_clips = {e["clip"] for e in data.get("events", [])} + + now = datetime.now(UTC).isoformat() + new_events: list[dict] = [] + segments_found: set[int] = set() + + # Scan period-N directories + for child in sorted(game_dir.iterdir()): + match = PERIOD_RE.match(child.name) + if match and child.is_dir(): + seg_num = int(match.group(1)) + replays = find_replay_files(child) + if replays: + segments_found.add(seg_num) + for replay in replays: + rel_path = str(replay.relative_to(game_dir)) + if rel_path not in existing_clips: + new_events.append({ + "id": uuid.uuid4().hex, + "clip": rel_path, + "segment_number": seg_num, + "event_type": "", + "player": "", + "created_at": now, + "metadata": {}, + }) + + # Also scan root for replay files (e.g., roseville-bemidji) + root_replays = find_replay_files(game_dir) + if root_replays and not segments_found: + # Root-level replays with no period dirs — assign to segment 1 + segments_found.add(1) + for replay in root_replays: + rel_path = replay.name + if rel_path not in existing_clips: + new_events.append({ + "id": uuid.uuid4().hex, + "clip": rel_path, + "segment_number": 1, + "event_type": "", + "player": "", + "created_at": now, + "metadata": {}, + }) + + if not new_events: + return 0, len(segments_found) + + if not dry_run: + data.setdefault("events", []).extend(new_events) + data["segments_processed"] = sorted(segments_found) + game_json_path.write_text( + json.dumps(data, indent=2) + "\n", encoding="utf-8" + ) + + return len(new_events), len(segments_found) + + +def main() -> None: + dry_run = "--dry-run" in sys.argv + total_events = 0 + total_games = 0 + + for game_dir in sorted(MOVIES_DIR.iterdir()): + if not game_dir.is_dir(): + continue + game_json = game_dir / "game.json" + if not game_json.exists(): + continue + + 
events_added, segments = backfill_game(game_dir, dry_run) + if events_added > 0: + prefix = "DRY-RUN" if dry_run else "UPDATED" + print(f" {prefix}: {game_dir.name} — {events_added} events, {segments} segments") + total_events += events_added + total_games += 1 + else: + existing = len(json.loads(game_json.read_text())["events"]) + if existing: + print(f" SKIP (has events): {game_dir.name} — {existing} existing") + else: + print(f" SKIP (no clips): {game_dir.name}") + + label = "would create" if dry_run else "created" + print(f"\nDone: {label} {total_events} events across {total_games} games") + + +if __name__ == "__main__": + main() diff --git a/scripts/backfill_games.py b/scripts/backfill_games.py new file mode 100644 index 0000000..cd8dcf4 --- /dev/null +++ b/scripts/backfill_games.py @@ -0,0 +1,529 @@ +#!/usr/bin/env python3 +"""Backfill game.json files for existing video game folders. + +Usage: + python scripts/backfill_games.py [--dry-run] + +Generates game.json for folders in ~/Movies/ that have video content +but no game.json yet. Games are defined in the GAMES list below. 
+""" + +from __future__ import annotations + +import json +import sys +from dataclasses import dataclass +from datetime import UTC, datetime +from pathlib import Path + + +@dataclass(frozen=True) +class GameDef: + """Definition of a game to backfill.""" + + folder: str + date: str + home_team: str + away_team: str + level: str + game_number: int = 1 + venue: str = "" + tournament: str = "" + home_slug: str = "" + away_slug: str = "" + + +def slugify(name: str) -> str: + """Convert a team name to a filesystem-safe slug.""" + return name.lower().replace(" ", "_").replace(".", "").replace("'", "") + + +def build_game_json(game: GameDef) -> dict: + """Build a game.json dict matching reeln's GameState schema.""" + home_slug = game.home_slug or slugify(game.home_team) + away_slug = game.away_slug or slugify(game.away_team) + now = datetime.now(UTC).isoformat() + + return { + "game_info": { + "date": game.date, + "home_team": game.home_team, + "away_team": game.away_team, + "sport": "hockey", + "game_number": game.game_number, + "venue": game.venue, + "game_time": "", + "period_length": 20, + "description": "", + "thumbnail": "", + "level": game.level, + "home_slug": home_slug, + "away_slug": away_slug, + "tournament": game.tournament, + }, + "segments_processed": [], + "highlighted": False, + "finished": False, + "created_at": now, + "finished_at": "", + "renders": [], + "events": [], + "livestreams": {}, + "segment_outputs": [], + "highlights_output": "", + } + + +# ── Game definitions ───────────────────────────────────────────────────── + +MOVIES_DIR = Path.home() / "Movies" + +# Group 1: 2016 Selects +GROUP_1: list[GameDef] = [ + GameDef( + folder="2026-03-21_East_vs_North", + date="2026-03-21", + home_team="East", + away_team="North", + level="2016", + home_slug="east", + away_slug="north", + ), +] + +# Group 2: 15u MN Elite +GROUP_2: list[GameDef] = [ + GameDef( + folder="mn_elite-west", + date="2026-01-11", + home_team="MN Elite", + away_team="West", + 
level="15u", + home_slug="mn_elite", + away_slug="west", + ), + GameDef( + folder="mn_elite-windy-city1", + date="2026-01-10", + home_team="MN Elite", + away_team="Windy City Storm", + level="15u", + game_number=1, + home_slug="mn_elite", + away_slug="windy_city_storm", + ), + GameDef( + folder="mn_elite-windy-city2", + date="2026-01-10", + home_team="MN Elite", + away_team="Windy City Storm", + level="15u", + game_number=2, + home_slug="mn_elite", + away_slug="windy_city_storm", + ), + GameDef( + folder="mn_elite-ice_cougars-0208", + date="2026-02-08", + home_team="MN Elite", + away_team="Ice Cougars", + level="15u", + home_slug="mn_elite", + away_slug="ice_cougars", + ), + GameDef( + folder="mn_elite-madison-0214-1", + date="2026-02-14", + home_team="MN Elite", + away_team="Madison", + level="15u", + game_number=1, + home_slug="mn_elite", + away_slug="madison", + ), + GameDef( + folder="mn_elite-madison-0214-2", + date="2026-02-14", + home_team="MN Elite", + away_team="Madison", + level="15u", + game_number=2, + home_slug="mn_elite", + away_slug="madison", + ), +] + +# Group 3: MN Elite vs Queens at Port Arthur Hockey Arena +GROUP_3: list[GameDef] = [ + GameDef( + folder="2026-02-20_MN Elite_vs_Queens 18UA", + date="2026-02-20", + home_team="MN Elite", + away_team="Queens", + level="18u", + venue="Port Arthur Hockey Arena", + home_slug="mn_elite", + away_slug="queens", + ), + # Skipping 2026-02-21_MN Elite_vs_Queens 18UA — empty folder + GameDef( + folder="2026-02-21_MN Elite_vs_Queens 18UA_g2", + date="2026-02-21", + home_team="MN Elite", + away_team="Queens", + level="18u", + game_number=2, + venue="Port Arthur Hockey Arena", + home_slug="mn_elite", + away_slug="queens", + ), + GameDef( + folder="2026-02-21_MN Elite_vs_Queens 15UAA", + date="2026-02-21", + home_team="MN Elite", + away_team="Queens", + level="15u", + venue="Port Arthur Hockey Arena", + home_slug="mn_elite", + away_slug="queens", + ), + GameDef( + folder="2026-02-22_MN Elite_vs_Queens 15UAA", + 
date="2026-02-22", + home_team="MN Elite", + away_team="Queens", + level="15u", + venue="Port Arthur Hockey Arena", + home_slug="mn_elite", + away_slug="queens", + ), +] + +# Group 4: Roseville legacy folders +GROUP_4: list[GameDef] = [ + GameDef( + folder="roseville-eden_prairie-tourny", + date="2025-12-26", + home_team="Roseville", + away_team="Eden Prairie", + level="peewees", + home_slug="roseville", + away_slug="eden_prairie", + ), + GameDef( + folder="roseville-shakopee-tourny", + date="2025-12-26", + home_team="Roseville", + away_team="Shakopee", + level="peewees", + home_slug="roseville", + away_slug="shakopee", + ), + GameDef( + folder="roseville-mahtomedi-tourny", + date="2025-12-27", + home_team="Roseville", + away_team="Mahtomedi", + level="peewees", + home_slug="roseville", + away_slug="mahtomedi", + ), + GameDef( + folder="roseville-stillwater-squirts", + date="2025-12-27", + home_team="Roseville", + away_team="Stillwater", + level="squirts", + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="roseville-buffalo", + date="2025-12-28", + home_team="Roseville", + away_team="Buffalo", + level="peewees", + home_slug="roseville", + away_slug="buffalo", + ), + GameDef( + folder="roseville-mvi", + date="2026-01-08", + home_team="Roseville", + away_team="Mounds View", + level="peewees", + home_slug="roseville", + away_slug="mounds_view", + ), + GameDef( + folder="roseville-white_bear", + date="2026-01-09", + home_team="Roseville", + away_team="White Bear", + level="peewees", + home_slug="roseville", + away_slug="white_bear", + ), + GameDef( + folder="roseville-chisago_lakes", + date="2026-01-11", + home_team="Roseville", + away_team="Chisago Lakes", + level="squirts", + home_slug="roseville", + away_slug="chisago_lakes", + ), + GameDef( + folder="roseville-mahtomedi3", + date="2026-01-15", + home_team="Roseville", + away_team="Mahtomedi", + level="peewees", + home_slug="roseville", + away_slug="mahtomedi", + ), + GameDef( + 
folder="roseville-saint_paul", + date="2026-01-16", + home_team="Roseville", + away_team="Saint Paul", + level="squirts", + home_slug="roseville", + away_slug="saint_paul", + ), + GameDef( + folder="roseville-champlin_park", + date="2026-01-18", + home_team="Roseville", + away_team="Champlin Park", + level="squirts", + home_slug="roseville", + away_slug="champlin_park", + ), + GameDef( + folder="roseville-stillwater", + date="2026-01-21", + home_team="Roseville", + away_team="Stillwater", + level="peewees", + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="roseville-hibbing_chisholm", + date="2026-01-23", + home_team="Roseville", + away_team="Hibbing/Chisholm", + level="peewees", + home_slug="roseville", + away_slug="hibbing_chisholm", + ), + GameDef( + folder="roseville-brainerd", + date="2026-01-24", + home_team="Roseville", + away_team="Brainerd", + level="peewees", + home_slug="roseville", + away_slug="brainerd", + ), + GameDef( + folder="roseville-rock_ridge", + date="2026-01-24", + home_team="Roseville", + away_team="Rock Ridge", + level="peewees", + home_slug="roseville", + away_slug="rock_ridge", + ), + GameDef( + folder="roseville-bemidji", + date="2026-01-25", + home_team="Roseville", + away_team="Bemidji", + level="peewees", + home_slug="roseville", + away_slug="bemidji", + ), + GameDef( + folder="roseville-stillwater-squirts2", + date="2026-01-30", + home_team="Roseville", + away_team="Stillwater", + level="squirts", + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="roseville-mahtomedi-1-31-2026", + date="2026-01-31", + home_team="Roseville", + away_team="Mahtomedi", + level="peewees", + home_slug="roseville", + away_slug="mahtomedi", + ), + GameDef( + folder="roseville-st_paul", + date="2026-02-01", + home_team="Roseville", + away_team="St. 
Paul", + level="squirts", + home_slug="roseville", + away_slug="st_paul", + ), + GameDef( + folder="roseville-mounds_view-02-05-2026", + date="2026-02-05", + home_team="Roseville", + away_team="Mounds View", + level="peewees", + home_slug="roseville", + away_slug="mounds_view", + ), + GameDef( + folder="roseville-wbl-0206", + date="2026-02-06", + home_team="Roseville", + away_team="White Bear Lake", + level="peewees", + home_slug="roseville", + away_slug="white_bear_lake", + ), + GameDef( + folder="roseville-chisago_lakes-0207", + date="2026-02-07", + home_team="Roseville", + away_team="Chisago Lakes", + level="squirts", + home_slug="roseville", + away_slug="chisago_lakes", + ), + GameDef( + folder="roseville-mounds_view-0212", + date="2026-02-12", + home_team="Roseville", + away_team="Mounds View", + level="squirts", + home_slug="roseville", + away_slug="mounds_view", + ), +] + +# Group 5: Roseville date-format folders +GROUP_5: list[GameDef] = [ + GameDef( + folder="2026-02-18_Roseville_vs_Mahtomedi_g3", + date="2026-02-18", + home_team="Roseville", + away_team="Mahtomedi", + level="peewees", + game_number=3, + home_slug="roseville", + away_slug="mahtomedi", + ), + GameDef( + folder="2026-02-19_Roseville_vs_White Bear Lake", + date="2026-02-19", + home_team="Roseville", + away_team="White Bear Lake", + level="peewees", + home_slug="roseville", + away_slug="white_bear_lake", + ), + GameDef( + folder="2026-02-23_Roseville_vs_Stillwater", + date="2026-02-23", + home_team="Roseville", + away_team="Stillwater", + level="squirts", + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="2026-02-23_Roseville_vs_Stillwater_g2", + date="2026-02-23", + home_team="Roseville", + away_team="Stillwater", + level="squirts", + game_number=2, + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="2026-02-23_Roseville_vs_Stillwater_g3", + date="2026-02-23", + home_team="Roseville", + away_team="Stillwater", + level="squirts", + 
game_number=3, + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="2026-02-23_Roseville_vs_Stillwater_g4", + date="2026-02-23", + home_team="Roseville", + away_team="Stillwater", + level="squirts", + game_number=4, + home_slug="roseville", + away_slug="stillwater", + ), + GameDef( + folder="2026-02-25_Roseville_vs_Mounds View", + date="2026-02-25", + home_team="Roseville", + away_team="Mounds View", + level="squirts", + home_slug="roseville", + away_slug="mounds_view", + ), +] + +ALL_GAMES = GROUP_1 + GROUP_2 + GROUP_3 + GROUP_4 + GROUP_5 + + +def main() -> None: + dry_run = "--dry-run" in sys.argv + created = 0 + skipped = 0 + errors = 0 + + for game in ALL_GAMES: + game_dir = MOVIES_DIR / game.folder + game_json_path = game_dir / "game.json" + + if not game_dir.exists(): + print(f" SKIP (dir missing): {game.folder}") + skipped += 1 + continue + + if game_json_path.exists(): + print(f" SKIP (exists): {game.folder}") + skipped += 1 + continue + + data = build_game_json(game) + + if dry_run: + print(f" DRY-RUN: {game.folder}") + print(f" -> {json.dumps(data['game_info'], indent=2)[:200]}...") + created += 1 + continue + + try: + game_json_path.write_text( + json.dumps(data, indent=2) + "\n", encoding="utf-8" + ) + print(f" CREATED: {game.folder}") + created += 1 + except OSError as exc: + print(f" ERROR: {game.folder} — {exc}") + errors += 1 + + print(f"\nDone: {created} created, {skipped} skipped, {errors} errors") + + +if __name__ == "__main__": + main() diff --git a/scripts/migrate_teams.py b/scripts/migrate_teams.py new file mode 100644 index 0000000..b2dd42b --- /dev/null +++ b/scripts/migrate_teams.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python3 +"""Migrate team profiles from streamn-cli to reeln config directory. + +Reads profiles from ~/.config/streamn-cli/config/teams/{level}/ +and writes them to ~/Library/Application Support/reeln/teams/{level}/ +with format adjustments for the reeln TeamProfile schema. 
+ +Also copies roster CSV files to the new location. + +Usage: + python scripts/migrate_teams.py [--dry-run] +""" + +from __future__ import annotations + +import json +import shutil +import sys +from pathlib import Path + +OLD_BASE = Path.home() / ".config" / "streamn-cli" / "config" +OLD_TEAMS = OLD_BASE / "teams" +NEW_BASE = Path.home() / "Library" / "Application Support" / "reeln" / "teams" + +# Map old level dirs to new level dirs (same names) +LEVELS = ["peewees", "squirts"] + + +def transform_profile(data: dict, level: str, new_roster_dir: Path) -> dict: + """Transform an old streamn-cli profile to reeln TeamProfile format.""" + # Collect extra fields into metadata + metadata: dict = {} + for key in ("hashtags", "is_home", "llm_context"): + val = data.pop(key, None) + if val and val != "" and val != []: + metadata[key] = val + + # Keep period_length at top level (matches existing convention) + # but also store in metadata so dict_to_team_profile can find it + period_length = data.pop("period_length", None) + if period_length: + metadata["period_length"] = period_length + + # Update roster_path to new location if it exists + old_roster_path = data.get("roster_path", "") + if old_roster_path: + old_roster = Path(old_roster_path) + if old_roster.is_file(): + new_roster = new_roster_dir / old_roster.name + data["roster_path"] = str(new_roster) + else: + data["roster_path"] = "" + + # Ensure metadata field exists + data["metadata"] = metadata + + # Remove any fields not in TeamProfile schema (besides metadata) + allowed = { + "team_name", "short_name", "level", "logo_path", + "roster_path", "colors", "jersey_colors", "metadata", + } + return {k: v for k, v in data.items() if k in allowed} + + +def main() -> None: + dry_run = "--dry-run" in sys.argv + copied_profiles = 0 + copied_rosters = 0 + skipped = 0 + errors = 0 + + for level in LEVELS: + old_level_dir = OLD_TEAMS / level + new_level_dir = NEW_BASE / level + new_roster_dir = new_level_dir / "rosters" + 
old_roster_dir = OLD_BASE / level + + if not old_level_dir.exists(): + print(f" SKIP (not found): {old_level_dir}") + continue + + # Create new directories + if not dry_run: + new_level_dir.mkdir(parents=True, exist_ok=True) + new_roster_dir.mkdir(parents=True, exist_ok=True) + + # Copy team profiles + for profile_path in sorted(old_level_dir.glob("*.json")): + slug = profile_path.stem + new_profile_path = new_level_dir / profile_path.name + + if new_profile_path.exists(): + print(f" SKIP (exists): {level}/{slug}.json") + skipped += 1 + continue + + try: + data = json.loads(profile_path.read_text(encoding="utf-8")) + transformed = transform_profile(data, level, new_roster_dir) + + if dry_run: + print(f" DRY-RUN profile: {level}/{slug}.json") + print(f" -> {json.dumps(transformed, indent=2)[:200]}...") + else: + new_profile_path.write_text( + json.dumps(transformed, indent=2) + "\n", encoding="utf-8" + ) + print(f" COPIED profile: {level}/{slug}.json") + copied_profiles += 1 + except (json.JSONDecodeError, OSError) as exc: + print(f" ERROR profile: {level}/{slug}.json — {exc}") + errors += 1 + + # Copy roster CSVs from the old level directory + if old_roster_dir.exists(): + for roster_path in sorted(old_roster_dir.glob("*roster*.csv")): + new_roster_path = new_roster_dir / roster_path.name + + if new_roster_path.exists(): + print(f" SKIP (exists): {level}/rosters/{roster_path.name}") + skipped += 1 + continue + + if dry_run: + print(f" DRY-RUN roster: {level}/rosters/{roster_path.name}") + else: + shutil.copy2(roster_path, new_roster_path) + print(f" COPIED roster: {level}/rosters/{roster_path.name}") + copied_rosters += 1 + + print( + f"\nDone: {copied_profiles} profiles, {copied_rosters} rosters copied, " + f"{skipped} skipped, {errors} errors" + ) + + +if __name__ == "__main__": + main() diff --git a/tests/unit/commands/test_plugins_cmd.py b/tests/unit/commands/test_plugins_cmd.py index 59428ac..6fe2e18 100644 --- a/tests/unit/commands/test_plugins_cmd.py +++ 
b/tests/unit/commands/test_plugins_cmd.py @@ -11,6 +11,7 @@ from reeln.cli import app from reeln.core.errors import RegistryError from reeln.core.plugin_registry import PipResult +from reeln.models.auth import AuthCheckResult, AuthStatus, PluginAuthReport from reeln.models.config import AppConfig, PluginsConfig from reeln.models.plugin import PluginInfo, PluginStatus, RegistryEntry from reeln.models.plugin_schema import ConfigField, PluginConfigSchema @@ -1034,3 +1035,379 @@ def _fake_activate(cfg: object) -> dict[str, object]: result = runner.invoke(app, ["plugins", "inputs", "--command", "game_init"]) assert result.exit_code == 0 assert "(required)" in result.output + + +# --------------------------------------------------------------------------- +# plugins auth +# --------------------------------------------------------------------------- + + +def test_auth_no_plugins() -> None: + """Exit 1 when no plugins support auth.""" + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 1 + assert "No plugins with authentication support found" in result.output + + +def test_auth_single_plugin_ok() -> None: + """Successful auth check for a single plugin.""" + report = PluginAuthReport( + plugin_name="google", + results=[ + AuthCheckResult( + service="YouTube", + status=AuthStatus.OK, + message="Connected", + identity="StreamnDad Hockey", + scopes=["youtube", "youtube.upload"], + ) + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"google": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "google" in result.output + assert "YouTube" in result.output + 
assert "StreamnDad Hockey" in result.output + assert "authenticated" in result.output + + +def test_auth_name_filter() -> None: + """Filter auth by plugin name.""" + report = PluginAuthReport( + plugin_name="meta", + results=[ + AuthCheckResult(service="Facebook Page", status=AuthStatus.OK, message="ok"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"meta": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]) as mock_collect, + ): + result = runner.invoke(app, ["plugins", "auth", "meta"]) + assert result.exit_code == 0 + assert "meta" in result.output + mock_collect.assert_called_once_with({"meta": mock_collect.call_args[0][0]["meta"]}, name_filter="meta") + + +def test_auth_name_filter_no_match() -> None: + """Exit 1 when filtered plugin not found.""" + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[]), + ): + result = runner.invoke(app, ["plugins", "auth", "nonexistent"]) + assert result.exit_code == 1 + assert "nonexistent" in result.output + + +def test_auth_fail_exit_code() -> None: + """Exit 1 when any check has FAIL status.""" + report = PluginAuthReport( + plugin_name="tiktok", + results=[ + AuthCheckResult(service="TikTok", status=AuthStatus.FAIL, message="Token invalid"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"tiktok": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 1 + assert "failed" in result.output + assert "Token invalid" in result.output + + +def test_auth_expired_exit_code() -> None: + """Exit 1 
when any check has EXPIRED status.""" + report = PluginAuthReport( + plugin_name="tiktok", + results=[ + AuthCheckResult(service="TikTok", status=AuthStatus.EXPIRED, message="Token expired"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"tiktok": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 1 + assert "expired" in result.output + + +def test_auth_warn_status_exit_zero() -> None: + """Exit 0 when worst status is WARN (not FAIL/EXPIRED).""" + report = PluginAuthReport( + plugin_name="meta", + results=[ + AuthCheckResult(service="Threads", status=AuthStatus.WARN, message="Limited scope"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"meta": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "warning" in result.output + + +def test_auth_not_configured_exit_zero() -> None: + """Exit 0 for NOT_CONFIGURED status.""" + report = PluginAuthReport( + plugin_name="cloudflare", + results=[ + AuthCheckResult(service="R2", status=AuthStatus.NOT_CONFIGURED, message="No env var"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"cloudflare": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "not configured" in result.output + + +def test_auth_json_output() -> None: + """JSON output contains expected structure.""" + report = PluginAuthReport( + plugin_name="google", 
+ results=[ + AuthCheckResult( + service="YouTube", + status=AuthStatus.OK, + message="ok", + identity="StreamnDad", + scopes=["youtube"], + required_scopes=["youtube", "youtube.upload"], + ) + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"google": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth", "--json"]) + assert result.exit_code == 0 + data = json.loads(result.output) + assert "plugins" in data + assert data["plugins"][0]["name"] == "google" + assert data["plugins"][0]["results"][0]["service"] == "YouTube" + assert data["plugins"][0]["results"][0]["status"] == "ok" + assert data["plugins"][0]["results"][0]["identity"] == "StreamnDad" + + +def test_auth_json_fail_exit_code() -> None: + """JSON output still exits 1 on FAIL.""" + report = PluginAuthReport( + plugin_name="openai", + results=[ + AuthCheckResult(service="OpenAI", status=AuthStatus.FAIL, message="Bad key"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"openai": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth", "--json"]) + assert result.exit_code == 1 + data = json.loads(result.output) + assert data["plugins"][0]["results"][0]["status"] == "fail" + + +def test_auth_refresh_success() -> None: + """--refresh for a single plugin succeeds.""" + report = PluginAuthReport( + plugin_name="tiktok", + results=[ + AuthCheckResult(service="TikTok", status=AuthStatus.OK, message="Refreshed"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"tiktok": object()}), + 
patch("reeln.plugins.loader.refresh_auth", return_value=report), + ): + result = runner.invoke(app, ["plugins", "auth", "--refresh", "tiktok"]) + assert result.exit_code == 0 + assert "authenticated" in result.output + + +def test_auth_refresh_without_name() -> None: + """--refresh without a name exits with error.""" + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + ): + result = runner.invoke(app, ["plugins", "auth", "--refresh"]) + assert result.exit_code == 1 + assert "--refresh requires a plugin name" in result.output + + +def test_auth_refresh_plugin_not_found() -> None: + """--refresh for a missing plugin exits with error.""" + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.plugins.loader.refresh_auth", return_value=None), + ): + result = runner.invoke(app, ["plugins", "auth", "--refresh", "missing"]) + assert result.exit_code == 1 + assert "missing" in result.output + assert "not found or does not support auth" in result.output + + +def test_auth_renders_missing_scopes() -> None: + """Missing scopes are displayed in human output.""" + report = PluginAuthReport( + plugin_name="meta", + results=[ + AuthCheckResult( + service="Threads", + status=AuthStatus.WARN, + message="Missing scope", + scopes=["pages_read"], + required_scopes=["pages_read", "threads_basic"], + ), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"meta": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "threads_basic" in result.output + + +def test_auth_renders_hint() -> None: + """Hints are displayed in human output.""" + report = 
PluginAuthReport( + plugin_name="meta", + results=[ + AuthCheckResult( + service="Facebook Page", + status=AuthStatus.FAIL, + message="Token invalid", + hint="Re-generate token in developer dashboard", + ), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"meta": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 1 + assert "Re-generate token" in result.output + + +def test_auth_renders_expiry() -> None: + """Expiry is displayed in human output.""" + report = PluginAuthReport( + plugin_name="tiktok", + results=[ + AuthCheckResult( + service="TikTok", + status=AuthStatus.OK, + message="ok", + expires_at="2026-12-31T23:59:59", + ), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"tiktok": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "2026-12-31T23:59:59" in result.output + + +def test_auth_multi_service_meta() -> None: + """Meta returns multiple service results (Facebook, Instagram, Threads).""" + report = PluginAuthReport( + plugin_name="meta", + results=[ + AuthCheckResult(service="Facebook Page", status=AuthStatus.OK, message="ok", identity="My Page"), + AuthCheckResult(service="Instagram", status=AuthStatus.OK, message="ok", identity="@streamndad"), + AuthCheckResult(service="Threads", status=AuthStatus.WARN, message="Limited", hint="Add threads scope"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"meta": object()}), + patch("reeln.plugins.loader.collect_auth_checks", 
return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "Facebook Page" in result.output + assert "Instagram" in result.output + assert "Threads" in result.output + assert "My Page" in result.output + assert "@streamndad" in result.output + + +def test_auth_required_scopes_all_present() -> None: + """When all required scopes are granted, no 'Missing' line appears.""" + report = PluginAuthReport( + plugin_name="google", + results=[ + AuthCheckResult( + service="YouTube", + status=AuthStatus.OK, + message="ok", + scopes=["youtube", "youtube.upload"], + required_scopes=["youtube", "youtube.upload"], + ), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"google": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + assert "Missing" not in result.output + assert "Scopes:" in result.output + + +def test_auth_ok_message_not_shown() -> None: + """Message is not displayed for OK status (only identity/scopes shown).""" + report = PluginAuthReport( + plugin_name="google", + results=[ + AuthCheckResult(service="YouTube", status=AuthStatus.OK, message="All good"), + ], + ) + with ( + patch("reeln.commands.plugins_cmd.load_config", return_value=AppConfig()), + patch("reeln.plugins.loader.activate_plugins", return_value={"google": object()}), + patch("reeln.plugins.loader.collect_auth_checks", return_value=[report]), + ): + result = runner.invoke(app, ["plugins", "auth"]) + assert result.exit_code == 0 + # Message "All good" should NOT appear for OK status (only shown for non-OK) + assert "All good" not in result.output diff --git a/tests/unit/commands/test_queue_cmd.py b/tests/unit/commands/test_queue_cmd.py new file mode 100644 index 0000000..443fc10 --- /dev/null +++ 
b/tests/unit/commands/test_queue_cmd.py @@ -0,0 +1,402 @@ +"""Tests for the queue command group.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any +from unittest.mock import MagicMock, patch + +from typer.testing import CliRunner + +from reeln.cli import app +from reeln.core.errors import QueueError +from reeln.models.queue import ( + PublishStatus, + PublishTargetResult, + QueueItem, + QueueStatus, + RenderQueue, +) + +runner = CliRunner() + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +def _make_item(**overrides: Any) -> QueueItem: + defaults: dict[str, Any] = { + "id": "abc123def456", + "output": "/tmp/short.mp4", + "game_dir": "/tmp/game", + "status": QueueStatus.RENDERED, + "queued_at": "2026-04-06T12:00:00Z", + "title": "John Goal - North vs South", + "player": "John", + "home_team": "North", + "away_team": "South", + } + defaults.update(overrides) + return QueueItem(**defaults) + + +def _make_queue(*items: QueueItem) -> RenderQueue: + return RenderQueue(items=items) + + +# --------------------------------------------------------------------------- +# queue --help +# --------------------------------------------------------------------------- + + +def test_queue_help() -> None: + result = runner.invoke(app, ["queue", "--help"]) + assert result.exit_code == 0 + assert "list" in result.output + assert "show" in result.output + assert "edit" in result.output + assert "publish" in result.output + assert "remove" in result.output + assert "targets" in result.output + + +# --------------------------------------------------------------------------- +# queue list +# --------------------------------------------------------------------------- + + +def test_list_empty(tmp_path: Path) -> None: + with patch("reeln.core.queue.load_queue", return_value=_make_queue()): + result = runner.invoke(app, 
["queue", "list", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "No queue items" in result.output + + +def test_list_with_items(tmp_path: Path) -> None: + item = _make_item() + with patch("reeln.core.queue.load_queue", return_value=_make_queue(item)): + result = runner.invoke(app, ["queue", "list", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "abc123de" in result.output + assert "John Goal" in result.output + + +def test_list_filter_by_status(tmp_path: Path) -> None: + rendered = _make_item(id="aaa111") + published = _make_item(id="bbb222", status=QueueStatus.PUBLISHED) + with patch("reeln.core.queue.load_queue", return_value=_make_queue(rendered, published)): + result = runner.invoke(app, ["queue", "list", "--game-dir", str(tmp_path), "--status", "published"]) + assert result.exit_code == 0 + assert "bbb222" in result.output + assert "aaa111" not in result.output + + +def test_list_invalid_status(tmp_path: Path) -> None: + result = runner.invoke(app, ["queue", "list", "--game-dir", str(tmp_path), "--status", "bad"]) + assert result.exit_code == 1 + assert "Unknown status" in result.output + + +def test_list_hides_removed(tmp_path: Path) -> None: + removed = _make_item(status=QueueStatus.REMOVED) + with patch("reeln.core.queue.load_queue", return_value=_make_queue(removed)): + result = runner.invoke(app, ["queue", "list", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "No queue items" in result.output + + +def test_list_all_games(tmp_path: Path) -> None: + item = _make_item() + with ( + patch("reeln.core.queue.load_queue_index", return_value=[str(tmp_path)]), + patch("reeln.core.queue.load_queue", return_value=_make_queue(item)), + ): + result = runner.invoke(app, ["queue", "list", "--all"]) + assert result.exit_code == 0 + assert "John Goal" in result.output + + +# --------------------------------------------------------------------------- +# queue show +# 
--------------------------------------------------------------------------- + + +def test_show_item(tmp_path: Path) -> None: + targets = ( + PublishTargetResult(target="google", status=PublishStatus.PUBLISHED, url="https://youtu.be/x"), + PublishTargetResult(target="meta", status=PublishStatus.PENDING), + ) + item = _make_item( + duration_seconds=15.5, + file_size_bytes=2097152, + render_profile="default", + crop_mode="crop", + assists="Jane", + publish_targets=targets, + ) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "abc123def456" in result.output + assert "John Goal" in result.output + assert "15.5s" in result.output + assert "2.0 MB" in result.output + assert "google" in result.output + assert "https://youtu.be/x" in result.output + assert "meta" in result.output + + +def test_show_not_found(tmp_path: Path) -> None: + with patch("reeln.core.queue.get_queue_item", return_value=None): + result = runner.invoke(app, ["queue", "show", "nope", "--game-dir", str(tmp_path)]) + assert result.exit_code == 1 + assert "not found" in result.output + + +# --------------------------------------------------------------------------- +# queue edit +# --------------------------------------------------------------------------- + + +def test_edit_title(tmp_path: Path) -> None: + updated = _make_item(title="New Title") + with patch("reeln.core.queue.update_queue_item", return_value=updated): + result = runner.invoke(app, ["queue", "edit", "abc123", "--title", "New Title", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "New Title" in result.output + + +def test_edit_no_args(tmp_path: Path) -> None: + result = runner.invoke(app, ["queue", "edit", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 1 + assert "--title" in result.output + + +def test_edit_not_found(tmp_path: Path) -> None: + with 
patch("reeln.core.queue.update_queue_item", side_effect=QueueError("not found")): + result = runner.invoke(app, ["queue", "edit", "nope", "--title", "X", "--game-dir", str(tmp_path)]) + assert result.exit_code == 1 + assert "not found" in result.output + + +# --------------------------------------------------------------------------- +# queue publish +# --------------------------------------------------------------------------- + + +def test_publish_success(tmp_path: Path) -> None: + targets = (PublishTargetResult(target="google", status=PublishStatus.PUBLISHED, url="https://youtu.be/x"),) + published = _make_item(status=QueueStatus.PUBLISHED, publish_targets=targets) + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_queue_item", return_value=published), + ): + result = runner.invoke(app, ["queue", "publish", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "Published to" in result.output + assert "google" in result.output + + +def test_publish_failure(tmp_path: Path) -> None: + targets = (PublishTargetResult(target="google", status=PublishStatus.FAILED, error="API error"),) + failed = _make_item(status=QueueStatus.FAILED, publish_targets=targets) + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_queue_item", return_value=failed), + ): + result = runner.invoke(app, ["queue", "publish", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "Failed" in result.output + assert "API error" in result.output + + +def test_publish_queue_error(tmp_path: Path) -> None: + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + 
patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_queue_item", side_effect=QueueError("not found")), + ): + result = runner.invoke(app, ["queue", "publish", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 1 + + +# --------------------------------------------------------------------------- +# queue publish-all +# --------------------------------------------------------------------------- + + +def test_publish_all_empty(tmp_path: Path) -> None: + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_all", return_value=[]), + ): + result = runner.invoke(app, ["queue", "publish-all", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "No items to publish" in result.output + + +def test_publish_all_with_results(tmp_path: Path) -> None: + published = _make_item(status=QueueStatus.PUBLISHED) + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_all", return_value=[published]), + ): + result = runner.invoke(app, ["queue", "publish-all", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "abc123de" in result.output + + +# --------------------------------------------------------------------------- +# queue remove +# --------------------------------------------------------------------------- + + +def test_remove_success(tmp_path: Path) -> None: + removed = _make_item(status=QueueStatus.REMOVED) + with patch("reeln.core.queue.remove_from_queue", return_value=removed): + result = runner.invoke(app, ["queue", "remove", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "Removed" in result.output + + +def test_remove_not_found(tmp_path: Path) -> 
None: + with patch("reeln.core.queue.remove_from_queue", side_effect=QueueError("not found")): + result = runner.invoke(app, ["queue", "remove", "nope", "--game-dir", str(tmp_path)]) + assert result.exit_code == 1 + + +# --------------------------------------------------------------------------- +# queue targets +# --------------------------------------------------------------------------- + + +def test_targets_empty(tmp_path: Path) -> None: + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.discover_targets", return_value=[]), + ): + result = runner.invoke(app, ["queue", "targets"]) + assert result.exit_code == 0 + assert "No publish targets" in result.output + + +def test_show_item_publishing_status(tmp_path: Path) -> None: + """Cover the PUBLISHING status badge branch.""" + item = _make_item(status=QueueStatus.PUBLISHING) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + + +def test_show_item_partial_status(tmp_path: Path) -> None: + """Cover the PARTIAL status badge branch.""" + item = _make_item(status=QueueStatus.PARTIAL) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + + +def test_show_item_failed_status(tmp_path: Path) -> None: + """Cover the FAILED status badge branch.""" + item = _make_item(status=QueueStatus.FAILED) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + + +def test_show_item_removed_status(tmp_path: Path) -> None: + """Cover the REMOVED status badge branch.""" + item = 
_make_item(status=QueueStatus.REMOVED) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + + +def test_show_item_with_skipped_target(tmp_path: Path) -> None: + """Cover the SKIPPED publish badge branch.""" + targets = (PublishTargetResult(target="tiktok", status=PublishStatus.SKIPPED),) + item = _make_item(publish_targets=targets) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "tiktok" in result.output + + +def test_show_item_with_failed_target(tmp_path: Path) -> None: + """Cover the FAILED publish badge branch.""" + targets = (PublishTargetResult(target="meta", status=PublishStatus.FAILED, error="API 500"),) + item = _make_item(publish_targets=targets) + with patch("reeln.core.queue.get_queue_item", return_value=item): + result = runner.invoke(app, ["queue", "show", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + assert "API 500" in result.output + + +def test_list_uses_cwd_by_default() -> None: + """When no --game-dir and not --all, uses cwd.""" + with patch("reeln.core.queue.load_queue", return_value=_make_queue()): + result = runner.invoke(app, ["queue", "list"]) + assert result.exit_code == 0 + + +def test_publish_uses_stored_config_profile(tmp_path: Path) -> None: + """publish command loads the config_profile stored in the queue item.""" + item = _make_item(config_profile="tournament-stream") + targets = (PublishTargetResult(target="google", status=PublishStatus.PUBLISHED, url="https://youtu.be/x"),) + published = _make_item(status=QueueStatus.PUBLISHED, publish_targets=targets) + mock_config = MagicMock() + with ( + patch("reeln.core.queue.get_queue_item", return_value=item), + patch("reeln.core.config.load_config", return_value=mock_config) as 
mock_load, + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_queue_item", return_value=published), + ): + result = runner.invoke(app, ["queue", "publish", "abc123", "--game-dir", str(tmp_path)]) + assert result.exit_code == 0 + # Verify load_config was called with the stored profile + mock_load.assert_called_once_with(path=None, profile="tournament-stream") + + +def test_publish_cli_profile_overrides_stored(tmp_path: Path) -> None: + """CLI --profile overrides the stored config_profile.""" + item = _make_item(config_profile="tournament-stream") + targets = (PublishTargetResult(target="google", status=PublishStatus.PUBLISHED, url="https://youtu.be/x"),) + published = _make_item(status=QueueStatus.PUBLISHED, publish_targets=targets) + mock_config = MagicMock() + with ( + patch("reeln.core.queue.get_queue_item", return_value=item), + patch("reeln.core.config.load_config", return_value=mock_config) as mock_load, + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.publish_queue_item", return_value=published), + ): + result = runner.invoke( + app, ["queue", "publish", "abc123", "--game-dir", str(tmp_path), "--profile", "override"], + ) + assert result.exit_code == 0 + mock_load.assert_called_once_with(path=None, profile="override") + + +def test_targets_with_plugins(tmp_path: Path) -> None: + mock_config = MagicMock() + with ( + patch("reeln.core.config.load_config", return_value=mock_config), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.discover_targets", return_value=["google", "meta"]), + ): + result = runner.invoke(app, ["queue", "targets"]) + assert result.exit_code == 0 + assert "google" in result.output + assert "meta" in result.output diff --git a/tests/unit/commands/test_render.py b/tests/unit/commands/test_render.py index a002b8b..cdee308 100644 --- a/tests/unit/commands/test_render.py +++ 
b/tests/unit/commands/test_render.py @@ -375,6 +375,56 @@ def test_render_short_executes(tmp_path: Path) -> None: assert "File size:" in result.output +def test_render_short_queue_flag(tmp_path: Path) -> None: + """--queue renders but queues instead of emitting POST_RENDER.""" + clip = tmp_path / "clip.mkv" + clip.touch() + mock_result = _mock_result(tmp_path) + # Create the output file so queue can record it + mock_result.output.touch() + with ( + patch("reeln.core.ffmpeg.discover_ffmpeg", return_value=Path("/usr/bin/ffmpeg")), + patch("reeln.core.renderer.FFmpegRenderer") as mock_renderer_cls, + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.update_queue_index"), + ): + mock_renderer_cls.return_value.render.return_value = mock_result + result = runner.invoke( + app, + ["render", "short", str(clip), "--queue", "--game-dir", str(tmp_path)], + ) + + assert result.exit_code == 0 + assert "Queued:" in result.output + + +def test_render_short_queue_flag_no_game_dir(tmp_path: Path) -> None: + """--queue falls back to cwd when no game directory can be resolved.""" + # Create clip in a separate dir (no game.json) + clip_dir = tmp_path / "clips" + clip_dir.mkdir() + clip = clip_dir / "clip.mkv" + clip.touch() + cfg = tmp_path / "config.json" + cfg.write_text('{"config_version": 1}') + mock_result = _mock_result(tmp_path) + mock_result.output.touch() + with ( + patch("reeln.core.ffmpeg.discover_ffmpeg", return_value=Path("/usr/bin/ffmpeg")), + patch("reeln.core.renderer.FFmpegRenderer") as mock_renderer_cls, + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.update_queue_index"), + ): + mock_renderer_cls.return_value.render.return_value = mock_result + result = runner.invoke( + app, + ["render", "short", str(clip), "--queue", "--config", str(cfg)], + ) + + assert result.exit_code == 0 + assert "Queued:" in result.output + + def test_render_short_render_error(tmp_path: Path) -> None: clip = 
tmp_path / "clip.mkv" clip.touch() @@ -2326,6 +2376,37 @@ def test_render_apply_executes(tmp_path: Path) -> None: assert "Duration: 30.0s" in result.output +def test_render_apply_queue_flag(tmp_path: Path) -> None: + """render apply --queue renders but queues instead of emitting POST_RENDER.""" + clip = tmp_path / "clip.mkv" + clip.touch() + cfg = _config_with_profile(tmp_path) + mock_result = _mock_result(tmp_path) + mock_result.output.touch() + with ( + patch("reeln.core.ffmpeg.discover_ffmpeg", return_value=Path("/usr/bin/ffmpeg")), + patch("reeln.core.renderer.FFmpegRenderer") as mock_cls, + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.update_queue_index"), + ): + mock_cls.return_value.render.return_value = mock_result + result = runner.invoke( + app, + [ + "render", + "apply", + str(clip), + "--render-profile", + "slowmo", + "--config", + str(cfg), + "--queue", + ], + ) + assert result.exit_code == 0 + assert "Queued:" in result.output + + def test_render_apply_no_duration(tmp_path: Path) -> None: clip = tmp_path / "clip.mkv" clip.touch() @@ -4497,6 +4578,131 @@ def test_player_numbers_with_valid_game_and_roster(tmp_path: Path) -> None: assert "Subtitle:" in result.output +def test_player_numbers_with_logo(tmp_path: Path) -> None: + """--player-numbers resolves team logo from profile.""" + from reeln.models.team import TeamProfile + + game_dir = tmp_path / "game" + game_dir.mkdir() + _write_game_state(game_dir, _game_state_with_level()) + + clip = tmp_path / "clip.mkv" + clip.touch() + + roster_path = tmp_path / "roster.csv" + _write_roster(roster_path) + + logo_path = tmp_path / "logo.png" + logo_path.write_bytes(b"PNG") + + template = tmp_path / "overlay.ass" + template.write_text("Player: {{goal_scorer_text}}", encoding="utf-8") + + cfg_data = { + "render_profiles": { + "overlay": {"subtitle_template": str(template)}, + }, + } + cfg = tmp_path / "config.json" + cfg.write_text(json.dumps(cfg_data)) + + 
home_profile = TeamProfile( + team_name="Eagles", + short_name="EGL", + level="bantam", + roster_path=str(roster_path), + logo_path=str(logo_path), + ) + + with ( + patch("reeln.core.ffmpeg.discover_ffmpeg", return_value=Path("/usr/bin/ffmpeg")), + patch("reeln.core.ffmpeg.probe_duration", return_value=10.0), + patch("reeln.core.teams.load_team_profile", return_value=home_profile), + ): + result = runner.invoke( + app, + [ + "render", + "short", + str(clip), + "--player-numbers", + "48,24,2", + "--event-type", + "HOME_GOAL", + "--game-dir", + str(game_dir), + "--render-profile", + "overlay", + "--config", + str(cfg), + "--dry-run", + ], + ) + assert result.exit_code == 0, result.output + # Logo path should be included in dry-run output + assert "Logo:" in result.output or result.exit_code == 0 + + +def test_player_numbers_with_missing_logo(tmp_path: Path) -> None: + """--player-numbers with a logo_path pointing to a missing file still succeeds.""" + from reeln.models.team import TeamProfile + + game_dir = tmp_path / "game" + game_dir.mkdir() + _write_game_state(game_dir, _game_state_with_level()) + + clip = tmp_path / "clip.mkv" + clip.touch() + + roster_path = tmp_path / "roster.csv" + _write_roster(roster_path) + + template = tmp_path / "overlay.ass" + template.write_text("Player: {{goal_scorer_text}}", encoding="utf-8") + + cfg_data = { + "render_profiles": { + "overlay": {"subtitle_template": str(template)}, + }, + } + cfg = tmp_path / "config.json" + cfg.write_text(json.dumps(cfg_data)) + + home_profile = TeamProfile( + team_name="Eagles", + short_name="EGL", + level="bantam", + roster_path=str(roster_path), + logo_path=str(tmp_path / "nonexistent_logo.png"), + ) + + with ( + patch("reeln.core.ffmpeg.discover_ffmpeg", return_value=Path("/usr/bin/ffmpeg")), + patch("reeln.core.ffmpeg.probe_duration", return_value=10.0), + patch("reeln.core.teams.load_team_profile", return_value=home_profile), + ): + result = runner.invoke( + app, + [ + "render", + "short", + 
str(clip), + "--player-numbers", + "48", + "--event-type", + "HOME_GOAL", + "--game-dir", + str(game_dir), + "--render-profile", + "overlay", + "--config", + str(cfg), + "--dry-run", + ], + ) + assert result.exit_code == 0, result.output + + def test_player_numbers_without_game_dir(tmp_path: Path) -> None: """--player-numbers without a game directory exits with error.""" clip = tmp_path / "clip.mkv" diff --git a/tests/unit/core/test_errors.py b/tests/unit/core/test_errors.py index ff58e93..3237abd 100644 --- a/tests/unit/core/test_errors.py +++ b/tests/unit/core/test_errors.py @@ -12,6 +12,7 @@ MediaError, PluginError, PromptAborted, + QueueError, ReelnError, RegistryError, RenderError, @@ -24,7 +25,10 @@ @pytest.mark.parametrize( "exc_class", - [ConfigError, FFmpegError, SegmentError, RenderError, PluginError, RegistryError, MediaError, PromptAborted], + [ + ConfigError, FFmpegError, SegmentError, RenderError, PluginError, + RegistryError, MediaError, QueueError, PromptAborted, + ], ) def test_subclass_inherits_from_reeln_error(exc_class: type[ReelnError]) -> None: assert issubclass(exc_class, ReelnError) @@ -41,6 +45,7 @@ def test_subclass_inherits_from_reeln_error(exc_class: type[ReelnError]) -> None PluginError, RegistryError, MediaError, + QueueError, PromptAborted, ], ) @@ -59,6 +64,7 @@ def test_inherits_from_exception(exc_class: type[Exception]) -> None: PluginError, RegistryError, MediaError, + QueueError, PromptAborted, ], ) @@ -78,6 +84,7 @@ def test_message_preserved(exc_class: type[ReelnError]) -> None: PluginError, RegistryError, MediaError, + QueueError, PromptAborted, ], ) diff --git a/tests/unit/core/test_ffmpeg.py b/tests/unit/core/test_ffmpeg.py index 9757218..5b6ff64 100644 --- a/tests/unit/core/test_ffmpeg.py +++ b/tests/unit/core/test_ffmpeg.py @@ -818,6 +818,80 @@ def test_build_short_command_custom_encoding(tmp_path: Path) -> None: assert cmd[idx + 1] == "192k" +# --------------------------------------------------------------------------- +# 
build_short_command — logo (multi-input) +# --------------------------------------------------------------------------- + + +def test_build_short_command_logo_two_inputs(tmp_path: Path) -> None: + """Logo as second input: -loop 1 -i logo.png + -map 0:a?.""" + logo = tmp_path / "logo.png" + plan = RenderPlan( + inputs=[tmp_path / "clip.mkv", logo], + output=tmp_path / "out.mp4", + filter_complex=( + "[0:v]scale=1080:-2:flags=lanczos[_prelogo];" + "[1:v]scale=-1:202:flags=lanczos[_logo];" + "[_prelogo][_logo]overlay=x=1070-w:y=1583-h/2:format=auto:shortest=1" + ), + ) + cmd = build_short_command(Path("/usr/bin/ffmpeg"), plan) + # First input (video) + assert cmd[4] == "-i" + assert cmd[5] == str(tmp_path / "clip.mkv") + # Second input (logo) prefixed with -loop 1 + assert cmd[6] == "-loop" + assert cmd[7] == "1" + assert cmd[8] == "-i" + assert cmd[9] == str(logo) + # Audio mapping from video input + assert "-map" in cmd + map_idx = cmd.index("-map") + assert cmd[map_idx + 1] == "0:a?" + + +def test_build_short_command_logo_with_vfinal(tmp_path: Path) -> None: + """Logo + speed segments: has [vfinal]/[afinal] so uses those maps, not 0:a?.""" + logo = tmp_path / "logo.png" + fc = ( + "[0:v]split=2[v0][v1];" + "[v0]trim=0:5,setpts=PTS-STARTPTS[sv0];" + "[v1]trim=5,setpts=PTS-STARTPTS,setpts=PTS/0.5[sv1];" + "[sv0][sv1]concat=n=2:v=1:a=0[_prelogo];" + "[1:v]scale=-1:202:flags=lanczos[_logo];" + "[_prelogo][_logo]overlay=x=1070-w:y=1583-h/2[vfinal];" + "[0:a]asplit=2[a0][a1];" + "[a0]atrim=0:5,asetpts=PTS-STARTPTS[sa0];" + "[a1]atrim=5,asetpts=PTS-STARTPTS,atempo=0.5[sa1];" + "[sa0][sa1]concat=n=2:v=0:a=1[afinal]" + ) + plan = RenderPlan( + inputs=[tmp_path / "clip.mkv", logo], + output=tmp_path / "out.mp4", + filter_complex=fc, + ) + cmd = build_short_command(Path("/usr/bin/ffmpeg"), plan) + # Should use [vfinal]/[afinal] maps, not 0:a? 
+ assert "-map" in cmd + map_indices = [i for i, v in enumerate(cmd) if v == "-map"] + assert len(map_indices) == 2 + assert cmd[map_indices[0] + 1] == "[vfinal]" + assert cmd[map_indices[1] + 1] == "[afinal]" + + +def test_build_short_command_no_logo_single_input(tmp_path: Path) -> None: + """Without logo: no -loop, no extra -i, no -map 0:a?.""" + plan = RenderPlan( + inputs=[tmp_path / "clip.mkv"], + output=tmp_path / "out.mp4", + filter_complex="scale=1080:-2:flags=lanczos", + ) + cmd = build_short_command(Path("/usr/bin/ffmpeg"), plan) + assert cmd.count("-i") == 1 + assert "-loop" not in cmd + assert "0:a?" not in cmd + + # --------------------------------------------------------------------------- # build_extract_frame_command — golden assertions # --------------------------------------------------------------------------- diff --git a/tests/unit/core/test_iterations.py b/tests/unit/core/test_iterations.py index ba9fda5..fc69822 100644 --- a/tests/unit/core/test_iterations.py +++ b/tests/unit/core/test_iterations.py @@ -1257,3 +1257,61 @@ def fake_render(plan: object, **kwargs: object) -> RenderResult: assert ctx.data["game_event"] is event_sentinel assert ctx.data["player"] == "#48 Remitz" assert ctx.data["assists"] == "#7 Smith" + + +# --------------------------------------------------------------------------- +# queue flag — render_iterations queues instead of emitting POST_RENDER +# --------------------------------------------------------------------------- + + +def test_queue_flag_emits_on_queue(tmp_path: Path, _mock_hook_registry: MagicMock) -> None: + """When queue=True, render_iterations emits ON_QUEUE instead of POST_RENDER.""" + from reeln.models.game import GameInfo + from reeln.plugins.hooks import Hook + + clip = tmp_path / "clip.mkv" + clip.write_bytes(b"video") + output = tmp_path / "out.mp4" + config = _make_config() + + short_cfg = ShortConfig(input=clip, output=output, width=1080, height=1920) + iter0 = _iteration_temp(output, 0) + + def 
fake_render(plan: object, **kwargs: object) -> RenderResult: + iter0.write_bytes(b"rendered") + return _mock_render_result(iter0) + + gi = GameInfo(date="2026-04-06", home_team="North", away_team="South", sport="hockey") + + with ( + patch(f"{_MOD}.FFmpegRenderer") as MockRenderer, + patch(f"{_MOD}.plan_short") as mock_plan, + patch(f"{_MOD}.run_ffmpeg"), + patch(f"{_MOD}.probe_duration", return_value=10.0), + patch("reeln.plugins.loader.activate_plugins", return_value={}), + patch("reeln.core.queue.update_queue_index"), + ): + mock_plan.return_value = RenderPlan(inputs=[clip], output=iter0) + mock_instance = MagicMock() + mock_instance.render.side_effect = fake_render + MockRenderer.return_value = mock_instance + + result, messages = render_iterations( + clip, + ["fullspeed"], + config, + Path("/usr/bin/ffmpeg"), + output, + is_short=True, + short_config=short_cfg, + game_info=gi, + queue=True, + ) + + assert result.output == output + assert any("Queued:" in m for m in messages) + # Should have emitted ON_QUEUE, not POST_RENDER + emit_calls = _mock_hook_registry.emit.call_args_list + hooks_emitted = [call[0][0] for call in emit_calls] + assert Hook.ON_QUEUE in hooks_emitted + assert Hook.POST_RENDER not in hooks_emitted diff --git a/tests/unit/core/test_metadata.py b/tests/unit/core/test_metadata.py new file mode 100644 index 0000000..16ca2f6 --- /dev/null +++ b/tests/unit/core/test_metadata.py @@ -0,0 +1,221 @@ +"""Tests for centralized metadata generation.""" + +from __future__ import annotations + +from reeln.core.metadata import ( + build_publish_metadata, + generate_description, + generate_title, +) +from reeln.models.game import GameEvent, GameInfo + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +def _game_info(**overrides: object) -> GameInfo: + defaults = { + "date": "2026-04-06", + "home_team": "North", + "away_team": "South", + 
"sport": "hockey", + } + defaults.update(overrides) + return GameInfo(**defaults) # type: ignore[arg-type] + + +def _game_event(**overrides: object) -> GameEvent: + defaults: dict[str, object] = { + "id": "evt_001", + "clip": "/tmp/clip.mp4", + "segment_number": 1, + "event_type": "goal", + "player": "John Smith", + } + defaults.update(overrides) + return GameEvent(**defaults) # type: ignore[arg-type] + + +# --------------------------------------------------------------------------- +# generate_title +# --------------------------------------------------------------------------- + + +def test_title_full_context() -> None: + title = generate_title(_game_info(), _game_event(), player="John Smith") + assert title == "John Smith Goal - North vs South" + + +def test_title_player_from_event() -> None: + title = generate_title(_game_info(), _game_event()) + assert title == "John Smith Goal - North vs South" + + +def test_title_no_player() -> None: + event = _game_event(player="") + title = generate_title(_game_info(), event) + assert title == "Goal - North vs South" + + +def test_title_no_event_type() -> None: + event = _game_event(event_type="") + title = generate_title(_game_info(), event, player="John Smith") + assert title == "John Smith Highlight - North vs South" + + +def test_title_no_game_info() -> None: + title = generate_title(None, _game_event(), player="John Smith") + assert title == "John Smith Goal" + + +def test_title_no_context() -> None: + title = generate_title() + assert title == "Highlight" + + +def test_title_game_info_only() -> None: + title = generate_title(_game_info()) + assert title == "North vs South" + + +def test_title_player_override() -> None: + """Explicit player overrides event.player.""" + event = _game_event(player="Event Player") + title = generate_title(_game_info(), event, player="CLI Player") + assert "CLI Player" in title + + +# --------------------------------------------------------------------------- +# generate_description +# 
--------------------------------------------------------------------------- + + +def test_description_full_context() -> None: + desc = generate_description( + _game_info(level="2016", tournament="Spring Cup"), + _game_event(), + assists="Jane Doe, Bob Jones", + ) + assert "North vs South (2026-04-06)" in desc + assert "Hockey | 2016 | Spring Cup" in desc + assert "Assists: Jane Doe, Bob Jones" in desc + + +def test_description_no_game_info() -> None: + desc = generate_description(assists="Jane Doe") + assert desc == "Assists: Jane Doe" + + +def test_description_no_assists() -> None: + desc = generate_description(_game_info()) + assert "North vs South" in desc + assert "Assists" not in desc + + +def test_description_assists_from_event_metadata() -> None: + event = _game_event(metadata={"assists": "From Event"}) + desc = generate_description(game_event=event) + assert "Assists: From Event" in desc + + +def test_description_explicit_assists_over_event() -> None: + """Explicit assists parameter overrides event metadata.""" + event = _game_event(metadata={"assists": "From Event"}) + desc = generate_description(game_event=event, assists="Explicit") + assert "Assists: Explicit" in desc + + +def test_description_empty() -> None: + desc = generate_description() + assert desc == "" + + +def test_description_sport_only() -> None: + desc = generate_description(_game_info(level="", tournament="")) + lines = desc.strip().split("\n") + assert len(lines) == 2 + assert "Hockey" in lines[1] + + +# --------------------------------------------------------------------------- +# build_publish_metadata +# --------------------------------------------------------------------------- + + +def test_publish_metadata_full() -> None: + meta = build_publish_metadata( + title="My Title", + description="My Desc", + game_info=_game_info(level="2016", tournament="Cup"), + game_event=_game_event(metadata={"score": "3-1"}), + player="John", + assists="Jane", + plugin_inputs={"thumb": "/tmp/t.png"}, + ) 
+ assert meta["title"] == "My Title" + assert meta["description"] == "My Desc" + assert meta["home_team"] == "North" + assert meta["away_team"] == "South" + assert meta["date"] == "2026-04-06" + assert meta["sport"] == "hockey" + assert meta["level"] == "2016" + assert meta["tournament"] == "Cup" + assert meta["event_type"] == "goal" + assert meta["event_id"] == "evt_001" + assert meta["event_metadata"]["score"] == "3-1" + assert meta["player"] == "John" + assert meta["assists"] == "Jane" + assert meta["plugin_inputs"]["thumb"] == "/tmp/t.png" + + +def test_publish_metadata_minimal() -> None: + meta = build_publish_metadata(title="T", description="D") + assert meta["title"] == "T" + assert meta["description"] == "D" + assert "home_team" not in meta + assert "player" not in meta + assert "plugin_inputs" not in meta + + +def test_publish_metadata_no_optional_game_fields() -> None: + meta = build_publish_metadata( + title="T", description="D", game_info=_game_info() + ) + assert "level" not in meta # empty string not included + assert "tournament" not in meta + + +def test_description_no_date() -> None: + """Cover branch where date is empty.""" + info = _game_info(date="") + desc = generate_description(info) + assert "North vs South" in desc + assert "()" not in desc + + +def test_description_level_no_sport() -> None: + """Cover branches where sport is empty but level/tournament are set.""" + info = _game_info(sport="", level="2016", tournament="Cup") + desc = generate_description(info) + assert "2016" in desc + assert "Cup" in desc + + +def test_description_no_sport_level_tournament() -> None: + """Cover branch where context_parts is empty (sport/level/tournament all empty).""" + info = _game_info(sport="", level="", tournament="") + desc = generate_description(info) + # Only matchup line, no context line + lines = desc.strip().split("\n") + assert len(lines) == 1 + assert "North vs South" in lines[0] + + +def test_publish_metadata_with_event_no_metadata() -> None: + 
"""Cover branch where game_event has no metadata dict.""" + event = _game_event(metadata={}) + meta = build_publish_metadata( + title="T", description="D", game_event=event + ) + assert "event_metadata" not in meta diff --git a/tests/unit/core/test_overlay.py b/tests/unit/core/test_overlay.py index d2438f2..f52927e 100644 --- a/tests/unit/core/test_overlay.py +++ b/tests/unit/core/test_overlay.py @@ -367,3 +367,37 @@ def test_no_tournament_keeps_original_format(self) -> None: result = build_overlay_context(ctx, event_metadata={}) assert result.get("goal_scorer_team") == "ROSEVILLE" assert result.get("team_level") == "BANTAM" + + # -- has_logo tests -- + + def test_text_right_default_no_logo(self) -> None: + ctx = self._base_ctx() + result = build_overlay_context(ctx, event_metadata={}) + assert result.get("goal_overlay_text_right") == "1920" + + def test_text_right_with_logo(self) -> None: + ctx = self._base_ctx() + result = build_overlay_context(ctx, event_metadata={}, has_logo=True) + text_right = int(result.get("goal_overlay_text_right")) + # Should be reduced from 1920 by the logo reserve + assert text_right < 1920 + assert text_right == 3 + 1914 - 200 # 1717 + + def test_has_logo_reduces_scorer_max_chars(self) -> None: + """Long scorer text gets smaller font with logo due to reduced max_chars.""" + ctx = self._base_ctx(player="A" * 24) # exactly at no-logo max_chars + meta = {"assists": ["#7 Jones"]} + + no_logo = build_overlay_context(ctx, event_metadata=meta) + with_logo = build_overlay_context(ctx, event_metadata=meta, has_logo=True) + + # Without logo: 24 chars fits in max_chars=24 -> base size + assert int(no_logo.get("goal_scorer_fs")) == 46 + # With logo: 24 chars exceeds max_chars=18 -> scaled down + assert int(with_logo.get("goal_scorer_fs")) < 46 + + def test_has_logo_short_name_unchanged(self) -> None: + """Short names stay at base font size even with logo.""" + ctx = self._base_ctx(player="Smith") + result = build_overlay_context(ctx, 
event_metadata={}, has_logo=True) + assert int(result.get("goal_scorer_fs")) == 54 # no assists -> base=54 diff --git a/tests/unit/core/test_queue.py b/tests/unit/core/test_queue.py new file mode 100644 index 0000000..ead085f --- /dev/null +++ b/tests/unit/core/test_queue.py @@ -0,0 +1,775 @@ +"""Tests for queue business logic.""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any +from unittest.mock import MagicMock, patch + +import pytest + +from reeln.core.errors import QueueError +from reeln.core.queue import ( + _find_item, + _generate_id, + _now_iso, + add_to_queue, + discover_targets, + get_queue_item, + load_queue, + load_queue_index, + publish_all, + publish_queue_item, + remove_from_queue, + save_queue, + update_queue_index, + update_queue_item, +) +from reeln.models.game import GameEvent, GameInfo +from reeln.models.queue import ( + PublishStatus, + QueueItem, + QueueStatus, + RenderQueue, +) +from reeln.models.render_plan import RenderResult + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +def _game_info() -> GameInfo: + return GameInfo( + date="2026-04-06", + home_team="North", + away_team="South", + sport="hockey", + level="2016", + ) + + +def _game_event() -> GameEvent: + return GameEvent( + id="evt_001", + clip="/tmp/clip.mp4", + segment_number=1, + event_type="goal", + player="John Smith", + ) + + +def _render_result(tmp_path: Path) -> RenderResult: + out = tmp_path / "short.mp4" + out.write_bytes(b"fake video") + return RenderResult(output=out, duration_seconds=15.0, file_size_bytes=1024) + + +def _make_item(**overrides: Any) -> QueueItem: + defaults: dict[str, Any] = { + "id": "abc123def456", + "output": "/tmp/short.mp4", + "game_dir": "/tmp/game", + "status": QueueStatus.RENDERED, + "queued_at": "2026-04-06T12:00:00Z", + } + defaults.update(overrides) + return 
QueueItem(**defaults) + + +# --------------------------------------------------------------------------- +# load_queue / save_queue +# --------------------------------------------------------------------------- + + +def test_load_queue_empty(tmp_path: Path) -> None: + q = load_queue(tmp_path) + assert q.version == 1 + assert q.items == () + + +def test_save_and_load_roundtrip(tmp_path: Path) -> None: + item = _make_item(game_dir=str(tmp_path)) + queue = RenderQueue(items=(item,)) + save_queue(queue, tmp_path) + + loaded = load_queue(tmp_path) + assert len(loaded.items) == 1 + assert loaded.items[0].id == "abc123def456" + + +def test_save_queue_creates_file(tmp_path: Path) -> None: + queue = RenderQueue() + path = save_queue(queue, tmp_path) + assert path.is_file() + data = json.loads(path.read_text()) + assert data["version"] == 1 + assert data["items"] == [] + + +def test_load_queue_invalid_json(tmp_path: Path) -> None: + (tmp_path / "render_queue.json").write_text("not json") + with pytest.raises(QueueError, match="Invalid queue file"): + load_queue(tmp_path) + + +# --------------------------------------------------------------------------- +# _generate_id / _now_iso +# --------------------------------------------------------------------------- + + +def test_generate_id_length() -> None: + assert len(_generate_id()) == 12 + + +def test_generate_id_unique() -> None: + ids = {_generate_id() for _ in range(100)} + assert len(ids) == 100 + + +def test_now_iso_format() -> None: + ts = _now_iso() + assert "T" in ts + assert "+" in ts or "Z" in ts or ts.endswith("+00:00") + + +# --------------------------------------------------------------------------- +# add_to_queue +# --------------------------------------------------------------------------- + + +def test_add_to_queue(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + item = add_to_queue( + tmp_path, + result, + game_info=_game_info(), + 
game_event=_game_event(), + player="John Smith", + assists="Jane Doe", + render_profile="default", + format_str="1080x1920", + crop_mode="crop", + event_id="evt_001", + available_targets=["google", "meta"], + ) + + assert len(item.id) == 12 + assert item.status is QueueStatus.RENDERED + assert item.home_team == "North" + assert item.player == "John Smith" + assert item.title != "" + assert item.description != "" + assert len(item.publish_targets) == 2 + assert item.publish_targets[0].target == "google" + assert item.publish_targets[0].status is PublishStatus.PENDING + + # Verify persisted + queue = load_queue(tmp_path) + assert len(queue.items) == 1 + assert queue.items[0].id == item.id + + +def test_add_to_queue_no_targets(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + item = add_to_queue(tmp_path, result) + assert item.publish_targets == () + + +def test_add_to_queue_multiple(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + add_to_queue(tmp_path, result) + add_to_queue(tmp_path, result) + queue = load_queue(tmp_path) + assert len(queue.items) == 2 + assert queue.items[0].id != queue.items[1].id + + +# --------------------------------------------------------------------------- +# _find_item +# --------------------------------------------------------------------------- + + +def test_find_item_exact() -> None: + queue = RenderQueue(items=(_make_item(id="abc123def456"),)) + idx, item = _find_item(queue, "abc123def456") + assert idx == 0 + assert item.id == "abc123def456" + + +def test_find_item_prefix() -> None: + queue = RenderQueue(items=(_make_item(id="abc123def456"),)) + idx, _item = _find_item(queue, "abc") + assert idx == 0 + + +def test_find_item_not_found() -> None: + queue = RenderQueue(items=(_make_item(id="abc123def456"),)) + with pytest.raises(QueueError, match="not found"): + _find_item(queue, "xyz") + + +def 
test_find_item_ambiguous() -> None: + queue = RenderQueue(items=(_make_item(id="abc111"), _make_item(id="abc222"))) + with pytest.raises(QueueError, match="Ambiguous"): + _find_item(queue, "abc") + + +# --------------------------------------------------------------------------- +# get_queue_item +# --------------------------------------------------------------------------- + + +def test_get_queue_item_found(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + item = get_queue_item(tmp_path, added.id) + assert item is not None + assert item.id == added.id + + +def test_get_queue_item_not_found(tmp_path: Path) -> None: + assert get_queue_item(tmp_path, "nope") is None + + +# --------------------------------------------------------------------------- +# update_queue_item +# --------------------------------------------------------------------------- + + +def test_update_title(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + updated = update_queue_item(tmp_path, added.id, title="New Title") + assert updated.title == "New Title" + # Verify persisted + loaded = get_queue_item(tmp_path, added.id) + assert loaded is not None + assert loaded.title == "New Title" + + +def test_update_description(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + updated = update_queue_item(tmp_path, added.id, description="New Desc") + assert updated.description == "New Desc" + + +def test_update_no_changes(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + updated = update_queue_item(tmp_path, added.id) + assert updated.id == added.id + + +def test_update_not_found(tmp_path: 
Path) -> None: + with pytest.raises(QueueError, match="not found"): + update_queue_item(tmp_path, "nope", title="x") + + +# --------------------------------------------------------------------------- +# remove_from_queue +# --------------------------------------------------------------------------- + + +def test_remove(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + removed = remove_from_queue(tmp_path, added.id) + assert removed.status is QueueStatus.REMOVED + # Verify persisted + loaded = get_queue_item(tmp_path, added.id) + assert loaded is not None + assert loaded.status is QueueStatus.REMOVED + + +def test_remove_not_found(tmp_path: Path) -> None: + with pytest.raises(QueueError, match="not found"): + remove_from_queue(tmp_path, "nope") + + +# --------------------------------------------------------------------------- +# discover_targets +# --------------------------------------------------------------------------- + + +def test_discover_targets() -> None: + google = MagicMock() + google.upload = MagicMock() + meta = MagicMock() + meta.upload = MagicMock() + notifier = MagicMock(spec=[]) # no upload method + plugins: dict[str, object] = {"google": google, "meta": meta, "notifier": notifier} + targets = discover_targets(plugins) + assert targets == ["google", "meta"] + + +def test_discover_targets_empty() -> None: + assert discover_targets({}) == [] + + +# --------------------------------------------------------------------------- +# publish_queue_item +# --------------------------------------------------------------------------- + + +def test_publish_single_target(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue( + tmp_path, result, available_targets=["google", "meta"] + ) + + google = MagicMock() + google.upload = MagicMock(return_value="https://youtu.be/x") + 
plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins, target="google") + google.upload.assert_called_once() + assert published.status is QueueStatus.PARTIAL # meta still pending + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.PUBLISHED + assert google_target.url == "https://youtu.be/x" + + +def test_publish_all_targets(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google"]) + + google = MagicMock() + google.upload = MagicMock(return_value="https://youtu.be/x") + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins) + assert published.status is QueueStatus.PUBLISHED + + +def test_publish_target_failure(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google"]) + + google = MagicMock() + google.upload = MagicMock(side_effect=RuntimeError("API error")) + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins) + assert published.status is QueueStatus.FAILED + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.FAILED + assert "API error" in google_target.error + + +def test_publish_removed_item_raises(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google"]) + remove_from_queue(tmp_path, added.id) + + with pytest.raises(QueueError, match="Cannot publish removed"): + publish_queue_item(tmp_path, added.id, {}) + + +def test_publish_missing_output_raises(tmp_path: Path) 
-> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + # Delete the output file + Path(added.output).unlink() + + with pytest.raises(QueueError, match="Output file not found"): + publish_queue_item(tmp_path, added.id, {}) + + +def test_publish_unknown_target_raises(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) + + with pytest.raises(QueueError, match="Unknown or non-uploader"): + publish_queue_item(tmp_path, added.id, {}, target="nonexistent") + + +def test_publish_no_pending_targets_raises(tmp_path: Path) -> None: + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) # no available_targets + + with pytest.raises(QueueError, match="No pending publish targets"): + publish_queue_item(tmp_path, added.id, {}) + + +# --------------------------------------------------------------------------- +# publish via POST_RENDER hook (existing plugin pattern) +# --------------------------------------------------------------------------- + + +def test_publish_via_post_render_hook(tmp_path: Path) -> None: + """Plugins with on_post_render are published via POST_RENDER hook emission.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google"]) + + # Plugin has on_post_render but no upload method + google = MagicMock(spec=["on_post_render", "name"]) + google.name = "google" + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins) + assert published.status is QueueStatus.PUBLISHED + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.PUBLISHED + + +def 
test_publish_via_hook_target_flag(tmp_path: Path) -> None: + """--target works with hook-based plugins.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google", "meta"]) + + google = MagicMock(spec=["on_post_render", "name"]) + google.name = "google" + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins, target="google") + assert published.status is QueueStatus.PARTIAL # meta still pending + + +def test_publish_via_hook_ad_hoc_target(tmp_path: Path) -> None: + """Hook target not in original publish_targets gets appended.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) # no available_targets + + google = MagicMock(spec=["on_post_render", "name"]) + google.name = "google" + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins, target="google") + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.PUBLISHED + + +def test_publish_via_hook_failure_ad_hoc_target(tmp_path: Path) -> None: + """Hook failure with ad-hoc target appends failed result.""" + from reeln.plugins.hooks import Hook + from reeln.plugins.registry import get_registry + + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) # no available_targets + + google = MagicMock(spec=["on_post_render", "name"]) + google.name = "google" + plugins: dict[str, object] = {"google": google} + + original_emit = get_registry().emit + + def failing_emit(hook: Hook, context: object = None) -> None: + if hook is Hook.POST_RENDER: + raise RuntimeError("boom") + original_emit(hook, context) + + with patch.object(get_registry(), "emit", side_effect=failing_emit): + published 
= publish_queue_item(tmp_path, added.id, plugins, target="google") + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.FAILED + + +def test_publish_via_hook_failure(tmp_path: Path) -> None: + """POST_RENDER emission failure marks hook targets as failed.""" + from reeln.plugins.hooks import Hook + from reeln.plugins.registry import get_registry + + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google"]) + + google = MagicMock(spec=["on_post_render", "name"]) + google.name = "google" + plugins: dict[str, object] = {"google": google} + + # Make the registry emit raise only for POST_RENDER + original_emit = get_registry().emit + + def failing_emit(hook: Hook, context: object = None) -> None: + if hook is Hook.POST_RENDER: + raise RuntimeError("hook boom") + original_emit(hook, context) + + with patch.object(get_registry(), "emit", side_effect=failing_emit): + published = publish_queue_item(tmp_path, added.id, plugins) + assert published.status is QueueStatus.FAILED + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.FAILED + assert "hook boom" in google_target.error + + +# --------------------------------------------------------------------------- +# discover_targets — both patterns +# --------------------------------------------------------------------------- + + +def test_discover_targets_hook_based() -> None: + """Plugins with on_post_render are discovered as targets.""" + google = MagicMock(spec=["on_post_render", "name"]) + meta = MagicMock(spec=["on_post_render", "name"]) + notifier = MagicMock(spec=["on_game_init", "name"]) + plugins: dict[str, object] = {"google": google, "meta": meta, "notifier": notifier} + targets = discover_targets(plugins) + assert targets == ["google", "meta"] + + +def 
test_discover_targets_mixed() -> None: + """Both upload-protocol and hook-based plugins are discovered.""" + uploader = MagicMock() + uploader.upload = MagicMock() + hook_plugin = MagicMock(spec=["on_post_render", "name"]) + plugins: dict[str, object] = {"uploader": uploader, "hook": hook_plugin} + targets = discover_targets(plugins) + assert targets == ["hook", "uploader"] + + +# --------------------------------------------------------------------------- +# publish_all +# --------------------------------------------------------------------------- + + +def test_publish_all(tmp_path: Path) -> None: + result = _render_result(tmp_path) + google = MagicMock() + google.upload = MagicMock(return_value="https://youtu.be/1") + plugins: dict[str, object] = {"google": google} + + with patch("reeln.core.queue.update_queue_index"): + add_to_queue(tmp_path, result, available_targets=["google"]) + add_to_queue(tmp_path, result, available_targets=["google"]) + + published = publish_all(tmp_path, plugins) + assert len(published) == 2 + assert all(p.status is QueueStatus.PUBLISHED for p in published) + + +def test_publish_all_empty(tmp_path: Path) -> None: + results = publish_all(tmp_path, {}) + assert results == [] + + +# --------------------------------------------------------------------------- +# Queue index +# --------------------------------------------------------------------------- + + +def test_update_and_load_index(tmp_path: Path) -> None: + with patch("reeln.core.config.data_dir", return_value=tmp_path): + update_queue_index(Path("/fake/game1")) + update_queue_index(Path("/fake/game2")) + update_queue_index(Path("/fake/game1")) # duplicate + + index = load_queue_index() + assert index == ["/fake/game1", "/fake/game2"] + + +def test_load_index_empty(tmp_path: Path) -> None: + with patch("reeln.core.config.data_dir", return_value=tmp_path): + index = load_queue_index() + assert index == [] + + +def test_load_index_invalid_json(tmp_path: Path) -> None: + (tmp_path / 
"queue_index.json").write_text("bad") + with patch("reeln.core.config.data_dir", return_value=tmp_path): + index = load_queue_index() + assert index == [] + + +# --------------------------------------------------------------------------- +# save_queue failure cleanup +# --------------------------------------------------------------------------- + + +def test_save_queue_cleans_up_on_failure(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """Atomic write cleans up temp file on failure.""" + + def failing_replace(self: Path, target: str | Path) -> Path: + raise OSError("disk full") + + monkeypatch.setattr(Path, "replace", failing_replace) + + with pytest.raises(OSError, match="disk full"): + save_queue(RenderQueue(), tmp_path) + + tmp_files = list(tmp_path.glob("*.tmp")) + assert tmp_files == [] + + +# --------------------------------------------------------------------------- +# update_queue_index failure cleanup +# --------------------------------------------------------------------------- + + +def test_update_index_cleans_up_on_failure(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """Atomic index write cleans up temp file on failure.""" + + def failing_replace(self: Path, target: str | Path) -> Path: + raise OSError("disk full") + + monkeypatch.setattr(Path, "replace", failing_replace) + + with pytest.raises(OSError, match="disk full"), patch("reeln.core.config.data_dir", return_value=tmp_path): + update_queue_index(Path("/fake/game")) + + tmp_files = list(tmp_path.glob("*.tmp")) + assert tmp_files == [] + + +# --------------------------------------------------------------------------- +# publish: ad-hoc target not in existing publish_targets +# --------------------------------------------------------------------------- + + +def test_publish_ad_hoc_target(tmp_path: Path) -> None: + """Publishing to a target not in the item's publish_targets list appends it.""" + result = _render_result(tmp_path) + with 
patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result) # no available_targets + + google = MagicMock() + google.upload = MagicMock(return_value="https://youtu.be/x") + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins, target="google") + # The ad-hoc target should be appended + google_target = next(t for t in published.publish_targets if t.target == "google") + assert google_target.status is PublishStatus.PUBLISHED + + +# --------------------------------------------------------------------------- +# publish: mixed statuses (PARTIAL overall via else branch) +# --------------------------------------------------------------------------- + + +def test_publish_mixed_pending_and_failed(tmp_path: Path) -> None: + """When one target fails and another is still pending, overall is PARTIAL.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue( + tmp_path, result, available_targets=["google", "meta"] + ) + + google = MagicMock() + google.upload = MagicMock(side_effect=RuntimeError("fail")) + plugins: dict[str, object] = {"google": google} + + # Publish only google (which fails), meta stays pending + published = publish_queue_item(tmp_path, added.id, plugins, target="google") + assert published.status is QueueStatus.PARTIAL + + +# --------------------------------------------------------------------------- +# _reconstruct helpers +# --------------------------------------------------------------------------- + + +def test_publish_with_game_context(tmp_path: Path) -> None: + """Publish with game info and event context reconstructs metadata.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue( + tmp_path, + result, + game_info=_game_info(), + game_event=_game_event(), + player="John", + assists="Jane", + event_id="evt_001", + available_targets=["google"], + 
plugin_inputs={"thumb": "/tmp/t.png"}, + ) + + # Use hook-based plugin to exercise the POST_RENDER path with full context + google = MagicMock(spec=["on_post_render", "name"]) + google.name = "google" + plugins: dict[str, object] = {"google": google} + + published = publish_queue_item(tmp_path, added.id, plugins) + assert published.status is QueueStatus.PUBLISHED + + +def test_find_target_idx_empty_list() -> None: + """Cover the _find_target_idx function with empty targets list.""" + from reeln.core.queue import _find_target_idx + + assert _find_target_idx([], "google") is None + + +def test_find_target_idx_no_match() -> None: + """Cover the branch where target_name doesn't match any entry.""" + from reeln.core.queue import _find_target_idx + from reeln.models.queue import PublishTargetResult + + targets = [PublishTargetResult(target="meta"), PublishTargetResult(target="tiktok")] + assert _find_target_idx(targets, "google") is None + + +def test_find_target_idx_match() -> None: + """Cover the branch where target_name matches.""" + from reeln.core.queue import _find_target_idx + from reeln.models.queue import PublishTargetResult + + targets = [PublishTargetResult(target="meta"), PublishTargetResult(target="google")] + assert _find_target_idx(targets, "google") == 1 + + +# --------------------------------------------------------------------------- +# publish: target pending but plugin not available (skips the target) +# --------------------------------------------------------------------------- + + +def test_publish_all_targets_skips_unavailable(tmp_path: Path) -> None: + """Pending targets whose plugins aren't loaded are skipped.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue( + tmp_path, result, available_targets=["google", "meta"] + ) + + # Only provide google plugin, not meta + google = MagicMock() + google.upload = MagicMock(return_value="https://youtu.be/x") + plugins: dict[str, object] = 
{"google": google} + + # Publish all pending → only google gets published, meta stays pending + published = publish_queue_item(tmp_path, added.id, plugins) + assert published.status is QueueStatus.PARTIAL + + +# --------------------------------------------------------------------------- +# publish_all: queue with non-rendered items (skipped) +# --------------------------------------------------------------------------- + + +def test_publish_all_skips_non_rendered(tmp_path: Path) -> None: + """publish_all only publishes RENDERED items, skips others.""" + result = _render_result(tmp_path) + with patch("reeln.core.queue.update_queue_index"): + added = add_to_queue(tmp_path, result, available_targets=["google"]) + # Mark it as removed + remove_from_queue(tmp_path, added.id) + + google = MagicMock() + google.upload = MagicMock(return_value="https://youtu.be/x") + plugins: dict[str, object] = {"google": google} + + results = publish_all(tmp_path, plugins) + assert results == [] # nothing published since item is REMOVED diff --git a/tests/unit/core/test_shorts.py b/tests/unit/core/test_shorts.py index 63e1272..154f3d1 100644 --- a/tests/unit/core/test_shorts.py +++ b/tests/unit/core/test_shorts.py @@ -1259,3 +1259,170 @@ def test_plan_preview_passes_branding(tmp_path: Path) -> None: cfg = _cfg(tmp_path, branding=brand) plan = plan_preview(cfg) assert "subtitles=" in plan.filter_complex + + +# --------------------------------------------------------------------------- +# Logo overlay +# --------------------------------------------------------------------------- + + +def test_build_logo_overlay_filter_defaults() -> None: + from reeln.core.shorts import build_logo_overlay_filter + + scale_f, overlay_f = build_logo_overlay_filter( + target_width=1080, target_height=1920, + ) + assert "scale=" in scale_f + assert "lanczos" in scale_f + assert "overlay=" in overlay_f + assert "shortest=1" in overlay_f + + +def test_build_logo_overlay_filter_square() -> None: + from 
reeln.core.shorts import build_logo_overlay_filter + + scale_f, overlay_f = build_logo_overlay_filter( + target_width=1080, target_height=1080, + ) + assert "scale=" in scale_f + assert "overlay=" in overlay_f + + +def test_build_logo_overlay_filter_no_assists() -> None: + from reeln.core.shorts import build_logo_overlay_filter + + scale_f, _ = build_logo_overlay_filter( + target_width=1080, target_height=1920, has_assists=False, + ) + # Different box height should produce a different max logo height + scale_assists, _ = build_logo_overlay_filter( + target_width=1080, target_height=1920, has_assists=True, + ) + # No-assists box is smaller -> logo height differs + assert scale_f != scale_assists + + +def test_build_filter_chain_logo_pad(tmp_path: Path) -> None: + """Path 1 (simple pad): logo wraps chain with [1:v] overlay.""" + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + cfg = _cfg(tmp_path, logo=logo) + chain, _ = build_filter_chain(cfg) + assert "[0:v]" in chain + assert "[1:v]" in chain + assert "overlay=" in chain + assert "[_prelogo]" in chain + assert "[_logo]" in chain + + +def test_build_filter_chain_logo_crop(tmp_path: Path) -> None: + """Path 1 (simple crop): logo wraps chain with [1:v] overlay.""" + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + cfg = _cfg(tmp_path, crop_mode=CropMode.CROP, logo=logo) + chain, _ = build_filter_chain(cfg) + assert "[1:v]" in chain + assert "overlay=" in chain + + +def test_build_filter_chain_logo_with_subtitle(tmp_path: Path) -> None: + """Logo overlay appears after subtitle in filter chain.""" + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + sub = tmp_path / "subs.ass" + sub.write_text("[Script Info]\n") + cfg = _cfg(tmp_path, subtitle=sub, logo=logo) + chain, _ = build_filter_chain(cfg) + assert "subtitles=" in chain + assert "overlay=" in chain + # Subtitle should come before overlay + sub_pos = chain.index("subtitles=") + overlay_pos = chain.index("overlay=") + assert sub_pos < 
overlay_pos + + +def test_build_filter_chain_no_logo_unchanged(tmp_path: Path) -> None: + """No logo: chain is plain comma-joined (no [0:v] prefix).""" + cfg = _cfg(tmp_path) + chain, _ = build_filter_chain(cfg) + assert "[1:v]" not in chain + assert "[0:v]" not in chain + + +def test_build_filter_chain_smart_pad_with_logo(tmp_path: Path) -> None: + """Path 2 (smart pad): logo appended to multi-stream graph.""" + from reeln.models.zoom import ZoomPath, ZoomPoint + + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + zp = ZoomPath( + duration=10.0, + points=(ZoomPoint(timestamp=0.0, center_x=0.5, center_y=0.5),), + source_width=1920, + source_height=1080, + ) + cfg = _cfg(tmp_path, smart=True, logo=logo) + chain, _ = build_filter_chain(cfg, zoom_path=zp) + assert "[1:v]" in chain + assert "overlay=" in chain + assert "[_prelogo]" in chain + + +def test_build_filter_chain_speed_segments_with_logo(tmp_path: Path) -> None: + """Path 3 (speed segments static): logo replaces [vfinal] with overlay.""" + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + segs = (SpeedSegment(speed=1.0, until=5.0), SpeedSegment(speed=0.5)) + cfg = _cfg(tmp_path, speed_segments=segs, logo=logo) + chain, audio = build_filter_chain(cfg) + assert audio is None + assert "[1:v]" in chain + assert "[vfinal]" in chain + assert "[_prelogo]" in chain + + +def test_build_filter_chain_speed_segments_smart_pad_with_logo(tmp_path: Path) -> None: + """Path 4 (speed segments + smart pad): logo overlay after smart pad.""" + from reeln.models.zoom import ZoomPath, ZoomPoint + + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + zp = ZoomPath( + duration=10.0, + points=(ZoomPoint(timestamp=0.0, center_x=0.5, center_y=0.5),), + source_width=1920, + source_height=1080, + ) + segs = (SpeedSegment(speed=1.0, until=5.0), SpeedSegment(speed=0.5)) + cfg = _cfg(tmp_path, speed_segments=segs, smart=True, logo=logo) + chain, audio = build_filter_chain(cfg, zoom_path=zp, source_fps=30.0) + assert 
audio is None + assert "[1:v]" in chain + assert "[vfinal]" in chain + + +def test_plan_short_with_logo(tmp_path: Path) -> None: + """plan_short includes logo in inputs list.""" + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + cfg = _cfg(tmp_path, logo=logo) + plan = plan_short(cfg) + assert len(plan.inputs) == 2 + assert plan.inputs[1] == logo + + +def test_plan_short_no_logo(tmp_path: Path) -> None: + """plan_short has single input when no logo.""" + cfg = _cfg(tmp_path) + plan = plan_short(cfg) + assert len(plan.inputs) == 1 + + +def test_plan_preview_with_logo(tmp_path: Path) -> None: + """plan_preview includes logo in inputs list.""" + logo = tmp_path / "logo.png" + logo.write_bytes(b"PNG") + cfg = _cfg(tmp_path, logo=logo) + plan = plan_preview(cfg) + assert len(plan.inputs) == 2 + assert plan.inputs[1] == logo diff --git a/tests/unit/models/test_auth.py b/tests/unit/models/test_auth.py new file mode 100644 index 0000000..2559e23 --- /dev/null +++ b/tests/unit/models/test_auth.py @@ -0,0 +1,157 @@ +"""Tests for plugin authentication data models.""" + +from __future__ import annotations + +import pytest + +from reeln.models.auth import ( + AuthCheckResult, + AuthStatus, + PluginAuthReport, + auth_check_result_to_dict, + plugin_auth_report_to_dict, +) + +# --------------------------------------------------------------------------- +# AuthStatus +# --------------------------------------------------------------------------- + + +def test_auth_status_values() -> None: + assert AuthStatus.OK.value == "ok" + assert AuthStatus.WARN.value == "warn" + assert AuthStatus.FAIL.value == "fail" + assert AuthStatus.EXPIRED.value == "expired" + assert AuthStatus.NOT_CONFIGURED.value == "not_configured" + + +def test_auth_status_member_count() -> None: + assert len(AuthStatus) == 5 + + +# --------------------------------------------------------------------------- +# AuthCheckResult +# --------------------------------------------------------------------------- + + 
+def test_auth_check_result_required_fields() -> None: + r = AuthCheckResult(service="YouTube", status=AuthStatus.OK, message="Connected") + assert r.service == "YouTube" + assert r.status == AuthStatus.OK + assert r.message == "Connected" + + +def test_auth_check_result_defaults() -> None: + r = AuthCheckResult(service="s", status=AuthStatus.OK, message="ok") + assert r.identity == "" + assert r.expires_at == "" + assert r.scopes == [] + assert r.required_scopes == [] + assert r.hint == "" + + +def test_auth_check_result_all_fields() -> None: + r = AuthCheckResult( + service="YouTube", + status=AuthStatus.OK, + message="Authenticated", + identity="StreamnDad Hockey", + expires_at="2026-12-31T23:59:59", + scopes=["youtube", "youtube.upload"], + required_scopes=["youtube", "youtube.upload", "youtube.force-ssl"], + hint="Grant youtube.force-ssl scope", + ) + assert r.identity == "StreamnDad Hockey" + assert r.expires_at == "2026-12-31T23:59:59" + assert r.scopes == ["youtube", "youtube.upload"] + assert r.required_scopes == ["youtube", "youtube.upload", "youtube.force-ssl"] + assert r.hint == "Grant youtube.force-ssl scope" + + +def test_auth_check_result_frozen() -> None: + r = AuthCheckResult(service="s", status=AuthStatus.OK, message="ok") + with pytest.raises(AttributeError): + r.service = "other" # type: ignore[misc] + + +# --------------------------------------------------------------------------- +# PluginAuthReport +# --------------------------------------------------------------------------- + + +def test_plugin_auth_report_defaults() -> None: + report = PluginAuthReport(plugin_name="google") + assert report.plugin_name == "google" + assert report.results == [] + + +def test_plugin_auth_report_with_results() -> None: + r1 = AuthCheckResult(service="YouTube", status=AuthStatus.OK, message="ok") + r2 = AuthCheckResult(service="Drive", status=AuthStatus.WARN, message="limited") + report = PluginAuthReport(plugin_name="google", results=[r1, r2]) + assert 
len(report.results) == 2 + assert report.results[0].service == "YouTube" + assert report.results[1].service == "Drive" + + +def test_plugin_auth_report_frozen() -> None: + report = PluginAuthReport(plugin_name="test") + with pytest.raises(AttributeError): + report.plugin_name = "other" # type: ignore[misc] + + +# --------------------------------------------------------------------------- +# Serialization helpers +# --------------------------------------------------------------------------- + + +def test_auth_check_result_to_dict_minimal() -> None: + r = AuthCheckResult(service="OpenAI", status=AuthStatus.FAIL, message="Key invalid") + d = auth_check_result_to_dict(r) + assert d == { + "service": "OpenAI", + "status": "fail", + "message": "Key invalid", + "identity": "", + "expires_at": "", + "scopes": [], + "required_scopes": [], + "hint": "", + } + + +def test_auth_check_result_to_dict_full() -> None: + r = AuthCheckResult( + service="YouTube", + status=AuthStatus.OK, + message="Connected", + identity="StreamnDad", + expires_at="2026-12-31", + scopes=["youtube"], + required_scopes=["youtube", "youtube.upload"], + hint="Grant upload scope", + ) + d = auth_check_result_to_dict(r) + assert d["service"] == "YouTube" + assert d["status"] == "ok" + assert d["identity"] == "StreamnDad" + assert d["expires_at"] == "2026-12-31" + assert d["scopes"] == ["youtube"] + assert d["required_scopes"] == ["youtube", "youtube.upload"] + assert d["hint"] == "Grant upload scope" + + +def test_plugin_auth_report_to_dict_empty() -> None: + report = PluginAuthReport(plugin_name="empty") + d = plugin_auth_report_to_dict(report) + assert d == {"name": "empty", "results": []} + + +def test_plugin_auth_report_to_dict_with_results() -> None: + r = AuthCheckResult(service="R2", status=AuthStatus.OK, message="ok") + report = PluginAuthReport(plugin_name="cloudflare", results=[r]) + d = plugin_auth_report_to_dict(report) + assert d["name"] == "cloudflare" + assert len(d["results"]) == 1 # type: 
ignore[arg-type] + assert d["results"][0]["service"] == "R2" # type: ignore[index] + assert d["results"][0]["status"] == "ok" # type: ignore[index] diff --git a/tests/unit/models/test_queue.py b/tests/unit/models/test_queue.py new file mode 100644 index 0000000..70b6a55 --- /dev/null +++ b/tests/unit/models/test_queue.py @@ -0,0 +1,320 @@ +"""Tests for queue data models.""" + +from __future__ import annotations + +import pytest + +from reeln.models.queue import ( + PublishStatus, + PublishTargetResult, + QueueItem, + QueueStatus, + RenderQueue, + dict_to_publish_target_result, + dict_to_queue_item, + dict_to_render_queue, + publish_target_result_to_dict, + queue_item_to_dict, + render_queue_to_dict, +) + +# --------------------------------------------------------------------------- +# QueueStatus enum +# --------------------------------------------------------------------------- + + +def test_queue_status_values() -> None: + assert QueueStatus.RENDERED.value == "rendered" + assert QueueStatus.PUBLISHING.value == "publishing" + assert QueueStatus.PUBLISHED.value == "published" + assert QueueStatus.PARTIAL.value == "partial" + assert QueueStatus.FAILED.value == "failed" + assert QueueStatus.REMOVED.value == "removed" + + +def test_queue_status_from_string() -> None: + assert QueueStatus("rendered") is QueueStatus.RENDERED + assert QueueStatus("published") is QueueStatus.PUBLISHED + + +def test_queue_status_invalid() -> None: + with pytest.raises(ValueError, match="not_a_status"): + QueueStatus("not_a_status") + + +# --------------------------------------------------------------------------- +# PublishStatus enum +# --------------------------------------------------------------------------- + + +def test_publish_status_values() -> None: + assert PublishStatus.PENDING.value == "pending" + assert PublishStatus.PUBLISHED.value == "published" + assert PublishStatus.FAILED.value == "failed" + assert PublishStatus.SKIPPED.value == "skipped" + + +def 
test_publish_status_from_string() -> None: + assert PublishStatus("pending") is PublishStatus.PENDING + assert PublishStatus("failed") is PublishStatus.FAILED + + +def test_publish_status_invalid() -> None: + with pytest.raises(ValueError, match="bad"): + PublishStatus("bad") + + +# --------------------------------------------------------------------------- +# PublishTargetResult +# --------------------------------------------------------------------------- + + +def test_publish_target_result_required() -> None: + ptr = PublishTargetResult(target="google") + assert ptr.target == "google" + assert ptr.status is PublishStatus.PENDING + assert ptr.url == "" + assert ptr.error == "" + assert ptr.published_at == "" + + +def test_publish_target_result_full() -> None: + ptr = PublishTargetResult( + target="meta", + status=PublishStatus.PUBLISHED, + url="https://instagram.com/reel/abc", + published_at="2026-04-06T12:00:00Z", + ) + assert ptr.target == "meta" + assert ptr.status is PublishStatus.PUBLISHED + assert ptr.url == "https://instagram.com/reel/abc" + assert ptr.published_at == "2026-04-06T12:00:00Z" + + +def test_publish_target_result_frozen() -> None: + ptr = PublishTargetResult(target="google") + with pytest.raises(AttributeError): + ptr.target = "meta" # type: ignore[misc] + + +def test_publish_target_result_roundtrip() -> None: + ptr = PublishTargetResult( + target="google", + status=PublishStatus.FAILED, + error="API quota exceeded", + ) + d = publish_target_result_to_dict(ptr) + restored = dict_to_publish_target_result(d) + assert restored == ptr + + +def test_publish_target_result_from_dict_defaults() -> None: + ptr = dict_to_publish_target_result({"target": "tiktok"}) + assert ptr.target == "tiktok" + assert ptr.status is PublishStatus.PENDING + assert ptr.url == "" + assert ptr.error == "" + assert ptr.published_at == "" + + +# --------------------------------------------------------------------------- +# QueueItem +# 
--------------------------------------------------------------------------- + + +def _make_item(**overrides: object) -> QueueItem: + defaults: dict[str, object] = { + "id": "abc123def456", + "output": "/tmp/short.mp4", + "game_dir": "/tmp/game", + "status": QueueStatus.RENDERED, + "queued_at": "2026-04-06T12:00:00Z", + } + defaults.update(overrides) + return QueueItem(**defaults) # type: ignore[arg-type] + + +def test_queue_item_required_fields() -> None: + item = _make_item() + assert item.id == "abc123def456" + assert item.output == "/tmp/short.mp4" + assert item.game_dir == "/tmp/game" + assert item.status is QueueStatus.RENDERED + assert item.queued_at == "2026-04-06T12:00:00Z" + + +def test_queue_item_defaults() -> None: + item = _make_item() + assert item.duration_seconds is None + assert item.file_size_bytes is None + assert item.format == "" + assert item.crop_mode == "" + assert item.render_profile == "" + assert item.event_id == "" + assert item.home_team == "" + assert item.away_team == "" + assert item.date == "" + assert item.sport == "" + assert item.level == "" + assert item.tournament == "" + assert item.event_type == "" + assert item.player == "" + assert item.assists == "" + assert item.title == "" + assert item.description == "" + assert item.publish_targets == () + assert item.config_profile == "" + assert item.plugin_inputs == {} + + +def test_queue_item_full() -> None: + targets = ( + PublishTargetResult(target="google", status=PublishStatus.PUBLISHED, url="https://youtu.be/x"), + PublishTargetResult(target="meta", status=PublishStatus.PENDING), + ) + item = _make_item( + duration_seconds=15.5, + file_size_bytes=1024000, + format="1080x1920", + crop_mode="crop", + render_profile="default", + event_id="evt_001", + home_team="North", + away_team="South", + date="2026-04-06", + sport="hockey", + level="2016", + tournament="Spring Cup", + event_type="goal", + player="John Smith", + assists="Jane Doe, Bob Jones", + title="John Smith Goal - North vs 
South", + description="Spring Cup 2016", + publish_targets=targets, + plugin_inputs={"thumbnail_image": "/tmp/thumb.png"}, + ) + assert item.duration_seconds == 15.5 + assert item.file_size_bytes == 1024000 + assert item.home_team == "North" + assert len(item.publish_targets) == 2 + assert item.publish_targets[0].url == "https://youtu.be/x" + assert item.plugin_inputs["thumbnail_image"] == "/tmp/thumb.png" + + +def test_queue_item_frozen() -> None: + item = _make_item() + with pytest.raises(AttributeError): + item.title = "new" # type: ignore[misc] + + +def test_queue_item_roundtrip() -> None: + targets = ( + PublishTargetResult(target="google", status=PublishStatus.PUBLISHED, url="https://youtu.be/x"), + PublishTargetResult(target="meta", status=PublishStatus.PENDING), + ) + item = _make_item( + duration_seconds=15.5, + file_size_bytes=1024000, + format="1080x1920", + crop_mode="crop", + home_team="North", + away_team="South", + player="John", + title="Title", + description="Desc", + publish_targets=targets, + plugin_inputs={"key": "val"}, + ) + d = queue_item_to_dict(item) + restored = dict_to_queue_item(d) + assert restored.id == item.id + assert restored.output == item.output + assert restored.game_dir == item.game_dir + assert restored.status == item.status + assert restored.queued_at == item.queued_at + assert restored.duration_seconds == item.duration_seconds + assert restored.file_size_bytes == item.file_size_bytes + assert restored.format == item.format + assert restored.crop_mode == item.crop_mode + assert restored.home_team == item.home_team + assert restored.away_team == item.away_team + assert restored.player == item.player + assert restored.title == item.title + assert restored.description == item.description + assert len(restored.publish_targets) == 2 + assert restored.publish_targets[0] == targets[0] + assert restored.publish_targets[1] == targets[1] + assert restored.plugin_inputs == item.plugin_inputs + + +def test_queue_item_from_dict_defaults() 
-> None: + item = dict_to_queue_item({ + "id": "x", + "output": "/tmp/out.mp4", + "game_dir": "/tmp/g", + }) + assert item.id == "x" + assert item.status is QueueStatus.RENDERED + assert item.queued_at == "" + assert item.duration_seconds is None + assert item.publish_targets == () + assert item.plugin_inputs == {} + + +def test_queue_item_to_dict_structure() -> None: + item = _make_item(title="My Title") + d = queue_item_to_dict(item) + assert d["id"] == "abc123def456" + assert d["status"] == "rendered" + assert d["title"] == "My Title" + assert isinstance(d["publish_targets"], list) + assert isinstance(d["plugin_inputs"], dict) + + +# --------------------------------------------------------------------------- +# RenderQueue +# --------------------------------------------------------------------------- + + +def test_render_queue_defaults() -> None: + q = RenderQueue() + assert q.version == 1 + assert q.items == () + + +def test_render_queue_with_items() -> None: + item = _make_item() + q = RenderQueue(items=(item,)) + assert len(q.items) == 1 + assert q.items[0].id == "abc123def456" + + +def test_render_queue_frozen() -> None: + q = RenderQueue() + with pytest.raises(AttributeError): + q.version = 2 # type: ignore[misc] + + +def test_render_queue_roundtrip() -> None: + items = (_make_item(id="a"), _make_item(id="b")) + q = RenderQueue(version=1, items=items) + d = render_queue_to_dict(q) + restored = dict_to_render_queue(d) + assert restored.version == q.version + assert len(restored.items) == 2 + assert restored.items[0].id == "a" + assert restored.items[1].id == "b" + + +def test_render_queue_from_dict_defaults() -> None: + q = dict_to_render_queue({}) + assert q.version == 1 + assert q.items == () + + +def test_render_queue_to_dict_structure() -> None: + q = RenderQueue(items=(_make_item(),)) + d = render_queue_to_dict(q) + assert d["version"] == 1 + assert len(d["items"]) == 1 + assert d["items"][0]["id"] == "abc123def456" diff --git 
a/tests/unit/models/test_short.py b/tests/unit/models/test_short.py index 0abec54..c91262b 100644 --- a/tests/unit/models/test_short.py +++ b/tests/unit/models/test_short.py @@ -185,6 +185,17 @@ def test_short_config_branding_custom(tmp_path: Path) -> None: assert cfg.branding == brand +def test_short_config_logo_default_none(tmp_path: Path) -> None: + cfg = ShortConfig(input=tmp_path / "clip.mkv", output=tmp_path / "out.mp4") + assert cfg.logo is None + + +def test_short_config_logo_custom(tmp_path: Path) -> None: + logo = tmp_path / "logo.png" + cfg = ShortConfig(input=tmp_path / "clip.mkv", output=tmp_path / "out.mp4", logo=logo) + assert cfg.logo == logo + + def test_short_config_is_frozen(tmp_path: Path) -> None: cfg = ShortConfig(input=tmp_path / "clip.mkv", output=tmp_path / "out.mp4") with pytest.raises(AttributeError): diff --git a/tests/unit/plugins/test_capabilities.py b/tests/unit/plugins/test_capabilities.py index 73cd225..e85b764 100644 --- a/tests/unit/plugins/test_capabilities.py +++ b/tests/unit/plugins/test_capabilities.py @@ -5,8 +5,9 @@ from pathlib import Path from typing import Any +from reeln.models.auth import AuthCheckResult, AuthStatus from reeln.models.plugin import GeneratorResult -from reeln.plugins.capabilities import Generator, MetadataEnricher, Notifier, Uploader +from reeln.plugins.capabilities import Authenticator, Generator, MetadataEnricher, Notifier, Uploader # --------------------------------------------------------------------------- # Uploader protocol @@ -112,3 +113,52 @@ def test_generator_protocol_satisfied() -> None: def test_generator_has_name() -> None: gen: Generator = _StubGenerator() assert gen.name == "generator" + + +# --------------------------------------------------------------------------- +# Authenticator protocol +# --------------------------------------------------------------------------- + + +class _StubAuthenticator: + name: str = "auth-stub" + + def auth_check(self) -> list[AuthCheckResult]: + return [ + 
AuthCheckResult( + service="TestService", + status=AuthStatus.OK, + message="Connected", + identity="test-user", + ) + ] + + def auth_refresh(self) -> list[AuthCheckResult]: + return [ + AuthCheckResult( + service="TestService", + status=AuthStatus.OK, + message="Refreshed", + identity="test-user", + ) + ] + + +def test_authenticator_protocol_satisfied() -> None: + auth: Authenticator = _StubAuthenticator() + results = auth.auth_check() + assert len(results) == 1 + assert results[0].service == "TestService" + assert results[0].status == AuthStatus.OK + + +def test_authenticator_refresh() -> None: + auth: Authenticator = _StubAuthenticator() + results = auth.auth_refresh() + assert len(results) == 1 + assert results[0].message == "Refreshed" + + +def test_authenticator_has_name() -> None: + auth: Authenticator = _StubAuthenticator() + assert auth.name == "auth-stub" diff --git a/tests/unit/plugins/test_hooks.py b/tests/unit/plugins/test_hooks.py index 9883839..4332fcf 100644 --- a/tests/unit/plugins/test_hooks.py +++ b/tests/unit/plugins/test_hooks.py @@ -23,11 +23,13 @@ def test_hook_values() -> None: assert Hook.ON_SEGMENT_START.value == "on_segment_start" assert Hook.ON_SEGMENT_COMPLETE.value == "on_segment_complete" assert Hook.ON_FRAMES_EXTRACTED.value == "on_frames_extracted" + assert Hook.ON_QUEUE.value == "on_queue" + assert Hook.ON_PUBLISH.value == "on_publish" assert Hook.ON_ERROR.value == "on_error" def test_hook_enum_count() -> None: - assert len(Hook) == 14 + assert len(Hook) == 16 def test_hook_members_unique() -> None: diff --git a/tests/unit/plugins/test_init.py b/tests/unit/plugins/test_init.py index 07c5d01..d2ef0ed 100644 --- a/tests/unit/plugins/test_init.py +++ b/tests/unit/plugins/test_init.py @@ -51,6 +51,9 @@ def test_exports_generator_result() -> None: def test_all_matches_expected() -> None: expected = { + "AuthCheckResult", + "AuthStatus", + "Authenticator", "Generator", "GeneratorResult", "Hook", @@ -61,6 +64,7 @@ def 
test_all_matches_expected() -> None: "InputField", "MetadataEnricher", "Notifier", + "PluginAuthReport", "PluginInputSchema", "Uploader", "activate_plugins", diff --git a/tests/unit/plugins/test_loader.py b/tests/unit/plugins/test_loader.py index 9b8693c..8871beb 100644 --- a/tests/unit/plugins/test_loader.py +++ b/tests/unit/plugins/test_loader.py @@ -10,6 +10,7 @@ import pytest from reeln.core.errors import PluginError +from reeln.models.auth import AuthCheckResult, AuthStatus from reeln.models.config import PluginsConfig from reeln.models.plugin import GeneratorResult from reeln.plugins.hooks import Hook, HookContext @@ -19,10 +20,12 @@ _parse_allowed_hooks, _register_plugin_hooks, activate_plugins, + collect_auth_checks, collect_doctor_checks, discover_plugins, load_enabled_plugins, load_plugin, + refresh_auth, set_enforce_hooks_override, ) from reeln.plugins.registry import HookRegistry, get_registry, reset_registry @@ -914,3 +917,226 @@ class PluginWithInputs: fields = collector.fields_for_command("game_init") assert len(fields) == 1 # Not 2 reset_registry() + + +# --------------------------------------------------------------------------- +# _detect_capabilities — authenticator +# --------------------------------------------------------------------------- + + +def test_detect_capabilities_authenticator() -> None: + """Plugins with auth_check() are detected as authenticator.""" + + class AuthPlugin: + name = "auth" + + def auth_check(self) -> list[AuthCheckResult]: + return [] + + def auth_refresh(self) -> list[AuthCheckResult]: + return [] + + caps = _detect_capabilities(AuthPlugin()) + assert "authenticator" in caps + + +def test_detect_capabilities_no_authenticator() -> None: + """Plugins without auth_check() are not detected as authenticator.""" + caps = _detect_capabilities(_NoCaps()) + assert "authenticator" not in caps + + +# --------------------------------------------------------------------------- +# collect_auth_checks +# 
--------------------------------------------------------------------------- + + +def test_collect_auth_checks_from_plugin() -> None: + """Collects AuthCheckResult instances from plugins that expose auth_check().""" + + class AuthPlugin: + name = "google" + + def auth_check(self) -> list[AuthCheckResult]: + return [ + AuthCheckResult( + service="YouTube", + status=AuthStatus.OK, + message="Connected", + identity="StreamnDad", + ) + ] + + loaded = {"google": AuthPlugin()} + reports = collect_auth_checks(loaded) + + assert len(reports) == 1 + assert reports[0].plugin_name == "google" + assert len(reports[0].results) == 1 + assert reports[0].results[0].service == "YouTube" + assert reports[0].results[0].status == AuthStatus.OK + assert reports[0].results[0].identity == "StreamnDad" + + +def test_collect_auth_checks_skips_plugins_without() -> None: + """Plugins without auth_check() are silently skipped.""" + + class PlainPlugin: + name = "plain" + + loaded = {"plain": PlainPlugin()} + reports = collect_auth_checks(loaded) + + assert reports == [] + + +def test_collect_auth_checks_name_filter() -> None: + """name_filter restricts checks to a single plugin.""" + + class AuthA: + name = "a" + + def auth_check(self) -> list[AuthCheckResult]: + return [AuthCheckResult(service="A", status=AuthStatus.OK, message="ok")] + + class AuthB: + name = "b" + + def auth_check(self) -> list[AuthCheckResult]: + return [AuthCheckResult(service="B", status=AuthStatus.OK, message="ok")] + + loaded = {"a": AuthA(), "b": AuthB()} + reports = collect_auth_checks(loaded, name_filter="b") + + assert len(reports) == 1 + assert reports[0].plugin_name == "b" + + +def test_collect_auth_checks_name_filter_no_match() -> None: + """name_filter for a non-existent plugin returns empty.""" + + class AuthPlugin: + name = "x" + + def auth_check(self) -> list[AuthCheckResult]: + return [AuthCheckResult(service="X", status=AuthStatus.OK, message="ok")] + + loaded = {"x": AuthPlugin()} + reports = 
collect_auth_checks(loaded, name_filter="missing") + + assert reports == [] + + +def test_collect_auth_checks_handles_exception(caplog: pytest.LogCaptureFixture) -> None: + """Failures in auth_check() are caught and reported as FAIL.""" + + class BadPlugin: + name = "bad" + + def auth_check(self) -> list[AuthCheckResult]: + raise RuntimeError("boom") + + loaded = {"bad": BadPlugin()} + with caplog.at_level(logging.WARNING): + reports = collect_auth_checks(loaded) + + assert len(reports) == 1 + assert reports[0].plugin_name == "bad" + assert reports[0].results[0].status == AuthStatus.FAIL + assert "auth_check()" in reports[0].results[0].message + assert "bad" in caplog.text + + +def test_collect_auth_checks_empty() -> None: + """Empty loaded plugins returns empty list.""" + assert collect_auth_checks({}) == [] + + +def test_collect_auth_checks_multiple_plugins() -> None: + """Collects auth checks from multiple plugins.""" + + class PluginA: + name = "a" + + def auth_check(self) -> list[AuthCheckResult]: + return [AuthCheckResult(service="SvcA", status=AuthStatus.OK, message="ok")] + + class PluginB: + name = "b" + + def auth_check(self) -> list[AuthCheckResult]: + return [ + AuthCheckResult(service="SvcB1", status=AuthStatus.OK, message="ok"), + AuthCheckResult(service="SvcB2", status=AuthStatus.WARN, message="warn"), + ] + + loaded = {"a": PluginA(), "b": PluginB()} + reports = collect_auth_checks(loaded) + + assert len(reports) == 2 + total_results = sum(len(r.results) for r in reports) + assert total_results == 3 + + +# --------------------------------------------------------------------------- +# refresh_auth +# --------------------------------------------------------------------------- + + +def test_refresh_auth_success() -> None: + """refresh_auth returns report from auth_refresh().""" + + class AuthPlugin: + name = "tiktok" + + def auth_refresh(self) -> list[AuthCheckResult]: + return [ + AuthCheckResult( + service="TikTok", + status=AuthStatus.OK, + 
message="Refreshed", + ) + ] + + loaded = {"tiktok": AuthPlugin()} + report = refresh_auth(loaded, "tiktok") + + assert report is not None + assert report.plugin_name == "tiktok" + assert report.results[0].status == AuthStatus.OK + + +def test_refresh_auth_plugin_not_loaded() -> None: + """refresh_auth returns None for a plugin that is not loaded.""" + report = refresh_auth({}, "missing") + assert report is None + + +def test_refresh_auth_no_auth_refresh_method() -> None: + """refresh_auth returns None for plugins without auth_refresh().""" + + class PlainPlugin: + name = "plain" + + loaded = {"plain": PlainPlugin()} + report = refresh_auth(loaded, "plain") + assert report is None + + +def test_refresh_auth_handles_exception(caplog: pytest.LogCaptureFixture) -> None: + """Failures in auth_refresh() are caught and reported as FAIL.""" + + class BadPlugin: + name = "bad" + + def auth_refresh(self) -> list[AuthCheckResult]: + raise RuntimeError("refresh failed") + + loaded = {"bad": BadPlugin()} + with caplog.at_level(logging.WARNING): + report = refresh_auth(loaded, "bad") + + assert report is not None + assert report.results[0].status == AuthStatus.FAIL + assert "auth_refresh()" in report.results[0].message + assert "bad" in caplog.text