Improve import command

This commit is contained in:
Joakim Hellsén 2026-01-05 18:46:46 +01:00
commit 1d6c52325c
No known key found for this signature in database
30 changed files with 2628 additions and 554 deletions

View file

@ -21,7 +21,7 @@ repos:
- id: trailing-whitespace - id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.6 rev: v0.14.10
hooks: hooks:
- id: ruff-check - id: ruff-check
args: ["--fix", "--exit-non-zero-on-fix"] args: ["--fix", "--exit-non-zero-on-fix"]
@ -34,6 +34,6 @@ repos:
args: ["--py311-plus"] args: ["--py311-plus"]
- repo: https://github.com/rhysd/actionlint - repo: https://github.com/rhysd/actionlint
rev: v1.7.9 rev: v1.7.10
hooks: hooks:
- id: actionlint - id: actionlint

12
.vscode/settings.json vendored
View file

@ -7,12 +7,17 @@
"appname", "appname",
"ASGI", "ASGI",
"collectstatic", "collectstatic",
"colorama",
"createsuperuser", "createsuperuser",
"dateparser", "dateparser",
"delenv",
"djlint", "djlint",
"docstrings", "docstrings",
"dotenv", "dotenv",
"dropcampaign",
"elif",
"Hellsén", "Hellsén",
"hostnames",
"httpx", "httpx",
"IGDB", "IGDB",
"isort", "isort",
@ -21,9 +26,11 @@
"lovinator", "lovinator",
"Mailgun", "Mailgun",
"makemigrations", "makemigrations",
"McCabe",
"platformdirs", "platformdirs",
"prefetcher", "prefetcher",
"psutil", "psutil",
"pydantic",
"pydocstyle", "pydocstyle",
"pygments", "pygments",
"pyright", "pyright",
@ -34,8 +41,11 @@
"sendgrid", "sendgrid",
"speculationrules", "speculationrules",
"testpass", "testpass",
"tqdm",
"ttvdrops", "ttvdrops",
"venv",
"wrongpassword", "wrongpassword",
"xdist" "xdist"
] ],
"python.analysis.typeCheckingMode": "standard"
} }

View file

@ -12,3 +12,9 @@ uv run python manage.py collectstatic
uv run python manage.py runserver uv run python manage.py runserver
uv run pytest uv run pytest
``` ```
## Import Drops
```bash
uv run python manage.py better_import_drops <file|dir> [--recursive] [--verbose] [--crash-on-error] [--skip-broken-moves]
```

View file

@ -6,14 +6,41 @@ import sys
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
import django_stubs_ext
from dotenv import load_dotenv from dotenv import load_dotenv
from platformdirs import user_data_dir from platformdirs import user_data_dir
logger: logging.Logger = logging.getLogger("ttvdrops.settings") logger: logging.Logger = logging.getLogger("ttvdrops.settings")
django_stubs_ext.monkeypatch()
load_dotenv(verbose=True) load_dotenv(verbose=True)
DEBUG: bool = os.getenv(key="DEBUG", default="True").lower() == "true" TRUE_VALUES: set[str] = {"1", "true", "yes", "y", "on"}
def env_bool(key: str, *, default: bool = False) -> bool:
    """Interpret an environment variable as a boolean flag.

    The spellings "1", "true", "yes", "y" and "on" (any case, surrounding
    whitespace ignored) count as True; every other set value is False.

    Returns:
        bool: Parsed boolean value, or ``default`` when the variable is unset.
    """
    raw: str | None = os.getenv(key)
    if raw is None:
        # Unset variable: fall back rather than treating absence as False.
        return default
    return raw.strip().lower() in {"1", "true", "yes", "y", "on"}
def env_int(key: str, default: int) -> int:
    """Read ``key`` from the environment as an integer.

    Returns:
        int: ``int(value)`` when the variable is set; otherwise ``default``.

    Raises:
        ValueError: If the variable is set but is not a valid integer literal.
    """
    raw: str | None = os.getenv(key)
    if raw is None:
        return default
    return int(raw)
DEBUG: bool = env_bool(key="DEBUG", default=True)
def get_data_dir() -> Path: def get_data_dir() -> Path:
@ -53,12 +80,12 @@ DEFAULT_FROM_EMAIL: str | None = os.getenv(key="EMAIL_HOST_USER", default=None)
EMAIL_HOST: str = os.getenv(key="EMAIL_HOST", default="smtp.gmail.com") EMAIL_HOST: str = os.getenv(key="EMAIL_HOST", default="smtp.gmail.com")
EMAIL_HOST_PASSWORD: str | None = os.getenv(key="EMAIL_HOST_PASSWORD", default=None) EMAIL_HOST_PASSWORD: str | None = os.getenv(key="EMAIL_HOST_PASSWORD", default=None)
EMAIL_HOST_USER: str | None = os.getenv(key="EMAIL_HOST_USER", default=None) EMAIL_HOST_USER: str | None = os.getenv(key="EMAIL_HOST_USER", default=None)
EMAIL_PORT: int = int(os.getenv(key="EMAIL_PORT", default="587")) EMAIL_PORT: int = env_int(key="EMAIL_PORT", default=587)
EMAIL_SUBJECT_PREFIX = "[TTVDrops] " EMAIL_SUBJECT_PREFIX = "[TTVDrops] "
EMAIL_TIMEOUT: int = int(os.getenv(key="EMAIL_TIMEOUT", default="10")) EMAIL_TIMEOUT: int = env_int(key="EMAIL_TIMEOUT", default=10)
EMAIL_USE_LOCALTIME = True EMAIL_USE_LOCALTIME = True
EMAIL_USE_TLS: bool = os.getenv(key="EMAIL_USE_TLS", default="True").lower() == "true" EMAIL_USE_TLS: bool = env_bool(key="EMAIL_USE_TLS", default=True)
EMAIL_USE_SSL: bool = os.getenv(key="EMAIL_USE_SSL", default="False").lower() == "true" EMAIL_USE_SSL: bool = env_bool(key="EMAIL_USE_SSL", default=False)
SERVER_EMAIL: str | None = os.getenv(key="EMAIL_HOST_USER", default=None) SERVER_EMAIL: str | None = os.getenv(key="EMAIL_HOST_USER", default=None)
LOGIN_REDIRECT_URL = "/" LOGIN_REDIRECT_URL = "/"
@ -81,11 +108,13 @@ STATICFILES_DIRS: list[Path] = [BASE_DIR / "static"]
TIME_ZONE = "UTC" TIME_ZONE = "UTC"
WSGI_APPLICATION = "config.wsgi.application" WSGI_APPLICATION = "config.wsgi.application"
INTERNAL_IPS: list[str] = []
if DEBUG: if DEBUG:
INTERNAL_IPS: list[str] = ["127.0.0.1", "localhost"] INTERNAL_IPS = ["127.0.0.1", "localhost"] # pyright: ignore[reportConstantRedefinition]
ALLOWED_HOSTS: list[str] = [".localhost", "127.0.0.1", "[::1]"]
if not DEBUG: if not DEBUG:
ALLOWED_HOSTS: list[str] = ["ttvdrops.lovinator.space"] ALLOWED_HOSTS = ["ttvdrops.lovinator.space"] # pyright: ignore[reportConstantRedefinition]
LOGGING: dict[str, Any] = { LOGGING: dict[str, Any] = {
"version": 1, "version": 1,
@ -124,7 +153,7 @@ MIDDLEWARE: list[str] = [
] ]
TEMPLATES: list[dict[str, str | list[Path] | bool | dict[str, list[str] | list[tuple[str, list[str]]]]]] = [ TEMPLATES: list[dict[str, Any]] = [
{ {
"BACKEND": "django.template.backends.django.DjangoTemplates", "BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [BASE_DIR / "templates"], "DIRS": [BASE_DIR / "templates"],
@ -145,7 +174,9 @@ DATABASES: dict[str, dict[str, str | Path | dict[str, str]]] = {
"ENGINE": "django.db.backends.sqlite3", "ENGINE": "django.db.backends.sqlite3",
"NAME": DATA_DIR / "ttvdrops.sqlite3", "NAME": DATA_DIR / "ttvdrops.sqlite3",
"OPTIONS": { "OPTIONS": {
"init_command": "PRAGMA foreign_keys = ON; PRAGMA journal_mode=WAL; PRAGMA synchronous=NORMAL; PRAGMA mmap_size = 134217728; PRAGMA journal_size_limit = 27103364; PRAGMA cache_size=2000;", # noqa: E501 "init_command": (
"PRAGMA foreign_keys = ON; PRAGMA journal_mode=WAL; PRAGMA synchronous=NORMAL; PRAGMA mmap_size = 134217728; PRAGMA journal_size_limit = 27103364; PRAGMA cache_size=2000;" # noqa: E501
),
"transaction_mode": "IMMEDIATE", "transaction_mode": "IMMEDIATE",
}, },
}, },
@ -154,7 +185,9 @@ DATABASES: dict[str, dict[str, str | Path | dict[str, str]]] = {
TESTING: bool = "test" in sys.argv or "PYTEST_VERSION" in os.environ TESTING: bool = "test" in sys.argv or "PYTEST_VERSION" in os.environ
if not TESTING: if not TESTING:
DEBUG_TOOLBAR_CONFIG: dict[str, str] = {"ROOT_TAG_EXTRA_ATTRS": "hx-preserve"} DEBUG_TOOLBAR_CONFIG: dict[str, str] = {
"ROOT_TAG_EXTRA_ATTRS": "hx-preserve",
}
INSTALLED_APPS = [ # pyright: ignore[reportConstantRedefinition] INSTALLED_APPS = [ # pyright: ignore[reportConstantRedefinition]
*INSTALLED_APPS, *INSTALLED_APPS,
"debug_toolbar", "debug_toolbar",

0
config/tests/__init__.py Normal file
View file

View file

@ -0,0 +1,124 @@
from __future__ import annotations
import importlib
import os
from contextlib import contextmanager
from typing import TYPE_CHECKING
import pytest
from config import settings
if TYPE_CHECKING:
from collections.abc import Callable
from collections.abc import Generator
from collections.abc import Iterator
from pathlib import Path
from types import ModuleType
@pytest.fixture
def reload_settings_module() -> Generator[Callable[..., ModuleType]]:
    """Reload ``config.settings`` with temporary environment overrides.

    Yields:
        Callable[..., settings]: Function that reloads the settings module using
        provided environment overrides.
    """
    # Snapshot of the environment as it was before any test touched it; used
    # both for the secret-key fallback and for the final restore-reload below.
    original_env: dict[str, str] = os.environ.copy()

    @contextmanager
    def temporary_env(env: dict[str, str]) -> Iterator[None]:
        # Swap the entire process environment for `env`, then restore the
        # previous environment even if the body raises.
        previous_env: dict[str, str] = os.environ.copy()
        os.environ.clear()
        os.environ.update(env)
        try:
            yield
        finally:
            os.environ.clear()
            os.environ.update(previous_env)

    def _reload(**env_overrides: str | None) -> ModuleType:
        # Start from the current environment so unrelated variables survive.
        env: dict[str, str] = os.environ.copy()
        # Settings import requires a secret key; reuse the real one if present.
        env.setdefault("DJANGO_SECRET_KEY", original_env.get("DJANGO_SECRET_KEY", "test-secret-key"))
        # A None override means "unset this variable"; anything else sets it.
        for key, value in env_overrides.items():
            if value is None:
                env.pop(key, None)
            else:
                env[key] = value
        # Re-execute config.settings under the modified environment so its
        # module-level env reads (DEBUG, ALLOWED_HOSTS, ...) are recomputed.
        with temporary_env(env):
            return importlib.reload(settings)

    yield _reload

    # Teardown: reload settings once more under the pristine environment so
    # later tests see the module state they would get at import time.
    with temporary_env(original_env):
        importlib.reload(settings)
def test_env_bool_truthy_values(monkeypatch: pytest.MonkeyPatch) -> None:
    """env_bool should treat common truthy strings as True."""
    for candidate in ("1", "true", "yes", "y", "on", "TrUe", " YES "):
        monkeypatch.setenv("FEATURE_FLAG", candidate)
        assert settings.env_bool("FEATURE_FLAG") is True
def test_env_bool_default_when_missing(monkeypatch: pytest.MonkeyPatch) -> None:
    """env_bool should fall back to the provided default when unset."""
    monkeypatch.delenv("MISSING_FLAG", raising=False)
    # Whatever default is supplied must be returned unchanged when unset.
    for fallback in (False, True):
        assert settings.env_bool("MISSING_FLAG", default=fallback) is fallback
def test_env_int_parses_value(monkeypatch: pytest.MonkeyPatch) -> None:
    """env_int should parse integers from the environment."""
    monkeypatch.setenv("MAX_COUNT", "5")
    parsed: int = settings.env_int("MAX_COUNT", 1)
    assert parsed == 5
def test_env_int_returns_default(monkeypatch: pytest.MonkeyPatch) -> None:
    """env_int should return the fallback when unset."""
    monkeypatch.delenv("MAX_COUNT", raising=False)
    result: int = settings.env_int("MAX_COUNT", 3)
    assert result == 3
def test_get_data_dir_uses_platformdirs(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
    """get_data_dir should use platformdirs and create the directory."""
    expected: Path = tmp_path / "data_dir"

    def stub_user_data_dir(**_: str) -> str:
        # Mimic platformdirs: hand back a path that exists on disk.
        expected.mkdir(parents=True, exist_ok=True)
        return str(expected)

    monkeypatch.setattr(settings, "user_data_dir", stub_user_data_dir)
    result: Path = settings.get_data_dir()
    assert result == expected
    assert result.exists() is True
    assert result.is_dir() is True
def test_allowed_hosts_when_debug_false(reload_settings_module: Callable[..., ModuleType]) -> None:
    """When DEBUG is false, ALLOWED_HOSTS should use the production host."""
    module: ModuleType = reload_settings_module(DEBUG="false")
    assert module.DEBUG is False
    assert module.ALLOWED_HOSTS == ["ttvdrops.lovinator.space"]
def test_allowed_hosts_when_debug_true(reload_settings_module: Callable[..., ModuleType]) -> None:
    """When DEBUG is true, development hostnames should be allowed."""
    module: ModuleType = reload_settings_module(DEBUG="1")
    assert module.DEBUG is True
    assert module.ALLOWED_HOSTS == [".localhost", "127.0.0.1", "[::1]"]
def test_debug_defaults_true_when_missing(reload_settings_module: Callable[..., ModuleType]) -> None:
    """DEBUG should default to True when the environment variable is missing."""
    module: ModuleType = reload_settings_module(DEBUG=None)
    assert module.DEBUG is True

View file

@ -17,7 +17,7 @@ urlpatterns: list[URLResolver] | list[URLPattern | URLResolver] = [ # type: ign
if not settings.TESTING: if not settings.TESTING:
# Import debug_toolbar lazily to avoid ImportError when not installed in testing environments # Import debug_toolbar lazily to avoid ImportError when not installed in testing environments
from debug_toolbar.toolbar import debug_toolbar_urls # type: ignore[import-untyped] # pyright: ignore[reportMissingTypeStubs] from debug_toolbar.toolbar import debug_toolbar_urls # pyright: ignore[reportMissingTypeStubs]
urlpatterns = [ urlpatterns = [
*urlpatterns, *urlpatterns,
@ -26,4 +26,7 @@ if not settings.TESTING:
# Serve media in development # Serve media in development
if settings.DEBUG: if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT,
)

View file

@ -17,6 +17,9 @@ dependencies = [
"pydantic>=2.12.5", "pydantic>=2.12.5",
"tqdm>=4.67.1", "tqdm>=4.67.1",
"colorama>=0.4.6", "colorama>=0.4.6",
"django-stubs-ext>=5.2.8",
"django-stubs[compatible-mypy]>=5.2.8",
"types-pygments>=2.19.0.20251121",
] ]
[dependency-groups] [dependency-groups]
@ -27,9 +30,6 @@ DJANGO_SETTINGS_MODULE = "config.settings"
python_files = ["test_*.py", "*_test.py"] python_files = ["test_*.py", "*_test.py"]
addopts = ["--reuse-db", "--no-migrations"] addopts = ["--reuse-db", "--no-migrations"]
[tool.pyright]
exclude = ["**/migrations/**"]
[tool.ruff] [tool.ruff]
lint.select = ["ALL"] lint.select = ["ALL"]
@ -76,7 +76,7 @@ lint.ignore = [
preview = true preview = true
unsafe-fixes = true unsafe-fixes = true
fix = true fix = true
line-length = 160 line-length = 120
[tool.ruff.lint.per-file-ignores] [tool.ruff.lint.per-file-ignores]
"**/tests/**" = [ "**/tests/**" = [
@ -94,3 +94,9 @@ line-length = 160
[tool.djlint] [tool.djlint]
profile = "django" profile = "django"
ignore = "H021" ignore = "H021"
[tool.mypy]
plugins = ["mypy_django_plugin.main"]
[tool.django-stubs]
django_settings_module = "config.settings"

View file

@ -7,14 +7,14 @@
<!-- Campaign Title --> <!-- Campaign Title -->
{% if campaign.game %} {% if campaign.game %}
<h1 id="campaign-title"> <h1 id="campaign-title">
<a href="{% url 'twitch:game_detail' campaign.game.id %}">{{ campaign.game.get_game_name }}</a> - {{ campaign.clean_name }} <a href="{% url 'twitch:game_detail' campaign.game.twitch_id %}">{{ campaign.game.get_game_name }}</a> - {{ campaign.clean_name }}
</h1> </h1>
{% else %} {% else %}
<h1 id="campaign-title">{{ campaign.clean_name }}</h1> <h1 id="campaign-title">{{ campaign.clean_name }}</h1>
{% endif %} {% endif %}
{% if owner %} {% if owner %}
<p id="campaign-owner"> <p id="campaign-owner">
<a href="{% url 'twitch:organization_detail' owner.id %}">{{ owner.name }}</a> <a href="{% url 'twitch:organization_detail' owner.twitch_id %}">{{ owner.name }}</a>
</p> </p>
{% endif %} {% endif %}
<!-- Campaign image --> <!-- Campaign image -->
@ -23,7 +23,7 @@
height="160" height="160"
width="160" width="160"
src="{{ campaign.image_best_url|default:campaign.image_url }}" src="{{ campaign.image_best_url|default:campaign.image_url }}"
alt="{{ campaign.name }}"> alt="{{ campaign.name }}" />
{% endif %} {% endif %}
<!-- Campaign description --> <!-- Campaign description -->
<p id="campaign-description">{{ campaign.description|linebreaksbr }}</p> <p id="campaign-description">{{ campaign.description|linebreaksbr }}</p>
@ -98,7 +98,7 @@
<h5>Allowed Channels</h5> <h5>Allowed Channels</h5>
<div id="allowed-channels" style="margin-bottom: 20px;"> <div id="allowed-channels" style="margin-bottom: 20px;">
{% for channel in allowed_channels %} {% for channel in allowed_channels %}
<a href="{% url 'twitch:channel_detail' channel.id %}" <a href="{% url 'twitch:channel_detail' channel.twitch_id %}"
style="display: inline-block; style="display: inline-block;
margin: 2px 5px 2px 0; margin: 2px 5px 2px 0;
padding: 3px 8px; padding: 3px 8px;
@ -139,7 +139,7 @@
style="object-fit: cover; style="object-fit: cover;
margin-right: 3px" margin-right: 3px"
src="{{ benefit.image_best_url|default:benefit.image_asset_url }}" src="{{ benefit.image_best_url|default:benefit.image_asset_url }}"
alt="{{ benefit.name }}"> alt="{{ benefit.name }}" />
{% endif %} {% endif %}
{% endfor %} {% endfor %}
</td> </td>

View file

@ -49,16 +49,16 @@
{% if campaigns %} {% if campaigns %}
{% regroup campaigns by game as campaigns_by_game %} {% regroup campaigns by game as campaigns_by_game %}
{% for game_group in campaigns_by_game %} {% for game_group in campaigns_by_game %}
<section id="game-group-{{ game_group.grouper.id }}" <section id="game-group-{{ game_group.grouper.twitch_id }}"
style="margin-bottom: 3rem"> style="margin-bottom: 3rem">
<div style="display: flex; gap: 1rem;"> <div style="display: flex; gap: 1rem;">
<div style="flex-shrink: 0;"> <div style="flex-shrink: 0;">
{% if game_group.grouper.box_art_base_url %} {% if game_group.grouper.box_art_best_url %}
<img src="{{ game_group.grouper.box_art_base_url }}" <img src="{{ game_group.grouper.box_art_best_url }}"
alt="Box art for {{ game_group.grouper.display_name }}" alt="Box art for {{ game_group.grouper.display_name }}"
width="120" width="120"
height="160" height="160"
style="border-radius: 8px"> style="border-radius: 8px" />
{% else %} {% else %}
<div style="width: 120px; <div style="width: 120px;
height: 160px; height: 160px;
@ -69,7 +69,7 @@
justify-content: center; justify-content: center;
font-size: 1rem"> font-size: 1rem">
🎮 🎮
<br> <br />
No Image No Image
</div> </div>
{% endif %} {% endif %}
@ -78,15 +78,15 @@
{% comment %} Find this header section in your template {% endcomment %} {% comment %} Find this header section in your template {% endcomment %}
<header style="margin-bottom: 1rem;"> <header style="margin-bottom: 1rem;">
<h2 style="margin: 0 0 0.5rem 0;"> <h2 style="margin: 0 0 0.5rem 0;">
<a id="game-link-{{ game_group.grouper.id }}" <a id="game-link-{{ game_group.grouper.twitch_id }}"
href="{% url 'twitch:game_detail' game_group.grouper.id %}" href="{% url 'twitch:game_detail' game_group.grouper.twitch_id %}"
style="text-decoration: none">{{ game_group.grouper.display_name|default:game_group.grouper.name|default:game_group.grouper.slug|default:game_group.grouper.id }}</a> style="text-decoration: none">{{ game_group.grouper.display_name|default:game_group.grouper.name|default:game_group.grouper.slug|default:game_group.grouper.id }}</a>
</h2> </h2>
{% comment %} MODIFICATION: Check if the owner exists before creating the link {% endcomment %} {% comment %} Check if the owner exists and has a valid ID before creating the link {% endcomment %}
{% if game_group.grouper.owner %} {% if game_group.grouper.owner and game_group.grouper.owner.twitch_id %}
<p style="margin: 0;"> <p style="margin: 0;">
<a id="org-link-{{ game_group.grouper.owner.id }}" <a id="org-link-{{ game_group.grouper.owner.twitch_id }}"
href="{% url 'twitch:organization_detail' game_group.grouper.owner.id %}" href="{% url 'twitch:organization_detail' game_group.grouper.owner.twitch_id %}"
style="text-decoration: none">{{ game_group.grouper.owner.name }}</a> style="text-decoration: none">{{ game_group.grouper.owner.name }}</a>
</p> </p>
{% endif %} {% endif %}
@ -94,23 +94,23 @@
<div style="overflow-x: auto;"> <div style="overflow-x: auto;">
<div style="display: flex; gap: 1rem; min-width: max-content;"> <div style="display: flex; gap: 1rem; min-width: max-content;">
{% for campaign in game_group.list %} {% for campaign in game_group.list %}
<article id="campaign-{{ campaign.id }}" <article id="campaign-{{ campaign.twitch_id }}"
style="display: flex; style="display: flex;
flex-direction: column; flex-direction: column;
align-items: flex-start; align-items: flex-start;
padding: 0.5rem; padding: 0.5rem;
flex-shrink: 0"> flex-shrink: 0">
<div> <div>
<a id="campaign-link-{{ campaign.id }}" <a id="campaign-link-{{ campaign.twitch_id }}"
href="{% url 'twitch:campaign_detail' campaign.id %}" href="{% url 'twitch:campaign_detail' campaign.twitch_id %}"
style="text-decoration: none"> style="text-decoration: none">
{% if campaign.image_best_url or campaign.image_url %} {% if campaign.image_best_url or campaign.image_url %}
<img id="campaign-image-{{ campaign.id }}" <img id="campaign-image-{{ campaign.twitch_id }}"
src="{{ campaign.image_best_url|default:campaign.image_url }}" src="{{ campaign.image_best_url|default:campaign.image_url }}"
alt="Campaign artwork for {{ campaign.name }}" alt="Campaign artwork for {{ campaign.name }}"
width="120" width="120"
height="120" height="120"
style="border-radius: 4px"> style="border-radius: 4px" />
{% else %} {% else %}
<div style="width: 120px; <div style="width: 120px;
height: 120px; height: 120px;
@ -121,11 +121,11 @@
justify-content: center; justify-content: center;
font-size: 0.9rem"> font-size: 0.9rem">
📦 📦
<br> <br />
No Image No Image
</div> </div>
{% endif %} {% endif %}
<h4 id="campaign-name-{{ campaign.id }}" <h4 id="campaign-name-{{ campaign.twitch_id }}"
style="margin: 0.5rem 0; style="margin: 0.5rem 0;
text-align: left">{{ campaign.clean_name }}</h4> text-align: left">{{ campaign.clean_name }}</h4>
</a> </a>
@ -142,13 +142,14 @@
</div> </div>
<div style="margin-top: 0.5rem;"> <div style="margin-top: 0.5rem;">
{% if campaign.start_at <= now and campaign.end_at >= now %} {% if campaign.start_at <= now and campaign.end_at >= now %}
<span id="campaign-status-{{ campaign.id }}" <span id="campaign-status-{{ campaign.twitch_id }}"
style="font-weight: 600; style="font-weight: 600;
color: #28a745">Active</span> color: #28a745">Active</span>
{% elif campaign.start_at > now %} {% elif campaign.start_at > now %}
<span id="campaign-status-{{ campaign.id }}" style="font-weight: 600;">Upcoming</span> <span id="campaign-status-{{ campaign.twitch_id }}"
style="font-weight: 600">Upcoming</span>
{% else %} {% else %}
<span id="campaign-status-{{ campaign.id }}" <span id="campaign-status-{{ campaign.twitch_id }}"
style="font-weight: 600; style="font-weight: 600;
color: #dc3545">Expired</span> color: #dc3545">Expired</span>
{% endif %} {% endif %}
@ -168,7 +169,7 @@
<h2 style="margin: 0 0 1rem 0;">No Campaigns Found</h2> <h2 style="margin: 0 0 1rem 0;">No Campaigns Found</h2>
<p style="margin: 0; font-size: 1.1rem;"> <p style="margin: 0; font-size: 1.1rem;">
No campaigns match your current filters. No campaigns match your current filters.
<br> <br />
Try adjusting your search criteria. Try adjusting your search criteria.
</p> </p>
</section> </section>

View file

@ -26,9 +26,9 @@
<table id="active-campaigns-table"> <table id="active-campaigns-table">
<tbody> <tbody>
{% for campaign in active_campaigns %} {% for campaign in active_campaigns %}
<tr id="campaign-row-{{ campaign.id }}"> <tr id="campaign-row-{{ campaign.twitch_id }}">
<td> <td>
<a href="{% url 'twitch:campaign_detail' campaign.id %}">{{ campaign.clean_name }}</a> <a href="{% url 'twitch:campaign_detail' campaign.twitch_id %}">{{ campaign.clean_name }}</a>
{% if campaign.time_based_drops.all %} {% if campaign.time_based_drops.all %}
<div class="campaign-benefits"> <div class="campaign-benefits">
{% for benefit in campaign.sorted_benefits %} {% for benefit in campaign.sorted_benefits %}
@ -40,7 +40,7 @@
height="24" height="24"
style="display: inline-block; style="display: inline-block;
margin-right: 4px; margin-right: 4px;
vertical-align: middle"> vertical-align: middle" />
{% endif %} {% endif %}
{{ benefit.name }} {{ benefit.name }}
</span> </span>
@ -70,9 +70,9 @@
<table id="upcoming-campaigns-table"> <table id="upcoming-campaigns-table">
<tbody> <tbody>
{% for campaign in upcoming_campaigns %} {% for campaign in upcoming_campaigns %}
<tr id="campaign-row-{{ campaign.id }}"> <tr id="campaign-row-{{ campaign.twitch_id }}">
<td> <td>
<a href="{% url 'twitch:campaign_detail' campaign.id %}">{{ campaign.clean_name }}</a> <a href="{% url 'twitch:campaign_detail' campaign.twitch_id %}">{{ campaign.clean_name }}</a>
{% if campaign.time_based_drops.all %} {% if campaign.time_based_drops.all %}
<div class="campaign-benefits"> <div class="campaign-benefits">
{% for benefit in campaign.sorted_benefits %} {% for benefit in campaign.sorted_benefits %}
@ -84,7 +84,7 @@
height="24" height="24"
style="display: inline-block; style="display: inline-block;
margin-right: 4px; margin-right: 4px;
vertical-align: middle"> vertical-align: middle" />
{% endif %} {% endif %}
{{ benefit.name }} {{ benefit.name }}
</span> </span>
@ -114,9 +114,9 @@
<table id="expired-campaigns-table"> <table id="expired-campaigns-table">
<tbody> <tbody>
{% for campaign in expired_campaigns %} {% for campaign in expired_campaigns %}
<tr id="campaign-row-{{ campaign.id }}"> <tr id="campaign-row-{{ campaign.twitch_id }}">
<td> <td>
<a href="{% url 'twitch:campaign_detail' campaign.id %}">{{ campaign.clean_name }}</a> <a href="{% url 'twitch:campaign_detail' campaign.twitch_id %}">{{ campaign.clean_name }}</a>
{% if campaign.time_based_drops.all %} {% if campaign.time_based_drops.all %}
<div class="campaign-benefits"> <div class="campaign-benefits">
{% for benefit in campaign.sorted_benefits %} {% for benefit in campaign.sorted_benefits %}
@ -128,7 +128,7 @@
height="24" height="24"
style="display: inline-block; style="display: inline-block;
margin-right: 4px; margin-right: 4px;
vertical-align: middle"> vertical-align: middle" />
{% endif %} {% endif %}
{{ benefit.name }} {{ benefit.name }}
</span> </span>

View file

@ -14,7 +14,7 @@
id="search" id="search"
name="search" name="search"
value="{{ search_query }}" value="{{ search_query }}"
placeholder="Search channels..."> placeholder="Search channels..." />
<button id="search-button" type="submit">Search</button> <button id="search-button" type="submit">Search</button>
{% if search_query %} {% if search_query %}
<a href="{% url 'twitch:channel_list' %}">Clear</a> <a href="{% url 'twitch:channel_list' %}">Clear</a>
@ -35,7 +35,7 @@
<tr id="channel-row-{{ channel.id }}"> <tr id="channel-row-{{ channel.id }}">
<td> <td>
<a id="channel-link-{{ channel.id }}" <a id="channel-link-{{ channel.id }}"
href="{% url 'twitch:channel_detail' channel.id %}">{{ channel.display_name }}</a> href="{% url 'twitch:channel_detail' channel.twitch_id %}">{{ channel.display_name }}</a>
</td> </td>
<td>{{ channel.name }}</td> <td>{{ channel.name }}</td>
<td>{{ channel.campaign_count }}</td> <td>{{ channel.campaign_count }}</td>

View file

@ -13,7 +13,7 @@
<ul id="games-without-owner-list"> <ul id="games-without-owner-list">
{% for game in games_without_owner %} {% for game in games_without_owner %}
<li id="game-{{ game.id }}"> <li id="game-{{ game.id }}">
<a href="{% url 'twitch:game_detail' game.id %}">{{ game.display_name }}</a> (ID: {{ game.id }}) <a href="{% url 'twitch:game_detail' game.twitch_id %}">{{ game.display_name }}</a> (ID: {{ game.twitch_id }})
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>
@ -26,9 +26,9 @@
{% if broken_image_campaigns %} {% if broken_image_campaigns %}
<ul id="broken-image-campaigns-list"> <ul id="broken-image-campaigns-list">
{% for c in broken_image_campaigns %} {% for c in broken_image_campaigns %}
<li id="campaign-{{ c.id }}"> <li id="campaign-{{ c.twitch_id }}">
<a href="{% url 'twitch:campaign_detail' c.id %}">{{ c.name }}</a> <a href="{% url 'twitch:campaign_detail' c.twitch_id %}">{{ c.name }}</a>
(Game: <a href="{% url 'twitch:game_detail' c.game.id %}">{{ c.game.display_name }}</a>) (Game: <a href="{% url 'twitch:game_detail' c.game.twitch_id %}">{{ c.game.display_name }}</a>)
- URL: {{ c.image_best_url|default:c.image_url|default:'(empty)' }} - URL: {{ c.image_best_url|default:c.image_url|default:'(empty)' }}
</li> </li>
{% endfor %} {% endfor %}
@ -49,7 +49,7 @@
{{ b.name }} {{ b.name }}
{# Check if the relationship path to the game exists #} {# Check if the relationship path to the game exists #}
{% if first_drop and first_drop.campaign and first_drop.campaign.game %} {% if first_drop and first_drop.campaign and first_drop.campaign.game %}
(Game: <a href="{% url 'twitch:game_detail' first_drop.campaign.game.id %}">{{ first_drop.campaign.game.display_name }}</a>) (Game: <a href="{% url 'twitch:game_detail' first_drop.campaign.game.twitch_id %}">{{ first_drop.campaign.game.display_name }}</a>)
{% else %} {% else %}
(Game: Not linked) (Game: Not linked)
{% endif %} {% endif %}
@ -67,9 +67,9 @@
{% if active_missing_image %} {% if active_missing_image %}
<ul id="active-missing-image-list"> <ul id="active-missing-image-list">
{% for c in active_missing_image %} {% for c in active_missing_image %}
<li id="campaign-{{ c.id }}"> <li id="campaign-{{ c.twitch_id }}">
<a href="{% url 'twitch:campaign_detail' c.id %}">{{ c.name }}</a> <a href="{% url 'twitch:campaign_detail' c.twitch_id %}">{{ c.name }}</a>
(Game: <a href="{% url 'twitch:game_detail' c.game.id %}">{{ c.game.display_name }}</a>) (Game: <a href="{% url 'twitch:game_detail' c.game.twitch_id %}">{{ c.game.display_name }}</a>)
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>
@ -82,10 +82,10 @@
{% if drops_without_benefits %} {% if drops_without_benefits %}
<ul id="drops-without-benefits-list"> <ul id="drops-without-benefits-list">
{% for d in drops_without_benefits %} {% for d in drops_without_benefits %}
<li id="drop-{{ d.id }}"> <li id="drop-{{ d.twitch_id }}">
{{ d.name }} {{ d.name }}
(Campaign: <a href="{% url 'twitch:campaign_detail' d.campaign.id %}">{{ d.campaign.name }}</a> (Campaign: <a href="{% url 'twitch:campaign_detail' d.campaign.twitch_id %}">{{ d.campaign.name }}</a>
in Game: <a href="{% url 'twitch:game_detail' d.campaign.game.id %}">{{ d.campaign.game.display_name }}</a>) in Game: <a href="{% url 'twitch:game_detail' d.campaign.game.twitch_id %}">{{ d.campaign.game.display_name }}</a>)
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>
@ -98,9 +98,9 @@
{% if invalid_date_campaigns %} {% if invalid_date_campaigns %}
<ul id="invalid-date-campaigns-list"> <ul id="invalid-date-campaigns-list">
{% for c in invalid_date_campaigns %} {% for c in invalid_date_campaigns %}
<li id="campaign-{{ c.id }}"> <li id="campaign-{{ c.twitch_id }}">
<a href="{% url 'twitch:campaign_detail' c.id %}">{{ c.name }}</a> <a href="{% url 'twitch:campaign_detail' c.twitch_id %}">{{ c.name }}</a>
(Game: <a href="{% url 'twitch:game_detail' c.game.id %}">{{ c.game.display_name }}</a>) (Game: <a href="{% url 'twitch:game_detail' c.game.twitch_id %}">{{ c.game.display_name }}</a>)
- Start: {{ c.start_at|default:'(none)' }} / End: {{ c.end_at|default:'(none)' }} - Start: {{ c.start_at|default:'(none)' }} / End: {{ c.end_at|default:'(none)' }}
</li> </li>
{% endfor %} {% endfor %}

View file

@ -14,45 +14,21 @@
height="160" height="160"
width="160" width="160"
src="{{ game.box_art }}" src="{{ game.box_art }}"
alt="{{ game.name }}"> alt="{{ game.name }}" />
{% endif %} {% endif %}
<!-- Game owner --> <!-- Game owner -->
{% if owner %} {% if owner %}
<small><a id="owner-link" <small><a id="owner-link"
href="{% url 'twitch:organization_detail' owner.id %}">{{ owner.name }}</a></small> href="{% url 'twitch:organization_detail' owner.twitch_id %}">{{ owner.name }}</a></small>
{% endif %}
{% if user.is_authenticated %}
<form id="notification-form"
method="post"
action="{% url 'twitch:subscribe_notifications' game_id=game.id %}">
{% csrf_token %}
<div>
<input type="checkbox"
id="found"
name="notify_found"
{% if subscription and subscription.notify_found %}checked{% endif %} />
<label for="found">🔔 Get notified as soon as a drop for {{ game.display_name }} appears on Twitch.</label>
</div>
<div>
<input type="checkbox"
id="live"
name="notify_live"
{% if subscription and subscription.notify_live %}checked{% endif %} />
<label for="live">🎮 Get notified when the drop is live and ready to be farmed.</label>
</div>
<button id="save-notifications-button" type="submit">Save notification preferences</button>
</form>
{% else %}
<p id="login-prompt">Login to subscribe!</p>
{% endif %} {% endif %}
{% if active_campaigns %} {% if active_campaigns %}
<h5 id="active-campaigns-header">Active Campaigns</h5> <h5 id="active-campaigns-header">Active Campaigns</h5>
<table id="active-campaigns-table"> <table id="active-campaigns-table">
<tbody> <tbody>
{% for campaign in active_campaigns %} {% for campaign in active_campaigns %}
<tr id="campaign-row-{{ campaign.id }}"> <tr id="campaign-row-{{ campaign.twitch_id }}">
<td> <td>
<a href="{% url 'twitch:campaign_detail' campaign.id %}">{{ campaign.clean_name }}</a> <a href="{% url 'twitch:campaign_detail' campaign.twitch_id %}">{{ campaign.clean_name }}</a>
{% if campaign.time_based_drops.all %} {% if campaign.time_based_drops.all %}
<div class="campaign-benefits"> <div class="campaign-benefits">
{% comment %}Show unique benefits sorted alphabetically{% endcomment %} {% comment %}Show unique benefits sorted alphabetically{% endcomment %}
@ -65,7 +41,7 @@
height="24" height="24"
style="display: inline-block; style="display: inline-block;
margin-right: 4px; margin-right: 4px;
vertical-align: middle"> vertical-align: middle" />
{% endif %} {% endif %}
{{ benefit.name }} {{ benefit.name }}
</span> </span>
@ -86,9 +62,9 @@
<table id="upcoming-campaigns-table"> <table id="upcoming-campaigns-table">
<tbody> <tbody>
{% for campaign in upcoming_campaigns %} {% for campaign in upcoming_campaigns %}
<tr id="campaign-row-{{ campaign.id }}"> <tr id="campaign-row-{{ campaign.twitch_id }}">
<td> <td>
<a href="{% url 'twitch:campaign_detail' campaign.id %}">{{ campaign.clean_name }}</a> <a href="{% url 'twitch:campaign_detail' campaign.twitch_id %}">{{ campaign.clean_name }}</a>
{% if campaign.time_based_drops.all %} {% if campaign.time_based_drops.all %}
<div class="campaign-benefits"> <div class="campaign-benefits">
{% for benefit in campaign.sorted_benefits %} {% for benefit in campaign.sorted_benefits %}
@ -100,7 +76,7 @@
height="24" height="24"
style="display: inline-block; style="display: inline-block;
margin-right: 4px; margin-right: 4px;
vertical-align: middle"> vertical-align: middle" />
{% endif %} {% endif %}
{{ benefit.name }} {{ benefit.name }}
</span> </span>
@ -121,9 +97,9 @@
<table id="expired-campaigns-table"> <table id="expired-campaigns-table">
<tbody> <tbody>
{% for campaign in expired_campaigns %} {% for campaign in expired_campaigns %}
<tr id="campaign-row-{{ campaign.id }}"> <tr id="campaign-row-{{ campaign.twitch_id }}">
<td> <td>
<a href="{% url 'twitch:campaign_detail' campaign.id %}">{{ campaign.clean_name }}</a> <a href="{% url 'twitch:campaign_detail' campaign.twitch_id %}">{{ campaign.clean_name }}</a>
{% if campaign.time_based_drops.all %} {% if campaign.time_based_drops.all %}
<div class="campaign-benefits"> <div class="campaign-benefits">
{% comment %}Show unique benefits sorted alphabetically{% endcomment %} {% comment %}Show unique benefits sorted alphabetically{% endcomment %}
@ -136,7 +112,7 @@
height="24" height="24"
style="display: inline-block; style="display: inline-block;
margin-right: 4px; margin-right: 4px;
vertical-align: middle"> vertical-align: middle" />
{% endif %} {% endif %}
{{ benefit.name }} {{ benefit.name }}
</span> </span>

View file

@ -22,12 +22,12 @@
flex: 1 1 160px; flex: 1 1 160px;
text-align: center"> text-align: center">
<div style="margin-bottom: 0.25rem;"> <div style="margin-bottom: 0.25rem;">
{% if item.game.box_art_base_url %} {% if item.game.box_art_best_url %}
<img src="{{ item.game.box_art_base_url }}" <img src="{{ item.game.box_art_best_url }}"
alt="Box art for {{ item.game.display_name }}" alt="Box art for {{ item.game.display_name }}"
width="180" width="180"
height="240" height="240"
style="border-radius: 8px"> style="border-radius: 8px" />
{% else %} {% else %}
<div style="width: 180px; <div style="width: 180px;
height: 240px; height: 240px;
@ -39,13 +39,13 @@
font-size: 0.8rem; font-size: 0.8rem;
margin: 0 auto"> margin: 0 auto">
🎮 🎮
<br> <br />
No Image No Image
</div> </div>
{% endif %} {% endif %}
</div> </div>
<h4 style="margin: 0;"> <h4 style="margin: 0;">
<a href="{% url 'twitch:game_detail' item.game.id %}" <a href="{% url 'twitch:game_detail' item.game.twitch_id %}"
style="text-decoration: none; style="text-decoration: none;
color: inherit">{{ item.game.display_name }}</a> color: inherit">{{ item.game.display_name }}</a>
</h4> </h4>

View file

@ -14,7 +14,7 @@
<ul style="list-style: none; padding: 0; margin: 0;"> <ul style="list-style: none; padding: 0; margin: 0;">
{% for item in games %} {% for item in games %}
<li id="game-{{ item.game.id }}"> <li id="game-{{ item.game.id }}">
<a href="{% url 'twitch:game_detail' item.game.id %}">{{ item.game.display_name }}</a> <a href="{% url 'twitch:game_detail' item.game.twitch_id %}">{{ item.game.display_name }}</a>
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>

View file

@ -8,7 +8,7 @@
<ul id="org-list"> <ul id="org-list">
{% for organization in orgs %} {% for organization in orgs %}
<li id="org-{{ organization.id }}"> <li id="org-{{ organization.id }}">
<a href="{% url 'twitch:organization_detail' organization.id %}">{{ organization.name }}</a> <a href="{% url 'twitch:organization_detail' organization.twitch_id %}">{{ organization.name }}</a>
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>

View file

@ -31,7 +31,7 @@
<ul id="games-list"> <ul id="games-list">
{% for game in games %} {% for game in games %}
<li id="game-{{ game.id }}"> <li id="game-{{ game.id }}">
<a href="{% url 'twitch:game_detail' pk=game.id %}">{{ game }}</a> <a href="{% url 'twitch:game_detail' game.twitch_id %}">{{ game }}</a>
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>

View file

@ -31,7 +31,7 @@ class OrganizationFeed(Feed):
def items(self) -> list[Organization]: def items(self) -> list[Organization]:
"""Return the latest 100 organizations.""" """Return the latest 100 organizations."""
return list(Organization.objects.order_by("-id")[:100]) return list(Organization.objects.order_by("-updated_at")[:100])
def item_title(self, item: Model) -> SafeText: def item_title(self, item: Model) -> SafeText:
"""Return the organization name as the item title.""" """Return the organization name as the item title."""
@ -83,7 +83,9 @@ class DropCampaignFeed(Feed):
def items(self) -> list[DropCampaign]: def items(self) -> list[DropCampaign]:
"""Return the latest 100 drop campaigns.""" """Return the latest 100 drop campaigns."""
return list(DropCampaign.objects.select_related("game").order_by("-added_at")[:100]) return list(
DropCampaign.objects.select_related("game").order_by("-added_at")[:100],
)
def item_title(self, item: Model) -> SafeText: def item_title(self, item: Model) -> SafeText:
"""Return the campaign name as the item title (SafeText for RSS).""" """Return the campaign name as the item title (SafeText for RSS)."""
@ -112,7 +114,11 @@ class DropCampaignFeed(Feed):
description += f"<p><strong>Starts:</strong> {start_at.strftime('%Y-%m-%d %H:%M %Z')}</p>" description += f"<p><strong>Starts:</strong> {start_at.strftime('%Y-%m-%d %H:%M %Z')}</p>"
if end_at: if end_at:
description += f"<p><strong>Ends:</strong> {end_at.strftime('%Y-%m-%d %H:%M %Z')}</p>" description += f"<p><strong>Ends:</strong> {end_at.strftime('%Y-%m-%d %H:%M %Z')}</p>"
drops: QuerySet[TimeBasedDrop] | None = getattr(item, "time_based_drops", None) drops: QuerySet[TimeBasedDrop] | None = getattr(
item,
"time_based_drops",
None,
)
if drops: if drops:
drops_qs: QuerySet[TimeBasedDrop] = drops.select_related().prefetch_related("benefits").all() drops_qs: QuerySet[TimeBasedDrop] = drops.select_related().prefetch_related("benefits").all()
if drops_qs: if drops_qs:
@ -133,7 +139,7 @@ class DropCampaignFeed(Feed):
for benefit in drop.benefits.all(): for benefit in drop.benefits.all():
if getattr(benefit, "image_asset_url", None): if getattr(benefit, "image_asset_url", None):
description += format_html( description += format_html(
'<img height="60" width="60" style="object-fit: cover; margin-right: 5px;" src="{}" alt="{}">', '<img height="60" width="60" style="object-fit: cover; margin-right: 5px;" src="{}" alt="{}">', # noqa: E501
benefit.image_asset_url, benefit.image_asset_url,
benefit.name, benefit.name,
) )
@ -144,7 +150,9 @@ class DropCampaignFeed(Feed):
) )
description += placeholder_img description += placeholder_img
description += "</td>" description += "</td>"
description += f'<td style="border: 1px solid #ddd; padding: 8px;">{getattr(drop, "name", str(drop))}</td>' description += (
f'<td style="border: 1px solid #ddd; padding: 8px;">{getattr(drop, "name", str(drop))}</td>'
)
requirements: str = "" requirements: str = ""
if getattr(drop, "required_minutes_watched", None): if getattr(drop, "required_minutes_watched", None):
requirements = f"{drop.required_minutes_watched} minutes watched" requirements = f"{drop.required_minutes_watched} minutes watched"
@ -161,7 +169,9 @@ class DropCampaignFeed(Feed):
period += start_at.strftime("%Y-%m-%d %H:%M %Z") period += start_at.strftime("%Y-%m-%d %H:%M %Z")
if end_at is not None: if end_at is not None:
if period: if period:
period += " - " + end_at.strftime("%Y-%m-%d %H:%M %Z") period += " - " + end_at.strftime(
"%Y-%m-%d %H:%M %Z",
)
else: else:
period = end_at.strftime("%Y-%m-%d %H:%M %Z") period = end_at.strftime("%Y-%m-%d %H:%M %Z")
description += f'<td style="border: 1px solid #ddd; padding: 8px;">{period}</td>' description += f'<td style="border: 1px solid #ddd; padding: 8px;">{period}</td>'
@ -169,7 +179,10 @@ class DropCampaignFeed(Feed):
description += "</tbody></table><br>" description += "</tbody></table><br>"
details_url: str | None = getattr(item, "details_url", None) details_url: str | None = getattr(item, "details_url", None)
if details_url: if details_url:
description += format_html('<p><a href="{}">About this drop</a></p>', details_url) description += format_html(
'<p><a href="{}">About this drop</a></p>',
details_url,
)
return SafeText(description) return SafeText(description)
def item_link(self, item: Model) -> str: def item_link(self, item: Model) -> str:
@ -177,7 +190,10 @@ class DropCampaignFeed(Feed):
return reverse("twitch:campaign_detail", args=[item.pk]) return reverse("twitch:campaign_detail", args=[item.pk])
def item_pubdate(self, item: Model) -> datetime.datetime: def item_pubdate(self, item: Model) -> datetime.datetime:
"""Returns the publication date to the feed item. Fallback to updated_at or now if missing.""" """Returns the publication date to the feed item.
Fallback to updated_at or now if missing.
"""
start_at: datetime.datetime | None = getattr(item, "start_at", None) start_at: datetime.datetime | None = getattr(item, "start_at", None)
if start_at: if start_at:
return start_at return start_at
@ -214,7 +230,10 @@ class DropCampaignFeed(Feed):
return item.image_url return item.image_url
def item_enclosure_length(self, item: DropCampaign) -> int: # noqa: ARG002 def item_enclosure_length(self, item: DropCampaign) -> int: # noqa: ARG002
"""Returns the length of the enclosure. Currently not tracked, so return 0.""" """Returns the length of the enclosure.
Currently not tracked, so return 0.
"""
return 0 return 0
def item_enclosure_mime_type(self, item: DropCampaign) -> str: # noqa: ARG002 def item_enclosure_mime_type(self, item: DropCampaign) -> str: # noqa: ARG002

View file

@ -1,11 +1,13 @@
from __future__ import annotations from __future__ import annotations
import json
import os import os
import sys import sys
from concurrent.futures import ProcessPoolExecutor from datetime import UTC
from itertools import repeat from datetime import datetime
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from typing import Literal
from colorama import Fore from colorama import Fore
from colorama import Style from colorama import Style
@ -13,28 +15,101 @@ from colorama import init as colorama_init
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from django.core.management.base import CommandError from django.core.management.base import CommandError
from django.core.management.base import CommandParser from django.core.management.base import CommandParser
from django.db import DatabaseError
from pydantic import ValidationError from pydantic import ValidationError
from tqdm import tqdm from tqdm import tqdm
from twitch.models import Channel from twitch.models import Channel
from twitch.models import DropBenefit from twitch.models import DropBenefit
from twitch.models import DropBenefitEdge
from twitch.models import DropCampaign from twitch.models import DropCampaign
from twitch.models import Game from twitch.models import Game
from twitch.models import Organization from twitch.models import Organization
from twitch.schemas import ViewerDropsDashboardPayload from twitch.models import TimeBasedDrop
from twitch.schemas import DropBenefitEdgeSchema
from twitch.schemas import DropBenefitSchema
from twitch.schemas import GameSchema
from twitch.schemas import GraphQLResponse
from twitch.schemas import OrganizationSchema
from twitch.schemas import TimeBasedDropSchema
from twitch.utils import parse_date
def move_failed_validation_file(file_path: Path) -> Path: def get_broken_directory_root() -> Path:
"""Get the root broken directory path from environment or default.
Reads from TTVDROPS_BROKEN_DIR environment variable if set,
otherwise defaults to a directory in the current user's home.
Returns:
Path to the root broken directory.
"""
env_path: str | None = os.environ.get("TTVDROPS_BROKEN_DIR")
if env_path:
return Path(env_path)
# Default to ~/ttvdrops/broken/
home: Path = Path.home()
return home / "ttvdrops" / "broken"
def get_imported_directory_root() -> Path:
    """Return the root directory for successfully imported files.

    The TTVDROPS_IMPORTED_DIR environment variable takes precedence when it
    holds a non-empty value; otherwise a default under the current user's
    home directory is used.

    Returns:
        Path to the root imported directory.
    """
    configured: str | None = os.environ.get("TTVDROPS_IMPORTED_DIR")
    # An empty string is treated the same as unset: fall back to the default.
    if not configured:
        return Path.home() / "ttvdrops" / "imported"
    return Path(configured)
def _build_broken_directory(
    reason: str,
    operation_name: str | None = None,
) -> Path:
    """Compute (and create) a deeply nested broken directory for triage.

    Directory pattern: <broken_root>/<reason>/<operation>/<YYYY>/<MM>/<DD>.
    This keeps unrelated failures isolated and easy to browse later.

    Args:
        reason: High-level reason bucket (e.g., validation_failed).
        operation_name: Optional operationName extracted from the payload.

    Returns:
        Path to the directory where the file should live.
    """
    reason_segment: str = reason.replace(" ", "_")
    operation_segment: str = (operation_name or "unknown_op").replace(" ", "_")
    stamp: datetime = datetime.now(tz=UTC)
    target: Path = (
        get_broken_directory_root()
        / reason_segment
        / operation_segment
        / f"{stamp:%Y}"
        / f"{stamp:%m}"
        / f"{stamp:%d}"
    )
    target.mkdir(parents=True, exist_ok=True)
    return target
def move_failed_validation_file(file_path: Path, operation_name: str | None = None) -> Path:
    """Moves a file that failed validation to a 'broken' subdirectory.

    Args:
        file_path: Path to the file that failed validation
        operation_name: Optional GraphQL operation name for finer grouping

    Returns:
        Path to the 'broken' directory where the file was moved
    """
    destination_dir: Path = _build_broken_directory(
        reason="validation_failed",
        operation_name=operation_name,
    )
    # Keep the original filename so the payload is easy to trace back.
    file_path.rename(destination_dir / file_path.name)
    return destination_dir
def move_file_to_broken_subdir(file_path: Path, subdir: str) -> Path: def move_file_to_broken_subdir(
"""Move file to a nested broken/<subdir> directory and return that directory. file_path: Path,
subdir: str,
operation_name: str | None = None,
) -> Path:
"""Move file to broken/<subdir> and return that directory path.
Args: Args:
file_path: The file to move. file_path: The file to move.
subdir: Subdirectory name under "broken" (e.g., the matched keyword). subdir: Subdirectory name under "broken" (e.g., the matched keyword).
operation_name: Optional GraphQL operation name for finer grouping
Returns: Returns:
Path to the directory where the file was moved. Path to the directory where the file was moved.
""" """
broken_dir: Path = Path.home() / "broken" / subdir broken_dir: Path = _build_broken_directory(
broken_dir.mkdir(parents=True, exist_ok=True) reason=subdir,
operation_name=operation_name,
)
target_file: Path = broken_dir / file_path.name target_file: Path = broken_dir / file_path.name
file_path.rename(target_file) file_path.rename(target_file)
@ -61,6 +143,45 @@ def move_file_to_broken_subdir(file_path: Path, subdir: str) -> Path:
return broken_dir return broken_dir
def move_completed_file(file_path: Path, operation_name: str | None = None) -> Path:
    """Move a successfully processed file into an operation-named directory.

    Moves to <imported_root>/<operation_name>/

    Args:
        file_path: Path to the processed JSON file.
        operation_name: GraphQL operationName extracted from the payload.

    Returns:
        Path to the directory where the file was moved.
    """
    # Sanitize the operation name so it is always one safe path segment.
    segment: str = operation_name or "unknown_op"
    for character in (" ", "/", "\\"):
        segment = segment.replace(character, "_")

    destination: Path = get_imported_directory_root() / segment
    destination.mkdir(parents=True, exist_ok=True)
    file_path.rename(destination / file_path.name)
    return destination
# Pre-compute keyword search patterns for faster detection
_KNOWN_NON_CAMPAIGN_PATTERNS: dict[str, str] = {
keyword: f'"operationName": "{keyword}"'
for keyword in [
"ChannelPointsContext",
"ClaimCommunityPoints",
"DirectoryPage_Game",
"DropCurrentSessionContext",
"DropsPage_ClaimDropRewards",
"OnsiteNotifications_DeleteNotification",
"PlaybackAccessToken",
"streamPlaybackAccessToken",
"VideoPlayerStreamInfoOverlayChannel",
]
}
def detect_non_campaign_keyword(raw_text: str) -> str | None:
    """Detect if payload is a known non-drop-campaign response.

    Scans the raw JSON text for known operationName values that identify
    payloads the importer deliberately skips.

    Args:
        raw_text: The raw JSON text to scan.

    Returns:
        The matched keyword, or None if no match found.
    """
    # First matching pattern wins; None when nothing matches.
    return next(
        (
            keyword
            for keyword, pattern in _KNOWN_NON_CAMPAIGN_PATTERNS.items()
            if pattern in raw_text
        ),
        None,
    )
def extract_operation_name_from_parsed(
payload: dict[str, Any] | list[Any],
) -> str | None:
"""Extract GraphQL operationName from an already parsed JSON payload.
This is safer than substring scanning. The expected location is
`payload["extensions"]["operationName"]`, but we guard against missing
keys.
Args:
payload: Parsed JSON object or list.
Returns:
The operation name if found, otherwise None.
"""
# Be defensive; never let provenance extraction break the import.
if not isinstance(payload, dict):
return None
extensions: dict[str, Any] | None = payload.get("extensions")
if isinstance(extensions, dict):
op_name: str | None = extensions.get("operationName")
if isinstance(op_name, str):
return op_name
return None
class Command(BaseCommand): class Command(BaseCommand):
"""Import Twitch drop campaign data from a JSON file or directory of JSON files.""" """Import Twitch drop campaign data from a JSON file or directory."""
help = "Import Twitch drop campaign data from a JSON file or directory" help = "Import Twitch drop campaign data from a JSON file or directory"
requires_migrations_checks = True requires_migrations_checks = True
# In-memory caches prevent repeated DB lookups during batch imports,
# cutting query volume and keeping runtime predictable.
game_cache: dict[str, Game] = {} game_cache: dict[str, Game] = {}
organization_cache: dict[str, Organization] = {} organization_cache: dict[str, Organization] = {}
drop_campaign_cache: dict[str, DropCampaign] = {} drop_campaign_cache: dict[str, DropCampaign] = {}
@ -105,13 +242,45 @@ class Command(BaseCommand):
def add_arguments(self, parser: CommandParser) -> None: def add_arguments(self, parser: CommandParser) -> None:
"""Populate the command with arguments.""" """Populate the command with arguments."""
parser.add_argument("path", type=str, help="Path to JSON file or directory") parser.add_argument(
parser.add_argument("--recursive", action="store_true", help="Recursively search directories for JSON files") "path",
parser.add_argument("--crash-on-error", action="store_true", help="Crash the command on first error instead of continuing") type=str,
parser.add_argument("--verbose", action="store_true", help="Print per-file success messages") help="Path to JSON file or directory",
)
parser.add_argument(
"--recursive",
action="store_true",
help="Recursively search directories for JSON files",
)
parser.add_argument(
"--crash-on-error",
dest="crash_on_error",
action="store_true",
help="Crash the command on first error instead of continuing",
)
parser.add_argument(
"--verbose",
action="store_true",
help="Print per-file success messages",
)
parser.add_argument(
"--skip-broken-moves",
dest="skip_broken_moves",
action="store_true",
help=(
"Do not move files to the broken directory on failures; useful"
" during testing to avoid unnecessary file moves"
),
)
def pre_fill_cache(self) -> None: def pre_fill_cache(self) -> None:
"""Load all existing IDs from DB into memory to avoid N+1 queries.""" """Load all existing IDs from DB into memory."""
self.game_cache = {}
self.organization_cache = {}
self.drop_campaign_cache = {}
self.channel_cache = {}
self.benefit_cache = {}
cache_operations: list[tuple[str, type, str]] = [ cache_operations: list[tuple[str, type, str]] = [
("Games", Game, "game_cache"), ("Games", Game, "game_cache"),
("Organizations", Organization, "organization_cache"), ("Organizations", Organization, "organization_cache"),
@ -120,14 +289,386 @@ class Command(BaseCommand):
("Benefits", DropBenefit, "benefit_cache"), ("Benefits", DropBenefit, "benefit_cache"),
] ]
with tqdm(cache_operations, desc="Loading caches", unit="cache", colour="cyan") as progress_bar: try:
for name, model, cache_attr in progress_bar: with tqdm(cache_operations, desc="Loading caches", unit="cache", colour="cyan") as progress_bar:
progress_bar.set_description(f"Loading {name}") for name, model, cache_attr in progress_bar:
cache: dict[str, Any] = {str(obj.twitch_id): obj for obj in model.objects.all()} self.load_cache_for_model(progress_bar, name, model, cache_attr)
setattr(self, cache_attr, cache) tqdm.write("")
progress_bar.write(f" {Fore.GREEN}{Style.RESET_ALL} {name}: {len(cache):,}") except (DatabaseError, OSError, RuntimeError, ValueError, TypeError):
# If cache loading fails completely, just use empty caches
tqdm.write(f"{Fore.YELLOW}{Style.RESET_ALL} Cache preload skipped (database error)\n")
tqdm.write("") def load_cache_for_model(self, progress_bar: tqdm, name: str, model: type, cache_attr: str) -> None:
"""Load cache for a specific model and attach to the command instance.
Args:
progress_bar: TQDM progress bar instance.
name: Human-readable name of the model being cached.
model: Django model class to query.
cache_attr: Attribute name on the command instance to store the cache.
"""
progress_bar.set_description(f"Loading {name}")
try:
cache: dict[str, Any] = {str(obj.twitch_id): obj for obj in model.objects.all()}
setattr(self, cache_attr, cache)
progress_bar.write(f" {Fore.GREEN}{Style.RESET_ALL} {name}: {len(cache):,}")
except (DatabaseError, OSError, RuntimeError, ValueError, TypeError) as e:
# Database error - skip this cache
msg: str = f" {Fore.YELLOW}{Style.RESET_ALL} {name}: Could not load ({type(e).__name__})"
progress_bar.write(msg)
setattr(self, cache_attr, {})
def _validate_campaigns(
self,
campaigns_found: list[dict[str, Any]],
file_path: Path,
options: dict[str, Any],
) -> list[GraphQLResponse]:
"""Validate campaign data using Pydantic schema.
Args:
campaigns_found: List of raw campaign dictionaries.
file_path: Path to the file being processed.
options: Command options.
Returns:
List of validated Pydantic GraphQLResponse models.
Raises:
ValidationError: If campaign data fails Pydantic validation
and crash-on-error is enabled.
"""
valid_campaigns: list[GraphQLResponse] = []
if isinstance(campaigns_found, list):
for campaign in campaigns_found:
if isinstance(campaign, dict):
try:
response: GraphQLResponse = GraphQLResponse.model_validate(campaign)
if response.data.current_user and response.data.current_user.drop_campaigns:
valid_campaigns.append(response)
except ValidationError as e:
tqdm.write(
f"{Fore.RED}{Style.RESET_ALL} Validation failed for an entry in {file_path.name}: {e}",
)
# Move invalid inputs out of the hot path so future runs can progress.
if not options.get("skip_broken_moves"):
op_name: str | None = extract_operation_name_from_parsed(campaign)
move_failed_validation_file(file_path, operation_name=op_name)
# optionally crash early to surface schema issues.
if options.get("crash_on_error"):
raise
continue
return valid_campaigns
def _get_or_create_organization(
self,
org_data: OrganizationSchema,
) -> Organization:
"""Get or create an organization from cache or database.
Args:
org_data: Organization data from Pydantic model.
Returns:
Organization instance.
"""
# Prefer cache hits to avoid hitting the DB on every campaign item.
if org_data.twitch_id in self.organization_cache:
return self.organization_cache[org_data.twitch_id]
org_obj, created = Organization.objects.update_or_create(
twitch_id=org_data.twitch_id,
defaults={
"name": org_data.name,
},
)
if created:
tqdm.write(f"{Fore.GREEN}{Style.RESET_ALL} Created new organization: {org_data.name}")
# Cache the organization for future lookups.
self.organization_cache[org_data.twitch_id] = org_obj
return org_obj
def _get_or_create_game(
self,
game_data: GameSchema,
org_obj: Organization,
) -> Game:
"""Get or create a game from cache or database.
Args:
game_data: Game data from Pydantic model.
org_obj: Organization that owns this game.
Returns:
Game instance.
"""
if game_data.twitch_id in self.game_cache:
game_obj: Game = self.game_cache[game_data.twitch_id]
# Maintenance: Ensure the existing game is linked to the
# correct owner (Sometimes games are imported without owner
# data first). Use owner_id to avoid triggering a query.
# Correct stale owner linkage that may exist from earlier
# partial imports.
if game_obj.owner_id != org_obj.pk: # type: ignore[attr-defined] # Django adds _id suffix for FK fields
game_obj.owner = org_obj
game_obj.save(update_fields=["owner"])
return game_obj
game_obj, created = Game.objects.update_or_create(
twitch_id=game_data.twitch_id,
defaults={
"display_name": game_data.display_name,
"box_art": game_data.box_art_url,
"owner": org_obj,
},
)
if created:
tqdm.write(f"{Fore.GREEN}{Style.RESET_ALL} Created new game: {game_data.display_name}")
self.game_cache[game_data.twitch_id] = game_obj
return game_obj
def _should_skip_campaign_update(
self,
cached_obj: DropCampaign,
defaults: dict[str, Any],
game_obj: Game,
) -> bool:
"""Check if campaign update can be skipped based on cache comparison.
Args:
cached_obj: Cached campaign object.
defaults: New campaign data.
game_obj: Associated game object.
Returns:
True if no update needed, False otherwise.
"""
# Use game_id (Django's auto-generated FK field) to avoid
# triggering a query. Compare FK IDs to avoid ORM reads; keeps
# this a pure in-memory check.
cached_game_id: int | None = getattr(cached_obj, "game_id", None)
# Ensure game object has a primary key (should always be true
# at this point)
game_id: int | None = game_obj.pk
# Short-circuit updates when nothing changed; reduces write
# load and log noise while keeping caches accurate.
return bool(
cached_obj.name == defaults["name"]
and cached_obj.start_at == defaults["start_at"]
and cached_obj.end_at == defaults["end_at"]
and cached_obj.details_url == defaults["details_url"]
and cached_obj.account_link_url == defaults["account_link_url"]
and cached_game_id == game_id
and cached_obj.is_account_connected == defaults["is_account_connected"],
)
    def process_campaigns(
        self,
        campaigns_found: list[dict[str, Any]],
        file_path: Path,
        options: dict[str, Any],
    ) -> None:
        """Process, validate, and import campaign data.

        With dependency resolution and caching.

        Args:
            campaigns_found: List of raw campaign dictionaries to process.
            file_path: Path to the file being processed.
            options: Command options dictionary.

        Raises:
            ValueError: If datetime parsing fails for campaign dates and
                crash-on-error is enabled.
        """
        # Schema-validate first so the import loop only sees usable payloads.
        valid_campaigns: list[GraphQLResponse] = self._validate_campaigns(
            campaigns_found=campaigns_found,
            file_path=file_path,
            options=options,
        )

        for response in valid_campaigns:
            # _validate_campaigns already filters on current_user; re-check defensively.
            if not response.data.current_user:
                continue

            for drop_campaign in response.data.current_user.drop_campaigns:
                # Resolve FK dependencies (organization -> game) before the campaign row.
                org_obj: Organization = self._get_or_create_organization(
                    org_data=drop_campaign.owner,
                )
                game_obj: Game = self._get_or_create_game(
                    game_data=drop_campaign.game,
                    org_obj=org_obj,
                )

                start_at_dt: datetime | None = parse_date(drop_campaign.start_at)
                end_at_dt: datetime | None = parse_date(drop_campaign.end_at)
                if start_at_dt is None or end_at_dt is None:
                    # A campaign without a valid time window cannot be stored:
                    # skip it, or crash when --crash-on-error was passed.
                    tqdm.write(f"{Fore.RED}{Style.RESET_ALL} Invalid datetime in campaign: {drop_campaign.name}")
                    if options.get("crash_on_error"):
                        msg: str = f"Failed to parse datetime for campaign {drop_campaign.name}"
                        raise ValueError(msg)
                    continue

                defaults: dict[str, str | datetime | Game | bool] = {
                    "name": drop_campaign.name,
                    "game": game_obj,
                    "start_at": start_at_dt,
                    "end_at": end_at_dt,
                    "details_url": drop_campaign.details_url,
                    "account_link_url": drop_campaign.account_link_url,
                    # "self" is the schema field holding the viewer's account-link state.
                    "is_account_connected": (drop_campaign.self.is_account_connected),
                }

                # Pure in-memory comparison lets us skip no-op DB writes entirely.
                if drop_campaign.twitch_id in self.drop_campaign_cache:
                    cached_obj: DropCampaign = self.drop_campaign_cache[drop_campaign.twitch_id]
                    if self._should_skip_campaign_update(cached_obj=cached_obj, defaults=defaults, game_obj=game_obj):
                        if options.get("verbose"):
                            tqdm.write(f"{Fore.YELLOW}{Style.RESET_ALL} Skipped (No changes): {drop_campaign.name}")
                        continue

                campaign_obj, created = DropCampaign.objects.update_or_create(
                    twitch_id=drop_campaign.twitch_id,
                    defaults=defaults,
                )
                if created:
                    tqdm.write(f"{Fore.GREEN}{Style.RESET_ALL} Created new campaign: {drop_campaign.name}")
                self.drop_campaign_cache[drop_campaign.twitch_id] = campaign_obj

                # NOTE(review): a newly created campaign is logged twice (the
                # "Created new campaign" line above plus this one) — confirm intended.
                action: Literal["Imported new", "Updated"] = "Imported new" if created else "Updated"
                tqdm.write(f"{Fore.GREEN}{Style.RESET_ALL} {action} campaign: {drop_campaign.name}")

                # Record provenance (GraphQL operationName) only when it changed.
                if (
                    response.extensions
                    and response.extensions.operation_name
                    and campaign_obj.operation_name != response.extensions.operation_name
                ):
                    campaign_obj.operation_name = response.extensions.operation_name
                    campaign_obj.save(update_fields=["operation_name"])

                # Import nested time-based drops (and their benefits) last.
                if drop_campaign.time_based_drops:
                    self._process_time_based_drops(
                        time_based_drops_schema=drop_campaign.time_based_drops,
                        campaign_obj=campaign_obj,
                    )
def _process_time_based_drops(
self,
time_based_drops_schema: list[TimeBasedDropSchema],
campaign_obj: DropCampaign,
) -> None:
"""Process time-based drops for a campaign.
Args:
time_based_drops_schema: List of TimeBasedDrop Pydantic schemas.
campaign_obj: The DropCampaign database object.
"""
for drop_schema in time_based_drops_schema:
start_at_dt: datetime | None = parse_date(drop_schema.start_at)
end_at_dt: datetime | None = parse_date(drop_schema.end_at)
drop_defaults: dict[str, str | int | datetime | DropCampaign] = {
"campaign": campaign_obj,
"name": drop_schema.name,
"required_subs": drop_schema.required_subs,
}
if drop_schema.required_minutes_watched is not None:
drop_defaults["required_minutes_watched"] = drop_schema.required_minutes_watched
if start_at_dt is not None:
drop_defaults["start_at"] = start_at_dt
if end_at_dt is not None:
drop_defaults["end_at"] = end_at_dt
drop_obj, created = TimeBasedDrop.objects.update_or_create(
twitch_id=drop_schema.twitch_id,
defaults=drop_defaults,
)
if created:
tqdm.write(f"{Fore.GREEN}{Style.RESET_ALL} Created TimeBasedDrop: {drop_schema.name}")
self._process_benefit_edges(
benefit_edges_schema=drop_schema.benefit_edges,
drop_obj=drop_obj,
)
def _get_or_update_benefit(self, benefit_schema: DropBenefitSchema) -> DropBenefit:
    """Return a DropBenefit, updating stale cached values when needed."""
    # Target field values derived from the incoming schema.
    desired: dict[str, str | int | datetime | bool | None] = {
        "name": benefit_schema.name,
        "image_asset_url": benefit_schema.image_asset_url,
        "entitlement_limit": benefit_schema.entitlement_limit,
        "is_ios_available": benefit_schema.is_ios_available,
        "distribution_type": benefit_schema.distribution_type,
    }
    if benefit_schema.created_at:
        parsed_created: datetime | None = parse_date(benefit_schema.created_at)
        if parsed_created:
            desired["created_at"] = parsed_created

    cached: DropBenefit | None = self.benefit_cache.get(benefit_schema.twitch_id)
    if cached:
        # Cache hit: write back only the fields that actually differ.
        stale: list[str] = [field for field, wanted in desired.items() if getattr(cached, field) != wanted]
        for field in stale:
            setattr(cached, field, desired[field])
        if stale:
            cached.save(update_fields=stale)
        self.benefit_cache[benefit_schema.twitch_id] = cached
        return cached

    # Cache miss: fall back to a DB-level upsert and remember the result.
    benefit_row, was_created = DropBenefit.objects.update_or_create(
        twitch_id=benefit_schema.twitch_id,
        defaults=desired,
    )
    if was_created:
        tqdm.write(f"{Fore.GREEN}✓{Style.RESET_ALL} Created DropBenefit: {benefit_schema.name}")
    self.benefit_cache[benefit_schema.twitch_id] = benefit_row
    return benefit_row
def _process_benefit_edges(
    self,
    benefit_edges_schema: list[DropBenefitEdgeSchema],
    drop_obj: TimeBasedDrop,
) -> None:
    """Attach each benefit edge from the payload to a time-based drop.

    Args:
        benefit_edges_schema: Parsed DropBenefitEdge schemas from the payload.
        drop_obj: The TimeBasedDrop row the benefits belong to.
    """
    for edge in benefit_edges_schema:
        # Resolve (and possibly refresh) the benefit before linking it.
        benefit_row: DropBenefit = self._get_or_update_benefit(benefit_schema=edge.benefit)
        _link, was_created = DropBenefitEdge.objects.update_or_create(
            drop=drop_obj,
            benefit=benefit_row,
            defaults={"entitlement_limit": edge.entitlement_limit},
        )
        if was_created:
            tqdm.write(f"{Fore.GREEN}✓{Style.RESET_ALL} Linked benefit: {edge.benefit.name}{drop_obj.name}")
def handle(self, *args, **options) -> None: # noqa: ARG002 def handle(self, *args, **options) -> None: # noqa: ARG002
"""Main entry point for the command. """Main entry point for the command.
@ -152,7 +693,7 @@ class Command(BaseCommand):
except KeyboardInterrupt: except KeyboardInterrupt:
tqdm.write(self.style.WARNING("\n\nInterrupted by user!")) tqdm.write(self.style.WARNING("\n\nInterrupted by user!"))
tqdm.write(self.style.WARNING("Shutting down gracefully...")) tqdm.write(self.style.WARNING("Shutting down gracefully..."))
sys.exit(130) sys.exit(130) # 128 + 2 (Keyboard Interrupt)
def process_json_files(self, input_path: Path, options: dict) -> None: def process_json_files(self, input_path: Path, options: dict) -> None:
"""Process multiple JSON files in a directory. """Process multiple JSON files in a directory.
@ -168,37 +709,39 @@ class Command(BaseCommand):
failed_count = 0 failed_count = 0
error_count = 0 error_count = 0
with ( with tqdm(
ProcessPoolExecutor() as executor, total=len(json_files),
tqdm( desc="Processing",
total=len(json_files), unit="file",
desc="Processing", bar_format=("{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_fmt}]"),
unit="file", colour="green",
bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_fmt}]", dynamic_ncols=True,
colour="green", ) as progress_bar:
dynamic_ncols=True,
) as progress_bar,
):
# Choose a reasonable chunk_size to reduce overhead for huge file counts
cpu_count = os.cpu_count() or 1
chunk_size = max(1, min(1000, len(json_files) // (cpu_count * 8 or 1)))
results_iter = executor.map(self.process_file_worker, json_files, repeat(options), chunksize=chunk_size)
for file_path in json_files: for file_path in json_files:
try: try:
result: dict[str, bool | str] = next(results_iter) result: dict[str, bool | str] = self.process_file_worker(
file_path=file_path,
options=options,
)
if result["success"]: if result["success"]:
success_count += 1 success_count += 1
if options.get("verbose"): if options.get("verbose"):
progress_bar.write(f"{Fore.GREEN}{Style.RESET_ALL} {file_path.name}") progress_bar.write(f"{Fore.GREEN}{Style.RESET_ALL} {file_path.name}")
else: else:
failed_count += 1 failed_count += 1
reason = result.get("reason") if isinstance(result, dict) else None reason: bool | str | None = result.get("reason") if isinstance(result, dict) else None
if reason: if reason:
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name}{result['broken_dir']}/{file_path.name} ({reason})") progress_bar.write(
f"{Fore.RED}{Style.RESET_ALL} "
f"{file_path.name}{result['broken_dir']}/"
f"{file_path.name} ({reason})",
)
else: else:
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name}{result['broken_dir']}/{file_path.name}") progress_bar.write(
f"{Fore.RED}{Style.RESET_ALL} "
f"{file_path.name}{result['broken_dir']}/"
f"{file_path.name}",
)
except (OSError, ValueError, KeyError) as e: except (OSError, ValueError, KeyError) as e:
error_count += 1 error_count += 1
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name} (error: {e})") progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name} (error: {e})")
@ -207,15 +750,27 @@ class Command(BaseCommand):
progress_bar.set_postfix_str(f"{success_count} | ✗ {failed_count + error_count}", refresh=True) progress_bar.set_postfix_str(f"{success_count} | ✗ {failed_count + error_count}", refresh=True)
progress_bar.update(1) progress_bar.update(1)
self.print_processing_summary(json_files, success_count, failed_count, error_count) self.print_processing_summary(
json_files,
success_count,
failed_count,
error_count,
)
def print_processing_summary(self, json_files: list[Path], success_count: int, failed_count: int, error_count: int) -> None: def print_processing_summary(
self,
json_files: list[Path],
success_count: int,
failed_count: int,
error_count: int,
) -> None:
"""Print a summary of the batch processing results. """Print a summary of the batch processing results.
Args: Args:
json_files: List of JSON file paths that were processed. json_files: List of JSON file paths that were processed.
success_count: Number of files processed successfully. success_count: Number of files processed successfully.
failed_count: Number of files that failed validation and were moved. failed_count: Number of files that failed validation and were
moved.
error_count: Number of files that encountered unexpected errors. error_count: Number of files that encountered unexpected errors.
""" """
tqdm.write("\n" + "=" * 50) tqdm.write("\n" + "=" * 50)
@ -227,7 +782,11 @@ class Command(BaseCommand):
tqdm.write(f"Total: {len(json_files)}") tqdm.write(f"Total: {len(json_files)}")
tqdm.write("=" * 50) tqdm.write("=" * 50)
def collect_json_files(self, options: dict, input_path: Path) -> list[Path]: def collect_json_files(
self,
options: dict,
input_path: Path,
) -> list[Path]:
"""Collect JSON files from the specified directory. """Collect JSON files from the specified directory.
Args: Args:
@ -246,9 +805,12 @@ class Command(BaseCommand):
json_files = [f for f in input_path.iterdir() if f.is_file() and f.suffix == ".json"] json_files = [f for f in input_path.iterdir() if f.is_file() and f.suffix == ".json"]
return json_files return json_files
@staticmethod def process_file_worker(
def process_file_worker(file_path: Path, options: dict) -> dict[str, bool | str]: self,
"""Worker function for parallel processing of files. file_path: Path,
options: dict,
) -> dict[str, bool | str]:
"""Worker function for processing files.
Args: Args:
file_path: Path to the JSON file to process file_path: Path to the JSON file to process
@ -256,26 +818,49 @@ class Command(BaseCommand):
Raises: Raises:
ValidationError: If the JSON file fails validation ValidationError: If the JSON file fails validation
json.JSONDecodeError: If the JSON file cannot be parsed
Returns: Returns:
Dict with success status and optional broken_dir path Dict with success status and optional broken_dir path
""" """
try: try:
raw_text: str = file_path.read_text(encoding="utf-8", errors="ignore") raw_text: str = file_path.read_text(encoding="utf-8", errors="ignore")
# Fast pre-filter: check for known non-campaign keywords and move early
matched: str | None = detect_non_campaign_keyword(raw_text) matched: str | None = detect_non_campaign_keyword(raw_text)
if matched: if matched:
broken_dir: Path = move_file_to_broken_subdir(file_path, matched) if not options.get("skip_broken_moves"):
return {"success": False, "broken_dir": str(broken_dir), "reason": f"matched '{matched}'"} broken_dir: Path = move_file_to_broken_subdir(file_path, matched)
return {"success": False, "broken_dir": str(broken_dir), "reason": f"matched '{matched}'"}
return {"success": False, "broken_dir": "(skipped)", "reason": f"matched '{matched}'"}
if "dropCampaign" not in raw_text:
if not options.get("skip_broken_moves"):
broken_dir = move_file_to_broken_subdir(file_path, "no_dropCampaign")
return {"success": False, "broken_dir": str(broken_dir), "reason": "no dropCampaign present"}
return {"success": False, "broken_dir": "(skipped)", "reason": "no dropCampaign present"}
parsed_json: dict[str, Any] = json.loads(raw_text)
operation_name: str | None = extract_operation_name_from_parsed(parsed_json)
campaigns_found: list[dict[str, Any]] = [parsed_json]
self.process_campaigns(
campaigns_found=campaigns_found,
file_path=file_path,
options=options,
)
ViewerDropsDashboardPayload.model_validate_json(raw_text) move_completed_file(file_path=file_path, operation_name=operation_name)
except ValidationError:
except (ValidationError, json.JSONDecodeError):
if options["crash_on_error"]: if options["crash_on_error"]:
raise raise
broken_dir: Path = move_failed_validation_file(file_path) if not options.get("skip_broken_moves"):
return {"success": False, "broken_dir": str(broken_dir)} parsed_json_local: Any | None = locals().get("parsed_json")
op_name: str | None = (
extract_operation_name_from_parsed(parsed_json_local)
if isinstance(parsed_json_local, (dict, list))
else None
)
broken_dir: Path = move_failed_validation_file(file_path, operation_name=op_name)
return {"success": False, "broken_dir": str(broken_dir)}
return {"success": False, "broken_dir": "(skipped)"}
else: else:
return {"success": True} return {"success": True}
@ -288,6 +873,7 @@ class Command(BaseCommand):
Raises: Raises:
ValidationError: If the JSON file fails validation ValidationError: If the JSON file fails validation
json.JSONDecodeError: If the JSON file cannot be parsed
""" """
with tqdm( with tqdm(
total=1, total=1,
@ -299,19 +885,58 @@ class Command(BaseCommand):
try: try:
raw_text: str = file_path.read_text(encoding="utf-8", errors="ignore") raw_text: str = file_path.read_text(encoding="utf-8", errors="ignore")
# Fast pre-filter for non-campaign responses
matched: str | None = detect_non_campaign_keyword(raw_text) matched: str | None = detect_non_campaign_keyword(raw_text)
if matched: if matched:
broken_dir: Path = move_file_to_broken_subdir(file_path, matched) if not options.get("skip_broken_moves"):
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name}{broken_dir}/{file_path.name} (matched '{matched}')") broken_dir: Path = move_file_to_broken_subdir(file_path, matched)
progress_bar.write(
f"{Fore.RED}{Style.RESET_ALL} {file_path.name}"
f"{broken_dir}/{file_path.name} "
f"(matched '{matched}')",
)
else:
progress_bar.write(
f"{Fore.RED}{Style.RESET_ALL} {file_path.name} (matched '{matched}', move skipped)",
)
return return
_: ViewerDropsDashboardPayload = ViewerDropsDashboardPayload.model_validate_json(raw_text) if "dropCampaign" not in raw_text:
if not options.get("skip_broken_moves"):
broken_dir = move_file_to_broken_subdir(file_path, "no_dropCampaign")
progress_bar.write(
f"{Fore.RED}{Style.RESET_ALL} {file_path.name}"
f"{broken_dir}/{file_path.name} "
f"(no dropCampaign present)",
)
else:
progress_bar.write(
f"{Fore.RED}{Style.RESET_ALL} {file_path.name} (no dropCampaign present, move skipped)",
)
return
parsed_json: dict[str, Any] = json.loads(raw_text)
operation_name: str | None = extract_operation_name_from_parsed(parsed_json)
campaigns_found: list[dict[str, Any]] = [parsed_json]
self.process_campaigns(campaigns_found=campaigns_found, file_path=file_path, options=options)
move_completed_file(file_path=file_path, operation_name=operation_name)
progress_bar.update(1) progress_bar.update(1)
progress_bar.write(f"{Fore.GREEN}{Style.RESET_ALL} {file_path.name}") progress_bar.write(f"{Fore.GREEN}{Style.RESET_ALL} {file_path.name}")
except ValidationError: except (ValidationError, json.JSONDecodeError):
if options["crash_on_error"]: if options["crash_on_error"]:
raise raise
broken_dir: Path = move_failed_validation_file(file_path) if not options.get("skip_broken_moves"):
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name}{broken_dir}/{file_path.name}") parsed_json_local: Any | None = locals().get("parsed_json")
op_name: str | None = (
extract_operation_name_from_parsed(parsed_json_local)
if isinstance(parsed_json_local, (dict, list))
else None
)
broken_dir: Path = move_failed_validation_file(file_path, operation_name=op_name)
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name}{broken_dir}/{file_path.name}")
else:
progress_bar.write(f"{Fore.RED}{Style.RESET_ALL} {file_path.name} (move skipped)")

View file

@ -52,7 +52,10 @@ def parse_date(value: str | None) -> datetime | None:
"RETURN_AS_TIMEZONE_AWARE": True, "RETURN_AS_TIMEZONE_AWARE": True,
"CACHE_SIZE_LIMIT": 0, "CACHE_SIZE_LIMIT": 0,
} }
dt: datetime | None = dateparser.parse(date_string=value, settings=dateparser_settings) # pyright: ignore[reportArgumentType] dt: datetime | None = dateparser.parse(
date_string=value,
settings=dateparser_settings, # pyright: ignore[reportArgumentType]
)
if not dt: if not dt:
return None return None
@ -63,7 +66,7 @@ def parse_date(value: str | None) -> datetime | None:
class Command(BaseCommand): class Command(BaseCommand):
"""Import Twitch drop campaign data from a JSON file or directory of JSON files.""" """Import Twitch drop campaign data from JSON."""
help = "Import Twitch drop campaign data from a JSON file or directory" help = "Import Twitch drop campaign data from a JSON file or directory"
requires_migrations_checks = True requires_migrations_checks = True
@ -110,7 +113,7 @@ class Command(BaseCommand):
parser.add_argument( parser.add_argument(
"--no-preload", "--no-preload",
action="store_true", action="store_true",
help="Do not preload existing DB objects into memory (default: preload).", help="Do not preload existing DB objects into memory.",
) )
def handle(self, **options) -> None: def handle(self, **options) -> None:
@ -126,7 +129,6 @@ class Command(BaseCommand):
AttributeError: If expected attributes are missing in the data. AttributeError: If expected attributes are missing in the data.
KeyError: If expected keys are missing in the data. KeyError: If expected keys are missing in the data.
IndexError: If list indices are out of range in the data. IndexError: If list indices are out of range in the data.
""" """
paths: list[str] = options["paths"] paths: list[str] = options["paths"]
processed_dir: str = options["processed_dir"] processed_dir: str = options["processed_dir"]
@ -136,7 +138,9 @@ class Command(BaseCommand):
# Preload DB objects into caches (unless disabled) # Preload DB objects into caches (unless disabled)
if not no_preload: if not no_preload:
try: try:
self.stdout.write("Preloading existing database objects into memory...") self.stdout.write(
"Preloading existing database objects into memory...",
)
self._preload_caches() self._preload_caches()
self.stdout.write( self.stdout.write(
f"Preloaded {len(self._game_cache)} games, " f"Preloaded {len(self._game_cache)} games, "
@ -147,7 +151,8 @@ class Command(BaseCommand):
) )
except (FileNotFoundError, OSError, RuntimeError): except (FileNotFoundError, OSError, RuntimeError):
# If preload fails for any reason, continue without it # If preload fails for any reason, continue without it
self.stdout.write(self.style.WARNING("Preloading caches failed — continuing without preload.")) msg = "Warning: Preloading caches failed — continuing without preload."
self.stdout.write(self.style.WARNING(msg))
self.stdout.write(self.style.ERROR(traceback.format_exc())) self.stdout.write(self.style.ERROR(traceback.format_exc()))
self._game_cache = {} self._game_cache = {}
self._organization_cache = {} self._organization_cache = {}
@ -167,37 +172,77 @@ class Command(BaseCommand):
processed_path: Path = path / processed_dir processed_path: Path = path / processed_dir
processed_path.mkdir(exist_ok=True) processed_path.mkdir(exist_ok=True)
self.process_drops(continue_on_error=continue_on_error, path=path, processed_path=processed_path) self.process_drops(
continue_on_error=continue_on_error,
path=path,
processed_path=processed_path,
)
except CommandError as e: except CommandError as e:
if not continue_on_error: if not continue_on_error:
raise raise
self.stdout.write(self.style.ERROR(f"Error processing path {p}: {e}")) self.stdout.write(
except (ValueError, TypeError, AttributeError, KeyError, IndexError): self.style.ERROR(f"Error processing path {p}: {e}"),
)
except (
ValueError,
TypeError,
AttributeError,
KeyError,
IndexError,
):
if not continue_on_error: if not continue_on_error:
raise raise
self.stdout.write(self.style.ERROR(f"Data error processing path {p}")) self.stdout.write(
self.style.ERROR(f"Data error processing path {p}"),
)
self.stdout.write(self.style.ERROR(traceback.format_exc())) self.stdout.write(self.style.ERROR(traceback.format_exc()))
except KeyboardInterrupt: except KeyboardInterrupt:
# Gracefully handle Ctrl+C # Gracefully handle Ctrl+C
self.stdout.write(self.style.WARNING("Interrupted by user, exiting import.")) self.stdout.write(
self.style.WARNING("Interrupted by user, exiting import."),
)
return return
def _preload_caches(self) -> None: def _preload_caches(self) -> None:
"""Load existing DB objects into in-memory caches to avoid repeated queries.""" """Load DB objects into in-memory caches to avoid repeated queries."""
# These queries may be heavy if DB is huge — safe because optional via --no-preload
with self._cache_locks["game"]: with self._cache_locks["game"]:
self._game_cache = {str(g.twitch_id): g for g in Game.objects.all()} self._game_cache = {} # Clear existing cache
with self._cache_locks["org"]: for game_instance in Game.objects.all():
self._organization_cache = {str(o.twitch_id): o for o in Organization.objects.all()} twitch_id = str(game_instance.twitch_id)
with self._cache_locks["campaign"]: self._game_cache[twitch_id] = game_instance
self._drop_campaign_cache = {str(c.twitch_id): c for c in DropCampaign.objects.all()}
with self._cache_locks["channel"]:
self._channel_cache = {str(ch.twitch_id): ch for ch in Channel.objects.all()}
with self._cache_locks["benefit"]:
self._benefit_cache = {str(b.twitch_id): b for b in DropBenefit.objects.all()}
def process_drops(self, *, continue_on_error: bool, path: Path, processed_path: Path) -> None: with self._cache_locks["org"]:
self._organization_cache = {}
for organization_instance in Organization.objects.all():
twitch_id = str(organization_instance.twitch_id)
self._organization_cache[twitch_id] = organization_instance
with self._cache_locks["campaign"]:
self._drop_campaign_cache = {}
for drop_campaign_instance in DropCampaign.objects.all():
twitch_id = str(drop_campaign_instance.twitch_id)
self._drop_campaign_cache[twitch_id] = drop_campaign_instance
with self._cache_locks["channel"]:
self._channel_cache = {}
for channel_instance in Channel.objects.all():
twitch_id = str(channel_instance.twitch_id)
self._channel_cache[twitch_id] = channel_instance
with self._cache_locks["benefit"]:
self._benefit_cache = {}
for benefit_instance in DropBenefit.objects.all():
twitch_id = str(benefit_instance.twitch_id)
self._benefit_cache[twitch_id] = benefit_instance
def process_drops(
self,
*,
continue_on_error: bool,
path: Path,
processed_path: Path,
) -> None:
"""Process drops from a file or directory. """Process drops from a file or directory.
Args: Args:
@ -233,7 +278,13 @@ class Command(BaseCommand):
msg: str = f"Path {path} does not exist" msg: str = f"Path {path} does not exist"
raise CommandError(msg) raise CommandError(msg)
def _process_directory(self, *, directory: Path, processed_path: Path, continue_on_error: bool) -> None: def _process_directory(
self,
*,
directory: Path,
processed_path: Path,
continue_on_error: bool,
) -> None:
"""Process all JSON files in a directory using parallel processing. """Process all JSON files in a directory using parallel processing.
Args: Args:
@ -252,7 +303,9 @@ class Command(BaseCommand):
""" """
json_files: list[Path] = list(directory.glob("*.json")) json_files: list[Path] = list(directory.glob("*.json"))
if not json_files: if not json_files:
self.stdout.write(self.style.WARNING(f"No JSON files found in {directory}")) self.stdout.write(
self.style.WARNING(f"No JSON files found in {directory}"),
)
return return
total_files: int = len(json_files) total_files: int = len(json_files)
@ -261,10 +314,19 @@ class Command(BaseCommand):
with concurrent.futures.ThreadPoolExecutor() as executor: with concurrent.futures.ThreadPoolExecutor() as executor:
try: try:
future_to_file: dict[concurrent.futures.Future[None], Path] = { future_to_file: dict[concurrent.futures.Future[None], Path] = {
executor.submit(self._process_file, json_file, processed_path): json_file for json_file in json_files executor.submit(
self._process_file,
json_file,
processed_path,
): json_file
for json_file in json_files
} }
# Wrap the as_completed iterator with tqdm for a progress bar # Wrap the as_completed iterator with tqdm for a progress bar
for future in tqdm(concurrent.futures.as_completed(future_to_file), total=total_files, desc="Processing files"): for future in tqdm(
concurrent.futures.as_completed(future_to_file),
total=total_files,
desc="Processing files",
):
json_file: Path = future_to_file[future] json_file: Path = future_to_file[future]
try: try:
future.result() future.result()
@ -273,20 +335,42 @@ class Command(BaseCommand):
# To stop all processing, we shut down the executor and re-raise # To stop all processing, we shut down the executor and re-raise
executor.shutdown(wait=False, cancel_futures=True) executor.shutdown(wait=False, cancel_futures=True)
raise raise
self.stdout.write(self.style.ERROR(f"Error processing {json_file}: {e}")) self.stdout.write(
except (ValueError, TypeError, AttributeError, KeyError, IndexError): self.style.ERROR(
f"Error processing {json_file}: {e}",
),
)
except (
ValueError,
TypeError,
AttributeError,
KeyError,
IndexError,
):
if not continue_on_error: if not continue_on_error:
# To stop all processing, we shut down the executor and re-raise # To stop all processing, we shut down the executor and re-raise
executor.shutdown(wait=False, cancel_futures=True) executor.shutdown(wait=False, cancel_futures=True)
raise raise
self.stdout.write(self.style.ERROR(f"Data error processing {json_file}")) self.stdout.write(
self.stdout.write(self.style.ERROR(traceback.format_exc())) self.style.ERROR(
f"Data error processing {json_file}",
),
)
self.stdout.write(
self.style.ERROR(traceback.format_exc()),
)
msg: str = f"Processed {total_files} JSON files in {directory}. Moved processed files to {processed_path}." msg: str = (
f"Processed {total_files} JSON files in {directory}. Moved processed files to {processed_path}."
)
self.stdout.write(self.style.SUCCESS(msg)) self.stdout.write(self.style.SUCCESS(msg))
except KeyboardInterrupt: except KeyboardInterrupt:
self.stdout.write(self.style.WARNING("Interruption received, shutting down threads immediately...")) self.stdout.write(
self.style.WARNING(
"Interruption received, shutting down threads immediately...",
),
)
executor.shutdown(wait=False, cancel_futures=True) executor.shutdown(wait=False, cancel_futures=True)
# Re-raise the exception to allow the main `handle` method to catch it and exit # Re-raise the exception to allow the main `handle` method to catch it and exit
raise raise
@ -331,7 +415,9 @@ class Command(BaseCommand):
target_dir.mkdir(parents=True, exist_ok=True) target_dir.mkdir(parents=True, exist_ok=True)
self.move_file(file_path, target_dir / file_path.name) self.move_file(file_path, target_dir / file_path.name)
tqdm.write(f"Moved {file_path} to {target_dir} (matched '{keyword}')") tqdm.write(
f"Moved {file_path} to {target_dir} (matched '{keyword}')",
)
return return
# Some responses have errors: # Some responses have errors:
@ -341,7 +427,9 @@ class Command(BaseCommand):
actual_error_dir: Path = processed_path / "actual_error" actual_error_dir: Path = processed_path / "actual_error"
actual_error_dir.mkdir(parents=True, exist_ok=True) actual_error_dir.mkdir(parents=True, exist_ok=True)
self.move_file(file_path, actual_error_dir / file_path.name) self.move_file(file_path, actual_error_dir / file_path.name)
tqdm.write(f"Moved {file_path} to {actual_error_dir} (contains Twitch errors)") tqdm.write(
f"Moved {file_path} to {actual_error_dir} (contains Twitch errors)",
)
return return
# If file has "__typename": "BroadcastSettings" move it to the "broadcast_settings" directory # If file has "__typename": "BroadcastSettings" move it to the "broadcast_settings" directory
@ -360,7 +448,9 @@ class Command(BaseCommand):
and data["data"]["channel"]["viewerDropCampaigns"] is None and data["data"]["channel"]["viewerDropCampaigns"] is None
): ):
file_path.unlink() file_path.unlink()
tqdm.write(f"Removed {file_path} (only contains empty viewerDropCampaigns)") tqdm.write(
f"Removed {file_path} (only contains empty viewerDropCampaigns)",
)
return return
# If file only contains {"data": {"user": null}} remove the file # If file only contains {"data": {"user": null}} remove the file
@ -377,11 +467,18 @@ class Command(BaseCommand):
tqdm.write(f"Removed {file_path} (only contains game data)") tqdm.write(f"Removed {file_path} (only contains game data)")
return return
# If file has "__typename": "DropCurrentSession" move it to the "drop_current_session" directory so we can process it separately. # If file has "__typename": "DropCurrentSession" move it to the "drop_current_session" directory so we can process it separately. # noqa: E501
if isinstance(data, dict) and data.get("data", {}).get("currentUser", {}).get("dropCurrentSession", {}).get("__typename") == "DropCurrentSession": if (
isinstance(data, dict)
and data.get("data", {}).get("currentUser", {}).get("dropCurrentSession", {}).get("__typename")
== "DropCurrentSession"
):
drop_current_session_dir: Path = processed_path / "drop_current_session" drop_current_session_dir: Path = processed_path / "drop_current_session"
drop_current_session_dir.mkdir(parents=True, exist_ok=True) drop_current_session_dir.mkdir(parents=True, exist_ok=True)
self.move_file(file_path, drop_current_session_dir / file_path.name) self.move_file(
file_path,
drop_current_session_dir / file_path.name,
)
return return
# If file is a list with one item: {"data": {"user": null}}, remove it # If file is a list with one item: {"data": {"user": null}}, remove it
@ -407,7 +504,10 @@ class Command(BaseCommand):
# Move file to "we_should_double_check" directory for manual review # Move file to "we_should_double_check" directory for manual review
we_should_double_check_dir: Path = processed_path / "we_should_double_check" we_should_double_check_dir: Path = processed_path / "we_should_double_check"
we_should_double_check_dir.mkdir(parents=True, exist_ok=True) we_should_double_check_dir.mkdir(parents=True, exist_ok=True)
self.move_file(file_path, we_should_double_check_dir / file_path.name) self.move_file(
file_path,
we_should_double_check_dir / file_path.name,
)
raise CommandError(msg) raise CommandError(msg)
self.move_file(file_path, processed_path) self.move_file(file_path, processed_path)
@ -426,19 +526,33 @@ class Command(BaseCommand):
if f1.read() != f2.read(): if f1.read() != f2.read():
new_name: Path = processed_path / f"{file_path.stem}_duplicate{file_path.suffix}" new_name: Path = processed_path / f"{file_path.stem}_duplicate{file_path.suffix}"
shutil.move(str(file_path), str(new_name)) shutil.move(str(file_path), str(new_name))
tqdm.write(f"Moved {file_path!s} to {new_name!s} (content differs)") tqdm.write(
f"Moved {file_path!s} to {new_name!s} (content differs)",
)
else: else:
tqdm.write(f"{file_path!s} already exists in {processed_path!s}, removing original file.") tqdm.write(
f"{file_path!s} already exists in {processed_path!s}, removing original file.",
)
file_path.unlink() file_path.unlink()
except FileNotFoundError: except FileNotFoundError:
tqdm.write(f"{file_path!s} not found when handling duplicate case, skipping.") tqdm.write(
f"{file_path!s} not found when handling duplicate case, skipping.",
)
except FileNotFoundError: except FileNotFoundError:
tqdm.write(f"{file_path!s} not found, skipping.") tqdm.write(f"{file_path!s} not found, skipping.")
except (PermissionError, OSError, shutil.Error) as e: except (PermissionError, OSError, shutil.Error) as e:
self.stdout.write(self.style.ERROR(f"Error moving {file_path!s} to {processed_path!s}: {e}")) self.stdout.write(
self.style.ERROR(
f"Error moving {file_path!s} to {processed_path!s}: {e}",
),
)
traceback.print_exc() traceback.print_exc()
def import_drop_campaign(self, data: dict[str, Any], file_path: Path) -> None: def import_drop_campaign(
self,
data: dict[str, Any],
file_path: Path,
) -> None:
"""Find and import drop campaign data from various JSON structures.""" """Find and import drop campaign data from various JSON structures."""
# Add this check: If this is a known "empty" response, ignore it silently. # Add this check: If this is a known "empty" response, ignore it silently.
if ( if (
@ -475,7 +589,9 @@ class Command(BaseCommand):
# Structure: {"data": {"currentUser": {"inventory": {"dropCampaignsInProgress": [...]}}}} # Structure: {"data": {"currentUser": {"inventory": {"dropCampaignsInProgress": [...]}}}}
if "inventory" in current_user and "dropCampaignsInProgress" in current_user["inventory"]: if "inventory" in current_user and "dropCampaignsInProgress" in current_user["inventory"]:
campaigns_found.extend(current_user["inventory"]["dropCampaignsInProgress"]) campaigns_found.extend(
current_user["inventory"]["dropCampaignsInProgress"],
)
# Structure: {"data": {"channel": {"viewerDropCampaigns": [...]}}} # Structure: {"data": {"channel": {"viewerDropCampaigns": [...]}}}
if "channel" in d and d["channel"] and "viewerDropCampaigns" in d["channel"]: if "channel" in d and d["channel"] and "viewerDropCampaigns" in d["channel"]:
@ -507,9 +623,17 @@ class Command(BaseCommand):
self.import_to_db(data, file_path=file_path) self.import_to_db(data, file_path=file_path)
return return
tqdm.write(self.style.WARNING(f"No valid drop campaign data found in {file_path.name}")) tqdm.write(
self.style.WARNING(
f"No valid drop campaign data found in {file_path.name}",
),
)
def import_to_db(self, campaign_data: dict[str, Any], file_path: Path) -> None: def import_to_db(
self,
campaign_data: dict[str, Any],
file_path: Path,
) -> None:
"""Import drop campaign data into the database with retry logic for SQLite locks. """Import drop campaign data into the database with retry logic for SQLite locks.
Args: Args:
@ -517,25 +641,51 @@ class Command(BaseCommand):
file_path: The path to the file being processed. file_path: The path to the file being processed.
""" """
with transaction.atomic(): with transaction.atomic():
game: Game = self.game_update_or_create(campaign_data=campaign_data) game: Game = self.game_update_or_create(
organization: Organization | None = self.owner_update_or_create(campaign_data=campaign_data) campaign_data=campaign_data,
)
organization: Organization | None = self.owner_update_or_create(
campaign_data=campaign_data,
)
if organization and game.owner != organization: if organization and game.owner != organization:
game.owner = organization game.owner = organization
game.save(update_fields=["owner"]) game.save(update_fields=["owner"])
drop_campaign: DropCampaign = self.drop_campaign_update_or_get(campaign_data=campaign_data, game=game) drop_campaign: DropCampaign = self.drop_campaign_update_or_get(
campaign_data=campaign_data,
game=game,
)
for drop_data in campaign_data.get("timeBasedDrops", []): for drop_data in campaign_data.get("timeBasedDrops", []):
self._process_time_based_drop(drop_data, drop_campaign, file_path) self._process_time_based_drop(
drop_data,
drop_campaign,
file_path,
)
def _process_time_based_drop(self, drop_data: dict[str, Any], drop_campaign: DropCampaign, file_path: Path) -> None: def _process_time_based_drop(
time_based_drop: TimeBasedDrop = self.create_time_based_drop(drop_campaign=drop_campaign, drop_data=drop_data) self,
drop_data: dict[str, Any],
drop_campaign: DropCampaign,
file_path: Path,
) -> None:
time_based_drop: TimeBasedDrop = self.create_time_based_drop(
drop_campaign=drop_campaign,
drop_data=drop_data,
)
benefit_edges: list[dict[str, Any]] = drop_data.get("benefitEdges", []) benefit_edges: list[dict[str, Any]] = drop_data.get("benefitEdges", [])
if not benefit_edges: if not benefit_edges:
tqdm.write(self.style.WARNING(f"No benefit edges found for drop {time_based_drop.name} (ID: {time_based_drop.twitch_id})")) tqdm.write(
self.move_file(file_path, Path("no_benefit_edges") / file_path.name) self.style.WARNING(
f"No benefit edges found for drop {time_based_drop.name} (ID: {time_based_drop.twitch_id})",
),
)
self.move_file(
file_path,
Path("no_benefit_edges") / file_path.name,
)
return return
for benefit_edge in benefit_edges: for benefit_edge in benefit_edges:
@ -558,14 +708,22 @@ class Command(BaseCommand):
benefit_defaults = {k: v for k, v in benefit_defaults.items() if v is not None} benefit_defaults = {k: v for k, v in benefit_defaults.items() if v is not None}
# Use cached create/update for benefits # Use cached create/update for benefits
benefit = self._get_or_create_benefit(benefit_data["id"], benefit_defaults) benefit = self._get_or_create_benefit(
benefit_data["id"],
benefit_defaults,
)
try: try:
with transaction.atomic(): with transaction.atomic():
drop_benefit_edge, created = DropBenefitEdge.objects.update_or_create( drop_benefit_edge, created = DropBenefitEdge.objects.update_or_create(
drop=time_based_drop, drop=time_based_drop,
benefit=benefit, benefit=benefit,
defaults={"entitlement_limit": benefit_edge.get("entitlementLimit", 1)}, defaults={
"entitlement_limit": benefit_edge.get(
"entitlementLimit",
1,
),
},
) )
if created: if created:
tqdm.write(f"Added {drop_benefit_edge}") tqdm.write(f"Added {drop_benefit_edge}")
@ -573,10 +731,14 @@ class Command(BaseCommand):
msg = f"Error: Multiple DropBenefitEdge objects found for drop {time_based_drop.twitch_id} and benefit {benefit.twitch_id}. Cannot update or create." # noqa: E501 msg = f"Error: Multiple DropBenefitEdge objects found for drop {time_based_drop.twitch_id} and benefit {benefit.twitch_id}. Cannot update or create." # noqa: E501
raise CommandError(msg) from e raise CommandError(msg) from e
except (IntegrityError, DatabaseError, TypeError, ValueError) as e: except (IntegrityError, DatabaseError, TypeError, ValueError) as e:
msg = f"Database or validation error creating DropBenefitEdge for drop {time_based_drop.twitch_id} and benefit {benefit.twitch_id}: {e}" msg = f"Database or validation error creating DropBenefitEdge for drop {time_based_drop.twitch_id} and benefit {benefit.twitch_id}: {e}" # noqa: E501
raise CommandError(msg) from e raise CommandError(msg) from e
def create_time_based_drop(self, drop_campaign: DropCampaign, drop_data: dict[str, Any]) -> TimeBasedDrop: def create_time_based_drop(
self,
drop_campaign: DropCampaign,
drop_data: dict[str, Any],
) -> TimeBasedDrop:
"""Creates or updates a TimeBasedDrop instance based on the provided drop data. """Creates or updates a TimeBasedDrop instance based on the provided drop data.
Args: Args:
@ -598,7 +760,9 @@ class Command(BaseCommand):
time_based_drop_defaults: dict[str, Any] = { time_based_drop_defaults: dict[str, Any] = {
"campaign": drop_campaign, "campaign": drop_campaign,
"name": drop_data.get("name"), "name": drop_data.get("name"),
"required_minutes_watched": drop_data.get("requiredMinutesWatched"), "required_minutes_watched": drop_data.get(
"requiredMinutesWatched",
),
"required_subs": drop_data.get("requiredSubs"), "required_subs": drop_data.get("requiredSubs"),
"start_at": parse_date(drop_data.get("startAt")), "start_at": parse_date(drop_data.get("startAt")),
"end_at": parse_date(drop_data.get("endAt")), "end_at": parse_date(drop_data.get("endAt")),
@ -614,7 +778,10 @@ class Command(BaseCommand):
try: try:
with transaction.atomic(): with transaction.atomic():
time_based_drop, created = TimeBasedDrop.objects.update_or_create(id=drop_data["id"], defaults=time_based_drop_defaults) time_based_drop, created = TimeBasedDrop.objects.update_or_create(
id=drop_data["id"],
defaults=time_based_drop_defaults,
)
if created: if created:
tqdm.write(f"Added {time_based_drop}") tqdm.write(f"Added {time_based_drop}")
except MultipleObjectsReturned as e: except MultipleObjectsReturned as e:
@ -652,7 +819,10 @@ class Command(BaseCommand):
lock = self._cache_locks.get(model_name) lock = self._cache_locks.get(model_name)
if lock is None: if lock is None:
# Fallback for models without a dedicated cache/lock # Fallback for models without a dedicated cache/lock
obj, created = model_class.objects.update_or_create(id=obj_id, defaults=defaults) obj, created = model_class.objects.update_or_create(
id=obj_id,
defaults=defaults,
)
if created: if created:
tqdm.write(f"Added {obj}") tqdm.write(f"Added {obj}")
return obj return obj
@ -672,7 +842,10 @@ class Command(BaseCommand):
# Use get_or_create which is safer in a race. It might still fail if two threads # Use get_or_create which is safer in a race. It might still fail if two threads
# try to create at the exact same time, so we wrap it. # try to create at the exact same time, so we wrap it.
try: try:
obj, created = model_class.objects.get_or_create(id=obj_id, defaults=defaults) obj, created = model_class.objects.get_or_create(
id=obj_id,
defaults=defaults,
)
except IntegrityError: except IntegrityError:
# Another thread created it between our `get` and `create` attempt. # Another thread created it between our `get` and `create` attempt.
# The object is guaranteed to exist now, so we can just fetch it. # The object is guaranteed to exist now, so we can just fetch it.
@ -700,8 +873,17 @@ class Command(BaseCommand):
return obj return obj
def _get_or_create_benefit(self, benefit_id: str | int, defaults: dict[str, Any]) -> DropBenefit: def _get_or_create_benefit(
return self._get_or_create_cached("benefit", DropBenefit, benefit_id, defaults) # pyright: ignore[reportReturnType] self,
benefit_id: str | int,
defaults: dict[str, Any],
) -> DropBenefit:
return self._get_or_create_cached(
"benefit",
DropBenefit,
benefit_id,
defaults,
) # pyright: ignore[reportReturnType]
def game_update_or_create(self, campaign_data: dict[str, Any]) -> Game: def game_update_or_create(self, campaign_data: dict[str, Any]) -> Game:
"""Update or create a game with caching. """Update or create a game with caching.
@ -726,11 +908,13 @@ class Command(BaseCommand):
# Filter out None values to avoid overwriting with them # Filter out None values to avoid overwriting with them
game_defaults = {k: v for k, v in game_defaults.items() if v is not None} game_defaults = {k: v for k, v in game_defaults.items() if v is not None}
game: Game | Organization | DropCampaign | Channel | DropBenefit | str | int | None = self._get_or_create_cached( game: Game | Organization | DropCampaign | Channel | DropBenefit | str | int | None = (
model_name="game", self._get_or_create_cached(
model_class=Game, model_name="game",
obj_id=game_data["id"], model_class=Game,
defaults=game_defaults, obj_id=game_data["id"],
defaults=game_defaults,
)
) )
if not isinstance(game, Game): if not isinstance(game, Game):
msg = "Expected a Game instance from _get_or_create_cached" msg = "Expected a Game instance from _get_or_create_cached"
@ -738,7 +922,10 @@ class Command(BaseCommand):
return game return game
def owner_update_or_create(self, campaign_data: dict[str, Any]) -> Organization | None: def owner_update_or_create(
self,
campaign_data: dict[str, Any],
) -> Organization | None:
"""Update or create an organization with caching. """Update or create an organization with caching.
Args: Args:
@ -768,7 +955,11 @@ class Command(BaseCommand):
return owner return owner
return None return None
def drop_campaign_update_or_get(self, campaign_data: dict[str, Any], game: Game) -> DropCampaign: def drop_campaign_update_or_get(
self,
campaign_data: dict[str, Any],
game: Game,
) -> DropCampaign:
"""Update or create a drop campaign with caching and channel handling. """Update or create a drop campaign with caching and channel handling.
Args: Args:
@ -791,9 +982,18 @@ class Command(BaseCommand):
"details_url": campaign_data.get("detailsURL"), "details_url": campaign_data.get("detailsURL"),
"account_link_url": campaign_data.get("accountLinkURL"), "account_link_url": campaign_data.get("accountLinkURL"),
"image_url": campaign_data.get("imageURL"), "image_url": campaign_data.get("imageURL"),
"start_at": parse_date(campaign_data.get("startAt") or campaign_data.get("startsAt")), "start_at": parse_date(
"end_at": parse_date(campaign_data.get("endAt") or campaign_data.get("endsAt")), campaign_data.get("startAt") or campaign_data.get("startsAt"),
"is_account_connected": campaign_data.get("self", {}).get("isAccountConnected"), ),
"end_at": parse_date(
campaign_data.get("endAt") or campaign_data.get("endsAt"),
),
"is_account_connected": (
campaign_data.get(
"self",
{},
).get("isAccountConnected")
),
"allow_is_enabled": allow_is_enabled, "allow_is_enabled": allow_is_enabled,
} }
@ -846,7 +1046,9 @@ class Command(BaseCommand):
channel_objects.append(channel) channel_objects.append(channel)
# Set the many-to-many relationship (save only if different) # Set the many-to-many relationship (save only if different)
current_ids = set(drop_campaign.allow_channels.values_list("id", flat=True)) current_ids = set(
drop_campaign.allow_channels.values_list("id", flat=True),
)
new_ids = {ch.twitch_id for ch in channel_objects} new_ids = {ch.twitch_id for ch in channel_objects}
if current_ids != new_ids: if current_ids != new_ids:
drop_campaign.allow_channels.set(channel_objects) drop_campaign.allow_channels.set(channel_objects)

View file

@ -1,4 +1,4 @@
# Generated by Django 5.2.8 on 2025-12-01 20:17 # Generated by Django 6.0 on 2025-12-11 10:49
from __future__ import annotations from __future__ import annotations
import django.db.models.deletion import django.db.models.deletion
@ -7,125 +7,292 @@ from django.db import models
class Migration(migrations.Migration): class Migration(migrations.Migration):
"""Initial Django migration for the twitch app schema.""" """Initial migration for Twitch-related models."""
initial = True initial = True
dependencies = [] dependencies = []
operations = [ operations = [
migrations.CreateModel(
name="Channel",
fields=[
(
"twitch_id",
models.TextField(help_text="The unique Twitch identifier for the channel.", primary_key=True, serialize=False, verbose_name="Channel ID"),
),
("name", models.TextField(help_text="The lowercase username of the channel.", verbose_name="Username")),
("display_name", models.TextField(help_text="The display name of the channel (with proper capitalization).", verbose_name="Display Name")),
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this channel record was created.")),
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this channel record was last updated.")),
],
options={
"ordering": ["display_name"],
},
),
migrations.CreateModel(
name="DropBenefit",
fields=[
("twitch_id", models.TextField(help_text="Unique Twitch identifier for the benefit.", primary_key=True, serialize=False)),
("name", models.TextField(blank=True, default="N/A", help_text="Name of the drop benefit.")),
("image_asset_url", models.URLField(blank=True, default="", help_text="URL to the benefit's image asset.", max_length=500)),
(
"image_file",
models.FileField(blank=True, help_text="Locally cached benefit image served from this site.", null=True, upload_to="benefits/images/"),
),
(
"created_at",
models.DateTimeField(help_text="Timestamp when the benefit was created. This is from Twitch API and not auto-generated.", null=True),
),
("entitlement_limit", models.PositiveIntegerField(default=1, help_text="Maximum number of times this benefit can be earned.")),
("is_ios_available", models.BooleanField(default=False, help_text="Whether the benefit is available on iOS.")),
("distribution_type", models.TextField(blank=True, default="", help_text="Type of distribution for this benefit.", max_length=50)),
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this benefit record was created.")),
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this benefit record was last updated.")),
],
options={
"ordering": ["-created_at"],
},
),
migrations.CreateModel( migrations.CreateModel(
name="Game", name="Game",
fields=[ fields=[
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("twitch_id", models.TextField(unique=True, verbose_name="Twitch game ID")), ("twitch_id", models.TextField(unique=True, verbose_name="Twitch game ID")),
("slug", models.TextField(blank=True, default="", help_text="Short unique identifier for the game.", max_length=200, verbose_name="Slug")), (
"slug",
models.TextField(
blank=True,
default="",
help_text="Short unique identifier for the game.",
max_length=200,
verbose_name="Slug",
),
),
("name", models.TextField(blank=True, default="", verbose_name="Name")), ("name", models.TextField(blank=True, default="", verbose_name="Name")),
("display_name", models.TextField(blank=True, default="", verbose_name="Display name")), ("display_name", models.TextField(blank=True, default="", verbose_name="Display name")),
("box_art", models.URLField(blank=True, default="", max_length=500, verbose_name="Box art URL")), ("box_art", models.URLField(blank=True, default="", max_length=500, verbose_name="Box art URL")),
( (
"box_art_file", "box_art_file",
models.FileField(blank=True, help_text="Locally cached box art image served from this site.", null=True, upload_to="games/box_art/"), models.FileField(
blank=True,
help_text="Locally cached box art image served from this site.",
null=True,
upload_to="games/box_art/",
),
),
(
"added_at",
models.DateTimeField(auto_now_add=True, help_text="Timestamp when this game record was created."),
),
(
"updated_at",
models.DateTimeField(auto_now=True, help_text="Timestamp when this game record was last updated."),
), ),
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this game record was created.")),
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this game record was last updated.")),
], ],
options={ options={
"ordering": ["display_name"], "ordering": ["display_name"],
}, },
), ),
migrations.CreateModel( migrations.CreateModel(
name="Organization", name="Channel",
fields=[ fields=[
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
( (
"twitch_id", "twitch_id",
models.TextField( models.TextField(
help_text="The unique Twitch identifier for the organization.", help_text="The unique Twitch identifier for the channel.",
primary_key=True, unique=True,
serialize=False, verbose_name="Channel ID",
verbose_name="Organization ID", ),
),
("name", models.TextField(help_text="The lowercase username of the channel.", verbose_name="Username")),
(
"display_name",
models.TextField(
help_text="The display name of the channel (with proper capitalization).",
verbose_name="Display Name",
),
),
(
"added_at",
models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this channel record was created.",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="Timestamp when this channel record was last updated.",
), ),
), ),
("name", models.TextField(help_text="Display name of the organization.", unique=True, verbose_name="Name")),
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this organization record was created.")),
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this organization record was last updated.")),
], ],
options={ options={
"ordering": ["name"], "ordering": ["display_name"],
"indexes": [
models.Index(fields=["display_name"], name="twitch_chan_display_2bf213_idx"),
models.Index(fields=["name"], name="twitch_chan_name_15d566_idx"),
models.Index(fields=["twitch_id"], name="twitch_chan_twitch__c8bbc6_idx"),
models.Index(fields=["added_at"], name="twitch_chan_added_a_5ce7b4_idx"),
models.Index(fields=["updated_at"], name="twitch_chan_updated_828594_idx"),
],
},
),
migrations.CreateModel(
name="DropBenefit",
fields=[
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
(
"twitch_id",
models.TextField(editable=False, help_text="The Twitch ID for this benefit.", unique=True),
),
("name", models.TextField(blank=True, default="N/A", help_text="Name of the drop benefit.")),
(
"image_asset_url",
models.URLField(
blank=True,
default="",
help_text="URL to the benefit's image asset.",
max_length=500,
),
),
(
"image_file",
models.FileField(
blank=True,
help_text="Locally cached benefit image served from this site.",
null=True,
upload_to="benefits/images/",
),
),
(
"created_at",
models.DateTimeField(
help_text="Timestamp when the benefit was created. This is from Twitch API and not auto-generated.", # noqa: E501
null=True,
),
),
(
"entitlement_limit",
models.PositiveIntegerField(
default=1,
help_text="Maximum number of times this benefit can be earned.",
),
),
(
"is_ios_available",
models.BooleanField(default=False, help_text="Whether the benefit is available on iOS."),
),
(
"distribution_type",
models.TextField(
blank=True,
default="",
help_text="Type of distribution for this benefit.",
max_length=50,
),
),
(
"added_at",
models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this benefit record was created.",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="Timestamp when this benefit record was last updated.",
),
),
],
options={
"ordering": ["-created_at"],
"indexes": [
models.Index(fields=["-created_at"], name="twitch_drop_created_5d2280_idx"),
models.Index(fields=["twitch_id"], name="twitch_drop_twitch__6eab58_idx"),
models.Index(fields=["name"], name="twitch_drop_name_7125ff_idx"),
models.Index(fields=["distribution_type"], name="twitch_drop_distrib_08b224_idx"),
models.Index(fields=["is_ios_available"], name="twitch_drop_is_ios__5f3dcf_idx"),
models.Index(fields=["added_at"], name="twitch_drop_added_a_fba438_idx"),
models.Index(fields=["updated_at"], name="twitch_drop_updated_7aaae3_idx"),
],
}, },
), ),
migrations.CreateModel( migrations.CreateModel(
name="DropBenefitEdge", name="DropBenefitEdge",
fields=[ fields=[
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("entitlement_limit", models.PositiveIntegerField(default=1, help_text="Max times this benefit can be claimed for this drop.")), (
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this drop-benefit edge was created.")), "entitlement_limit",
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this drop-benefit edge was last updated.")), models.PositiveIntegerField(
default=1,
help_text="Max times this benefit can be claimed for this drop.",
),
),
(
"added_at",
models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this drop-benefit edge was created.",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="Timestamp when this drop-benefit edge was last updated.",
),
),
( (
"benefit", "benefit",
models.ForeignKey(help_text="The benefit in this relationship.", on_delete=django.db.models.deletion.CASCADE, to="twitch.dropbenefit"), models.ForeignKey(
help_text="The benefit in this relationship.",
on_delete=django.db.models.deletion.CASCADE,
to="twitch.dropbenefit",
),
), ),
], ],
), ),
migrations.CreateModel( migrations.CreateModel(
name="DropCampaign", name="DropCampaign",
fields=[ fields=[
("twitch_id", models.TextField(help_text="Unique Twitch identifier for the campaign.", primary_key=True, serialize=False)), ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
(
"twitch_id",
models.TextField(editable=False, help_text="The Twitch ID for this campaign.", unique=True),
),
("name", models.TextField(help_text="Name of the drop campaign.")), ("name", models.TextField(help_text="Name of the drop campaign.")),
("description", models.TextField(blank=True, help_text="Detailed description of the campaign.")), ("description", models.TextField(blank=True, help_text="Detailed description of the campaign.")),
("details_url", models.URLField(blank=True, default="", help_text="URL with campaign details.", max_length=500)), (
("account_link_url", models.URLField(blank=True, default="", help_text="URL to link a Twitch account for the campaign.", max_length=500)), "details_url",
("image_url", models.URLField(blank=True, default="", help_text="URL to an image representing the campaign.", max_length=500)), models.URLField(blank=True, default="", help_text="URL with campaign details.", max_length=500),
),
(
"account_link_url",
models.URLField(
blank=True,
default="",
help_text="URL to link a Twitch account for the campaign.",
max_length=500,
),
),
(
"image_url",
models.URLField(
blank=True,
default="",
help_text="URL to an image representing the campaign.",
max_length=500,
),
),
( (
"image_file", "image_file",
models.FileField(blank=True, help_text="Locally cached campaign image served from this site.", null=True, upload_to="campaigns/images/"), models.FileField(
blank=True,
help_text="Locally cached campaign image served from this site.",
null=True,
upload_to="campaigns/images/",
),
),
(
"start_at",
models.DateTimeField(blank=True, help_text="Datetime when the campaign starts.", null=True),
), ),
("start_at", models.DateTimeField(blank=True, help_text="Datetime when the campaign starts.", null=True)),
("end_at", models.DateTimeField(blank=True, help_text="Datetime when the campaign ends.", null=True)), ("end_at", models.DateTimeField(blank=True, help_text="Datetime when the campaign ends.", null=True)),
("is_account_connected", models.BooleanField(default=False, help_text="Indicates if the user account is linked.")), (
("allow_is_enabled", models.BooleanField(default=True, help_text="Whether the campaign allows participation.")), "is_account_connected",
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this campaign record was created.")), models.BooleanField(default=False, help_text="Indicates if the user account is linked."),
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this campaign record was last updated.")), ),
(
"allow_is_enabled",
models.BooleanField(default=True, help_text="Whether the campaign allows participation."),
),
(
"operation_name",
models.TextField(
blank=True,
default="",
help_text="The GraphQL operation name used to fetch this campaign data (e.g., 'ViewerDropsDashboard').", # noqa: E501
),
),
(
"added_at",
models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this campaign record was created.",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="Timestamp when this campaign record was last updated.",
),
),
( (
"allow_channels", "allow_channels",
models.ManyToManyField( models.ManyToManyField(
@ -150,6 +317,50 @@ class Migration(migrations.Migration):
"ordering": ["-start_at"], "ordering": ["-start_at"],
}, },
), ),
migrations.CreateModel(
name="Organization",
fields=[
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
(
"twitch_id",
models.TextField(
editable=False,
help_text="The unique Twitch identifier for the organization.",
unique=True,
verbose_name="Organization ID",
),
),
(
"name",
models.TextField(help_text="Display name of the organization.", unique=True, verbose_name="Name"),
),
(
"added_at",
models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this organization record was created.",
verbose_name="Added At",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="Timestamp when this organization record was last updated.",
verbose_name="Updated At",
),
),
],
options={
"ordering": ["name"],
"indexes": [
models.Index(fields=["name"], name="twitch_orga_name_febe72_idx"),
models.Index(fields=["twitch_id"], name="twitch_orga_twitch__b89b29_idx"),
models.Index(fields=["added_at"], name="twitch_orga_added_a_8297ac_idx"),
models.Index(fields=["updated_at"], name="twitch_orga_updated_d7d431_idx"),
],
},
),
migrations.AddField( migrations.AddField(
model_name="game", model_name="game",
name="owner", name="owner",
@ -166,17 +377,46 @@ class Migration(migrations.Migration):
migrations.CreateModel( migrations.CreateModel(
name="TimeBasedDrop", name="TimeBasedDrop",
fields=[ fields=[
("twitch_id", models.TextField(help_text="Unique Twitch identifier for the time-based drop.", primary_key=True, serialize=False)), ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
(
"twitch_id",
models.TextField(editable=False, help_text="The Twitch ID for this time-based drop.", unique=True),
),
("name", models.TextField(help_text="Name of the time-based drop.")), ("name", models.TextField(help_text="Name of the time-based drop.")),
( (
"required_minutes_watched", "required_minutes_watched",
models.PositiveIntegerField(blank=True, help_text="Minutes required to watch before earning this drop.", null=True), models.PositiveIntegerField(
blank=True,
help_text="Minutes required to watch before earning this drop.",
null=True,
),
),
(
"required_subs",
models.PositiveIntegerField(
default=0,
help_text="Number of subscriptions required to unlock this drop.",
),
),
(
"start_at",
models.DateTimeField(blank=True, help_text="Datetime when this drop becomes available.", null=True),
), ),
("required_subs", models.PositiveIntegerField(default=0, help_text="Number of subscriptions required to unlock this drop.")),
("start_at", models.DateTimeField(blank=True, help_text="Datetime when this drop becomes available.", null=True)),
("end_at", models.DateTimeField(blank=True, help_text="Datetime when this drop expires.", null=True)), ("end_at", models.DateTimeField(blank=True, help_text="Datetime when this drop expires.", null=True)),
("added_at", models.DateTimeField(auto_now_add=True, help_text="Timestamp when this time-based drop record was created.")), (
("updated_at", models.DateTimeField(auto_now=True, help_text="Timestamp when this time-based drop record was last updated.")), "added_at",
models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this time-based drop record was created.",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="Timestamp when this time-based drop record was last updated.",
),
),
( (
"benefits", "benefits",
models.ManyToManyField( models.ManyToManyField(
@ -212,7 +452,15 @@ class Migration(migrations.Migration):
migrations.CreateModel( migrations.CreateModel(
name="TwitchGameData", name="TwitchGameData",
fields=[ fields=[
("twitch_id", models.TextField(primary_key=True, serialize=False, verbose_name="Twitch Game ID")), ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
(
"twitch_id",
models.TextField(
help_text="The Twitch ID for this game.",
unique=True,
verbose_name="Twitch Game ID",
),
),
("name", models.TextField(blank=True, default="", verbose_name="Name")), ("name", models.TextField(blank=True, default="", verbose_name="Name")),
( (
"box_art_url", "box_art_url",
@ -244,8 +492,192 @@ class Migration(migrations.Migration):
"ordering": ["name"], "ordering": ["name"],
}, },
), ),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["-start_at"], name="twitch_drop_start_a_929f09_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["end_at"], name="twitch_drop_end_at_6560b0_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["game"], name="twitch_drop_game_id_868e70_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["twitch_id"], name="twitch_drop_twitch__b717a1_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["name"], name="twitch_drop_name_3b70b3_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["description"], name="twitch_drop_descrip_5bc290_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["is_account_connected"], name="twitch_drop_is_acco_7e9078_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["allow_is_enabled"], name="twitch_drop_allow_i_b64555_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["operation_name"], name="twitch_drop_operati_8cfeb5_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["added_at"], name="twitch_drop_added_a_babe28_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["updated_at"], name="twitch_drop_updated_0df991_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["game", "-start_at"], name="twitch_drop_game_id_5e9b01_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["start_at", "end_at"], name="twitch_drop_start_a_6e5fb6_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["start_at", "end_at", "game"], name="twitch_drop_start_a_b02d4c_idx"),
),
migrations.AddIndex(
model_name="dropcampaign",
index=models.Index(fields=["end_at", "-start_at"], name="twitch_drop_end_at_81e51b_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["display_name"], name="twitch_game_display_a35ba3_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["name"], name="twitch_game_name_c92c15_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["slug"], name="twitch_game_slug_a02d3c_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["twitch_id"], name="twitch_game_twitch__887f78_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["owner"], name="twitch_game_owner_i_398fa9_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["added_at"], name="twitch_game_added_a_9e7e19_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["updated_at"], name="twitch_game_updated_01df03_idx"),
),
migrations.AddIndex(
model_name="game",
index=models.Index(fields=["owner", "display_name"], name="twitch_game_owner_i_7f9043_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["start_at"], name="twitch_time_start_a_13de4a_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["end_at"], name="twitch_time_end_at_3df95a_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["campaign"], name="twitch_time_campaig_bbe349_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["twitch_id"], name="twitch_time_twitch__31707a_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["name"], name="twitch_time_name_47c0f4_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["required_minutes_watched"], name="twitch_time_require_82c30c_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["required_subs"], name="twitch_time_require_959431_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["added_at"], name="twitch_time_added_a_a7de2e_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["updated_at"], name="twitch_time_updated_9e9d9e_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["campaign", "start_at"], name="twitch_time_campaig_29ac87_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["campaign", "required_minutes_watched"], name="twitch_time_campaig_920ae4_idx"),
),
migrations.AddIndex(
model_name="timebaseddrop",
index=models.Index(fields=["start_at", "end_at"], name="twitch_time_start_a_c481f1_idx"),
),
migrations.AddIndex(
model_name="dropbenefitedge",
index=models.Index(fields=["drop"], name="twitch_drop_drop_id_3a2994_idx"),
),
migrations.AddIndex(
model_name="dropbenefitedge",
index=models.Index(fields=["benefit"], name="twitch_drop_benefit_c92c87_idx"),
),
migrations.AddIndex(
model_name="dropbenefitedge",
index=models.Index(fields=["entitlement_limit"], name="twitch_drop_entitle_bee3a0_idx"),
),
migrations.AddIndex(
model_name="dropbenefitedge",
index=models.Index(fields=["added_at"], name="twitch_drop_added_a_2100ba_idx"),
),
migrations.AddIndex(
model_name="dropbenefitedge",
index=models.Index(fields=["updated_at"], name="twitch_drop_updated_00e3f2_idx"),
),
migrations.AddConstraint( migrations.AddConstraint(
model_name="dropbenefitedge", model_name="dropbenefitedge",
constraint=models.UniqueConstraint(fields=("drop", "benefit"), name="unique_drop_benefit"), constraint=models.UniqueConstraint(fields=("drop", "benefit"), name="unique_drop_benefit"),
), ),
migrations.AddIndex(
model_name="twitchgamedata",
index=models.Index(fields=["name"], name="twitch_twit_name_5dda5f_idx"),
),
migrations.AddIndex(
model_name="twitchgamedata",
index=models.Index(fields=["twitch_id"], name="twitch_twit_twitch__2207e6_idx"),
),
migrations.AddIndex(
model_name="twitchgamedata",
index=models.Index(fields=["game"], name="twitch_twit_game_id_0d820a_idx"),
),
migrations.AddIndex(
model_name="twitchgamedata",
index=models.Index(fields=["igdb_id"], name="twitch_twit_igdb_id_161335_idx"),
),
migrations.AddIndex(
model_name="twitchgamedata",
index=models.Index(fields=["added_at"], name="twitch_twit_added_a_2f4f36_idx"),
),
migrations.AddIndex(
model_name="twitchgamedata",
index=models.Index(fields=["updated_at"], name="twitch_twit_updated_ca8c4b_idx"),
),
] ]

View file

@ -17,8 +17,9 @@ class Organization(models.Model):
"""Represents an organization on Twitch that can own drop campaigns.""" """Represents an organization on Twitch that can own drop campaigns."""
twitch_id = models.TextField( twitch_id = models.TextField(
primary_key=True, unique=True,
verbose_name="Organization ID", verbose_name="Organization ID",
editable=False,
help_text="The unique Twitch identifier for the organization.", help_text="The unique Twitch identifier for the organization.",
) )
name = models.TextField( name = models.TextField(
@ -29,15 +30,25 @@ class Organization(models.Model):
added_at = models.DateTimeField( added_at = models.DateTimeField(
auto_now_add=True, auto_now_add=True,
verbose_name="Added At",
editable=False,
help_text="Timestamp when this organization record was created.", help_text="Timestamp when this organization record was created.",
) )
updated_at = models.DateTimeField( updated_at = models.DateTimeField(
auto_now=True, auto_now=True,
verbose_name="Updated At",
editable=False,
help_text="Timestamp when this organization record was last updated.", help_text="Timestamp when this organization record was last updated.",
) )
class Meta: class Meta:
ordering = ["name"] ordering = ["name"]
indexes = [
models.Index(fields=["name"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
]
def __str__(self) -> str: def __str__(self) -> str:
"""Return a string representation of the organization.""" """Return a string representation of the organization."""
@ -101,6 +112,17 @@ class Game(models.Model):
class Meta: class Meta:
ordering = ["display_name"] ordering = ["display_name"]
indexes = [
models.Index(fields=["display_name"]),
models.Index(fields=["name"]),
models.Index(fields=["slug"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["owner"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
# For games_grid_view grouping by owner + display_name
models.Index(fields=["owner", "display_name"]),
]
def __str__(self) -> str: def __str__(self) -> str:
"""Return a string representation of the game.""" """Return a string representation of the game."""
@ -115,8 +137,10 @@ class Game(models.Model):
@property @property
def organizations(self) -> models.QuerySet[Organization]: def organizations(self) -> models.QuerySet[Organization]:
"""Return all organizations that own games with campaigns for this game.""" """Return orgs that own games with campaigns for this game."""
return Organization.objects.filter(games__drop_campaigns__game=self).distinct() return Organization.objects.filter(
games__drop_campaigns__game=self,
).distinct()
@property @property
def get_game_name(self) -> str: def get_game_name(self) -> str:
@ -131,17 +155,28 @@ class Game(models.Model):
@property @property
def twitch_directory_url(self) -> str: def twitch_directory_url(self) -> str:
"""Return the Twitch directory URL for this game with drops filter if slug is available.""" """Return Twitch directory URL with drops filter when slug exists."""
if self.slug: if self.slug:
return f"https://www.twitch.tv/directory/category/{self.slug}?filter=drops" return f"https://www.twitch.tv/directory/category/{self.slug}?filter=drops"
return "" return ""
@property
def box_art_best_url(self) -> str:
"""Return the best available URL for the game's box art (local first)."""
try:
if self.box_art_file and getattr(self.box_art_file, "url", None):
return self.box_art_file.url
except (AttributeError, OSError, ValueError) as exc:
logger.debug("Failed to resolve Game.box_art_file url: %s", exc)
return self.box_art or ""
# MARK: TwitchGame # MARK: TwitchGame
class TwitchGameData(models.Model): class TwitchGameData(models.Model):
"""Represents game metadata returned from the Twitch API. """Represents game metadata returned from the Twitch API.
This mirrors the public Twitch API fields for a game and is tied to the local `Game` model where possible. This mirrors the public Twitch API fields for a game and is tied to the
local `Game` model where possible.
Fields: Fields:
id: Twitch game id (primary key) id: Twitch game id (primary key)
@ -151,7 +186,11 @@ class TwitchGameData(models.Model):
igdb_id: Optional IGDB id for the game igdb_id: Optional IGDB id for the game
""" """
twitch_id = models.TextField(primary_key=True, verbose_name="Twitch Game ID") twitch_id = models.TextField(
verbose_name="Twitch Game ID",
unique=True,
help_text="The Twitch ID for this game.",
)
game = models.ForeignKey( game = models.ForeignKey(
Game, Game,
on_delete=models.SET_NULL, on_delete=models.SET_NULL,
@ -159,7 +198,7 @@ class TwitchGameData(models.Model):
null=True, null=True,
blank=True, blank=True,
verbose_name="Game", verbose_name="Game",
help_text="Optional link to the local Game record for this Twitch game.", help_text=("Optional link to the local Game record for this Twitch game."),
) )
name = models.TextField(blank=True, default="", verbose_name="Name") name = models.TextField(blank=True, default="", verbose_name="Name")
@ -168,15 +207,29 @@ class TwitchGameData(models.Model):
blank=True, blank=True,
default="", default="",
verbose_name="Box art URL", verbose_name="Box art URL",
help_text="URL template with {width}x{height} placeholders for the box art image.", help_text=("URL template with {width}x{height} placeholders for the box art image."),
) )
igdb_id = models.TextField(blank=True, default="", verbose_name="IGDB ID") igdb_id = models.TextField(blank=True, default="", verbose_name="IGDB ID")
added_at = models.DateTimeField(auto_now_add=True, help_text="Record creation time.") added_at = models.DateTimeField(
updated_at = models.DateTimeField(auto_now=True, help_text="Record last update time.") auto_now_add=True,
help_text="Record creation time.",
)
updated_at = models.DateTimeField(
auto_now=True,
help_text="Record last update time.",
)
class Meta: class Meta:
ordering = ["name"] ordering = ["name"]
indexes = [
models.Index(fields=["name"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["game"]),
models.Index(fields=["igdb_id"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
]
def __str__(self) -> str: def __str__(self) -> str:
return self.name or self.twitch_id return self.name or self.twitch_id
@ -187,9 +240,9 @@ class Channel(models.Model):
"""Represents a Twitch channel that can participate in drop campaigns.""" """Represents a Twitch channel that can participate in drop campaigns."""
twitch_id = models.TextField( twitch_id = models.TextField(
primary_key=True,
verbose_name="Channel ID", verbose_name="Channel ID",
help_text="The unique Twitch identifier for the channel.", help_text="The unique Twitch identifier for the channel.",
unique=True,
) )
name = models.TextField( name = models.TextField(
verbose_name="Username", verbose_name="Username",
@ -197,7 +250,7 @@ class Channel(models.Model):
) )
display_name = models.TextField( display_name = models.TextField(
verbose_name="Display Name", verbose_name="Display Name",
help_text="The display name of the channel (with proper capitalization).", help_text=("The display name of the channel (with proper capitalization)."),
) )
added_at = models.DateTimeField( added_at = models.DateTimeField(
@ -211,6 +264,13 @@ class Channel(models.Model):
class Meta: class Meta:
ordering = ["display_name"] ordering = ["display_name"]
indexes = [
models.Index(fields=["display_name"]),
models.Index(fields=["name"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
]
def __str__(self) -> str: def __str__(self) -> str:
"""Return a string representation of the channel.""" """Return a string representation of the channel."""
@ -222,8 +282,9 @@ class DropCampaign(models.Model):
"""Represents a Twitch drop campaign.""" """Represents a Twitch drop campaign."""
twitch_id = models.TextField( twitch_id = models.TextField(
primary_key=True, unique=True,
help_text="Unique Twitch identifier for the campaign.", editable=False,
help_text="The Twitch ID for this campaign.",
) )
name = models.TextField( name = models.TextField(
help_text="Name of the drop campaign.", help_text="Name of the drop campaign.",
@ -289,6 +350,12 @@ class DropCampaign(models.Model):
help_text="Game associated with this campaign.", help_text="Game associated with this campaign.",
) )
operation_name = models.TextField(
blank=True,
default="",
help_text="The GraphQL operation name used to fetch this campaign data (e.g., 'ViewerDropsDashboard').",
)
added_at = models.DateTimeField( added_at = models.DateTimeField(
auto_now_add=True, auto_now_add=True,
help_text="Timestamp when this campaign record was created.", help_text="Timestamp when this campaign record was created.",
@ -300,6 +367,25 @@ class DropCampaign(models.Model):
class Meta: class Meta:
ordering = ["-start_at"] ordering = ["-start_at"]
indexes = [
models.Index(fields=["-start_at"]),
models.Index(fields=["end_at"]),
models.Index(fields=["game"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["name"]),
models.Index(fields=["description"]),
models.Index(fields=["is_account_connected"]),
models.Index(fields=["allow_is_enabled"]),
models.Index(fields=["operation_name"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
# Composite indexes for common queries
models.Index(fields=["game", "-start_at"]),
models.Index(fields=["start_at", "end_at"]),
# For dashboard and game_detail active campaign filtering
models.Index(fields=["start_at", "end_at", "game"]),
models.Index(fields=["end_at", "-start_at"]),
]
def __str__(self) -> str: def __str__(self) -> str:
return self.name return self.name
@ -319,7 +405,8 @@ class DropCampaign(models.Model):
Examples: Examples:
"Ravendawn - July 2" -> "July 2" "Ravendawn - July 2" -> "July 2"
"Party Animals Twitch Drop" -> "Twitch Drop" "Party Animals Twitch Drop" -> "Twitch Drop"
"Skull & Bones - Closed Beta" -> "Closed Beta" (& is replaced with "and") "Skull & Bones - Closed Beta" -> "Closed Beta" (& is replaced
with "and")
""" """
if not self.game or not self.game.display_name: if not self.game or not self.game.display_name:
return self.name return self.name
@ -343,12 +430,15 @@ class DropCampaign(models.Model):
@property @property
def image_best_url(self) -> str: def image_best_url(self) -> str:
"""Return the best available URL for the campaign image (local first).""" """Return the best URL for the campaign image (local first)."""
try: try:
if self.image_file and getattr(self.image_file, "url", None): if self.image_file and getattr(self.image_file, "url", None):
return self.image_file.url return self.image_file.url
except (AttributeError, OSError, ValueError) as exc: except (AttributeError, OSError, ValueError) as exc:
logger.debug("Failed to resolve DropCampaign.image_file url: %s", exc) logger.debug(
"Failed to resolve DropCampaign.image_file url: %s",
exc,
)
return self.image_url or "" return self.image_url or ""
@ -357,8 +447,9 @@ class DropBenefit(models.Model):
"""Represents a benefit that can be earned from a drop.""" """Represents a benefit that can be earned from a drop."""
twitch_id = models.TextField( twitch_id = models.TextField(
primary_key=True, unique=True,
help_text="Unique Twitch identifier for the benefit.", help_text="The Twitch ID for this benefit.",
editable=False,
) )
name = models.TextField( name = models.TextField(
blank=True, blank=True,
@ -379,14 +470,14 @@ class DropBenefit(models.Model):
) )
created_at = models.DateTimeField( created_at = models.DateTimeField(
null=True, null=True,
help_text="Timestamp when the benefit was created. This is from Twitch API and not auto-generated.", help_text=("Timestamp when the benefit was created. This is from Twitch API and not auto-generated."),
) )
entitlement_limit = models.PositiveIntegerField( entitlement_limit = models.PositiveIntegerField(
default=1, default=1,
help_text="Maximum number of times this benefit can be earned.", help_text="Maximum number of times this benefit can be earned.",
) )
# TODO(TheLovinator): Check if this should be default True or False # noqa: TD003 # NOTE: Default may need revisiting once requirements are confirmed.
is_ios_available = models.BooleanField( is_ios_available = models.BooleanField(
default=False, default=False,
help_text="Whether the benefit is available on iOS.", help_text="Whether the benefit is available on iOS.",
@ -409,19 +500,90 @@ class DropBenefit(models.Model):
class Meta: class Meta:
ordering = ["-created_at"] ordering = ["-created_at"]
indexes = [
models.Index(fields=["-created_at"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["name"]),
models.Index(fields=["distribution_type"]),
models.Index(fields=["is_ios_available"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
]
def __str__(self) -> str: def __str__(self) -> str:
"""Return a string representation of the drop benefit.""" """Return a string representation of the drop benefit."""
return self.name return self.name
@property
def image_best_url(self) -> str:
"""Return the best URL for the benefit image (local first)."""
try:
if self.image_file and getattr(self.image_file, "url", None):
return self.image_file.url
except (AttributeError, OSError, ValueError) as exc:
logger.debug(
"Failed to resolve DropBenefit.image_file url: %s",
exc,
)
return self.image_asset_url or ""
# MARK: DropBenefitEdge
class DropBenefitEdge(models.Model):
"""Link a TimeBasedDrop to a DropBenefit."""
drop = models.ForeignKey(
to="twitch.TimeBasedDrop",
on_delete=models.CASCADE,
help_text="The time-based drop in this relationship.",
)
benefit = models.ForeignKey(
DropBenefit,
on_delete=models.CASCADE,
help_text="The benefit in this relationship.",
)
entitlement_limit = models.PositiveIntegerField(
default=1,
help_text="Max times this benefit can be claimed for this drop.",
)
added_at = models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this drop-benefit edge was created.",
)
updated_at = models.DateTimeField(
auto_now=True,
help_text="Timestamp when this drop-benefit edge was last updated.",
)
class Meta:
constraints = [
models.UniqueConstraint(
fields=("drop", "benefit"),
name="unique_drop_benefit",
),
]
indexes = [
models.Index(fields=["drop"]),
models.Index(fields=["benefit"]),
models.Index(fields=["entitlement_limit"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
]
def __str__(self) -> str:
"""Return a string representation of the drop benefit edge."""
return f"{self.drop.name} - {self.benefit.name}"
# MARK: TimeBasedDrop # MARK: TimeBasedDrop
class TimeBasedDrop(models.Model): class TimeBasedDrop(models.Model):
"""Represents a time-based drop in a drop campaign.""" """Represents a time-based drop in a drop campaign."""
twitch_id = models.TextField( twitch_id = models.TextField(
primary_key=True, unique=True,
help_text="Unique Twitch identifier for the time-based drop.", editable=False,
help_text="The Twitch ID for this time-based drop.",
) )
name = models.TextField( name = models.TextField(
help_text="Name of the time-based drop.", help_text="Name of the time-based drop.",
@ -455,7 +617,7 @@ class TimeBasedDrop(models.Model):
) )
benefits = models.ManyToManyField( benefits = models.ManyToManyField(
DropBenefit, DropBenefit,
through="DropBenefitEdge", through=DropBenefitEdge,
related_name="drops", related_name="drops",
help_text="Benefits unlocked by this drop.", help_text="Benefits unlocked by this drop.",
) )
@ -466,50 +628,27 @@ class TimeBasedDrop(models.Model):
) )
updated_at = models.DateTimeField( updated_at = models.DateTimeField(
auto_now=True, auto_now=True,
help_text="Timestamp when this time-based drop record was last updated.", help_text=("Timestamp when this time-based drop record was last updated."),
) )
class Meta: class Meta:
ordering = ["start_at"] ordering = ["start_at"]
indexes = [
models.Index(fields=["start_at"]),
models.Index(fields=["end_at"]),
models.Index(fields=["campaign"]),
models.Index(fields=["twitch_id"]),
models.Index(fields=["name"]),
models.Index(fields=["required_minutes_watched"]),
models.Index(fields=["required_subs"]),
models.Index(fields=["added_at"]),
models.Index(fields=["updated_at"]),
# Composite indexes for common queries
models.Index(fields=["campaign", "start_at"]),
models.Index(fields=["campaign", "required_minutes_watched"]),
models.Index(fields=["start_at", "end_at"]),
]
def __str__(self) -> str: def __str__(self) -> str:
"""Return a string representation of the time-based drop.""" """Return a string representation of the time-based drop."""
return self.name return self.name
# MARK: DropBenefitEdge
class DropBenefitEdge(models.Model):
"""Represents the relationship between a TimeBasedDrop and a DropBenefit."""
drop = models.ForeignKey(
TimeBasedDrop,
on_delete=models.CASCADE,
help_text="The time-based drop in this relationship.",
)
benefit = models.ForeignKey(
DropBenefit,
on_delete=models.CASCADE,
help_text="The benefit in this relationship.",
)
entitlement_limit = models.PositiveIntegerField(
default=1,
help_text="Max times this benefit can be claimed for this drop.",
)
added_at = models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this drop-benefit edge was created.",
)
updated_at = models.DateTimeField(
auto_now=True,
help_text="Timestamp when this drop-benefit edge was last updated.",
)
class Meta:
constraints = [
models.UniqueConstraint(fields=("drop", "benefit"), name="unique_drop_benefit"),
]
def __str__(self) -> str:
"""Return a string representation of the drop benefit edge."""
return f"{self.drop.name} - {self.benefit.name}"

View file

@ -4,9 +4,10 @@ from typing import Literal
from pydantic import BaseModel from pydantic import BaseModel
from pydantic import Field from pydantic import Field
from pydantic import field_validator
class Organization(BaseModel): class OrganizationSchema(BaseModel):
"""Schema for Twitch Organization objects.""" """Schema for Twitch Organization objects."""
twitch_id: str = Field(alias="id") twitch_id: str = Field(alias="id")
@ -21,7 +22,7 @@ class Organization(BaseModel):
} }
class Game(BaseModel): class GameSchema(BaseModel):
"""Schema for Twitch Game objects.""" """Schema for Twitch Game objects."""
twitch_id: str = Field(alias="id") twitch_id: str = Field(alias="id")
@ -51,19 +52,74 @@ class DropCampaignSelfEdge(BaseModel):
} }
class DropBenefitSchema(BaseModel):
"""Schema for a benefit in a DropBenefitEdge."""
twitch_id: str = Field(alias="id")
name: str
image_asset_url: str = Field(alias="imageAssetURL")
created_at: str | None = Field(alias="createdAt")
entitlement_limit: int = Field(alias="entitlementLimit")
is_ios_available: bool = Field(alias="isIosAvailable")
distribution_type: str = Field(alias="distributionType")
type_name: Literal["Benefit"] = Field(alias="__typename")
model_config = {
"extra": "forbid",
"validate_assignment": True,
"strict": True,
"populate_by_name": True,
}
class DropBenefitEdgeSchema(BaseModel):
"""Schema for a benefit edge in a TimeBasedDrop."""
benefit: DropBenefitSchema
entitlement_limit: int = Field(alias="entitlementLimit")
model_config = {
"extra": "forbid",
"validate_assignment": True,
"strict": True,
"populate_by_name": True,
}
class TimeBasedDropSchema(BaseModel):
"""Schema for a TimeBasedDrop in a DropCampaign."""
twitch_id: str = Field(alias="id")
name: str
required_minutes_watched: int | None = Field(alias="requiredMinutesWatched")
required_subs: int = Field(alias="requiredSubs")
start_at: str | None = Field(alias="startAt")
end_at: str | None = Field(alias="endAt")
benefit_edges: list[DropBenefitEdgeSchema] = Field(alias="benefitEdges")
type_name: Literal["TimeBasedDrop"] = Field(alias="__typename")
model_config = {
"extra": "forbid",
"validate_assignment": True,
"strict": True,
"populate_by_name": True,
}
class DropCampaign(BaseModel): class DropCampaign(BaseModel):
"""Schema for Twitch DropCampaign objects.""" """Schema for Twitch DropCampaign objects."""
twitch_id: str = Field(alias="id") twitch_id: str = Field(alias="id")
name: str name: str
owner: Organization owner: OrganizationSchema
game: Game game: GameSchema
status: Literal["ACTIVE", "EXPIRED"] status: Literal["ACTIVE", "EXPIRED", "UPCOMING"]
start_at: str = Field(alias="startAt") start_at: str = Field(alias="startAt")
end_at: str = Field(alias="endAt") end_at: str = Field(alias="endAt")
details_url: str = Field(alias="detailsURL") details_url: str = Field(alias="detailsURL")
account_link_url: str = Field(alias="accountLinkURL") account_link_url: str = Field(alias="accountLinkURL")
self: DropCampaignSelfEdge self: DropCampaignSelfEdge
time_based_drops: list[TimeBasedDropSchema] = Field(default=[], alias="timeBasedDrops")
type_name: Literal["DropCampaign"] = Field(alias="__typename") type_name: Literal["DropCampaign"] = Field(alias="__typename")
model_config = { model_config = {
@ -93,7 +149,7 @@ class CurrentUser(BaseModel):
class Data(BaseModel): class Data(BaseModel):
"""Schema for the data field in Twitch API responses.""" """Schema for the data field in Twitch API responses."""
current_user: CurrentUser = Field(alias="currentUser") current_user: CurrentUser | None = Field(alias="currentUser")
model_config = { model_config = {
"extra": "forbid", "extra": "forbid",
@ -102,13 +158,40 @@ class Data(BaseModel):
"populate_by_name": True, "populate_by_name": True,
} }
@field_validator("current_user", mode="before")
@classmethod
def empty_dict_to_none(cls, v: dict) -> dict | None:
"""Convert empty dicts to None for current_user field.
Args:
v (dict): The value to validate.
Returns:
dict | None: None when input is an empty dict; otherwise the value.
"""
if v == {}:
return None
return v
class Extensions(BaseModel): class Extensions(BaseModel):
"""Schema for the extensions field in Twitch API responses.""" """Schema for the extensions field in GraphQL responses."""
duration_milliseconds: int = Field(alias="durationMilliseconds") operation_name: str | None = Field(default=None, alias="operationName")
operation_name: Literal["ViewerDropsDashboard"] = Field(alias="operationName")
request_id: str = Field(alias="requestID") model_config = {
"extra": "ignore",
"validate_assignment": True,
"strict": True,
"populate_by_name": True,
}
class GraphQLResponse(BaseModel):
"""Schema for the complete GraphQL response from Twitch API."""
data: Data
extensions: Extensions | None = None
model_config = { model_config = {
"extra": "forbid", "extra": "forbid",
@ -116,16 +199,3 @@ class Extensions(BaseModel):
"strict": True, "strict": True,
"populate_by_name": True, "populate_by_name": True,
} }
class ViewerDropsDashboardPayload(BaseModel):
"""Schema for the ViewerDropsDashboard response."""
data: Data
extensions: Extensions
model_config = {
"extra": "forbid",
"validate_assignment": True,
"strict": True,
}

0
twitch/tests/__init__.py Normal file
View file

5
twitch/tests/fixtures/README.md vendored Normal file
View file

@ -0,0 +1,5 @@
# Example files for fixtures used in tests
## ViewerDropsDashboard
- Fields extensions.durationMilliseconds and extensions.requestID has been removed from the example file as they are not relevant to the schema validation.

230
twitch/tests/test_views.py Normal file
View file

@ -0,0 +1,230 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from typing import Any
from typing import Literal
import pytest
from django.test.client import _MonkeyPatchedWSGIResponse
from django.test.utils import ContextList
from twitch.models import DropBenefit
from twitch.models import DropCampaign
from twitch.models import Game
from twitch.models import Organization
from twitch.models import TimeBasedDrop
if TYPE_CHECKING:
from django.test import Client
from django.test.client import _MonkeyPatchedWSGIResponse
from django.test.utils import ContextList
@pytest.mark.django_db
class TestSearchView:
    """Tests for the search_view function."""

    # Single source of truth mapping sample_data fixture keys to the keys
    # used in the view's "results" context dict. Previously this dict was
    # duplicated inside two parametrized tests, which could drift apart.
    RESULT_KEY_MAP = {
        "org": "organizations",
        "game": "games",
        "campaign": "campaigns",
        "drop": "drops",
        "benefit": "benefits",
    }

    @pytest.fixture
    def sample_data(self) -> dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit]:
        """Create sample data for testing.

        Returns:
            A dictionary containing the created sample data, keyed by
            "org", "game", "campaign", "drop", and "benefit".
        """
        org: Organization = Organization.objects.create(twitch_id="123", name="Test Organization")
        game: Game = Game.objects.create(
            twitch_id="456",
            name="test_game",
            display_name="Test Game",
            owner=org,
        )
        campaign: DropCampaign = DropCampaign.objects.create(
            twitch_id="789",
            name="Test Campaign",
            description="A test campaign",
            game=game,
        )
        drop: TimeBasedDrop = TimeBasedDrop.objects.create(
            twitch_id="1011",
            name="Test Drop",
            campaign=campaign,
        )
        benefit: DropBenefit = DropBenefit.objects.create(
            twitch_id="1213",
            name="Test Benefit",
        )
        return {
            "org": org,
            "game": game,
            "campaign": campaign,
            "drop": drop,
            "benefit": benefit,
        }

    @staticmethod
    def _get_context(response: _MonkeyPatchedWSGIResponse) -> ContextList | dict[str, Any]:
        """Normalize Django test response context to a plain dict.

        Args:
            response: The Django test response.

        Returns:
            The context as a plain dictionary.
        """
        context: ContextList | dict[str, Any] = response.context
        if isinstance(context, list):  # Django can return a list of contexts
            # The last context is the one used to render the final template.
            context = context[-1]
        return context

    def test_empty_query(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
    ) -> None:
        """Test search with empty query returns no results."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200
        assert "results" in context
        assert context["results"] == {}

    def test_no_query_parameter(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
    ) -> None:
        """Test search with no query parameter returns no results."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200
        assert context["results"] == {}

    @pytest.mark.parametrize(
        "model_key",
        ["org", "game", "campaign", "drop", "benefit"],
    )
    def test_short_query_istartswith(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
        model_key: Literal["org", "game", "campaign", "drop", "benefit"],
    ) -> None:
        """Test short query (< 3 chars) uses istartswith for all models."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=Te")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200

        result_key = self.RESULT_KEY_MAP[model_key]
        assert sample_data[model_key] in context["results"][result_key]

    @pytest.mark.parametrize(
        "model_key",
        ["org", "game", "campaign", "drop", "benefit"],
    )
    def test_long_query_icontains(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
        model_key: Literal["org", "game", "campaign", "drop", "benefit"],
    ) -> None:
        """Test long query (>= 3 chars) uses icontains for all models."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=Test")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200

        result_key = self.RESULT_KEY_MAP[model_key]
        assert sample_data[model_key] in context["results"][result_key]

    def test_campaign_description_search(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
    ) -> None:
        """Test that campaign description is searchable."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=campaign")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200
        assert sample_data["campaign"] in context["results"]["campaigns"]

    def test_game_display_name_search(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
    ) -> None:
        """Test that game display_name is searchable."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=Game")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200
        assert sample_data["game"] in context["results"]["games"]

    def test_query_no_matches(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
    ) -> None:
        """Test search with query that has no matches."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=xyz")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert response.status_code == 200
        # Every result bucket should exist but contain nothing.
        for result_list in context["results"].values():
            assert len(result_list) == 0

    def test_context_contains_query(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
    ) -> None:
        """Test that context contains the search query."""
        query = "Test"
        response: _MonkeyPatchedWSGIResponse = client.get(f"/search/?q={query}")
        context: ContextList | dict[str, Any] = self._get_context(response)

        assert context["query"] == query

    @pytest.mark.parametrize(
        ("model_key", "related_field"),
        [
            ("campaigns", "game"),
            ("drops", "campaign"),
        ],
    )
    def test_select_related_optimization(
        self,
        client: Client,
        sample_data: dict[str, Organization | Game | DropCampaign | TimeBasedDrop | DropBenefit],
        model_key: str,
        related_field: str,
    ) -> None:
        """Test that queries use select_related for performance optimization."""
        response: _MonkeyPatchedWSGIResponse = client.get("/search/?q=Test")
        context: ContextList | dict[str, Any] = self._get_context(response)

        results = context["results"][model_key]
        assert len(results) > 0

        # Verify the related object is accessible without additional query
        first_result = results[0]
        assert hasattr(first_result, related_field)

View file

@ -18,15 +18,39 @@ urlpatterns: list[URLPattern] = [
path("", views.dashboard, name="dashboard"), path("", views.dashboard, name="dashboard"),
path("search/", views.search_view, name="search"), path("search/", views.search_view, name="search"),
path("debug/", views.debug_view, name="debug"), path("debug/", views.debug_view, name="debug"),
path("campaigns/", views.DropCampaignListView.as_view(), name="campaign_list"), path(
path("campaigns/<str:pk>/", views.DropCampaignDetailView.as_view(), name="campaign_detail"), "campaigns/",
views.DropCampaignListView.as_view(),
name="campaign_list",
),
path(
"campaigns/<str:pk>/",
views.DropCampaignDetailView.as_view(),
name="campaign_detail",
),
path("games/", views.GamesGridView.as_view(), name="game_list"), path("games/", views.GamesGridView.as_view(), name="game_list"),
path("games/list/", views.GamesListView.as_view(), name="game_list_simple"), path(
path("games/<str:pk>/", views.GameDetailView.as_view(), name="game_detail"), "games/list/",
views.GamesListView.as_view(),
name="game_list_simple",
),
path(
"games/<str:pk>/",
views.GameDetailView.as_view(),
name="game_detail",
),
path("organizations/", views.OrgListView.as_view(), name="org_list"), path("organizations/", views.OrgListView.as_view(), name="org_list"),
path("organizations/<str:pk>/", views.OrgDetailView.as_view(), name="organization_detail"), path(
"organizations/<str:pk>/",
views.OrgDetailView.as_view(),
name="organization_detail",
),
path("channels/", views.ChannelListView.as_view(), name="channel_list"), path("channels/", views.ChannelListView.as_view(), name="channel_list"),
path("channels/<str:pk>/", views.ChannelDetailView.as_view(), name="channel_detail"), path(
"channels/<str:pk>/",
views.ChannelDetailView.as_view(),
name="channel_detail",
),
path("rss/organizations/", OrganizationFeed(), name="organization_feed"), path("rss/organizations/", OrganizationFeed(), name="organization_feed"),
path("rss/games/", GameFeed(), name="game_feed"), path("rss/games/", GameFeed(), name="game_feed"),
path("rss/campaigns/", DropCampaignFeed(), name="campaign_feed"), path("rss/campaigns/", DropCampaignFeed(), name="campaign_feed"),

View file

@ -10,7 +10,7 @@ if TYPE_CHECKING:
from datetime import datetime from datetime import datetime
@lru_cache(maxsize=40 * 1024) @lru_cache(maxsize=40 * 40 * 1024)
def parse_date(value: str) -> datetime | None: def parse_date(value: str) -> datetime | None:
"""Parse a datetime string into a timezone-aware datetime using dateparser. """Parse a datetime string into a timezone-aware datetime using dateparser.
@ -24,7 +24,10 @@ def parse_date(value: str) -> datetime | None:
"RETURN_AS_TIMEZONE_AWARE": True, "RETURN_AS_TIMEZONE_AWARE": True,
"CACHE_SIZE_LIMIT": 0, "CACHE_SIZE_LIMIT": 0,
} }
dt: datetime | None = dateparser.parse(date_string=value, settings=dateparser_settings) # pyright: ignore[reportArgumentType] dt: datetime | None = dateparser.parse(
date_string=value,
settings=dateparser_settings, # pyright: ignore[reportArgumentType]
)
if not dt: if not dt:
return None return None

View file

@ -8,12 +8,6 @@ from collections import defaultdict
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from typing import Any from typing import Any
if TYPE_CHECKING:
from django.db.models.manager import BaseManager
from django.contrib.postgres.search import SearchQuery
from django.contrib.postgres.search import SearchRank
from django.contrib.postgres.search import SearchVector
from django.core.serializers import serialize from django.core.serializers import serialize
from django.db.models import Count from django.db.models import Count
from django.db.models import F from django.db.models import F
@ -22,6 +16,7 @@ from django.db.models import Prefetch
from django.db.models import Q from django.db.models import Q
from django.db.models.functions import Trim from django.db.models.functions import Trim
from django.db.models.query import QuerySet from django.db.models.query import QuerySet
from django.http import Http404
from django.http import HttpRequest from django.http import HttpRequest
from django.http import HttpResponse from django.http import HttpResponse
from django.shortcuts import render from django.shortcuts import render
@ -44,7 +39,7 @@ if TYPE_CHECKING:
from django.http import HttpRequest from django.http import HttpRequest
from django.http import HttpResponse from django.http import HttpResponse
logger: logging.Logger = logging.getLogger(__name__) logger: logging.Logger = logging.getLogger("ttvdrops.views")
MIN_QUERY_LENGTH_FOR_FTS = 3 MIN_QUERY_LENGTH_FOR_FTS = 3
MIN_SEARCH_RANK = 0.05 MIN_SEARCH_RANK = 0.05
@ -60,55 +55,41 @@ def search_view(request: HttpRequest) -> HttpResponse:
Returns: Returns:
HttpResponse: The rendered search results. HttpResponse: The rendered search results.
""" """
query = request.GET.get("q", "") query: str = request.GET.get("q", "")
results = {} results: dict[str, QuerySet] = {}
if query: if query:
if len(query) < MIN_QUERY_LENGTH_FOR_FTS: if len(query) < MIN_QUERY_LENGTH_FOR_FTS:
results["organizations"] = Organization.objects.filter(name__istartswith=query) results["organizations"] = Organization.objects.filter(name__istartswith=query)
results["games"] = Game.objects.filter(Q(name__istartswith=query) | Q(display_name__istartswith=query)) results["games"] = Game.objects.filter(Q(name__istartswith=query) | Q(display_name__istartswith=query))
results["campaigns"] = DropCampaign.objects.filter(Q(name__istartswith=query) | Q(description__icontains=query)).select_related("game") results["campaigns"] = DropCampaign.objects.filter(
Q(name__istartswith=query) | Q(description__icontains=query),
).select_related("game")
results["drops"] = TimeBasedDrop.objects.filter(name__istartswith=query).select_related("campaign") results["drops"] = TimeBasedDrop.objects.filter(name__istartswith=query).select_related("campaign")
results["benefits"] = DropBenefit.objects.filter(name__istartswith=query) results["benefits"] = DropBenefit.objects.filter(name__istartswith=query)
else: else:
search_query = SearchQuery(query) # SQLite-compatible text search using icontains
results["organizations"] = Organization.objects.filter(
# Search Organizations name__icontains=query,
org_vector = SearchVector("name")
org_results = Organization.objects.annotate(rank=SearchRank(org_vector, search_query)).filter(rank__gte=MIN_SEARCH_RANK).order_by("-rank")
results["organizations"] = org_results
# Search Games
game_vector = SearchVector("name", "display_name")
game_results = Game.objects.annotate(rank=SearchRank(game_vector, search_query)).filter(rank__gte=MIN_SEARCH_RANK).order_by("-rank")
results["games"] = game_results
# Search DropCampaigns
campaign_vector = SearchVector("name", "description")
campaign_results = (
DropCampaign.objects.annotate(rank=SearchRank(campaign_vector, search_query))
.filter(rank__gte=MIN_SEARCH_RANK)
.select_related("game")
.order_by("-rank")
) )
results["campaigns"] = campaign_results results["games"] = Game.objects.filter(
Q(name__icontains=query) | Q(display_name__icontains=query),
# Search TimeBasedDrops )
drop_vector = SearchVector("name") results["campaigns"] = DropCampaign.objects.filter(
drop_results = ( Q(name__icontains=query) | Q(description__icontains=query),
TimeBasedDrop.objects.annotate(rank=SearchRank(drop_vector, search_query)) ).select_related("game")
.filter(rank__gte=MIN_SEARCH_RANK) results["drops"] = TimeBasedDrop.objects.filter(
.select_related("campaign") name__icontains=query,
.order_by("-rank") ).select_related("campaign")
results["benefits"] = DropBenefit.objects.filter(
name__icontains=query,
) )
results["drops"] = drop_results
# Search DropBenefits return render(
benefit_vector = SearchVector("name") request,
benefit_results = DropBenefit.objects.annotate(rank=SearchRank(benefit_vector, search_query)).filter(rank__gte=MIN_SEARCH_RANK).order_by("-rank") "twitch/search_results.html",
results["benefits"] = benefit_results {"query": query, "results": results},
)
return render(request, "twitch/search_results.html", {"query": query, "results": results})
# MARK: /organizations/ # MARK: /organizations/
@ -128,6 +109,34 @@ class OrgDetailView(DetailView):
template_name = "twitch/organization_detail.html" template_name = "twitch/organization_detail.html"
context_object_name = "organization" context_object_name = "organization"
def get_object(
self,
queryset: QuerySet[Organization] | None = None,
) -> Organization:
"""Get the organization object using twitch_id.
Args:
queryset: Optional queryset to use.
Returns:
Organization: The organization object.
Raises:
Http404: If the organization is not found.
"""
if queryset is None:
queryset = self.get_queryset()
# Use twitch_id as the lookup field since it's the primary key
pk: str | None = self.kwargs.get(self.pk_url_kwarg)
try:
org: Organization = queryset.get(twitch_id=pk)
except Organization.DoesNotExist as exc:
msg = "No organization found matching the query"
raise Http404(msg) from exc
return org
def get_context_data(self, **kwargs) -> dict[str, Any]: def get_context_data(self, **kwargs) -> dict[str, Any]:
"""Add additional context data. """Add additional context data.
@ -159,10 +168,12 @@ class OrgDetailView(DetailView):
pretty_org_data: str = json.dumps(org_data[0], indent=4) pretty_org_data: str = json.dumps(org_data[0], indent=4)
context.update({ context.update(
"games": games, {
"org_data": pretty_org_data, "games": games,
}) "org_data": pretty_org_data,
},
)
return context return context
@ -211,16 +222,19 @@ class DropCampaignListView(ListView):
return context return context
def format_and_color_json(code: str) -> str: def format_and_color_json(data: dict[str, Any] | str) -> str:
"""Format and color a JSON string for HTML display. """Format and color a JSON string for HTML display.
Args: Args:
code: The code string to format. data: Either a dictionary or a JSON string to format.
Returns: Returns:
str: The formatted code with HTML styles. str: The formatted code with HTML styles.
""" """
formatted_code: str = json.dumps(code, indent=4) if isinstance(data, dict):
formatted_code: str = json.dumps(data, indent=4)
else:
formatted_code = data
return highlight(formatted_code, JsonLexer(), HtmlFormatter()) return highlight(formatted_code, JsonLexer(), HtmlFormatter())
@ -232,7 +246,10 @@ class DropCampaignDetailView(DetailView):
template_name = "twitch/campaign_detail.html" template_name = "twitch/campaign_detail.html"
context_object_name = "campaign" context_object_name = "campaign"
def get_object(self, queryset: QuerySet[DropCampaign] | None = None) -> Model: def get_object(
self,
queryset: QuerySet[DropCampaign] | None = None,
) -> Model:
"""Get the campaign object with related data prefetched. """Get the campaign object with related data prefetched.
Args: Args:
@ -259,8 +276,12 @@ class DropCampaignDetailView(DetailView):
""" """
context: dict[str, Any] = super().get_context_data(**kwargs) context: dict[str, Any] = super().get_context_data(**kwargs)
campaign: DropCampaign = context["campaign"] campaign: DropCampaign = context["campaign"]
drops: BaseManager[TimeBasedDrop] = ( drops: QuerySet[TimeBasedDrop] = (
TimeBasedDrop.objects.filter(campaign=campaign).select_related("campaign").prefetch_related("benefits").order_by("required_minutes_watched") TimeBasedDrop.objects
.filter(campaign=campaign)
.select_related("campaign")
.prefetch_related("benefits")
.order_by("required_minutes_watched")
) )
serialized_campaign = serialize( serialized_campaign = serialize(
@ -297,11 +318,11 @@ class DropCampaignDetailView(DetailView):
drops_data: list[dict[str, Any]] = json.loads(serialized_drops) drops_data: list[dict[str, Any]] = json.loads(serialized_drops)
for i, drop in enumerate(drops): for i, drop in enumerate(drops):
benefits: list[DropBenefit] = list(drop.benefits.all()) drop_benefits: list[DropBenefit] = list(drop.benefits.all())
if benefits: if drop_benefits:
serialized_benefits = serialize( serialized_benefits = serialize(
"json", "json",
benefits, drop_benefits,
fields=("name", "image_asset_url"), fields=("name", "image_asset_url"),
) )
benefits_data = json.loads(serialized_benefits) benefits_data = json.loads(serialized_benefits)
@ -313,9 +334,6 @@ class DropCampaignDetailView(DetailView):
enhanced_drops: list[dict[str, TimeBasedDrop | datetime.datetime | str | None]] = [] enhanced_drops: list[dict[str, TimeBasedDrop | datetime.datetime | str | None]] = []
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
for drop in drops: for drop in drops:
# Ensure benefits are loaded
benefits: list[DropBenefit] = list(drop.benefits.all())
# Calculate countdown text # Calculate countdown text
if drop.end_at and drop.end_at > now: if drop.end_at and drop.end_at > now:
time_diff: datetime.timedelta = drop.end_at - now time_diff: datetime.timedelta = drop.end_at - now
@ -387,7 +405,9 @@ class GamesGridView(ListView):
) )
def get_context_data(self, **kwargs) -> dict[str, Any]: def get_context_data(self, **kwargs) -> dict[str, Any]:
"""Add additional context data with games grouped by their owning organization in a highly optimized manner. """Add additional context data.
Games are grouped by their owning organization.
Args: Args:
**kwargs: Additional arguments. **kwargs: Additional arguments.
@ -398,8 +418,9 @@ class GamesGridView(ListView):
context: dict[str, Any] = super().get_context_data(**kwargs) context: dict[str, Any] = super().get_context_data(**kwargs)
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
games_with_campaigns: BaseManager[Game] = ( games_with_campaigns: QuerySet[Game] = (
Game.objects.filter(drop_campaigns__isnull=False) Game.objects
.filter(drop_campaigns__isnull=False)
.select_related("owner") .select_related("owner")
.annotate( .annotate(
campaign_count=Count("drop_campaigns", distinct=True), campaign_count=Count("drop_campaigns", distinct=True),
@ -420,7 +441,9 @@ class GamesGridView(ListView):
if game.owner: if game.owner:
games_by_org[game.owner].append({"game": game}) games_by_org[game.owner].append({"game": game})
context["games_by_org"] = OrderedDict(sorted(games_by_org.items(), key=lambda item: item[0].name)) context["games_by_org"] = OrderedDict(
sorted(games_by_org.items(), key=lambda item: item[0].name),
)
return context return context
@ -433,6 +456,31 @@ class GameDetailView(DetailView):
template_name = "twitch/game_detail.html" template_name = "twitch/game_detail.html"
context_object_name = "game" context_object_name = "game"
def get_object(self, queryset: QuerySet[Game] | None = None) -> Game:
"""Get the game object using twitch_id as the primary key lookup.
Args:
queryset: Optional queryset to use.
Returns:
Game: The game object.
Raises:
Http404: If the game is not found.
"""
if queryset is None:
queryset = self.get_queryset()
# Use twitch_id as the lookup field since it's the primary key
pk = self.kwargs.get(self.pk_url_kwarg)
try:
game = queryset.get(twitch_id=pk)
except Game.DoesNotExist as exc:
msg = "No game found matching the query"
raise Http404(msg) from exc
return game
def get_context_data(self, **kwargs: object) -> dict[str, Any]: def get_context_data(self, **kwargs: object) -> dict[str, Any]:
"""Add additional context data. """Add additional context data.
@ -440,20 +488,27 @@ class GameDetailView(DetailView):
**kwargs: Additional arguments. **kwargs: Additional arguments.
Returns: Returns:
dict: Context data with active, upcoming, and expired campaigns. dict: Context data with active, upcoming, and expired
Expired campaigns are filtered based on either end date or status. campaigns. Expired campaigns are filtered based on
either end date or status.
""" """
context: dict[str, Any] = super().get_context_data(**kwargs) context: dict[str, Any] = super().get_context_data(**kwargs)
game: Game = self.get_object() # pyright: ignore[reportAssignmentType] game: Game = self.get_object() # pyright: ignore[reportAssignmentType]
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
all_campaigns: BaseManager[DropCampaign] = ( all_campaigns: QuerySet[DropCampaign] = (
DropCampaign.objects.filter(game=game) DropCampaign.objects
.filter(game=game)
.select_related("game__owner") .select_related("game__owner")
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"time_based_drops", "time_based_drops",
queryset=TimeBasedDrop.objects.prefetch_related(Prefetch("benefits", queryset=DropBenefit.objects.order_by("name"))), queryset=TimeBasedDrop.objects.prefetch_related(
Prefetch(
"benefits",
queryset=DropBenefit.objects.order_by("name"),
),
),
), ),
) )
.order_by("-end_at") .order_by("-end_at")
@ -462,24 +517,44 @@ class GameDetailView(DetailView):
active_campaigns: list[DropCampaign] = [ active_campaigns: list[DropCampaign] = [
campaign campaign
for campaign in all_campaigns for campaign in all_campaigns
if campaign.start_at is not None and campaign.start_at <= now and campaign.end_at is not None and campaign.end_at >= now if campaign.start_at is not None
and campaign.start_at <= now
and campaign.end_at is not None
and campaign.end_at >= now
] ]
active_campaigns.sort(key=lambda c: c.end_at if c.end_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC)) active_campaigns.sort(
key=lambda c: c.end_at if c.end_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC),
)
upcoming_campaigns: list[DropCampaign] = [campaign for campaign in all_campaigns if campaign.start_at is not None and campaign.start_at > now] upcoming_campaigns: list[DropCampaign] = [
campaign for campaign in all_campaigns if campaign.start_at is not None and campaign.start_at > now
]
upcoming_campaigns.sort(key=lambda c: c.start_at if c.start_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC)) upcoming_campaigns.sort(
key=lambda c: c.start_at if c.start_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC),
)
expired_campaigns: list[DropCampaign] = [campaign for campaign in all_campaigns if campaign.end_at is not None and campaign.end_at < now] expired_campaigns: list[DropCampaign] = [
campaign for campaign in all_campaigns if campaign.end_at is not None and campaign.end_at < now
]
# Add unique sorted benefits to each campaign object # Build campaign data with sorted benefits
campaigns_with_benefits: list[dict[str, Any]] = []
for campaign in all_campaigns: for campaign in all_campaigns:
benefits_dict: dict[int, DropBenefit] = {} # Use dict to track unique benefits by ID benefits_dict: dict[int, DropBenefit] = {}
for drop in campaign.time_based_drops.all(): # type: ignore[attr-defined] for drop in campaign.time_based_drops.all(): # type: ignore[attr-defined]
for benefit in drop.benefits.all(): for benefit in drop.benefits.all():
benefits_dict[benefit.id] = benefit benefits_dict[benefit.id] = benefit
# Sort benefits by name and attach to campaign sorted_benefits = sorted(
campaign.sorted_benefits = sorted(benefits_dict.values(), key=lambda b: b.name) # type: ignore[attr-defined] benefits_dict.values(),
key=lambda b: b.name,
)
campaigns_with_benefits.append(
{
"campaign": campaign,
"sorted_benefits": sorted_benefits,
},
)
serialized_game: str = serialize( serialized_game: str = serialize(
"json", "json",
@ -509,17 +584,22 @@ class GameDetailView(DetailView):
"is_account_connected", "is_account_connected",
), ),
) )
campaigns_data: list[dict[str, Any]] = json.loads(serialized_campaigns) campaigns_data: list[dict[str, Any]] = json.loads(
serialized_campaigns,
)
game_data[0]["fields"]["campaigns"] = campaigns_data game_data[0]["fields"]["campaigns"] = campaigns_data
context.update({ context.update(
"active_campaigns": active_campaigns, {
"upcoming_campaigns": upcoming_campaigns, "active_campaigns": active_campaigns,
"expired_campaigns": expired_campaigns, "upcoming_campaigns": upcoming_campaigns,
"owner": game.owner, "expired_campaigns": expired_campaigns,
"now": now, "campaigns_with_benefits": campaigns_with_benefits,
"game_data": format_and_color_json(json.dumps(game_data[0], indent=4)), "owner": game.owner,
}) "now": now,
"game_data": format_and_color_json(game_data[0]),
},
)
return context return context
@ -536,7 +616,8 @@ def dashboard(request: HttpRequest) -> HttpResponse:
""" """
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
active_campaigns: QuerySet[DropCampaign] = ( active_campaigns: QuerySet[DropCampaign] = (
DropCampaign.objects.filter(start_at__lte=now, end_at__gte=now) DropCampaign.objects
.filter(start_at__lte=now, end_at__gte=now)
.select_related("game__owner") .select_related("game__owner")
.prefetch_related( .prefetch_related(
"allow_channels", "allow_channels",
@ -563,14 +644,26 @@ def dashboard(request: HttpRequest) -> HttpResponse:
"campaigns": [], "campaigns": [],
} }
campaigns_by_org_game[org_id]["games"][game_id]["campaigns"].append(campaign) campaigns_by_org_game[org_id]["games"][game_id]["campaigns"].append(
campaign,
)
sorted_campaigns_by_org_game: dict[str, Any] = { sorted_campaigns_by_org_game: dict[str, Any] = {
org_id: campaigns_by_org_game[org_id] for org_id in sorted(campaigns_by_org_game.keys(), key=lambda k: campaigns_by_org_game[k]["name"]) org_id: campaigns_by_org_game[org_id]
for org_id in sorted(
campaigns_by_org_game.keys(),
key=lambda k: campaigns_by_org_game[k]["name"],
)
} }
for org_data in sorted_campaigns_by_org_game.values(): for org_data in sorted_campaigns_by_org_game.values():
org_data["games"] = {game_id: org_data["games"][game_id] for game_id in sorted(org_data["games"].keys(), key=lambda k: org_data["games"][k]["name"])} org_data["games"] = {
game_id: org_data["games"][game_id]
for game_id in sorted(
org_data["games"].keys(),
key=lambda k: org_data["games"][k]["name"],
)
}
return render( return render(
request, request,
@ -592,41 +685,54 @@ def debug_view(request: HttpRequest) -> HttpResponse:
Returns: Returns:
HttpResponse: Rendered debug template or redirect if unauthorized. HttpResponse: Rendered debug template or redirect if unauthorized.
""" """
now = timezone.now() now: datetime.datetime = timezone.now()
# Games with no assigned owner organization # Games with no assigned owner organization
games_without_owner: QuerySet[Game] = Game.objects.filter(owner__isnull=True).order_by("display_name") games_without_owner: QuerySet[Game] = Game.objects.filter(
owner__isnull=True,
).order_by("display_name")
# Campaigns with missing or obviously broken images (empty or not starting with http) # Campaigns with missing or obviously broken images
broken_image_campaigns: QuerySet[DropCampaign] = DropCampaign.objects.filter( broken_image_campaigns: QuerySet[DropCampaign] = DropCampaign.objects.filter(
Q(image_url__isnull=True) | Q(image_url__exact="") | ~Q(image_url__startswith="http"), Q(image_url__isnull=True) | Q(image_url__exact="") | ~Q(image_url__startswith="http"),
).select_related("game") ).select_related("game")
# Benefits with missing images # Benefits with missing images
broken_benefit_images: QuerySet[DropBenefit] = DropBenefit.objects.annotate(trimmed_url=Trim("image_asset_url")).filter( broken_benefit_images: QuerySet[DropBenefit] = DropBenefit.objects.annotate(
trimmed_url=Trim("image_asset_url"),
).filter(
Q(image_asset_url__isnull=True) | Q(trimmed_url__exact="") | ~Q(image_asset_url__startswith="http"), Q(image_asset_url__isnull=True) | Q(trimmed_url__exact="") | ~Q(image_asset_url__startswith="http"),
) )
# Time-based drops without any benefits # Time-based drops without any benefits
drops_without_benefits: QuerySet[TimeBasedDrop] = TimeBasedDrop.objects.filter(benefits__isnull=True).select_related("campaign__game") drops_without_benefits: QuerySet[TimeBasedDrop] = TimeBasedDrop.objects.filter(
benefits__isnull=True,
).select_related(
"campaign__game",
)
# Campaigns with invalid dates (start after end or missing either) # Campaigns with invalid dates (start after end or missing either)
invalid_date_campaigns: QuerySet[DropCampaign] = DropCampaign.objects.filter( invalid_date_campaigns: QuerySet[DropCampaign] = DropCampaign.objects.filter(
Q(start_at__gt=F("end_at")) | Q(start_at__isnull=True) | Q(end_at__isnull=True), Q(start_at__gt=F("end_at")) | Q(start_at__isnull=True) | Q(end_at__isnull=True),
).select_related("game") ).select_related("game")
# Duplicate campaign names per game. We retrieve the game's name for user-friendly display. # Duplicate campaign names per game.
# We retrieve the game's name for user-friendly display.
duplicate_name_campaigns = ( duplicate_name_campaigns = (
DropCampaign.objects.values("game_id", "game__display_name", "name") DropCampaign.objects
.annotate(name_count=Count("id")) .values("game_id", "game__display_name", "name")
.annotate(name_count=Count("twitch_id"))
.filter(name_count__gt=1) .filter(name_count__gt=1)
.order_by("game__display_name", "name") .order_by("game__display_name", "name")
) )
# Campaigns currently active but image missing # Campaigns currently active but image missing
active_missing_image: QuerySet[DropCampaign] = ( active_missing_image: QuerySet[DropCampaign] = (
DropCampaign.objects.filter(start_at__lte=now, end_at__gte=now) DropCampaign.objects
.filter(Q(image_url__isnull=True) | Q(image_url__exact="") | ~Q(image_url__startswith="http")) .filter(start_at__lte=now, end_at__gte=now)
.filter(
Q(image_url__isnull=True) | Q(image_url__exact="") | ~Q(image_url__startswith="http"),
)
.select_related("game") .select_related("game")
) )
@ -641,7 +747,11 @@ def debug_view(request: HttpRequest) -> HttpResponse:
"active_missing_image": active_missing_image, "active_missing_image": active_missing_image,
} }
return render(request, "twitch/debug.html", context) return render(
request,
"twitch/debug.html",
context,
)
# MARK: /games/list/ # MARK: /games/list/
@ -700,9 +810,13 @@ class ChannelListView(ListView):
search_query: str | None = self.request.GET.get("search") search_query: str | None = self.request.GET.get("search")
if search_query: if search_query:
queryset = queryset.filter(Q(name__icontains=search_query) | Q(display_name__icontains=search_query)) queryset = queryset.filter(
Q(name__icontains=search_query) | Q(display_name__icontains=search_query),
)
return queryset.annotate(campaign_count=Count("allowed_campaigns", distinct=True)).order_by("-campaign_count", "name") return queryset.annotate(
campaign_count=Count("allowed_campaigns", distinct=True),
).order_by("-campaign_count", "name")
def get_context_data(self, **kwargs) -> dict[str, Any]: def get_context_data(self, **kwargs) -> dict[str, Any]:
"""Add additional context data. """Add additional context data.
@ -726,6 +840,31 @@ class ChannelDetailView(DetailView):
template_name = "twitch/channel_detail.html" template_name = "twitch/channel_detail.html"
context_object_name = "channel" context_object_name = "channel"
def get_object(self, queryset: QuerySet[Channel] | None = None) -> Channel:
"""Get the channel object using twitch_id as the primary key lookup.
Args:
queryset: Optional queryset to use.
Returns:
Channel: The channel object.
Raises:
Http404: If the channel is not found.
"""
if queryset is None:
queryset = self.get_queryset()
# Use twitch_id as the lookup field since it's the primary key
pk = self.kwargs.get(self.pk_url_kwarg)
try:
channel = queryset.get(twitch_id=pk)
except Channel.DoesNotExist as exc:
msg = "No channel found matching the query"
raise Http404(msg) from exc
return channel
def get_context_data(self, **kwargs: object) -> dict[str, Any]: def get_context_data(self, **kwargs: object) -> dict[str, Any]:
"""Add additional context data. """Add additional context data.
@ -733,20 +872,24 @@ class ChannelDetailView(DetailView):
**kwargs: Additional arguments. **kwargs: Additional arguments.
Returns: Returns:
dict: Context data with active, upcoming, and expired campaigns for this channel. dict: Context data with active, upcoming, and expired campaigns.
""" """
context: dict[str, Any] = super().get_context_data(**kwargs) context: dict[str, Any] = super().get_context_data(**kwargs)
channel: Channel = self.get_object() # pyright: ignore[reportAssignmentType] channel: Channel = self.get_object() # pyright: ignore[reportAssignmentType]
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
all_campaigns: QuerySet[DropCampaign] = ( all_campaigns: QuerySet[DropCampaign] = (
DropCampaign.objects.filter(allow_channels=channel) DropCampaign.objects
.filter(allow_channels=channel)
.select_related("game__owner") .select_related("game__owner")
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"time_based_drops", "time_based_drops",
queryset=TimeBasedDrop.objects.prefetch_related( queryset=TimeBasedDrop.objects.prefetch_related(
Prefetch("benefits", queryset=DropBenefit.objects.order_by("name")), Prefetch(
"benefits",
queryset=DropBenefit.objects.order_by("name"),
),
), ),
), ),
) )
@ -756,23 +899,43 @@ class ChannelDetailView(DetailView):
active_campaigns: list[DropCampaign] = [ active_campaigns: list[DropCampaign] = [
campaign campaign
for campaign in all_campaigns for campaign in all_campaigns
if campaign.start_at is not None and campaign.start_at <= now and campaign.end_at is not None and campaign.end_at >= now if campaign.start_at is not None
and campaign.start_at <= now
and campaign.end_at is not None
and campaign.end_at >= now
] ]
active_campaigns.sort(key=lambda c: c.end_at if c.end_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC)) active_campaigns.sort(
key=lambda c: c.end_at if c.end_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC),
)
upcoming_campaigns: list[DropCampaign] = [campaign for campaign in all_campaigns if campaign.start_at is not None and campaign.start_at > now] upcoming_campaigns: list[DropCampaign] = [
upcoming_campaigns.sort(key=lambda c: c.start_at if c.start_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC)) campaign for campaign in all_campaigns if campaign.start_at is not None and campaign.start_at > now
]
upcoming_campaigns.sort(
key=lambda c: c.start_at if c.start_at is not None else datetime.datetime.max.replace(tzinfo=datetime.UTC),
)
expired_campaigns: list[DropCampaign] = [campaign for campaign in all_campaigns if campaign.end_at is not None and campaign.end_at < now] expired_campaigns: list[DropCampaign] = [
campaign for campaign in all_campaigns if campaign.end_at is not None and campaign.end_at < now
]
# Add unique sorted benefits to each campaign object # Build campaign data with sorted benefits
campaigns_with_benefits = []
for campaign in all_campaigns: for campaign in all_campaigns:
benefits_dict: dict[int, DropBenefit] = {} # Use dict to track unique benefits by ID benefits_dict: dict[int, DropBenefit] = {}
for drop in campaign.time_based_drops.all(): # type: ignore[attr-defined] for drop in campaign.time_based_drops.all(): # type: ignore[attr-defined]
for benefit in drop.benefits.all(): for benefit in drop.benefits.all():
benefits_dict[benefit.id] = benefit benefits_dict[benefit.id] = benefit
# Sort benefits by name and attach to campaign sorted_benefits = sorted(
campaign.sorted_benefits = sorted(benefits_dict.values(), key=lambda b: b.name) # type: ignore[attr-defined] benefits_dict.values(),
key=lambda b: b.name,
)
campaigns_with_benefits.append(
{
"campaign": campaign,
"sorted_benefits": sorted_benefits,
},
)
serialized_channel = serialize( serialized_channel = serialize(
"json", "json",
@ -802,12 +965,15 @@ class ChannelDetailView(DetailView):
campaigns_data = json.loads(serialized_campaigns) campaigns_data = json.loads(serialized_campaigns)
channel_data[0]["fields"]["campaigns"] = campaigns_data channel_data[0]["fields"]["campaigns"] = campaigns_data
context.update({ context.update(
"active_campaigns": active_campaigns, {
"upcoming_campaigns": upcoming_campaigns, "active_campaigns": active_campaigns,
"expired_campaigns": expired_campaigns, "upcoming_campaigns": upcoming_campaigns,
"now": now, "expired_campaigns": expired_campaigns,
"channel_data": format_and_color_json(channel_data[0]), "campaigns_with_benefits": campaigns_with_benefits,
}) "now": now,
"channel_data": format_and_color_json(channel_data[0]),
},
)
return context return context