Make Owner optional; use dateparser to parse dates; use json-repair to read JSON

This commit is contained in:
Joakim Hellsén 2025-09-01 21:50:38 +02:00
commit 6d5f014134
12 changed files with 858 additions and 453 deletions

View file

@ -1,6 +1,6 @@
$src = "D:\broken" $src = "C:\Responses\processed"
$dest = "C:\Responses" $dest = "C:\Responses"
$deg = 16000 $deg = 16000
# What to do when a destination file with the same name exists and contents are identical. # What to do when a destination file with the same name exists and contents are identical.
# Options: 'skip' - do not move the source (leave it where it is) # Options: 'skip' - do not move the source (leave it where it is)
@ -10,48 +10,49 @@ $SameFileAction = 'skip'
New-Item -ItemType Directory -Path $dest -Force | Out-Null New-Item -ItemType Directory -Path $dest -Force | Out-Null
Get-ChildItem -Path $src -Filter *.json -Recurse -File | Get-ChildItem -Path $src -Filter *.json -Recurse -File |
ForEach-Object -Parallel { ForEach-Object -Parallel {
$dest = $using:dest $dest = $using:dest
# copy the using-scoped setting into a local variable for safe use inside the scriptblock # copy the using-scoped setting into a local variable for safe use inside the scriptblock
$sameAction = $using:SameFileAction $sameAction = $using:SameFileAction
$name = $_.Name $name = $_.Name
$target = Join-Path $dest $name $target = Join-Path $dest $name
if (Test-Path -LiteralPath $target) { if (Test-Path -LiteralPath $target) {
# Try comparing contents by hash. If identical, either skip or overwrite based on $using:SameFileAction. # Try comparing contents by hash. If identical, either skip or overwrite based on $using:SameFileAction.
try { try {
$srcHash = Get-FileHash -Algorithm SHA256 -Path $_.FullName $srcHash = Get-FileHash -Algorithm SHA256 -Path $_.FullName
$dstHash = Get-FileHash -Algorithm SHA256 -Path $target $dstHash = Get-FileHash -Algorithm SHA256 -Path $target
if ($srcHash.Hash -eq $dstHash.Hash) { if ($srcHash.Hash -eq $dstHash.Hash) {
switch ($sameAction.ToLower()) { switch ($sameAction.ToLower()) {
'skip' { 'skip' {
Write-Verbose "Skipping move for identical file: $name" Write-Verbose "Skipping move for identical file: $name"
return return
} }
'overwrite' { 'overwrite' {
Move-Item -LiteralPath $_.FullName -Destination $target -Force Move-Item -LiteralPath $_.FullName -Destination $target -Force
return return
} }
default { default {
Write-Verbose "Unknown SameFileAction '$sameAction', skipping $name" Write-Verbose "Unknown SameFileAction '$sameAction', skipping $name"
return return
}
} }
} }
} catch {
# If hashing failed for any reason, fall back to the existing collision-avoidance behavior.
Write-Verbose "Hash comparison failed for $($name): $($_)"
}
# If we reach here, target exists and contents differ: find a non-colliding name (foo (1).json, etc.)
$i = 1
while (Test-Path -LiteralPath $target) {
$base = [IO.Path]::GetFileNameWithoutExtension($name)
$ext = [IO.Path]::GetExtension($name)
$target = Join-Path $dest ("{0} ({1}){2}" -f $base,$i,$ext)
$i++
} }
} }
catch {
# If hashing failed for any reason, fall back to the existing collision-avoidance behavior.
Write-Verbose "Hash comparison failed for $($name): $($_)"
}
Move-Item -LiteralPath $_.FullName -Destination $target -Force # If we reach here, target exists and contents differ: find a non-colliding name (foo (1).json, etc.)
} -ThrottleLimit $deg $i = 1
while (Test-Path -LiteralPath $target) {
$base = [IO.Path]::GetFileNameWithoutExtension($name)
$ext = [IO.Path]::GetExtension($name)
$target = Join-Path $dest ("{0} ({1}){2}" -f $base, $i, $ext)
$i++
}
}
Move-Item -LiteralPath $_.FullName -Destination $target -Force
} -ThrottleLimit $deg

View file

@ -5,12 +5,14 @@ description = "Get notified when a new drop is available on Twitch."
readme = "README.md" readme = "README.md"
requires-python = ">=3.13" requires-python = ">=3.13"
dependencies = [ dependencies = [
"dateparser>=1.2.2",
"django-browser-reload>=1.18.0", "django-browser-reload>=1.18.0",
"django-debug-toolbar>=5.2.0", "django-debug-toolbar>=5.2.0",
"django-stubs[compatible-mypy]>=5.2.2", "django-stubs[compatible-mypy]>=5.2.2",
"django-watchfiles>=1.1.0", "django-watchfiles>=1.1.0",
"django>=5.2.4", "django>=5.2.4",
"djlint>=1.36.4", "djlint>=1.36.4",
"json-repair>=0.50.0",
"orjson>=3.11.1", "orjson>=3.11.1",
"platformdirs>=4.3.8", "platformdirs>=4.3.8",
"python-dotenv>=1.1.1", "python-dotenv>=1.1.1",

View file

@ -7,10 +7,13 @@
<h1> <h1>
<a href="{% url 'twitch:game_detail' campaign.game.id %}">{{ campaign.game.display_name }}</a> - {{ campaign.clean_name }} <a href="{% url 'twitch:game_detail' campaign.game.id %}">{{ campaign.game.display_name }}</a> - {{ campaign.clean_name }}
</h1> </h1>
<p> {% if campaign.owner %}
{# TODO: Link to organization #} <p>
<a href="{% url 'twitch:organization_detail' campaign.owner.id %}">{{ campaign.owner.name }}</a> <a href="{% url 'twitch:organization_detail' campaign.owner.id %}">{{ campaign.owner.name }}</a>
</p> </p>
{% else %}
<p>Organization Unknown</p>
{% endif %}
{% if campaign.image_url %} {% if campaign.image_url %}
<img height="70" <img height="70"
width="70" width="70"

View file

@ -73,15 +73,19 @@
{% endif %} {% endif %}
</div> </div>
<div style="flex: 1;"> <div style="flex: 1;">
{% comment %} Find this header section in your template {% endcomment %}
<header style="margin-bottom: 1rem;"> <header style="margin-bottom: 1rem;">
<h2 style="margin: 0 0 0.5rem 0;"> <h2 style="margin: 0 0 0.5rem 0;">
<a href="{% url 'twitch:game_detail' game_group.grouper.id %}" <a href="{% url 'twitch:game_detail' game_group.grouper.id %}"
style="text-decoration: none">{{ game_group.grouper.display_name|default:game_group.grouper.name|default:game_group.grouper.slug|default:game_group.grouper.id }}</a> style="text-decoration: none">{{ game_group.grouper.display_name|default:game_group.grouper.name|default:game_group.grouper.slug|default:game_group.grouper.id }}</a>
</h2> </h2>
<p style="margin: 0;"> {% comment %} MODIFICATION: Check if the owner exists before creating the link {% endcomment %}
<a href="{% url 'twitch:organization_detail' game_group.list.0.owner.id %}" {% if game_group.grouper.owner %}
style="text-decoration: none">{{ game_group.list.0.owner.name }}</a> <p style="margin: 0;">
</p> <a href="{% url 'twitch:organization_detail' game_group.grouper.owner.id %}"
style="text-decoration: none">{{ game_group.grouper.owner.name }}</a>
</p>
{% endif %}
</header> </header>
<div style="overflow-x: auto;"> <div style="overflow-x: auto;">
<div style="display: flex; gap: 1rem; min-width: max-content;"> <div style="display: flex; gap: 1rem; min-width: max-content;">

View file

@ -69,7 +69,7 @@ Hover over the end time to see the exact date and time.
Started {{ campaign.start_at|timesince }} ago Started {{ campaign.start_at|timesince }} ago
</time> </time>
<time datetime="{{ campaign.created_at|date:'c' }}" <time datetime="{{ campaign.created_at|date:'c' }}"
title="Scraped at {{ campaign.created_at|date:'DATETIME_FORMAT' }}" title="{{ campaign.created_at|date:'DATETIME_FORMAT' }}"
style="font-size: 0.9rem; style="font-size: 0.9rem;
color: #666; color: #666;
display: block; display: block;
@ -77,7 +77,7 @@ Hover over the end time to see the exact date and time.
Scraped {{ campaign.created_at|timesince }} ago Scraped {{ campaign.created_at|timesince }} ago
</time> </time>
<time datetime="{{ campaign.start_at|date:'c' }} to {{ campaign.end_at|date:'c' }}" <time datetime="{{ campaign.start_at|date:'c' }} to {{ campaign.end_at|date:'c' }}"
title="Duration: {{ campaign.start_at|date:'DATETIME_FORMAT' }} to {{ campaign.end_at|date:'DATETIME_FORMAT' }}" title="{{ campaign.start_at|date:'DATETIME_FORMAT' }} to {{ campaign.end_at|date:'DATETIME_FORMAT' }}"
style="font-size: 0.9rem; style="font-size: 0.9rem;
color: #666; color: #666;
display: block; display: block;

View file

@ -32,8 +32,8 @@ class TimeBasedDropInline(admin.TabularInline):
class DropCampaignAdmin(admin.ModelAdmin): class DropCampaignAdmin(admin.ModelAdmin):
"""Admin configuration for DropCampaign model.""" """Admin configuration for DropCampaign model."""
list_display = ("id", "name", "game", "owner", "start_at", "end_at", "is_active") list_display = ("id", "name", "game", "start_at", "end_at", "is_active")
list_filter = ("game", "owner") list_filter = ("game",)
search_fields = ("id", "name", "description") search_fields = ("id", "name", "description")
inlines = [TimeBasedDropInline] inlines = [TimeBasedDropInline]
readonly_fields = ("created_at", "updated_at") readonly_fields = ("created_at", "updated_at")
@ -71,11 +71,9 @@ class DropBenefitAdmin(admin.ModelAdmin):
list_display = ( list_display = (
"id", "id",
"name", "name",
"game",
"owner_organization",
"distribution_type", "distribution_type",
"entitlement_limit", "entitlement_limit",
"created_at", "created_at",
) )
list_filter = ("game", "owner_organization", "distribution_type") list_filter = ("distribution_type",)
search_fields = ("id", "name") search_fields = ("id", "name")

View file

@ -1,18 +1,16 @@
from __future__ import annotations from __future__ import annotations
import json
import logging import logging
import re
import shutil import shutil
import traceback import traceback
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any
import orjson import dateparser
import json_repair
from django.core.management.base import BaseCommand, CommandError, CommandParser from django.core.management.base import BaseCommand, CommandError, CommandParser
from django.db import transaction from django.db import transaction
from django.utils import timezone from django.utils import timezone
from django.utils.dateparse import parse_datetime
from twitch.models import DropBenefit, DropBenefitEdge, DropCampaign, Game, Organization, TimeBasedDrop from twitch.models import DropBenefit, DropBenefitEdge, DropCampaign, Game, Organization, TimeBasedDrop
@ -23,6 +21,30 @@ if TYPE_CHECKING:
logger: logging.Logger = logging.getLogger(__name__) logger: logging.Logger = logging.getLogger(__name__)
def parse_date(value: str | None) -> datetime | None:
"""Parse a datetime string into a timezone-aware datetime using dateparser.
Args:
value: The datetime string to parse.
Returns:
A timezone-aware datetime object or None if parsing fails.
"""
value = (value or "").strip()
if not value or value == "None":
return None
dt: datetime | None = dateparser.parse(value, settings={"RETURN_AS_TIMEZONE_AWARE": True})
if not dt:
return None
# Ensure aware in Django's current timezone
if timezone.is_naive(dt):
dt = timezone.make_aware(dt, timezone.get_current_timezone())
return dt
class Command(BaseCommand): class Command(BaseCommand):
"""Import Twitch drop campaign data from a JSON file or directory of JSON files.""" """Import Twitch drop campaign data from a JSON file or directory of JSON files."""
@ -96,19 +118,6 @@ class Command(BaseCommand):
self._process_file(json_file, processed_path) self._process_file(json_file, processed_path)
except CommandError as e: except CommandError as e:
self.stdout.write(self.style.ERROR(f"Error processing {json_file}: {e}")) self.stdout.write(self.style.ERROR(f"Error processing {json_file}: {e}"))
except (orjson.JSONDecodeError, json.JSONDecodeError):
# Attempt to clean trailing broken JSON and retry parsing
try:
self.clean_file(json_file)
self.stdout.write(self.style.SUCCESS(f"Cleaned JSON in '{json_file.name}', retrying import."))
# re-process the cleaned file
self._process_file(json_file, processed_path)
except (orjson.JSONDecodeError, json.JSONDecodeError):
# Still invalid after cleanup, move to broken_json
broken_json_dir: Path = processed_path / "broken_json"
broken_json_dir.mkdir(parents=True, exist_ok=True)
self.stdout.write(self.style.WARNING(f"Invalid JSON in '{json_file}', even after cleanup. Moving to '{broken_json_dir}'."))
self.move_file(json_file, broken_json_dir / json_file.name)
except (ValueError, TypeError, AttributeError, KeyError, IndexError): except (ValueError, TypeError, AttributeError, KeyError, IndexError):
self.stdout.write(self.style.ERROR(f"Data error processing {json_file}")) self.stdout.write(self.style.ERROR(f"Data error processing {json_file}"))
self.stdout.write(self.style.ERROR(traceback.format_exc())) self.stdout.write(self.style.ERROR(traceback.format_exc()))
@ -119,6 +128,9 @@ class Command(BaseCommand):
def _process_file(self, file_path: Path, processed_path: Path) -> None: def _process_file(self, file_path: Path, processed_path: Path) -> None:
"""Process a single JSON file. """Process a single JSON file.
Raises:
CommandError: If the file isn't a JSON file or has an invalid JSON structure.
Args: Args:
file_path: Path to the JSON file. file_path: Path to the JSON file.
processed_path: Subdirectory to move processed files to. processed_path: Subdirectory to move processed files to.
@ -126,7 +138,7 @@ class Command(BaseCommand):
raw_bytes: bytes = file_path.read_bytes() raw_bytes: bytes = file_path.read_bytes()
raw_text: str = raw_bytes.decode("utf-8") raw_text: str = raw_bytes.decode("utf-8")
data = orjson.loads(raw_bytes) data = json_repair.loads(raw_text)
broken_dir: Path = processed_path / "broken" broken_dir: Path = processed_path / "broken"
broken_dir.mkdir(parents=True, exist_ok=True) broken_dir.mkdir(parents=True, exist_ok=True)
@ -222,8 +234,11 @@ class Command(BaseCommand):
if isinstance(data, list): if isinstance(data, list):
for _item in data: for _item in data:
self.import_drop_campaign(_item, file_path=file_path) self.import_drop_campaign(_item, file_path=file_path)
else: elif isinstance(data, dict):
self.import_drop_campaign(data, file_path=file_path) self.import_drop_campaign(data, file_path=file_path)
else:
msg: str = f"Invalid JSON structure in {file_path}: Expected dict or list at top level"
raise CommandError(msg)
self.move_file(file_path, processed_path) self.move_file(file_path, processed_path)
@ -341,71 +356,52 @@ class Command(BaseCommand):
""" """
with transaction.atomic(): with transaction.atomic():
game: Game = self.game_update_or_create(campaign_data=campaign_data) game: Game = self.game_update_or_create(campaign_data=campaign_data)
organization: Organization | None = self.owner_update_or_create(campaign_data=campaign_data)
organization: Organization | None = self.owner_update_or_create(campaign_data=campaign_data, file_path=file_path) if organization:
if organization is None: game.owner = organization
self.stdout.write(self.style.WARNING("No organization found for this campaign, skipping drop campaign import.")) game.save(update_fields=["owner"])
return
drop_campaign: DropCampaign = self.drop_campaign_update_or_get( drop_campaign: DropCampaign = self.drop_campaign_update_or_get(campaign_data=campaign_data, game=game)
campaign_data=campaign_data,
game=game,
organization=organization,
)
for drop_data in campaign_data.get("timeBasedDrops", []): for drop_data in campaign_data.get("timeBasedDrops", []):
time_based_drop: TimeBasedDrop = self.create_time_based_drop(drop_campaign=drop_campaign, drop_data=drop_data) self._process_time_based_drop(drop_data, drop_campaign, file_path)
benefit_edges: list[dict[str, Any]] = drop_data.get("benefitEdges", [])
if not benefit_edges:
self.stdout.write(self.style.WARNING(f"No benefit edges found for drop {time_based_drop.name} (ID: {time_based_drop.id})"))
self.move_file(file_path, Path("no_benefit_edges") / file_path.name)
continue
for benefit_edge in benefit_edges:
benefit_defaults: dict[str, Any] = {}
benefit_data: dict[str, Any] = benefit_edge["benefit"]
benefit_name: str = str(benefit_data.get("name")).strip()
if benefit_name and benefit_name != "None":
benefit_defaults["name"] = benefit_name
img_asset: str = str(benefit_data.get("imageAssetURL")).strip()
if img_asset and img_asset != "None":
benefit_defaults["image_asset_url"] = img_asset
created_at: str = str(benefit_data.get("createdAt")).strip()
if created_at and created_at != "None":
benefit_defaults["created_at"] = created_at
ent_limit: int | None = benefit_data.get("entitlementLimit")
if ent_limit is not None:
benefit_defaults["entitlement_limit"] = ent_limit
ios_avail: bool | None = benefit_data.get("isIosAvailable")
if ios_avail is not None:
benefit_defaults["is_ios_available"] = ios_avail
dist_type: str | None = benefit_data.get("distributionType")
if dist_type is not None:
benefit_defaults["distribution_type"] = dist_type
benefit_defaults["game"] = game
benefit_defaults["owner_organization"] = organization
benefit, _ = DropBenefit.objects.update_or_create(
id=benefit_data["id"],
defaults=benefit_defaults,
)
DropBenefitEdge.objects.update_or_create(
drop=time_based_drop,
benefit=benefit,
defaults={
"entitlement_limit": benefit_edge.get("entitlementLimit", 1),
},
)
self.stdout.write(self.style.SUCCESS(f"Successfully imported drop campaign {drop_campaign.name} (ID: {drop_campaign.id})")) self.stdout.write(self.style.SUCCESS(f"Successfully imported drop campaign {drop_campaign.name} (ID: {drop_campaign.id})"))
def _process_time_based_drop(self, drop_data: dict[str, Any], drop_campaign: DropCampaign, file_path: Path) -> None:
time_based_drop: TimeBasedDrop = self.create_time_based_drop(drop_campaign=drop_campaign, drop_data=drop_data)
benefit_edges: list[dict[str, Any]] = drop_data.get("benefitEdges", [])
if not benefit_edges:
self.stdout.write(self.style.WARNING(f"No benefit edges found for drop {time_based_drop.name} (ID: {time_based_drop.id})"))
self.move_file(file_path, Path("no_benefit_edges") / file_path.name)
return
for benefit_edge in benefit_edges:
benefit_data: dict[str, Any] = benefit_edge["benefit"]
benefit_defaults = {
"name": benefit_data.get("name"),
"image_asset_url": benefit_data.get("imageAssetURL"),
"created_at": parse_date(benefit_data.get("createdAt")),
"entitlement_limit": benefit_data.get("entitlementLimit"),
"is_ios_available": benefit_data.get("isIosAvailable"),
"distribution_type": benefit_data.get("distributionType"),
}
# Filter out None values to avoid overwriting with them
benefit_defaults = {k: v for k, v in benefit_defaults.items() if v is not None}
benefit, _ = DropBenefit.objects.update_or_create(
id=benefit_data["id"],
defaults=benefit_defaults,
)
DropBenefitEdge.objects.update_or_create(
drop=time_based_drop,
benefit=benefit,
defaults={"entitlement_limit": benefit_edge.get("entitlementLimit", 1)},
)
def create_time_based_drop(self, drop_campaign: DropCampaign, drop_data: dict[str, Any]) -> TimeBasedDrop: def create_time_based_drop(self, drop_campaign: DropCampaign, drop_data: dict[str, Any]) -> TimeBasedDrop:
"""Creates or updates a TimeBasedDrop instance based on the provided drop data. """Creates or updates a TimeBasedDrop instance based on the provided drop data.
@ -423,49 +419,18 @@ class Command(BaseCommand):
TimeBasedDrop: The created or updated TimeBasedDrop instance. TimeBasedDrop: The created or updated TimeBasedDrop instance.
""" """
defaults: dict[str, Any] = {} time_based_drop_defaults: dict[str, Any] = {
"campaign": drop_campaign,
"name": drop_data.get("name"),
"required_minutes_watched": drop_data.get("requiredMinutesWatched"),
"required_subs": drop_data.get("requiredSubs"),
"start_at": parse_date(drop_data.get("startAt")),
"end_at": parse_date(drop_data.get("endAt")),
}
# Filter out None values to avoid overwriting with them
time_based_drop_defaults = {k: v for k, v in time_based_drop_defaults.items() if v is not None}
name: str = drop_data.get("name", "") time_based_drop, created = TimeBasedDrop.objects.update_or_create(id=drop_data["id"], defaults=time_based_drop_defaults)
if name:
defaults["name"] = name.strip()
# "requiredMinutesWatched": 240
required_minutes_watched: int = drop_data.get("requiredMinutesWatched", 0)
if required_minutes_watched:
defaults["required_minutes_watched"] = int(required_minutes_watched)
# "requiredSubs": 1,
required_subs: int = drop_data.get("requiredSubs", 0)
if required_subs:
defaults["required_subs"] = int(required_subs)
# "startAt": "2025-08-08T07:00:00Z",
# Model field is DateTimeField
start_at: str | None = drop_data.get("startAt")
if start_at:
# Convert to timezone-aware datetime
parsed_start_at: datetime | None = parse_datetime(start_at)
if parsed_start_at and timezone.is_naive(parsed_start_at):
parsed_start_at = timezone.make_aware(parsed_start_at)
if parsed_start_at:
defaults["start_at"] = parsed_start_at
# "endAt": "2025-02-04T10:59:59.999Z",
# Model field is DateTimeField
end_at: str | None = drop_data.get("endAt")
if end_at:
# Convert to timezone-aware datetime
parsed_end_at: datetime | None = parse_datetime(end_at)
if parsed_end_at and timezone.is_naive(parsed_end_at):
parsed_end_at = timezone.make_aware(parsed_end_at)
if parsed_end_at:
defaults["end_at"] = parsed_end_at
defaults["campaign"] = drop_campaign
time_based_drop, created = TimeBasedDrop.objects.update_or_create(id=drop_data["id"], defaults=defaults)
if created: if created:
self.stdout.write(self.style.SUCCESS(f"Successfully imported time-based drop {time_based_drop.name} (ID: {time_based_drop.id})")) self.stdout.write(self.style.SUCCESS(f"Successfully imported time-based drop {time_based_drop.name} (ID: {time_based_drop.id})"))
@ -475,7 +440,6 @@ class Command(BaseCommand):
self, self,
campaign_data: dict[str, Any], campaign_data: dict[str, Any],
game: Game, game: Game,
organization: Organization | None,
) -> DropCampaign: ) -> DropCampaign:
"""Update or create a drop campaign. """Update or create a drop campaign.
@ -487,51 +451,33 @@ class Command(BaseCommand):
Returns: Returns:
Returns the DropCampaign object. Returns the DropCampaign object.
""" """
defaults: dict[str, Any] = {} drop_campaign_defaults: dict[str, Any] = {
name = campaign_data.get("name") "game": game,
if name is not None: "name": campaign_data.get("name"),
defaults["name"] = name "description": campaign_data.get("description"),
desc = campaign_data.get("description") "details_url": campaign_data.get("detailsURL"),
if desc is not None: "account_link_url": campaign_data.get("accountLinkURL"),
defaults["description"] = desc.replace("\\n", "\n") "image_url": campaign_data.get("imageURL"),
details = campaign_data.get("detailsURL") "start_at": parse_date(campaign_data.get("startAt") or campaign_data.get("startsAt")),
if details is not None: "end_at": parse_date(campaign_data.get("endAt") or campaign_data.get("endsAt")),
defaults["details_url"] = details "is_account_connected": campaign_data.get("self", {}).get("isAccountConnected"),
acct_link = campaign_data.get("accountLinkURL") }
if acct_link is not None: # Filter out None values to avoid overwriting with them
defaults["account_link_url"] = acct_link drop_campaign_defaults = {k: v for k, v in drop_campaign_defaults.items() if v is not None}
img = campaign_data.get("imageURL")
if img is not None:
defaults["image_url"] = img
start = campaign_data.get("startAt")
if start is not None:
defaults["start_at"] = start
end = campaign_data.get("endAt")
if end is not None:
defaults["end_at"] = end
is_conn = campaign_data.get("self", {}).get("isAccountConnected")
if is_conn is not None:
defaults["is_account_connected"] = is_conn
defaults["game"] = game
if organization:
defaults["owner"] = organization
drop_campaign, created = DropCampaign.objects.update_or_create( drop_campaign, created = DropCampaign.objects.update_or_create(
id=campaign_data["id"], id=campaign_data["id"],
defaults=defaults, defaults=drop_campaign_defaults,
) )
if created: if created:
self.stdout.write(self.style.SUCCESS(f"Created new drop campaign: {drop_campaign.name} (ID: {drop_campaign.id})")) self.stdout.write(self.style.SUCCESS(f"Created new drop campaign: {drop_campaign.name} (ID: {drop_campaign.id})"))
return drop_campaign return drop_campaign
def owner_update_or_create(self, campaign_data: dict[str, Any], file_path: Path) -> Organization | None: def owner_update_or_create(self, campaign_data: dict[str, Any]) -> Organization | None:
"""Update or create an organization. """Update or create an organization.
Args: Args:
campaign_data: The drop campaign data to import. campaign_data: The drop campaign data to import.
file_path: Optional path to the file being processed, used for error handling.
Returns: Returns:
Returns the Organization object. Returns the Organization object.
@ -540,37 +486,20 @@ class Command(BaseCommand):
if not org_data: if not org_data:
self.stdout.write(self.style.WARNING("No owner data found in campaign data. Attempting to find organization by game.")) self.stdout.write(self.style.WARNING("No owner data found in campaign data. Attempting to find organization by game."))
# Try to find an organization by the game if possible organization: Organization | None = None
game_id: str | None = campaign_data.get("game", {}).get("id") if org_data:
if game_id: org_defaults: dict[str, Any] = {"name": org_data.get("name")}
game: Game | None = Game.objects.filter(id=game_id).first() # Filter out None values to avoid overwriting with them
if game: org_defaults = {k: v for k, v in org_defaults.items() if v is not None}
if game.organizations.exists():
org: Organization | None = game.organizations.first()
if org:
self.stdout.write(self.style.SUCCESS(f"Found organization '{org.name}' for game '{game.display_name}'"))
return org
else:
self.stdout.write(self.style.WARNING(f"No game found with id '{game_id}' when looking up organization."))
# If not found, move the file for manual review organization, created = Organization.objects.update_or_create(
self.stdout.write(self.style.WARNING("No organization found for this campaign, moving file for review.")) id=org_data["id"],
defaults=org_defaults,
todo_dir: Path = Path("check_these_please")
todo_dir.mkdir(parents=True, exist_ok=True)
self.move_file(
file_path,
todo_dir / file_path.name,
) )
return None if created:
self.stdout.write(self.style.SUCCESS(f"Created new organization: {organization.name} (ID: {organization.id})"))
organization, created = Organization.objects.update_or_create( return organization
id=org_data["id"], return None
defaults={"name": org_data["name"]},
)
if created:
self.stdout.write(self.style.SUCCESS(f"Created new organization: {organization.name} (ID: {organization.id})"))
return organization
def game_update_or_create(self, campaign_data: dict[str, Any]) -> Game: def game_update_or_create(self, campaign_data: dict[str, Any]) -> Game:
"""Update or create a game. """Update or create a game.
@ -628,21 +557,3 @@ class Command(BaseCommand):
if changed_fields: if changed_fields:
obj.save(update_fields=changed_fields) obj.save(update_fields=changed_fields)
return obj, created return obj, created
def clean_file(self, path: Path) -> None:
"""Strip trailing broken JSON after the last 'extensions' block."""
text: str = path.read_text(encoding="utf-8")
# Handle extensions block at end of a JSON array
cleaned: str = re.sub(
r'(?s),?\s*"extensions"\s*:\s*\{.*?\}\s*\}\s*\]\s*$',
"}]",
text,
)
if cleaned == text:
# Fallback for standalone extensions block
cleaned = re.sub(
r'(?s),?\s*"extensions"\s*:\s*\{.*?\}\s*$',
"}",
text,
)
path.write_text(cleaned, encoding="utf-8")

View file

@ -0,0 +1,272 @@
# Generated by Django 5.2.5 on 2025-09-01 17:01
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('twitch', '0005_alter_timebaseddrop_end_at_and_more'),
]
operations = [
migrations.AlterModelOptions(
name='dropbenefit',
options={'ordering': ['-created_at']},
),
migrations.AlterModelOptions(
name='dropcampaign',
options={'ordering': ['-start_at']},
),
migrations.AlterModelOptions(
name='game',
options={'ordering': ['display_name']},
),
migrations.AlterModelOptions(
name='organization',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='timebaseddrop',
options={'ordering': ['start_at']},
),
migrations.RemoveIndex(
model_name='dropbenefit',
name='twitch_drop_game_id_a9209e_idx',
),
migrations.RemoveIndex(
model_name='dropbenefit',
name='twitch_drop_owner_o_45b4cc_idx',
),
migrations.RemoveIndex(
model_name='dropcampaign',
name='twitch_drop_game_id_868e70_idx',
),
migrations.RemoveIndex(
model_name='dropcampaign',
name='twitch_drop_owner_i_37241d_idx',
),
migrations.RemoveIndex(
model_name='game',
name='twitch_game_box_art_498a89_idx',
),
migrations.RemoveIndex(
model_name='timebaseddrop',
name='twitch_time_campaig_bbe349_idx',
),
migrations.AlterUniqueTogether(
name='dropbenefitedge',
unique_together=set(),
),
migrations.RemoveField(
model_name='dropbenefit',
name='game',
),
migrations.RemoveField(
model_name='dropbenefit',
name='owner_organization',
),
migrations.RemoveField(
model_name='dropcampaign',
name='owner',
),
migrations.AddField(
model_name='game',
name='owner',
field=models.ForeignKey(blank=True, help_text='The organization that owns this game.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='games', to='twitch.organization', verbose_name='Organization'),
),
migrations.AlterField(
model_name='dropbenefit',
name='created_at',
field=models.DateTimeField(db_index=True, help_text='Timestamp when the benefit was created. This is from Twitch API and not auto-generated.', null=True),
),
migrations.AlterField(
model_name='dropbenefit',
name='distribution_type',
field=models.CharField(blank=True, db_index=True, default='', help_text='Type of distribution for this benefit.', max_length=50),
),
migrations.AlterField(
model_name='dropbenefit',
name='entitlement_limit',
field=models.PositiveIntegerField(default=1, help_text='Maximum number of times this benefit can be earned.'),
),
migrations.AlterField(
model_name='dropbenefit',
name='id',
field=models.CharField(help_text='Unique Twitch identifier for the benefit.', max_length=64, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='dropbenefit',
name='image_asset_url',
field=models.URLField(blank=True, default='', help_text="URL to the benefit's image asset.", max_length=500),
),
migrations.AlterField(
model_name='dropbenefit',
name='is_ios_available',
field=models.BooleanField(default=False, help_text='Whether the benefit is available on iOS.'),
),
migrations.AlterField(
model_name='dropbenefit',
name='name',
field=models.CharField(blank=True, db_index=True, default='N/A', help_text='Name of the drop benefit.', max_length=255),
),
migrations.AlterField(
model_name='dropbenefitedge',
name='benefit',
field=models.ForeignKey(help_text='The benefit in this relationship.', on_delete=django.db.models.deletion.CASCADE, to='twitch.dropbenefit'),
),
migrations.AlterField(
model_name='dropbenefitedge',
name='drop',
field=models.ForeignKey(help_text='The time-based drop in this relationship.', on_delete=django.db.models.deletion.CASCADE, to='twitch.timebaseddrop'),
),
migrations.AlterField(
model_name='dropbenefitedge',
name='entitlement_limit',
field=models.PositiveIntegerField(default=1, help_text='Max times this benefit can be claimed for this drop.'),
),
migrations.AlterField(
model_name='dropcampaign',
name='account_link_url',
field=models.URLField(blank=True, default='', help_text='URL to link a Twitch account for the campaign.', max_length=500),
),
migrations.AlterField(
model_name='dropcampaign',
name='created_at',
field=models.DateTimeField(auto_now_add=True, db_index=True, help_text='Timestamp when this campaign record was created.'),
),
migrations.AlterField(
model_name='dropcampaign',
name='description',
field=models.TextField(blank=True, help_text='Detailed description of the campaign.'),
),
migrations.AlterField(
model_name='dropcampaign',
name='details_url',
field=models.URLField(blank=True, default='', help_text='URL with campaign details.', max_length=500),
),
migrations.AlterField(
model_name='dropcampaign',
name='end_at',
field=models.DateTimeField(blank=True, db_index=True, help_text='Datetime when the campaign ends.', null=True),
),
migrations.AlterField(
model_name='dropcampaign',
name='game',
field=models.ForeignKey(help_text='Game associated with this campaign.', on_delete=django.db.models.deletion.CASCADE, related_name='drop_campaigns', to='twitch.game', verbose_name='Game'),
),
migrations.AlterField(
model_name='dropcampaign',
name='id',
field=models.CharField(help_text='Unique Twitch identifier for the campaign.', max_length=255, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='dropcampaign',
name='image_url',
field=models.URLField(blank=True, default='', help_text='URL to an image representing the campaign.', max_length=500),
),
migrations.AlterField(
model_name='dropcampaign',
name='is_account_connected',
field=models.BooleanField(default=False, help_text='Indicates if the user account is linked.'),
),
migrations.AlterField(
model_name='dropcampaign',
name='name',
field=models.CharField(db_index=True, help_text='Name of the drop campaign.', max_length=255),
),
migrations.AlterField(
model_name='dropcampaign',
name='start_at',
field=models.DateTimeField(blank=True, db_index=True, help_text='Datetime when the campaign starts.', null=True),
),
migrations.AlterField(
model_name='dropcampaign',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='Timestamp when this campaign record was last updated.'),
),
migrations.AlterField(
model_name='game',
name='box_art',
field=models.URLField(blank=True, default='', max_length=500, verbose_name='Box art URL'),
),
migrations.AlterField(
model_name='game',
name='display_name',
field=models.CharField(blank=True, db_index=True, default='', max_length=255, verbose_name='Display name'),
),
migrations.AlterField(
model_name='game',
name='id',
field=models.CharField(max_length=64, primary_key=True, serialize=False, verbose_name='Game ID'),
),
migrations.AlterField(
model_name='game',
name='name',
field=models.CharField(blank=True, db_index=True, default='', max_length=255, verbose_name='Name'),
),
migrations.AlterField(
model_name='game',
name='slug',
field=models.CharField(blank=True, db_index=True, default='', help_text='Short unique identifier for the game.', max_length=200, verbose_name='Slug'),
),
migrations.AlterField(
model_name='organization',
name='id',
field=models.CharField(help_text='The unique Twitch identifier for the organization.', max_length=255, primary_key=True, serialize=False, verbose_name='Organization ID'),
),
migrations.AlterField(
model_name='organization',
name='name',
field=models.CharField(db_index=True, help_text='Display name of the organization.', max_length=255, unique=True, verbose_name='Name'),
),
migrations.AlterField(
model_name='timebaseddrop',
name='benefits',
field=models.ManyToManyField(help_text='Benefits unlocked by this drop.', related_name='drops', through='twitch.DropBenefitEdge', to='twitch.dropbenefit'),
),
migrations.AlterField(
model_name='timebaseddrop',
name='campaign',
field=models.ForeignKey(help_text='The campaign this drop belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='time_based_drops', to='twitch.dropcampaign'),
),
migrations.AlterField(
model_name='timebaseddrop',
name='end_at',
field=models.DateTimeField(blank=True, db_index=True, help_text='Datetime when this drop expires.', null=True),
),
migrations.AlterField(
model_name='timebaseddrop',
name='id',
field=models.CharField(help_text='Unique Twitch identifier for the time-based drop.', max_length=64, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='timebaseddrop',
name='name',
field=models.CharField(db_index=True, help_text='Name of the time-based drop.', max_length=255),
),
migrations.AlterField(
model_name='timebaseddrop',
name='required_minutes_watched',
field=models.PositiveIntegerField(blank=True, db_index=True, help_text='Minutes required to watch before earning this drop.', null=True),
),
migrations.AlterField(
model_name='timebaseddrop',
name='required_subs',
field=models.PositiveIntegerField(default=0, help_text='Number of subscriptions required to unlock this drop.'),
),
migrations.AlterField(
model_name='timebaseddrop',
name='start_at',
field=models.DateTimeField(blank=True, db_index=True, help_text='Datetime when this drop becomes available.', null=True),
),
migrations.AddConstraint(
model_name='dropbenefitedge',
constraint=models.UniqueConstraint(fields=('drop', 'benefit'), name='unique_drop_benefit'),
),
migrations.AddConstraint(
model_name='game',
constraint=models.UniqueConstraint(fields=('slug',), name='unique_game_slug'),
),
]

View file

@ -0,0 +1,17 @@
# Generated by Django 5.2.5 on 2025-09-01 17:06
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``unique_game_slug`` constraint from the Game model.

    This reverses the ``AddConstraint`` (``UniqueConstraint(fields=('slug',),
    name='unique_game_slug')``) introduced by migration 0006, so the database
    no longer enforces uniqueness on ``Game.slug``.
    """

    # Must run after the migration that originally added the constraint.
    dependencies = [
        ('twitch', '0006_alter_dropbenefit_options_alter_dropcampaign_options_and_more'),
    ]

    operations = [
        # Drop the DB-level unique constraint on Game.slug by its name.
        migrations.RemoveConstraint(
            model_name='game',
            name='unique_game_slug',
        ),
    ]

View file

@ -3,6 +3,7 @@ from __future__ import annotations
import logging import logging
import re import re
from typing import TYPE_CHECKING, ClassVar from typing import TYPE_CHECKING, ClassVar
from urllib.parse import urlsplit, urlunsplit
from django.db import models from django.db import models
from django.utils import timezone from django.utils import timezone
@ -15,21 +16,83 @@ if TYPE_CHECKING:
logger: logging.Logger = logging.getLogger("ttvdrops") logger: logging.Logger = logging.getLogger("ttvdrops")
class Organization(models.Model):
"""Represents an organization on Twitch that can own drop campaigns."""
id = models.CharField(
max_length=255,
primary_key=True,
verbose_name="Organization ID",
help_text="The unique Twitch identifier for the organization.",
)
name = models.CharField(
max_length=255,
db_index=True,
unique=True,
verbose_name="Name",
help_text="Display name of the organization.",
)
class Meta:
ordering = ["name"]
indexes: ClassVar[list] = [
models.Index(fields=["name"]),
]
def __str__(self) -> str:
"""Return a string representation of the organization."""
return self.name or self.id
class Game(models.Model): class Game(models.Model):
"""Represents a game on Twitch.""" """Represents a game on Twitch."""
id = models.TextField(primary_key=True) id = models.CharField(max_length=64, primary_key=True, verbose_name="Game ID")
slug = models.TextField(blank=True, default="", db_index=True) slug = models.CharField(
name = models.TextField(blank=True, default="", db_index=True) max_length=200,
display_name = models.TextField(blank=True, default="", db_index=True) blank=True,
box_art = models.URLField(max_length=500, blank=True, default="") default="",
db_index=True,
verbose_name="Slug",
help_text="Short unique identifier for the game.",
)
name = models.CharField(
max_length=255,
blank=True,
default="",
db_index=True,
verbose_name="Name",
)
display_name = models.CharField(
max_length=255,
blank=True,
default="",
db_index=True,
verbose_name="Display name",
)
box_art = models.URLField(
max_length=500,
blank=True,
default="",
verbose_name="Box art URL",
)
owner = models.ForeignKey(
Organization,
on_delete=models.SET_NULL,
related_name="games",
null=True,
blank=True,
verbose_name="Organization",
help_text="The organization that owns this game.",
)
class Meta: class Meta:
ordering = ["display_name"]
indexes: ClassVar[list] = [ indexes: ClassVar[list] = [
models.Index(fields=["slug"]), models.Index(fields=["slug"]),
models.Index(fields=["display_name"]), models.Index(fields=["display_name"]),
models.Index(fields=["name"]), models.Index(fields=["name"]),
models.Index(fields=["box_art"]),
] ]
def __str__(self) -> str: def __str__(self) -> str:
@ -41,78 +104,106 @@ class Game(models.Model):
self.name, self.name,
) )
return f"{self.display_name} ({self.name})" return f"{self.display_name} ({self.name})"
return self.name or self.slug or self.id return self.display_name or self.name or self.slug or self.id
@property @property
def organizations(self) -> models.QuerySet[Organization]: def organizations(self) -> models.QuerySet[Organization]:
"""Return all organizations that have drop campaigns for this game.""" """Return all organizations that own games with campaigns for this game."""
return Organization.objects.filter(drop_campaigns__game=self).distinct() return Organization.objects.filter(games__drop_campaigns__game=self).distinct()
@property @property
def box_art_base_url(self) -> str: def box_art_base_url(self) -> str:
"""Return the base box art URL without size suffix. """Return the base box art URL without Twitch size suffixes."""
Twitch box art URLs often include size suffixes like '-120x160.jpg'.
This property returns the base URL without the size suffix.
Examples:
'https://static-cdn.jtvnw.net/ttv-boxart/512710-120x160.jpg'
-> 'https://static-cdn.jtvnw.net/ttv-boxart/512710.jpg'
"""
if not self.box_art: if not self.box_art:
return "" return ""
parts = urlsplit(self.box_art)
# Remove size suffix pattern like '-120x160' from the filename path = re.sub(
return re.sub(r"-\d+x\d+(\.jpg|\.png|\.jpeg|\.gif|\.webp)$", r"\1", self.box_art) r"(-\d+x\d+)(\.(?:jpg|jpeg|png|gif|webp))$",
r"\2",
parts.path,
class Organization(models.Model): flags=re.IGNORECASE,
"""Represents an organization on Twitch that can own drop campaigns.""" )
return urlunsplit((parts.scheme, parts.netloc, path, "", ""))
id = models.TextField(primary_key=True)
name = models.TextField(db_index=True)
class Meta:
indexes: ClassVar[list] = [
models.Index(fields=["name"]),
]
def __str__(self) -> str:
"""Return a string representation of the organization."""
return self.name
class DropCampaign(models.Model): class DropCampaign(models.Model):
"""Represents a Twitch drop campaign.""" """Represents a Twitch drop campaign."""
id = models.TextField(primary_key=True) id = models.CharField(
name = models.TextField(db_index=True) max_length=255,
description = models.TextField(blank=True) primary_key=True,
details_url = models.URLField(max_length=500, blank=True, default="") help_text="Unique Twitch identifier for the campaign.",
account_link_url = models.URLField(max_length=500, blank=True, default="") )
image_url = models.URLField(max_length=500, blank=True, default="") name = models.CharField(
start_at = models.DateTimeField(db_index=True, null=True) max_length=255,
end_at = models.DateTimeField(db_index=True, null=True) db_index=True,
is_account_connected = models.BooleanField(default=False) help_text="Name of the drop campaign.",
)
description = models.TextField(
blank=True,
help_text="Detailed description of the campaign.",
)
details_url = models.URLField(
max_length=500,
blank=True,
default="",
help_text="URL with campaign details.",
)
account_link_url = models.URLField(
max_length=500,
blank=True,
default="",
help_text="URL to link a Twitch account for the campaign.",
)
image_url = models.URLField(
max_length=500,
blank=True,
default="",
help_text="URL to an image representing the campaign.",
)
start_at = models.DateTimeField(
db_index=True,
null=True,
blank=True,
help_text="Datetime when the campaign starts.",
)
end_at = models.DateTimeField(
db_index=True,
null=True,
blank=True,
help_text="Datetime when the campaign ends.",
)
is_account_connected = models.BooleanField(
default=False,
help_text="Indicates if the user account is linked.",
)
# Foreign keys game = models.ForeignKey(
game = models.ForeignKey(Game, on_delete=models.CASCADE, related_name="drop_campaigns", db_index=True) Game,
owner = models.ForeignKey(Organization, on_delete=models.CASCADE, related_name="drop_campaigns", db_index=True) on_delete=models.CASCADE,
related_name="drop_campaigns",
verbose_name="Game",
help_text="Game associated with this campaign.",
)
# Tracking fields created_at = models.DateTimeField(
created_at = models.DateTimeField(auto_now_add=True) auto_now_add=True,
updated_at = models.DateTimeField(auto_now=True) db_index=True,
help_text="Timestamp when this campaign record was created.",
)
updated_at = models.DateTimeField(
auto_now=True,
help_text="Timestamp when this campaign record was last updated.",
)
class Meta: class Meta:
ordering = ["-start_at"]
indexes: ClassVar[list] = [ indexes: ClassVar[list] = [
models.Index(fields=["name"]), models.Index(fields=["name"]),
models.Index(fields=["start_at", "end_at"]), models.Index(fields=["start_at", "end_at"]),
models.Index(fields=["game"]),
models.Index(fields=["owner"]),
] ]
def __str__(self) -> str: def __str__(self) -> str:
"""Return a string representation of the drop campaign."""
return self.name return self.name
@property @property
@ -135,29 +226,20 @@ class DropCampaign(models.Model):
if not self.game or not self.game.display_name: if not self.game or not self.game.display_name:
return self.name return self.name
# Try different variations of the game name
game_variations = [self.game.display_name] game_variations = [self.game.display_name]
# Add & to "and" conversion
if "&" in self.game.display_name: if "&" in self.game.display_name:
game_variations.append(self.game.display_name.replace("&", "and")) game_variations.append(self.game.display_name.replace("&", "and"))
# Add "and" to & conversion
if "and" in self.game.display_name: if "and" in self.game.display_name:
game_variations.append(self.game.display_name.replace("and", "&")) game_variations.append(self.game.display_name.replace("and", "&"))
# Check each variation
for game_name in game_variations: for game_name in game_variations:
if not self.name.startswith(game_name): # Check for different separators after the game name
continue for separator in [" - ", " | ", " "]:
prefix_to_check = game_name + separator
# Check if it's followed by a separator like " - " name: str = self.name
if self.name[len(game_name) :].startswith(" - "): if name.startswith(prefix_to_check):
return self.name[len(game_name) + 3 :].strip() return name.removeprefix(prefix_to_check).strip()
# Or just remove the game name if it's followed by a space
if len(self.name) > len(game_name) and self.name[len(game_name)] == " ":
return self.name[len(game_name) + 1 :].strip()
return self.name return self.name
@ -165,25 +247,53 @@ class DropCampaign(models.Model):
class DropBenefit(models.Model): class DropBenefit(models.Model):
"""Represents a benefit that can be earned from a drop.""" """Represents a benefit that can be earned from a drop."""
id = models.TextField(primary_key=True) id = models.CharField(
name = models.TextField(db_index=True, blank=True, default="N/A") max_length=64,
image_asset_url = models.URLField(max_length=500, blank=True, default="") primary_key=True,
created_at = models.DateTimeField(db_index=True, null=True) help_text="Unique Twitch identifier for the benefit.",
entitlement_limit = models.PositiveIntegerField(default=1) )
is_ios_available = models.BooleanField(default=False) name = models.CharField(
distribution_type = models.TextField(db_index=True, blank=True, default="") max_length=255,
db_index=True,
blank=True,
default="N/A",
help_text="Name of the drop benefit.",
)
image_asset_url = models.URLField(
max_length=500,
blank=True,
default="",
help_text="URL to the benefit's image asset.",
)
created_at = models.DateTimeField(
null=True,
db_index=True,
help_text="Timestamp when the benefit was created. This is from Twitch API and not auto-generated.",
)
entitlement_limit = models.PositiveIntegerField(
default=1,
help_text="Maximum number of times this benefit can be earned.",
)
# Foreign keys # TODO(TheLovinator): Check if this should be default True or False # noqa: TD003
game = models.ForeignKey(Game, on_delete=models.CASCADE, related_name="drop_benefits", db_index=True) is_ios_available = models.BooleanField(
owner_organization = models.ForeignKey(Organization, on_delete=models.CASCADE, related_name="drop_benefits", db_index=True) default=False,
help_text="Whether the benefit is available on iOS.",
)
distribution_type = models.CharField(
max_length=50,
db_index=True,
blank=True,
default="",
help_text="Type of distribution for this benefit.",
)
class Meta: class Meta:
ordering = ["-created_at"]
indexes: ClassVar[list] = [ indexes: ClassVar[list] = [
models.Index(fields=["name"]), models.Index(fields=["name"]),
models.Index(fields=["created_at"]), models.Index(fields=["created_at"]),
models.Index(fields=["distribution_type"]), models.Index(fields=["distribution_type"]),
models.Index(fields=["game"]),
models.Index(fields=["owner_organization"]),
] ]
def __str__(self) -> str: def __str__(self) -> str:
@ -194,22 +304,58 @@ class DropBenefit(models.Model):
class TimeBasedDrop(models.Model): class TimeBasedDrop(models.Model):
"""Represents a time-based drop in a drop campaign.""" """Represents a time-based drop in a drop campaign."""
id = models.TextField(primary_key=True) id = models.CharField(
name = models.TextField(db_index=True) max_length=64,
required_minutes_watched = models.PositiveIntegerField(db_index=True, null=True) primary_key=True,
required_subs = models.PositiveIntegerField(default=0) help_text="Unique Twitch identifier for the time-based drop.",
start_at = models.DateTimeField(db_index=True, null=True) )
end_at = models.DateTimeField(db_index=True, null=True) name = models.CharField(
max_length=255,
db_index=True,
help_text="Name of the time-based drop.",
)
required_minutes_watched = models.PositiveIntegerField(
db_index=True,
null=True,
blank=True,
help_text="Minutes required to watch before earning this drop.",
)
required_subs = models.PositiveIntegerField(
default=0,
help_text="Number of subscriptions required to unlock this drop.",
)
start_at = models.DateTimeField(
db_index=True,
null=True,
blank=True,
help_text="Datetime when this drop becomes available.",
)
end_at = models.DateTimeField(
db_index=True,
null=True,
blank=True,
help_text="Datetime when this drop expires.",
)
# Foreign keys # Foreign keys
campaign = models.ForeignKey(DropCampaign, on_delete=models.CASCADE, related_name="time_based_drops", db_index=True) campaign = models.ForeignKey(
benefits = models.ManyToManyField(DropBenefit, through="DropBenefitEdge", related_name="drops") # type: ignore[var-annotated] DropCampaign,
on_delete=models.CASCADE,
related_name="time_based_drops",
help_text="The campaign this drop belongs to.",
)
benefits = models.ManyToManyField(
DropBenefit,
through="DropBenefitEdge",
related_name="drops",
help_text="Benefits unlocked by this drop.",
)
class Meta: class Meta:
ordering = ["start_at"]
indexes: ClassVar[list] = [ indexes: ClassVar[list] = [
models.Index(fields=["name"]), models.Index(fields=["name"]),
models.Index(fields=["start_at", "end_at"]), models.Index(fields=["start_at", "end_at"]),
models.Index(fields=["campaign"]),
models.Index(fields=["required_minutes_watched"]), models.Index(fields=["required_minutes_watched"]),
] ]
@ -221,12 +367,25 @@ class TimeBasedDrop(models.Model):
class DropBenefitEdge(models.Model): class DropBenefitEdge(models.Model):
"""Represents the relationship between a TimeBasedDrop and a DropBenefit.""" """Represents the relationship between a TimeBasedDrop and a DropBenefit."""
drop = models.ForeignKey(TimeBasedDrop, on_delete=models.CASCADE, db_index=True) drop = models.ForeignKey(
benefit = models.ForeignKey(DropBenefit, on_delete=models.CASCADE, db_index=True) TimeBasedDrop,
entitlement_limit = models.PositiveIntegerField(default=1) on_delete=models.CASCADE,
help_text="The time-based drop in this relationship.",
)
benefit = models.ForeignKey(
DropBenefit,
on_delete=models.CASCADE,
help_text="The benefit in this relationship.",
)
entitlement_limit = models.PositiveIntegerField(
default=1,
help_text="Max times this benefit can be claimed for this drop.",
)
class Meta: class Meta:
unique_together = ("drop", "benefit") constraints = [
models.UniqueConstraint(fields=("drop", "benefit"), name="unique_drop_benefit"),
]
indexes: ClassVar[list] = [ indexes: ClassVar[list] = [
models.Index(fields=["drop", "benefit"]), models.Index(fields=["drop", "benefit"]),
] ]

View file

@ -2,7 +2,7 @@ from __future__ import annotations
import datetime import datetime
import logging import logging
from dataclasses import dataclass from collections import OrderedDict, defaultdict
from typing import TYPE_CHECKING, Any, cast from typing import TYPE_CHECKING, Any, cast
from django.contrib import messages from django.contrib import messages
@ -58,7 +58,8 @@ class OrgDetailView(DetailView):
else: else:
subscription = NotificationSubscription.objects.filter(user=user, organization=organization).first() subscription = NotificationSubscription.objects.filter(user=user, organization=organization).first()
games: QuerySet[Game, Game] = Game.objects.filter(drop_campaigns__owner=organization).distinct() games: QuerySet[Game, Game] = organization.games.all() # pyright: ignore[reportAttributeAccessIssue]
context.update({ context.update({
"subscription": subscription, "subscription": subscription,
"games": games, "games": games,
@ -87,7 +88,7 @@ class DropCampaignListView(ListView):
if game_filter: if game_filter:
queryset = queryset.filter(game__id=game_filter) queryset = queryset.filter(game__id=game_filter)
return queryset.select_related("game", "owner").order_by("-start_at") return queryset.select_related("game__owner").order_by("-start_at")
def get_context_data(self, **kwargs) -> dict[str, Any]: def get_context_data(self, **kwargs) -> dict[str, Any]:
"""Add additional context data. """Add additional context data.
@ -99,10 +100,10 @@ class DropCampaignListView(ListView):
dict: Context data. dict: Context data.
""" """
kwargs = cast("dict[str, Any]", kwargs) kwargs = cast("dict[str, Any]", kwargs)
context: dict[str, datetime.datetime | str | int | QuerySet[Game, Game] | None] = super().get_context_data(**kwargs) context: dict[str, Any] = super().get_context_data(**kwargs)
context["games"] = Game.objects.all().order_by("display_name") context["games"] = Game.objects.all().order_by("display_name")
context["status_options"] = ["active", "upcoming", "expired"]
context["now"] = timezone.now() context["now"] = timezone.now()
context["selected_game"] = str(self.request.GET.get(key="game", default="")) context["selected_game"] = str(self.request.GET.get(key="game", default=""))
context["selected_per_page"] = self.paginate_by context["selected_per_page"] = self.paginate_by
@ -130,7 +131,7 @@ class DropCampaignDetailView(DetailView):
if queryset is None: if queryset is None:
queryset = self.get_queryset() queryset = self.get_queryset()
queryset = queryset.select_related("game", "owner") queryset = queryset.select_related("game__owner")
return super().get_object(queryset=queryset) return super().get_object(queryset=queryset)
@ -162,12 +163,12 @@ class GamesGridView(ListView):
context_object_name = "games" context_object_name = "games"
def get_queryset(self) -> QuerySet[Game]: def get_queryset(self) -> QuerySet[Game]:
"""Get queryset of games, annotated with campaign counts to avoid N+1 queries. """Get queryset of all games, annotated with campaign counts.
Returns: Returns:
QuerySet[Game]: Queryset of games with annotations. QuerySet: Annotated games queryset.
""" """
now = timezone.now() now: datetime.datetime = timezone.now()
return ( return (
super() super()
.get_queryset() .get_queryset()
@ -186,64 +187,40 @@ class GamesGridView(ListView):
) )
def get_context_data(self, **kwargs) -> dict[str, Any]: def get_context_data(self, **kwargs) -> dict[str, Any]:
"""Add additional context data with games grouped by organization. """Add additional context data with games grouped by their owning organization in a highly optimized manner.
Args: Args:
**kwargs: Additional keyword arguments. **kwargs: Additional arguments.
Returns: Returns:
dict: Context data with games grouped by organization. dict: Context data with games grouped by organization.
""" """
@dataclass(frozen=True)
class OrganizationData:
id: str
name: str
context: dict[str, Any] = super().get_context_data(**kwargs) context: dict[str, Any] = super().get_context_data(**kwargs)
games_by_org: dict[OrganizationData, list[dict[str, Game | dict[str, int]]]] = {}
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
organizations_with_games: QuerySet[Organization, Organization] = Organization.objects.filter(drop_campaigns__isnull=False).distinct().order_by("name") games_with_campaigns: QuerySet[Game, Game] = (
Game.objects.filter(drop_campaigns__isnull=False)
game_org_relations: QuerySet[DropCampaign, dict[str, Any]] = DropCampaign.objects.values("game_id", "owner_id", "owner__name").annotate( .select_related("owner")
campaign_count=Count("id", distinct=True), .annotate(
active_count=Count("id", filter=Q(start_at__lte=now, end_at__gte=now), distinct=True), campaign_count=Count("drop_campaigns", distinct=True),
active_count=Count(
"drop_campaigns",
filter=Q(
drop_campaigns__start_at__lte=now,
drop_campaigns__end_at__gte=now,
),
distinct=True,
),
)
.order_by("owner__name", "display_name")
) )
all_games: dict[str, Game] = {game.id: game for game in Game.objects.all()} games_by_org: defaultdict[Organization, list[dict[str, Game]]] = defaultdict(list)
org_names: dict[str, str] = {org.id: org.name for org in organizations_with_games} for game in games_with_campaigns:
if game.owner:
games_by_org[game.owner].append({"game": game})
game_org_map: dict[str, dict[str, Any]] = {} context["games_by_org"] = OrderedDict(sorted(games_by_org.items(), key=lambda item: item[0].name))
for relation in game_org_relations:
org_id: str = relation["owner_id"]
game_id: str = relation["game_id"]
if org_id not in game_org_map:
game_org_map[org_id] = {}
if game_id not in game_org_map[org_id]:
game: Game | None = all_games.get(game_id)
if game:
game_org_map[org_id][game_id] = {
"game": game,
"campaign_count": relation["campaign_count"],
"active_count": relation["active_count"],
}
for org_id, games in game_org_map.items():
if org_id in org_names:
org_obj = OrganizationData(id=org_id, name=org_names[org_id])
games_by_org[org_obj] = list(games.values())
games_with_counts: list[dict[str, Game | dict[str, int]]] = []
for org_games in games_by_org.values():
games_with_counts.extend(org_games)
context["games_with_counts"] = games_with_counts
context["games_by_org"] = games_by_org
return context return context
@ -275,7 +252,7 @@ class GameDetailView(DetailView):
subscription = NotificationSubscription.objects.filter(user=user, game=game).first() subscription = NotificationSubscription.objects.filter(user=user, game=game).first()
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
all_campaigns: QuerySet[DropCampaign, DropCampaign] = DropCampaign.objects.filter(game=game).select_related("owner").order_by("-end_at") all_campaigns: QuerySet[DropCampaign, DropCampaign] = DropCampaign.objects.filter(game=game).select_related("game__owner").order_by("-end_at")
active_campaigns: list[DropCampaign] = [ active_campaigns: list[DropCampaign] = [
campaign campaign
@ -295,7 +272,7 @@ class GameDetailView(DetailView):
"upcoming_campaigns": upcoming_campaigns, "upcoming_campaigns": upcoming_campaigns,
"expired_campaigns": expired_campaigns, "expired_campaigns": expired_campaigns,
"subscription": subscription, "subscription": subscription,
"owner": active_campaigns[0].owner if active_campaigns else None, "owner": game.owner,
"now": now, "now": now,
}) })
@ -312,9 +289,9 @@ def dashboard(request: HttpRequest) -> HttpResponse:
HttpResponse: The rendered dashboard template. HttpResponse: The rendered dashboard template.
""" """
now: datetime.datetime = timezone.now() now: datetime.datetime = timezone.now()
active_campaigns: QuerySet[DropCampaign, DropCampaign] = ( active_campaigns: QuerySet[DropCampaign] = (
DropCampaign.objects.filter(start_at__lte=now, end_at__gte=now) DropCampaign.objects.filter(start_at__lte=now, end_at__gte=now)
.select_related("game", "owner") .select_related("game__owner")
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"time_based_drops", "time_based_drops",
@ -326,8 +303,10 @@ def dashboard(request: HttpRequest) -> HttpResponse:
campaigns_by_org_game: dict[str, Any] = {} campaigns_by_org_game: dict[str, Any] = {}
for campaign in active_campaigns: for campaign in active_campaigns:
org_id: str = campaign.owner.id owner: Organization | None = campaign.game.owner
org_name: str = campaign.owner.name
org_id: str = owner.id if owner else "unknown"
org_name: str = owner.name if owner else "Unknown"
game_id: str = campaign.game.id game_id: str = campaign.game.id
game_name: str = campaign.game.display_name game_name: str = campaign.game.display_name
@ -370,8 +349,6 @@ def debug_view(request: HttpRequest) -> HttpResponse:
Returns: Returns:
HttpResponse: Rendered debug template or redirect if unauthorized. HttpResponse: Rendered debug template or redirect if unauthorized.
""" """
# Was previously staff-only; now any authenticated user can view.
now = timezone.now() now = timezone.now()
# Games with no organizations (no campaigns linking to an org) # Games with no organizations (no campaigns linking to an org)
@ -380,12 +357,12 @@ def debug_view(request: HttpRequest) -> HttpResponse:
# Campaigns with missing or obviously broken images (empty or very short or not http) # Campaigns with missing or obviously broken images (empty or very short or not http)
broken_image_campaigns: QuerySet[DropCampaign, DropCampaign] = DropCampaign.objects.filter( broken_image_campaigns: QuerySet[DropCampaign, DropCampaign] = DropCampaign.objects.filter(
Q(image_url__isnull=True) | Q(image_url__exact="") | ~Q(image_url__startswith="http") Q(image_url__isnull=True) | Q(image_url__exact="") | ~Q(image_url__startswith="http")
).select_related("game", "owner") ).select_related("game")
# Benefits with missing images # Benefits with missing images
broken_benefit_images: QuerySet[DropBenefit, DropBenefit] = DropBenefit.objects.filter( broken_benefit_images: QuerySet[DropBenefit, DropBenefit] = DropBenefit.objects.filter(
Q(image_asset_url__isnull=True) | Q(image_asset_url__exact="") | ~Q(image_asset_url__startswith="http") Q(image_asset_url__isnull=True) | Q(image_asset_url__exact="") | ~Q(image_asset_url__startswith="http")
).select_related("game", "owner_organization") ).prefetch_related(Prefetch("drops", queryset=TimeBasedDrop.objects.select_related("campaign__game")))
# Time-based drops without any benefits # Time-based drops without any benefits
drops_without_benefits: QuerySet[TimeBasedDrop, TimeBasedDrop] = TimeBasedDrop.objects.filter(benefits__isnull=True).select_related("campaign") drops_without_benefits: QuerySet[TimeBasedDrop, TimeBasedDrop] = TimeBasedDrop.objects.filter(benefits__isnull=True).select_related("campaign")
@ -393,7 +370,7 @@ def debug_view(request: HttpRequest) -> HttpResponse:
# Campaigns with invalid dates (start after end or missing either) # Campaigns with invalid dates (start after end or missing either)
invalid_date_campaigns: QuerySet[DropCampaign, DropCampaign] = DropCampaign.objects.filter( invalid_date_campaigns: QuerySet[DropCampaign, DropCampaign] = DropCampaign.objects.filter(
Q(start_at__gt=models.F("end_at")) | Q(start_at__isnull=True) | Q(end_at__isnull=True) Q(start_at__gt=models.F("end_at")) | Q(start_at__isnull=True) | Q(end_at__isnull=True)
).select_related("game", "owner") ).select_related("game")
# Duplicate campaign names per game # Duplicate campaign names per game
duplicate_name_campaigns = DropCampaign.objects.values("game_id", "name").annotate(name_count=Count("id")).filter(name_count__gt=1).order_by("-name_count") duplicate_name_campaigns = DropCampaign.objects.values("game_id", "name").annotate(name_count=Count("id")).filter(name_count__gt=1).order_by("-name_count")

121
uv.lock generated
View file

@ -59,6 +59,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/63/51/ef6c5628e46092f0a54c7cee69acc827adc6b6aab57b55d344fefbdf28f1/cssbeautifier-1.15.4-py3-none-any.whl", hash = "sha256:78c84d5e5378df7d08622bbd0477a1abdbd209680e95480bf22f12d5701efc98", size = 123667, upload-time = "2025-02-27T17:53:43.594Z" }, { url = "https://files.pythonhosted.org/packages/63/51/ef6c5628e46092f0a54c7cee69acc827adc6b6aab57b55d344fefbdf28f1/cssbeautifier-1.15.4-py3-none-any.whl", hash = "sha256:78c84d5e5378df7d08622bbd0477a1abdbd209680e95480bf22f12d5701efc98", size = 123667, upload-time = "2025-02-27T17:53:43.594Z" },
] ]
[[package]]
name = "dateparser"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
{ name = "pytz" },
{ name = "regex" },
{ name = "tzlocal" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a9/30/064144f0df1749e7bb5faaa7f52b007d7c2d08ec08fed8411aba87207f68/dateparser-1.2.2.tar.gz", hash = "sha256:986316f17cb8cdc23ea8ce563027c5ef12fc725b6fb1d137c14ca08777c5ecf7", size = 329840, upload-time = "2025-06-26T09:29:23.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/22/f020c047ae1346613db9322638186468238bcfa8849b4668a22b97faad65/dateparser-1.2.2-py3-none-any.whl", hash = "sha256:5a5d7211a09013499867547023a2a0c91d5a27d15dd4dbcea676ea9fe66f2482", size = 315453, upload-time = "2025-06-26T09:29:21.412Z" },
]
[[package]] [[package]]
name = "django" name = "django"
version = "5.2.5" version = "5.2.5"
@ -218,6 +233,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2d/14/1c65fccf8413d5f5c6e8425f84675169654395098000d8bddc4e9d3390e1/jsbeautifier-1.15.4-py3-none-any.whl", hash = "sha256:72f65de312a3f10900d7685557f84cb61a9733c50dcc27271a39f5b0051bf528", size = 94707, upload-time = "2025-02-27T17:53:46.152Z" }, { url = "https://files.pythonhosted.org/packages/2d/14/1c65fccf8413d5f5c6e8425f84675169654395098000d8bddc4e9d3390e1/jsbeautifier-1.15.4-py3-none-any.whl", hash = "sha256:72f65de312a3f10900d7685557f84cb61a9733c50dcc27271a39f5b0051bf528", size = 94707, upload-time = "2025-02-27T17:53:46.152Z" },
] ]
[[package]]
name = "json-repair"
version = "0.50.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/2c/bfdb1886bdda03e248d597922013eeb20d62188cb48d394590ae6e0c8975/json_repair-0.50.0.tar.gz", hash = "sha256:1d42a3f353e389cf6051941b45fa44b6d130af3c91406a749e88586d830adb89", size = 34815, upload-time = "2025-08-20T15:01:58.126Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/35/c2/93368d4c9355e8ad1f6d62b804de241939d0796b2a3a73737f665b802808/json_repair-0.50.0-py3-none-any.whl", hash = "sha256:b15da2c42deb43419b182d97dcfde6cd86d0b18ccd18ed1a887104ce85e7a364", size = 25985, upload-time = "2025-08-20T15:01:56.567Z" },
]
[[package]] [[package]]
name = "json5" name = "json5"
version = "0.12.1" version = "0.12.1"
@ -402,6 +426,18 @@ psutil = [
{ name = "psutil" }, { name = "psutil" },
] ]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]
[[package]] [[package]]
name = "python-dotenv" name = "python-dotenv"
version = "1.1.1" version = "1.1.1"
@ -411,6 +447,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
] ]
[[package]]
name = "pytz"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
]
[[package]] [[package]]
name = "pyyaml" name = "pyyaml"
version = "6.0.2" version = "6.0.2"
@ -430,38 +475,38 @@ wheels = [
[[package]] [[package]]
name = "regex" name = "regex"
version = "2025.7.34" version = "2025.8.29"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0b/de/e13fa6dc61d78b30ba47481f99933a3b49a57779d625c392d8036770a60d/regex-2025.7.34.tar.gz", hash = "sha256:9ead9765217afd04a86822dfcd4ed2747dfe426e887da413b15ff0ac2457e21a", size = 400714, upload-time = "2025-07-31T00:21:16.262Z" } sdist = { url = "https://files.pythonhosted.org/packages/e4/10/2d333227cf5198eb3252f2d50c8ade5cd2015f11c22403f0c9e3d529e81a/regex-2025.8.29.tar.gz", hash = "sha256:731ddb27a0900fa227dfba976b4efccec8c1c6fba147829bb52e71d49e91a5d7", size = 400817, upload-time = "2025-08-29T22:43:36.985Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/15/16/b709b2119975035169a25aa8e4940ca177b1a2e25e14f8d996d09130368e/regex-2025.7.34-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c3c9740a77aeef3f5e3aaab92403946a8d34437db930a0280e7e81ddcada61f5", size = 485334, upload-time = "2025-07-31T00:19:56.58Z" }, { url = "https://files.pythonhosted.org/packages/42/db/2f0e1fbca855f3c519f3f8198817d14a9569ca939bc0cc86efd4da196d3e/regex-2025.8.29-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:eed02e5c39f91268ea4ddf68ee19eed189d57c605530b7d32960f54325c52e7a", size = 485405, upload-time = "2025-08-29T22:42:10.138Z" },
{ url = "https://files.pythonhosted.org/packages/94/a6/c09136046be0595f0331bc58a0e5f89c2d324cf734e0b0ec53cf4b12a636/regex-2025.7.34-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:69ed3bc611540f2ea70a4080f853741ec698be556b1df404599f8724690edbcd", size = 289942, upload-time = "2025-07-31T00:19:57.943Z" }, { url = "https://files.pythonhosted.org/packages/15/ed/52afe839607719750acc87d144ec3db699adb9c1f40ecb6fa9f3700437b6/regex-2025.8.29-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:630d5c7e0a490db2fee3c7b282c8db973abcbb036a6e4e6dc06c4270965852be", size = 290014, upload-time = "2025-08-29T22:42:12.38Z" },
{ url = "https://files.pythonhosted.org/packages/36/91/08fc0fd0f40bdfb0e0df4134ee37cfb16e66a1044ac56d36911fd01c69d2/regex-2025.7.34-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d03c6f9dcd562c56527c42b8530aad93193e0b3254a588be1f2ed378cdfdea1b", size = 285991, upload-time = "2025-07-31T00:19:59.837Z" }, { url = "https://files.pythonhosted.org/packages/da/84/beb3becb129e41ae3e6bacd737aa751228ec0c17c707b9999648f050968c/regex-2025.8.29-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2206d3a30469e8fc8848139884168127f456efbaca8ae14809c26b98d2be15c6", size = 286059, upload-time = "2025-08-29T22:42:14.009Z" },
{ url = "https://files.pythonhosted.org/packages/be/2f/99dc8f6f756606f0c214d14c7b6c17270b6bbe26d5c1f05cde9dbb1c551f/regex-2025.7.34-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6164b1d99dee1dfad33f301f174d8139d4368a9fb50bf0a3603b2eaf579963ad", size = 797415, upload-time = "2025-07-31T00:20:01.668Z" }, { url = "https://files.pythonhosted.org/packages/44/31/74476ac68cd5ed46634683cba634ab0885e917624d620c5959f67835554b/regex-2025.8.29-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:394c492c398a9f9e17545e19f770c58b97e65963eedaa25bb879e80a03e2b327", size = 797490, upload-time = "2025-08-29T22:42:15.864Z" },
{ url = "https://files.pythonhosted.org/packages/62/cf/2fcdca1110495458ba4e95c52ce73b361cf1cafd8a53b5c31542cde9a15b/regex-2025.7.34-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1e4f4f62599b8142362f164ce776f19d79bdd21273e86920a7b604a4275b4f59", size = 862487, upload-time = "2025-07-31T00:20:03.142Z" }, { url = "https://files.pythonhosted.org/packages/3f/97/1a8d109f891c4af31f43295304a51b76bc7aef4ce6d7953e4832f86c85f0/regex-2025.8.29-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:db8b0e05af08ff38d78544950e844b5f159032b66dedda19b3f9b17297248be7", size = 862562, upload-time = "2025-08-29T22:42:17.557Z" },
{ url = "https://files.pythonhosted.org/packages/90/38/899105dd27fed394e3fae45607c1983e138273ec167e47882fc401f112b9/regex-2025.7.34-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:72a26dcc6a59c057b292f39d41465d8233a10fd69121fa24f8f43ec6294e5415", size = 910717, upload-time = "2025-07-31T00:20:04.727Z" }, { url = "https://files.pythonhosted.org/packages/1b/a8/13d6ea4b8a0c7eed0e528dcb25cbdc3bc53e26b0928dc48d6c0381516c4a/regex-2025.8.29-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd7c1821eff911917c476d41030b422791ce282c23ee9e1b8f7681fd0993f1e4", size = 910790, upload-time = "2025-08-29T22:42:19.268Z" },
{ url = "https://files.pythonhosted.org/packages/ee/f6/4716198dbd0bcc9c45625ac4c81a435d1c4d8ad662e8576dac06bab35b17/regex-2025.7.34-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5273fddf7a3e602695c92716c420c377599ed3c853ea669c1fe26218867002f", size = 801943, upload-time = "2025-07-31T00:20:07.1Z" }, { url = "https://files.pythonhosted.org/packages/10/b3/1c7320c1fdc6569a086949d2c5b7b742696098c28a6c83ca909b8d36d17b/regex-2025.8.29-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d8a7f75da748a2d0c045600259f1899c9dd8dd9d3da1daa50bf534c3fa5ba", size = 802016, upload-time = "2025-08-29T22:42:21.268Z" },
{ url = "https://files.pythonhosted.org/packages/40/5d/cff8896d27e4e3dd11dd72ac78797c7987eb50fe4debc2c0f2f1682eb06d/regex-2025.7.34-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c1844be23cd40135b3a5a4dd298e1e0c0cb36757364dd6cdc6025770363e06c1", size = 786664, upload-time = "2025-07-31T00:20:08.818Z" }, { url = "https://files.pythonhosted.org/packages/7a/b5/f3613b70a569b6309cd2a61ae869407b45cff25c9734f5ff179b416e9615/regex-2025.8.29-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5cd74545c32e0da0d489c2293101a82f4a1b88050c235e45509e4123017673b2", size = 786740, upload-time = "2025-08-29T22:42:23.538Z" },
{ url = "https://files.pythonhosted.org/packages/10/29/758bf83cf7b4c34f07ac3423ea03cee3eb3176941641e4ccc05620f6c0b8/regex-2025.7.34-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dde35e2afbbe2272f8abee3b9fe6772d9b5a07d82607b5788e8508974059925c", size = 856457, upload-time = "2025-07-31T00:20:10.328Z" }, { url = "https://files.pythonhosted.org/packages/e0/8a/9f16babae23011acbd27f886c4817159508f4f3209bcfce4bc2b8f12f2ba/regex-2025.8.29-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:97b98ea38fc3c1034f3d7bd30288d2c5b3be8cdcd69e2061d1c86cb14644a27b", size = 856533, upload-time = "2025-08-29T22:42:26.055Z" },
{ url = "https://files.pythonhosted.org/packages/d7/30/c19d212b619963c5b460bfed0ea69a092c6a43cba52a973d46c27b3e2975/regex-2025.7.34-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f6e8e7af516a7549412ce57613e859c3be27d55341a894aacaa11703a4c31a", size = 849008, upload-time = "2025-07-31T00:20:11.823Z" }, { url = "https://files.pythonhosted.org/packages/4d/d0/adca6eec8ed79541edadecf8b512d7a3960c2ba983d2e5baf68dbddd7a90/regex-2025.8.29-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:8decb26f271b989d612c5d99db5f8f741dcd63ece51c59029840070f5f9778bf", size = 849083, upload-time = "2025-08-29T22:42:27.762Z" },
{ url = "https://files.pythonhosted.org/packages/9e/b8/3c35da3b12c87e3cc00010ef6c3a4ae787cff0bc381aa3d251def219969a/regex-2025.7.34-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:469142fb94a869beb25b5f18ea87646d21def10fbacb0bcb749224f3509476f0", size = 788101, upload-time = "2025-07-31T00:20:13.729Z" }, { url = "https://files.pythonhosted.org/packages/46/cc/37fddb2a17cefffb43b9dfd5f585a6cd6f90ee5b32c821886d0c0c3bc243/regex-2025.8.29-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:62141843d1ec079cd66604424af566e542e7e072b2d9e37165d414d2e6e271dd", size = 788177, upload-time = "2025-08-29T22:42:31.121Z" },
{ url = "https://files.pythonhosted.org/packages/47/80/2f46677c0b3c2b723b2c358d19f9346e714113865da0f5f736ca1a883bde/regex-2025.7.34-cp313-cp313-win32.whl", hash = "sha256:da7507d083ee33ccea1310447410c27ca11fb9ef18c95899ca57ff60a7e4d8f1", size = 264401, upload-time = "2025-07-31T00:20:15.233Z" }, { url = "https://files.pythonhosted.org/packages/f5/ea/413fe88ce5ac2418223434aa1603d92134b74deed6007dc6e4c37d83bbcd/regex-2025.8.29-cp313-cp313-win32.whl", hash = "sha256:dd23006c90d9ff0c2e4e5f3eaf8233dcefe45684f2acb330869ec5c2aa02b1fb", size = 264473, upload-time = "2025-08-29T22:42:32.706Z" },
{ url = "https://files.pythonhosted.org/packages/be/fa/917d64dd074682606a003cba33585c28138c77d848ef72fc77cbb1183849/regex-2025.7.34-cp313-cp313-win_amd64.whl", hash = "sha256:9d644de5520441e5f7e2db63aec2748948cc39ed4d7a87fd5db578ea4043d997", size = 275368, upload-time = "2025-07-31T00:20:16.711Z" }, { url = "https://files.pythonhosted.org/packages/5a/73/d07bc1d1969e41bf1637a8aad4228da506747f4c94415ef03c534c7d68d6/regex-2025.8.29-cp313-cp313-win_amd64.whl", hash = "sha256:d41a71342819bdfe87c701f073a14ea4bd3f847333d696c7344e9ff3412b7f70", size = 275438, upload-time = "2025-08-29T22:42:34.35Z" },
{ url = "https://files.pythonhosted.org/packages/65/cd/f94383666704170a2154a5df7b16be28f0c27a266bffcd843e58bc84120f/regex-2025.7.34-cp313-cp313-win_arm64.whl", hash = "sha256:7bf1c5503a9f2cbd2f52d7e260acb3131b07b6273c470abb78568174fe6bde3f", size = 268482, upload-time = "2025-07-31T00:20:18.189Z" }, { url = "https://files.pythonhosted.org/packages/86/cd/2e05fc85ebee6fe6c5073c9b0c737a473c226422d75e93903810b247a9fe/regex-2025.8.29-cp313-cp313-win_arm64.whl", hash = "sha256:54018e66344d60b214f4aa151c046e0fa528221656f4f7eba5a787ccc7057312", size = 268553, upload-time = "2025-08-29T22:42:35.874Z" },
{ url = "https://files.pythonhosted.org/packages/ac/23/6376f3a23cf2f3c00514b1cdd8c990afb4dfbac3cb4a68b633c6b7e2e307/regex-2025.7.34-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:8283afe7042d8270cecf27cca558873168e771183d4d593e3c5fe5f12402212a", size = 485385, upload-time = "2025-07-31T00:20:19.692Z" }, { url = "https://files.pythonhosted.org/packages/2e/2d/2aa4b98231017994ea52d05c13997778af415f5d7faa7f90988a640dac44/regex-2025.8.29-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c03308757831a8d89e7c007abb75d1d4c9fbca003b5fb32755d4475914535f08", size = 485447, upload-time = "2025-08-29T22:42:37.429Z" },
{ url = "https://files.pythonhosted.org/packages/73/5b/6d4d3a0b4d312adbfd6d5694c8dddcf1396708976dd87e4d00af439d962b/regex-2025.7.34-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6c053f9647e3421dd2f5dff8172eb7b4eec129df9d1d2f7133a4386319b47435", size = 289788, upload-time = "2025-07-31T00:20:21.941Z" }, { url = "https://files.pythonhosted.org/packages/b7/b4/ed3241bb99a0783fe650d8511924c7c43f704b720fab3e353393bea8c96a/regex-2025.8.29-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0d4b71791975fc203e0e6c50db974abb23a8df30729c1ac4fd68c9f2bb8c9358", size = 289862, upload-time = "2025-08-29T22:42:39.71Z" },
{ url = "https://files.pythonhosted.org/packages/92/71/5862ac9913746e5054d01cb9fb8125b3d0802c0706ef547cae1e7f4428fa/regex-2025.7.34-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a16dd56bbcb7d10e62861c3cd000290ddff28ea142ffb5eb3470f183628011ac", size = 286136, upload-time = "2025-07-31T00:20:26.146Z" }, { url = "https://files.pythonhosted.org/packages/ba/f6/5237a7d0b2bd64bb216d06470549bc4cc33de57033772e3018708636a027/regex-2025.8.29-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:284fcd2dcb613e8b89b22a30cf42998c9a73ee360b8a24db8457d24f5c42282e", size = 286211, upload-time = "2025-08-29T22:42:41.266Z" },
{ url = "https://files.pythonhosted.org/packages/27/df/5b505dc447eb71278eba10d5ec940769ca89c1af70f0468bfbcb98035dc2/regex-2025.7.34-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69c593ff5a24c0d5c1112b0df9b09eae42b33c014bdca7022d6523b210b69f72", size = 797753, upload-time = "2025-07-31T00:20:27.919Z" }, { url = "https://files.pythonhosted.org/packages/58/eb/05568fdc4028d1b339fb950fe6b92ade2613edd6423291939c8e29b21e8a/regex-2025.8.29-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b394b5157701b22cf63699c792bfeed65fbfeacbd94fea717a9e2036a51148ab", size = 797826, upload-time = "2025-08-29T22:42:42.911Z" },
{ url = "https://files.pythonhosted.org/packages/86/38/3e3dc953d13998fa047e9a2414b556201dbd7147034fbac129392363253b/regex-2025.7.34-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98d0ce170fcde1a03b5df19c5650db22ab58af375aaa6ff07978a85c9f250f0e", size = 863263, upload-time = "2025-07-31T00:20:29.803Z" }, { url = "https://files.pythonhosted.org/packages/3d/2a/a3c1c209faa1f6a218e64c5a235e06f6f36c45b5aa924c6bf75241a996f7/regex-2025.8.29-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ea197ac22396faf5e70c87836bb89f94ed5b500e1b407646a4e5f393239611f1", size = 863338, upload-time = "2025-08-29T22:42:44.831Z" },
{ url = "https://files.pythonhosted.org/packages/68/e5/3ff66b29dde12f5b874dda2d9dec7245c2051f2528d8c2a797901497f140/regex-2025.7.34-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d72765a4bff8c43711d5b0f5b452991a9947853dfa471972169b3cc0ba1d0751", size = 910103, upload-time = "2025-07-31T00:20:31.313Z" }, { url = "https://files.pythonhosted.org/packages/dd/66/5e96f217662387742c0d9732e97129850bd3243e019309c1fbdcd62b5421/regex-2025.8.29-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:decd84f195c08b3d9d0297a7e310379aae13ca7e166473534508c81b95c74bba", size = 910176, upload-time = "2025-08-29T22:42:46.997Z" },
{ url = "https://files.pythonhosted.org/packages/9e/fe/14176f2182125977fba3711adea73f472a11f3f9288c1317c59cd16ad5e6/regex-2025.7.34-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4494f8fd95a77eb434039ad8460e64d57baa0434f1395b7da44015bef650d0e4", size = 801709, upload-time = "2025-07-31T00:20:33.323Z" }, { url = "https://files.pythonhosted.org/packages/fc/f2/975e77333267f9652bc2cc926382d8c9d86683eb84d1989459e644ac818b/regex-2025.8.29-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ebaf81f7344dbf1a2b383e35923648de8f78fb262cf04154c82853887ac3e684", size = 801784, upload-time = "2025-08-29T22:42:48.786Z" },
{ url = "https://files.pythonhosted.org/packages/5a/0d/80d4e66ed24f1ba876a9e8e31b709f9fd22d5c266bf5f3ab3c1afe683d7d/regex-2025.7.34-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4f42b522259c66e918a0121a12429b2abcf696c6f967fa37bdc7b72e61469f98", size = 786726, upload-time = "2025-07-31T00:20:35.252Z" }, { url = "https://files.pythonhosted.org/packages/75/d9/b25dbf9729b5a5958a804e91b376fe8e829ec10c0d7edb4b1ad91070132b/regex-2025.8.29-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d82fb8a97e5ed8f1d3ed7f8e0e7fe1760faa95846c0d38b314284dfdbe86b229", size = 786799, upload-time = "2025-08-29T22:42:50.868Z" },
{ url = "https://files.pythonhosted.org/packages/12/75/c3ebb30e04a56c046f5c85179dc173818551037daae2c0c940c7b19152cb/regex-2025.7.34-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:aaef1f056d96a0a5d53ad47d019d5b4c66fe4be2da87016e0d43b7242599ffc7", size = 857306, upload-time = "2025-07-31T00:20:37.12Z" }, { url = "https://files.pythonhosted.org/packages/1d/0a/7f8de7ea41d7a3a21dfcb9dcea7b727fdde9e35d74a23e16ef5edcd68005/regex-2025.8.29-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:1dcec2448ed0062f63e82ca02d1d05f74d4127cb6a9d76a73df60e81298d380b", size = 857380, upload-time = "2025-08-29T22:42:52.992Z" },
{ url = "https://files.pythonhosted.org/packages/b1/b2/a4dc5d8b14f90924f27f0ac4c4c4f5e195b723be98adecc884f6716614b6/regex-2025.7.34-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:656433e5b7dccc9bc0da6312da8eb897b81f5e560321ec413500e5367fcd5d47", size = 848494, upload-time = "2025-07-31T00:20:38.818Z" }, { url = "https://files.pythonhosted.org/packages/f8/40/494600424c394a507070b41fc0666ceaa7dccf62c3220a76833eb11de647/regex-2025.8.29-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d0ffe4a3257a235f9d39b99c6f1bc53c7a4b11f28565726b1aa00a5787950d60", size = 848570, upload-time = "2025-08-29T22:42:54.857Z" },
{ url = "https://files.pythonhosted.org/packages/0d/21/9ac6e07a4c5e8646a90b56b61f7e9dac11ae0747c857f91d3d2bc7c241d9/regex-2025.7.34-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e91eb2c62c39705e17b4d42d4b86c4e86c884c0d15d9c5a47d0835f8387add8e", size = 787850, upload-time = "2025-07-31T00:20:40.478Z" }, { url = "https://files.pythonhosted.org/packages/be/d0/6988feb7c15bb3df7b944a10b3b58fb238c94987c70a991ba87e3685e1cd/regex-2025.8.29-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5421a2d2026e8189500f12375cfd80a9a1914466d446edd28b37eb33c1953b39", size = 787926, upload-time = "2025-08-29T22:42:57.025Z" },
{ url = "https://files.pythonhosted.org/packages/be/6c/d51204e28e7bc54f9a03bb799b04730d7e54ff2718862b8d4e09e7110a6a/regex-2025.7.34-cp314-cp314-win32.whl", hash = "sha256:f978ddfb6216028c8f1d6b0f7ef779949498b64117fc35a939022f67f810bdcb", size = 269730, upload-time = "2025-07-31T00:20:42.253Z" }, { url = "https://files.pythonhosted.org/packages/98/16/d719b131b0577a2a975376b3e673fc7f89b9998d54753f0419d59d33b3a1/regex-2025.8.29-cp314-cp314-win32.whl", hash = "sha256:ceeeaab602978c8eac3b25b8707f21a69c0bcd179d9af72519da93ef3966158f", size = 269805, upload-time = "2025-08-29T22:42:59.241Z" },
{ url = "https://files.pythonhosted.org/packages/74/52/a7e92d02fa1fdef59d113098cb9f02c5d03289a0e9f9e5d4d6acccd10677/regex-2025.7.34-cp314-cp314-win_amd64.whl", hash = "sha256:4b7dc33b9b48fb37ead12ffc7bdb846ac72f99a80373c4da48f64b373a7abeae", size = 278640, upload-time = "2025-07-31T00:20:44.42Z" }, { url = "https://files.pythonhosted.org/packages/a5/b7/50d3bb5df25ae73e7aee186a2f1e4f1ed5e4d54006bdf5abd558c1ce9e7a/regex-2025.8.29-cp314-cp314-win_amd64.whl", hash = "sha256:5ba4f8b0d5b88c33fe4060e6def58001fd8334b03c7ce2126964fa8851ab5d1b", size = 278710, upload-time = "2025-08-29T22:43:00.84Z" },
{ url = "https://files.pythonhosted.org/packages/d1/78/a815529b559b1771080faa90c3ab401730661f99d495ab0071649f139ebd/regex-2025.7.34-cp314-cp314-win_arm64.whl", hash = "sha256:4b8c4d39f451e64809912c82392933d80fe2e4a87eeef8859fcc5380d0173c64", size = 271757, upload-time = "2025-07-31T00:20:46.355Z" }, { url = "https://files.pythonhosted.org/packages/0f/34/c723ebe214c33000b53e0eebdc63ad3697d5611c7fa9b388eef2113a5e82/regex-2025.8.29-cp314-cp314-win_arm64.whl", hash = "sha256:7b4a3dc155984f09a55c64b90923cb136cd0dad21ca0168aba2382d90ea4c546", size = 271832, upload-time = "2025-08-29T22:43:02.777Z" },
] ]
[[package]] [[package]]
@ -508,12 +553,14 @@ name = "ttvdrops"
version = "0.1.0" version = "0.1.0"
source = { virtual = "." } source = { virtual = "." }
dependencies = [ dependencies = [
{ name = "dateparser" },
{ name = "django" }, { name = "django" },
{ name = "django-browser-reload" }, { name = "django-browser-reload" },
{ name = "django-debug-toolbar" }, { name = "django-debug-toolbar" },
{ name = "django-stubs", extra = ["compatible-mypy"] }, { name = "django-stubs", extra = ["compatible-mypy"] },
{ name = "django-watchfiles" }, { name = "django-watchfiles" },
{ name = "djlint" }, { name = "djlint" },
{ name = "json-repair" },
{ name = "orjson" }, { name = "orjson" },
{ name = "platformdirs" }, { name = "platformdirs" },
{ name = "python-dotenv" }, { name = "python-dotenv" },
@ -528,12 +575,14 @@ dev = [
[package.metadata] [package.metadata]
requires-dist = [ requires-dist = [
{ name = "dateparser", specifier = ">=1.2.2" },
{ name = "django", specifier = ">=5.2.4" }, { name = "django", specifier = ">=5.2.4" },
{ name = "django-browser-reload", specifier = ">=1.18.0" }, { name = "django-browser-reload", specifier = ">=1.18.0" },
{ name = "django-debug-toolbar", specifier = ">=5.2.0" }, { name = "django-debug-toolbar", specifier = ">=5.2.0" },
{ name = "django-stubs", extras = ["compatible-mypy"], specifier = ">=5.2.2" }, { name = "django-stubs", extras = ["compatible-mypy"], specifier = ">=5.2.2" },
{ name = "django-watchfiles", specifier = ">=1.1.0" }, { name = "django-watchfiles", specifier = ">=1.1.0" },
{ name = "djlint", specifier = ">=1.36.4" }, { name = "djlint", specifier = ">=1.36.4" },
{ name = "json-repair", specifier = ">=0.50.0" },
{ name = "orjson", specifier = ">=3.11.1" }, { name = "orjson", specifier = ">=3.11.1" },
{ name = "platformdirs", specifier = ">=4.3.8" }, { name = "platformdirs", specifier = ">=4.3.8" },
{ name = "python-dotenv", specifier = ">=1.1.1" }, { name = "python-dotenv", specifier = ">=1.1.1" },
@ -573,6 +622,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
] ]
[[package]]
name = "tzlocal"
version = "5.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" },
]
[[package]] [[package]]
name = "watchfiles" name = "watchfiles"
version = "1.1.0" version = "1.1.0"