fix(ui): Queue and deezspot callbacks
@@ -3,16 +3,11 @@ import sqlite3
from pathlib import Path
from typing import Optional

from .v3_2_0 import MigrationV3_2_0
from .v3_2_1 import log_noop_migration_detected
from .v3_3_0 import MigrationV3_3_0

logger = logging.getLogger(__name__)

DATA_DIR = Path("./data")
HISTORY_DB = DATA_DIR / "history" / "download_history.db"
WATCH_DIR = DATA_DIR / "watch"
PLAYLISTS_DB = WATCH_DIR / "playlists.db"
ARTISTS_DB = WATCH_DIR / "artists.db"

# Credentials
CREDS_DIR = DATA_DIR / "creds"
@@ -20,89 +15,6 @@ ACCOUNTS_DB = CREDS_DIR / "accounts.db"
BLOBS_DIR = CREDS_DIR / "blobs"
SEARCH_JSON = CREDS_DIR / "search.json"

# Expected children table columns for history (album_/playlist_)
CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
    "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
    "title": "TEXT NOT NULL",
    "artists": "TEXT",
    "album_title": "TEXT",
    "duration_ms": "INTEGER",
    "track_number": "INTEGER",
    "disc_number": "INTEGER",
    "explicit": "BOOLEAN",
    "status": "TEXT NOT NULL",
    "external_ids": "TEXT",
    "genres": "TEXT",
    "isrc": "TEXT",
    "timestamp": "REAL NOT NULL",
    "position": "INTEGER",
    "metadata": "TEXT",
}

# 3.2.0 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = {
    "spotify_id": "TEXT PRIMARY KEY",
    "name": "TEXT",
    "owner_id": "TEXT",
    "owner_name": "TEXT",
    "total_tracks": "INTEGER",
    "link": "TEXT",
    "snapshot_id": "TEXT",
    "last_checked": "INTEGER",
    "added_at": "INTEGER",
    "is_active": "INTEGER DEFAULT 1",
}

EXPECTED_PLAYLIST_TRACKS_COLUMNS: dict[str, str] = {
    "spotify_track_id": "TEXT PRIMARY KEY",
    "title": "TEXT",
    "artist_names": "TEXT",
    "album_name": "TEXT",
    "album_artist_names": "TEXT",
    "track_number": "INTEGER",
    "album_spotify_id": "TEXT",
    "duration_ms": "INTEGER",
    "added_at_playlist": "TEXT",
    "added_to_db": "INTEGER",
    "is_present_in_spotify": "INTEGER DEFAULT 1",
    "last_seen_in_spotify": "INTEGER",
    "snapshot_id": "TEXT",
    "final_path": "TEXT",
}

EXPECTED_WATCHED_ARTISTS_COLUMNS: dict[str, str] = {
    "spotify_id": "TEXT PRIMARY KEY",
    "name": "TEXT",
    "link": "TEXT",
    "total_albums_on_spotify": "INTEGER",
    "last_checked": "INTEGER",
    "added_at": "INTEGER",
    "is_active": "INTEGER DEFAULT 1",
    "genres": "TEXT",
    "popularity": "INTEGER",
    "image_url": "TEXT",
}

EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
    "album_spotify_id": "TEXT PRIMARY KEY",
    "artist_spotify_id": "TEXT",
    "name": "TEXT",
    "album_group": "TEXT",
    "album_type": "TEXT",
    "release_date": "TEXT",
    "release_date_precision": "TEXT",
    "total_tracks": "INTEGER",
    "link": "TEXT",
    "image_url": "TEXT",
    "added_to_db": "INTEGER",
    "last_seen_on_spotify": "INTEGER",
    "download_task_id": "TEXT",
    "download_status": "INTEGER DEFAULT 0",
    "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
}

m320 = MigrationV3_2_0()


def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
    try:
@@ -115,245 +27,6 @@ def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
        return None


def _ensure_table_schema(
    conn: sqlite3.Connection,
    table_name: str,
    expected_columns: dict[str, str],
    table_description: str,
) -> None:
    try:
        cur = conn.execute(f"PRAGMA table_info({table_name})")
        existing_info = cur.fetchall()
        existing_names = {row[1] for row in existing_info}
        for col_name, col_type in expected_columns.items():
            if col_name in existing_names:
                continue
            col_type_for_add = (
                col_type.replace("PRIMARY KEY", "")
                .replace("AUTOINCREMENT", "")
                .replace("NOT NULL", "")
                .strip()
            )
            try:
                conn.execute(
                    f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
                )
                logger.info(
                    f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
                )
            except sqlite3.OperationalError as e:
                logger.warning(
                    f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
                )
    except Exception as e:
        logger.error(
            f"Error ensuring schema for {table_description} table '{table_name}': {e}",
            exc_info=True,
        )
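
Note: SQLite's ALTER TABLE ... ADD COLUMN cannot add PRIMARY KEY, AUTOINCREMENT, or unconditional NOT NULL columns to an existing table, which is why those qualifiers are stripped above before the ALTER is issued. A minimal sketch of how this behaves (the table and column names below are illustrative, not taken from this commit):

# Illustrative only: an in-memory table that is missing one expected column.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE playlist_demo (spotify_track_id TEXT PRIMARY KEY, title TEXT)")
_ensure_table_schema(
    conn,
    "playlist_demo",
    {"spotify_track_id": "TEXT PRIMARY KEY", "title": "TEXT", "final_path": "TEXT"},
    "playlist tracks (demo)",
)
# Only "final_path TEXT" is added; existing columns are skipped, and a PRIMARY KEY
# qualifier would be stripped before the ALTER if such a column had to be added.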


def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS {table_name} (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            title TEXT NOT NULL,
            artists TEXT,
            album_title TEXT,
            duration_ms INTEGER,
            track_number INTEGER,
            disc_number INTEGER,
            explicit BOOLEAN,
            status TEXT NOT NULL,
            external_ids TEXT,
            genres TEXT,
            isrc TEXT,
            timestamp REAL NOT NULL,
            position INTEGER,
            metadata TEXT
        )
        """
    )
    _ensure_table_schema(
        conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history"
    )


# --- Helper to validate instance is at least 3.1.2 on history DB ---


def _history_children_tables(conn: sqlite3.Connection) -> list[str]:
    tables: set[str] = set()
    try:
        cur = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
        )
        for row in cur.fetchall():
            if row and row[0]:
                tables.add(row[0])
    except sqlite3.Error as e:
        logger.warning(f"Failed to scan sqlite_master for children tables: {e}")

    try:
        cur = conn.execute(
            "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
        )
        for row in cur.fetchall():
            t = row[0]
            if t:
                tables.add(t)
    except sqlite3.Error as e:
        logger.warning(f"Failed to scan download_history for children tables: {e}")

    return sorted(tables)


def _is_history_at_least_3_2_0(conn: sqlite3.Connection) -> bool:
    required_cols = {"service", "quality_format", "quality_bitrate"}
    tables = _history_children_tables(conn)
    if not tables:
        # Nothing to migrate implies OK
        return True
    for t in tables:
        try:
            cur = conn.execute(f"PRAGMA table_info({t})")
            cols = {row[1] for row in cur.fetchall()}
            if not required_cols.issubset(cols):
                return False
        except sqlite3.OperationalError:
            return False
    return True


# --- 3.2.0 verification helpers for Watch DBs ---


def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
    try:
        # Ensure core watched_playlists table exists and has expected schema
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_playlists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                owner_id TEXT,
                owner_name TEXT,
                total_tracks INTEGER,
                link TEXT,
                snapshot_id TEXT,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1
            )
            """
        )
        _ensure_table_schema(
            conn,
            "watched_playlists",
            EXPECTED_WATCHED_PLAYLISTS_COLUMNS,
            "watched playlists",
        )

        # Upgrade all dynamic playlist_ tables
        cur = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'"
        )
        for row in cur.fetchall():
            table_name = row[0]
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    spotify_track_id TEXT PRIMARY KEY,
                    title TEXT,
                    artist_names TEXT,
                    album_name TEXT,
                    album_artist_names TEXT,
                    track_number INTEGER,
                    album_spotify_id TEXT,
                    duration_ms INTEGER,
                    added_at_playlist TEXT,
                    added_to_db INTEGER,
                    is_present_in_spotify INTEGER DEFAULT 1,
                    last_seen_in_spotify INTEGER,
                    snapshot_id TEXT,
                    final_path TEXT
                )
                """
            )
            _ensure_table_schema(
                conn,
                table_name,
                EXPECTED_PLAYLIST_TRACKS_COLUMNS,
                f"playlist tracks ({table_name})",
            )
    except Exception:
        logger.error(
            "Failed to upgrade watch playlists DB to 3.2.0 base schema", exc_info=True
        )


def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
    try:
        # Ensure core watched_artists table exists and has expected schema
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                link TEXT,
                total_albums_on_spotify INTEGER,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1,
                genres TEXT,
                popularity INTEGER,
                image_url TEXT
            )
            """
        )
        _ensure_table_schema(
            conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists"
        )

        # Upgrade all dynamic artist_ tables
        cur = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'"
        )
        for row in cur.fetchall():
            table_name = row[0]
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    album_spotify_id TEXT PRIMARY KEY,
                    artist_spotify_id TEXT,
                    name TEXT,
                    album_group TEXT,
                    album_type TEXT,
                    release_date TEXT,
                    release_date_precision TEXT,
                    total_tracks INTEGER,
                    link TEXT,
                    image_url TEXT,
                    added_to_db INTEGER,
                    last_seen_on_spotify INTEGER,
                    download_task_id TEXT,
                    download_status INTEGER DEFAULT 0,
                    is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
                )
                """
            )
            _ensure_table_schema(
                conn,
                table_name,
                EXPECTED_ARTIST_ALBUMS_COLUMNS,
                f"artist albums ({table_name})",
            )
    except Exception:
        logger.error(
            "Failed to upgrade watch artists DB to 3.2.0 base schema", exc_info=True
        )


def _ensure_creds_filesystem() -> None:
    try:
        BLOBS_DIR.mkdir(parents=True, exist_ok=True)
@@ -374,35 +47,10 @@ def run_migrations_if_needed():
        return

    try:
        # Require instance to be at least 3.2.0 on history DB; otherwise abort
        with _safe_connect(HISTORY_DB) as history_conn:
            if history_conn and not _is_history_at_least_3_2_0(history_conn):
                logger.error(
                    "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0."
                )
                raise RuntimeError(
                    "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0."
                )
        # Validate configuration version strictly at 3.3.0
        MigrationV3_3_0.assert_config_version_is_3_3_0()

        # Watch playlists DB
        with _safe_connect(PLAYLISTS_DB) as conn:
            if conn:
                _update_watch_playlists_db(conn)
                # Apply 3.2.0 additions (batch progress columns)
                if not m320.check_watch_playlists(conn):
                    m320.update_watch_playlists(conn)
                conn.commit()

        # Watch artists DB (if exists)
        if ARTISTS_DB.exists():
            with _safe_connect(ARTISTS_DB) as conn:
                if conn:
                    _update_watch_artists_db(conn)
                    if not m320.check_watch_artists(conn):
                        m320.update_watch_artists(conn)
                    conn.commit()

        # Accounts DB (no changes for this migration path)
        # No schema changes in 3.3.0 path; just ensure Accounts DB can be opened
        with _safe_connect(ACCOUNTS_DB) as conn:
            if conn:
                conn.commit()
@@ -412,5 +60,4 @@ def run_migrations_if_needed():
        raise
    else:
        _ensure_creds_filesystem()
        log_noop_migration_detected()
        logger.info("Database migrations check completed (3.2.0 -> 3.3.0 path)")
        logger.info("Migration validation completed (3.3.0 gate)")

@@ -1,100 +0,0 @@
import sqlite3
import logging

logger = logging.getLogger(__name__)


class MigrationV3_2_0:
    """
    Migration for version 3.2.0 (upgrade path 3.2.0 -> 3.3.0).
    - Adds per-item batch progress columns to Watch DBs to support page-by-interval processing.
    - Enforces prerequisite: previous instance version must be 3.1.2 (validated by runner).
    """

    # New columns to add to watched tables
    PLAYLISTS_ADDED_COLUMNS: dict[str, str] = {
        "batch_next_offset": "INTEGER DEFAULT 0",
        "batch_processing_snapshot_id": "TEXT",
    }

    ARTISTS_ADDED_COLUMNS: dict[str, str] = {
        "batch_next_offset": "INTEGER DEFAULT 0",
    }

    # --- No-op for history/accounts in 3.3.0 ---

    def check_history(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_history(self, conn: sqlite3.Connection) -> None:
        pass

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        pass

    # --- Watch: playlists ---

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        try:
            cur = conn.execute("PRAGMA table_info(watched_playlists)")
            cols = {row[1] for row in cur.fetchall()}
            return set(self.PLAYLISTS_ADDED_COLUMNS.keys()).issubset(cols)
        except sqlite3.OperationalError:
            # Table missing means not ready
            return False

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        # Add new columns if missing
        try:
            cur = conn.execute("PRAGMA table_info(watched_playlists)")
            existing = {row[1] for row in cur.fetchall()}
            for col_name, col_type in self.PLAYLISTS_ADDED_COLUMNS.items():
                if col_name in existing:
                    continue
                try:
                    conn.execute(
                        f"ALTER TABLE watched_playlists ADD COLUMN {col_name} {col_type}"
                    )
                    logger.info(
                        f"Added column '{col_name} {col_type}' to watched_playlists for 3.3.0 batch progress."
                    )
                except sqlite3.OperationalError as e:
                    logger.warning(
                        f"Could not add column '{col_name}' to watched_playlists: {e}"
                    )
        except Exception:
            logger.error("Failed to update watched_playlists for 3.3.0", exc_info=True)

    # --- Watch: artists ---

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        try:
            cur = conn.execute("PRAGMA table_info(watched_artists)")
            cols = {row[1] for row in cur.fetchall()}
            return set(self.ARTISTS_ADDED_COLUMNS.keys()).issubset(cols)
        except sqlite3.OperationalError:
            return False

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        try:
            cur = conn.execute("PRAGMA table_info(watched_artists)")
            existing = {row[1] for row in cur.fetchall()}
            for col_name, col_type in self.ARTISTS_ADDED_COLUMNS.items():
                if col_name in existing:
                    continue
                try:
                    conn.execute(
                        f"ALTER TABLE watched_artists ADD COLUMN {col_name} {col_type}"
                    )
                    logger.info(
                        f"Added column '{col_name} {col_type}' to watched_artists for 3.3.0 batch progress."
                    )
                except sqlite3.OperationalError as e:
                    logger.warning(
                        f"Could not add column '{col_name}' to watched_artists: {e}"
                    )
        except Exception:
            logger.error("Failed to update watched_artists for 3.3.0", exc_info=True)

@@ -1,41 +0,0 @@
import logging
import sqlite3

logger = logging.getLogger(__name__)


class MigrationV3_2_1:
    """
    No-op migration for version 3.2.1 (upgrade path 3.2.1 -> 3.3.0).
    No database schema changes are required.
    """

    def check_history(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_history(self, conn: sqlite3.Connection) -> None:
        pass

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        pass

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        pass

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        pass


def log_noop_migration_detected() -> None:
    logger.info(
        "No migration performed: detected schema for 3.2.1; no changes needed for 3.2.1 -> 3.3.0."
    )

routes/migrations/v3_3_0.py (new file, 69 lines)
@@ -0,0 +1,69 @@
import json
import logging
from pathlib import Path
from typing import Optional

logger = logging.getLogger(__name__)


CONFIG_PATH = Path("./data/config/main.json")
REQUIRED_VERSION = "3.3.0"
TARGET_VERSION = "3.3.1"


def _load_config(config_path: Path) -> Optional[dict]:
    try:
        if not config_path.exists():
            logger.error(f"Configuration file not found at {config_path}")
            return None
        content = config_path.read_text(encoding="utf-8")
        return json.loads(content)
    except Exception:
        logger.error("Failed to read configuration file for migration", exc_info=True)
        return None


def _save_config(config_path: Path, cfg: dict) -> None:
    config_path.parent.mkdir(parents=True, exist_ok=True)
    config_path.write_text(json.dumps(cfg, indent=4) + "\n", encoding="utf-8")


class MigrationV3_3_0:
    """
    3.3.0 migration gate. This migration verifies the configuration indicates
    version 3.3.0, then bumps it to 3.3.1.

    If the `version` key is missing or not equal to 3.3.0, execution aborts and
    prompts the user to update their instance to 3.3.0.
    """

    @staticmethod
    def assert_config_version_is_3_3_0() -> None:
        cfg = _load_config(CONFIG_PATH)
        if not cfg or "version" not in cfg:
            raise RuntimeError(
                "Missing 'version' in data/config/main.json. Please update your configuration to 3.3.0."
            )
        version = str(cfg.get("version", "")).strip()
        # Case 1: exactly 3.3.0 -> bump to 3.3.1
        if version == REQUIRED_VERSION:
            cfg["version"] = TARGET_VERSION
            try:
                _save_config(CONFIG_PATH, cfg)
                logger.info(
                    f"Configuration version bumped from {REQUIRED_VERSION} to {TARGET_VERSION}."
                )
            except Exception:
                logger.error(
                    "Failed to bump configuration version to 3.3.1", exc_info=True
                )
                raise
            return
        # Case 2: already 3.3.1 -> OK
        if version == TARGET_VERSION:
            logger.info("Configuration version 3.3.1 detected. Proceeding.")
            return
        # Case 3: anything else -> abort and instruct to update to 3.3.0 first
        raise RuntimeError(
            f"Unsupported configuration version '{version}'. Please update to {REQUIRED_VERSION}."
        )
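
For context, the gate above only inspects the top-level `version` key of data/config/main.json and rewrites it in place. A rough sketch of the expected transition (other keys elided; the values shown are placeholders, not taken from this commit):

# data/config/main.json before the gate runs:
#   {"service": "spotify", "version": "3.3.0", ...}
MigrationV3_3_0.assert_config_version_is_3_3_0()
# Bumps 3.3.0 to 3.3.1, accepts an existing 3.3.1, and raises RuntimeError for anything else.
# data/config/main.json afterwards:
#   {"service": "spotify", "version": "3.3.1", ...}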

@@ -4,7 +4,7 @@ import logging
import time
import json
import asyncio
from typing import Set
from typing import Set, Optional

import redis
import threading
@@ -42,12 +42,12 @@ class SSEBroadcaster:
        """Add a new SSE client"""
        self.clients.add(queue)
        logger.debug(f"SSE: Client connected (total: {len(self.clients)})")


    async def remove_client(self, queue: asyncio.Queue):
        """Remove an SSE client"""
        self.clients.discard(queue)
        logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})")


    async def broadcast_event(self, event_data: dict):
        """Broadcast an event to all connected clients"""
        logger.debug(
@@ -118,26 +118,22 @@ def start_sse_redis_subscriber():

                # Handle different event types
                if event_type == "progress_update":
                    # Transform callback data into task format expected by frontend
                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    try:
                        broadcast_data = loop.run_until_complete(
                            transform_callback_to_task_format(
                                task_id, event_data
                            )
                        )
                        if broadcast_data:
                    # Transform callback data into standardized update format expected by frontend
                    standardized = standardize_incoming_event(event_data)
                    if standardized:
                        loop = asyncio.new_event_loop()
                        asyncio.set_event_loop(loop)
                        try:
                            loop.run_until_complete(
                                sse_broadcaster.broadcast_event(broadcast_data)
                                sse_broadcaster.broadcast_event(standardized)
                            )
                            logger.debug(
                                f"SSE Redis Subscriber: Broadcasted callback to {len(sse_broadcaster.clients)} clients"
                                f"SSE Redis Subscriber: Broadcasted standardized progress update to {len(sse_broadcaster.clients)} clients"
                            )
                        finally:
                            loop.close()
                        finally:
                            loop.close()
                elif event_type == "summary_update":
                    # Task summary update - use existing trigger_sse_update logic
                    # Task summary update - use standardized trigger
                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    try:
@@ -152,18 +148,20 @@ def start_sse_redis_subscriber():
                    finally:
                        loop.close()
                else:
                    # Unknown event type - broadcast as-is
                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    try:
                        loop.run_until_complete(
                            sse_broadcaster.broadcast_event(event_data)
                        )
                        logger.debug(
                            f"SSE Redis Subscriber: Broadcasted {event_type} to {len(sse_broadcaster.clients)} clients"
                        )
                    finally:
                        loop.close()
                    # Unknown event type - attempt to standardize and broadcast
                    standardized = standardize_incoming_event(event_data)
                    if standardized:
                        loop = asyncio.new_event_loop()
                        asyncio.set_event_loop(loop)
                        try:
                            loop.run_until_complete(
                                sse_broadcaster.broadcast_event(standardized)
                            )
                            logger.debug(
                                f"SSE Redis Subscriber: Broadcasted standardized {event_type} to {len(sse_broadcaster.clients)} clients"
                            )
                        finally:
                            loop.close()

            except Exception as e:
                logger.error(
@@ -180,6 +178,85 @@ def start_sse_redis_subscriber():
    logger.debug("SSE Redis Subscriber: Background thread started")


def build_task_object_from_callback(
    task_id: str, callback_data: dict
) -> Optional[dict]:
    """Build a standardized task object from callback payload and task info."""
    try:
        task_info = get_task_info(task_id)
        if not task_info:
            return None
        return {
            "task_id": task_id,
            "original_url": f"http://localhost:7171/api/{task_info.get('download_type', 'track')}/download/{task_info.get('url', '').split('/')[-1] if task_info.get('url') else ''}",
            "last_line": callback_data,
            "timestamp": time.time(),
            "download_type": task_info.get("download_type", "track"),
            "type": task_info.get("type", task_info.get("download_type", "track")),
            "name": task_info.get("name", "Unknown"),
            "artist": task_info.get("artist", ""),
            "created_at": task_info.get("created_at"),
        }
    except Exception as e:
        logger.error(
            f"Error building task object from callback for {task_id}: {e}",
            exc_info=True,
        )
        return None


def standardize_incoming_event(event_data: dict) -> Optional[dict]:
    """
    Convert various incoming event shapes into a standardized SSE payload:
    {
        'change_type': 'update' | 'heartbeat',
        'tasks': [...],
        'current_timestamp': float,
        'trigger_reason': str (optional)
    }
    """
    try:
        # Heartbeat passthrough (ensure tasks array exists)
        if event_data.get("change_type") == "heartbeat":
            return {
                "change_type": "heartbeat",
                "tasks": [],
                "current_timestamp": time.time(),
            }

        # If already has tasks, just coerce change_type
        if isinstance(event_data.get("tasks"), list):
            return {
                "change_type": event_data.get("change_type", "update"),
                "tasks": event_data["tasks"],
                "current_timestamp": time.time(),
                "trigger_reason": event_data.get("trigger_reason"),
            }

        # If it's a callback-shaped event
        callback_data = event_data.get("callback_data")
        task_id = event_data.get("task_id")
        if callback_data and task_id:
            task_obj = build_task_object_from_callback(task_id, callback_data)
            if task_obj:
                return {
                    "change_type": "update",
                    "tasks": [task_obj],
                    "current_timestamp": time.time(),
                    "trigger_reason": event_data.get("event_type", "callback_update"),
                }

        # Fallback to empty update
        return {
            "change_type": "update",
            "tasks": [],
            "current_timestamp": time.time(),
        }
    except Exception as e:
        logger.error(f"Failed to standardize incoming event: {e}", exc_info=True)
        return None
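
As a rough illustration of the contract standardize_incoming_event enforces, assuming get_task_info can resolve the task (the id and callback fields below are placeholders, not values from this commit):

# Hypothetical callback-shaped event as received from the Redis subscriber:
incoming = {
    "event_type": "progress_update",
    "task_id": "abc123",  # placeholder id
    "callback_data": {"status": "downloading", "progress": 42},
}
standardized = standardize_incoming_event(incoming)
# standardized would look roughly like:
# {
#     "change_type": "update",
#     "tasks": [<task object built by build_task_object_from_callback>],
#     "current_timestamp": 1700000000.0,
#     "trigger_reason": "progress_update",
# }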


async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict:
    """Transform callback event data into the task format expected by frontend"""
    try:
@@ -210,7 +287,7 @@ async def transform_callback_to_task_format(task_id: str, event_data: dict) -> d

        # Build minimal event data - global counts will be added at broadcast time
        return {
            "change_type": "update",  # Use "update" so it gets processed by existing frontend logic
            "change_type": "update",
            "tasks": [task_object],  # Frontend expects tasks array
            "current_timestamp": time.time(),
            "updated_count": 1,
@@ -253,12 +330,12 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"):
            task_info, last_status, task_id, current_time, dummy_request
        )

        # Create minimal event data - global counts will be added at broadcast time
        # Create standardized event data - global counts will be added at broadcast time
        event_data = {
            "tasks": [task_response],
            "current_timestamp": current_time,
            "since_timestamp": current_time,
            "change_type": "realtime",
            "change_type": "update",
            "trigger_reason": reason,
        }

@@ -419,6 +496,14 @@ def add_global_task_counts_to_event(event_data):
        event_data["active_tasks"] = global_task_counts["active"]
        event_data["all_tasks_count"] = sum(global_task_counts.values())

        # Ensure tasks array is present for schema consistency
        if "tasks" not in event_data:
            event_data["tasks"] = []

        # Ensure change_type is present
        if "change_type" not in event_data:
            event_data["change_type"] = "update"

        return event_data

    except Exception as e:
@@ -495,7 +580,11 @@ def _build_task_response(
    try:
        item_id = item_url.split("/")[-1]
        if item_id:
            base_url = str(request.base_url).rstrip("/") if request else "http://localhost:7171"
            base_url = (
                str(request.base_url).rstrip("/")
                if request
                else "http://localhost:7171"
            )
            dynamic_original_url = (
                f"{base_url}/api/{download_type}/download/{item_id}"
            )
@@ -573,7 +662,9 @@ def _build_task_response(
    return task_response


async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Optional[Request] = None):
async def get_paginated_tasks(
    page=1, limit=20, active_only=False, request: Optional[Request] = None
):
    """
    Get paginated list of tasks.
    """
@@ -1066,47 +1157,18 @@ async def stream_task_updates(

    try:
        # Register this client with the broadcaster
        logger.debug(f"SSE Stream: New client connecting...")
        logger.debug("SSE Stream: New client connecting...")
        await sse_broadcaster.add_client(client_queue)
        logger.debug(f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}")

        # Send initial data immediately upon connection
        logger.debug(
            f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}"
        )

        # Send initial data immediately upon connection (standardized 'update')
        initial_data = await generate_task_update_event(
            time.time(), active_only, request
        )
        yield initial_data

        # Also send any active tasks as callback-style events to newly connected clients
        all_tasks = get_all_tasks()
        for task_summary in all_tasks:
            task_id = task_summary.get("task_id")
            if not task_id:
                continue

            task_info = get_task_info(task_id)
            if not task_info:
                continue

            last_status = get_last_task_status(task_id)
            task_status = get_task_status_from_last_status(last_status)

            # Send recent callback data for active or recently completed tasks
            if is_task_active(task_status) or (
                last_status and last_status.get("timestamp", 0) > time.time() - 30
            ):
                if last_status and "raw_callback" in last_status:
                    callback_event = {
                        "task_id": task_id,
                        "callback_data": last_status["raw_callback"],
                        "timestamp": last_status.get("timestamp", time.time()),
                        "change_type": "callback",
                        "event_type": "progress_update",
                        "replay": True,  # Mark as replay for client
                    }
                    event_json = json.dumps(callback_event)
                    yield f"data: {event_json}\n\n"
                    logger.debug(f"SSE Stream: Sent replay callback for task {task_id}")

        # Send periodic heartbeats and listen for real-time events
        last_heartbeat = time.time()
        heartbeat_interval = 30.0
@@ -1173,6 +1235,7 @@ async def stream_task_updates(
                        + task_counts["retrying"],
                        "task_counts": task_counts,
                        "change_type": "heartbeat",
                        "tasks": [],
                    }

                    event_json = json.dumps(heartbeat_data)
@@ -1187,6 +1250,7 @@ async def stream_task_updates(
                        "error": "Internal server error",
                        "timestamp": time.time(),
                        "change_type": "error",
                        "tasks": [],
                    }
                )
                yield f"data: {error_data}\n\n"
@@ -1289,6 +1353,7 @@ async def generate_task_update_event(
            "current_timestamp": current_time,
            "updated_count": len(updated_tasks),
            "since_timestamp": since_timestamp,
            "change_type": "update",
            "initial": True,  # Mark as initial load
        }

@@ -1301,7 +1366,12 @@ async def generate_task_update_event(
    except Exception as e:
        logger.error(f"Error generating initial SSE event: {e}", exc_info=True)
        error_data = json.dumps(
            {"error": "Failed to load initial data", "timestamp": time.time()}
            {
                "error": "Failed to load initial data",
                "timestamp": time.time(),
                "tasks": [],
                "change_type": "error",
            }
        )
        return f"data: {error_data}\n\n"

@@ -101,7 +101,7 @@ def download_album(
            )
            dl.download_albumspo(
                link_album=url,  # Spotify URL
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,  # Deezer quality
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -159,7 +159,7 @@ def download_album(
            )
            spo.download_album(
                link_album=url,  # Spotify URL
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=fall_quality,  # Spotify quality
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -216,7 +216,7 @@ def download_album(
            )
            spo.download_album(
                link_album=url,
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -260,7 +260,7 @@ def download_album(
            )
            dl.download_albumdee(  # Deezer URL, download via Deezer
                link_album=url,
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,
                recursive_quality=recursive_quality,
                recursive_download=False,

@@ -28,7 +28,7 @@ CONFIG_FILE_PATH = Path("./data/config/main.json")

DEFAULT_MAIN_CONFIG = {
    "service": "spotify",
    "version": "3.3.0",
    "version": "3.3.1",
    "spotify": "",
    "deezer": "",
    "fallback": False,

@@ -98,7 +98,7 @@ def download_playlist(
            )
            dl.download_playlistspo(
                link_playlist=url,  # Spotify URL
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,  # Deezer quality
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -161,7 +161,7 @@ def download_playlist(
            )
            spo.download_playlist(
                link_playlist=url,  # Spotify URL
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=fall_quality,  # Spotify quality
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -224,7 +224,7 @@ def download_playlist(
            )
            spo.download_playlist(
                link_playlist=url,
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -268,7 +268,7 @@ def download_playlist(
            )
            dl.download_playlistdee(  # Deezer URL, download via Deezer
                link_playlist=url,
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,
                recursive_quality=recursive_quality,  # Usually False for playlists to get individual track qualities
                recursive_download=False,

@@ -94,7 +94,7 @@ def download_track(
            # download_trackspo means: Spotify URL, download via Deezer
            dl.download_trackspo(
                link_track=url,  # Spotify URL
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,  # Deezer quality
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -153,7 +153,7 @@ def download_track(
            )
            spo.download_track(
                link_track=url,  # Spotify URL
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=fall_quality,  # Spotify quality
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -211,7 +211,7 @@ def download_track(
            )
            spo.download_track(
                link_track=url,
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,
                recursive_quality=recursive_quality,
                recursive_download=False,
@@ -254,7 +254,7 @@ def download_track(
            )
            dl.download_trackdee(  # Deezer URL, download via Deezer
                link_track=url,
                output_dir="/app/downloads",
                output_dir="./downloads",
                quality_download=quality,
                recursive_quality=recursive_quality,
                recursive_download=False,

@@ -1098,7 +1098,7 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
    # Get configuration settings

    output_dir = (
        "/app/downloads"  # This matches the output_dir used in download functions
        "./downloads"  # This matches the output_dir used in download functions
    )

    # Get all tracks for the playlist
@@ -1125,14 +1125,14 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
    skipped_missing_final_path = 0

    for track in tracks:
        # Use final_path from deezspot summary and convert from /app/downloads to ../ relative path
        # Use final_path from deezspot summary and convert from ./downloads to ../ relative path
        final_path = track.get("final_path")
        if not final_path:
            skipped_missing_final_path += 1
            continue
        normalized = str(final_path).replace("\\", "/")
        if normalized.startswith("/app/downloads/"):
            relative_path = normalized.replace("/app/downloads/", "../", 1)
        if normalized.startswith("./downloads/"):
            relative_path = normalized.replace("./downloads/", "../", 1)
        elif "/downloads/" in normalized.lower():
            idx = normalized.lower().rfind("/downloads/")
            relative_path = "../" + normalized[idx + len("/downloads/") :]
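
For illustration, a small sketch of the path rewrite this hunk performs (the final_path value below is made up, not taken from the commit):

# Hypothetical deezspot final_path recorded for a playlist track:
final_path = "./downloads/Artist/Album/01 - Track.flac"
normalized = str(final_path).replace("\\", "/")
if normalized.startswith("./downloads/"):
    relative_path = normalized.replace("./downloads/", "../", 1)
# relative_path == "../Artist/Album/01 - Track.flac", the entry written into the playlist's M3U file.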