feat: Change watchlist behaviour. It now updates progressively based on maxItemsPerRun and runs a batch at intervals determined by watchPollInterval
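In effect, the watch manager no longer scans every watched item in a single pass: each run handles at most maxItemsPerRun items, remembers where it stopped, and the next run, watchPollInterval seconds later, resumes from that offset and wraps around after a full pass. A minimal sketch of that scheduling idea, using stand-in helpers rather than the project's actual manager code:

MAX_ITEMS_PER_RUN = 20  # maps to the maxItemsPerRun setting (validated to 1-50 in the UI)

def load_watched_items() -> list[str]:
    # Stand-in for reading watched playlist/artist IDs from the watch DBs.
    return [f"item-{i}" for i in range(95)]

def process_item(item: str) -> None:
    # Stand-in for the per-item check (fetch remote state, queue downloads).
    print(f"checking {item}")

def run_once(next_offset: int, batch_size: int = MAX_ITEMS_PER_RUN) -> int:
    """Process one batch; return the offset to resume from on the next poll."""
    items = load_watched_items()
    batch = items[next_offset:next_offset + batch_size]
    for item in batch:
        process_item(item)
    new_offset = next_offset + len(batch)
    return 0 if new_offset >= len(items) else new_offset  # wrap after a full pass

# The manager persists this offset between runs (see the batch_next_offset
# columns below) and calls run_once again after watchPollInterval seconds.
offset = run_once(0)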
app.py (20 changed lines)
@@ -25,6 +25,7 @@ except Exception as e:
     logging.getLogger(__name__).error(
         f"Database migration step failed early in startup: {e}", exc_info=True
     )
+    sys.exit(1)
 
 # Import route routers (to be created)
 from routes.auth.credentials import router as credentials_router
@@ -47,6 +48,9 @@ from routes.utils.celery_config import REDIS_URL
 from routes.auth import AUTH_ENABLED
 from routes.auth.middleware import AuthMiddleware
 
+# Import watch manager controls (start/stop) without triggering side effects
+from routes.utils.watch.manager import start_watch_manager, stop_watch_manager
+
 # Import and initialize routes (this will start the watch manager)
 
 
@@ -166,9 +170,25 @@ async def lifespan(app: FastAPI):
     except Exception as e:
         logging.error(f"Failed to start Celery workers: {e}")
 
+    # Start Watch Manager after Celery is up
+    try:
+        start_watch_manager()
+        logging.info("Watch Manager initialized and registered for shutdown.")
+    except Exception as e:
+        logging.error(
+            f"Could not start Watch Manager: {e}. Watch functionality will be disabled.",
+            exc_info=True,
+        )
+
     yield
 
     # Shutdown
+    try:
+        stop_watch_manager()
+        logging.info("Watch Manager stopped")
+    except Exception as e:
+        logging.error(f"Error stopping Watch Manager: {e}")
+
     try:
         celery_manager.stop()
         logging.info("Celery workers stopped")

docker-compose.yaml

@@ -1,7 +1,7 @@
 name: spotizerr
 services:
   spotizerr:
-    image: cooldockerizer93/spotizerr
+    image: cooldockerizer93/spotizerr:3.2.0
     volumes:
       - ./data:/app/data
       - ./downloads:/app/downloads

routes/__init__.py

@@ -1,36 +1,7 @@
 import logging
-import atexit
 
 # Configure basic logging for the application if not already configured
-# This is a good place for it if routes are a central part of your app structure.
-logging.basicConfig(
-    level=logging.INFO, format="%(message)s"
-)
+# This remains safe to execute on import
+logging.basicConfig(level=logging.INFO, format="%(message)s")
 
 logger = logging.getLogger(__name__)
-
-# Run DB migrations early so other modules see expected schemas
-try:
-    from routes.migrations import run_migrations_if_needed
-    run_migrations_if_needed()
-    logger.info("Database migrations executed (if needed).")
-except Exception as e:
-    logger.error(f"Database migration step failed: {e}", exc_info=True)
-
-try:
-    from routes.utils.watch.manager import start_watch_manager, stop_watch_manager
-
-    # Start the playlist watch manager when the application/blueprint is initialized
-    start_watch_manager()
-    # Register the stop function to be called on application exit
-    atexit.register(stop_watch_manager)
-    logger.info("Playlist Watch Manager initialized and registered for shutdown.")
-except ImportError as e:
-    logger.error(
-        f"Could not import or start Playlist Watch Manager: {e}. Playlist watching will be disabled."
-    )
-except Exception as e:
-    logger.error(
-        f"An unexpected error occurred during Playlist Watch Manager setup: {e}",
-        exc_info=True,
-    )

routes/migrations/__init__.py

@@ -3,10 +3,7 @@ import sqlite3
 from pathlib import Path
 from typing import Optional
 
-from .v3_0_6 import MigrationV3_0_6
-from .v3_1_0 import MigrationV3_1_0
-from .v3_1_1 import MigrationV3_1_1
-from .v3_1_2 import MigrationV3_1_2
+from .v3_2_0 import MigrationV3_2_0
 
 logger = logging.getLogger(__name__)
 
@@ -41,7 +38,7 @@ CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
     "metadata": "TEXT",
 }
 
-# 3.1.2 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
+# 3.2.0 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
 EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = {
     "spotify_id": "TEXT PRIMARY KEY",
     "name": "TEXT",
@@ -103,10 +100,7 @@ EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
     "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
 }
 
-m306 = MigrationV3_0_6()
-m310 = MigrationV3_1_0()
-m311 = MigrationV3_1_1()
-m312 = MigrationV3_1_2()
+m320 = MigrationV3_2_0()
 
 
 def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
@@ -184,60 +178,53 @@ def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str)
     )
 
 
-def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
-    try:
-        try:
-            cur = conn.execute(
-                "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
-            )
-            for row in cur.fetchall():
-                table_name = row[0]
-                if not table_name:
-                    continue
-                _create_or_update_children_table(conn, table_name)
-        except sqlite3.Error as e:
-            logger.warning(
-                f"Failed to scan referenced children tables from main history: {e}"
-            )
-
-        try:
-            cur = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
-            )
-            for row in cur.fetchall():
-                table_name = row[0]
-                _create_or_update_children_table(conn, table_name)
-        except sqlite3.Error as e:
-            logger.warning(f"Failed to scan legacy children tables in history DB: {e}")
-        logger.info("Children history tables migration ensured")
-    except Exception:
-        logger.error("Failed migrating children history tables", exc_info=True)
-
-
-def _ensure_creds_filesystem() -> None:
-    try:
-        BLOBS_DIR.mkdir(parents=True, exist_ok=True)
-        if not SEARCH_JSON.exists():
-            SEARCH_JSON.write_text(
-                '{ "client_id": "", "client_secret": "" }\n', encoding="utf-8"
-            )
-            logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
-    except Exception:
-        logger.error(
-            "Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True
-        )
-
-
-def _apply_versioned_updates(
-    conn: sqlite3.Connection, c_base, u_base, post_update=None
-) -> None:
-    if not c_base(conn):
-        u_base(conn)
-        if post_update:
-            post_update(conn)
-
-
-# --- 3.1.2 upgrade helpers for Watch DBs ---
+# --- Helper to validate instance is at least 3.1.2 on history DB ---
+
+
+def _history_children_tables(conn: sqlite3.Connection) -> list[str]:
+    tables: set[str] = set()
+    try:
+        cur = conn.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
+        )
+        for row in cur.fetchall():
+            if row and row[0]:
+                tables.add(row[0])
+    except sqlite3.Error as e:
+        logger.warning(f"Failed to scan sqlite_master for children tables: {e}")
+
+    try:
+        cur = conn.execute(
+            "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
+        )
+        for row in cur.fetchall():
+            t = row[0]
+            if t:
+                tables.add(t)
+    except sqlite3.Error as e:
+        logger.warning(f"Failed to scan download_history for children tables: {e}")
+
+    return sorted(tables)
+
+
+def _is_history_at_least_3_2_0(conn: sqlite3.Connection) -> bool:
+    required_cols = {"service", "quality_format", "quality_bitrate"}
+    tables = _history_children_tables(conn)
+    if not tables:
+        # Nothing to migrate implies OK
+        return True
+    for t in tables:
+        try:
+            cur = conn.execute(f"PRAGMA table_info({t})")
+            cols = {row[1] for row in cur.fetchall()}
+            if not required_cols.issubset(cols):
+                return False
+        except sqlite3.OperationalError:
+            return False
+    return True
+
+
+# --- 3.2.0 verification helpers for Watch DBs ---
 
 
 def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
@@ -298,10 +285,10 @@ def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
                 EXPECTED_PLAYLIST_TRACKS_COLUMNS,
                 f"playlist tracks ({table_name})",
             )
-        logger.info("Upgraded watch playlists DB to 3.1.2 schema")
+        logger.info("Upgraded watch playlists DB to 3.2.0 base schema")
     except Exception:
         logger.error(
-            "Failed to upgrade watch playlists DB to 3.1.2 schema", exc_info=True
+            "Failed to upgrade watch playlists DB to 3.2.0 base schema", exc_info=True
         )
 
 
@@ -361,10 +348,24 @@ def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
                 EXPECTED_ARTIST_ALBUMS_COLUMNS,
                 f"artist albums ({table_name})",
             )
-        logger.info("Upgraded watch artists DB to 3.1.2 schema")
+        logger.info("Upgraded watch artists DB to 3.2.0 base schema")
     except Exception:
         logger.error(
-            "Failed to upgrade watch artists DB to 3.1.2 schema", exc_info=True
+            "Failed to upgrade watch artists DB to 3.2.0 base schema", exc_info=True
         )
 
 
+def _ensure_creds_filesystem() -> None:
+    try:
+        BLOBS_DIR.mkdir(parents=True, exist_ok=True)
+        if not SEARCH_JSON.exists():
+            SEARCH_JSON.write_text(
+                '{ "client_id": "", "client_secret": "" }\n', encoding="utf-8"
+            )
+            logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
+    except Exception:
+        logger.error(
+            "Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True
+        )
+
+
@@ -374,75 +375,42 @@ def run_migrations_if_needed():
         return
 
     try:
-        # History DB
-        with _safe_connect(HISTORY_DB) as conn:
-            if conn:
-                _apply_versioned_updates(
-                    conn,
-                    m306.check_history,
-                    m306.update_history,
-                    post_update=_update_children_tables_for_history,
+        # Require instance to be at least 3.2.0 on history DB; otherwise abort
+        with _safe_connect(HISTORY_DB) as history_conn:
+            if history_conn and not _is_history_at_least_3_2_0(history_conn):
+                logger.error(
+                    "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.2.1."
+                )
+                raise RuntimeError(
+                    "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.2.1."
                 )
-                _apply_versioned_updates(conn, m311.check_history, m311.update_history)
-                _apply_versioned_updates(conn, m312.check_history, m312.update_history)
-                conn.commit()
 
         # Watch playlists DB
         with _safe_connect(PLAYLISTS_DB) as conn:
             if conn:
-                _apply_versioned_updates(
-                    conn,
-                    m306.check_watch_playlists,
-                    m306.update_watch_playlists,
-                )
-                _apply_versioned_updates(
-                    conn,
-                    m311.check_watch_playlists,
-                    m311.update_watch_playlists,
-                )
-                _apply_versioned_updates(
-                    conn,
-                    m312.check_watch_playlists,
-                    m312.update_watch_playlists,
-                )
                 _update_watch_playlists_db(conn)
+                # Apply 3.2.0 additions (batch progress columns)
+                if not m320.check_watch_playlists(conn):
+                    m320.update_watch_playlists(conn)
                 conn.commit()
 
-        # Watch artists DB
+        # Watch artists DB (if exists)
         if ARTISTS_DB.exists():
            with _safe_connect(ARTISTS_DB) as conn:
                 if conn:
-                    _apply_versioned_updates(
-                        conn, m306.check_watch_artists, m306.update_watch_artists
-                    )
-                    _apply_versioned_updates(
-                        conn, m310.check_watch_artists, m310.update_watch_artists
-                    )
-                    _apply_versioned_updates(
-                        conn, m311.check_watch_artists, m311.update_watch_artists
-                    )
-                    _apply_versioned_updates(
-                        conn, m312.check_watch_artists, m312.update_watch_artists
-                    )
                     _update_watch_artists_db(conn)
+                    if not m320.check_watch_artists(conn):
+                        m320.update_watch_artists(conn)
                     conn.commit()
 
-        # Accounts DB
+        # Accounts DB (no changes for this migration path)
         with _safe_connect(ACCOUNTS_DB) as conn:
             if conn:
-                _apply_versioned_updates(
-                    conn, m306.check_accounts, m306.update_accounts
-                )
-                _apply_versioned_updates(
-                    conn, m311.check_accounts, m311.update_accounts
-                )
-                _apply_versioned_updates(
-                    conn, m312.check_accounts, m312.update_accounts
-                )
                 conn.commit()
 
     except Exception as e:
         logger.error("Error during migration: %s", e, exc_info=True)
+        raise
     else:
         _ensure_creds_filesystem()
-        logger.info("Database migrations check completed")
+        logger.info("Database migrations check completed (3.2.0 -> 3.2.1 path)")

routes/migrations/v3_0_6.py (deleted)

@@ -1,201 +0,0 @@
-import sqlite3
-
-
-class MigrationV3_0_6:
-    HISTORY_SQL = """
-    CREATE TABLE IF NOT EXISTS download_history (
-        id INTEGER PRIMARY KEY AUTOINCREMENT,
-        download_type TEXT NOT NULL,
-        title TEXT NOT NULL,
-        artists TEXT,
-        timestamp REAL NOT NULL,
-        status TEXT NOT NULL,
-        service TEXT,
-        quality_format TEXT,
-        quality_bitrate TEXT,
-        total_tracks INTEGER,
-        successful_tracks INTEGER,
-        failed_tracks INTEGER,
-        skipped_tracks INTEGER,
-        children_table TEXT,
-        task_id TEXT,
-        external_ids TEXT,
-        metadata TEXT,
-        release_date TEXT,
-        genres TEXT,
-        images TEXT,
-        owner TEXT,
-        album_type TEXT,
-        duration_total_ms INTEGER,
-        explicit BOOLEAN
-    );
-    CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
-    CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
-    CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
-    CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
-    """
-
-    WATCH_PLAYLISTS_SQL = """
-    CREATE TABLE IF NOT EXISTS watched_playlists (
-        spotify_id TEXT PRIMARY KEY,
-        name TEXT,
-        owner_id TEXT,
-        owner_name TEXT,
-        total_tracks INTEGER,
-        link TEXT,
-        snapshot_id TEXT,
-        last_checked INTEGER,
-        added_at INTEGER,
-        is_active INTEGER DEFAULT 1
-    );
-    """
-
-    WATCH_ARTISTS_SQL = """
-    CREATE TABLE IF NOT EXISTS watched_artists (
-        spotify_id TEXT PRIMARY KEY,
-        name TEXT,
-        link TEXT,
-        total_albums_on_spotify INTEGER,
-        last_checked INTEGER,
-        added_at INTEGER,
-        is_active INTEGER DEFAULT 1,
-        genres TEXT,
-        popularity INTEGER,
-        image_url TEXT
-    );
-    """
-
-    ACCOUNTS_SPOTIFY_SQL = """
-    CREATE TABLE IF NOT EXISTS spotify (
-        name TEXT PRIMARY KEY,
-        region TEXT,
-        created_at REAL,
-        updated_at REAL
-    );
-    """
-
-    ACCOUNTS_DEEZER_SQL = """
-    CREATE TABLE IF NOT EXISTS deezer (
-        name TEXT PRIMARY KEY,
-        arl TEXT,
-        region TEXT,
-        created_at REAL,
-        updated_at REAL
-    );
-    """
-
-    @staticmethod
-    def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
-        try:
-            cur = conn.execute(f"PRAGMA table_info({table})")
-            return {row[1] for row in cur.fetchall()}
-        except Exception:
-            return set()
-
-    # --- Checks ---
-    def check_history(self, conn: sqlite3.Connection) -> bool:
-        cur = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'"
-        )
-        if not cur.fetchone():
-            return False
-        required = {
-            "id",
-            "download_type",
-            "title",
-            "artists",
-            "timestamp",
-            "status",
-            "service",
-            "quality_format",
-            "quality_bitrate",
-            "total_tracks",
-            "successful_tracks",
-            "failed_tracks",
-            "skipped_tracks",
-            "children_table",
-            "task_id",
-            "external_ids",
-            "metadata",
-            "release_date",
-            "genres",
-            "images",
-            "owner",
-            "album_type",
-            "duration_total_ms",
-            "explicit",
-        }
-        return required.issubset(self._table_columns(conn, "download_history"))
-
-    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
-        cur = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'"
-        )
-        if not cur.fetchone():
-            return False
-        required = {
-            "spotify_id",
-            "name",
-            "owner_id",
-            "owner_name",
-            "total_tracks",
-            "link",
-            "snapshot_id",
-            "last_checked",
-            "added_at",
-            "is_active",
-        }
-        return required.issubset(self._table_columns(conn, "watched_playlists"))
-
-    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
-        cur = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'"
-        )
-        if not cur.fetchone():
-            return False
-        required = {
-            "spotify_id",
-            "name",
-            "link",
-            "total_albums_on_spotify",
-            "last_checked",
-            "added_at",
-            "is_active",
-            "genres",
-            "popularity",
-            "image_url",
-        }
-        return required.issubset(self._table_columns(conn, "watched_artists"))
-
-    def check_accounts(self, conn: sqlite3.Connection) -> bool:
-        cur = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'"
-        )
-        if not cur.fetchone():
-            return False
-        if not {"name", "region", "created_at", "updated_at"}.issubset(
-            self._table_columns(conn, "spotify")
-        ):
-            return False
-        cur = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'"
-        )
-        if not cur.fetchone():
-            return False
-        return {"name", "arl", "region", "created_at", "updated_at"}.issubset(
-            self._table_columns(conn, "deezer")
-        )
-
-    # --- Updates ---
-    def update_history(self, conn: sqlite3.Connection) -> None:
-        conn.executescript(self.HISTORY_SQL)
-
-    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
-        conn.executescript(self.WATCH_PLAYLISTS_SQL)
-
-    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
-        conn.executescript(self.WATCH_ARTISTS_SQL)
-
-    def update_accounts(self, conn: sqlite3.Connection) -> None:
-        conn.executescript(self.ACCOUNTS_SPOTIFY_SQL)
-        conn.executescript(self.ACCOUNTS_DEEZER_SQL)

routes/migrations/v3_1_0.py (deleted)

@@ -1,88 +0,0 @@
-import sqlite3
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class MigrationV3_1_0:
-    ARTIST_ALBUMS_EXPECTED_COLUMNS: dict[str, str] = {
-        "album_spotify_id": "TEXT PRIMARY KEY",
-        "artist_spotify_id": "TEXT",
-        "name": "TEXT",
-        "album_group": "TEXT",
-        "album_type": "TEXT",
-        "release_date": "TEXT",
-        "release_date_precision": "TEXT",
-        "total_tracks": "INTEGER",
-        "link": "TEXT",
-        "image_url": "TEXT",
-        "added_to_db": "INTEGER",
-        "last_seen_on_spotify": "INTEGER",
-        "download_task_id": "TEXT",
-        "download_status": "INTEGER DEFAULT 0",
-        "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
-    }
-
-    def _table_columns(self, conn: sqlite3.Connection, table: str) -> set[str]:
-        try:
-            cur = conn.execute(f"PRAGMA table_info({table})")
-            return {row[1] for row in cur.fetchall()}
-        except sqlite3.OperationalError:
-            return set()
-
-    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
-        """Checks if the artist-specific tables have the new columns."""
-        try:
-            cur = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%' LIMIT 1"
-            )
-            first_artist_table = cur.fetchone()
-
-            if not first_artist_table:
-                return True  # No artist tables, so no migration needed
-
-            table_name = first_artist_table[0]
-            existing_columns = self._table_columns(conn, table_name)
-            required_columns = self.ARTIST_ALBUMS_EXPECTED_COLUMNS.keys()
-
-            return set(required_columns).issubset(existing_columns)
-        except Exception as e:
-            logger.error(f"Error checking artist watch DB schema: {e}")
-            return False
-
-    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
-        """Updates all artist-specific tables with new columns."""
-        try:
-            cur = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'"
-            )
-            artist_tables = cur.fetchall()
-
-            for row in artist_tables:
-                table_name = row[0]
-                existing_columns = self._table_columns(conn, table_name)
-
-                for col_name, col_type in self.ARTIST_ALBUMS_EXPECTED_COLUMNS.items():
-                    if col_name in existing_columns:
-                        continue
-
-                    try:
-                        # Remove constraints for ADD COLUMN
-                        col_type_for_add = (
-                            col_type.replace("PRIMARY KEY", "")
-                            .replace("AUTOINCREMENT", "")
-                            .replace("NOT NULL", "")
-                            .strip()
-                        )
-                        conn.execute(
-                            f'ALTER TABLE "{table_name}" ADD COLUMN {col_name} {col_type_for_add}'
-                        )
-                        logger.info(
-                            f"Added column '{col_name}' to table '{table_name}' in artists.db."
-                        )
-                    except sqlite3.OperationalError as e:
-                        logger.warning(
-                            f"Could not add column '{col_name}' to table '{table_name}': {e}"
-                        )
-        except Exception as e:
-            logger.error(f"Failed to update artist watch DB: {e}", exc_info=True)

routes/migrations/v3_1_1.py (deleted)

@@ -1,42 +0,0 @@
-import sqlite3
-
-
-class MigrationV3_1_1:
-    """
-    Dummy migration for version 3.1.1 to 3.1.2.
-    No database schema changes were made between these versions.
-    This class serves as a placeholder to ensure the migration runner
-    is aware of this version and can proceed without errors.
-    """
-
-    def check_history(self, conn: sqlite3.Connection) -> bool:
-        # No changes, so migration is not needed.
-        return True
-
-    def update_history(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass
-
-    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
-        # No changes, so migration is not needed.
-        return True
-
-    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass
-
-    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
-        # No changes, so migration is not needed.
-        return True
-
-    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass
-
-    def check_accounts(self, conn: sqlite3.Connection) -> bool:
-        # No changes, so migration is not needed.
-        return True
-
-    def update_accounts(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass

routes/migrations/v3_1_2.py (deleted)

@@ -1,103 +0,0 @@
-import sqlite3
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class MigrationV3_1_2:
-    """
-    Migration for version 3.1.2.
-    Ensure history children tables (album_*/playlist_*) include service and quality columns.
-    """
-
-    CHILDREN_EXTRA_COLUMNS: dict[str, str] = {
-        "service": "TEXT",
-        "quality_format": "TEXT",
-        "quality_bitrate": "TEXT",
-    }
-
-    def _table_columns(self, conn: sqlite3.Connection, table: str) -> set[str]:
-        try:
-            cur = conn.execute(f"PRAGMA table_info({table})")
-            return {row[1] for row in cur.fetchall()}
-        except sqlite3.OperationalError:
-            return set()
-
-    def _list_children_tables(self, conn: sqlite3.Connection) -> list[str]:
-        tables: set[str] = set()
-        try:
-            cur = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
-            )
-            for row in cur.fetchall():
-                if row and row[0]:
-                    tables.add(row[0])
-        except sqlite3.Error as e:
-            logger.warning(f"Failed to scan sqlite_master for children tables: {e}")
-
-        try:
-            cur = conn.execute(
-                "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
-            )
-            for row in cur.fetchall():
-                t = row[0]
-                if t:
-                    tables.add(t)
-        except sqlite3.Error as e:
-            logger.warning(f"Failed to scan download_history for children tables: {e}")
-
-        return sorted(tables)
-
-    def check_history(self, conn: sqlite3.Connection) -> bool:
-        tables = self._list_children_tables(conn)
-        if not tables:
-            # Nothing to migrate
-            return True
-        # If any table is missing any of the extra columns, migration is needed
-        for t in tables:
-            cols = self._table_columns(conn, t)
-            if not set(self.CHILDREN_EXTRA_COLUMNS.keys()).issubset(cols):
-                return False
-        return True
-
-    def update_history(self, conn: sqlite3.Connection) -> None:
-        tables = self._list_children_tables(conn)
-        for t in tables:
-            existing = self._table_columns(conn, t)
-            for col_name, col_type in self.CHILDREN_EXTRA_COLUMNS.items():
-                if col_name in existing:
-                    continue
-                try:
-                    conn.execute(f"ALTER TABLE {t} ADD COLUMN {col_name} {col_type}")
-                    logger.info(
-                        f"Added column '{col_name} {col_type}' to history children table '{t}'."
-                    )
-                except sqlite3.OperationalError as e:
-                    logger.warning(
-                        f"Could not add column '{col_name}' to history children table '{t}': {e}"
-                    )
-
-    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
-        # No changes for watch artists in 3.1.2
-        return True
-
-    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass
-
-    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
-        # No changes for watch playlists in 3.1.2
-        return True
-
-    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass
-
-    def check_accounts(self, conn: sqlite3.Connection) -> bool:
-        # No changes for accounts in 3.1.2
-        return True
-
-    def update_accounts(self, conn: sqlite3.Connection) -> None:
-        # No-op
-        pass

routes/migrations/v3_2_0.py (new file, 100 lines)

@@ -0,0 +1,100 @@
+import sqlite3
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class MigrationV3_2_0:
+    """
+    Migration for version 3.2.0 (upgrade path 3.2.0 -> 3.2.1).
+    - Adds per-item batch progress columns to Watch DBs to support page-by-interval processing.
+    - Enforces prerequisite: previous instance version must be 3.1.2 (validated by runner).
+    """
+
+    # New columns to add to watched tables
+    PLAYLISTS_ADDED_COLUMNS: dict[str, str] = {
+        "batch_next_offset": "INTEGER DEFAULT 0",
+        "batch_processing_snapshot_id": "TEXT",
+    }
+
+    ARTISTS_ADDED_COLUMNS: dict[str, str] = {
+        "batch_next_offset": "INTEGER DEFAULT 0",
+    }
+
+    # --- No-op for history/accounts in 3.2.1 ---
+
+    def check_history(self, conn: sqlite3.Connection) -> bool:
+        return True
+
+    def update_history(self, conn: sqlite3.Connection) -> None:
+        pass
+
+    def check_accounts(self, conn: sqlite3.Connection) -> bool:
+        return True
+
+    def update_accounts(self, conn: sqlite3.Connection) -> None:
+        pass
+
+    # --- Watch: playlists ---
+
+    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
+        try:
+            cur = conn.execute("PRAGMA table_info(watched_playlists)")
+            cols = {row[1] for row in cur.fetchall()}
+            return set(self.PLAYLISTS_ADDED_COLUMNS.keys()).issubset(cols)
+        except sqlite3.OperationalError:
+            # Table missing means not ready
+            return False
+
+    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
+        # Add new columns if missing
+        try:
+            cur = conn.execute("PRAGMA table_info(watched_playlists)")
+            existing = {row[1] for row in cur.fetchall()}
+            for col_name, col_type in self.PLAYLISTS_ADDED_COLUMNS.items():
+                if col_name in existing:
+                    continue
+                try:
+                    conn.execute(
+                        f"ALTER TABLE watched_playlists ADD COLUMN {col_name} {col_type}"
+                    )
+                    logger.info(
+                        f"Added column '{col_name} {col_type}' to watched_playlists for 3.2.1 batch progress."
+                    )
+                except sqlite3.OperationalError as e:
+                    logger.warning(
+                        f"Could not add column '{col_name}' to watched_playlists: {e}"
+                    )
+        except Exception:
+            logger.error("Failed to update watched_playlists for 3.2.1", exc_info=True)
+
+    # --- Watch: artists ---
+
+    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
+        try:
+            cur = conn.execute("PRAGMA table_info(watched_artists)")
+            cols = {row[1] for row in cur.fetchall()}
+            return set(self.ARTISTS_ADDED_COLUMNS.keys()).issubset(cols)
+        except sqlite3.OperationalError:
+            return False
+
+    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
+        try:
+            cur = conn.execute("PRAGMA table_info(watched_artists)")
+            existing = {row[1] for row in cur.fetchall()}
+            for col_name, col_type in self.ARTISTS_ADDED_COLUMNS.items():
+                if col_name in existing:
+                    continue
+                try:
+                    conn.execute(
+                        f"ALTER TABLE watched_artists ADD COLUMN {col_name} {col_type}"
+                    )
+                    logger.info(
+                        f"Added column '{col_name} {col_type}' to watched_artists for 3.2.1 batch progress."
+                    )
+                except sqlite3.OperationalError as e:
+                    logger.warning(
+                        f"Could not add column '{col_name}' to watched_artists: {e}"
+                    )
+        except Exception:
+            logger.error("Failed to update watched_artists for 3.2.1", exc_info=True)
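Both update methods guard every ALTER TABLE behind a PRAGMA table_info check, so re-running the migration is harmless. A quick way to confirm the new columns landed, sketched under the assumption that the watch playlists DB sits at data/watch/playlists.db (the path is not shown in this diff):

import sqlite3

# Adjust the path to your instance; it is an assumption for illustration.
conn = sqlite3.connect("data/watch/playlists.db")
cols = {row[1] for row in conn.execute("PRAGMA table_info(watched_playlists)")}
print("batch_next_offset present:", "batch_next_offset" in cols)
print("batch_processing_snapshot_id present:", "batch_processing_snapshot_id" in cols)
conn.close()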

watch DB helpers module (filename not shown)

@@ -25,6 +25,9 @@ EXPECTED_WATCHED_PLAYLISTS_COLUMNS = {
     "last_checked": "INTEGER",
     "added_at": "INTEGER",
     "is_active": "INTEGER DEFAULT 1",
+    # New: batch progress for per-interval page fetching
+    "batch_next_offset": "INTEGER DEFAULT 0",
+    "batch_processing_snapshot_id": "TEXT",
 }
 
 EXPECTED_PLAYLIST_TRACKS_COLUMNS = {
@@ -55,6 +58,8 @@ EXPECTED_WATCHED_ARTISTS_COLUMNS = {
     "genres": "TEXT",  # Comma-separated
     "popularity": "INTEGER",
     "image_url": "TEXT",
+    # New: batch progress for per-interval page fetching
+    "batch_next_offset": "INTEGER DEFAULT 0",
 }
 
 EXPECTED_ARTIST_ALBUMS_COLUMNS = {
@@ -439,6 +444,61 @@ def update_playlist_snapshot(
     )
 
 
+# --- New: per-playlist batch progress helpers ---
+
+
+def get_playlist_batch_progress(playlist_spotify_id: str) -> tuple[int, str | None]:
+    """Returns (batch_next_offset, batch_processing_snapshot_id) for a watched playlist."""
+    try:
+        with _get_playlists_db_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute(
+                "SELECT batch_next_offset, batch_processing_snapshot_id FROM watched_playlists WHERE spotify_id = ?",
+                (playlist_spotify_id,),
+            )
+            row = cursor.fetchone()
+            if not row:
+                return 0, None
+            next_offset = (
+                row["batch_next_offset"] if "batch_next_offset" in row.keys() else 0
+            )
+            processing_snapshot = (
+                row["batch_processing_snapshot_id"]
+                if "batch_processing_snapshot_id" in row.keys()
+                else None
+            )
+            return int(next_offset or 0), processing_snapshot
+    except sqlite3.Error as e:
+        logger.error(
+            f"Error retrieving batch progress for playlist {playlist_spotify_id}: {e}",
+            exc_info=True,
+        )
+        return 0, None
+
+
+def set_playlist_batch_progress(
+    playlist_spotify_id: str, next_offset: int, processing_snapshot_id: str | None
+) -> None:
+    """Updates batch_next_offset and batch_processing_snapshot_id for a watched playlist."""
+    try:
+        with _get_playlists_db_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute(
+                """
+                UPDATE watched_playlists
+                SET batch_next_offset = ?, batch_processing_snapshot_id = ?
+                WHERE spotify_id = ?
+                """,
+                (int(next_offset or 0), processing_snapshot_id, playlist_spotify_id),
+            )
+            conn.commit()
+    except sqlite3.Error as e:
+        logger.error(
+            f"Error updating batch progress for playlist {playlist_spotify_id}: {e}",
+            exc_info=True,
+        )
+
+
 def get_playlist_track_ids_from_db(playlist_spotify_id: str):
     """Retrieves all track Spotify IDs from a specific playlist's tracks table in playlists.db."""
     table_name = f"playlist_{playlist_spotify_id.replace('-', '_')}"
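Keeping the snapshot id next to the offset lets a later run detect that the playlist changed mid-pass and restart cleanly. A hedged sketch of how a watch cycle might drive these helpers; fetch_playlist_page is hypothetical, standing in for a paged Spotify API call that is not part of this diff:

def process_playlist_batch(playlist_id: str, current_snapshot: str, limit: int) -> None:
    # Resume where the previous interval stopped.
    offset, in_progress_snapshot = get_playlist_batch_progress(playlist_id)
    if in_progress_snapshot and in_progress_snapshot != current_snapshot:
        offset = 0  # the playlist changed since the last pass; start over
    page = fetch_playlist_page(playlist_id, offset=offset, limit=limit)  # hypothetical
    # ... compare page items against the local table and queue downloads ...
    next_offset = offset + limit
    if next_offset >= page["total"]:
        next_offset = 0  # full pass complete; wrap around for the next cycle
    set_playlist_batch_progress(playlist_id, next_offset, current_snapshot)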
@@ -773,7 +833,7 @@ def add_specific_tracks_to_playlist_table(
 def remove_specific_tracks_from_playlist_table(
     playlist_spotify_id: str, track_spotify_ids: list
 ):
-    """Removes specific tracks from the playlist's local track table."""
+    """Removes specific tracks from the playlist's local DB table."""
     table_name = f"playlist_{playlist_spotify_id.replace('-', '_')}"
     if not track_spotify_ids:
         return 0
@@ -799,7 +859,7 @@ def remove_specific_tracks_from_playlist_table(
         conn.commit()
         deleted_count = cursor.rowcount
         logger.info(
-            f"Manually removed {deleted_count} tracks from DB for playlist {playlist_spotify_id}."
+            f"Successfully removed {deleted_count} tracks locally for playlist {playlist_spotify_id}."
         )
         return deleted_count
     except sqlite3.Error as e:
@@ -1164,6 +1224,53 @@ def update_artist_metadata_after_check(
     )
 
 
+# --- New: per-artist batch progress helpers ---
+
+
+def get_artist_batch_next_offset(artist_spotify_id: str) -> int:
+    try:
+        with _get_artists_db_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute(
+                "SELECT batch_next_offset FROM watched_artists WHERE spotify_id = ?",
+                (artist_spotify_id,),
+            )
+            row = cursor.fetchone()
+            if not row:
+                return 0
+            return (
+                int(row["batch_next_offset"])
+                if "batch_next_offset" in row.keys()
+                else 0
+            )
+    except sqlite3.Error as e:
+        logger.error(
+            f"Error retrieving batch_next_offset for artist {artist_spotify_id}: {e}",
+            exc_info=True,
+        )
+        return 0
+
+
+def set_artist_batch_next_offset(artist_spotify_id: str, next_offset: int) -> None:
+    try:
+        with _get_artists_db_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute(
+                """
+                UPDATE watched_artists
+                SET batch_next_offset = ?
+                WHERE spotify_id = ?
+                """,
+                (int(next_offset or 0), artist_spotify_id),
+            )
+            conn.commit()
+    except sqlite3.Error as e:
+        logger.error(
+            f"Error updating batch_next_offset for artist {artist_spotify_id}: {e}",
+            exc_info=True,
+        )
+
+
 def get_artist_album_ids_from_db(artist_spotify_id: str):
     """Retrieves all album Spotify IDs from a specific artist's albums table in artists.db."""
     table_name = f"artist_{artist_spotify_id.replace('-', '_')}"
@@ -1289,11 +1396,11 @@ def add_or_update_album_for_artist(
             total_tracks,
             link,
             image_url,
             current_time,  # added_to_db
             current_time,  # last_seen_on_spotify
             task_id,  # download_task_id
             download_status,  # download_status
             0,  # is_fully_downloaded_managed_by_app
         )
         cursor.execute(
             f"""

(diff for one large file suppressed: too large to display)

package.json (spotizerr-ui)

@@ -1,7 +1,7 @@
 {
   "name": "spotizerr-ui",
   "private": true,
-  "version": "3.2.0",
+  "version": "3.2.1",
   "type": "module",
   "scripts": {
     "dev": "vite",

WatchTab.tsx

@@ -13,6 +13,7 @@ interface WatchSettings {
   enabled: boolean;
   watchPollIntervalSeconds: number;
   watchedArtistAlbumGroup: AlbumGroup[];
+  maxItemsPerRun: number;
 }
 
 interface DownloadSettings {
@@ -92,8 +93,9 @@ export function WatchTab() {
       setTimeout(() => setSaveStatus("idle"), 3000);
       queryClient.invalidateQueries({ queryKey: ["watchConfig"] });
     },
-    onError: (error) => {
-      toast.error(`Failed to save settings: ${error.message}`);
+    onError: (error: any) => {
+      const message = error?.response?.data?.error || error?.message || "Unknown error";
+      toast.error(`Failed to save settings: ${message}`);
       setSaveStatus("error");
       setTimeout(() => setSaveStatus("idle"), 3000);
     },
@@ -108,6 +110,7 @@ export function WatchTab() {
   }, [config, reset]);
 
   const watchEnabled = watch("enabled");
+  const maxItemsPerRunValue = watch("maxItemsPerRun");
 
   // Validation effect for watch + download method requirement
   useEffect(() => {
@@ -126,8 +129,14 @@ export function WatchTab() {
       error = `Watch with Fallback requires accounts for both services. Missing: ${missingServices.join(", ")}. Configure accounts in the Accounts tab.`;
     }
 
+    // Validate maxItemsPerRun range (1..50)
+    const mir = Number(maxItemsPerRunValue);
+    if (!error && (Number.isNaN(mir) || mir < 1 || mir > 50)) {
+      error = "Max items per run must be between 1 and 50.";
+    }
+
     setValidationError(error);
-  }, [watchEnabled, downloadConfig?.realTime, downloadConfig?.fallback, spotifyCredentials?.length, deezerCredentials?.length]);
+  }, [watchEnabled, downloadConfig?.realTime, downloadConfig?.fallback, spotifyCredentials?.length, deezerCredentials?.length, maxItemsPerRunValue]);
 
   const onSubmit: SubmitHandler<WatchSettings> = (data) => {
     // Check validation before submitting
@@ -148,9 +157,18 @@ export function WatchTab() {
       return;
     }
 
+    // Validate maxItemsPerRun in handler too, to be safe
+    const mir = Number(data.maxItemsPerRun);
+    if (Number.isNaN(mir) || mir < 1 || mir > 50) {
+      setValidationError("Max items per run must be between 1 and 50.");
+      toast.error("Validation failed: Max items per run must be between 1 and 50.");
+      return;
+    }
+
     mutation.mutate({
       ...data,
       watchPollIntervalSeconds: Number(data.watchPollIntervalSeconds),
+      maxItemsPerRun: Number(data.maxItemsPerRun),
     });
   };
 
@@ -225,7 +243,20 @@ export function WatchTab() {
             {...register("watchPollIntervalSeconds")}
             className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
           />
-          <p className="text-sm text-content-muted dark:text-content-muted-dark mt-1">How often to check watched items for updates.</p>
+          <p className="text-sm text-content-muted dark:text-content-muted-dark mt-1">How often to check for new items in watchlist.</p>
+        </div>
+
+        <div className="flex flex-col gap-2">
+          <label htmlFor="maxItemsPerRun" className="text-content-primary dark:text-content-primary-dark">Max Items Per Run</label>
+          <input
+            id="maxItemsPerRun"
+            type="number"
+            min="1"
+            max="50"
+            {...register("maxItemsPerRun")}
+            className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
+          />
+          <p className="text-sm text-content-muted dark:text-content-muted-dark mt-1">Batch size per watch cycle (1–50).</p>
         </div>
       </div>
 

settings context module (filename not shown)

@@ -7,165 +7,176 @@ import { useAuth } from "./auth-context";
 // --- Case Conversion Utility ---
 // This is added here to simplify the fix and avoid module resolution issues.
 function snakeToCamel(str: string): string {
   return str.replace(/(_\w)/g, (m) => m[1].toUpperCase());
 }
 
 function convertKeysToCamelCase(obj: unknown): unknown {
   if (Array.isArray(obj)) {
     return obj.map((v) => convertKeysToCamelCase(v));
   }
   if (typeof obj === "object" && obj !== null) {
     return Object.keys(obj).reduce((acc: Record<string, unknown>, key: string) => {
       const camelKey = snakeToCamel(key);
       acc[camelKey] = convertKeysToCamelCase((obj as Record<string, unknown>)[key]);
       return acc;
     }, {});
   }
   return obj;
 }
 
 // Redefine AppSettings to match the flat structure of the API response
 export type FlatAppSettings = {
   service: "spotify" | "deezer";
   spotify: string;
   spotifyQuality: "NORMAL" | "HIGH" | "VERY_HIGH";
   deezer: string;
   deezerQuality: "MP3_128" | "MP3_320" | "FLAC";
   maxConcurrentDownloads: number;
   realTime: boolean;
   fallback: boolean;
   convertTo: "MP3" | "AAC" | "OGG" | "OPUS" | "FLAC" | "WAV" | "ALAC" | "";
   bitrate: string;
   maxRetries: number;
   retryDelaySeconds: number;
   retryDelayIncrease: number;
   customDirFormat: string;
   customTrackFormat: string;
   tracknumPadding: boolean;
   saveCover: boolean;
   explicitFilter: boolean;
   // Add other fields from the old AppSettings as needed by other parts of the app
   watch: AppSettings["watch"];
   // Add defaults for the new download properties
   threads: number;
   path: string;
   skipExisting: boolean;
   m3u: boolean;
   hlsThreads: number;
   // Frontend-only flag used in DownloadsTab
   recursiveQuality: boolean;
   separateTracksByUser: boolean;
   // Add defaults for the new formatting properties
   track: string;
   album: string;
   playlist: string;
   compilation: string;
   artistSeparator: string;
   spotifyMetadata: boolean;
   realTimeMultiplier: number;
 };
 
 const defaultSettings: FlatAppSettings = {
   service: "spotify",
   spotify: "",
   spotifyQuality: "NORMAL",
   deezer: "",
   deezerQuality: "MP3_128",
   maxConcurrentDownloads: 3,
   realTime: false,
   fallback: false,
   convertTo: "",
   bitrate: "",
   maxRetries: 3,
   retryDelaySeconds: 5,
   retryDelayIncrease: 5,
   customDirFormat: "%ar_album%/%album%",
   customTrackFormat: "%tracknum%. %music%",
   tracknumPadding: true,
   saveCover: true,
   explicitFilter: false,
   // Add defaults for the new download properties
   threads: 4,
   path: "/downloads",
   skipExisting: true,
   m3u: false,
   hlsThreads: 8,
   // Frontend-only default
   recursiveQuality: false,
   separateTracksByUser: false,
   // Add defaults for the new formatting properties
   track: "{artist_name}/{album_name}/{track_number} - {track_name}",
   album: "{artist_name}/{album_name}",
   playlist: "Playlists/{playlist_name}",
   compilation: "Compilations/{album_name}",
   artistSeparator: "; ",
   spotifyMetadata: true,
   watch: {
     enabled: false,
+    maxItemsPerRun: 50,
+    watchPollIntervalSeconds: 3600,
+    watchedArtistAlbumGroup: ["album", "single"],
   },
   realTimeMultiplier: 0,
 };
 
 interface FetchedCamelCaseSettings {
   watchEnabled?: boolean;
-  watch?: { enabled: boolean };
+  watch?: { enabled: boolean; maxItemsPerRun?: number; watchPollIntervalSeconds?: number; watchedArtistAlbumGroup?: string[] };
   [key: string]: unknown;
 }
 
 const fetchSettings = async (): Promise<FlatAppSettings> => {
   try {
     const [{ data: generalConfig }, { data: watchConfig }] = await Promise.all([
       authApiClient.client.get("/config"),
       authApiClient.client.get("/config/watch"),
     ]);
 
     const combinedConfig = {
       ...generalConfig,
       watch: watchConfig,
     };
 
     // Transform the keys before returning the data
     const camelData = convertKeysToCamelCase(combinedConfig) as FetchedCamelCaseSettings;
 
     const withDefaults: FlatAppSettings = {
       ...(camelData as unknown as FlatAppSettings),
       // Ensure required frontend-only fields exist
       recursiveQuality: Boolean((camelData as any).recursiveQuality ?? false),
       realTimeMultiplier: Number((camelData as any).realTimeMultiplier ?? 0),
+      // Ensure watch subkeys default if missing
+      watch: {
+        ...(camelData.watch as any),
+        enabled: Boolean((camelData.watch as any)?.enabled ?? false),
+        maxItemsPerRun: Number((camelData.watch as any)?.maxItemsPerRun ?? 50),
+        watchPollIntervalSeconds: Number((camelData.watch as any)?.watchPollIntervalSeconds ?? 3600),
+        watchedArtistAlbumGroup: (camelData.watch as any)?.watchedArtistAlbumGroup ?? ["album", "single"],
+      },
     };
 
     return withDefaults;
   } catch (error: any) {
     // If we get authentication errors, return default settings
     if (error.response?.status === 401 || error.response?.status === 403) {
       console.log("Authentication required for config access, using default settings");
       return defaultSettings;
     }
     // Re-throw other errors for React Query to handle
     throw error;
   }
 };
 
 export function SettingsProvider({ children }: { children: ReactNode }) {
   const { isLoading, authEnabled, isAuthenticated, user } = useAuth();
 
   // Only fetch settings when auth is ready and user is admin (or auth is disabled)
   const shouldFetchSettings = !isLoading && (!authEnabled || (isAuthenticated && user?.role === "admin"));
 
   const {
     data: settings,
     isLoading: isSettingsLoading,
     isError,
   } = useQuery({
     queryKey: ["config"],
     queryFn: fetchSettings,
     staleTime: 1000 * 60 * 5, // 5 minutes
     refetchOnWindowFocus: false,
     enabled: shouldFetchSettings, // Only run query when auth is ready and user is admin
   });
 
   // Use default settings on error to prevent app crash
   const value = { settings: isError ? defaultSettings : settings || null, isLoading: isSettingsLoading };
 
   return <SettingsContext.Provider value={value}>{children}</SettingsContext.Provider>;
 }

AppSettings type definitions (filename not shown)

@@ -37,7 +37,9 @@ export interface AppSettings {
   spotifyMetadata: boolean;
   watch: {
     enabled: boolean;
-    // Add other watch properties from the old type if they still exist in the API response
+    maxItemsPerRun: number;
+    watchPollIntervalSeconds: number;
+    watchedArtistAlbumGroup: string[];
   };
   // Add other root-level properties from the API if they exist
   realTimeMultiplier: number;
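The UI camel-cases whatever GET /config/watch returns, which implies the backend serves snake_case keys. A hedged sketch of inspecting the new settings over HTTP; the base URL, port, and exact key names are assumptions not confirmed by this diff:

import requests

BASE = "http://localhost:7171/api"  # assumed default; adjust to your instance

cfg = requests.get(f"{BASE}/config/watch").json()
# Key names assume snake_case counterparts of the camelCase fields above.
print(cfg.get("max_items_per_run"), cfg.get("watch_poll_interval_seconds"))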