First try on 3.1.0 migration scripts
routes/migrations/runner.py

@@ -3,16 +3,8 @@ import sqlite3
 from pathlib import Path
 from typing import Optional
 
-from .v3_0_6 import (
-    check_history_3_0_6,
-    check_watch_playlists_3_0_6,
-    check_watch_artists_3_0_6,
-    update_history_3_0_6,
-    update_watch_playlists_3_0_6,
-    update_watch_artists_3_0_6,
-    check_accounts_3_0_6,
-    update_accounts_3_0_6,
-)
+from .v3_0_6 import MigrationV3_0_6
+from .v3_1_0 import MigrationV3_1_0
 
 logger = logging.getLogger(__name__)
 
@@ -47,6 +39,9 @@ CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
     "metadata": "TEXT",
 }
 
+m306 = MigrationV3_0_6()
+m310 = MigrationV3_1_0()
+
 
 def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
     try:
@@ -59,21 +54,12 @@ def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
         return None
 
-
-def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
-    try:
-        cur = conn.execute(f"PRAGMA table_info({table})")
-        return {row[1] for row in cur.fetchall()}
-    except Exception:
-        return set()
-
-
 def _ensure_table_schema(
     conn: sqlite3.Connection,
     table_name: str,
     expected_columns: dict[str, str],
     table_description: str,
 ) -> None:
     """Ensure the given table has all expected columns, adding any missing columns safely."""
     try:
         cur = conn.execute(f"PRAGMA table_info({table_name})")
         existing_info = cur.fetchall()
@@ -81,7 +67,6 @@ def _ensure_table_schema(
         for col_name, col_type in expected_columns.items():
             if col_name in existing_names:
                 continue
-            # Strip PK/NOT NULL when altering existing table to avoid errors
             col_type_for_add = (
                 col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
             )
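Note for reviewers: the stripping above is forced by SQLite itself, not style. ALTER TABLE ... ADD COLUMN cannot add a PRIMARY KEY or UNIQUE column, and a NOT NULL column may only be added with a non-null default. A minimal sketch (not part of the commit) of the failure mode:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE t (id INTEGER PRIMARY KEY)")

    try:
        conn.execute("ALTER TABLE t ADD COLUMN ts REAL NOT NULL")
    except sqlite3.OperationalError as e:
        print(e)  # "Cannot add a NOT NULL column with default value NULL"

    # With the constraint stripped, as _ensure_table_schema does, the add succeeds:
    conn.execute("ALTER TABLE t ADD COLUMN ts REAL")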
@@ -104,7 +89,6 @@ def _ensure_table_schema(
 
 
 def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
     """Create children table if missing and ensure it has all expected columns."""
     conn.execute(
         f"""
         CREATE TABLE IF NOT EXISTS {table_name} (
@@ -130,9 +114,7 @@ def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str)
 
 
 def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
     """Ensure all existing children tables and referenced children tables conform to expected schema."""
     try:
         # Create or update any tables referenced by download_history.children_table
         try:
             cur = conn.execute(
                 "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
@@ -145,7 +127,6 @@ def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
         except sqlite3.Error as e:
             logger.warning(f"Failed to scan referenced children tables from main history: {e}")
 
         # Find any legacy children tables by name pattern album_% or playlist_%
         try:
             cur = conn.execute(
                 "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
@@ -161,7 +142,6 @@ def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
 
 
 def _ensure_creds_filesystem() -> None:
     """Ensure blobs directory and search.json exist."""
     try:
         BLOBS_DIR.mkdir(parents=True, exist_ok=True)
         if not SEARCH_JSON.exists():
@@ -171,48 +151,74 @@ def _ensure_creds_filesystem() -> None:
         logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
 
 
+def _apply_versioned_updates(conn: sqlite3.Connection, c306, u306, c310, u310, post_update=None) -> None:
+    if not c306(conn):
+        u306(conn)
+    if not c310(conn):
+        u310(conn)
+    if post_update:
+        post_update(conn)
+
+
 def run_migrations_if_needed() -> None:
     """Detect and apply necessary migrations by version for each DB.
     Idempotent by design.
     """
     try:
         # History DB
         h_conn = _safe_connect(HISTORY_DB)
         if h_conn:
             try:
-                if not check_history_3_0_6(h_conn):
-                    update_history_3_0_6(h_conn)
-                # Ensure children tables regardless
-                _update_children_tables_for_history(h_conn)
+                _apply_versioned_updates(
+                    h_conn,
+                    m306.check_history,
+                    m306.update_history,
+                    m310.check_history,
+                    m310.update_history,
+                    post_update=_update_children_tables_for_history,
+                )
                 h_conn.commit()
             finally:
                 h_conn.close()
 
-        # Watch DBs
+        # Watch playlists DB
         p_conn = _safe_connect(PLAYLISTS_DB)
         if p_conn:
             try:
-                if not check_watch_playlists_3_0_6(p_conn):
-                    update_watch_playlists_3_0_6(p_conn)
+                _apply_versioned_updates(
+                    p_conn,
+                    m306.check_watch_playlists,
+                    m306.update_watch_playlists,
+                    m310.check_watch_playlists,
+                    m310.update_watch_playlists,
+                )
                 p_conn.commit()
             finally:
                 p_conn.close()
 
+        # Watch artists DB
         a_conn = _safe_connect(ARTISTS_DB)
         if a_conn:
             try:
-                if not check_watch_artists_3_0_6(a_conn):
-                    update_watch_artists_3_0_6(a_conn)
+                _apply_versioned_updates(
+                    a_conn,
+                    m306.check_watch_artists,
+                    m306.update_watch_artists,
+                    m310.check_watch_artists,
+                    m310.update_watch_artists,
+                )
                 a_conn.commit()
             finally:
                 a_conn.close()
 
-        # Credentials accounts DB and files
+        # Accounts DB
         c_conn = _safe_connect(ACCOUNTS_DB)
         if c_conn:
             try:
-                if not check_accounts_3_0_6(c_conn):
-                    update_accounts_3_0_6(c_conn)
+                _apply_versioned_updates(
+                    c_conn,
+                    m306.check_accounts,
+                    m306.update_accounts,
+                    m310.check_accounts,
+                    m310.update_accounts,
+                )
                 c_conn.commit()
             finally:
                 c_conn.close()
 
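The ladder above applies each version's update only when its check fails, which is what makes re-running safe. A self-contained sketch of the same pattern with stub check/update callables (the stub names are illustrative only, not part of the commit):

    import sqlite3

    applied = []

    def check_306(conn):   # pretend the 3.0.6 schema is missing
        return False

    def update_306(conn):
        applied.append("3.0.6")

    def check_310(conn):   # pretend the 3.1.0 schema is already present
        return True

    def update_310(conn):
        applied.append("3.1.0")

    def _apply_versioned_updates(conn, c306, u306, c310, u310, post_update=None):
        if not c306(conn):
            u306(conn)
        if not c310(conn):
            u310(conn)
        if post_update:
            post_update(conn)

    conn = sqlite3.connect(":memory:")
    _apply_versioned_updates(conn, check_306, update_306, check_310, update_310)
    assert applied == ["3.0.6"]  # only the missing version was applied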
routes/migrations/v3_0_6.py

@@ -1,7 +1,9 @@
 import sqlite3
 
-HISTORY_SQL = """
-CREATE TABLE IF NOT EXISTS download_history (
+
+class MigrationV3_0_6:
+    HISTORY_SQL = """
+CREATE TABLE IF NOT EXISTS download_history (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     download_type TEXT NOT NULL,
     title TEXT NOT NULL,
@@ -26,15 +28,15 @@ CREATE TABLE IF NOT EXISTS download_history (
     album_type TEXT,
     duration_total_ms INTEGER,
     explicit BOOLEAN
-);
-CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
-CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
-CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
-CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
-"""
+);
+CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
+CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
+CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
+CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
+"""
 
-WATCH_PLAYLISTS_SQL = """
-CREATE TABLE IF NOT EXISTS watched_playlists (
+    WATCH_PLAYLISTS_SQL = """
+CREATE TABLE IF NOT EXISTS watched_playlists (
     spotify_id TEXT PRIMARY KEY,
     name TEXT,
     owner_id TEXT,
@@ -45,11 +47,11 @@ CREATE TABLE IF NOT EXISTS watched_playlists (
     last_checked INTEGER,
     added_at INTEGER,
     is_active INTEGER DEFAULT 1
-);
-"""
+);
+"""
 
-WATCH_ARTISTS_SQL = """
-CREATE TABLE IF NOT EXISTS watched_artists (
+    WATCH_ARTISTS_SQL = """
+CREATE TABLE IF NOT EXISTS watched_artists (
     spotify_id TEXT PRIMARY KEY,
     name TEXT,
     link TEXT,
@@ -60,41 +62,38 @@ CREATE TABLE IF NOT EXISTS watched_artists (
     genres TEXT,
     popularity INTEGER,
     image_url TEXT
-);
-"""
+);
+"""
 
-ACCOUNTS_SPOTIFY_SQL = """
-CREATE TABLE IF NOT EXISTS spotify (
+    ACCOUNTS_SPOTIFY_SQL = """
+CREATE TABLE IF NOT EXISTS spotify (
     name TEXT PRIMARY KEY,
     region TEXT,
     created_at REAL,
     updated_at REAL
-);
-"""
+);
+"""
 
-ACCOUNTS_DEEZER_SQL = """
-CREATE TABLE IF NOT EXISTS deezer (
+    ACCOUNTS_DEEZER_SQL = """
+CREATE TABLE IF NOT EXISTS deezer (
     name TEXT PRIMARY KEY,
     arl TEXT,
     region TEXT,
     created_at REAL,
     updated_at REAL
-);
-"""
+);
+"""
 
-
-# --- Check functions ---
-
-def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
+    @staticmethod
+    def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
         try:
             cur = conn.execute(f"PRAGMA table_info({table})")
             return {row[1] for row in cur.fetchall()}
         except Exception:
             return set()
 
-
-def check_history_3_0_6(conn: sqlite3.Connection) -> bool:
-    """Return True if history DB matches v3.0.6 schema for main table."""
+    # --- Checks ---
+    def check_history(self, conn: sqlite3.Connection) -> bool:
         cur = conn.execute(
             "SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'"
         )
@@ -126,10 +125,9 @@ def check_history_3_0_6(conn: sqlite3.Connection) -> bool:
             "duration_total_ms",
             "explicit",
         }
-    return required.issubset(_table_columns(conn, "download_history"))
+        return required.issubset(self._table_columns(conn, "download_history"))
 
-
-def check_watch_playlists_3_0_6(conn: sqlite3.Connection) -> bool:
+    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
         cur = conn.execute(
             "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'"
         )
@@ -147,10 +145,9 @@ def check_watch_playlists_3_0_6(conn: sqlite3.Connection) -> bool:
             "added_at",
             "is_active",
         }
-    return required.issubset(_table_columns(conn, "watched_playlists"))
+        return required.issubset(self._table_columns(conn, "watched_playlists"))
 
-
-def check_watch_artists_3_0_6(conn: sqlite3.Connection) -> bool:
+    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
         cur = conn.execute(
             "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'"
         )
@@ -168,44 +165,37 @@ def check_watch_artists_3_0_6(conn: sqlite3.Connection) -> bool:
             "popularity",
             "image_url",
         }
-    return required.issubset(_table_columns(conn, "watched_artists"))
+        return required.issubset(self._table_columns(conn, "watched_artists"))
 
-
-def check_accounts_3_0_6(conn: sqlite3.Connection) -> bool:
-    """Return True if accounts DB has both spotify and deezer tables with expected columns."""
-    # Spotify table
+    def check_accounts(self, conn: sqlite3.Connection) -> bool:
         cur = conn.execute(
             "SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'"
         )
         if not cur.fetchone():
             return False
-    spotify_required = {"name", "region", "created_at", "updated_at"}
-    if not spotify_required.issubset(_table_columns(conn, "spotify")):
+        if not {"name", "region", "created_at", "updated_at"}.issubset(
+            self._table_columns(conn, "spotify")
+        ):
             return False
-    # Deezer table
         cur = conn.execute(
             "SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'"
        )
         if not cur.fetchone():
             return False
-    deezer_required = {"name", "arl", "region", "created_at", "updated_at"}
-    return deezer_required.issubset(_table_columns(conn, "deezer"))
+        return {"name", "arl", "region", "created_at", "updated_at"}.issubset(
+            self._table_columns(conn, "deezer")
+        )
 
+    # --- Updates ---
+    def update_history(self, conn: sqlite3.Connection) -> None:
+        conn.executescript(self.HISTORY_SQL)
 
-# --- Update functions ---
+    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
+        conn.executescript(self.WATCH_PLAYLISTS_SQL)
 
-def update_history_3_0_6(conn: sqlite3.Connection) -> None:
-    conn.executescript(HISTORY_SQL)
+    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
+        conn.executescript(self.WATCH_ARTISTS_SQL)
 
-
-def update_watch_playlists_3_0_6(conn: sqlite3.Connection) -> None:
-    conn.executescript(WATCH_PLAYLISTS_SQL)
-
-
-def update_watch_artists_3_0_6(conn: sqlite3.Connection) -> None:
-    conn.executescript(WATCH_ARTISTS_SQL)
-
-
-def update_accounts_3_0_6(conn: sqlite3.Connection) -> None:
-    conn.executescript(ACCOUNTS_SPOTIFY_SQL)
-    conn.executescript(ACCOUNTS_DEEZER_SQL)
+    def update_accounts(self, conn: sqlite3.Connection) -> None:
+        conn.executescript(self.ACCOUNTS_SPOTIFY_SQL)
+        conn.executescript(self.ACCOUNTS_DEEZER_SQL)
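For orientation, a quick sketch (not part of the commit) of driving the refactored class directly against a throwaway database: check first, run the idempotent script only when the check fails.

    import sqlite3
    from routes.migrations.v3_0_6 import MigrationV3_0_6

    m = MigrationV3_0_6()
    conn = sqlite3.connect(":memory:")

    assert not m.check_history(conn)   # fresh DB: table absent
    m.update_history(conn)             # CREATE TABLE IF NOT EXISTS + indexes
    assert m.check_history(conn)       # required columns now present
    conn.commit()
    conn.close()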
routes/migrations/v3_1_0.py  (new file, 331 lines)
@@ -0,0 +1,331 @@
import sqlite3


class MigrationV3_1_0:
    # --- Expected Schemas (3.1.0) ---
    HISTORY_MAIN_REQUIRED = {
        "id",
        "download_type",
        "title",
        "artists",
        "timestamp",
        "status",
        "service",
        "quality_format",
        "quality_bitrate",
        "total_tracks",
        "successful_tracks",
        "failed_tracks",
        "skipped_tracks",
        "children_table",
        "task_id",
        "external_ids",
        "metadata",
        "release_date",
        "genres",
        "images",
        "owner",
        "album_type",
        "duration_total_ms",
        "explicit",
    }

    HISTORY_MAIN_SQL = """
CREATE TABLE IF NOT EXISTS download_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    download_type TEXT NOT NULL,
    title TEXT NOT NULL,
    artists TEXT,
    timestamp REAL NOT NULL,
    status TEXT NOT NULL,
    service TEXT,
    quality_format TEXT,
    quality_bitrate TEXT,
    total_tracks INTEGER,
    successful_tracks INTEGER,
    failed_tracks INTEGER,
    skipped_tracks INTEGER,
    children_table TEXT,
    task_id TEXT,
    external_ids TEXT,
    metadata TEXT,
    release_date TEXT,
    genres TEXT,
    images TEXT,
    owner TEXT,
    album_type TEXT,
    duration_total_ms INTEGER,
    explicit BOOLEAN
);
CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
"""

    # Children tables schema (album_% / playlist_%):
    HISTORY_CHILDREN_EXPECTED = {
        "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
        "title": "TEXT NOT NULL",
        "artists": "TEXT",
        "album_title": "TEXT",
        "duration_ms": "INTEGER",
        "track_number": "INTEGER",
        "disc_number": "INTEGER",
        "explicit": "BOOLEAN",
        "status": "TEXT NOT NULL",
        "external_ids": "TEXT",
        "genres": "TEXT",
        "isrc": "TEXT",
        "timestamp": "REAL NOT NULL",
        "position": "INTEGER",
        "metadata": "TEXT",
    }

    WATCH_PLAYLISTS_REQUIRED = {
        "spotify_id",
        "name",
        "owner_id",
        "owner_name",
        "total_tracks",
        "link",
        "snapshot_id",
        "last_checked",
        "added_at",
        "is_active",
    }

    # Per-playlist tracks table expected columns
    PLAYLIST_TRACKS_EXPECTED = {
        "spotify_track_id": "TEXT PRIMARY KEY",
        "title": "TEXT",
        "artist_names": "TEXT",
        "album_name": "TEXT",
        "album_artist_names": "TEXT",
        "track_number": "INTEGER",
        "album_spotify_id": "TEXT",
        "duration_ms": "INTEGER",
        "added_at_playlist": "TEXT",
        "added_to_db": "INTEGER",
        "is_present_in_spotify": "INTEGER DEFAULT 1",
        "last_seen_in_spotify": "INTEGER",
        "snapshot_id": "TEXT",
        "final_path": "TEXT",
    }

    WATCH_ARTISTS_REQUIRED = {
        "spotify_id",
        "name",
        "link",
        "total_albums_on_spotify",
        "last_checked",
        "added_at",
        "is_active",
        "genres",
        "popularity",
        "image_url",
    }

    ARTIST_ALBUMS_EXPECTED = {
        "album_spotify_id": "TEXT PRIMARY KEY",
        "artist_spotify_id": "TEXT",
        "name": "TEXT",
        "album_group": "TEXT",
        "album_type": "TEXT",
        "release_date": "TEXT",
        "release_date_precision": "TEXT",
        "total_tracks": "INTEGER",
        "link": "TEXT",
        "image_url": "TEXT",
        "added_to_db": "INTEGER",
        "last_seen_on_spotify": "INTEGER",
        "download_task_id": "TEXT",
        "download_status": "INTEGER DEFAULT 0",
        "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
    }

    ACCOUNTS_SPOTIFY_REQUIRED = {"name", "region", "created_at", "updated_at"}
    ACCOUNTS_DEEZER_REQUIRED = {"name", "arl", "region", "created_at", "updated_at"}

    ACCOUNTS_SPOTIFY_SQL = """
CREATE TABLE IF NOT EXISTS spotify (
    name TEXT PRIMARY KEY,
    region TEXT,
    created_at REAL,
    updated_at REAL
);
"""

    ACCOUNTS_DEEZER_SQL = """
CREATE TABLE IF NOT EXISTS deezer (
    name TEXT PRIMARY KEY,
    arl TEXT,
    region TEXT,
    created_at REAL,
    updated_at REAL
);
"""

    @staticmethod
    def _columns(conn: sqlite3.Connection, table: str) -> set[str]:
        try:
            cur = conn.execute(f"PRAGMA table_info({table})")
            return {row[1] for row in cur.fetchall()}
        except Exception:
            return set()

    @staticmethod
    def _ensure_table_schema(conn: sqlite3.Connection, table_name: str, expected: dict[str, str], desc: str) -> None:
        cur = conn.execute(f"PRAGMA table_info({table_name})")
        existing = {row[1] for row in cur.fetchall()}
        for col, col_type in expected.items():
            if col in existing:
                continue
            col_type_for_add = (
                col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
            )
            try:
                conn.execute(f"ALTER TABLE {table_name} ADD COLUMN {col} {col_type_for_add}")
            except sqlite3.OperationalError:
                pass

    # --- Check methods ---
    def check_history(self, conn: sqlite3.Connection) -> bool:
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'")
        if not cur.fetchone():
            return False
        return self.HISTORY_MAIN_REQUIRED.issubset(self._columns(conn, "download_history"))

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'")
        if not cur.fetchone():
            return False
        if not self.WATCH_PLAYLISTS_REQUIRED.issubset(self._columns(conn, "watched_playlists")):
            return False
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
        rows = cur.fetchall()
        for (table_name,) in rows:
            cols = self._columns(conn, table_name)
            required_cols = set(self.PLAYLIST_TRACKS_EXPECTED.keys())
            if not required_cols.issubset(cols):
                return False
        return True

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'")
        if not cur.fetchone():
            return False
        if not self.WATCH_ARTISTS_REQUIRED.issubset(self._columns(conn, "watched_artists")):
            return False
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'")
        rows = cur.fetchall()
        for (table_name,) in rows:
            cols = self._columns(conn, table_name)
            required_cols = set(self.ARTIST_ALBUMS_EXPECTED.keys())
            if not required_cols.issubset(cols):
                return False
        return True

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'")
        if not cur.fetchone():
            return False
        if not self.ACCOUNTS_SPOTIFY_REQUIRED.issubset(self._columns(conn, "spotify")):
            return False
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'")
        if not cur.fetchone():
            return False
        if not self.ACCOUNTS_DEEZER_REQUIRED.issubset(self._columns(conn, "deezer")):
            return False
        return True

    # --- Update methods ---
    def update_history(self, conn: sqlite3.Connection) -> None:
        conn.executescript(self.HISTORY_MAIN_SQL)

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_playlists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                owner_id TEXT,
                owner_name TEXT,
                total_tracks INTEGER,
                link TEXT,
                snapshot_id TEXT,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1
            )
            """
        )
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
        for (table_name,) in cur.fetchall():
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    spotify_track_id TEXT PRIMARY KEY,
                    title TEXT,
                    artist_names TEXT,
                    album_name TEXT,
                    album_artist_names TEXT,
                    track_number INTEGER,
                    album_spotify_id TEXT,
                    duration_ms INTEGER,
                    added_at_playlist TEXT,
                    added_to_db INTEGER,
                    is_present_in_spotify INTEGER DEFAULT 1,
                    last_seen_in_spotify INTEGER,
                    snapshot_id TEXT,
                    final_path TEXT
                )
                """
            )
            self._ensure_table_schema(conn, table_name, self.PLAYLIST_TRACKS_EXPECTED, f"playlist tracks {table_name}")

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                link TEXT,
                total_albums_on_spotify INTEGER,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1,
                genres TEXT,
                popularity INTEGER,
                image_url TEXT
            )
            """
        )
        cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'")
        for (table_name,) in cur.fetchall():
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    album_spotify_id TEXT PRIMARY KEY,
                    artist_spotify_id TEXT,
                    name TEXT,
                    album_group TEXT,
                    album_type TEXT,
                    release_date TEXT,
                    release_date_precision TEXT,
                    total_tracks INTEGER,
                    link TEXT,
                    image_url TEXT,
                    added_to_db INTEGER,
                    last_seen_on_spotify INTEGER,
                    download_task_id TEXT,
                    download_status INTEGER DEFAULT 0,
                    is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
                )
                """
            )
            self._ensure_table_schema(conn, table_name, self.ARTIST_ALBUMS_EXPECTED, f"artist albums {table_name}")

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        conn.executescript(self.ACCOUNTS_SPOTIFY_SQL)
        conn.executescript(self.ACCOUNTS_DEEZER_SQL)
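A small sketch (not part of the commit) of the backfill behaviour: a pre-3.1.0 per-playlist table missing final_path gains the column in place when update_watch_playlists runs.

    import sqlite3
    from routes.migrations.v3_1_0 import MigrationV3_1_0

    m = MigrationV3_1_0()
    conn = sqlite3.connect(":memory:")
    # A stale per-playlist table missing most 3.1.0 columns:
    conn.execute("CREATE TABLE playlist_old (spotify_track_id TEXT PRIMARY KEY, title TEXT)")

    assert not m.check_watch_playlists(conn)  # watched_playlists itself is absent too
    m.update_watch_playlists(conn)            # creates watched_playlists, backfills playlist_old
    assert "final_path" in m._columns(conn, "playlist_old")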
@@ -996,7 +996,7 @@ def init_artists_db():
 
 def _create_artist_albums_table(artist_spotify_id: str):
     """Creates or updates a table for a specific artist to store their albums in artists.db."""
-    table_name = f"artist_{artist_spotify_id.replace('-', '_').replace(' ', '_')}"  # Sanitize table name
+    table_name = f"artist_{artist_spotify_id.replace('-', '_').replace(' ', '_')}_albums"  # Sanitize table name
     try:
         with _get_artists_db_connection() as conn:  # Use artists connection
             cursor = conn.cursor()
 
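The one-line change appends an _albums suffix, matching the artist_def456_albums fixture in the new test below; both the old and new names still satisfy the LIKE 'artist_%' scan in MigrationV3_1_0. For a sample (hypothetical) Spotify artist ID:

    artist_spotify_id = "4Z8W4fKeB5YxbusRsdQVPb"  # illustrative ID only
    table_name = f"artist_{artist_spotify_id.replace('-', '_').replace(' ', '_')}_albums"
    print(table_name)  # artist_4Z8W4fKeB5YxbusRsdQVPb_albums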
tests/migration/test_v3_1_0.py  (new file, 238 lines)
@@ -0,0 +1,238 @@
import sqlite3
from pathlib import Path
import pytest
import json


# Override the autouse credentials fixture from conftest for this module
@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests():
    yield


def _create_playlists_db_3_1_0(db_path: Path):
    db_path.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(db_path)) as conn:
        # watched_playlists
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_playlists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                owner_id TEXT,
                owner_name TEXT,
                total_tracks INTEGER,
                link TEXT,
                snapshot_id TEXT,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1
            )
            """
        )
        # example playlist table with all expected columns
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS playlist_abc123 (
                spotify_track_id TEXT PRIMARY KEY,
                title TEXT,
                artist_names TEXT,
                album_name TEXT,
                album_artist_names TEXT,
                track_number INTEGER,
                album_spotify_id TEXT,
                duration_ms INTEGER,
                added_at_playlist TEXT,
                added_to_db INTEGER,
                is_present_in_spotify INTEGER DEFAULT 1,
                last_seen_in_spotify INTEGER,
                snapshot_id TEXT,
                final_path TEXT
            )
            """
        )


def _create_artists_db_3_1_0(db_path: Path):
    db_path.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(db_path)) as conn:
        # watched_artists
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                link TEXT,
                total_albums_on_spotify INTEGER,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1,
                genres TEXT,
                popularity INTEGER,
                image_url TEXT
            )
            """
        )
        # example artist albums table (using _albums suffix per docs)
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS artist_def456_albums (
                album_spotify_id TEXT PRIMARY KEY,
                artist_spotify_id TEXT,
                name TEXT,
                album_group TEXT,
                album_type TEXT,
                release_date TEXT,
                release_date_precision TEXT,
                total_tracks INTEGER,
                link TEXT,
                image_url TEXT,
                added_to_db INTEGER,
                last_seen_on_spotify INTEGER,
                download_task_id TEXT,
                download_status INTEGER DEFAULT 0,
                is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
            )
            """
        )


def _create_history_db_3_1_0(db_path: Path):
    db_path.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(db_path)) as conn:
        conn.executescript(
            """
            CREATE TABLE IF NOT EXISTS download_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                download_type TEXT NOT NULL,
                title TEXT NOT NULL,
                artists TEXT,
                timestamp REAL NOT NULL,
                status TEXT NOT NULL,
                service TEXT,
                quality_format TEXT,
                quality_bitrate TEXT,
                total_tracks INTEGER,
                successful_tracks INTEGER,
                failed_tracks INTEGER,
                skipped_tracks INTEGER,
                children_table TEXT,
                task_id TEXT,
                external_ids TEXT,
                metadata TEXT,
                release_date TEXT,
                genres TEXT,
                images TEXT,
                owner TEXT,
                album_type TEXT,
                duration_total_ms INTEGER,
                explicit BOOLEAN
            );
            """
        )
        # reference children tables to be created by migration
        conn.execute(
            "INSERT INTO download_history (download_type, title, artists, timestamp, status, children_table) VALUES ('album','X','[]',strftime('%s','now'),'completed','album_child1')"
        )
        conn.execute(
            "INSERT INTO download_history (download_type, title, artists, timestamp, status, children_table) VALUES ('playlist','Y','[]',strftime('%s','now'),'completed','playlist_child2')"
        )


def _create_accounts_db_3_1_0(db_path: Path):
    db_path.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(db_path)) as conn:
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS spotify (
                name TEXT PRIMARY KEY,
                region TEXT,
                created_at REAL,
                updated_at REAL
            )
            """
        )
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS deezer (
                name TEXT PRIMARY KEY,
                arl TEXT,
                region TEXT,
                created_at REAL,
                updated_at REAL
            )
            """
        )


def _get_columns(db_path: Path, table: str) -> set[str]:
    with sqlite3.connect(str(db_path)) as conn:
        cur = conn.execute(f"PRAGMA table_info({table})")
        return {row[1] for row in cur.fetchall()}


def test_migration_3_1_0_upgrades_all(tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
    data_dir = tmp_path / "data"
    history_db = data_dir / "history" / "download_history.db"
    playlists_db = data_dir / "watch" / "playlists.db"
    artists_db = data_dir / "watch" / "artists.db"
    creds_dir = data_dir / "creds"
    accounts_db = creds_dir / "accounts.db"
    blobs_dir = creds_dir / "blobs"
    search_json = creds_dir / "search.json"

    # Create all DBs to match 3.1.0 schema
    _create_history_db_3_1_0(history_db)
    _create_playlists_db_3_1_0(playlists_db)
    _create_artists_db_3_1_0(artists_db)
    _create_accounts_db_3_1_0(accounts_db)

    from routes.migrations import runner
    monkeypatch.setattr(runner, "DATA_DIR", data_dir)
    monkeypatch.setattr(runner, "HISTORY_DB", history_db)
    monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch")
    monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db)
    monkeypatch.setattr(runner, "ARTISTS_DB", artists_db)
    monkeypatch.setattr(runner, "CREDS_DIR", creds_dir)
    monkeypatch.setattr(runner, "ACCOUNTS_DB", accounts_db)
    monkeypatch.setattr(runner, "BLOBS_DIR", blobs_dir)
    monkeypatch.setattr(runner, "SEARCH_JSON", search_json)

    # Act: run migrations (should be mostly no-op, but will ensure children tables)
    runner.run_migrations_if_needed()
    runner.run_migrations_if_needed()

    # Children tables created/ensured
    expected_children_cols = {
        "id",
        "title",
        "artists",
        "album_title",
        "duration_ms",
        "track_number",
        "disc_number",
        "explicit",
        "status",
        "external_ids",
        "genres",
        "isrc",
        "timestamp",
        "position",
        "metadata",
    }
    assert _get_columns(history_db, "album_child1").issuperset(expected_children_cols)
    assert _get_columns(history_db, "playlist_child2").issuperset(expected_children_cols)

    # Playlist per-table schema present
    playlist_cols = _get_columns(playlists_db, "playlist_abc123")
    assert {"spotify_track_id", "title", "artist_names", "album_name", "album_artist_names", "track_number", "album_spotify_id", "duration_ms", "added_at_playlist", "added_to_db", "is_present_in_spotify", "last_seen_in_spotify", "snapshot_id", "final_path"}.issubset(playlist_cols)

    # Artist per-table schema present
    artist_cols = _get_columns(artists_db, "artist_def456_albums")
    assert {"album_spotify_id", "artist_spotify_id", "name", "album_group", "album_type", "release_date", "release_date_precision", "total_tracks", "link", "image_url", "added_to_db", "last_seen_on_spotify", "download_task_id", "download_status", "is_fully_downloaded_managed_by_app"}.issubset(artist_cols)

    # Accounts DB present and creds filesystem ensured
    assert accounts_db.exists()
    assert blobs_dir.exists() and blobs_dir.is_dir()
    assert search_json.exists()
    data = json.loads(search_json.read_text())
    assert set(data.keys()) == {"client_id", "client_secret"}
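Assuming the repository's usual pytest setup, the new module runs standalone; a hypothetical programmatic invocation (pytest.main is pytest's documented entry point):

    import pytest
    raise SystemExit(pytest.main(["tests/migration/test_v3_1_0.py", "-q"]))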