Improve 3.0.6 migration
This commit is contained in:
@@ -4,7 +4,6 @@ from pathlib import Path
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from .v3_0_6 import MigrationV3_0_6
|
from .v3_0_6 import MigrationV3_0_6
|
||||||
from .v3_1_0 import MigrationV3_1_0
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -39,8 +38,69 @@ CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
|
|||||||
"metadata": "TEXT",
|
"metadata": "TEXT",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# 3.1.2 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
|
||||||
|
EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = {
|
||||||
|
"spotify_id": "TEXT PRIMARY KEY",
|
||||||
|
"name": "TEXT",
|
||||||
|
"owner_id": "TEXT",
|
||||||
|
"owner_name": "TEXT",
|
||||||
|
"total_tracks": "INTEGER",
|
||||||
|
"link": "TEXT",
|
||||||
|
"snapshot_id": "TEXT",
|
||||||
|
"last_checked": "INTEGER",
|
||||||
|
"added_at": "INTEGER",
|
||||||
|
"is_active": "INTEGER DEFAULT 1",
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECTED_PLAYLIST_TRACKS_COLUMNS: dict[str, str] = {
|
||||||
|
"spotify_track_id": "TEXT PRIMARY KEY",
|
||||||
|
"title": "TEXT",
|
||||||
|
"artist_names": "TEXT",
|
||||||
|
"album_name": "TEXT",
|
||||||
|
"album_artist_names": "TEXT",
|
||||||
|
"track_number": "INTEGER",
|
||||||
|
"album_spotify_id": "TEXT",
|
||||||
|
"duration_ms": "INTEGER",
|
||||||
|
"added_at_playlist": "TEXT",
|
||||||
|
"added_to_db": "INTEGER",
|
||||||
|
"is_present_in_spotify": "INTEGER DEFAULT 1",
|
||||||
|
"last_seen_in_spotify": "INTEGER",
|
||||||
|
"snapshot_id": "TEXT",
|
||||||
|
"final_path": "TEXT",
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECTED_WATCHED_ARTISTS_COLUMNS: dict[str, str] = {
|
||||||
|
"spotify_id": "TEXT PRIMARY KEY",
|
||||||
|
"name": "TEXT",
|
||||||
|
"link": "TEXT",
|
||||||
|
"total_albums_on_spotify": "INTEGER",
|
||||||
|
"last_checked": "INTEGER",
|
||||||
|
"added_at": "INTEGER",
|
||||||
|
"is_active": "INTEGER DEFAULT 1",
|
||||||
|
"genres": "TEXT",
|
||||||
|
"popularity": "INTEGER",
|
||||||
|
"image_url": "TEXT",
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
|
||||||
|
"album_spotify_id": "TEXT PRIMARY KEY",
|
||||||
|
"artist_spotify_id": "TEXT",
|
||||||
|
"name": "TEXT",
|
||||||
|
"album_group": "TEXT",
|
||||||
|
"album_type": "TEXT",
|
||||||
|
"release_date": "TEXT",
|
||||||
|
"release_date_precision": "TEXT",
|
||||||
|
"total_tracks": "INTEGER",
|
||||||
|
"link": "TEXT",
|
||||||
|
"image_url": "TEXT",
|
||||||
|
"added_to_db": "INTEGER",
|
||||||
|
"last_seen_on_spotify": "INTEGER",
|
||||||
|
"download_task_id": "TEXT",
|
||||||
|
"download_status": "INTEGER DEFAULT 0",
|
||||||
|
"is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
|
||||||
|
}
|
||||||
|
|
||||||
m306 = MigrationV3_0_6()
|
m306 = MigrationV3_0_6()
|
||||||
m310 = MigrationV3_1_0()
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
|
def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
|
||||||
@@ -151,15 +211,118 @@ def _ensure_creds_filesystem() -> None:
|
|||||||
logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
|
logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
|
||||||
|
|
||||||
|
|
||||||
def _apply_versioned_updates(conn: sqlite3.Connection, c306, u306, c310, u310, post_update=None) -> None:
|
def _apply_versioned_updates(conn: sqlite3.Connection, c_base, u_base, post_update=None) -> None:
|
||||||
if not c306(conn):
|
if not c_base(conn):
|
||||||
u306(conn)
|
u_base(conn)
|
||||||
if not c310(conn):
|
|
||||||
u310(conn)
|
|
||||||
if post_update:
|
if post_update:
|
||||||
post_update(conn)
|
post_update(conn)
|
||||||
|
|
||||||
|
|
||||||
|
# --- 3.1.2 upgrade helpers for Watch DBs ---
|
||||||
|
|
||||||
|
def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
|
||||||
|
try:
|
||||||
|
# Ensure core watched_playlists table exists and has expected schema
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS watched_playlists (
|
||||||
|
spotify_id TEXT PRIMARY KEY,
|
||||||
|
name TEXT,
|
||||||
|
owner_id TEXT,
|
||||||
|
owner_name TEXT,
|
||||||
|
total_tracks INTEGER,
|
||||||
|
link TEXT,
|
||||||
|
snapshot_id TEXT,
|
||||||
|
last_checked INTEGER,
|
||||||
|
added_at INTEGER,
|
||||||
|
is_active INTEGER DEFAULT 1
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
_ensure_table_schema(conn, "watched_playlists", EXPECTED_WATCHED_PLAYLISTS_COLUMNS, "watched playlists")
|
||||||
|
|
||||||
|
# Upgrade all dynamic playlist_ tables
|
||||||
|
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
|
||||||
|
for row in cur.fetchall():
|
||||||
|
table_name = row[0]
|
||||||
|
conn.execute(
|
||||||
|
f"""
|
||||||
|
CREATE TABLE IF NOT EXISTS {table_name} (
|
||||||
|
spotify_track_id TEXT PRIMARY KEY,
|
||||||
|
title TEXT,
|
||||||
|
artist_names TEXT,
|
||||||
|
album_name TEXT,
|
||||||
|
album_artist_names TEXT,
|
||||||
|
track_number INTEGER,
|
||||||
|
album_spotify_id TEXT,
|
||||||
|
duration_ms INTEGER,
|
||||||
|
added_at_playlist TEXT,
|
||||||
|
added_to_db INTEGER,
|
||||||
|
is_present_in_spotify INTEGER DEFAULT 1,
|
||||||
|
last_seen_in_spotify INTEGER,
|
||||||
|
snapshot_id TEXT,
|
||||||
|
final_path TEXT
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
_ensure_table_schema(conn, table_name, EXPECTED_PLAYLIST_TRACKS_COLUMNS, f"playlist tracks ({table_name})")
|
||||||
|
logger.info("Upgraded watch playlists DB to 3.1.2 schema")
|
||||||
|
except Exception:
|
||||||
|
logger.error("Failed to upgrade watch playlists DB to 3.1.2 schema", exc_info=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
|
||||||
|
try:
|
||||||
|
# Ensure core watched_artists table exists and has expected schema
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS watched_artists (
|
||||||
|
spotify_id TEXT PRIMARY KEY,
|
||||||
|
name TEXT,
|
||||||
|
link TEXT,
|
||||||
|
total_albums_on_spotify INTEGER,
|
||||||
|
last_checked INTEGER,
|
||||||
|
added_at INTEGER,
|
||||||
|
is_active INTEGER DEFAULT 1,
|
||||||
|
genres TEXT,
|
||||||
|
popularity INTEGER,
|
||||||
|
image_url TEXT
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
_ensure_table_schema(conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists")
|
||||||
|
|
||||||
|
# Upgrade all dynamic artist_ tables
|
||||||
|
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'")
|
||||||
|
for row in cur.fetchall():
|
||||||
|
table_name = row[0]
|
||||||
|
conn.execute(
|
||||||
|
f"""
|
||||||
|
CREATE TABLE IF NOT EXISTS {table_name} (
|
||||||
|
album_spotify_id TEXT PRIMARY KEY,
|
||||||
|
artist_spotify_id TEXT,
|
||||||
|
name TEXT,
|
||||||
|
album_group TEXT,
|
||||||
|
album_type TEXT,
|
||||||
|
release_date TEXT,
|
||||||
|
release_date_precision TEXT,
|
||||||
|
total_tracks INTEGER,
|
||||||
|
link TEXT,
|
||||||
|
image_url TEXT,
|
||||||
|
added_to_db INTEGER,
|
||||||
|
last_seen_on_spotify INTEGER,
|
||||||
|
download_task_id TEXT,
|
||||||
|
download_status INTEGER DEFAULT 0,
|
||||||
|
is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
_ensure_table_schema(conn, table_name, EXPECTED_ARTIST_ALBUMS_COLUMNS, f"artist albums ({table_name})")
|
||||||
|
logger.info("Upgraded watch artists DB to 3.1.2 schema")
|
||||||
|
except Exception:
|
||||||
|
logger.error("Failed to upgrade watch artists DB to 3.1.2 schema", exc_info=True)
|
||||||
|
|
||||||
|
|
||||||
def run_migrations_if_needed() -> None:
|
def run_migrations_if_needed() -> None:
|
||||||
try:
|
try:
|
||||||
# History DB
|
# History DB
|
||||||
@@ -170,8 +333,6 @@ def run_migrations_if_needed() -> None:
|
|||||||
h_conn,
|
h_conn,
|
||||||
m306.check_history,
|
m306.check_history,
|
||||||
m306.update_history,
|
m306.update_history,
|
||||||
m310.check_history,
|
|
||||||
m310.update_history,
|
|
||||||
post_update=_update_children_tables_for_history,
|
post_update=_update_children_tables_for_history,
|
||||||
)
|
)
|
||||||
h_conn.commit()
|
h_conn.commit()
|
||||||
@@ -186,9 +347,8 @@ def run_migrations_if_needed() -> None:
|
|||||||
p_conn,
|
p_conn,
|
||||||
m306.check_watch_playlists,
|
m306.check_watch_playlists,
|
||||||
m306.update_watch_playlists,
|
m306.update_watch_playlists,
|
||||||
m310.check_watch_playlists,
|
|
||||||
m310.update_watch_playlists,
|
|
||||||
)
|
)
|
||||||
|
_update_watch_playlists_db(p_conn)
|
||||||
p_conn.commit()
|
p_conn.commit()
|
||||||
finally:
|
finally:
|
||||||
p_conn.close()
|
p_conn.close()
|
||||||
@@ -201,9 +361,8 @@ def run_migrations_if_needed() -> None:
|
|||||||
a_conn,
|
a_conn,
|
||||||
m306.check_watch_artists,
|
m306.check_watch_artists,
|
||||||
m306.update_watch_artists,
|
m306.update_watch_artists,
|
||||||
m310.check_watch_artists,
|
|
||||||
m310.update_watch_artists,
|
|
||||||
)
|
)
|
||||||
|
_update_watch_artists_db(a_conn)
|
||||||
a_conn.commit()
|
a_conn.commit()
|
||||||
finally:
|
finally:
|
||||||
a_conn.close()
|
a_conn.close()
|
||||||
@@ -216,8 +375,6 @@ def run_migrations_if_needed() -> None:
|
|||||||
c_conn,
|
c_conn,
|
||||||
m306.check_accounts,
|
m306.check_accounts,
|
||||||
m306.update_accounts,
|
m306.update_accounts,
|
||||||
m310.check_accounts,
|
|
||||||
m310.update_accounts,
|
|
||||||
)
|
)
|
||||||
c_conn.commit()
|
c_conn.commit()
|
||||||
finally:
|
finally:
|
||||||
|
|||||||
@@ -1,331 +0,0 @@
|
|||||||
import sqlite3
|
|
||||||
|
|
||||||
|
|
||||||
class MigrationV3_1_0:
|
|
||||||
# --- Expected Schemas (3.1.0) ---
|
|
||||||
HISTORY_MAIN_REQUIRED = {
|
|
||||||
"id",
|
|
||||||
"download_type",
|
|
||||||
"title",
|
|
||||||
"artists",
|
|
||||||
"timestamp",
|
|
||||||
"status",
|
|
||||||
"service",
|
|
||||||
"quality_format",
|
|
||||||
"quality_bitrate",
|
|
||||||
"total_tracks",
|
|
||||||
"successful_tracks",
|
|
||||||
"failed_tracks",
|
|
||||||
"skipped_tracks",
|
|
||||||
"children_table",
|
|
||||||
"task_id",
|
|
||||||
"external_ids",
|
|
||||||
"metadata",
|
|
||||||
"release_date",
|
|
||||||
"genres",
|
|
||||||
"images",
|
|
||||||
"owner",
|
|
||||||
"album_type",
|
|
||||||
"duration_total_ms",
|
|
||||||
"explicit",
|
|
||||||
}
|
|
||||||
|
|
||||||
HISTORY_MAIN_SQL = """
|
|
||||||
CREATE TABLE IF NOT EXISTS download_history (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
download_type TEXT NOT NULL,
|
|
||||||
title TEXT NOT NULL,
|
|
||||||
artists TEXT,
|
|
||||||
timestamp REAL NOT NULL,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
service TEXT,
|
|
||||||
quality_format TEXT,
|
|
||||||
quality_bitrate TEXT,
|
|
||||||
total_tracks INTEGER,
|
|
||||||
successful_tracks INTEGER,
|
|
||||||
failed_tracks INTEGER,
|
|
||||||
skipped_tracks INTEGER,
|
|
||||||
children_table TEXT,
|
|
||||||
task_id TEXT,
|
|
||||||
external_ids TEXT,
|
|
||||||
metadata TEXT,
|
|
||||||
release_date TEXT,
|
|
||||||
genres TEXT,
|
|
||||||
images TEXT,
|
|
||||||
owner TEXT,
|
|
||||||
album_type TEXT,
|
|
||||||
duration_total_ms INTEGER,
|
|
||||||
explicit BOOLEAN
|
|
||||||
);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
|
|
||||||
CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Children tables schema (album_% / playlist_%):
|
|
||||||
HISTORY_CHILDREN_EXPECTED = {
|
|
||||||
"id": "INTEGER PRIMARY KEY AUTOINCREMENT",
|
|
||||||
"title": "TEXT NOT NULL",
|
|
||||||
"artists": "TEXT",
|
|
||||||
"album_title": "TEXT",
|
|
||||||
"duration_ms": "INTEGER",
|
|
||||||
"track_number": "INTEGER",
|
|
||||||
"disc_number": "INTEGER",
|
|
||||||
"explicit": "BOOLEAN",
|
|
||||||
"status": "TEXT NOT NULL",
|
|
||||||
"external_ids": "TEXT",
|
|
||||||
"genres": "TEXT",
|
|
||||||
"isrc": "TEXT",
|
|
||||||
"timestamp": "REAL NOT NULL",
|
|
||||||
"position": "INTEGER",
|
|
||||||
"metadata": "TEXT",
|
|
||||||
}
|
|
||||||
|
|
||||||
WATCH_PLAYLISTS_REQUIRED = {
|
|
||||||
"spotify_id",
|
|
||||||
"name",
|
|
||||||
"owner_id",
|
|
||||||
"owner_name",
|
|
||||||
"total_tracks",
|
|
||||||
"link",
|
|
||||||
"snapshot_id",
|
|
||||||
"last_checked",
|
|
||||||
"added_at",
|
|
||||||
"is_active",
|
|
||||||
}
|
|
||||||
|
|
||||||
# Per-playlist tracks table expected columns
|
|
||||||
PLAYLIST_TRACKS_EXPECTED = {
|
|
||||||
"spotify_track_id": "TEXT PRIMARY KEY",
|
|
||||||
"title": "TEXT",
|
|
||||||
"artist_names": "TEXT",
|
|
||||||
"album_name": "TEXT",
|
|
||||||
"album_artist_names": "TEXT",
|
|
||||||
"track_number": "INTEGER",
|
|
||||||
"album_spotify_id": "TEXT",
|
|
||||||
"duration_ms": "INTEGER",
|
|
||||||
"added_at_playlist": "TEXT",
|
|
||||||
"added_to_db": "INTEGER",
|
|
||||||
"is_present_in_spotify": "INTEGER DEFAULT 1",
|
|
||||||
"last_seen_in_spotify": "INTEGER",
|
|
||||||
"snapshot_id": "TEXT",
|
|
||||||
"final_path": "TEXT",
|
|
||||||
}
|
|
||||||
|
|
||||||
WATCH_ARTISTS_REQUIRED = {
|
|
||||||
"spotify_id",
|
|
||||||
"name",
|
|
||||||
"link",
|
|
||||||
"total_albums_on_spotify",
|
|
||||||
"last_checked",
|
|
||||||
"added_at",
|
|
||||||
"is_active",
|
|
||||||
"genres",
|
|
||||||
"popularity",
|
|
||||||
"image_url",
|
|
||||||
}
|
|
||||||
|
|
||||||
ARTIST_ALBUMS_EXPECTED = {
|
|
||||||
"album_spotify_id": "TEXT PRIMARY KEY",
|
|
||||||
"artist_spotify_id": "TEXT",
|
|
||||||
"name": "TEXT",
|
|
||||||
"album_group": "TEXT",
|
|
||||||
"album_type": "TEXT",
|
|
||||||
"release_date": "TEXT",
|
|
||||||
"release_date_precision": "TEXT",
|
|
||||||
"total_tracks": "INTEGER",
|
|
||||||
"link": "TEXT",
|
|
||||||
"image_url": "TEXT",
|
|
||||||
"added_to_db": "INTEGER",
|
|
||||||
"last_seen_on_spotify": "INTEGER",
|
|
||||||
"download_task_id": "TEXT",
|
|
||||||
"download_status": "INTEGER DEFAULT 0",
|
|
||||||
"is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
|
|
||||||
}
|
|
||||||
|
|
||||||
ACCOUNTS_SPOTIFY_REQUIRED = {"name", "region", "created_at", "updated_at"}
|
|
||||||
ACCOUNTS_DEEZER_REQUIRED = {"name", "arl", "region", "created_at", "updated_at"}
|
|
||||||
|
|
||||||
ACCOUNTS_SPOTIFY_SQL = """
|
|
||||||
CREATE TABLE IF NOT EXISTS spotify (
|
|
||||||
name TEXT PRIMARY KEY,
|
|
||||||
region TEXT,
|
|
||||||
created_at REAL,
|
|
||||||
updated_at REAL
|
|
||||||
);
|
|
||||||
"""
|
|
||||||
|
|
||||||
ACCOUNTS_DEEZER_SQL = """
|
|
||||||
CREATE TABLE IF NOT EXISTS deezer (
|
|
||||||
name TEXT PRIMARY KEY,
|
|
||||||
arl TEXT,
|
|
||||||
region TEXT,
|
|
||||||
created_at REAL,
|
|
||||||
updated_at REAL
|
|
||||||
);
|
|
||||||
"""
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _columns(conn: sqlite3.Connection, table: str) -> set[str]:
|
|
||||||
try:
|
|
||||||
cur = conn.execute(f"PRAGMA table_info({table})")
|
|
||||||
return {row[1] for row in cur.fetchall()}
|
|
||||||
except Exception:
|
|
||||||
return set()
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _ensure_table_schema(conn: sqlite3.Connection, table_name: str, expected: dict[str, str], desc: str) -> None:
|
|
||||||
cur = conn.execute(f"PRAGMA table_info({table_name})")
|
|
||||||
existing = {row[1] for row in cur.fetchall()}
|
|
||||||
for col, col_type in expected.items():
|
|
||||||
if col in existing:
|
|
||||||
continue
|
|
||||||
col_type_for_add = (
|
|
||||||
col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
conn.execute(f"ALTER TABLE {table_name} ADD COLUMN {col} {col_type_for_add}")
|
|
||||||
except sqlite3.OperationalError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# --- Check methods ---
|
|
||||||
def check_history(self, conn: sqlite3.Connection) -> bool:
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'")
|
|
||||||
if not cur.fetchone():
|
|
||||||
return False
|
|
||||||
return self.HISTORY_MAIN_REQUIRED.issubset(self._columns(conn, "download_history"))
|
|
||||||
|
|
||||||
def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'")
|
|
||||||
if not cur.fetchone():
|
|
||||||
return False
|
|
||||||
if not self.WATCH_PLAYLISTS_REQUIRED.issubset(self._columns(conn, "watched_playlists")):
|
|
||||||
return False
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
|
|
||||||
rows = cur.fetchall()
|
|
||||||
for (table_name,) in rows:
|
|
||||||
cols = self._columns(conn, table_name)
|
|
||||||
required_cols = set(self.PLAYLIST_TRACKS_EXPECTED.keys())
|
|
||||||
if not required_cols.issubset(cols):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'")
|
|
||||||
if not cur.fetchone():
|
|
||||||
return False
|
|
||||||
if not self.WATCH_ARTISTS_REQUIRED.issubset(self._columns(conn, "watched_artists")):
|
|
||||||
return False
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'")
|
|
||||||
rows = cur.fetchall()
|
|
||||||
for (table_name,) in rows:
|
|
||||||
cols = self._columns(conn, table_name)
|
|
||||||
required_cols = set(self.ARTIST_ALBUMS_EXPECTED.keys())
|
|
||||||
if not required_cols.issubset(cols):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def check_accounts(self, conn: sqlite3.Connection) -> bool:
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'")
|
|
||||||
if not cur.fetchone():
|
|
||||||
return False
|
|
||||||
if not self.ACCOUNTS_SPOTIFY_REQUIRED.issubset(self._columns(conn, "spotify")):
|
|
||||||
return False
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'")
|
|
||||||
if not cur.fetchone():
|
|
||||||
return False
|
|
||||||
if not self.ACCOUNTS_DEEZER_REQUIRED.issubset(self._columns(conn, "deezer")):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
# --- Update methods ---
|
|
||||||
def update_history(self, conn: sqlite3.Connection) -> None:
|
|
||||||
conn.executescript(self.HISTORY_MAIN_SQL)
|
|
||||||
|
|
||||||
def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS watched_playlists (
|
|
||||||
spotify_id TEXT PRIMARY KEY,
|
|
||||||
name TEXT,
|
|
||||||
owner_id TEXT,
|
|
||||||
owner_name TEXT,
|
|
||||||
total_tracks INTEGER,
|
|
||||||
link TEXT,
|
|
||||||
snapshot_id TEXT,
|
|
||||||
last_checked INTEGER,
|
|
||||||
added_at INTEGER,
|
|
||||||
is_active INTEGER DEFAULT 1
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
|
|
||||||
for (table_name,) in cur.fetchall():
|
|
||||||
conn.execute(
|
|
||||||
f"""
|
|
||||||
CREATE TABLE IF NOT EXISTS {table_name} (
|
|
||||||
spotify_track_id TEXT PRIMARY KEY,
|
|
||||||
title TEXT,
|
|
||||||
artist_names TEXT,
|
|
||||||
album_name TEXT,
|
|
||||||
album_artist_names TEXT,
|
|
||||||
track_number INTEGER,
|
|
||||||
album_spotify_id TEXT,
|
|
||||||
duration_ms INTEGER,
|
|
||||||
added_at_playlist TEXT,
|
|
||||||
added_to_db INTEGER,
|
|
||||||
is_present_in_spotify INTEGER DEFAULT 1,
|
|
||||||
last_seen_in_spotify INTEGER,
|
|
||||||
snapshot_id TEXT,
|
|
||||||
final_path TEXT
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
self._ensure_table_schema(conn, table_name, self.PLAYLIST_TRACKS_EXPECTED, f"playlist tracks {table_name}")
|
|
||||||
|
|
||||||
def update_watch_artists(self, conn: sqlite3.Connection) -> None:
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS watched_artists (
|
|
||||||
spotify_id TEXT PRIMARY KEY,
|
|
||||||
name TEXT,
|
|
||||||
link TEXT,
|
|
||||||
total_albums_on_spotify INTEGER,
|
|
||||||
last_checked INTEGER,
|
|
||||||
added_at INTEGER,
|
|
||||||
is_active INTEGER DEFAULT 1,
|
|
||||||
genres TEXT,
|
|
||||||
popularity INTEGER,
|
|
||||||
image_url TEXT
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'")
|
|
||||||
for (table_name,) in cur.fetchall():
|
|
||||||
conn.execute(
|
|
||||||
f"""
|
|
||||||
CREATE TABLE IF NOT EXISTS {table_name} (
|
|
||||||
album_spotify_id TEXT PRIMARY KEY,
|
|
||||||
artist_spotify_id TEXT,
|
|
||||||
name TEXT,
|
|
||||||
album_group TEXT,
|
|
||||||
album_type TEXT,
|
|
||||||
release_date TEXT,
|
|
||||||
release_date_precision TEXT,
|
|
||||||
total_tracks INTEGER,
|
|
||||||
link TEXT,
|
|
||||||
image_url TEXT,
|
|
||||||
added_to_db INTEGER,
|
|
||||||
last_seen_on_spotify INTEGER,
|
|
||||||
download_task_id TEXT,
|
|
||||||
download_status INTEGER DEFAULT 0,
|
|
||||||
is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
self._ensure_table_schema(conn, table_name, self.ARTIST_ALBUMS_EXPECTED, f"artist albums {table_name}")
|
|
||||||
|
|
||||||
def update_accounts(self, conn: sqlite3.Connection) -> None:
|
|
||||||
conn.executescript(self.ACCOUNTS_SPOTIFY_SQL)
|
|
||||||
conn.executescript(self.ACCOUNTS_DEEZER_SQL)
|
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "spotizerr-ui",
|
"name": "spotizerr-ui",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "3.1.1",
|
"version": "3.1.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
|
|||||||
@@ -122,6 +122,103 @@ def _create_306_history_db(db_path: Path) -> None:
|
|||||||
conn.execute(
|
conn.execute(
|
||||||
"CREATE TABLE IF NOT EXISTS album_legacy (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)"
|
"CREATE TABLE IF NOT EXISTS album_legacy (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)"
|
||||||
)
|
)
|
||||||
|
# Create a fully-specified children table from docs and add rows
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS album_f9e8d7c6b5 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
title TEXT NOT NULL,
|
||||||
|
artists TEXT,
|
||||||
|
album_title TEXT,
|
||||||
|
duration_ms INTEGER,
|
||||||
|
track_number INTEGER,
|
||||||
|
disc_number INTEGER,
|
||||||
|
explicit BOOLEAN,
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
external_ids TEXT,
|
||||||
|
genres TEXT,
|
||||||
|
isrc TEXT,
|
||||||
|
timestamp REAL NOT NULL,
|
||||||
|
position INTEGER,
|
||||||
|
metadata TEXT
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO download_history (
|
||||||
|
download_type, title, artists, timestamp, status, service,
|
||||||
|
quality_format, quality_bitrate, total_tracks, successful_tracks,
|
||||||
|
failed_tracks, skipped_tracks, children_table, task_id,
|
||||||
|
external_ids, metadata, release_date, genres, images, owner,
|
||||||
|
album_type, duration_total_ms, explicit
|
||||||
|
) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
"album",
|
||||||
|
"Random Access Memories",
|
||||||
|
"[\"Daft Punk\"]",
|
||||||
|
"partial",
|
||||||
|
"spotify",
|
||||||
|
"FLAC",
|
||||||
|
"1411",
|
||||||
|
13,
|
||||||
|
12,
|
||||||
|
1,
|
||||||
|
0,
|
||||||
|
"album_f9e8d7c6b5",
|
||||||
|
"celery-task-id-789",
|
||||||
|
"{\"spotify\": \"4m2880jivSbbyEGAKfITCa\"}",
|
||||||
|
"{\"callback_type\": \"album\"}",
|
||||||
|
"{\"year\": 2013, \"month\": 5, \"day\": 17}",
|
||||||
|
"[\"disco\", \"funk\"]",
|
||||||
|
"[{\"url\": \"https://i.scdn.co/image/...\"}]",
|
||||||
|
None,
|
||||||
|
"album",
|
||||||
|
4478293,
|
||||||
|
0
|
||||||
|
),
|
||||||
|
)
|
||||||
|
conn.executemany(
|
||||||
|
"""
|
||||||
|
INSERT INTO album_f9e8d7c6b5 (
|
||||||
|
title, artists, album_title, duration_ms, track_number, disc_number, explicit, status,
|
||||||
|
external_ids, genres, isrc, timestamp, position, metadata
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, strftime('%s','now'), ?, ?)
|
||||||
|
""",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"Get Lucky (feat. Pharrell Williams & Nile Rodgers)",
|
||||||
|
"[\"Daft Punk\", \"Pharrell Williams\", \"Nile Rodgers\"]",
|
||||||
|
"Random Access Memories",
|
||||||
|
369626,
|
||||||
|
8,
|
||||||
|
1,
|
||||||
|
0,
|
||||||
|
"completed",
|
||||||
|
"{\"spotify\": \"69kOkLUCdZlE8ApD28j1JG\", \"isrc\": \"GBUJH1300019\"}",
|
||||||
|
"[]",
|
||||||
|
"GBUJH1300019",
|
||||||
|
0,
|
||||||
|
"{\"album\": {...}, \"type\": \"track\"}",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"Lose Yourself to Dance (feat. Pharrell Williams)",
|
||||||
|
"[\"Daft Punk\", \"Pharrell Williams\"]",
|
||||||
|
"Random Access Memories",
|
||||||
|
353893,
|
||||||
|
6,
|
||||||
|
1,
|
||||||
|
0,
|
||||||
|
"failed",
|
||||||
|
"{\"spotify\": \"5L95vS64r8PAj5M8H1oYkm\", \"isrc\": \"GBUJH1300017\"}",
|
||||||
|
"[]",
|
||||||
|
"GBUJH1300017",
|
||||||
|
0,
|
||||||
|
"{\"album\": {...}, \"failure_reason\": \"Could not find matching track on Deezer.\"}",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None:
|
def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None:
|
||||||
@@ -143,6 +240,93 @@ def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None:
|
|||||||
);
|
);
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
# Insert a sample watched playlist row (docs example)
|
||||||
|
pconn.execute(
|
||||||
|
"""
|
||||||
|
INSERT OR REPLACE INTO watched_playlists (
|
||||||
|
spotify_id, name, owner_id, owner_name, total_tracks, link, snapshot_id, last_checked, added_at, is_active
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
"37i9dQZF1DXcBWIGoYBM5M",
|
||||||
|
"Today's Top Hits",
|
||||||
|
"spotify",
|
||||||
|
"Spotify",
|
||||||
|
50,
|
||||||
|
"https://open.spotify.com/playlist/37i9dQZF1DXcBWIGoYBM5M",
|
||||||
|
"MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy",
|
||||||
|
1677187000,
|
||||||
|
1677186950,
|
||||||
|
1,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
# Create a legacy/minimal playlist dynamic table to test schema upgrade
|
||||||
|
pconn.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS playlist_legacy (spotify_track_id TEXT PRIMARY KEY, title TEXT)"
|
||||||
|
)
|
||||||
|
# Create a fully-specified playlist dynamic table (docs example) and add rows
|
||||||
|
pconn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS playlist_37i9dQZF1DXcBWIGoYBM5M (
|
||||||
|
spotify_track_id TEXT PRIMARY KEY,
|
||||||
|
title TEXT,
|
||||||
|
artist_names TEXT,
|
||||||
|
album_name TEXT,
|
||||||
|
album_artist_names TEXT,
|
||||||
|
track_number INTEGER,
|
||||||
|
album_spotify_id TEXT,
|
||||||
|
duration_ms INTEGER,
|
||||||
|
added_at_playlist TEXT,
|
||||||
|
added_to_db INTEGER,
|
||||||
|
is_present_in_spotify INTEGER,
|
||||||
|
last_seen_in_spotify INTEGER,
|
||||||
|
snapshot_id TEXT,
|
||||||
|
final_path TEXT
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
pconn.executemany(
|
||||||
|
"""
|
||||||
|
INSERT OR REPLACE INTO playlist_37i9dQZF1DXcBWIGoYBM5M (
|
||||||
|
spotify_track_id, title, artist_names, album_name, album_artist_names, track_number, album_spotify_id,
|
||||||
|
duration_ms, added_at_playlist, added_to_db, is_present_in_spotify, last_seen_in_spotify, snapshot_id, final_path
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"4k6Uh1HXdhtusDW5y80vNN",
|
||||||
|
"As It Was",
|
||||||
|
"Harry Styles",
|
||||||
|
"Harry's House",
|
||||||
|
"Harry Styles",
|
||||||
|
4,
|
||||||
|
"5r36AJ6VOJtp00oxSkNaAO",
|
||||||
|
167303,
|
||||||
|
"2023-02-20T10:00:00Z",
|
||||||
|
1677186980,
|
||||||
|
1,
|
||||||
|
1677187000,
|
||||||
|
"MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy",
|
||||||
|
"/downloads/music/Harry Styles/Harry's House/04 - As It Was.flac",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"5ww2BF9slyYgAno5EAsoOJ",
|
||||||
|
"Flowers",
|
||||||
|
"Miley Cyrus",
|
||||||
|
"Endless Summer Vacation",
|
||||||
|
"Miley Cyrus",
|
||||||
|
1,
|
||||||
|
"1lw0K2sIKi84gav3e4pG3c",
|
||||||
|
194952,
|
||||||
|
"2023-02-23T12:00:00Z",
|
||||||
|
1677186995,
|
||||||
|
1,
|
||||||
|
1677187000,
|
||||||
|
"MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy",
|
||||||
|
None,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
with sqlite3.connect(str(artists_db)) as aconn:
|
with sqlite3.connect(str(artists_db)) as aconn:
|
||||||
aconn.executescript(
|
aconn.executescript(
|
||||||
"""
|
"""
|
||||||
@@ -160,6 +344,138 @@ def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None:
|
|||||||
);
|
);
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
# Insert a sample watched artist row (docs example)
|
||||||
|
aconn.execute(
|
||||||
|
"""
|
||||||
|
INSERT OR REPLACE INTO watched_artists (
|
||||||
|
spotify_id, name, link, total_albums_on_spotify, last_checked, added_at, is_active, genres, popularity, image_url
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
"4oLeXFyACqeem2VImYeBFe",
|
||||||
|
"Madeon",
|
||||||
|
"https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe",
|
||||||
|
45,
|
||||||
|
1677188000,
|
||||||
|
1677187900,
|
||||||
|
1,
|
||||||
|
"electro house, filter house, french house",
|
||||||
|
65,
|
||||||
|
"https://i.scdn.co/image/ab6761610000e5eb...",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
# Create a legacy/minimal artist dynamic table to test schema upgrade
|
||||||
|
aconn.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS artist_legacy (album_spotify_id TEXT PRIMARY KEY, name TEXT)"
|
||||||
|
)
|
||||||
|
# Create a fully-specified artist dynamic table (docs example) and add rows
|
||||||
|
aconn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS artist_4oLeXFyACqeem2VImYeBFe (
|
||||||
|
album_spotify_id TEXT PRIMARY KEY,
|
||||||
|
artist_spotify_id TEXT,
|
||||||
|
name TEXT,
|
||||||
|
album_group TEXT,
|
||||||
|
album_type TEXT,
|
||||||
|
release_date TEXT,
|
||||||
|
release_date_precision TEXT,
|
||||||
|
total_tracks INTEGER,
|
||||||
|
link TEXT,
|
||||||
|
image_url TEXT,
|
||||||
|
added_to_db INTEGER,
|
||||||
|
last_seen_on_spotify INTEGER,
|
||||||
|
download_task_id TEXT,
|
||||||
|
download_status INTEGER,
|
||||||
|
is_fully_downloaded_managed_by_app INTEGER
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
aconn.executemany(
|
||||||
|
"""
|
||||||
|
INSERT OR REPLACE INTO artist_4oLeXFyACqeem2VImYeBFe (
|
||||||
|
album_spotify_id, artist_spotify_id, name, album_group, album_type, release_date, release_date_precision,
|
||||||
|
total_tracks, link, image_url, added_to_db, last_seen_on_spotify, download_task_id, download_status, is_fully_downloaded_managed_by_app
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"2GWMnf2ltOQd2v2T62a2m8",
|
||||||
|
"4oLeXFyACqeem2VImYeBFe",
|
||||||
|
"Good Faith",
|
||||||
|
"album",
|
||||||
|
"album",
|
||||||
|
"2019-11-15",
|
||||||
|
"day",
|
||||||
|
10,
|
||||||
|
"https://open.spotify.com/album/2GWMnf2ltOQd2v2T62a2m8",
|
||||||
|
"https://i.scdn.co/image/ab67616d0000b273...",
|
||||||
|
1677187950,
|
||||||
|
1677188000,
|
||||||
|
"celery-task-id-123",
|
||||||
|
2,
|
||||||
|
1,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"2smfe2S0AVaxH2I1a5p55n",
|
||||||
|
"4oLeXFyACqeem2VImYeBFe",
|
||||||
|
"Gonna Be Good",
|
||||||
|
"single",
|
||||||
|
"single",
|
||||||
|
"2023-01-19",
|
||||||
|
"day",
|
||||||
|
1,
|
||||||
|
"https://open.spotify.com/album/2smfe2S0AVaxH2I1a5p55n",
|
||||||
|
"https://i.scdn.co/image/ab67616d0000b273...",
|
||||||
|
1677187960,
|
||||||
|
1677188000,
|
||||||
|
"celery-task-id-456",
|
||||||
|
1,
|
||||||
|
0,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_306_accounts(creds_dir: Path, accounts_db: Path) -> None:
|
||||||
|
creds_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
with sqlite3.connect(str(accounts_db)) as conn:
|
||||||
|
conn.executescript(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS spotify (
|
||||||
|
name TEXT PRIMARY KEY,
|
||||||
|
region TEXT,
|
||||||
|
created_at REAL,
|
||||||
|
updated_at REAL
|
||||||
|
);
|
||||||
|
CREATE TABLE IF NOT EXISTS deezer (
|
||||||
|
name TEXT PRIMARY KEY,
|
||||||
|
arl TEXT,
|
||||||
|
region TEXT,
|
||||||
|
created_at REAL,
|
||||||
|
updated_at REAL
|
||||||
|
);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
"INSERT OR REPLACE INTO spotify (name, region, created_at, updated_at) VALUES (?, ?, ?, ?)",
|
||||||
|
("my_main_spotify", "US", 1677190000.0, 1677190000.0),
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
"INSERT OR REPLACE INTO deezer (name, arl, region, created_at, updated_at) VALUES (?, ?, ?, ?, ?)",
|
||||||
|
("my_hifi_deezer", "a1b2c3d4e5f6a1b2c3d4e5f6...", "FR", 1677190100.0, 1677190100.0),
|
||||||
|
)
|
||||||
|
# Pre-create creds filesystem
|
||||||
|
search_json = creds_dir / "search.json"
|
||||||
|
if not search_json.exists():
|
||||||
|
search_json.write_text('{"client_id":"your_global_spotify_client_id","client_secret":"your_global_spotify_client_secret"}\n', encoding="utf-8")
|
||||||
|
blobs_dir = creds_dir / "blobs" / "my_main_spotify"
|
||||||
|
blobs_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
creds_blob = blobs_dir / "credentials.json"
|
||||||
|
if not creds_blob.exists():
|
||||||
|
creds_blob.write_text(
|
||||||
|
'{"version":"v1","access_token":"...","expires_at":1677193600,"refresh_token":"...","scope":"user-read-private user-read-email playlist-read-private"}\n',
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _get_columns(db_path: Path, table: str) -> set[str]:
|
def _get_columns(db_path: Path, table: str) -> set[str]:
|
||||||
@@ -168,6 +484,12 @@ def _get_columns(db_path: Path, table: str) -> set[str]:
|
|||||||
return {row[1] for row in cur.fetchall()}
|
return {row[1] for row in cur.fetchall()}
|
||||||
|
|
||||||
|
|
||||||
|
def _get_count(db_path: Path, table: str) -> int:
|
||||||
|
with sqlite3.connect(str(db_path)) as conn:
|
||||||
|
cur = conn.execute(f"SELECT COUNT(*) FROM {table}")
|
||||||
|
return cur.fetchone()[0]
|
||||||
|
|
||||||
|
|
||||||
def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
|
def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
|
||||||
# Arrange temp paths
|
# Arrange temp paths
|
||||||
data_dir = tmp_path / "data"
|
data_dir = tmp_path / "data"
|
||||||
@@ -179,9 +501,10 @@ def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypa
|
|||||||
blobs_dir = creds_dir / "blobs"
|
blobs_dir = creds_dir / "blobs"
|
||||||
search_json = creds_dir / "search.json"
|
search_json = creds_dir / "search.json"
|
||||||
|
|
||||||
# Create 3.0.6 base schemas and sample data
|
# Create 3.0.6 base schemas and sample data (full simulation)
|
||||||
_create_306_history_db(history_db)
|
_create_306_history_db(history_db)
|
||||||
_create_306_watch_dbs(playlists_db, artists_db)
|
_create_306_watch_dbs(playlists_db, artists_db)
|
||||||
|
_create_306_accounts(creds_dir, accounts_db)
|
||||||
|
|
||||||
# Point the migration runner to our temp DBs
|
# Point the migration runner to our temp DBs
|
||||||
from routes.migrations import runner
|
from routes.migrations import runner
|
||||||
@@ -222,16 +545,89 @@ def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypa
|
|||||||
assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols)
|
assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols)
|
||||||
# Legacy table upgraded
|
# Legacy table upgraded
|
||||||
assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols)
|
assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols)
|
||||||
|
# Pre-existing children table preserved and correct
|
||||||
|
assert _get_columns(history_db, "album_f9e8d7c6b5").issuperset(expected_children_cols)
|
||||||
|
assert _get_count(history_db, "album_f9e8d7c6b5") == 2
|
||||||
|
|
||||||
# Assert: accounts DB created with expected tables and columns
|
# Assert: accounts DB created/preserved with expected tables and columns
|
||||||
assert accounts_db.exists()
|
assert accounts_db.exists()
|
||||||
spotify_cols = _get_columns(accounts_db, "spotify")
|
spotify_cols = _get_columns(accounts_db, "spotify")
|
||||||
deezer_cols = _get_columns(accounts_db, "deezer")
|
deezer_cols = _get_columns(accounts_db, "deezer")
|
||||||
assert {"name", "region", "created_at", "updated_at"}.issubset(spotify_cols)
|
assert {"name", "region", "created_at", "updated_at"}.issubset(spotify_cols)
|
||||||
assert {"name", "arl", "region", "created_at", "updated_at"}.issubset(deezer_cols)
|
assert {"name", "arl", "region", "created_at", "updated_at"}.issubset(deezer_cols)
|
||||||
|
|
||||||
# Assert: creds filesystem
|
# Assert: creds filesystem and pre-existing blob preserved
|
||||||
assert blobs_dir.exists() and blobs_dir.is_dir()
|
assert blobs_dir.exists() and blobs_dir.is_dir()
|
||||||
assert search_json.exists()
|
assert search_json.exists()
|
||||||
data = json.loads(search_json.read_text())
|
data = json.loads(search_json.read_text())
|
||||||
assert set(data.keys()) == {"client_id", "client_secret"}
|
assert set(data.keys()) == {"client_id", "client_secret"}
|
||||||
|
assert (blobs_dir / "my_main_spotify" / "credentials.json").exists()
|
||||||
|
|
||||||
|
# Assert: watch playlists core and dynamic tables upgraded to/at 3.1.2 schema
|
||||||
|
watched_playlists_cols = _get_columns(playlists_db, "watched_playlists")
|
||||||
|
assert {
|
||||||
|
"spotify_id",
|
||||||
|
"name",
|
||||||
|
"owner_id",
|
||||||
|
"owner_name",
|
||||||
|
"total_tracks",
|
||||||
|
"link",
|
||||||
|
"snapshot_id",
|
||||||
|
"last_checked",
|
||||||
|
"added_at",
|
||||||
|
"is_active",
|
||||||
|
}.issubset(watched_playlists_cols)
|
||||||
|
playlist_dynamic_expected = {
|
||||||
|
"spotify_track_id",
|
||||||
|
"title",
|
||||||
|
"artist_names",
|
||||||
|
"album_name",
|
||||||
|
"album_artist_names",
|
||||||
|
"track_number",
|
||||||
|
"album_spotify_id",
|
||||||
|
"duration_ms",
|
||||||
|
"added_at_playlist",
|
||||||
|
"added_to_db",
|
||||||
|
"is_present_in_spotify",
|
||||||
|
"last_seen_in_spotify",
|
||||||
|
"snapshot_id",
|
||||||
|
"final_path",
|
||||||
|
}
|
||||||
|
assert _get_columns(playlists_db, "playlist_legacy").issuperset(playlist_dynamic_expected)
|
||||||
|
assert _get_columns(playlists_db, "playlist_37i9dQZF1DXcBWIGoYBM5M").issuperset(playlist_dynamic_expected)
|
||||||
|
assert _get_count(playlists_db, "playlist_37i9dQZF1DXcBWIGoYBM5M") == 2
|
||||||
|
|
||||||
|
# Assert: watch artists core and dynamic tables upgraded to/at 3.1.2 schema
|
||||||
|
watched_artists_cols = _get_columns(artists_db, "watched_artists")
|
||||||
|
assert {
|
||||||
|
"spotify_id",
|
||||||
|
"name",
|
||||||
|
"link",
|
||||||
|
"total_albums_on_spotify",
|
||||||
|
"last_checked",
|
||||||
|
"added_at",
|
||||||
|
"is_active",
|
||||||
|
"genres",
|
||||||
|
"popularity",
|
||||||
|
"image_url",
|
||||||
|
}.issubset(watched_artists_cols)
|
||||||
|
artist_dynamic_expected = {
|
||||||
|
"album_spotify_id",
|
||||||
|
"artist_spotify_id",
|
||||||
|
"name",
|
||||||
|
"album_group",
|
||||||
|
"album_type",
|
||||||
|
"release_date",
|
||||||
|
"release_date_precision",
|
||||||
|
"total_tracks",
|
||||||
|
"link",
|
||||||
|
"image_url",
|
||||||
|
"added_to_db",
|
||||||
|
"last_seen_on_spotify",
|
||||||
|
"download_task_id",
|
||||||
|
"download_status",
|
||||||
|
"is_fully_downloaded_managed_by_app",
|
||||||
|
}
|
||||||
|
assert _get_columns(artists_db, "artist_legacy").issuperset(artist_dynamic_expected)
|
||||||
|
assert _get_columns(artists_db, "artist_4oLeXFyACqeem2VImYeBFe").issuperset(artist_dynamic_expected)
|
||||||
|
assert _get_count(artists_db, "artist_4oLeXFyACqeem2VImYeBFe") == 2
|
||||||
@@ -1,238 +0,0 @@
|
|||||||
import sqlite3
|
|
||||||
from pathlib import Path
|
|
||||||
import pytest
|
|
||||||
import json
|
|
||||||
|
|
||||||
# Override the autouse credentials fixture from conftest for this module
|
|
||||||
# Override the autouse credentials fixture from conftest for this module:
# a no-op so these migration tests run without real credentials being set up.
@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests():
    """Session-scoped no-op override of the conftest credentials fixture."""
    yield
|
||||||
|
|
||||||
|
|
||||||
def _create_playlists_db_3_1_0(db_path: Path):
|
|
||||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
with sqlite3.connect(str(db_path)) as conn:
|
|
||||||
# watched_playlists
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS watched_playlists (
|
|
||||||
spotify_id TEXT PRIMARY KEY,
|
|
||||||
name TEXT,
|
|
||||||
owner_id TEXT,
|
|
||||||
owner_name TEXT,
|
|
||||||
total_tracks INTEGER,
|
|
||||||
link TEXT,
|
|
||||||
snapshot_id TEXT,
|
|
||||||
last_checked INTEGER,
|
|
||||||
added_at INTEGER,
|
|
||||||
is_active INTEGER DEFAULT 1
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
# example playlist table with all expected columns
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS playlist_abc123 (
|
|
||||||
spotify_track_id TEXT PRIMARY KEY,
|
|
||||||
title TEXT,
|
|
||||||
artist_names TEXT,
|
|
||||||
album_name TEXT,
|
|
||||||
album_artist_names TEXT,
|
|
||||||
track_number INTEGER,
|
|
||||||
album_spotify_id TEXT,
|
|
||||||
duration_ms INTEGER,
|
|
||||||
added_at_playlist TEXT,
|
|
||||||
added_to_db INTEGER,
|
|
||||||
is_present_in_spotify INTEGER DEFAULT 1,
|
|
||||||
last_seen_in_spotify INTEGER,
|
|
||||||
snapshot_id TEXT,
|
|
||||||
final_path TEXT
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _create_artists_db_3_1_0(db_path: Path):
|
|
||||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
with sqlite3.connect(str(db_path)) as conn:
|
|
||||||
# watched_artists
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS watched_artists (
|
|
||||||
spotify_id TEXT PRIMARY KEY,
|
|
||||||
name TEXT,
|
|
||||||
link TEXT,
|
|
||||||
total_albums_on_spotify INTEGER,
|
|
||||||
last_checked INTEGER,
|
|
||||||
added_at INTEGER,
|
|
||||||
is_active INTEGER DEFAULT 1,
|
|
||||||
genres TEXT,
|
|
||||||
popularity INTEGER,
|
|
||||||
image_url TEXT
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
# example artist albums table (using _albums suffix per docs)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS artist_def456_albums (
|
|
||||||
album_spotify_id TEXT PRIMARY KEY,
|
|
||||||
artist_spotify_id TEXT,
|
|
||||||
name TEXT,
|
|
||||||
album_group TEXT,
|
|
||||||
album_type TEXT,
|
|
||||||
release_date TEXT,
|
|
||||||
release_date_precision TEXT,
|
|
||||||
total_tracks INTEGER,
|
|
||||||
link TEXT,
|
|
||||||
image_url TEXT,
|
|
||||||
added_to_db INTEGER,
|
|
||||||
last_seen_on_spotify INTEGER,
|
|
||||||
download_task_id TEXT,
|
|
||||||
download_status INTEGER DEFAULT 0,
|
|
||||||
is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _create_history_db_3_1_0(db_path: Path):
|
|
||||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
with sqlite3.connect(str(db_path)) as conn:
|
|
||||||
conn.executescript(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS download_history (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
download_type TEXT NOT NULL,
|
|
||||||
title TEXT NOT NULL,
|
|
||||||
artists TEXT,
|
|
||||||
timestamp REAL NOT NULL,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
service TEXT,
|
|
||||||
quality_format TEXT,
|
|
||||||
quality_bitrate TEXT,
|
|
||||||
total_tracks INTEGER,
|
|
||||||
successful_tracks INTEGER,
|
|
||||||
failed_tracks INTEGER,
|
|
||||||
skipped_tracks INTEGER,
|
|
||||||
children_table TEXT,
|
|
||||||
task_id TEXT,
|
|
||||||
external_ids TEXT,
|
|
||||||
metadata TEXT,
|
|
||||||
release_date TEXT,
|
|
||||||
genres TEXT,
|
|
||||||
images TEXT,
|
|
||||||
owner TEXT,
|
|
||||||
album_type TEXT,
|
|
||||||
duration_total_ms INTEGER,
|
|
||||||
explicit BOOLEAN
|
|
||||||
);
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
# reference children tables to be created by migration
|
|
||||||
conn.execute(
|
|
||||||
"INSERT INTO download_history (download_type, title, artists, timestamp, status, children_table) VALUES ('album','X','[]',strftime('%s','now'),'completed','album_child1')"
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"INSERT INTO download_history (download_type, title, artists, timestamp, status, children_table) VALUES ('playlist','Y','[]',strftime('%s','now'),'completed','playlist_child2')"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _create_accounts_db_3_1_0(db_path: Path):
|
|
||||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
with sqlite3.connect(str(db_path)) as conn:
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS spotify (
|
|
||||||
name TEXT PRIMARY KEY,
|
|
||||||
region TEXT,
|
|
||||||
created_at REAL,
|
|
||||||
updated_at REAL
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS deezer (
|
|
||||||
name TEXT PRIMARY KEY,
|
|
||||||
arl TEXT,
|
|
||||||
region TEXT,
|
|
||||||
created_at REAL,
|
|
||||||
updated_at REAL
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_columns(db_path: Path, table: str) -> set[str]:
|
|
||||||
with sqlite3.connect(str(db_path)) as conn:
|
|
||||||
cur = conn.execute(f"PRAGMA table_info({table})")
|
|
||||||
return {row[1] for row in cur.fetchall()}
|
|
||||||
|
|
||||||
|
|
||||||
def test_migration_3_1_0_upgrades_all(tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
    """End-to-end: running migrations over an already-3.1.0 install is safe.

    Arranges every DB (history, watch playlists/artists, accounts) at the
    3.1.0 schema, points the migration runner at them, runs migrations twice
    (idempotency), and asserts schemas and creds filesystem are intact.
    """
    data_dir = tmp_path / "data"
    history_db = data_dir / "history" / "download_history.db"
    playlists_db = data_dir / "watch" / "playlists.db"
    artists_db = data_dir / "watch" / "artists.db"
    creds_dir = data_dir / "creds"
    accounts_db = creds_dir / "accounts.db"
    blobs_dir = creds_dir / "blobs"
    search_json = creds_dir / "search.json"

    # Create all DBs to match 3.1.0 schema
    _create_history_db_3_1_0(history_db)
    _create_playlists_db_3_1_0(playlists_db)
    _create_artists_db_3_1_0(artists_db)
    _create_accounts_db_3_1_0(accounts_db)

    # Point the migration runner at the temp paths before it runs.
    from routes.migrations import runner

    monkeypatch.setattr(runner, "DATA_DIR", data_dir)
    monkeypatch.setattr(runner, "HISTORY_DB", history_db)
    monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch")
    monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db)
    monkeypatch.setattr(runner, "ARTISTS_DB", artists_db)
    monkeypatch.setattr(runner, "CREDS_DIR", creds_dir)
    monkeypatch.setattr(runner, "ACCOUNTS_DB", accounts_db)
    monkeypatch.setattr(runner, "BLOBS_DIR", blobs_dir)
    monkeypatch.setattr(runner, "SEARCH_JSON", search_json)

    # Act: run migrations (should be mostly no-op, but will ensure children tables)
    # Run twice to verify idempotency.
    runner.run_migrations_if_needed()
    runner.run_migrations_if_needed()

    # Children tables created/ensured
    expected_children_cols = {
        "id",
        "title",
        "artists",
        "album_title",
        "duration_ms",
        "track_number",
        "disc_number",
        "explicit",
        "status",
        "external_ids",
        "genres",
        "isrc",
        "timestamp",
        "position",
        "metadata",
    }
    assert _get_columns(history_db, "album_child1").issuperset(expected_children_cols)
    assert _get_columns(history_db, "playlist_child2").issuperset(expected_children_cols)

    # Playlist per-table schema present
    playlist_cols = _get_columns(playlists_db, "playlist_abc123")
    assert {"spotify_track_id", "title", "artist_names", "album_name", "album_artist_names", "track_number", "album_spotify_id", "duration_ms", "added_at_playlist", "added_to_db", "is_present_in_spotify", "last_seen_in_spotify", "snapshot_id", "final_path"}.issubset(playlist_cols)

    # Artist per-table schema present
    artist_cols = _get_columns(artists_db, "artist_def456_albums")
    assert {"album_spotify_id", "artist_spotify_id", "name", "album_group", "album_type", "release_date", "release_date_precision", "total_tracks", "link", "image_url", "added_to_db", "last_seen_on_spotify", "download_task_id", "download_status", "is_fully_downloaded_managed_by_app"}.issubset(artist_cols)

    # Accounts DB present and creds filesystem ensured
    assert accounts_db.exists()
    assert blobs_dir.exists() and blobs_dir.is_dir()
    assert search_json.exists()
    data = json.loads(search_json.read_text())
    assert set(data.keys()) == {"client_id", "client_secret"}
|
|
||||||
Reference in New Issue
Block a user