First attempt at the 3.1.0 migration scripts

This commit is contained in:
Xoconoch
2025-08-17 12:09:33 -06:00
parent c5c5acc665
commit 770ac3c588
5 changed files with 801 additions and 236 deletions

View File

@@ -3,16 +3,8 @@ import sqlite3
from pathlib import Path
from typing import Optional
from .v3_0_6 import (
check_history_3_0_6,
check_watch_playlists_3_0_6,
check_watch_artists_3_0_6,
update_history_3_0_6,
update_watch_playlists_3_0_6,
update_watch_artists_3_0_6,
check_accounts_3_0_6,
update_accounts_3_0_6,
)
from .v3_0_6 import MigrationV3_0_6
from .v3_1_0 import MigrationV3_1_0
logger = logging.getLogger(__name__)
@@ -47,6 +39,9 @@ CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
"metadata": "TEXT",
}
m306 = MigrationV3_0_6()
m310 = MigrationV3_1_0()
def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
try:
@@ -59,21 +54,12 @@ def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
return None
def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
try:
cur = conn.execute(f"PRAGMA table_info({table})")
return {row[1] for row in cur.fetchall()}
except Exception:
return set()
def _ensure_table_schema(
conn: sqlite3.Connection,
table_name: str,
expected_columns: dict[str, str],
table_description: str,
) -> None:
"""Ensure the given table has all expected columns, adding any missing columns safely."""
try:
cur = conn.execute(f"PRAGMA table_info({table_name})")
existing_info = cur.fetchall()
@@ -81,7 +67,6 @@ def _ensure_table_schema(
for col_name, col_type in expected_columns.items():
if col_name in existing_names:
continue
# Strip PK/NOT NULL when altering existing table to avoid errors
col_type_for_add = (
col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
)
@@ -104,7 +89,6 @@ def _ensure_table_schema(
def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
"""Create children table if missing and ensure it has all expected columns."""
conn.execute(
f"""
CREATE TABLE IF NOT EXISTS {table_name} (
@@ -130,9 +114,7 @@ def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str)
def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
"""Ensure all existing children tables and referenced children tables conform to expected schema."""
try:
# Create or update any tables referenced by download_history.children_table
try:
cur = conn.execute(
"SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
@@ -145,7 +127,6 @@ def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
except sqlite3.Error as e:
logger.warning(f"Failed to scan referenced children tables from main history: {e}")
# Find any legacy children tables by name pattern album_% or playlist_%
try:
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
@@ -161,7 +142,6 @@ def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
def _ensure_creds_filesystem() -> None:
"""Ensure blobs directory and search.json exist."""
try:
BLOBS_DIR.mkdir(parents=True, exist_ok=True)
if not SEARCH_JSON.exists():
@@ -171,48 +151,74 @@ def _ensure_creds_filesystem() -> None:
logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
def _apply_versioned_updates(conn: sqlite3.Connection, c306, u306, c310, u310, post_update=None) -> None:
if not c306(conn):
u306(conn)
if not c310(conn):
u310(conn)
if post_update:
post_update(conn)
def run_migrations_if_needed() -> None:
"""Detect and apply necessary migrations by version for each DB.
Idempotent by design.
"""
try:
# History DB
h_conn = _safe_connect(HISTORY_DB)
if h_conn:
try:
if not check_history_3_0_6(h_conn):
update_history_3_0_6(h_conn)
# Ensure children tables regardless
_update_children_tables_for_history(h_conn)
_apply_versioned_updates(
h_conn,
m306.check_history,
m306.update_history,
m310.check_history,
m310.update_history,
post_update=_update_children_tables_for_history,
)
h_conn.commit()
finally:
h_conn.close()
# Watch DBs
# Watch playlists DB
p_conn = _safe_connect(PLAYLISTS_DB)
if p_conn:
try:
if not check_watch_playlists_3_0_6(p_conn):
update_watch_playlists_3_0_6(p_conn)
_apply_versioned_updates(
p_conn,
m306.check_watch_playlists,
m306.update_watch_playlists,
m310.check_watch_playlists,
m310.update_watch_playlists,
)
p_conn.commit()
finally:
p_conn.close()
# Watch artists DB
a_conn = _safe_connect(ARTISTS_DB)
if a_conn:
try:
if not check_watch_artists_3_0_6(a_conn):
update_watch_artists_3_0_6(a_conn)
_apply_versioned_updates(
a_conn,
m306.check_watch_artists,
m306.update_watch_artists,
m310.check_watch_artists,
m310.update_watch_artists,
)
a_conn.commit()
finally:
a_conn.close()
# Credentials accounts DB and files
# Accounts DB
c_conn = _safe_connect(ACCOUNTS_DB)
if c_conn:
try:
if not check_accounts_3_0_6(c_conn):
update_accounts_3_0_6(c_conn)
_apply_versioned_updates(
c_conn,
m306.check_accounts,
m306.update_accounts,
m310.check_accounts,
m310.update_accounts,
)
c_conn.commit()
finally:
c_conn.close()

View File

@@ -1,211 +1,201 @@
import sqlite3
HISTORY_SQL = """
CREATE TABLE IF NOT EXISTS download_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
download_type TEXT NOT NULL,
title TEXT NOT NULL,
artists TEXT,
timestamp REAL NOT NULL,
status TEXT NOT NULL,
service TEXT,
quality_format TEXT,
quality_bitrate TEXT,
total_tracks INTEGER,
successful_tracks INTEGER,
failed_tracks INTEGER,
skipped_tracks INTEGER,
children_table TEXT,
task_id TEXT,
external_ids TEXT,
metadata TEXT,
release_date TEXT,
genres TEXT,
images TEXT,
owner TEXT,
album_type TEXT,
duration_total_ms INTEGER,
explicit BOOLEAN
);
CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
"""
WATCH_PLAYLISTS_SQL = """
CREATE TABLE IF NOT EXISTS watched_playlists (
spotify_id TEXT PRIMARY KEY,
name TEXT,
owner_id TEXT,
owner_name TEXT,
total_tracks INTEGER,
link TEXT,
snapshot_id TEXT,
last_checked INTEGER,
added_at INTEGER,
is_active INTEGER DEFAULT 1
);
"""
class MigrationV3_0_6:
HISTORY_SQL = """
CREATE TABLE IF NOT EXISTS download_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
download_type TEXT NOT NULL,
title TEXT NOT NULL,
artists TEXT,
timestamp REAL NOT NULL,
status TEXT NOT NULL,
service TEXT,
quality_format TEXT,
quality_bitrate TEXT,
total_tracks INTEGER,
successful_tracks INTEGER,
failed_tracks INTEGER,
skipped_tracks INTEGER,
children_table TEXT,
task_id TEXT,
external_ids TEXT,
metadata TEXT,
release_date TEXT,
genres TEXT,
images TEXT,
owner TEXT,
album_type TEXT,
duration_total_ms INTEGER,
explicit BOOLEAN
);
CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
"""
WATCH_ARTISTS_SQL = """
CREATE TABLE IF NOT EXISTS watched_artists (
spotify_id TEXT PRIMARY KEY,
name TEXT,
link TEXT,
total_albums_on_spotify INTEGER,
last_checked INTEGER,
added_at INTEGER,
is_active INTEGER DEFAULT 1,
genres TEXT,
popularity INTEGER,
image_url TEXT
);
"""
WATCH_PLAYLISTS_SQL = """
CREATE TABLE IF NOT EXISTS watched_playlists (
spotify_id TEXT PRIMARY KEY,
name TEXT,
owner_id TEXT,
owner_name TEXT,
total_tracks INTEGER,
link TEXT,
snapshot_id TEXT,
last_checked INTEGER,
added_at INTEGER,
is_active INTEGER DEFAULT 1
);
"""
ACCOUNTS_SPOTIFY_SQL = """
CREATE TABLE IF NOT EXISTS spotify (
name TEXT PRIMARY KEY,
region TEXT,
created_at REAL,
updated_at REAL
);
"""
WATCH_ARTISTS_SQL = """
CREATE TABLE IF NOT EXISTS watched_artists (
spotify_id TEXT PRIMARY KEY,
name TEXT,
link TEXT,
total_albums_on_spotify INTEGER,
last_checked INTEGER,
added_at INTEGER,
is_active INTEGER DEFAULT 1,
genres TEXT,
popularity INTEGER,
image_url TEXT
);
"""
ACCOUNTS_DEEZER_SQL = """
CREATE TABLE IF NOT EXISTS deezer (
name TEXT PRIMARY KEY,
arl TEXT,
region TEXT,
created_at REAL,
updated_at REAL
);
"""
ACCOUNTS_SPOTIFY_SQL = """
CREATE TABLE IF NOT EXISTS spotify (
name TEXT PRIMARY KEY,
region TEXT,
created_at REAL,
updated_at REAL
);
"""
ACCOUNTS_DEEZER_SQL = """
CREATE TABLE IF NOT EXISTS deezer (
name TEXT PRIMARY KEY,
arl TEXT,
region TEXT,
created_at REAL,
updated_at REAL
);
"""
# --- Check functions ---
@staticmethod
def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
    """Best-effort set of column names for *table*; empty set on any error."""
    try:
        info = conn.execute(f"PRAGMA table_info({table})").fetchall()
    except Exception:
        return set()
    # Row layout is (cid, name, type, notnull, dflt_value, pk).
    return {entry[1] for entry in info}
def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
try:
cur = conn.execute(f"PRAGMA table_info({table})")
return {row[1] for row in cur.fetchall()}
except Exception:
return set()
# --- Checks ---
def check_history(self, conn: sqlite3.Connection) -> bool:
    """Return True when download_history exists with every 3.0.6 column."""
    probe = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'"
    )
    if probe.fetchone() is None:
        return False
    expected = {
        "id", "download_type", "title", "artists", "timestamp", "status",
        "service", "quality_format", "quality_bitrate", "total_tracks",
        "successful_tracks", "failed_tracks", "skipped_tracks",
        "children_table", "task_id", "external_ids", "metadata",
        "release_date", "genres", "images", "owner", "album_type",
        "duration_total_ms", "explicit",
    }
    return expected <= self._table_columns(conn, "download_history")
def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
    """Return True when watched_playlists exists with every 3.0.6 column."""
    if not conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'"
    ).fetchone():
        return False
    expected = {
        "spotify_id", "name", "owner_id", "owner_name", "total_tracks",
        "link", "snapshot_id", "last_checked", "added_at", "is_active",
    }
    return expected <= self._table_columns(conn, "watched_playlists")
def check_history_3_0_6(conn: sqlite3.Connection) -> bool:
"""Return True if history DB matches v3.0.6 schema for main table."""
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'"
)
if not cur.fetchone():
return False
required = {
"id",
"download_type",
"title",
"artists",
"timestamp",
"status",
"service",
"quality_format",
"quality_bitrate",
"total_tracks",
"successful_tracks",
"failed_tracks",
"skipped_tracks",
"children_table",
"task_id",
"external_ids",
"metadata",
"release_date",
"genres",
"images",
"owner",
"album_type",
"duration_total_ms",
"explicit",
}
return required.issubset(_table_columns(conn, "download_history"))
def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
    """Return True when watched_artists exists with every 3.0.6 column."""
    if not conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'"
    ).fetchone():
        return False
    expected = {
        "spotify_id", "name", "link", "total_albums_on_spotify",
        "last_checked", "added_at", "is_active", "genres", "popularity",
        "image_url",
    }
    return expected <= self._table_columns(conn, "watched_artists")
def check_accounts(self, conn: sqlite3.Connection) -> bool:
    """Return True when both the spotify and deezer tables match 3.0.6."""
    spotify_probe = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'"
    ).fetchone()
    if spotify_probe is None:
        return False
    if not {"name", "region", "created_at", "updated_at"} <= self._table_columns(conn, "spotify"):
        return False
    deezer_probe = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'"
    ).fetchone()
    if deezer_probe is None:
        return False
    return {"name", "arl", "region", "created_at", "updated_at"} <= self._table_columns(conn, "deezer")
def check_watch_playlists_3_0_6(conn: sqlite3.Connection) -> bool:
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'"
)
if not cur.fetchone():
return False
required = {
"spotify_id",
"name",
"owner_id",
"owner_name",
"total_tracks",
"link",
"snapshot_id",
"last_checked",
"added_at",
"is_active",
}
return required.issubset(_table_columns(conn, "watched_playlists"))
# --- Updates ---
def update_history(self, conn: sqlite3.Connection) -> None:
    """Apply the idempotent 3.0.6 history DDL (table plus its indexes)."""
    conn.executescript(self.HISTORY_SQL)
def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
    """Create the watched_playlists table if it is missing (idempotent DDL)."""
    conn.executescript(self.WATCH_PLAYLISTS_SQL)
def check_watch_artists_3_0_6(conn: sqlite3.Connection) -> bool:
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'"
)
if not cur.fetchone():
return False
required = {
"spotify_id",
"name",
"link",
"total_albums_on_spotify",
"last_checked",
"added_at",
"is_active",
"genres",
"popularity",
"image_url",
}
return required.issubset(_table_columns(conn, "watched_artists"))
def update_watch_artists(self, conn: sqlite3.Connection) -> None:
    """Create the watched_artists table if it is missing (idempotent DDL)."""
    conn.executescript(self.WATCH_ARTISTS_SQL)
def check_accounts_3_0_6(conn: sqlite3.Connection) -> bool:
"""Return True if accounts DB has both spotify and deezer tables with expected columns."""
# Spotify table
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'"
)
if not cur.fetchone():
return False
spotify_required = {"name", "region", "created_at", "updated_at"}
if not spotify_required.issubset(_table_columns(conn, "spotify")):
return False
# Deezer table
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'"
)
if not cur.fetchone():
return False
deezer_required = {"name", "arl", "region", "created_at", "updated_at"}
return deezer_required.issubset(_table_columns(conn, "deezer"))
# --- Update functions ---
def update_history_3_0_6(conn: sqlite3.Connection) -> None:
conn.executescript(HISTORY_SQL)
def update_watch_playlists_3_0_6(conn: sqlite3.Connection) -> None:
conn.executescript(WATCH_PLAYLISTS_SQL)
def update_watch_artists_3_0_6(conn: sqlite3.Connection) -> None:
conn.executescript(WATCH_ARTISTS_SQL)
def update_accounts_3_0_6(conn: sqlite3.Connection) -> None:
conn.executescript(ACCOUNTS_SPOTIFY_SQL)
conn.executescript(ACCOUNTS_DEEZER_SQL)
def update_accounts(self, conn: sqlite3.Connection) -> None:
    """Create the spotify and deezer account tables if they are missing."""
    for ddl in (self.ACCOUNTS_SPOTIFY_SQL, self.ACCOUNTS_DEEZER_SQL):
        conn.executescript(ddl)

331
routes/migrations/v3_1_0.py Normal file
View File

@@ -0,0 +1,331 @@
import sqlite3
class MigrationV3_1_0:
    """Schema checks and idempotent upgrades for the 3.1.0 database layout.

    Each ``check_*`` method reports whether a database already conforms to
    the 3.1.0 schema; the paired ``update_*`` method applies idempotent DDL
    (``CREATE TABLE IF NOT EXISTS`` plus additive ``ALTER TABLE``) to bring
    it up to date. No data is dropped or rewritten.
    """

    # --- Expected Schemas (3.1.0) ---
    HISTORY_MAIN_REQUIRED = {
        "id",
        "download_type",
        "title",
        "artists",
        "timestamp",
        "status",
        "service",
        "quality_format",
        "quality_bitrate",
        "total_tracks",
        "successful_tracks",
        "failed_tracks",
        "skipped_tracks",
        "children_table",
        "task_id",
        "external_ids",
        "metadata",
        "release_date",
        "genres",
        "images",
        "owner",
        "album_type",
        "duration_total_ms",
        "explicit",
    }

    HISTORY_MAIN_SQL = """
    CREATE TABLE IF NOT EXISTS download_history (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        download_type TEXT NOT NULL,
        title TEXT NOT NULL,
        artists TEXT,
        timestamp REAL NOT NULL,
        status TEXT NOT NULL,
        service TEXT,
        quality_format TEXT,
        quality_bitrate TEXT,
        total_tracks INTEGER,
        successful_tracks INTEGER,
        failed_tracks INTEGER,
        skipped_tracks INTEGER,
        children_table TEXT,
        task_id TEXT,
        external_ids TEXT,
        metadata TEXT,
        release_date TEXT,
        genres TEXT,
        images TEXT,
        owner TEXT,
        album_type TEXT,
        duration_total_ms INTEGER,
        explicit BOOLEAN
    );
    CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
    CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
    CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
    CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
    """

    # Children tables schema (album_% / playlist_%). Not referenced inside
    # this class; presumably consumed by the migration runner -- TODO confirm.
    HISTORY_CHILDREN_EXPECTED = {
        "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
        "title": "TEXT NOT NULL",
        "artists": "TEXT",
        "album_title": "TEXT",
        "duration_ms": "INTEGER",
        "track_number": "INTEGER",
        "disc_number": "INTEGER",
        "explicit": "BOOLEAN",
        "status": "TEXT NOT NULL",
        "external_ids": "TEXT",
        "genres": "TEXT",
        "isrc": "TEXT",
        "timestamp": "REAL NOT NULL",
        "position": "INTEGER",
        "metadata": "TEXT",
    }

    WATCH_PLAYLISTS_REQUIRED = {
        "spotify_id",
        "name",
        "owner_id",
        "owner_name",
        "total_tracks",
        "link",
        "snapshot_id",
        "last_checked",
        "added_at",
        "is_active",
    }

    # Per-playlist tracks table expected columns.
    PLAYLIST_TRACKS_EXPECTED = {
        "spotify_track_id": "TEXT PRIMARY KEY",
        "title": "TEXT",
        "artist_names": "TEXT",
        "album_name": "TEXT",
        "album_artist_names": "TEXT",
        "track_number": "INTEGER",
        "album_spotify_id": "TEXT",
        "duration_ms": "INTEGER",
        "added_at_playlist": "TEXT",
        "added_to_db": "INTEGER",
        "is_present_in_spotify": "INTEGER DEFAULT 1",
        "last_seen_in_spotify": "INTEGER",
        "snapshot_id": "TEXT",
        "final_path": "TEXT",
    }

    WATCH_ARTISTS_REQUIRED = {
        "spotify_id",
        "name",
        "link",
        "total_albums_on_spotify",
        "last_checked",
        "added_at",
        "is_active",
        "genres",
        "popularity",
        "image_url",
    }

    # Per-artist albums table expected columns.
    ARTIST_ALBUMS_EXPECTED = {
        "album_spotify_id": "TEXT PRIMARY KEY",
        "artist_spotify_id": "TEXT",
        "name": "TEXT",
        "album_group": "TEXT",
        "album_type": "TEXT",
        "release_date": "TEXT",
        "release_date_precision": "TEXT",
        "total_tracks": "INTEGER",
        "link": "TEXT",
        "image_url": "TEXT",
        "added_to_db": "INTEGER",
        "last_seen_on_spotify": "INTEGER",
        "download_task_id": "TEXT",
        "download_status": "INTEGER DEFAULT 0",
        "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
    }

    ACCOUNTS_SPOTIFY_REQUIRED = {"name", "region", "created_at", "updated_at"}
    ACCOUNTS_DEEZER_REQUIRED = {"name", "arl", "region", "created_at", "updated_at"}

    ACCOUNTS_SPOTIFY_SQL = """
    CREATE TABLE IF NOT EXISTS spotify (
        name TEXT PRIMARY KEY,
        region TEXT,
        created_at REAL,
        updated_at REAL
    );
    """

    ACCOUNTS_DEEZER_SQL = """
    CREATE TABLE IF NOT EXISTS deezer (
        name TEXT PRIMARY KEY,
        arl TEXT,
        region TEXT,
        created_at REAL,
        updated_at REAL
    );
    """

    @staticmethod
    def _columns(conn: sqlite3.Connection, table: str) -> set[str]:
        """Best-effort set of column names for *table*; empty set on any error."""
        try:
            info = conn.execute(f"PRAGMA table_info({table})").fetchall()
        except Exception:
            return set()
        # table_info rows are (cid, name, type, notnull, dflt_value, pk).
        return {entry[1] for entry in info}

    @staticmethod
    def _ensure_table_schema(conn: sqlite3.Connection, table_name: str, expected: dict[str, str], desc: str) -> None:
        """Add any *expected* columns missing from *table_name* via ALTER TABLE.

        *desc* is a human-readable label for the table; it is not used here
        currently -- kept for signature parity with callers.
        """
        present = {row[1] for row in conn.execute(f"PRAGMA table_info({table_name})").fetchall()}
        for column, declared_type in expected.items():
            if column in present:
                continue
            # SQLite refuses ADD COLUMN with PK/AUTOINCREMENT/NOT NULL
            # constraints, so strip them before altering.
            addable_type = (
                declared_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
            )
            try:
                conn.execute(f"ALTER TABLE {table_name} ADD COLUMN {column} {addable_type}")
            except sqlite3.OperationalError:
                # Best-effort: e.g. the column appeared concurrently.
                pass

    # --- Check methods ---
    def check_history(self, conn: sqlite3.Connection) -> bool:
        """Return True when download_history exists with all 3.1.0 columns."""
        if not conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'").fetchone():
            return False
        return self.HISTORY_MAIN_REQUIRED <= self._columns(conn, "download_history")

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        """Return True when watched_playlists and every playlist_% tracks table conform."""
        if not conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'").fetchone():
            return False
        if not self.WATCH_PLAYLISTS_REQUIRED <= self._columns(conn, "watched_playlists"):
            return False
        required = set(self.PLAYLIST_TRACKS_EXPECTED)
        tracked = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'").fetchall()
        return all(required <= self._columns(conn, name) for (name,) in tracked)

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        """Return True when watched_artists and every artist_% albums table conform."""
        if not conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'").fetchone():
            return False
        if not self.WATCH_ARTISTS_REQUIRED <= self._columns(conn, "watched_artists"):
            return False
        required = set(self.ARTIST_ALBUMS_EXPECTED)
        tracked = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'").fetchall()
        return all(required <= self._columns(conn, name) for (name,) in tracked)

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        """Return True when both the spotify and deezer tables match 3.1.0."""
        if not conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'").fetchone():
            return False
        if not self.ACCOUNTS_SPOTIFY_REQUIRED <= self._columns(conn, "spotify"):
            return False
        if not conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'").fetchone():
            return False
        return self.ACCOUNTS_DEEZER_REQUIRED <= self._columns(conn, "deezer")

    # --- Update methods ---
    def update_history(self, conn: sqlite3.Connection) -> None:
        """Apply the idempotent 3.1.0 history DDL (table plus its indexes)."""
        conn.executescript(self.HISTORY_MAIN_SQL)

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        """Create watched_playlists and align every playlist_% tracks table."""
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_playlists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                owner_id TEXT,
                owner_name TEXT,
                total_tracks INTEGER,
                link TEXT,
                snapshot_id TEXT,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1
            )
            """
        )
        tracked = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'").fetchall()
        for (tracks_table,) in tracked:
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {tracks_table} (
                    spotify_track_id TEXT PRIMARY KEY,
                    title TEXT,
                    artist_names TEXT,
                    album_name TEXT,
                    album_artist_names TEXT,
                    track_number INTEGER,
                    album_spotify_id TEXT,
                    duration_ms INTEGER,
                    added_at_playlist TEXT,
                    added_to_db INTEGER,
                    is_present_in_spotify INTEGER DEFAULT 1,
                    last_seen_in_spotify INTEGER,
                    snapshot_id TEXT,
                    final_path TEXT
                )
                """
            )
            # Additive migration for tables that already existed with an old layout.
            self._ensure_table_schema(conn, tracks_table, self.PLAYLIST_TRACKS_EXPECTED, f"playlist tracks {tracks_table}")

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        """Create watched_artists and align every artist_% albums table."""
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                link TEXT,
                total_albums_on_spotify INTEGER,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1,
                genres TEXT,
                popularity INTEGER,
                image_url TEXT
            )
            """
        )
        tracked = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'").fetchall()
        for (albums_table,) in tracked:
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {albums_table} (
                    album_spotify_id TEXT PRIMARY KEY,
                    artist_spotify_id TEXT,
                    name TEXT,
                    album_group TEXT,
                    album_type TEXT,
                    release_date TEXT,
                    release_date_precision TEXT,
                    total_tracks INTEGER,
                    link TEXT,
                    image_url TEXT,
                    added_to_db INTEGER,
                    last_seen_on_spotify INTEGER,
                    download_task_id TEXT,
                    download_status INTEGER DEFAULT 0,
                    is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
                )
                """
            )
            # Additive migration for tables that already existed with an old layout.
            self._ensure_table_schema(conn, albums_table, self.ARTIST_ALBUMS_EXPECTED, f"artist albums {albums_table}")

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        """Create the spotify and deezer account tables if they are missing."""
        for ddl in (self.ACCOUNTS_SPOTIFY_SQL, self.ACCOUNTS_DEEZER_SQL):
            conn.executescript(ddl)

View File

@@ -996,7 +996,7 @@ def init_artists_db():
def _create_artist_albums_table(artist_spotify_id: str):
"""Creates or updates a table for a specific artist to store their albums in artists.db."""
table_name = f"artist_{artist_spotify_id.replace('-', '_').replace(' ', '_')}" # Sanitize table name
table_name = f"artist_{artist_spotify_id.replace('-', '_').replace(' ', '_')}_albums" # Sanitize table name
try:
with _get_artists_db_connection() as conn: # Use artists connection
cursor = conn.cursor()