Migration from 3.0.6

Xoconoch
2025-08-17 11:45:28 -06:00
parent 3d2c98f59b
commit c5c5acc665
3 changed files with 100 additions and 2 deletions

View File

@@ -10,6 +10,8 @@ from .v3_0_6 import (
    update_history_3_0_6,
    update_watch_playlists_3_0_6,
    update_watch_artists_3_0_6,
    check_accounts_3_0_6,
    update_accounts_3_0_6,
)

logger = logging.getLogger(__name__)
@@ -20,6 +22,12 @@ WATCH_DIR = DATA_DIR / "watch"
PLAYLISTS_DB = WATCH_DIR / "playlists.db"
ARTISTS_DB = WATCH_DIR / "artists.db"
# Credentials
CREDS_DIR = DATA_DIR / "creds"
ACCOUNTS_DB = CREDS_DIR / "accounts.db"
BLOBS_DIR = CREDS_DIR / "blobs"
SEARCH_JSON = CREDS_DIR / "search.json"
# Expected children table columns for history (album_/playlist_)
CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
    "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
@@ -152,6 +160,17 @@ def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
logger.error("Failed migrating children history tables", exc_info=True) logger.error("Failed migrating children history tables", exc_info=True)
def _ensure_creds_filesystem() -> None:
"""Ensure blobs directory and search.json exist."""
try:
BLOBS_DIR.mkdir(parents=True, exist_ok=True)
if not SEARCH_JSON.exists():
SEARCH_JSON.write_text('{ "client_id": "", "client_secret": "" }\n', encoding="utf-8")
logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
except Exception:
logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
def run_migrations_if_needed() -> None:
    """Detect and apply necessary migrations by version for each DB.

    Idempotent by design.
@@ -187,6 +206,18 @@ def run_migrations_if_needed() -> None:
                    a_conn.commit()
            finally:
                a_conn.close()

        # Credentials accounts DB and files
        c_conn = _safe_connect(ACCOUNTS_DB)
        if c_conn:
            try:
                if not check_accounts_3_0_6(c_conn):
                    update_accounts_3_0_6(c_conn)
                    c_conn.commit()
            finally:
                c_conn.close()

        _ensure_creds_filesystem()

        logger.info("Database migrations check completed")
    except Exception:
        logger.error("Database migration failed", exc_info=True)

View File

@@ -63,6 +63,25 @@ CREATE TABLE IF NOT EXISTS watched_artists (
);
"""
ACCOUNTS_SPOTIFY_SQL = """
CREATE TABLE IF NOT EXISTS spotify (
    name TEXT PRIMARY KEY,
    region TEXT,
    created_at REAL,
    updated_at REAL
);
"""

ACCOUNTS_DEEZER_SQL = """
CREATE TABLE IF NOT EXISTS deezer (
    name TEXT PRIMARY KEY,
    arl TEXT,
    region TEXT,
    created_at REAL,
    updated_at REAL
);
"""
# --- Check functions ---
@@ -152,6 +171,27 @@ def check_watch_artists_3_0_6(conn: sqlite3.Connection) -> bool:
    return required.issubset(_table_columns(conn, "watched_artists"))

def check_accounts_3_0_6(conn: sqlite3.Connection) -> bool:
    """Return True if accounts DB has both spotify and deezer tables with expected columns."""
    # Spotify table
    cur = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='spotify'"
    )
    if not cur.fetchone():
        return False
    spotify_required = {"name", "region", "created_at", "updated_at"}
    if not spotify_required.issubset(_table_columns(conn, "spotify")):
        return False

    # Deezer table
    cur = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='deezer'"
    )
    if not cur.fetchone():
        return False
    deezer_required = {"name", "arl", "region", "created_at", "updated_at"}
    return deezer_required.issubset(_table_columns(conn, "deezer"))
# --- Update functions ---

def update_history_3_0_6(conn: sqlite3.Connection) -> None:
@@ -163,4 +203,9 @@ def update_watch_playlists_3_0_6(conn: sqlite3.Connection) -> None:
def update_watch_artists_3_0_6(conn: sqlite3.Connection) -> None:
    conn.executescript(WATCH_ARTISTS_SQL)

def update_accounts_3_0_6(conn: sqlite3.Connection) -> None:
    conn.executescript(ACCOUNTS_SPOTIFY_SQL)
    conn.executescript(ACCOUNTS_DEEZER_SQL)
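
Note: each check/update pair above is designed to be idempotent: the check inspects sqlite_master and column sets, and the update only runs CREATE TABLE IF NOT EXISTS scripts. A minimal sketch of that round trip against a throwaway in-memory database, assuming the accounts functions above are in scope and with the module's `_table_columns` helper (referenced but not shown in this diff) stubbed via PRAGMA table_info:

import sqlite3

def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
    # Stand-in for the module's helper: column names from PRAGMA table_info.
    return {row[1] for row in conn.execute(f"PRAGMA table_info({table})")}

conn = sqlite3.connect(":memory:")
assert not check_accounts_3_0_6(conn)  # fresh DB: tables missing, check fails
update_accounts_3_0_6(conn)            # creates the spotify and deezer tables
conn.commit()
assert check_accounts_3_0_6(conn)      # schema now present, check passes
update_accounts_3_0_6(conn)            # re-running is safe: IF NOT EXISTS
conn.close()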

View File

@@ -1,6 +1,7 @@
import sqlite3
from pathlib import Path
import pytest
import json
# Override the autouse credentials fixture from conftest for this module
@pytest.fixture(scope="session", autouse=True)
@@ -173,6 +174,10 @@ def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypa
    history_db = data_dir / "history" / "download_history.db"
    playlists_db = data_dir / "watch" / "playlists.db"
    artists_db = data_dir / "watch" / "artists.db"
    creds_dir = data_dir / "creds"
    accounts_db = creds_dir / "accounts.db"
    blobs_dir = creds_dir / "blobs"
    search_json = creds_dir / "search.json"

    # Create 3.0.6 base schemas and sample data
    _create_306_history_db(history_db)
@@ -185,6 +190,10 @@ def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypa
monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch") monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch")
monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db) monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db)
monkeypatch.setattr(runner, "ARTISTS_DB", artists_db) monkeypatch.setattr(runner, "ARTISTS_DB", artists_db)
monkeypatch.setattr(runner, "CREDS_DIR", creds_dir)
monkeypatch.setattr(runner, "ACCOUNTS_DB", accounts_db)
monkeypatch.setattr(runner, "BLOBS_DIR", blobs_dir)
monkeypatch.setattr(runner, "SEARCH_JSON", search_json)
# Act: run migrations # Act: run migrations
runner.run_migrations_if_needed() runner.run_migrations_if_needed()
@@ -212,4 +221,17 @@ def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypa
assert _get_columns(history_db, "album_test1").issuperset(expected_children_cols) assert _get_columns(history_db, "album_test1").issuperset(expected_children_cols)
assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols) assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols)
# Legacy table upgraded # Legacy table upgraded
assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols) assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols)
# Assert: accounts DB created with expected tables and columns
assert accounts_db.exists()
spotify_cols = _get_columns(accounts_db, "spotify")
deezer_cols = _get_columns(accounts_db, "deezer")
assert {"name", "region", "created_at", "updated_at"}.issubset(spotify_cols)
assert {"name", "arl", "region", "created_at", "updated_at"}.issubset(deezer_cols)
# Assert: creds filesystem
assert blobs_dir.exists() and blobs_dir.is_dir()
assert search_json.exists()
data = json.loads(search_json.read_text())
assert set(data.keys()) == {"client_id", "client_secret"}
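
Note: the final assertion pins down the default `search.json` payload written by `_ensure_creds_filesystem` in the first file: a two-key object with empty credentials. A standalone check of that exact literal, with no project imports involved:

import json

# Exact default payload written by _ensure_creds_filesystem above.
payload = '{ "client_id": "", "client_secret": "" }\n'
data = json.loads(payload)

assert set(data.keys()) == {"client_id", "client_secret"}
assert data["client_id"] == "" and data["client_secret"] == ""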