Add database migration from 3.1.0 -> 3.1.2

Xoconoch
2025-08-17 14:02:24 -06:00
parent 2ffd64c047
commit 28d1272fde
5 changed files with 172 additions and 13 deletions

.gitignore

@@ -41,3 +41,8 @@ logs/
 Test.py
 spotizerr-ui/dev-dist
 celerybeat-schedule
+data.*/
+3.1.0.md
+3.0.6.md
+3.1.2.md
+sqltree.sh


@@ -1,7 +1,7 @@
 name: spotizerr
 services:
   spotizerr:
-    image: cooldockerizer93/spotizerr:3.0.6
+    image: cooldockerizer93/spotizerr:3.1.0
     volumes:
       - ./data:/app/data
       - ./downloads:/app/downloads
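With the image pinned to an explicit tag, existing deployments pick up this bump by re-pulling after the change lands, i.e. docker compose pull followed by docker compose up -d.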


@@ -4,6 +4,7 @@ from pathlib import Path
 from typing import Optional
 from .v3_0_6 import MigrationV3_0_6
+from .v3_1_0 import MigrationV3_1_0

 logger = logging.getLogger(__name__)

@@ -101,6 +102,7 @@ EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
 }

 m306 = MigrationV3_0_6()
+m310 = MigrationV3_1_0()


 def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:

@@ -354,18 +356,17 @@ def run_migrations_if_needed() -> None:
         p_conn.close()

     # Watch artists DB
-    a_conn = _safe_connect(ARTISTS_DB)
-    if a_conn:
-        try:
-            _apply_versioned_updates(
-                a_conn,
-                m306.check_watch_artists,
-                m306.update_watch_artists,
-            )
-            _update_watch_artists_db(a_conn)
-            a_conn.commit()
-        finally:
-            a_conn.close()
+    if ARTISTS_DB.exists():
+        with _safe_connect(ARTISTS_DB) as conn:
+            if conn:
+                _apply_versioned_updates(
+                    conn, m306.check_watch_artists, m306.update_watch_artists
+                )
+                _apply_versioned_updates(
+                    conn, m310.check_watch_artists, m310.update_watch_artists
+                )
+                _update_watch_artists_db(conn)
+                conn.commit()

     # Accounts DB
     c_conn = _safe_connect(ACCOUNTS_DB)
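For context, the hunk above only shows the call sites of _apply_versioned_updates; the helper itself is defined elsewhere in this file, so the sketch below is an assumption inferred from those call sites, not the real implementation:

import sqlite3
from typing import Callable

def _apply_versioned_updates(
    conn: sqlite3.Connection,
    check: Callable[[sqlite3.Connection], bool],
    update: Callable[[sqlite3.Connection], None],
) -> None:
    # Sketch: check() returning True means the schema is already current,
    # so the update runs only when the check fails.
    if not check(conn):
        update(conn)

One caveat with the new form: a sqlite3.Connection used as a context manager commits or rolls back the pending transaction on exit but does not close the connection, and if _safe_connect ever returns None here, the with statement itself raises before the "if conn:" guard can run.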

routes/migrations/v3_1_0.py

@@ -0,0 +1,88 @@
import sqlite3
import logging

logger = logging.getLogger(__name__)


class MigrationV3_1_0:
    ARTIST_ALBUMS_EXPECTED_COLUMNS: dict[str, str] = {
        "album_spotify_id": "TEXT PRIMARY KEY",
        "artist_spotify_id": "TEXT",
        "name": "TEXT",
        "album_group": "TEXT",
        "album_type": "TEXT",
        "release_date": "TEXT",
        "release_date_precision": "TEXT",
        "total_tracks": "INTEGER",
        "link": "TEXT",
        "image_url": "TEXT",
        "added_to_db": "INTEGER",
        "last_seen_on_spotify": "INTEGER",
        "download_task_id": "TEXT",
        "download_status": "INTEGER DEFAULT 0",
        "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
    }

    def _table_columns(self, conn: sqlite3.Connection, table: str) -> set[str]:
        try:
            cur = conn.execute(f"PRAGMA table_info({table})")
            return {row[1] for row in cur.fetchall()}
        except sqlite3.OperationalError:
            return set()

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        """Checks if the artist-specific tables have the new columns."""
        try:
            cur = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%' LIMIT 1"
            )
            first_artist_table = cur.fetchone()
            if not first_artist_table:
                return True  # No artist tables, so no migration needed
            table_name = first_artist_table[0]
            existing_columns = self._table_columns(conn, table_name)
            required_columns = self.ARTIST_ALBUMS_EXPECTED_COLUMNS.keys()
            return set(required_columns).issubset(existing_columns)
        except Exception as e:
            logger.error(f"Error checking artist watch DB schema: {e}")
            return False

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        """Updates all artist-specific tables with new columns."""
        try:
            cur = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'"
            )
            artist_tables = cur.fetchall()
            for row in artist_tables:
                table_name = row[0]
                existing_columns = self._table_columns(conn, table_name)
                for col_name, col_type in self.ARTIST_ALBUMS_EXPECTED_COLUMNS.items():
                    if col_name in existing_columns:
                        continue
                    try:
                        # Remove constraints for ADD COLUMN
                        col_type_for_add = (
                            col_type.replace("PRIMARY KEY", "")
                            .replace("AUTOINCREMENT", "")
                            .replace("NOT NULL", "")
                            .strip()
                        )
                        conn.execute(
                            f'ALTER TABLE "{table_name}" ADD COLUMN {col_name} {col_type_for_add}'
                        )
                        logger.info(
                            f"Added column '{col_name}' to table '{table_name}' in artists.db."
                        )
                    except sqlite3.OperationalError as e:
                        logger.warning(
                            f"Could not add column '{col_name}' to table '{table_name}': {e}"
                        )
        except Exception as e:
            logger.error(f"Failed to update artist watch DB: {e}", exc_info=True)


@@ -0,0 +1,65 @@
import sqlite3
from pathlib import Path

import pytest

from routes.migrations.v3_1_0 import MigrationV3_1_0


# Override the autouse credentials fixture from conftest for this module
@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests():
    # No-op to avoid external API calls
    yield


def _create_310_watch_artists_db(db_path: Path) -> None:
    db_path.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(db_path)) as conn:
        conn.executescript(
            """
            CREATE TABLE watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT
            );
            CREATE TABLE "artist_a1b2c3" (
                album_spotify_id TEXT PRIMARY KEY,
                artist_spotify_id TEXT,
                name TEXT,
                album_type TEXT,
                release_date TEXT,
                total_tracks INTEGER,
                link TEXT,
                image_url TEXT,
                added_to_db INTEGER,
                last_seen_on_spotify INTEGER
            );
            """
        )
        conn.execute("INSERT INTO watched_artists (spotify_id) VALUES (?)", ("a1b2c3",))


def test_watch_artists_migration(tmp_path):
    # 1. Setup mock v3.1.0 database
    db_path = tmp_path / "artists.db"
    _create_310_watch_artists_db(db_path)

    # 2. Run the migration
    migration = MigrationV3_1_0()
    with sqlite3.connect(db_path) as conn:
        # Sanity check before migration
        cur = conn.execute('PRAGMA table_info("artist_a1b2c3")')
        columns_before = {row[1] for row in cur.fetchall()}
        assert "download_status" not in columns_before

        # Apply migration
        migration.update_watch_artists(conn)

        # 3. Assert migration was successful
        cur = conn.execute('PRAGMA table_info("artist_a1b2c3")')
        columns_after = {row[1] for row in cur.fetchall()}
        expected_columns = migration.ARTIST_ALBUMS_EXPECTED_COLUMNS.keys()
        assert set(expected_columns).issubset(columns_after)
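A complementary assertion, not part of the commit, that follows from the class's contract: once update_watch_artists has run, check_watch_artists should report the schema as current. A sketch reusing the helper above:

def test_check_passes_after_migration(tmp_path):
    # Not in the commit: verify the check/update pair agree with each other.
    db_path = tmp_path / "artists.db"
    _create_310_watch_artists_db(db_path)
    migration = MigrationV3_1_0()
    with sqlite3.connect(db_path) as conn:
        assert not migration.check_watch_artists(conn)  # columns still missing
        migration.update_watch_artists(conn)
        assert migration.check_watch_artists(conn)  # schema now current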