Add migration scripts for 3.0.6
app.py (412 lines changed)
@@ -14,6 +14,14 @@ import redis
 import socket
 from urllib.parse import urlparse

+# Run DB migrations as early as possible, before importing any routers that may touch DBs
+try:
+    from routes.migrations import run_migrations_if_needed
+    run_migrations_if_needed()
+    logging.getLogger(__name__).info("Database migrations executed (if needed) early in startup.")
+except Exception as e:
+    logging.getLogger(__name__).error(f"Database migration step failed early in startup: {e}", exc_info=True)
+
 # Import route routers (to be created)
 from routes.auth.credentials import router as credentials_router
 from routes.auth.auth import router as auth_router
@@ -41,246 +49,246 @@ import routes
 # Configure application-wide logging
 def setup_logging():
     """Configure application-wide logging with rotation"""
     # Create logs directory if it doesn't exist
     logs_dir = Path("logs")
     logs_dir.mkdir(exist_ok=True)

     # Set up log file paths
     main_log = logs_dir / "spotizerr.log"

     # Configure root logger
     root_logger = logging.getLogger()
     root_logger.setLevel(logging.DEBUG)

     # Clear any existing handlers from the root logger
     if root_logger.hasHandlers():
         root_logger.handlers.clear()

     # Log formatting
     log_format = logging.Formatter(
         "%(asctime)s [%(levelname)s] %(message)s",
         datefmt="%Y-%m-%d %H:%M:%S",
     )

     # File handler with rotation (10 MB max, keep 5 backups)
     file_handler = logging.handlers.RotatingFileHandler(
         main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
     )
     file_handler.setFormatter(log_format)
     file_handler.setLevel(logging.INFO)

     # Console handler for stderr
     console_handler = logging.StreamHandler(sys.stderr)
     console_handler.setFormatter(log_format)
     console_handler.setLevel(logging.INFO)

     # Add handlers to root logger
     root_logger.addHandler(file_handler)
     root_logger.addHandler(console_handler)

     # Set up specific loggers
     for logger_name in [
         "routes",
         "routes.utils",
         "routes.utils.celery_manager",
         "routes.utils.celery_tasks",
         "routes.utils.watch",
     ]:
         logger = logging.getLogger(logger_name)
         logger.setLevel(logging.INFO)
         logger.propagate = True  # Propagate to root logger

     logging.info("Logging system initialized")


 def check_redis_connection():
     """Check if Redis is available and accessible"""
     if not REDIS_URL:
         logging.error("REDIS_URL is not configured. Please check your environment.")
         return False

     try:
         # Parse Redis URL
         parsed_url = urlparse(REDIS_URL)
         host = parsed_url.hostname or "localhost"
         port = parsed_url.port or 6379

         logging.info(f"Testing Redis connection to {host}:{port}...")

         # Test socket connection first
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         sock.settimeout(5)
         result = sock.connect_ex((host, port))
         sock.close()

         if result != 0:
             logging.error(f"Cannot connect to Redis at {host}:{port}")
             return False

         # Test Redis client connection
         r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
         r.ping()
         logging.info("Redis connection successful")
         return True

     except redis.ConnectionError as e:
         logging.error(f"Redis connection error: {e}")
         return False
     except redis.TimeoutError as e:
         logging.error(f"Redis timeout error: {e}")
         return False
     except Exception as e:
         logging.error(f"Unexpected error checking Redis connection: {e}")
         return False


 @asynccontextmanager
 async def lifespan(app: FastAPI):
     """Handle application startup and shutdown"""
     # Startup
     setup_logging()

     # Check Redis connection
     if not check_redis_connection():
         logging.error("Failed to connect to Redis. Please ensure Redis is running and accessible.")
         # Don't exit, but warn - some functionality may not work

     # Start Celery workers
     try:
         celery_manager.start()
         logging.info("Celery workers started successfully")
     except Exception as e:
         logging.error(f"Failed to start Celery workers: {e}")

     yield

     # Shutdown
     try:
         celery_manager.stop()
         logging.info("Celery workers stopped")
     except Exception as e:
         logging.error(f"Error stopping Celery workers: {e}")


 def create_app():
     app = FastAPI(
         title="Spotizerr API",
         description="Music download service API",
         version="3.0.0",
         lifespan=lifespan,
         redirect_slashes=True  # Enable automatic trailing slash redirects
     )

     # Set up CORS
     app.add_middleware(
         CORSMiddleware,
         allow_origins=["*"],
         allow_credentials=True,
         allow_methods=["*"],
         allow_headers=["*"],
     )

     # Add authentication middleware (only if auth is enabled)
     if AUTH_ENABLED:
         app.add_middleware(AuthMiddleware)
         logging.info("Authentication system enabled")
     else:
         logging.info("Authentication system disabled")

     # Register routers with URL prefixes
     app.include_router(auth_router, prefix="/api/auth", tags=["auth"])

     # Include SSO router if available
     try:
         from routes.auth.sso import router as sso_router
         app.include_router(sso_router, prefix="/api/auth", tags=["sso"])
         logging.info("SSO functionality enabled")
     except ImportError as e:
         logging.warning(f"SSO functionality not available: {e}")

     app.include_router(config_router, prefix="/api/config", tags=["config"])
     app.include_router(search_router, prefix="/api/search", tags=["search"])
     app.include_router(credentials_router, prefix="/api/credentials", tags=["credentials"])
     app.include_router(album_router, prefix="/api/album", tags=["album"])
     app.include_router(track_router, prefix="/api/track", tags=["track"])
     app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
     app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
     app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
     app.include_router(history_router, prefix="/api/history", tags=["history"])

     # Add request logging middleware
     @app.middleware("http")
     async def log_requests(request: Request, call_next):
         start_time = time.time()

         # Log request
         logger = logging.getLogger("uvicorn.access")
         logger.debug(f"Request: {request.method} {request.url.path}")

         try:
             response = await call_next(request)

             # Log response
             duration = round((time.time() - start_time) * 1000, 2)
             logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")

             return response
         except Exception as e:
             # Log errors
             logger.error(f"Server error: {str(e)}", exc_info=True)
             raise HTTPException(status_code=500, detail="Internal Server Error")

     # Mount static files for React app
     if os.path.exists("spotizerr-ui/dist"):
         app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")

         # Serve React App - catch-all route for SPA (but not for API routes)
         @app.get("/{full_path:path}")
         async def serve_react_app(full_path: str):
             """Serve React app with fallback to index.html for SPA routing"""
             static_dir = "spotizerr-ui/dist"

             # Don't serve React app for API routes (more specific check)
             if full_path.startswith("api") or full_path.startswith("api/"):
                 raise HTTPException(status_code=404, detail="API endpoint not found")

             # If it's a file that exists, serve it
             if full_path and os.path.exists(os.path.join(static_dir, full_path)):
                 return FileResponse(os.path.join(static_dir, full_path))
             else:
                 # Fallback to index.html for SPA routing
                 return FileResponse(os.path.join(static_dir, "index.html"))
     else:
         logging.warning("React app build directory not found at spotizerr-ui/dist")

     return app


 def start_celery_workers():
     """Start Celery workers with dynamic configuration"""
     # This function is now handled by the lifespan context manager
     # and the celery_manager.start() call
     pass


 if __name__ == "__main__":
     import uvicorn

     app = create_app()

     # Use HOST environment variable if present, otherwise fall back to IPv4 wildcard
     host = os.getenv("HOST", "0.0.0.0")

     # Allow overriding port via PORT env var, with default 7171
     try:
         port = int(os.getenv("PORT", "7171"))
     except ValueError:
         port = 7171

     uvicorn.run(
         app,
         host=host,
         port=port,
         log_level="info",
         access_log=True
     )
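An aside on the factory pattern above: because create_app() builds a fresh application per call, it can be smoke-tested without starting uvicorn. A minimal sketch (not part of this commit) using FastAPI's TestClient; outside a `with` block the client does not run the lifespan handler, so the Redis and Celery startup steps are skipped, though importing `app` still triggers the early-migration guard added in this commit:

from fastapi.testclient import TestClient

from app import create_app

client = TestClient(create_app())

# Unknown API paths should return 404, via the catch-all guard
# (or FastAPI's default handler when the React build directory is absent).
assert client.get("/api/does-not-exist").status_code == 404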
@@ -1,7 +1,7 @@
 name: spotizerr
 services:
   spotizerr:
-    image: cooldockerizer93/spotizerr
+    image: cooldockerizer93/spotizerr:3.0.6
     volumes:
       - ./data:/app/data
       - ./downloads:/app/downloads
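The compose change pins the image to the tagged 3.0.6 release instead of the bare repository reference (which implicitly tracks latest), so a routine `docker compose pull` brings in exactly the image that ships these migration scripts.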
@@ -9,6 +9,14 @@ logging.basicConfig(
 logger = logging.getLogger(__name__)

+# Run DB migrations early so other modules see expected schemas
+try:
+    from routes.migrations import run_migrations_if_needed
+    run_migrations_if_needed()
+    logger.info("Database migrations executed (if needed).")
+except Exception as e:
+    logger.error(f"Database migration step failed: {e}", exc_info=True)
+
 try:
     from routes.utils.watch.manager import start_watch_manager, stop_watch_manager
routes/migrations/3.0.6.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+import sqlite3
+
+HISTORY_SQL = """
+CREATE TABLE IF NOT EXISTS download_history (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    download_type TEXT NOT NULL,
+    title TEXT NOT NULL,
+    artists TEXT,
+    timestamp REAL NOT NULL,
+    status TEXT NOT NULL,
+    service TEXT,
+    quality_format TEXT,
+    quality_bitrate TEXT,
+    total_tracks INTEGER,
+    successful_tracks INTEGER,
+    failed_tracks INTEGER,
+    skipped_tracks INTEGER,
+    children_table TEXT,
+    task_id TEXT,
+    external_ids TEXT,
+    metadata TEXT,
+    release_date TEXT,
+    genres TEXT,
+    images TEXT,
+    owner TEXT,
+    album_type TEXT,
+    duration_total_ms INTEGER,
+    explicit BOOLEAN
+);
+CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
+CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
+CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
+CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
+"""
+
+WATCH_PLAYLISTS_SQL = """
+CREATE TABLE IF NOT EXISTS watched_playlists (
+    spotify_id TEXT PRIMARY KEY,
+    name TEXT,
+    owner_id TEXT,
+    owner_name TEXT,
+    total_tracks INTEGER,
+    link TEXT,
+    snapshot_id TEXT,
+    last_checked INTEGER,
+    added_at INTEGER,
+    is_active INTEGER DEFAULT 1
+);
+"""
+
+WATCH_ARTISTS_SQL = """
+CREATE TABLE IF NOT EXISTS watched_artists (
+    spotify_id TEXT PRIMARY KEY,
+    name TEXT,
+    link TEXT,
+    total_albums_on_spotify INTEGER,
+    last_checked INTEGER,
+    added_at INTEGER,
+    is_active INTEGER DEFAULT 1,
+    genres TEXT,
+    popularity INTEGER,
+    image_url TEXT
+);
+"""
+
+
+def apply_history(conn: sqlite3.Connection) -> None:
+    conn.executescript(HISTORY_SQL)
+
+
+def apply_watch_playlists(conn: sqlite3.Connection) -> None:
+    conn.executescript(WATCH_PLAYLISTS_SQL)
+
+
+def apply_watch_artists(conn: sqlite3.Connection) -> None:
+    conn.executescript(WATCH_ARTISTS_SQL)
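A note on using this module directly: the dotted file name means `routes.migrations.3.0.6` cannot be imported with a normal import statement, which is presumably why runner.py (below) inlines the same SQL rather than importing it. A sketch of loading it explicitly via stdlib importlib and applying one baseline to a throwaway database (the alias migration_306 is made up for this example):

import importlib.util
import sqlite3

# Load 3.0.6.py under an importable alias; assumes the repository root is the CWD
spec = importlib.util.spec_from_file_location("migration_306", "routes/migrations/3.0.6.py")
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)

# Apply the history baseline to an in-memory database and inspect the schema
with sqlite3.connect(":memory:") as conn:
    mod.apply_history(conn)
    cols = {row[1] for row in conn.execute("PRAGMA table_info(download_history)")}

assert {"task_id", "children_table", "duration_total_ms"} <= cols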
routes/migrations/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
+# Expose the migration runner entrypoint
+from .runner import run_migrations_if_needed  # noqa: F401
routes/migrations/runner.py (new file, 350 lines)
@@ -0,0 +1,350 @@
+import logging
+import sqlite3
+from pathlib import Path
+from typing import Optional
+
+logger = logging.getLogger(__name__)
+
+DATA_DIR = Path("./data")
+HISTORY_DB = DATA_DIR / "history" / "download_history.db"
+WATCH_DIR = DATA_DIR / "watch"
+PLAYLISTS_DB = WATCH_DIR / "playlists.db"
+ARTISTS_DB = WATCH_DIR / "artists.db"
+
+# Expected children table columns for history (album_/playlist_)
+CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
+    "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
+    "title": "TEXT NOT NULL",
+    "artists": "TEXT",
+    "album_title": "TEXT",
+    "duration_ms": "INTEGER",
+    "track_number": "INTEGER",
+    "disc_number": "INTEGER",
+    "explicit": "BOOLEAN",
+    "status": "TEXT NOT NULL",
+    "external_ids": "TEXT",
+    "genres": "TEXT",
+    "isrc": "TEXT",
+    "timestamp": "REAL NOT NULL",
+    "position": "INTEGER",
+    "metadata": "TEXT",
+}
+
+
+def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
+    try:
+        path.parent.mkdir(parents=True, exist_ok=True)
+        conn = sqlite3.connect(str(path))
+        conn.row_factory = sqlite3.Row
+        return conn
+    except Exception as e:
+        logger.error(f"Failed to open SQLite DB {path}: {e}")
+        return None
+
+
+def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
+    try:
+        cur = conn.execute(f"PRAGMA table_info({table})")
+        return {row[1] for row in cur.fetchall()}
+    except Exception:
+        return set()
+
+
+def _ensure_table_schema(
+    conn: sqlite3.Connection,
+    table_name: str,
+    expected_columns: dict[str, str],
+    table_description: str,
+) -> None:
+    """Ensure the given table has all expected columns, adding any missing columns safely."""
+    try:
+        cur = conn.execute(f"PRAGMA table_info({table_name})")
+        existing_info = cur.fetchall()
+        existing_names = {row[1] for row in existing_info}
+        for col_name, col_type in expected_columns.items():
+            if col_name in existing_names:
+                continue
+            # Strip PK/NOT NULL when altering existing table to avoid errors
+            col_type_for_add = (
+                col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
+            )
+            try:
+                conn.execute(
+                    f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
+                )
+                logger.info(
+                    f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
+                )
+            except sqlite3.OperationalError as e:
+                logger.warning(
+                    f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
+                )
+    except Exception as e:
+        logger.error(
+            f"Error ensuring schema for {table_description} table '{table_name}': {e}",
+            exc_info=True,
+        )
+
+
+def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
+    """Create children table if missing and ensure it has all expected columns."""
+    conn.execute(
+        f"""
+        CREATE TABLE IF NOT EXISTS {table_name} (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            title TEXT NOT NULL,
+            artists TEXT,
+            album_title TEXT,
+            duration_ms INTEGER,
+            track_number INTEGER,
+            disc_number INTEGER,
+            explicit BOOLEAN,
+            status TEXT NOT NULL,
+            external_ids TEXT,
+            genres TEXT,
+            isrc TEXT,
+            timestamp REAL NOT NULL,
+            position INTEGER,
+            metadata TEXT
+        )
+        """
+    )
+    _ensure_table_schema(conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history")
+
+
+def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
+    """Ensure all existing children tables and referenced children tables conform to expected schema."""
+    try:
+        # Create or update any tables referenced by download_history.children_table
+        try:
+            cur = conn.execute(
+                "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
+            )
+            for row in cur.fetchall():
+                table_name = row[0]
+                if not table_name:
+                    continue
+                _create_or_update_children_table(conn, table_name)
+        except sqlite3.Error as e:
+            logger.warning(f"Failed to scan referenced children tables from main history: {e}")
+
+        # Find any legacy children tables by name pattern album_% or playlist_%
+        try:
+            cur = conn.execute(
+                "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
+            )
+            for row in cur.fetchall():
+                table_name = row[0]
+                _create_or_update_children_table(conn, table_name)
+        except sqlite3.Error as e:
+            logger.warning(f"Failed to scan legacy children tables in history DB: {e}")
+        logger.info("Children history tables migration ensured")
+    except Exception:
+        logger.error("Failed migrating children history tables", exc_info=True)
+
+
+def _history_needs_306(conn: sqlite3.Connection) -> bool:
+    """Detect if history DB needs 3.0.6 schema (missing columns or tables)."""
+    # If table missing entirely, we definitely need to create it
+    cur = conn.execute(
+        "SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'"
+    )
+    row = cur.fetchone()
+    if not row:
+        return True
+    cols = _table_columns(conn, "download_history")
+    required = {
+        "id",
+        "download_type",
+        "title",
+        "artists",
+        "timestamp",
+        "status",
+        "service",
+        "quality_format",
+        "quality_bitrate",
+        "total_tracks",
+        "successful_tracks",
+        "failed_tracks",
+        "skipped_tracks",
+        "children_table",
+        "task_id",
+        "external_ids",
+        "metadata",
+        "release_date",
+        "genres",
+        "images",
+        "owner",
+        "album_type",
+        "duration_total_ms",
+        "explicit",
+    }
+    return not required.issubset(cols)
+
+
+def _watch_playlists_needs_306(conn: sqlite3.Connection) -> bool:
+    cur = conn.execute(
+        "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_playlists'"
+    )
+    row = cur.fetchone()
+    if not row:
+        return True
+    cols = _table_columns(conn, "watched_playlists")
+    required = {
+        "spotify_id",
+        "name",
+        "owner_id",
+        "owner_name",
+        "total_tracks",
+        "link",
+        "snapshot_id",
+        "last_checked",
+        "added_at",
+        "is_active",
+    }
+    return not required.issubset(cols)
+
+
+def _watch_artists_needs_306(conn: sqlite3.Connection) -> bool:
+    cur = conn.execute(
+        "SELECT name FROM sqlite_master WHERE type='table' AND name='watched_artists'"
+    )
+    row = cur.fetchone()
+    if not row:
+        return True
+    cols = _table_columns(conn, "watched_artists")
+    required = {
+        "spotify_id",
+        "name",
+        "link",
+        "total_albums_on_spotify",
+        "last_checked",
+        "added_at",
+        "is_active",
+        "genres",
+        "popularity",
+        "image_url",
+    }
+    return not required.issubset(cols)
+
+
+def _apply_history_306(conn: sqlite3.Connection) -> None:
+    logger.info("Applying 3.0.6 migration for history DB")
+    conn.executescript(
+        """
+        CREATE TABLE IF NOT EXISTS download_history (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            download_type TEXT NOT NULL,
+            title TEXT NOT NULL,
+            artists TEXT,
+            timestamp REAL NOT NULL,
+            status TEXT NOT NULL,
+            service TEXT,
+            quality_format TEXT,
+            quality_bitrate TEXT,
+            total_tracks INTEGER,
+            successful_tracks INTEGER,
+            failed_tracks INTEGER,
+            skipped_tracks INTEGER,
+            children_table TEXT,
+            task_id TEXT,
+            external_ids TEXT,
+            metadata TEXT,
+            release_date TEXT,
+            genres TEXT,
+            images TEXT,
+            owner TEXT,
+            album_type TEXT,
+            duration_total_ms INTEGER,
+            explicit BOOLEAN
+        );
+        CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
+        CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
+        CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
+        CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
+        """
+    )
+    # After ensuring main table, also ensure children tables
+    _update_children_tables_for_history(conn)
+
+
+def _apply_watch_playlists_306(conn: sqlite3.Connection) -> None:
+    logger.info("Applying 3.0.6 migration for watch playlists DB")
+    conn.executescript(
+        """
+        CREATE TABLE IF NOT EXISTS watched_playlists (
+            spotify_id TEXT PRIMARY KEY,
+            name TEXT,
+            owner_id TEXT,
+            owner_name TEXT,
+            total_tracks INTEGER,
+            link TEXT,
+            snapshot_id TEXT,
+            last_checked INTEGER,
+            added_at INTEGER,
+            is_active INTEGER DEFAULT 1
+        );
+        """
+    )
+
+
+def _apply_watch_artists_306(conn: sqlite3.Connection) -> None:
+    logger.info("Applying 3.0.6 migration for watch artists DB")
+    conn.executescript(
+        """
+        CREATE TABLE IF NOT EXISTS watched_artists (
+            spotify_id TEXT PRIMARY KEY,
+            name TEXT,
+            link TEXT,
+            total_albums_on_spotify INTEGER,
+            last_checked INTEGER,
+            added_at INTEGER,
+            is_active INTEGER DEFAULT 1,
+            genres TEXT,
+            popularity INTEGER,
+            image_url TEXT
+        );
+        """
+    )
+
+
+def run_migrations_if_needed() -> None:
+    """Detect and apply necessary migrations to align DB schema for v3.1.x.
+
+    Currently implements 3.0.6 baseline creation for history and watch DBs.
+    Idempotent by design.
+    """
+    try:
+        # History DB
+        h_conn = _safe_connect(HISTORY_DB)
+        if h_conn:
+            try:
+                if _history_needs_306(h_conn):
+                    _apply_history_306(h_conn)
+                else:
+                    # Even if main table is OK, ensure children tables are up-to-date
+                    _update_children_tables_for_history(h_conn)
+                h_conn.commit()
+            finally:
+                h_conn.close()
+
+        # Watch DBs
+        p_conn = _safe_connect(PLAYLISTS_DB)
+        if p_conn:
+            try:
+                if _watch_playlists_needs_306(p_conn):
+                    _apply_watch_playlists_306(p_conn)
+                p_conn.commit()
+            finally:
+                p_conn.close()
+
+        a_conn = _safe_connect(ARTISTS_DB)
+        if a_conn:
+            try:
+                if _watch_artists_needs_306(a_conn):
+                    _apply_watch_artists_306(a_conn)
+                a_conn.commit()
+            finally:
+                a_conn.close()
+        logger.info("Database migrations check completed")
+    except Exception:
+        logger.error("Database migration failed", exc_info=True)
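The constraint-stripping in _ensure_table_schema reflects a real SQLite limitation: ALTER TABLE ... ADD COLUMN refuses PRIMARY KEY and UNIQUE columns outright, and refuses NOT NULL columns unless they carry a non-NULL default. A quick illustration against a scratch database (not part of the commit):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (id INTEGER PRIMARY KEY)")

try:
    # Rejected: a NOT NULL column added after the fact needs a non-NULL default
    conn.execute("ALTER TABLE t ADD COLUMN title TEXT NOT NULL")
except sqlite3.OperationalError as e:
    print(f"rejected as expected: {e}")

# The stripped form, matching col_type_for_add above, succeeds
conn.execute("ALTER TABLE t ADD COLUMN title TEXT")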
tests/migration/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+
tests/migration/test_v3_0_6.py (new file, 215 lines)
@@ -0,0 +1,215 @@
+import sqlite3
+from pathlib import Path
+
+import pytest
+
+
+# Override the autouse credentials fixture from conftest for this module
+@pytest.fixture(scope="session", autouse=True)
+def setup_credentials_for_tests():
+    # No-op to avoid external API calls; this shadows the session autouse fixture in conftest.py
+    yield
+
+
+def _create_306_history_db(db_path: Path) -> None:
+    db_path.parent.mkdir(parents=True, exist_ok=True)
+    with sqlite3.connect(str(db_path)) as conn:
+        conn.executescript(
+            """
+            CREATE TABLE IF NOT EXISTS download_history (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                download_type TEXT NOT NULL,
+                title TEXT NOT NULL,
+                artists TEXT,
+                timestamp REAL NOT NULL,
+                status TEXT NOT NULL,
+                service TEXT,
+                quality_format TEXT,
+                quality_bitrate TEXT,
+                total_tracks INTEGER,
+                successful_tracks INTEGER,
+                failed_tracks INTEGER,
+                skipped_tracks INTEGER,
+                children_table TEXT,
+                task_id TEXT,
+                external_ids TEXT,
+                metadata TEXT,
+                release_date TEXT,
+                genres TEXT,
+                images TEXT,
+                owner TEXT,
+                album_type TEXT,
+                duration_total_ms INTEGER,
+                explicit BOOLEAN
+            );
+            CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
+            CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
+            CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
+            CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
+            """
+        )
+        # Insert rows that reference non-existent children tables
+        conn.execute(
+            """
+            INSERT INTO download_history (
+                download_type, title, artists, timestamp, status, service,
+                quality_format, quality_bitrate, total_tracks, successful_tracks,
+                failed_tracks, skipped_tracks, children_table, task_id,
+                external_ids, metadata, release_date, genres, images, owner,
+                album_type, duration_total_ms, explicit
+            ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                "album",
+                "Test Album",
+                "[]",
+                "completed",
+                "spotify",
+                "FLAC",
+                "1411kbps",
+                10,
+                8,
+                1,
+                1,
+                "album_test1",
+                "task-album-1",
+                "{}",
+                "{}",
+                "{}",
+                "[]",
+                "[]",
+                "{}",
+                "album",
+                123456,
+                0,
+            ),
+        )
+        conn.execute(
+            """
+            INSERT INTO download_history (
+                download_type, title, artists, timestamp, status, service,
+                quality_format, quality_bitrate, total_tracks, successful_tracks,
+                failed_tracks, skipped_tracks, children_table, task_id,
+                external_ids, metadata, release_date, genres, images, owner,
+                album_type, duration_total_ms, explicit
+            ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                "playlist",
+                "Test Playlist",
+                "[]",
+                "partial",
+                "spotify",
+                "MP3",
+                "320kbps",
+                20,
+                15,
+                3,
+                2,
+                "playlist_test2",
+                "task-playlist-1",
+                "{}",
+                "{}",
+                "{}",
+                "[]",
+                "[]",
+                "{}",
+                "",
+                654321,
+                0,
+            ),
+        )
+        # Create a legacy children table with too-few columns to test schema upgrade
+        conn.execute(
+            "CREATE TABLE IF NOT EXISTS album_legacy (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)"
+        )
+
+
+def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None:
+    playlists_db.parent.mkdir(parents=True, exist_ok=True)
+    with sqlite3.connect(str(playlists_db)) as pconn:
+        pconn.executescript(
+            """
+            CREATE TABLE IF NOT EXISTS watched_playlists (
+                spotify_id TEXT PRIMARY KEY,
+                name TEXT,
+                owner_id TEXT,
+                owner_name TEXT,
+                total_tracks INTEGER,
+                link TEXT,
+                snapshot_id TEXT,
+                last_checked INTEGER,
+                added_at INTEGER,
+                is_active INTEGER DEFAULT 1
+            );
+            """
+        )
+    with sqlite3.connect(str(artists_db)) as aconn:
+        aconn.executescript(
+            """
+            CREATE TABLE IF NOT EXISTS watched_artists (
+                spotify_id TEXT PRIMARY KEY,
+                name TEXT,
+                link TEXT,
+                total_albums_on_spotify INTEGER,
+                last_checked INTEGER,
+                added_at INTEGER,
+                is_active INTEGER DEFAULT 1,
+                genres TEXT,
+                popularity INTEGER,
+                image_url TEXT
+            );
+            """
+        )
+
+
+def _get_columns(db_path: Path, table: str) -> set[str]:
+    with sqlite3.connect(str(db_path)) as conn:
+        cur = conn.execute(f"PRAGMA table_info({table})")
+        return {row[1] for row in cur.fetchall()}
+
+
+def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
+    # Arrange temp paths
+    data_dir = tmp_path / "data"
+    history_db = data_dir / "history" / "download_history.db"
+    playlists_db = data_dir / "watch" / "playlists.db"
+    artists_db = data_dir / "watch" / "artists.db"
+
+    # Create 3.0.6 base schemas and sample data
+    _create_306_history_db(history_db)
+    _create_306_watch_dbs(playlists_db, artists_db)
+
+    # Point the migration runner to our temp DBs
+    from routes.migrations import runner
+    monkeypatch.setattr(runner, "DATA_DIR", data_dir)
+    monkeypatch.setattr(runner, "HISTORY_DB", history_db)
+    monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch")
+    monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db)
+    monkeypatch.setattr(runner, "ARTISTS_DB", artists_db)
+
+    # Act: run migrations
+    runner.run_migrations_if_needed()
+    # Run twice to ensure idempotency
+    runner.run_migrations_if_needed()
+
+    # Assert: referenced children tables exist with expected columns
+    expected_children_cols = {
+        "id",
+        "title",
+        "artists",
+        "album_title",
+        "duration_ms",
+        "track_number",
+        "disc_number",
+        "explicit",
+        "status",
+        "external_ids",
+        "genres",
+        "isrc",
+        "timestamp",
+        "position",
+        "metadata",
+    }
+    assert _get_columns(history_db, "album_test1").issuperset(expected_children_cols)
+    assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols)
+    # Legacy table upgraded
+    assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols)
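These tests exercise the runner end to end against databases created under pytest's tmp_path, so they touch nothing outside the test sandbox; from the repository root they can be run on their own with `pytest tests/migration/test_v3_0_6.py`.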