Merge pull request #290 from spotizerr-dev/dev

Dev
This commit is contained in:
Spotizerr
2025-08-20 22:04:46 -05:00
committed by GitHub
32 changed files with 3031 additions and 2061 deletions

5
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,5 @@
# Contributing guidelines
- All pull requests must be made to `dev` branch
- When implementing a feature related to downloading, follow the rule of choice: Every download must come from an active decision made by the user (e.g. clicking a download button, deciding the user wants a whole artist's discography, etc.). This takes out of the picture features like recommendation algorithms, auto-generated playlists, etc.

View File

@@ -145,7 +145,7 @@ If you self-host a music server with other users than yourself, you almost certa
### Monitor an Artist
1. Search for the artist
2. Click "Add to Watchlist"
2. Click "Add to Watchlist"
3. Configure which release types to monitor (albums, singles, etc.)
4. New releases will be automatically downloaded
@@ -244,6 +244,10 @@ This software is for educational purposes and personal use only. Ensure you comp
- Downloaded files retain original metadata
- Service limitations apply based on account types
### Contributing
See [CONTRIBUTING.md](./CONTRIBUTING.md)
## 🙏 Acknowledgements
This project was inspired by the amazing [deezspot library](https://github.com/jakiepari/deezspot). Although their creators are in no way related to Spotizerr, they still deserve credit for their excellent work.
This project was inspired by the amazing [deezspot library](https://github.com/jakiepari/deezspot). Although their creators are in no way related to Spotizerr, they still deserve credit for their excellent work.

468
app.py
View File

@@ -8,7 +8,6 @@ import logging.handlers
import time
from pathlib import Path
import os
import atexit
import sys
import redis
import socket
@@ -16,11 +15,16 @@ from urllib.parse import urlparse
# Run DB migrations as early as possible, before importing any routers that may touch DBs
try:
from routes.migrations import run_migrations_if_needed
run_migrations_if_needed()
logging.getLogger(__name__).info("Database migrations executed (if needed) early in startup.")
from routes.migrations import run_migrations_if_needed
run_migrations_if_needed()
logging.getLogger(__name__).info(
"Database migrations executed (if needed) early in startup."
)
except Exception as e:
logging.getLogger(__name__).error(f"Database migration step failed early in startup: {e}", exc_info=True)
logging.getLogger(__name__).error(
f"Database migration step failed early in startup: {e}", exc_info=True
)
# Import route routers (to be created)
from routes.auth.credentials import router as credentials_router
@@ -44,251 +48,299 @@ from routes.auth import AUTH_ENABLED
from routes.auth.middleware import AuthMiddleware
# Import and initialize routes (this will start the watch manager)
import routes
# Configure application-wide logging
def setup_logging():
"""Configure application-wide logging with rotation"""
# Create logs directory if it doesn't exist
logs_dir = Path("logs")
logs_dir.mkdir(exist_ok=True)
"""Configure application-wide logging with rotation"""
# Create logs directory if it doesn't exist
logs_dir = Path("logs")
logs_dir.mkdir(exist_ok=True)
# Set up log file paths
main_log = logs_dir / "spotizerr.log"
# Set up log file paths
main_log = logs_dir / "spotizerr.log"
# Configure root logger
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Configure root logger
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Clear any existing handlers from the root logger
if root_logger.hasHandlers():
root_logger.handlers.clear()
# Clear any existing handlers from the root logger
if root_logger.hasHandlers():
root_logger.handlers.clear()
# Log formatting
log_format = logging.Formatter(
"%(asctime)s [%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
# Log formatting
log_format = logging.Formatter(
"%(asctime)s [%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
# File handler with rotation (10 MB max, keep 5 backups)
file_handler = logging.handlers.RotatingFileHandler(
main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
)
file_handler.setFormatter(log_format)
file_handler.setLevel(logging.INFO)
# File handler with rotation (10 MB max, keep 5 backups)
file_handler = logging.handlers.RotatingFileHandler(
main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
)
file_handler.setFormatter(log_format)
file_handler.setLevel(logging.INFO)
# Console handler for stderr
console_handler = logging.StreamHandler(sys.stderr)
console_handler.setFormatter(log_format)
console_handler.setLevel(logging.INFO)
# Console handler for stderr
console_handler = logging.StreamHandler(sys.stderr)
console_handler.setFormatter(log_format)
console_handler.setLevel(logging.INFO)
# Add handlers to root logger
root_logger.addHandler(file_handler)
root_logger.addHandler(console_handler)
# Add handlers to root logger
root_logger.addHandler(file_handler)
root_logger.addHandler(console_handler)
# Set up specific loggers
for logger_name in [
"routes",
"routes.utils",
"routes.utils.celery_manager",
"routes.utils.celery_tasks",
"routes.utils.watch",
]:
logger = logging.getLogger(logger_name)
logger.setLevel(logging.INFO)
logger.propagate = True # Propagate to root logger
# Set up specific loggers
for logger_name in [
"routes",
"routes.utils",
"routes.utils.celery_manager",
"routes.utils.celery_tasks",
"routes.utils.watch",
]:
logger = logging.getLogger(logger_name)
logger.setLevel(logging.INFO)
logger.propagate = True # Propagate to root logger
logging.info("Logging system initialized")
logging.info("Logging system initialized")
def check_redis_connection():
"""Check if Redis is available and accessible"""
if not REDIS_URL:
logging.error("REDIS_URL is not configured. Please check your environment.")
return False
"""Check if Redis is available and accessible"""
if not REDIS_URL:
logging.error("REDIS_URL is not configured. Please check your environment.")
return False
try:
# Parse Redis URL
parsed_url = urlparse(REDIS_URL)
host = parsed_url.hostname or "localhost"
port = parsed_url.port or 6379
try:
# Parse Redis URL
parsed_url = urlparse(REDIS_URL)
host = parsed_url.hostname or "localhost"
port = parsed_url.port or 6379
logging.info(f"Testing Redis connection to {host}:{port}...")
logging.info(f"Testing Redis connection to {host}:{port}...")
# Test socket connection first
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5)
result = sock.connect_ex((host, port))
sock.close()
# Test socket connection first
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5)
result = sock.connect_ex((host, port))
sock.close()
if result != 0:
logging.error(f"Cannot connect to Redis at {host}:{port}")
return False
if result != 0:
logging.error(f"Cannot connect to Redis at {host}:{port}")
return False
# Test Redis client connection
r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
r.ping()
logging.info("Redis connection successful")
return True
# Test Redis client connection
r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
r.ping()
logging.info("Redis connection successful")
return True
except redis.ConnectionError as e:
logging.error(f"Redis connection error: {e}")
return False
except redis.TimeoutError as e:
logging.error(f"Redis timeout error: {e}")
return False
except Exception as e:
logging.error(f"Unexpected error checking Redis connection: {e}")
return False
except redis.ConnectionError as e:
logging.error(f"Redis connection error: {e}")
return False
except redis.TimeoutError as e:
logging.error(f"Redis timeout error: {e}")
return False
except Exception as e:
logging.error(f"Unexpected error checking Redis connection: {e}")
return False
@asynccontextmanager
async def lifespan(app: FastAPI):
"""Handle application startup and shutdown"""
# Startup
setup_logging()
# Check Redis connection
if not check_redis_connection():
logging.error("Failed to connect to Redis. Please ensure Redis is running and accessible.")
# Don't exit, but warn - some functionality may not work
# Start Celery workers
try:
celery_manager.start()
logging.info("Celery workers started successfully")
except Exception as e:
logging.error(f"Failed to start Celery workers: {e}")
yield
# Shutdown
try:
celery_manager.stop()
logging.info("Celery workers stopped")
except Exception as e:
logging.error(f"Error stopping Celery workers: {e}")
"""Handle application startup and shutdown"""
# Startup
setup_logging()
# Check Redis connection
if not check_redis_connection():
logging.error(
"Failed to connect to Redis. Please ensure Redis is running and accessible."
)
# Don't exit, but warn - some functionality may not work
# Start Celery workers
try:
celery_manager.start()
logging.info("Celery workers started successfully")
except Exception as e:
logging.error(f"Failed to start Celery workers: {e}")
yield
# Shutdown
try:
celery_manager.stop()
logging.info("Celery workers stopped")
except Exception as e:
logging.error(f"Error stopping Celery workers: {e}")
def create_app():
app = FastAPI(
title="Spotizerr API",
description="Music download service API",
version="3.0.0",
lifespan=lifespan,
redirect_slashes=True # Enable automatic trailing slash redirects
)
app = FastAPI(
title="Spotizerr API",
description="Music download service API",
version="3.0.0",
lifespan=lifespan,
redirect_slashes=True, # Enable automatic trailing slash redirects
)
# Set up CORS
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Set up CORS
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Add authentication middleware (only if auth is enabled)
if AUTH_ENABLED:
app.add_middleware(AuthMiddleware)
logging.info("Authentication system enabled")
else:
logging.info("Authentication system disabled")
# Add authentication middleware (only if auth is enabled)
if AUTH_ENABLED:
app.add_middleware(AuthMiddleware)
logging.info("Authentication system enabled")
else:
logging.info("Authentication system disabled")
# Register routers with URL prefixes
app.include_router(auth_router, prefix="/api/auth", tags=["auth"])
# Include SSO router if available
try:
from routes.auth.sso import router as sso_router
app.include_router(sso_router, prefix="/api/auth", tags=["sso"])
logging.info("SSO functionality enabled")
except ImportError as e:
logging.warning(f"SSO functionality not available: {e}")
app.include_router(config_router, prefix="/api/config", tags=["config"])
app.include_router(search_router, prefix="/api/search", tags=["search"])
app.include_router(credentials_router, prefix="/api/credentials", tags=["credentials"])
app.include_router(album_router, prefix="/api/album", tags=["album"])
app.include_router(track_router, prefix="/api/track", tags=["track"])
app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
app.include_router(history_router, prefix="/api/history", tags=["history"])
# Register routers with URL prefixes
app.include_router(auth_router, prefix="/api/auth", tags=["auth"])
# Add request logging middleware
@app.middleware("http")
async def log_requests(request: Request, call_next):
start_time = time.time()
# Log request
logger = logging.getLogger("uvicorn.access")
logger.debug(f"Request: {request.method} {request.url.path}")
try:
response = await call_next(request)
# Log response
duration = round((time.time() - start_time) * 1000, 2)
logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")
return response
except Exception as e:
# Log errors
logger.error(f"Server error: {str(e)}", exc_info=True)
raise HTTPException(status_code=500, detail="Internal Server Error")
# Include SSO router if available
try:
from routes.auth.sso import router as sso_router
# Mount static files for React app
if os.path.exists("spotizerr-ui/dist"):
app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")
# Serve React App - catch-all route for SPA (but not for API routes)
@app.get("/{full_path:path}")
async def serve_react_app(full_path: str):
"""Serve React app with fallback to index.html for SPA routing"""
static_dir = "spotizerr-ui/dist"
# Don't serve React app for API routes (more specific check)
if full_path.startswith("api") or full_path.startswith("api/"):
raise HTTPException(status_code=404, detail="API endpoint not found")
# If it's a file that exists, serve it
if full_path and os.path.exists(os.path.join(static_dir, full_path)):
return FileResponse(os.path.join(static_dir, full_path))
else:
# Fallback to index.html for SPA routing
return FileResponse(os.path.join(static_dir, "index.html"))
else:
logging.warning("React app build directory not found at spotizerr-ui/dist")
app.include_router(sso_router, prefix="/api/auth", tags=["sso"])
logging.info("SSO functionality enabled")
except ImportError as e:
logging.warning(f"SSO functionality not available: {e}")
app.include_router(config_router, prefix="/api/config", tags=["config"])
app.include_router(search_router, prefix="/api/search", tags=["search"])
app.include_router(
credentials_router, prefix="/api/credentials", tags=["credentials"]
)
app.include_router(album_router, prefix="/api/album", tags=["album"])
app.include_router(track_router, prefix="/api/track", tags=["track"])
app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
app.include_router(history_router, prefix="/api/history", tags=["history"])
return app
# Add request logging middleware
@app.middleware("http")
async def log_requests(request: Request, call_next):
start_time = time.time()
# Log request
logger = logging.getLogger("uvicorn.access")
logger.debug(f"Request: {request.method} {request.url.path}")
try:
response = await call_next(request)
# Log response
duration = round((time.time() - start_time) * 1000, 2)
logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")
return response
except Exception as e:
# Log errors
logger.error(f"Server error: {str(e)}", exc_info=True)
raise HTTPException(status_code=500, detail="Internal Server Error")
# Mount static files for React app
if os.path.exists("spotizerr-ui/dist"):
app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")
# Serve React App - catch-all route for SPA (but not for API routes)
@app.get("/{full_path:path}")
async def serve_react_app(full_path: str):
"""Serve React app with fallback to index.html for SPA routing. Prevent directory traversal."""
static_dir = "spotizerr-ui/dist"
static_dir_path = Path(static_dir).resolve()
index_path = static_dir_path / "index.html"
allowed_exts = {
".html",
".js",
".css",
".map",
".png",
".jpg",
".jpeg",
".svg",
".webp",
".ico",
".json",
".txt",
".woff",
".woff2",
".ttf",
".eot",
".mp3",
".ogg",
".mp4",
".webm",
}
# Don't serve React app for API routes (more specific check)
if full_path.startswith("api") or full_path.startswith("api/"):
raise HTTPException(status_code=404, detail="API endpoint not found")
# Reject null bytes early
if "\x00" in full_path:
return FileResponse(str(index_path))
# Sanitize path: normalize backslashes and strip URL schemes
sanitized = full_path.replace("\\", "/").lstrip("/")
if sanitized.startswith("http://") or sanitized.startswith("https://"):
return FileResponse(str(index_path))
# Resolve requested path safely and ensure it stays within static_dir
try:
requested_path = (static_dir_path / sanitized).resolve()
except Exception:
requested_path = index_path
# If traversal attempted or non-file within static dir, fall back to index.html for SPA routing
if not str(requested_path).startswith(str(static_dir_path)):
return FileResponse(str(index_path))
# Disallow hidden files (starting with dot) and enforce safe extensions
if requested_path.is_file():
name = requested_path.name
if name.startswith("."):
return FileResponse(str(index_path))
suffix = requested_path.suffix.lower()
if suffix in allowed_exts:
return FileResponse(str(requested_path))
# Not an allowed asset; fall back to SPA index
return FileResponse(str(index_path))
else:
# Fallback to index.html for SPA routing
return FileResponse(str(index_path))
else:
logging.warning("React app build directory not found at spotizerr-ui/dist")
return app
def start_celery_workers():
"""Start Celery workers with dynamic configuration"""
# This function is now handled by the lifespan context manager
# and the celery_manager.start() call
pass
"""Start Celery workers with dynamic configuration"""
# This function is now handled by the lifespan context manager
# and the celery_manager.start() call
pass
if __name__ == "__main__":
import uvicorn
import uvicorn
app = create_app()
app = create_app()
# Use HOST environment variable if present, otherwise fall back to IPv4 wildcard
host = os.getenv("HOST", "0.0.0.0")
# Use HOST environment variable if present, otherwise fall back to IPv4 wildcard
host = os.getenv("HOST", "0.0.0.0")
# Allow overriding port via PORT env var, with default 7171
try:
port = int(os.getenv("PORT", "7171"))
except ValueError:
port = 7171
# Allow overriding port via PORT env var, with default 7171
try:
port = int(os.getenv("PORT", "7171"))
except ValueError:
port = 7171
uvicorn.run(
app,
host=host,
port=port,
log_level="info",
access_log=True
)
uvicorn.run(app, host=host, port=port, log_level="info", access_log=True)

View File

@@ -1,7 +1,7 @@
fastapi==0.116.1
uvicorn[standard]==0.35.0
celery==5.5.3
deezspot-spotizerr==2.6.0
deezspot-spotizerr==2.7.3
httpx==0.28.1
bcrypt==4.2.1
PyJWT==2.10.1

View File

@@ -1,6 +1,5 @@
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi import APIRouter, Request, Depends
from fastapi.responses import JSONResponse
import json
import traceback
import uuid
import time
@@ -21,7 +20,11 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
@router.get("/download/{album_id}")
async def handle_download(album_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
async def handle_download(
album_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
# Retrieve essential parameters from the request.
# name = request.args.get('name')
# artist = request.args.get('artist')
@@ -38,8 +41,10 @@ async def handle_download(album_id: str, request: Request, current_user: User =
or not album_info.get("artists")
):
return JSONResponse(
content={"error": f"Could not retrieve metadata for album ID: {album_id}"},
status_code=404
content={
"error": f"Could not retrieve metadata for album ID: {album_id}"
},
status_code=404,
)
name_from_spotify = album_info.get("name")
@@ -51,15 +56,16 @@ async def handle_download(album_id: str, request: Request, current_user: User =
except Exception as e:
return JSONResponse(
content={"error": f"Failed to fetch metadata for album {album_id}: {str(e)}"},
status_code=500
content={
"error": f"Failed to fetch metadata for album {album_id}: {str(e)}"
},
status_code=500,
)
# Validate required parameters
if not url:
return JSONResponse(
content={"error": "Missing required parameter: url"},
status_code=400
content={"error": "Missing required parameter: url"}, status_code=400
)
# Add the task to the queue with only essential parameters
@@ -84,7 +90,7 @@ async def handle_download(album_id: str, request: Request, current_user: User =
"error": "Duplicate download detected.",
"existing_task": e.existing_task,
},
status_code=409
status_code=409,
)
except Exception as e:
# Generic error handling for other issues during task submission
@@ -116,25 +122,23 @@ async def handle_download(album_id: str, request: Request, current_user: User =
"error": f"Failed to queue album download: {str(e)}",
"task_id": error_task_id,
},
status_code=500
status_code=500,
)
return JSONResponse(
content={"task_id": task_id},
status_code=202
)
return JSONResponse(content={"task_id": task_id}, status_code=202)
@router.get("/download/cancel")
async def cancel_download(request: Request, current_user: User = Depends(require_auth_from_state)):
async def cancel_download(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Cancel a running download process by its task id.
"""
task_id = request.query_params.get("task_id")
if not task_id:
return JSONResponse(
content={"error": "Missing process id (task_id) parameter"},
status_code=400
content={"error": "Missing process id (task_id) parameter"}, status_code=400
)
# Use the queue manager's cancellation method.
@@ -145,7 +149,9 @@ async def cancel_download(request: Request, current_user: User = Depends(require
@router.get("/info")
async def get_album_info(request: Request, current_user: User = Depends(require_auth_from_state)):
async def get_album_info(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Retrieve Spotify album metadata given a Spotify album ID.
Expects a query parameter 'id' that contains the Spotify album ID.
@@ -153,15 +159,30 @@ async def get_album_info(request: Request, current_user: User = Depends(require_
spotify_id = request.query_params.get("id")
if not spotify_id:
return JSONResponse(
content={"error": "Missing parameter: id"},
status_code=400
)
return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
try:
# Use the get_spotify_info function (already imported at top)
# Optional pagination params for tracks
limit_param = request.query_params.get("limit")
offset_param = request.query_params.get("offset")
limit = int(limit_param) if limit_param is not None else None
offset = int(offset_param) if offset_param is not None else None
# Fetch album metadata
album_info = get_spotify_info(spotify_id, "album")
# Fetch album tracks with pagination
album_tracks = get_spotify_info(
spotify_id, "album_tracks", limit=limit, offset=offset
)
# Merge tracks into album payload in the same shape Spotify returns on album
album_info["tracks"] = album_tracks
return JSONResponse(content=album_info, status_code=200)
except ValueError as ve:
return JSONResponse(
content={"error": f"Invalid limit/offset: {str(ve)}"}, status_code=400
)
except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()}
return JSONResponse(content=error_data, status_code=500)

View File

@@ -2,7 +2,7 @@
Artist endpoint router.
"""
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi import APIRouter, HTTPException, Request, Depends, Query
from fastapi.responses import JSONResponse
import json
import traceback
@@ -24,7 +24,7 @@ from routes.utils.watch.manager import check_watched_artists, get_watch_config
from routes.utils.get_info import get_spotify_info
# Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, require_admin_from_state, User
from routes.auth.middleware import require_auth_from_state, User
router = APIRouter()
@@ -43,7 +43,11 @@ def log_json(message_dict):
@router.get("/download/{artist_id}")
async def handle_artist_download(artist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
async def handle_artist_download(
artist_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
"""
Enqueues album download tasks for the given artist.
Expected query parameters:
@@ -58,8 +62,7 @@ async def handle_artist_download(artist_id: str, request: Request, current_user:
# Validate required parameters
if not url: # This check is mostly for safety, as url is constructed
return JSONResponse(
content={"error": "Missing required parameter: url"},
status_code=400
content={"error": "Missing required parameter: url"}, status_code=400
)
try:
@@ -68,7 +71,10 @@ async def handle_artist_download(artist_id: str, request: Request, current_user:
# Delegate to the download_artist_albums function which will handle album filtering
successfully_queued_albums, duplicate_albums = download_artist_albums(
url=url, album_type=album_type, request_args=dict(request.query_params)
url=url,
album_type=album_type,
request_args=dict(request.query_params),
username=current_user.username,
)
# Return the list of album task IDs.
@@ -85,7 +91,7 @@ async def handle_artist_download(artist_id: str, request: Request, current_user:
return JSONResponse(
content=response_data,
status_code=202 # Still 202 Accepted as some operations may have succeeded
status_code=202, # Still 202 Accepted as some operations may have succeeded
)
except Exception as e:
return JSONResponse(
@@ -94,7 +100,7 @@ async def handle_artist_download(artist_id: str, request: Request, current_user:
"message": str(e),
"traceback": traceback.format_exc(),
},
status_code=500
status_code=500,
)
@@ -106,12 +112,16 @@ async def cancel_artist_download():
"""
return JSONResponse(
content={"error": "Artist download cancellation is not supported."},
status_code=400
status_code=400,
)
@router.get("/info")
async def get_artist_info(request: Request, current_user: User = Depends(require_auth_from_state)):
async def get_artist_info(
request: Request, current_user: User = Depends(require_auth_from_state),
limit: int = Query(10, ge=1), # default=10, must be >=1
offset: int = Query(0, ge=0) # default=0, must be >=0
):
"""
Retrieves Spotify artist metadata given a Spotify artist ID.
Expects a query parameter 'id' with the Spotify artist ID.
@@ -119,27 +129,25 @@ async def get_artist_info(request: Request, current_user: User = Depends(require
spotify_id = request.query_params.get("id")
if not spotify_id:
return JSONResponse(
content={"error": "Missing parameter: id"},
status_code=400
)
return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
try:
# Get artist metadata first
artist_metadata = get_spotify_info(spotify_id, "artist")
# Get artist discography for albums
artist_discography = get_spotify_info(spotify_id, "artist_discography")
artist_discography = get_spotify_info(spotify_id, "artist_discography", limit=limit, offset=offset)
# Combine metadata with discography
artist_info = {
**artist_metadata,
"albums": artist_discography
}
artist_info = {**artist_metadata, "albums": artist_discography}
# If artist_info is successfully fetched and has albums,
# check if the artist is watched and augment album items with is_locally_known status
if artist_info and artist_info.get("albums") and artist_info["albums"].get("items"):
if (
artist_info
and artist_info.get("albums")
and artist_info["albums"].get("items")
):
watched_artist_details = get_watched_artist(
spotify_id
) # spotify_id is the artist ID
@@ -155,13 +163,11 @@ async def get_artist_info(request: Request, current_user: User = Depends(require
# If not watched, or no albums, is_locally_known will not be added.
# Frontend should handle absence of this key as false.
return JSONResponse(
content=artist_info, status_code=200
)
return JSONResponse(content=artist_info, status_code=200)
except Exception as e:
return JSONResponse(
content={"error": str(e), "traceback": traceback.format_exc()},
status_code=500
status_code=500,
)
@@ -169,11 +175,16 @@ async def get_artist_info(request: Request, current_user: User = Depends(require
@router.put("/watch/{artist_spotify_id}")
async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
async def add_artist_to_watchlist(
artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Adds an artist to the watchlist."""
watch_config = get_watch_config()
if not watch_config.get("enabled", False):
raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
raise HTTPException(
status_code=403,
detail={"error": "Watch feature is currently disabled globally."},
)
logger.info(f"Attempting to add artist {artist_spotify_id} to watchlist.")
try:
@@ -182,7 +193,7 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = D
# Get artist metadata directly for name and basic info
artist_metadata = get_spotify_info(artist_spotify_id, "artist")
# Get artist discography for album count
artist_album_list_data = get_spotify_info(
artist_spotify_id, "artist_discography"
@@ -197,7 +208,7 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = D
status_code=404,
detail={
"error": f"Could not fetch artist metadata for {artist_spotify_id} to initiate watch."
}
},
)
# Check if we got album data
@@ -213,7 +224,9 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = D
"id": artist_spotify_id,
"name": artist_metadata.get("name", "Unknown Artist"),
"albums": { # Mimic structure if add_artist_db expects it for total_albums
"total": artist_album_list_data.get("total", 0) if artist_album_list_data else 0
"total": artist_album_list_data.get("total", 0)
if artist_album_list_data
else 0
},
# Add any other fields add_artist_db might expect from a true artist object if necessary
}
@@ -232,11 +245,16 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = D
logger.error(
f"Error adding artist {artist_spotify_id} to watchlist: {e}", exc_info=True
)
raise HTTPException(status_code=500, detail={"error": f"Could not add artist to watchlist: {str(e)}"})
raise HTTPException(
status_code=500,
detail={"error": f"Could not add artist to watchlist: {str(e)}"},
)
@router.get("/watch/{artist_spotify_id}/status")
async def get_artist_watch_status(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
async def get_artist_watch_status(
artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Checks if a specific artist is being watched."""
logger.info(f"Checking watch status for artist {artist_spotify_id}.")
try:
@@ -250,22 +268,29 @@ async def get_artist_watch_status(artist_spotify_id: str, current_user: User = D
f"Error checking watch status for artist {artist_spotify_id}: {e}",
exc_info=True,
)
raise HTTPException(status_code=500, detail={"error": f"Could not check watch status: {str(e)}"})
raise HTTPException(
status_code=500, detail={"error": f"Could not check watch status: {str(e)}"}
)
@router.delete("/watch/{artist_spotify_id}")
async def remove_artist_from_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
async def remove_artist_from_watchlist(
artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Removes an artist from the watchlist."""
watch_config = get_watch_config()
if not watch_config.get("enabled", False):
raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
raise HTTPException(
status_code=403,
detail={"error": "Watch feature is currently disabled globally."},
)
logger.info(f"Attempting to remove artist {artist_spotify_id} from watchlist.")
try:
if not get_watched_artist(artist_spotify_id):
raise HTTPException(
status_code=404,
detail={"error": f"Artist {artist_spotify_id} not found in watchlist."}
detail={"error": f"Artist {artist_spotify_id} not found in watchlist."},
)
remove_artist_db(artist_spotify_id)
@@ -280,23 +305,30 @@ async def remove_artist_from_watchlist(artist_spotify_id: str, current_user: Use
)
raise HTTPException(
status_code=500,
detail={"error": f"Could not remove artist from watchlist: {str(e)}"}
detail={"error": f"Could not remove artist from watchlist: {str(e)}"},
)
@router.get("/watch/list")
async def list_watched_artists_endpoint(current_user: User = Depends(require_auth_from_state)):
async def list_watched_artists_endpoint(
current_user: User = Depends(require_auth_from_state),
):
"""Lists all artists currently in the watchlist."""
try:
artists = get_watched_artists()
return [dict(artist) for artist in artists]
except Exception as e:
logger.error(f"Error listing watched artists: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": f"Could not list watched artists: {str(e)}"})
raise HTTPException(
status_code=500,
detail={"error": f"Could not list watched artists: {str(e)}"},
)
@router.post("/watch/trigger_check")
async def trigger_artist_check_endpoint(current_user: User = Depends(require_auth_from_state)):
async def trigger_artist_check_endpoint(
current_user: User = Depends(require_auth_from_state),
):
"""Manually triggers the artist checking mechanism for all watched artists."""
watch_config = get_watch_config()
if not watch_config.get("enabled", False):
@@ -304,7 +336,7 @@ async def trigger_artist_check_endpoint(current_user: User = Depends(require_aut
status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check."
}
},
)
logger.info("Manual trigger for artist check received for all artists.")
@@ -320,12 +352,14 @@ async def trigger_artist_check_endpoint(current_user: User = Depends(require_aut
)
raise HTTPException(
status_code=500,
detail={"error": f"Could not trigger artist check for all: {str(e)}"}
detail={"error": f"Could not trigger artist check for all: {str(e)}"},
)
@router.post("/watch/trigger_check/{artist_spotify_id}")
async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
async def trigger_specific_artist_check_endpoint(
artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Manually triggers the artist checking mechanism for a specific artist."""
watch_config = get_watch_config()
if not watch_config.get("enabled", False):
@@ -333,7 +367,7 @@ async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current
status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check."
}
},
)
logger.info(
@@ -349,7 +383,7 @@ async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current
status_code=404,
detail={
"error": f"Artist {artist_spotify_id} is not in the watchlist. Add it first."
}
},
)
thread = threading.Thread(
@@ -373,12 +407,16 @@ async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current
status_code=500,
detail={
"error": f"Could not trigger artist check for {artist_spotify_id}: {str(e)}"
}
},
)
@router.post("/watch/{artist_spotify_id}/albums")
async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
async def mark_albums_as_known_for_artist(
artist_spotify_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
"""Fetches details for given album IDs and adds/updates them in the artist's local DB table."""
watch_config = get_watch_config()
if not watch_config.get("enabled", False):
@@ -386,7 +424,7 @@ async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Reque
status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot mark albums."
}
},
)
logger.info(f"Attempting to mark albums as known for artist {artist_spotify_id}.")
@@ -399,13 +437,13 @@ async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Reque
status_code=400,
detail={
"error": "Invalid request body. Expecting a JSON array of album Spotify IDs."
}
},
)
if not get_watched_artist(artist_spotify_id):
raise HTTPException(
status_code=404,
detail={"error": f"Artist {artist_spotify_id} is not being watched."}
detail={"error": f"Artist {artist_spotify_id} is not being watched."},
)
fetched_albums_details = []
@@ -446,11 +484,18 @@ async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Reque
f"Error marking albums as known for artist {artist_spotify_id}: {e}",
exc_info=True,
)
raise HTTPException(status_code=500, detail={"error": f"Could not mark albums as known: {str(e)}"})
raise HTTPException(
status_code=500,
detail={"error": f"Could not mark albums as known: {str(e)}"},
)
@router.delete("/watch/{artist_spotify_id}/albums")
async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
async def mark_albums_as_missing_locally_for_artist(
artist_spotify_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
"""Removes specified albums from the artist's local DB table."""
watch_config = get_watch_config()
if not watch_config.get("enabled", False):
@@ -458,7 +503,7 @@ async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, requ
status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot mark albums."
}
},
)
logger.info(
@@ -473,13 +518,13 @@ async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, requ
status_code=400,
detail={
"error": "Invalid request body. Expecting a JSON array of album Spotify IDs."
}
},
)
if not get_watched_artist(artist_spotify_id):
raise HTTPException(
status_code=404,
detail={"error": f"Artist {artist_spotify_id} is not being watched."}
detail={"error": f"Artist {artist_spotify_id} is not being watched."},
)
deleted_count = remove_specific_albums_from_artist_table(
@@ -498,4 +543,7 @@ async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, requ
f"Error marking albums as missing (deleting locally) for artist {artist_spotify_id}: {e}",
exc_info=True,
)
raise HTTPException(status_code=500, detail={"error": f"Could not mark albums as missing: {str(e)}"})
raise HTTPException(
status_code=500,
detail={"error": f"Could not mark albums as missing: {str(e)}"},
)

View File

@@ -1,9 +1,8 @@
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi import APIRouter, Request, Depends
from fastapi.responses import JSONResponse
import json
import traceback
import logging
from routes.utils.history_manager import history_manager
from typing import Any, Dict
# Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, User
@@ -15,10 +14,12 @@ router = APIRouter()
@router.get("/")
@router.get("")
async def get_history(request: Request, current_user: User = Depends(require_auth_from_state)):
async def get_history(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Retrieve download history with optional filtering and pagination.
Query parameters:
- limit: Maximum number of records (default: 100, max: 500)
- offset: Number of records to skip (default: 0)
@@ -31,143 +32,144 @@ async def get_history(request: Request, current_user: User = Depends(require_aut
offset = max(int(request.query_params.get("offset", 0)), 0)
download_type = request.query_params.get("download_type")
status = request.query_params.get("status")
# Validate download_type if provided
valid_types = ["track", "album", "playlist"]
if download_type and download_type not in valid_types:
return JSONResponse(
content={"error": f"Invalid download_type. Must be one of: {valid_types}"},
status_code=400
content={
"error": f"Invalid download_type. Must be one of: {valid_types}"
},
status_code=400,
)
# Validate status if provided
valid_statuses = ["completed", "failed", "skipped", "in_progress"]
if status and status not in valid_statuses:
return JSONResponse(
content={"error": f"Invalid status. Must be one of: {valid_statuses}"},
status_code=400
status_code=400,
)
# Get history from manager
history = history_manager.get_download_history(
limit=limit,
offset=offset,
download_type=download_type,
status=status
limit=limit, offset=offset, download_type=download_type, status=status
)
# Add pagination info
response_data = {
response_data: Dict[str, Any] = {
"downloads": history,
"pagination": {
"limit": limit,
"offset": offset,
"returned_count": len(history)
}
"returned_count": len(history),
},
}
filters: Dict[str, Any] = {}
if download_type:
response_data["filters"] = {"download_type": download_type}
filters["download_type"] = download_type
if status:
if "filters" not in response_data:
response_data["filters"] = {}
response_data["filters"]["status"] = status
return JSONResponse(
content=response_data,
status_code=200
)
filters["status"] = status
if filters:
response_data["filters"] = filters
return JSONResponse(content=response_data, status_code=200)
except ValueError as e:
return JSONResponse(
content={"error": f"Invalid parameter value: {str(e)}"},
status_code=400
content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
)
except Exception as e:
logger.error(f"Error retrieving download history: {e}", exc_info=True)
return JSONResponse(
content={"error": "Failed to retrieve download history", "details": str(e)},
status_code=500
status_code=500,
)
@router.get("/{task_id}")
async def get_download_by_task_id(task_id: str, current_user: User = Depends(require_auth_from_state)):
async def get_download_by_task_id(
task_id: str, current_user: User = Depends(require_auth_from_state)
):
"""
Retrieve specific download history by task ID.
Args:
task_id: Celery task ID
task_id: Celery task ID
"""
try:
download = history_manager.get_download_by_task_id(task_id)
if not download:
return JSONResponse(
content={"error": f"Download with task ID '{task_id}' not found"},
status_code=404
status_code=404,
)
return JSONResponse(
content=download,
status_code=200
)
return JSONResponse(content=download, status_code=200)
except Exception as e:
logger.error(f"Error retrieving download for task {task_id}: {e}", exc_info=True)
logger.error(
f"Error retrieving download for task {task_id}: {e}", exc_info=True
)
return JSONResponse(
content={"error": "Failed to retrieve download", "details": str(e)},
status_code=500
status_code=500,
)
@router.get("/{task_id}/children")
async def get_download_children(task_id: str, current_user: User = Depends(require_auth_from_state)):
async def get_download_children(
task_id: str, current_user: User = Depends(require_auth_from_state)
):
"""
Retrieve children tracks for an album or playlist download.
Args:
task_id: Celery task ID
task_id: Celery task ID
"""
try:
# First get the main download to find the children table
download = history_manager.get_download_by_task_id(task_id)
if not download:
return JSONResponse(
content={"error": f"Download with task ID '{task_id}' not found"},
status_code=404
status_code=404,
)
children_table = download.get("children_table")
if not children_table:
return JSONResponse(
content={"error": f"Download '{task_id}' has no children tracks"},
status_code=404
status_code=404,
)
# Get children tracks
children = history_manager.get_children_history(children_table)
response_data = {
"task_id": task_id,
"download_type": download.get("download_type"),
"title": download.get("title"),
"children_table": children_table,
"tracks": children,
"track_count": len(children)
"track_count": len(children),
}
return JSONResponse(
content=response_data,
status_code=200
)
return JSONResponse(content=response_data, status_code=200)
except Exception as e:
logger.error(f"Error retrieving children for task {task_id}: {e}", exc_info=True)
logger.error(
f"Error retrieving children for task {task_id}: {e}", exc_info=True
)
return JSONResponse(
content={"error": "Failed to retrieve download children", "details": str(e)},
status_code=500
content={
"error": "Failed to retrieve download children",
"details": str(e),
},
status_code=500,
)
@@ -178,25 +180,27 @@ async def get_download_stats(current_user: User = Depends(require_auth_from_stat
"""
try:
stats = history_manager.get_download_stats()
return JSONResponse(
content=stats,
status_code=200
)
return JSONResponse(content=stats, status_code=200)
except Exception as e:
logger.error(f"Error retrieving download stats: {e}", exc_info=True)
return JSONResponse(
content={"error": "Failed to retrieve download statistics", "details": str(e)},
status_code=500
content={
"error": "Failed to retrieve download statistics",
"details": str(e),
},
status_code=500,
)
@router.get("/search")
async def search_history(request: Request, current_user: User = Depends(require_auth_from_state)):
async def search_history(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Search download history by title or artist.
Query parameters:
- q: Search query (required)
- limit: Maximum number of results (default: 50, max: 200)
@@ -206,147 +210,134 @@ async def search_history(request: Request, current_user: User = Depends(require_
if not query:
return JSONResponse(
content={"error": "Missing required parameter: q (search query)"},
status_code=400
status_code=400,
)
limit = min(int(request.query_params.get("limit", 50)), 200) # Cap at 200
# Search history
results = history_manager.search_history(query, limit)
response_data = {
"query": query,
"results": results,
"result_count": len(results),
"limit": limit
"limit": limit,
}
return JSONResponse(
content=response_data,
status_code=200
)
return JSONResponse(content=response_data, status_code=200)
except ValueError as e:
return JSONResponse(
content={"error": f"Invalid parameter value: {str(e)}"},
status_code=400
content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
)
except Exception as e:
logger.error(f"Error searching download history: {e}", exc_info=True)
return JSONResponse(
content={"error": "Failed to search download history", "details": str(e)},
status_code=500
status_code=500,
)
@router.get("/recent")
async def get_recent_downloads(request: Request, current_user: User = Depends(require_auth_from_state)):
async def get_recent_downloads(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Get most recent downloads.
Query parameters:
- limit: Maximum number of results (default: 20, max: 100)
"""
try:
limit = min(int(request.query_params.get("limit", 20)), 100) # Cap at 100
recent = history_manager.get_recent_downloads(limit)
response_data = {
"downloads": recent,
"count": len(recent),
"limit": limit
}
return JSONResponse(
content=response_data,
status_code=200
)
response_data = {"downloads": recent, "count": len(recent), "limit": limit}
return JSONResponse(content=response_data, status_code=200)
except ValueError as e:
return JSONResponse(
content={"error": f"Invalid parameter value: {str(e)}"},
status_code=400
content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
)
except Exception as e:
logger.error(f"Error retrieving recent downloads: {e}", exc_info=True)
return JSONResponse(
content={"error": "Failed to retrieve recent downloads", "details": str(e)},
status_code=500
status_code=500,
)
@router.get("/failed")
async def get_failed_downloads(request: Request, current_user: User = Depends(require_auth_from_state)):
async def get_failed_downloads(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Get failed downloads.
Query parameters:
- limit: Maximum number of results (default: 50, max: 200)
"""
try:
limit = min(int(request.query_params.get("limit", 50)), 200) # Cap at 200
failed = history_manager.get_failed_downloads(limit)
response_data = {
"downloads": failed,
"count": len(failed),
"limit": limit
}
return JSONResponse(
content=response_data,
status_code=200
)
response_data = {"downloads": failed, "count": len(failed), "limit": limit}
return JSONResponse(content=response_data, status_code=200)
except ValueError as e:
return JSONResponse(
content={"error": f"Invalid parameter value: {str(e)}"},
status_code=400
content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
)
except Exception as e:
logger.error(f"Error retrieving failed downloads: {e}", exc_info=True)
return JSONResponse(
content={"error": "Failed to retrieve failed downloads", "details": str(e)},
status_code=500
status_code=500,
)
@router.post("/cleanup")
async def cleanup_old_history(request: Request, current_user: User = Depends(require_auth_from_state)):
async def cleanup_old_history(
request: Request, current_user: User = Depends(require_auth_from_state)
):
"""
Clean up old download history.
JSON body:
- days_old: Number of days old to keep (default: 30)
"""
try:
data = await request.json() if request.headers.get("content-type") == "application/json" else {}
data = (
await request.json()
if request.headers.get("content-type") == "application/json"
else {}
)
days_old = data.get("days_old", 30)
if not isinstance(days_old, int) or days_old <= 0:
return JSONResponse(
content={"error": "days_old must be a positive integer"},
status_code=400
status_code=400,
)
deleted_count = history_manager.clear_old_history(days_old)
response_data = {
"message": f"Successfully cleaned up old download history",
"message": "Successfully cleaned up old download history",
"deleted_records": deleted_count,
"days_old": days_old
"days_old": days_old,
}
return JSONResponse(
content=response_data,
status_code=200
)
return JSONResponse(content=response_data, status_code=200)
except Exception as e:
logger.error(f"Error cleaning up old history: {e}", exc_info=True)
return JSONResponse(
content={"error": "Failed to cleanup old history", "details": str(e)},
status_code=500
)
status_code=500,
)

View File

@@ -6,6 +6,7 @@ from typing import Optional
from .v3_0_6 import MigrationV3_0_6
from .v3_1_0 import MigrationV3_1_0
from .v3_1_1 import MigrationV3_1_1
from .v3_1_2 import MigrationV3_1_2
logger = logging.getLogger(__name__)
@@ -23,138 +24,142 @@ SEARCH_JSON = CREDS_DIR / "search.json"
# Expected children table columns for history (album_/playlist_)
CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
"id": "INTEGER PRIMARY KEY AUTOINCREMENT",
"title": "TEXT NOT NULL",
"artists": "TEXT",
"album_title": "TEXT",
"duration_ms": "INTEGER",
"track_number": "INTEGER",
"disc_number": "INTEGER",
"explicit": "BOOLEAN",
"status": "TEXT NOT NULL",
"external_ids": "TEXT",
"genres": "TEXT",
"isrc": "TEXT",
"timestamp": "REAL NOT NULL",
"position": "INTEGER",
"metadata": "TEXT",
"id": "INTEGER PRIMARY KEY AUTOINCREMENT",
"title": "TEXT NOT NULL",
"artists": "TEXT",
"album_title": "TEXT",
"duration_ms": "INTEGER",
"track_number": "INTEGER",
"disc_number": "INTEGER",
"explicit": "BOOLEAN",
"status": "TEXT NOT NULL",
"external_ids": "TEXT",
"genres": "TEXT",
"isrc": "TEXT",
"timestamp": "REAL NOT NULL",
"position": "INTEGER",
"metadata": "TEXT",
}
# 3.1.2 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = {
"spotify_id": "TEXT PRIMARY KEY",
"name": "TEXT",
"owner_id": "TEXT",
"owner_name": "TEXT",
"total_tracks": "INTEGER",
"link": "TEXT",
"snapshot_id": "TEXT",
"last_checked": "INTEGER",
"added_at": "INTEGER",
"is_active": "INTEGER DEFAULT 1",
"spotify_id": "TEXT PRIMARY KEY",
"name": "TEXT",
"owner_id": "TEXT",
"owner_name": "TEXT",
"total_tracks": "INTEGER",
"link": "TEXT",
"snapshot_id": "TEXT",
"last_checked": "INTEGER",
"added_at": "INTEGER",
"is_active": "INTEGER DEFAULT 1",
}
EXPECTED_PLAYLIST_TRACKS_COLUMNS: dict[str, str] = {
"spotify_track_id": "TEXT PRIMARY KEY",
"title": "TEXT",
"artist_names": "TEXT",
"album_name": "TEXT",
"album_artist_names": "TEXT",
"track_number": "INTEGER",
"album_spotify_id": "TEXT",
"duration_ms": "INTEGER",
"added_at_playlist": "TEXT",
"added_to_db": "INTEGER",
"is_present_in_spotify": "INTEGER DEFAULT 1",
"last_seen_in_spotify": "INTEGER",
"snapshot_id": "TEXT",
"final_path": "TEXT",
"spotify_track_id": "TEXT PRIMARY KEY",
"title": "TEXT",
"artist_names": "TEXT",
"album_name": "TEXT",
"album_artist_names": "TEXT",
"track_number": "INTEGER",
"album_spotify_id": "TEXT",
"duration_ms": "INTEGER",
"added_at_playlist": "TEXT",
"added_to_db": "INTEGER",
"is_present_in_spotify": "INTEGER DEFAULT 1",
"last_seen_in_spotify": "INTEGER",
"snapshot_id": "TEXT",
"final_path": "TEXT",
}
EXPECTED_WATCHED_ARTISTS_COLUMNS: dict[str, str] = {
"spotify_id": "TEXT PRIMARY KEY",
"name": "TEXT",
"link": "TEXT",
"total_albums_on_spotify": "INTEGER",
"last_checked": "INTEGER",
"added_at": "INTEGER",
"is_active": "INTEGER DEFAULT 1",
"genres": "TEXT",
"popularity": "INTEGER",
"image_url": "TEXT",
"spotify_id": "TEXT PRIMARY KEY",
"name": "TEXT",
"link": "TEXT",
"total_albums_on_spotify": "INTEGER",
"last_checked": "INTEGER",
"added_at": "INTEGER",
"is_active": "INTEGER DEFAULT 1",
"genres": "TEXT",
"popularity": "INTEGER",
"image_url": "TEXT",
}
EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
"album_spotify_id": "TEXT PRIMARY KEY",
"artist_spotify_id": "TEXT",
"name": "TEXT",
"album_group": "TEXT",
"album_type": "TEXT",
"release_date": "TEXT",
"release_date_precision": "TEXT",
"total_tracks": "INTEGER",
"link": "TEXT",
"image_url": "TEXT",
"added_to_db": "INTEGER",
"last_seen_on_spotify": "INTEGER",
"download_task_id": "TEXT",
"download_status": "INTEGER DEFAULT 0",
"is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
"album_spotify_id": "TEXT PRIMARY KEY",
"artist_spotify_id": "TEXT",
"name": "TEXT",
"album_group": "TEXT",
"album_type": "TEXT",
"release_date": "TEXT",
"release_date_precision": "TEXT",
"total_tracks": "INTEGER",
"link": "TEXT",
"image_url": "TEXT",
"added_to_db": "INTEGER",
"last_seen_on_spotify": "INTEGER",
"download_task_id": "TEXT",
"download_status": "INTEGER DEFAULT 0",
"is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
}
m306 = MigrationV3_0_6()
m310 = MigrationV3_1_0()
m311 = MigrationV3_1_1()
m312 = MigrationV3_1_2()
def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
try:
path.parent.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(str(path))
conn.row_factory = sqlite3.Row
return conn
except Exception as e:
logger.error(f"Failed to open SQLite DB {path}: {e}")
return None
try:
path.parent.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(str(path))
conn.row_factory = sqlite3.Row
return conn
except Exception as e:
logger.error(f"Failed to open SQLite DB {path}: {e}")
return None
def _ensure_table_schema(
conn: sqlite3.Connection,
table_name: str,
expected_columns: dict[str, str],
table_description: str,
conn: sqlite3.Connection,
table_name: str,
expected_columns: dict[str, str],
table_description: str,
) -> None:
try:
cur = conn.execute(f"PRAGMA table_info({table_name})")
existing_info = cur.fetchall()
existing_names = {row[1] for row in existing_info}
for col_name, col_type in expected_columns.items():
if col_name in existing_names:
continue
col_type_for_add = (
col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
)
try:
conn.execute(
f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
)
logger.info(
f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
)
except sqlite3.OperationalError as e:
logger.warning(
f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
)
except Exception as e:
logger.error(
f"Error ensuring schema for {table_description} table '{table_name}': {e}",
exc_info=True,
)
try:
cur = conn.execute(f"PRAGMA table_info({table_name})")
existing_info = cur.fetchall()
existing_names = {row[1] for row in existing_info}
for col_name, col_type in expected_columns.items():
if col_name in existing_names:
continue
col_type_for_add = (
col_type.replace("PRIMARY KEY", "")
.replace("AUTOINCREMENT", "")
.replace("NOT NULL", "")
.strip()
)
try:
conn.execute(
f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
)
logger.info(
f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
)
except sqlite3.OperationalError as e:
logger.warning(
f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
)
except Exception as e:
logger.error(
f"Error ensuring schema for {table_description} table '{table_name}': {e}",
exc_info=True,
)
def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
conn.execute(
f"""
conn.execute(
f"""
CREATE TABLE IF NOT EXISTS {table_name} (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL,
@@ -173,62 +178,73 @@ def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str)
metadata TEXT
)
"""
)
_ensure_table_schema(conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history")
)
_ensure_table_schema(
conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history"
)
def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
try:
try:
cur = conn.execute(
"SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
)
for row in cur.fetchall():
table_name = row[0]
if not table_name:
continue
_create_or_update_children_table(conn, table_name)
except sqlite3.Error as e:
logger.warning(f"Failed to scan referenced children tables from main history: {e}")
try:
try:
cur = conn.execute(
"SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
)
for row in cur.fetchall():
table_name = row[0]
if not table_name:
continue
_create_or_update_children_table(conn, table_name)
except sqlite3.Error as e:
logger.warning(
f"Failed to scan referenced children tables from main history: {e}"
)
try:
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
)
for row in cur.fetchall():
table_name = row[0]
_create_or_update_children_table(conn, table_name)
except sqlite3.Error as e:
logger.warning(f"Failed to scan legacy children tables in history DB: {e}")
logger.info("Children history tables migration ensured")
except Exception:
logger.error("Failed migrating children history tables", exc_info=True)
try:
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
)
for row in cur.fetchall():
table_name = row[0]
_create_or_update_children_table(conn, table_name)
except sqlite3.Error as e:
logger.warning(f"Failed to scan legacy children tables in history DB: {e}")
logger.info("Children history tables migration ensured")
except Exception:
logger.error("Failed migrating children history tables", exc_info=True)
def _ensure_creds_filesystem() -> None:
try:
BLOBS_DIR.mkdir(parents=True, exist_ok=True)
if not SEARCH_JSON.exists():
SEARCH_JSON.write_text('{ "client_id": "", "client_secret": "" }\n', encoding="utf-8")
logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
except Exception:
logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
try:
BLOBS_DIR.mkdir(parents=True, exist_ok=True)
if not SEARCH_JSON.exists():
SEARCH_JSON.write_text(
'{ "client_id": "", "client_secret": "" }\n', encoding="utf-8"
)
logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
except Exception:
logger.error(
"Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True
)
def _apply_versioned_updates(conn: sqlite3.Connection, c_base, u_base, post_update=None) -> None:
if not c_base(conn):
u_base(conn)
if post_update:
post_update(conn)
def _apply_versioned_updates(
conn: sqlite3.Connection, c_base, u_base, post_update=None
) -> None:
if not c_base(conn):
u_base(conn)
if post_update:
post_update(conn)
# --- 3.1.2 upgrade helpers for Watch DBs ---
def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
try:
# Ensure core watched_playlists table exists and has expected schema
conn.execute(
"""
try:
# Ensure core watched_playlists table exists and has expected schema
conn.execute(
"""
CREATE TABLE IF NOT EXISTS watched_playlists (
spotify_id TEXT PRIMARY KEY,
name TEXT,
@@ -242,15 +258,22 @@ def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
is_active INTEGER DEFAULT 1
)
"""
)
_ensure_table_schema(conn, "watched_playlists", EXPECTED_WATCHED_PLAYLISTS_COLUMNS, "watched playlists")
)
_ensure_table_schema(
conn,
"watched_playlists",
EXPECTED_WATCHED_PLAYLISTS_COLUMNS,
"watched playlists",
)
# Upgrade all dynamic playlist_ tables
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
for row in cur.fetchall():
table_name = row[0]
conn.execute(
f"""
# Upgrade all dynamic playlist_ tables
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'"
)
for row in cur.fetchall():
table_name = row[0]
conn.execute(
f"""
CREATE TABLE IF NOT EXISTS {table_name} (
spotify_track_id TEXT PRIMARY KEY,
title TEXT,
@@ -268,18 +291,25 @@ def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
final_path TEXT
)
"""
)
_ensure_table_schema(conn, table_name, EXPECTED_PLAYLIST_TRACKS_COLUMNS, f"playlist tracks ({table_name})")
logger.info("Upgraded watch playlists DB to 3.1.2 schema")
except Exception:
logger.error("Failed to upgrade watch playlists DB to 3.1.2 schema", exc_info=True)
)
_ensure_table_schema(
conn,
table_name,
EXPECTED_PLAYLIST_TRACKS_COLUMNS,
f"playlist tracks ({table_name})",
)
logger.info("Upgraded watch playlists DB to 3.1.2 schema")
except Exception:
logger.error(
"Failed to upgrade watch playlists DB to 3.1.2 schema", exc_info=True
)
def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
try:
# Ensure core watched_artists table exists and has expected schema
conn.execute(
"""
try:
# Ensure core watched_artists table exists and has expected schema
conn.execute(
"""
CREATE TABLE IF NOT EXISTS watched_artists (
spotify_id TEXT PRIMARY KEY,
name TEXT,
@@ -293,15 +323,19 @@ def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
image_url TEXT
)
"""
)
_ensure_table_schema(conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists")
)
_ensure_table_schema(
conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists"
)
# Upgrade all dynamic artist_ tables
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'")
for row in cur.fetchall():
table_name = row[0]
conn.execute(
f"""
# Upgrade all dynamic artist_ tables
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'"
)
for row in cur.fetchall():
table_name = row[0]
conn.execute(
f"""
CREATE TABLE IF NOT EXISTS {table_name} (
album_spotify_id TEXT PRIMARY KEY,
artist_spotify_id TEXT,
@@ -320,11 +354,18 @@ def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
)
"""
)
_ensure_table_schema(conn, table_name, EXPECTED_ARTIST_ALBUMS_COLUMNS, f"artist albums ({table_name})")
logger.info("Upgraded watch artists DB to 3.1.2 schema")
except Exception:
logger.error("Failed to upgrade watch artists DB to 3.1.2 schema", exc_info=True)
)
_ensure_table_schema(
conn,
table_name,
EXPECTED_ARTIST_ALBUMS_COLUMNS,
f"artist albums ({table_name})",
)
logger.info("Upgraded watch artists DB to 3.1.2 schema")
except Exception:
logger.error(
"Failed to upgrade watch artists DB to 3.1.2 schema", exc_info=True
)
def run_migrations_if_needed():
@@ -343,6 +384,7 @@ def run_migrations_if_needed():
post_update=_update_children_tables_for_history,
)
_apply_versioned_updates(conn, m311.check_history, m311.update_history)
_apply_versioned_updates(conn, m312.check_history, m312.update_history)
conn.commit()
# Watch playlists DB
@@ -358,6 +400,11 @@ def run_migrations_if_needed():
m311.check_watch_playlists,
m311.update_watch_playlists,
)
_apply_versioned_updates(
conn,
m312.check_watch_playlists,
m312.update_watch_playlists,
)
_update_watch_playlists_db(conn)
conn.commit()
@@ -374,18 +421,28 @@ def run_migrations_if_needed():
_apply_versioned_updates(
conn, m311.check_watch_artists, m311.update_watch_artists
)
_apply_versioned_updates(
conn, m312.check_watch_artists, m312.update_watch_artists
)
_update_watch_artists_db(conn)
conn.commit()
# Accounts DB
with _safe_connect(ACCOUNTS_DB) as conn:
if conn:
_apply_versioned_updates(conn, m306.check_accounts, m306.update_accounts)
_apply_versioned_updates(conn, m311.check_accounts, m311.update_accounts)
_apply_versioned_updates(
conn, m306.check_accounts, m306.update_accounts
)
_apply_versioned_updates(
conn, m311.check_accounts, m311.update_accounts
)
_apply_versioned_updates(
conn, m312.check_accounts, m312.update_accounts
)
conn.commit()
except Exception as e:
logger.error("Error during migration: %s", e, exc_info=True)
else:
_ensure_creds_filesystem()
logger.info("Database migrations check completed")
logger.info("Database migrations check completed")

103
routes/migrations/v3_1_2.py Normal file
View File

@@ -0,0 +1,103 @@
import sqlite3
import logging

logger = logging.getLogger(__name__)


class MigrationV3_1_2:
    """
    Migration for version 3.1.2.

    Ensure history children tables (album_*/playlist_*) include service and
    quality columns so per-track download metadata can be recorded.
    """

    # Columns added to every history children table in 3.1.2 (name -> SQL type).
    CHILDREN_EXTRA_COLUMNS: dict[str, str] = {
        "service": "TEXT",
        "quality_format": "TEXT",
        "quality_bitrate": "TEXT",
    }

    @staticmethod
    def _quote_ident(name: str) -> str:
        """Quote an SQLite identifier for safe interpolation into SQL text.

        Table names cannot be bound as parameters in SQLite, and some of the
        names handled here come from the ``children_table`` column of
        ``download_history``, so they must be quoted/escaped explicitly.
        """
        return '"' + name.replace('"', '""') + '"'

    def _table_columns(self, conn: sqlite3.Connection, table: str) -> set[str]:
        """Return the column names of *table*, or an empty set if it does not exist."""
        try:
            cur = conn.execute(f"PRAGMA table_info({self._quote_ident(table)})")
            return {row[1] for row in cur.fetchall()}
        except sqlite3.OperationalError:
            # Table missing or otherwise unreadable: treat as "no columns".
            return set()

    def _list_children_tables(self, conn: sqlite3.Connection) -> list[str]:
        """Collect every history children table (album_*/playlist_*).

        Scans both sqlite_master (tables that physically exist) and the
        ``children_table`` references stored in ``download_history``, since
        either source may know about tables the other does not.
        Returns a sorted, de-duplicated list of table names.
        """
        tables: set[str] = set()
        try:
            cur = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
            )
            for row in cur.fetchall():
                if row and row[0]:
                    tables.add(row[0])
        except sqlite3.Error as e:
            logger.warning(f"Failed to scan sqlite_master for children tables: {e}")
        try:
            cur = conn.execute(
                "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
            )
            for row in cur.fetchall():
                t = row[0]
                if t:
                    tables.add(t)
        except sqlite3.Error as e:
            logger.warning(f"Failed to scan download_history for children tables: {e}")
        return sorted(tables)

    def check_history(self, conn: sqlite3.Connection) -> bool:
        """Return True if the history DB already conforms to the 3.1.2 schema."""
        tables = self._list_children_tables(conn)
        if not tables:
            # Nothing to migrate
            return True
        # If any table is missing any of the extra columns, migration is needed
        for t in tables:
            cols = self._table_columns(conn, t)
            if not set(self.CHILDREN_EXTRA_COLUMNS.keys()).issubset(cols):
                return False
        return True

    def update_history(self, conn: sqlite3.Connection) -> None:
        """Add the missing extra columns to every children table (idempotent)."""
        tables = self._list_children_tables(conn)
        for t in tables:
            existing = self._table_columns(conn, t)
            for col_name, col_type in self.CHILDREN_EXTRA_COLUMNS.items():
                if col_name in existing:
                    continue
                try:
                    conn.execute(
                        f"ALTER TABLE {self._quote_ident(t)} ADD COLUMN {col_name} {col_type}"
                    )
                    logger.info(
                        f"Added column '{col_name} {col_type}' to history children table '{t}'."
                    )
                except sqlite3.OperationalError as e:
                    # Best-effort: log and continue with the remaining columns/tables.
                    logger.warning(
                        f"Could not add column '{col_name}' to history children table '{t}': {e}"
                    )

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        # No changes for watch artists in 3.1.2
        return True

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        # No-op
        pass

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        # No changes for watch playlists in 3.1.2
        return True

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        # No-op
        pass

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        # No changes for accounts in 3.1.2
        return True

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        # No-op
        pass

View File

@@ -72,6 +72,28 @@ def validate_config(config_data: dict, watch_config: dict = None) -> tuple[bool,
if watch_config is None:
watch_config = get_watch_config_http()
# Ensure realTimeMultiplier is a valid integer in range 0..10 if provided
if "realTimeMultiplier" in config_data or "real_time_multiplier" in config_data:
key = (
"realTimeMultiplier"
if "realTimeMultiplier" in config_data
else "real_time_multiplier"
)
val = config_data.get(key)
if isinstance(val, bool):
return False, "realTimeMultiplier must be an integer between 0 and 10."
try:
ival = int(val)
except Exception:
return False, "realTimeMultiplier must be an integer between 0 and 10."
if ival < 0 or ival > 10:
return False, "realTimeMultiplier must be between 0 and 10."
# Normalize to camelCase in the working dict so save_config writes it
if key == "real_time_multiplier":
config_data["realTimeMultiplier"] = ival
else:
config_data["realTimeMultiplier"] = ival
# Check if fallback is enabled but missing required accounts
if config_data.get("fallback", False):
has_spotify = has_credentials("spotify")
@@ -169,6 +191,7 @@ def _migrate_legacy_keys_inplace(cfg: dict) -> bool:
"artist_separator": "artistSeparator",
"recursive_quality": "recursiveQuality",
"spotify_metadata": "spotifyMetadata",
"real_time_multiplier": "realTimeMultiplier",
}
modified = False
for legacy, camel in legacy_map.items():

View File

@@ -31,6 +31,8 @@ def download_album(
recursive_quality=True,
spotify_metadata=True,
_is_celery_task_execution=False, # Added to skip duplicate check from Celery task
real_time_multiplier=None,
pad_number_width=None,
):
if not _is_celery_task_execution:
existing_task = get_existing_task_id(
@@ -116,6 +118,7 @@ def download_album(
bitrate=bitrate,
artist_separator=artist_separator,
spotify_metadata=spotify_metadata,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: album.py - Album download via Deezer (account: {fallback}) successful for Spotify URL."
@@ -173,6 +176,8 @@ def download_album(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: album.py - Spotify direct download (account: {main} for blob) successful."
@@ -228,6 +233,8 @@ def download_album(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: album.py - Direct Spotify download (account: {main} for blob) successful."
@@ -268,6 +275,7 @@ def download_album(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: album.py - Direct Deezer download (account: {main}) successful."

View File

@@ -87,7 +87,7 @@ def get_artist_discography(
def download_artist_albums(
url, album_type="album,single,compilation", request_args=None
url, album_type="album,single,compilation", request_args=None, username=None
):
"""
Download albums by an artist, filtered by album types.
@@ -97,6 +97,7 @@ def download_artist_albums(
album_type (str): Comma-separated list of album types to download
(album, single, compilation, appears_on)
request_args (dict): Original request arguments for tracking
username (str | None): Username initiating the request, used for per-user separation
Returns:
tuple: (list of successfully queued albums, list of duplicate albums)
@@ -160,11 +161,15 @@ def download_artist_albums(
album_name = album.get("name", "Unknown Album")
album_artists = album.get("artists", [])
album_artist = (
album_artists[0].get("name", "Unknown Artist") if album_artists else "Unknown Artist"
album_artists[0].get("name", "Unknown Artist")
if album_artists
else "Unknown Artist"
)
if not album_url:
logger.warning(f"Skipping album '{album_name}' because it has no Spotify URL.")
logger.warning(
f"Skipping album '{album_name}' because it has no Spotify URL."
)
continue
task_data = {
@@ -174,6 +179,8 @@ def download_artist_albums(
"artist": album_artist,
"orig_request": request_args,
}
if username:
task_data["username"] = username
try:
task_id = download_queue_manager.add_task(task_data)
@@ -199,7 +206,9 @@ def download_artist_albums(
}
)
except Exception as e:
logger.error(f"Failed to queue album {album_name} for an unknown reason: {e}")
logger.error(
f"Failed to queue album {album_name} for an unknown reason: {e}"
)
logger.info(
f"Artist album processing: {len(successfully_queued_albums)} queued, {len(duplicate_albums)} duplicates found."

View File

@@ -49,6 +49,8 @@ DEFAULT_MAIN_CONFIG = {
"spotifyMetadata": True,
"separateTracksByUser": False,
"watch": {},
"realTimeMultiplier": 0,
"padNumberWidth": 3,
}
@@ -63,6 +65,8 @@ def _migrate_legacy_keys(cfg: dict) -> tuple[dict, bool]:
"artist_separator": "artistSeparator",
"recursive_quality": "recursiveQuality",
"spotify_metadata": "spotifyMetadata",
"real_time_multiplier": "realTimeMultiplier",
"pad_number_width": "padNumberWidth",
}
for legacy, camel in legacy_map.items():
if legacy in out and camel not in out:

View File

@@ -72,6 +72,9 @@ def get_config_params():
),
"separateTracksByUser": config.get("separateTracksByUser", False),
"watch": config.get("watch", {}),
"realTimeMultiplier": config.get(
"realTimeMultiplier", config.get("real_time_multiplier", 0)
),
}
except Exception as e:
logger.error(f"Error reading config for parameters: {e}")
@@ -96,6 +99,7 @@ def get_config_params():
"recursiveQuality": False,
"separateTracksByUser": False,
"watch": {},
"realTimeMultiplier": 0,
}
@@ -363,7 +367,7 @@ class CeleryDownloadQueueManager:
original_request = task.get(
"orig_request", task.get("original_request", {})
)
# Get username for user-specific paths
username = task.get("username", "")
@@ -389,9 +393,11 @@ class CeleryDownloadQueueManager:
original_request.get("real_time"), config_params["realTime"]
),
"custom_dir_format": self._get_user_specific_dir_format(
original_request.get("custom_dir_format", config_params["customDirFormat"]),
original_request.get(
"custom_dir_format", config_params["customDirFormat"]
),
config_params.get("separateTracksByUser", False),
username
username,
),
"custom_track_format": original_request.get(
"custom_track_format", config_params["customTrackFormat"]
@@ -419,6 +425,9 @@ class CeleryDownloadQueueManager:
"retry_count": 0,
"original_request": original_request,
"created_at": time.time(),
"real_time_multiplier": original_request.get(
"real_time_multiplier", config_params.get("realTimeMultiplier", 0)
),
}
# If from_watch_job is True, ensure track_details_for_db is passed through
@@ -497,12 +506,12 @@ class CeleryDownloadQueueManager:
def _get_user_specific_dir_format(self, base_format, separate_by_user, username):
"""
Modify the directory format to include username if separateTracksByUser is enabled
Args:
base_format (str): The base directory format from config
separate_by_user (bool): Whether to separate tracks by user
username (str): The username to include in path
Returns:
str: The modified directory format
"""

View File

@@ -550,6 +550,9 @@ def retry_task(task_id):
task_info["pad_tracks"] = task_info.get(
"pad_tracks", config_params.get("tracknum_padding", True)
)
task_info["pad_number_width"] = task_info.get(
"pad_number_width", config_params.get("padNumberWidth", 3)
)
# Store the updated task info
store_task_info(new_task_id, task_info)
@@ -1626,6 +1629,12 @@ def download_track(self, **task_data):
spotify_metadata = task_data.get(
"spotify_metadata", config_params.get("spotifyMetadata", True)
)
real_time_multiplier = task_data.get(
"real_time_multiplier", config_params.get("realTimeMultiplier", 0)
)
pad_number_width = task_data.get(
"pad_number_width", config_params.get("padNumberWidth", 3)
)
# Execute the download - service is now determined from URL
download_track_func(
@@ -1646,6 +1655,8 @@ def download_track(self, **task_data):
artist_separator=artist_separator,
spotify_metadata=spotify_metadata,
_is_celery_task_execution=True, # Skip duplicate check inside Celery task (consistency)
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
return {"status": "success", "message": "Track download completed"}
@@ -1725,6 +1736,12 @@ def download_album(self, **task_data):
spotify_metadata = task_data.get(
"spotify_metadata", config_params.get("spotifyMetadata", True)
)
real_time_multiplier = task_data.get(
"real_time_multiplier", config_params.get("realTimeMultiplier", 0)
)
pad_number_width = task_data.get(
"pad_number_width", config_params.get("padNumberWidth", 3)
)
# Execute the download - service is now determined from URL
download_album_func(
@@ -1745,6 +1762,8 @@ def download_album(self, **task_data):
artist_separator=artist_separator,
spotify_metadata=spotify_metadata,
_is_celery_task_execution=True, # Skip duplicate check inside Celery task
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
return {"status": "success", "message": "Album download completed"}
@@ -1833,6 +1852,12 @@ def download_playlist(self, **task_data):
"retry_delay_increase", config_params.get("retryDelayIncrease", 5)
)
max_retries = task_data.get("max_retries", config_params.get("maxRetries", 3))
real_time_multiplier = task_data.get(
"real_time_multiplier", config_params.get("realTimeMultiplier", 0)
)
pad_number_width = task_data.get(
"pad_number_width", config_params.get("padNumberWidth", 3)
)
# Execute the download - service is now determined from URL
download_playlist_func(
@@ -1856,6 +1881,8 @@ def download_playlist(self, **task_data):
artist_separator=artist_separator,
spotify_metadata=spotify_metadata,
_is_celery_task_execution=True, # Skip duplicate check inside Celery task
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
return {"status": "success", "message": "Playlist download completed"}

View File

@@ -239,7 +239,7 @@ def get_spotify_info(
Args:
spotify_id: The Spotify ID of the entity
spotify_type: The type of entity (track, album, playlist, artist, artist_discography, episode)
spotify_type: The type of entity (track, album, playlist, artist, artist_discography, episode, album_tracks)
limit (int, optional): The maximum number of items to return. Used for pagination.
offset (int, optional): The index of the first item to return. Used for pagination.
@@ -255,6 +255,12 @@ def get_spotify_info(
elif spotify_type == "album":
return client.album(spotify_id)
elif spotify_type == "album_tracks":
# Fetch album's tracks with pagination support
return client.album_tracks(
spotify_id, limit=limit or 20, offset=offset or 0
)
elif spotify_type == "playlist":
# Use optimized playlist fetching
return get_playlist_full(spotify_id)
@@ -269,7 +275,10 @@ def get_spotify_info(
elif spotify_type == "artist_discography":
# Get artist's albums with pagination
albums = client.artist_albums(
spotify_id, limit=limit or 20, offset=offset or 0
spotify_id,
limit=limit or 20,
offset=offset or 0,
include_groups="single,album,appears_on",
)
return albums

File diff suppressed because it is too large Load Diff

View File

@@ -28,6 +28,8 @@ def download_playlist(
recursive_quality=True,
spotify_metadata=True,
_is_celery_task_execution=False, # Added to skip duplicate check from Celery task
real_time_multiplier=None,
pad_number_width=None,
):
if not _is_celery_task_execution:
existing_task = get_existing_task_id(
@@ -113,6 +115,7 @@ def download_playlist(
bitrate=bitrate,
artist_separator=artist_separator,
spotify_metadata=spotify_metadata,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: playlist.py - Playlist download via Deezer (account: {fallback}) successful for Spotify URL."
@@ -175,6 +178,8 @@ def download_playlist(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: playlist.py - Spotify direct download (account: {main} for blob) successful."
@@ -236,6 +241,8 @@ def download_playlist(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: playlist.py - Direct Spotify download (account: {main} for blob) successful."
@@ -276,6 +283,7 @@ def download_playlist(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: playlist.py - Direct Deezer download (account: {main}) successful."

View File

@@ -29,6 +29,8 @@ def download_track(
recursive_quality=False,
spotify_metadata=True,
_is_celery_task_execution=False, # Added for consistency, not currently used for duplicate check
real_time_multiplier=None,
pad_number_width=None,
):
try:
# Detect URL source (Spotify or Deezer) from URL
@@ -107,6 +109,7 @@ def download_track(
bitrate=bitrate,
artist_separator=artist_separator,
spotify_metadata=spotify_metadata,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: track.py - Track download via Deezer (account: {fallback}) successful for Spotify URL."
@@ -166,6 +169,8 @@ def download_track(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: track.py - Spotify direct download (account: {main} for blob) successful."
@@ -222,6 +227,8 @@ def download_track(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: track.py - Direct Spotify download (account: {main} for blob) successful."
@@ -261,6 +268,7 @@ def download_track(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
pad_number_width=pad_number_width,
)
print(
f"DEBUG: track.py - Direct Deezer download (account: {main}) successful."

View File

@@ -30,6 +30,9 @@ from routes.utils.get_info import (
) # To fetch playlist, track, artist, and album details
from routes.utils.celery_queue_manager import download_queue_manager
# Added import to fetch base formatting config
from routes.utils.celery_queue_manager import get_config_params
logger = logging.getLogger(__name__)
MAIN_CONFIG_FILE_PATH = Path("./data/config/main.json")
WATCH_OLD_FILE_PATH = Path("./data/config/watch.json")
@@ -153,6 +156,38 @@ def construct_spotify_url(item_id, item_type="track"):
return f"https://open.spotify.com/{item_type}/{item_id}"
# Helper to replace playlist placeholders in custom formats per-track
def _apply_playlist_placeholders(
base_dir_fmt: str,
base_track_fmt: str,
playlist_name: str,
playlist_position_one_based: int,
total_tracks_in_playlist: int,
pad_tracks: bool,
) -> tuple[str, str]:
try:
width = max(2, len(str(total_tracks_in_playlist))) if pad_tracks else 0
if (
pad_tracks
and playlist_position_one_based is not None
and playlist_position_one_based > 0
):
playlist_num_str = str(playlist_position_one_based).zfill(width)
else:
playlist_num_str = (
str(playlist_position_one_based) if playlist_position_one_based else ""
)
dir_fmt = base_dir_fmt.replace("%playlist%", playlist_name)
track_fmt = base_track_fmt.replace("%playlist%", playlist_name).replace(
"%playlistnum%", playlist_num_str
)
return dir_fmt, track_fmt
except Exception:
# On any error, return originals
return base_dir_fmt, base_track_fmt
def has_playlist_changed(playlist_spotify_id: str, current_snapshot_id: str) -> bool:
"""
Check if a playlist has changed by comparing snapshot_id.
@@ -320,6 +355,11 @@ def check_watched_playlists(specific_playlist_id: str = None):
)
config = get_watch_config()
use_snapshot_checking = config.get("useSnapshotIdChecking", True)
# Fetch base formatting configuration once for this run
formatting_cfg = get_config_params()
base_dir_fmt = formatting_cfg.get("customDirFormat", "%ar_album%/%album%")
base_track_fmt = formatting_cfg.get("customTrackFormat", "%tracknum%. %music%")
pad_tracks = formatting_cfg.get("tracknumPadding", True)
if specific_playlist_id:
playlist_obj = get_watched_playlist(specific_playlist_id)
@@ -483,12 +523,17 @@ def check_watched_playlists(specific_playlist_id: str = None):
current_api_track_ids = set()
api_track_id_to_item_map = {}
for item in all_api_track_items: # Use all_api_track_items
api_track_position_map: dict[str, int] = {}
# Build maps for quick lookup and position within the playlist (1-based)
for idx, item in enumerate(
all_api_track_items, start=1
): # Use overall playlist index for numbering
track = item.get("track")
if track and track.get("id") and not track.get("is_local"):
track_id = track["id"]
current_api_track_ids.add(track_id)
api_track_id_to_item_map[track_id] = item
api_track_position_map[track_id] = idx
db_track_ids = get_playlist_track_ids_from_db(playlist_spotify_id)
@@ -507,6 +552,19 @@ def check_watched_playlists(specific_playlist_id: str = None):
continue
track_to_queue = api_item["track"]
# Compute per-track formatting overrides for playlist placeholders
position_in_playlist = api_track_position_map.get(track_id)
custom_dir_format, custom_track_format = (
_apply_playlist_placeholders(
base_dir_fmt,
base_track_fmt,
playlist_name,
position_in_playlist if position_in_playlist else 0,
api_total_tracks,
pad_tracks,
)
)
task_payload = {
"download_type": "track",
"url": construct_spotify_url(track_id, "track"),
@@ -525,7 +583,9 @@ def check_watched_playlists(specific_playlist_id: str = None):
"track_spotify_id": track_id,
"track_item_for_db": api_item, # Pass full API item for DB update on completion
},
# "track_details_for_db" was old name, using track_item_for_db consistent with celery_tasks
# Override formats so %playlist% and %playlistnum% resolve now per track
"custom_dir_format": custom_dir_format,
"custom_track_format": custom_track_format,
}
try:
task_id_or_none = download_queue_manager.add_task(

View File

@@ -1,7 +1,7 @@
{
"name": "spotizerr-ui",
"private": true,
"version": "3.1.2",
"version": "3.2.0",
"type": "module",
"scripts": {
"dev": "vite",

View File

@@ -23,6 +23,7 @@ interface DownloadSettings {
spotifyQuality: "NORMAL" | "HIGH" | "VERY_HIGH";
recursiveQuality: boolean; // frontend field (sent as camelCase to backend)
separateTracksByUser: boolean;
realTimeMultiplier: number;
}
interface WatchConfig {
@@ -71,6 +72,7 @@ const fetchCredentials = async (service: "spotify" | "deezer"): Promise<Credenti
export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
const queryClient = useQueryClient();
const [validationError, setValidationError] = useState<string>("");
const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle");
// Fetch watch config
const { data: watchConfig } = useQuery({
@@ -96,10 +98,14 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
mutationFn: saveDownloadConfig,
onSuccess: () => {
toast.success("Download settings saved successfully!");
setSaveStatus("success");
setTimeout(() => setSaveStatus("idle"), 3000);
queryClient.invalidateQueries({ queryKey: ["config"] });
},
onError: (error) => {
toast.error(`Failed to save settings: ${error.message}`);
setSaveStatus("error");
setTimeout(() => setSaveStatus("idle"), 3000);
},
});
@@ -150,7 +156,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
const missingServices: string[] = [];
if (!spotifyCredentials?.length) missingServices.push("Spotify");
if (!deezerCredentials?.length) missingServices.push("Deezer");
const error = `Download Fallback requires accounts to be configured for both services. Missing: ${missingServices.join(", ")}. Configure accounts in the Accounts tab.`;
const error = `Download Fallback requires accounts to be configured for both Spotify and Deezer. Missing: ${missingServices.join(", ")}. Configure accounts in the Accounts tab.`;
setValidationError(error);
toast.error("Validation failed: " + error);
return;
@@ -162,6 +168,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
maxRetries: Number(data.maxRetries),
retryDelaySeconds: Number(data.retryDelaySeconds),
retryDelayIncrease: Number(data.retryDelayIncrease),
realTimeMultiplier: Number(data.realTimeMultiplier ?? 0),
});
};
@@ -171,6 +178,24 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
return (
<form onSubmit={handleSubmit(onSubmit)} className="space-y-8">
<div className="flex items-center justify-end mb-4">
<div className="flex items-center gap-3">
{saveStatus === "success" && (
<span className="text-success text-sm">Saved</span>
)}
{saveStatus === "error" && (
<span className="text-error text-sm">Save failed</span>
)}
<button
type="submit"
disabled={mutation.isPending || !!validationError}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Download Settings"}
</button>
</div>
</div>
{/* Download Settings */}
<div className="space-y-4">
<h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Download Behavior</h3>
@@ -188,6 +213,26 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
<label htmlFor="realTimeToggle" className="text-content-primary dark:text-content-primary-dark">Real-time downloading</label>
<input id="realTimeToggle" type="checkbox" {...register("realTime")} className="h-6 w-6 rounded" />
</div>
{/* Real-time Multiplier (Spotify only) */}
<div className="flex flex-col gap-2">
<div className="flex items-center justify-between">
<label htmlFor="realTimeMultiplier" className="text-content-primary dark:text-content-primary-dark">Real-time speed multiplier (Spotify)</label>
<span className="text-xs text-content-secondary dark:text-content-secondary-dark">010</span>
</div>
<input
id="realTimeMultiplier"
type="number"
min={0}
max={10}
step={1}
{...register("realTimeMultiplier")}
disabled={!realTime}
className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus disabled:opacity-50"
/>
<p className="text-xs text-content-muted dark:text-content-muted-dark">
Controls how fast Spotify real-time downloads go. Only affects Spotify downloads; ignored for Deezer.
</p>
</div>
<div className="flex items-center justify-between">
<label htmlFor="fallbackToggle" className="text-content-primary dark:text-content-primary-dark">Download Fallback</label>
<input id="fallbackToggle" type="checkbox" {...register("fallback")} className="h-6 w-6 rounded" />
@@ -338,14 +383,6 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
/>
</div>
</div>
<button
type="submit"
disabled={mutation.isPending || !!validationError}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Download Settings"}
</button>
</form>
);
}

View File

@@ -1,4 +1,4 @@
import { useRef } from "react";
import { useRef, useState } from "react";
import { useForm, type SubmitHandler } from "react-hook-form";
import { authApiClient } from "../../lib/api-client";
import { toast } from "sonner";
@@ -16,6 +16,7 @@ interface FormattingSettings {
compilation: string;
artistSeparator: string;
spotifyMetadata: boolean;
padNumberWidth?: number | "auto";
}
interface FormattingTabProps {
@@ -79,20 +80,28 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
const queryClient = useQueryClient();
const dirInputRef = useRef<HTMLInputElement | null>(null);
const trackInputRef = useRef<HTMLInputElement | null>(null);
const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle");
const mutation = useMutation({
mutationFn: saveFormattingConfig,
onSuccess: () => {
toast.success("Formatting settings saved!");
setSaveStatus("success");
setTimeout(() => setSaveStatus("idle"), 3000);
queryClient.invalidateQueries({ queryKey: ["config"] });
},
onError: (error) => {
toast.error(`Failed to save settings: ${error.message}`);
setSaveStatus("error");
setTimeout(() => setSaveStatus("idle"), 3000);
},
});
const { register, handleSubmit, setValue } = useForm<FormattingSettings>({
values: config,
values: {
...config,
padNumberWidth: config.padNumberWidth ?? 3,
},
});
// Correctly register the refs for react-hook-form while also holding a local ref.
@@ -120,6 +129,24 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
return (
<form onSubmit={handleSubmit(onSubmit)} className="space-y-8">
<div className="flex items-center justify-end mb-4">
<div className="flex items-center gap-3">
{saveStatus === "success" && (
<span className="text-success text-sm">Saved</span>
)}
{saveStatus === "error" && (
<span className="text-error text-sm">Save failed</span>
)}
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Formatting Settings"}
</button>
</div>
</div>
<div className="space-y-4">
<h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">File Naming</h3>
<div className="flex flex-col gap-2">
@@ -165,6 +192,27 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
className="h-6 w-6 rounded"
/>
</div>
<div className="flex items-center justify-between gap-4">
<label htmlFor="padNumberWidth" className="text-content-primary dark:text-content-primary-dark">Track Number Padding Width</label>
<input
id="padNumberWidth"
type="text"
placeholder="3 or auto"
{...register("padNumberWidth", {
setValueAs: (v) => {
if (typeof v !== "string") return v;
const trimmed = v.trim().toLowerCase();
if (trimmed === "auto") return "auto" as const;
const parsed = parseInt(trimmed, 10);
return Number.isNaN(parsed) ? 3 : parsed;
},
})}
className="block w-40 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus text-sm"
/>
</div>
<p className="text-xs text-content-muted dark:text-content-muted-dark">
"01. Track" if set to 2, "001. Track" if set to 3...
</p>
<div className="flex items-center justify-between">
<label htmlFor="artistSeparator" className="text-content-primary dark:text-content-primary-dark">Artist Separator</label>
<input
@@ -185,14 +233,6 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
<input id="spotifyMetadataToggle" type="checkbox" {...register("spotifyMetadata")} className="h-6 w-6 rounded" />
</div>
</div>
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Formatting Settings"}
</button>
</form>
);
}

View File

@@ -3,7 +3,7 @@ import { authApiClient } from "../../lib/api-client";
import { toast } from "sonner";
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { useSettings } from "../../contexts/settings-context";
import { useEffect } from "react";
import { useEffect, useState } from "react";
// --- Type Definitions ---
interface Credential {
@@ -56,13 +56,21 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro
}
}, [config, reset]);
const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle");
const mutation = useMutation({
mutationFn: saveGeneralConfig,
onSuccess: () => {
toast.success("General settings saved!");
setSaveStatus("success");
setTimeout(() => setSaveStatus("idle"), 3000);
queryClient.invalidateQueries({ queryKey: ["config"] });
},
onError: (e: Error) => toast.error(`Failed to save: ${e.message}`),
onError: (e: Error) => {
toast.error(`Failed to save: ${e.message}`);
setSaveStatus("error");
setTimeout(() => setSaveStatus("idle"), 3000);
},
});
const onSubmit: SubmitHandler<GeneralSettings> = (data) => {
@@ -74,6 +82,24 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro
return (
<form onSubmit={handleSubmit(onSubmit)} className="space-y-8">
<div className="flex items-center justify-end mb-4">
<div className="flex items-center gap-3">
{saveStatus === "success" && (
<span className="text-success text-sm">Saved</span>
)}
{saveStatus === "error" && (
<span className="text-error text-sm">Save failed</span>
)}
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save General Settings"}
</button>
</div>
</div>
<div className="space-y-4">
<h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Service Defaults</h3>
<div className="flex flex-col gap-2">
@@ -140,14 +166,6 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro
The explicit content filter is controlled by an environment variable and cannot be changed here.
</p>
</div>
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save General Settings"}
</button>
</form>
);
}

View File

@@ -1,4 +1,4 @@
import { useEffect } from "react";
import { useEffect, useState } from "react";
import { useForm, Controller } from "react-hook-form";
import { authApiClient } from "../../lib/api-client";
import { toast } from "sonner";
@@ -46,14 +46,21 @@ function SpotifyApiForm() {
const queryClient = useQueryClient();
const { data, isLoading } = useQuery({ queryKey: ["spotifyApiConfig"], queryFn: fetchSpotifyApiConfig });
const { register, handleSubmit, reset } = useForm<SpotifyApiSettings>();
const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle");
const mutation = useMutation({
mutationFn: saveSpotifyApiConfig,
onSuccess: () => {
toast.success("Spotify API settings saved!");
setSaveStatus("success");
setTimeout(() => setSaveStatus("idle"), 3000);
queryClient.invalidateQueries({ queryKey: ["spotifyApiConfig"] });
},
onError: (e) => toast.error(`Failed to save: ${e.message}`),
onError: (e) => {
toast.error(`Failed to save: ${e.message}`);
setSaveStatus("error");
setTimeout(() => setSaveStatus("idle"), 3000);
},
});
useEffect(() => {
@@ -66,6 +73,24 @@ function SpotifyApiForm() {
return (
<form onSubmit={handleSubmit(onSubmit)} className="space-y-4">
<div className="flex items-center justify-end mb-2">
<div className="flex items-center gap-3">
{saveStatus === "success" && (
<span className="text-success text-sm">Saved</span>
)}
{saveStatus === "error" && (
<span className="text-error text-sm">Save failed</span>
)}
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Spotify API"}
</button>
</div>
</div>
<div className="flex flex-col gap-2">
<label htmlFor="client_id" className="text-content-primary dark:text-content-primary-dark">Client ID</label>
<input
@@ -86,13 +111,6 @@ function SpotifyApiForm() {
placeholder="Optional"
/>
</div>
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Spotify API"}
</button>
</form>
);
}
@@ -102,14 +120,21 @@ function WebhookForm() {
const { data, isLoading } = useQuery({ queryKey: ["webhookConfig"], queryFn: fetchWebhookConfig });
const { register, handleSubmit, control, reset, watch } = useForm<WebhookSettings>();
const currentUrl = watch("url");
const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle");
const mutation = useMutation({
mutationFn: saveWebhookConfig,
onSuccess: () => {
// No toast needed since the function shows one
setSaveStatus("success");
setTimeout(() => setSaveStatus("idle"), 3000);
queryClient.invalidateQueries({ queryKey: ["webhookConfig"] });
},
onError: (e) => toast.error(`Failed to save: ${e.message}`),
onError: (e) => {
toast.error(`Failed to save: ${e.message}`);
setSaveStatus("error");
setTimeout(() => setSaveStatus("idle"), 3000);
},
});
const testMutation = useMutation({
@@ -130,6 +155,24 @@ function WebhookForm() {
return (
<form onSubmit={handleSubmit(onSubmit)} className="space-y-6">
<div className="flex items-center justify-end mb-2">
<div className="flex items-center gap-3">
{saveStatus === "success" && (
<span className="text-success text-sm">Saved</span>
)}
{saveStatus === "error" && (
<span className="text-error text-sm">Save failed</span>
)}
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Webhook"}
</button>
</div>
</div>
<div className="flex flex-col gap-2">
<label htmlFor="webhookUrl" className="text-content-primary dark:text-content-primary-dark">Webhook URL</label>
<input
@@ -168,13 +211,6 @@ function WebhookForm() {
</div>
</div>
<div className="flex gap-2">
<button
type="submit"
disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Webhook"}
</button>
<button
type="button"
onClick={() => testMutation.mutate(currentUrl)}

View File

@@ -57,6 +57,7 @@ const saveWatchConfig = async (data: Partial<WatchSettings>) => {
export function WatchTab() {
const queryClient = useQueryClient();
const [validationError, setValidationError] = useState<string>("");
const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle");
const { data: config, isLoading } = useQuery({
queryKey: ["watchConfig"],
@@ -87,10 +88,14 @@ export function WatchTab() {
mutationFn: saveWatchConfig,
onSuccess: () => {
toast.success("Watch settings saved successfully!");
setSaveStatus("success");
setTimeout(() => setSaveStatus("idle"), 3000);
queryClient.invalidateQueries({ queryKey: ["watchConfig"] });
},
onError: (error) => {
toast.error(`Failed to save settings: ${error.message}`);
setSaveStatus("error");
setTimeout(() => setSaveStatus("idle"), 3000);
},
});
@@ -155,6 +160,24 @@ export function WatchTab() {
return (
<form onSubmit={handleSubmit(onSubmit)} className="space-y-8">
<div className="flex items-center justify-end mb-4">
<div className="flex items-center gap-3">
{saveStatus === "success" && (
<span className="text-success text-sm">Saved</span>
)}
{saveStatus === "error" && (
<span className="text-error text-sm">Save failed</span>
)}
<button
type="submit"
disabled={mutation.isPending || !!validationError}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Watch Settings"}
</button>
</div>
</div>
<div className="space-y-4">
<h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Watchlist Behavior</h3>
<div className="flex items-center justify-between">
@@ -234,14 +257,6 @@ export function WatchTab() {
))}
</div>
</div>
<button
type="submit"
disabled={mutation.isPending || !!validationError}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
>
{mutation.isPending ? "Saving..." : "Save Watch Settings"}
</button>
</form>
);
}

View File

@@ -62,6 +62,7 @@ export type FlatAppSettings = {
compilation: string;
artistSeparator: string;
spotifyMetadata: boolean;
realTimeMultiplier: number;
};
const defaultSettings: FlatAppSettings = {
@@ -102,6 +103,7 @@ const defaultSettings: FlatAppSettings = {
watch: {
enabled: false,
},
realTimeMultiplier: 0,
};
interface FetchedCamelCaseSettings {
@@ -129,6 +131,7 @@ const fetchSettings = async (): Promise<FlatAppSettings> => {
...(camelData as unknown as FlatAppSettings),
// Ensure required frontend-only fields exist
recursiveQuality: Boolean((camelData as any).recursiveQuality ?? false),
realTimeMultiplier: Number((camelData as any).realTimeMultiplier ?? 0),
};
return withDefaults;

View File

@@ -40,6 +40,7 @@ export interface AppSettings {
// Add other watch properties from the old type if they still exist in the API response
};
// Add other root-level properties from the API if they exist
realTimeMultiplier: number;
}
export interface SettingsContextType {

View File

@@ -1,5 +1,5 @@
import { Link, useParams } from "@tanstack/react-router";
import { useEffect, useState, useContext } from "react";
import { useEffect, useState, useContext, useRef, useCallback } from "react";
import apiClient from "../lib/api-client";
import { QueueContext } from "../contexts/queue-context";
import { useSettings } from "../contexts/settings-context";
@@ -10,31 +10,91 @@ import { FaArrowLeft } from "react-icons/fa";
export const Album = () => {
const { albumId } = useParams({ from: "/album/$albumId" });
const [album, setAlbum] = useState<AlbumType | null>(null);
const [tracks, setTracks] = useState<TrackType[]>([]);
const [offset, setOffset] = useState<number>(0);
const [isLoading, setIsLoading] = useState<boolean>(false);
const [isLoadingMore, setIsLoadingMore] = useState<boolean>(false);
const [error, setError] = useState<string | null>(null);
const context = useContext(QueueContext);
const { settings } = useSettings();
const loadMoreRef = useRef<HTMLDivElement | null>(null);
const PAGE_SIZE = 50;
if (!context) {
throw new Error("useQueue must be used within a QueueProvider");
}
const { addItem } = context;
const totalTracks = album?.total_tracks ?? 0;
const hasMore = tracks.length < totalTracks;
// Initial load
useEffect(() => {
const fetchAlbum = async () => {
if (!albumId) return;
setIsLoading(true);
setError(null);
try {
const response = await apiClient.get(`/album/info?id=${albumId}`);
setAlbum(response.data);
const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=0`);
const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data;
setAlbum(data);
setTracks(data.tracks.items || []);
setOffset((data.tracks.items || []).length);
} catch (err) {
setError("Failed to load album");
console.error("Error fetching album:", err);
} finally {
setIsLoading(false);
}
};
// reset state when albumId changes
setAlbum(null);
setTracks([]);
setOffset(0);
if (albumId) {
fetchAlbum();
}
}, [albumId]);
const loadMore = useCallback(async () => {
if (!albumId || isLoadingMore || !hasMore) return;
setIsLoadingMore(true);
try {
const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=${offset}`);
const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data;
const newItems = data.tracks.items || [];
setTracks((prev) => [...prev, ...newItems]);
setOffset((prev) => prev + newItems.length);
} catch (err) {
console.error("Error fetching more tracks:", err);
} finally {
setIsLoadingMore(false);
}
}, [albumId, offset, isLoadingMore, hasMore]);
// IntersectionObserver to trigger loadMore
useEffect(() => {
if (!loadMoreRef.current) return;
const sentinel = loadMoreRef.current;
const observer = new IntersectionObserver(
(entries) => {
const first = entries[0];
if (first.isIntersecting) {
loadMore();
}
},
{ root: null, rootMargin: "200px", threshold: 0.1 }
);
observer.observe(sentinel);
return () => {
observer.unobserve(sentinel);
observer.disconnect();
};
}, [loadMore]);
const handleDownloadTrack = (track: TrackType) => {
if (!track.id) return;
toast.info(`Adding ${track.name} to queue...`);
@@ -51,7 +111,7 @@ export const Album = () => {
return <div className="text-red-500">{error}</div>;
}
if (!album) {
if (!album || isLoading) {
return <div>Loading...</div>;
}
@@ -67,7 +127,7 @@ export const Album = () => {
);
}
const hasExplicitTrack = album.tracks.items.some((track) => track.explicit);
const hasExplicitTrack = tracks.some((track) => track.explicit);
return (
<div className="space-y-4 md:space-y-6">
@@ -130,11 +190,11 @@ export const Album = () => {
<h2 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark px-1">Tracks</h2>
<div className="bg-surface-muted dark:bg-surface-muted-dark rounded-xl p-2 md:p-4 shadow-sm">
<div className="space-y-1 md:space-y-2">
{album.tracks.items.map((track, index) => {
{tracks.map((track, index) => {
if (isExplicitFilterEnabled && track.explicit) {
return (
<div
key={index}
key={`${track.id || "explicit"}-${index}`}
className="flex items-center justify-between p-3 md:p-4 bg-surface-muted dark:bg-surface-muted-dark rounded-lg opacity-50"
>
<div className="flex items-center gap-3 md:gap-4 min-w-0 flex-1">
@@ -147,7 +207,7 @@ export const Album = () => {
}
return (
<div
key={track.id}
key={track.id || `${index}`}
className="flex items-center justify-between p-3 md:p-4 hover:bg-surface-secondary dark:hover:bg-surface-secondary-dark rounded-lg transition-colors duration-200 group"
>
<div className="flex items-center gap-3 md:gap-4 min-w-0 flex-1">
@@ -188,6 +248,13 @@ export const Album = () => {
</div>
);
})}
<div ref={loadMoreRef} />
{isLoadingMore && (
<div className="p-3 text-center text-content-muted dark:text-content-muted-dark text-sm">Loading more...</div>
)}
{!hasMore && tracks.length > 0 && (
<div className="p-3 text-center text-content-muted dark:text-content-muted-dark text-sm">End of album</div>
)}
</div>
</div>
</div>

View File

@@ -1,5 +1,5 @@
import { Link, useParams } from "@tanstack/react-router";
import { useEffect, useState, useContext } from "react";
import { useEffect, useState, useContext, useRef, useCallback } from "react";
import { toast } from "sonner";
import apiClient from "../lib/api-client";
import type { AlbumType, ArtistType, TrackType } from "../types/spotify";
@@ -18,58 +18,170 @@ export const Artist = () => {
const context = useContext(QueueContext);
const { settings } = useSettings();
const sentinelRef = useRef<HTMLDivElement | null>(null);
// Pagination state
const LIMIT = 20; // tune as you like
const [offset, setOffset] = useState<number>(0);
const [loading, setLoading] = useState<boolean>(false);
const [loadingMore, setLoadingMore] = useState<boolean>(false);
const [hasMore, setHasMore] = useState<boolean>(true); // assume more until we learn otherwise
if (!context) {
throw new Error("useQueue must be used within a QueueProvider");
}
const { addItem } = context;
const applyFilters = useCallback(
(items: AlbumType[]) => {
return items.filter((item) => (settings?.explicitFilter ? !item.explicit : true));
},
[settings?.explicitFilter]
);
// Helper to dedupe albums by id
const dedupeAppendAlbums = (current: AlbumType[], incoming: AlbumType[]) => {
const seen = new Set(current.map((a) => a.id));
const filtered = incoming.filter((a) => !seen.has(a.id));
return current.concat(filtered);
};
// Fetch artist info & first page of albums
useEffect(() => {
const fetchArtistData = async () => {
if (!artistId) return;
if (!artistId) return;
let cancelled = false;
const fetchInitial = async () => {
setLoading(true);
setError(null);
setAlbums([]);
setOffset(0);
setHasMore(true);
try {
const response = await apiClient.get(`/artist/info?id=${artistId}`);
const artistData = response.data;
const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=0`);
const data = resp.data;
// Check if we have artist data in the response
if (artistData?.id && artistData?.name) {
// Set artist info directly from the response
setArtist({
id: artistData.id,
name: artistData.name,
images: artistData.images || [],
external_urls: artistData.external_urls || { spotify: "" },
followers: artistData.followers || { total: 0 },
genres: artistData.genres || [],
popularity: artistData.popularity || 0,
type: artistData.type || 'artist',
uri: artistData.uri || ''
});
if (cancelled) return;
// Check if we have albums data
if (artistData?.albums?.items && artistData.albums.items.length > 0) {
setAlbums(artistData.albums.items);
if (data?.id && data?.name) {
// set artist meta
setArtist({
id: data.id,
name: data.name,
images: data.images || [],
external_urls: data.external_urls || { spotify: "" },
followers: data.followers || { total: 0 },
genres: data.genres || [],
popularity: data.popularity || 0,
type: data.type || "artist",
uri: data.uri || "",
});
// top tracks (if provided)
if (Array.isArray(data.top_tracks)) {
setTopTracks(data.top_tracks);
} else {
setError("No albums found for this artist.");
return;
setTopTracks([]);
}
// albums pagination info
const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || [];
const total: number | undefined = data?.albums?.total;
setAlbums(items);
setOffset(items.length);
if (typeof total === "number") {
setHasMore(items.length < total);
} else {
// If server didn't return total, default behavior: stop when an empty page arrives.
setHasMore(items.length > 0);
}
} else {
setError("Could not load artist data.");
return;
}
setTopTracks([]);
const watchStatusResponse = await apiClient.get<{ is_watched: boolean }>(`/artist/watch/${artistId}/status`);
setIsWatched(watchStatusResponse.data.is_watched);
// fetch watch status
try {
const watchStatusResponse = await apiClient.get<{ is_watched: boolean }>(`/artist/watch/${artistId}/status`);
if (!cancelled) setIsWatched(watchStatusResponse.data.is_watched);
} catch (e) {
// ignore watch status errors
console.warn("Failed to load watch status", e);
}
} catch (err) {
setError("Failed to load artist page");
console.error(err);
if (!cancelled) {
console.error(err);
setError("Failed to load artist page");
}
} finally {
if (!cancelled) setLoading(false);
}
};
fetchArtistData();
}, [artistId]);
fetchInitial();
return () => {
cancelled = true;
};
}, [artistId, LIMIT]);
// Fetch more albums (next page)
const fetchMoreAlbums = useCallback(async () => {
if (!artistId || loadingMore || loading || !hasMore) return;
setLoadingMore(true);
try {
const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=${offset}`);
const data = resp.data;
const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || [];
const total: number | undefined = data?.albums?.total;
setAlbums((cur) => dedupeAppendAlbums(cur, items));
setOffset((cur) => cur + items.length);
if (typeof total === "number") {
setHasMore((prev) => prev && offset + items.length < total);
} else {
// if server doesn't expose total, stop when we get fewer than LIMIT items
setHasMore(items.length === LIMIT);
}
} catch (err) {
console.error("Failed to load more albums", err);
toast.error("Failed to load more albums");
setHasMore(false);
} finally {
setLoadingMore(false);
}
}, [artistId, offset, LIMIT, loadingMore, loading, hasMore]);
// IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible
useEffect(() => {
const sentinel = sentinelRef.current;
if (!sentinel) return;
if (!hasMore) return;
const observer = new IntersectionObserver(
(entries) => {
entries.forEach((entry) => {
if (entry.isIntersecting) {
fetchMoreAlbums();
}
});
},
{
root: null,
rootMargin: "400px", // start loading a bit before the sentinel enters viewport
threshold: 0.1,
}
);
observer.observe(sentinel);
return () => observer.disconnect();
}, [fetchMoreAlbums, hasMore]);
// --- existing handlers (unchanged) ---
const handleDownloadTrack = (track: TrackType) => {
if (!track.id) return;
toast.info(`Adding ${track.name} to queue...`);
@@ -83,31 +195,25 @@ export const Artist = () => {
const handleDownloadArtist = async () => {
if (!artistId || !artist) return;
try {
toast.info(`Downloading ${artist.name} discography...`);
// Call the artist download endpoint which returns album task IDs
const response = await apiClient.get(`/artist/download/${artistId}`);
if (response.data.queued_albums?.length > 0) {
toast.success(
`${artist.name} discography queued successfully!`,
{
description: `${response.data.queued_albums.length} albums added to queue.`,
}
);
toast.success(`${artist.name} discography queued successfully!`, {
description: `${response.data.queued_albums.length} albums added to queue.`,
});
} else {
toast.info("No new albums to download for this artist.");
}
} catch (error: any) {
console.error("Artist download failed:", error);
toast.error(
"Failed to download artist",
{
description: error.response?.data?.error || "An unexpected error occurred.",
}
);
toast.error("Failed to download artist", {
description: error.response?.data?.error || "An unexpected error occurred.",
});
}
};
@@ -132,18 +238,14 @@ export const Artist = () => {
return <div className="text-red-500">{error}</div>;
}
if (!artist) {
if (loading && !artist) {
return <div>Loading...</div>;
}
if (!artist.name) {
if (!artist) {
return <div>Artist data could not be fully loaded. Please try again later.</div>;
}
const applyFilters = (items: AlbumType[]) => {
return items.filter((item) => (settings?.explicitFilter ? !item.explicit : true));
};
const artistAlbums = applyFilters(albums.filter((album) => album.album_type === "album"));
const artistSingles = applyFilters(albums.filter((album) => album.album_type === "single"));
const artistCompilations = applyFilters(albums.filter((album) => album.album_type === "compilation"));
@@ -178,11 +280,10 @@ export const Artist = () => {
</button>
<button
onClick={handleToggleWatch}
className={`flex items-center gap-2 px-4 py-2 rounded-md transition-colors border ${
isWatched
className={`flex items-center gap-2 px-4 py-2 rounded-md transition-colors border ${isWatched
? "bg-button-primary text-button-primary-text border-primary"
: "bg-surface dark:bg-surface-dark hover:bg-surface-muted dark:hover:bg-surface-muted-dark border-border dark:border-border-dark text-content-primary dark:text-content-primary-dark"
}`}
}`}
>
{isWatched ? (
<>
@@ -208,11 +309,15 @@ export const Artist = () => {
key={track.id}
className="track-item flex items-center justify-between p-2 rounded-md hover:bg-surface-muted dark:hover:bg-surface-muted-dark transition-colors"
>
<Link to="/track/$trackId" params={{ trackId: track.id }} className="font-semibold text-content-primary dark:text-content-primary-dark">
<Link
to="/track/$trackId"
params={{ trackId: track.id }}
className="font-semibold text-content-primary dark:text-content-primary-dark"
>
{track.name}
</Link>
<button
onClick={() => handleDownloadTrack(track)}
<button
onClick={() => handleDownloadTrack(track)}
className="px-3 py-1 bg-button-secondary hover:bg-button-secondary-hover text-button-secondary-text hover:text-button-secondary-text-hover rounded"
>
Download
@@ -223,6 +328,7 @@ export const Artist = () => {
</div>
)}
{/* Albums */}
{artistAlbums.length > 0 && (
<div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Albums</h2>
@@ -234,6 +340,7 @@ export const Artist = () => {
</div>
)}
{/* Singles */}
{artistSingles.length > 0 && (
<div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Singles</h2>
@@ -245,6 +352,7 @@ export const Artist = () => {
</div>
)}
{/* Compilations */}
{artistCompilations.length > 0 && (
<div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Compilations</h2>
@@ -255,6 +363,22 @@ export const Artist = () => {
</div>
</div>
)}
{/* sentinel + loading */}
<div className="flex flex-col items-center gap-2">
{loadingMore && <div className="py-4">Loading more...</div>}
{!hasMore && !loading && <div className="py-4 text-sm text-content-secondary">End of discography</div>}
{/* fallback load more button for browsers that block IntersectionObserver or for manual control */}
{hasMore && !loadingMore && (
<button
onClick={() => fetchMoreAlbums()}
className="px-4 py-2 mb-6 rounded bg-surface-muted hover:bg-surface-muted-dark"
>
Load more
</button>
)}
<div ref={sentinelRef} style={{ height: 1, width: "100%" }} />
</div>
</div>
);
};

View File

@@ -54,6 +54,9 @@ type ChildTrack = {
timestamp: number;
position?: number;
metadata: Record<string, any>;
service?: string;
quality_format?: string;
quality_bitrate?: string;
};
type ChildrenResponse = {
@@ -73,7 +76,9 @@ const STATUS_CLASS: Record<string, string> = {
skipped: "text-content-muted dark:text-content-muted-dark",
};
const formatQuality = (entry: HistoryEntry): string => {
const formatQuality = (
entry: { quality_format?: string; quality_bitrate?: string }
): string => {
const format = entry.quality_format || "Unknown";
const bitrate = entry.quality_bitrate || "";
return bitrate ? `${format} ${bitrate}` : format;
@@ -195,11 +200,8 @@ export const History = () => {
id: "quality",
header: "Quality",
cell: (info) => {
const entry = info.row.original;
if ("download_type" in entry) {
return formatQuality(entry);
}
return "N/A";
const entry = info.row.original as HistoryEntry | ChildTrack;
return formatQuality(entry);
},
}),
columnHelper.accessor("status", {
@@ -622,7 +624,7 @@ export const History = () => {
<div className="col-span-2">
<span className="text-content-muted dark:text-content-muted-dark">Quality:</span>
<span className="ml-1 text-content-primary dark:text-content-primary-dark">
{"download_type" in entry ? formatQuality(entry) : "N/A"}
{formatQuality(entry as HistoryEntry | ChildTrack)}
</span>
</div>
<div className="col-span-2">

View File

@@ -190,7 +190,7 @@ export const Playlist = () => {
<div className="bg-surface dark:bg-surface-dark border border-border dark:border-border-dark rounded-xl p-4 md:p-6 shadow-sm">
<div className="flex flex-col items-center gap-4 md:gap-6">
<img
src={playlistMetadata.images[0]?.url || "/placeholder.jpg"}
src={playlistMetadata.images?.at(0)?.url || "/placeholder.jpg"}
alt={playlistMetadata.name}
className="w-32 h-32 sm:w-40 sm:h-40 md:w-48 md:h-48 object-cover rounded-lg shadow-lg mx-auto"
/>
@@ -255,7 +255,7 @@ export const Playlist = () => {
<span className="text-content-muted dark:text-content-muted-dark w-6 md:w-8 text-right text-sm font-medium">{index + 1}</span>
<Link to="/album/$albumId" params={{ albumId: track.album.id }}>
<img
src={track.album.images.at(-1)?.url}
src={track.album.images?.at(-1)?.url || "/placeholder.jpg "}
alt={track.album.name}
className="w-10 h-10 md:w-12 md:h-12 object-cover rounded hover:scale-105 transition-transform duration-300"
/>