Finally implemented SSE

Author: Xoconoch
Date:   2025-08-02 12:46:36 -06:00
Parent: a80c4c846e
Commit: 9fdc0bde42

12 changed files with 1588 additions and 1298 deletions
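
Note: the commit message refers to server-sent events, but the hunks excerpted below only cover the Flask-to-FastAPI migration itself; the SSE endpoint is presumably in one of the route files whose diff is not shown here (the progress routes are the likely home). For reference, the sketch below shows the generic way an SSE stream is exposed with FastAPI's StreamingResponse. The path, payload shape, and helper names are illustrative assumptions, not code from this commit.

import asyncio
import json

from fastapi import APIRouter
from fastapi.responses import StreamingResponse

router = APIRouter()

async def task_status_events(task_id: str):
    # Placeholder progression; a real implementation would poll task state
    # (e.g. from Redis/Celery) instead of this hard-coded sequence.
    for status in ("queued", "downloading", "complete"):
        payload = {"task_id": task_id, "status": status}
        # An SSE frame is a "data: <payload>" line terminated by a blank line.
        yield f"data: {json.dumps(payload)}\n\n"
        await asyncio.sleep(1)

@router.get("/prgs/stream/{task_id}")
async def stream_task_status(task_id: str):
    # text/event-stream keeps the connection open so an EventSource client
    # can consume frames incrementally as they are yielded.
    return StreamingResponse(task_status_events(task_id), media_type="text/event-stream")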

app.py (293 changes)

@@ -1,14 +1,8 @@
-from flask import Flask, request, send_from_directory
-from flask_cors import CORS
-from routes.search import search_bp
-from routes.credentials import credentials_bp
-from routes.album import album_bp
-from routes.track import track_bp
-from routes.playlist import playlist_bp
-from routes.prgs import prgs_bp
-from routes.config import config_bp
-from routes.artist import artist_bp
-from routes.history import history_bp
+from fastapi import FastAPI, Request, HTTPException
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.staticfiles import StaticFiles
+from fastapi.responses import FileResponse
+from contextlib import asynccontextmanager
 import logging
 import logging.handlers
 import time
@@ -20,10 +14,24 @@ import redis
import socket import socket
from urllib.parse import urlparse from urllib.parse import urlparse
# Import route routers (to be created)
from routes.search import router as search_router
from routes.credentials import router as credentials_router
from routes.album import router as album_router
from routes.track import router as track_router
from routes.playlist import router as playlist_router
from routes.prgs import router as prgs_router
from routes.config import router as config_router
from routes.artist import router as artist_router
from routes.history import router as history_router
# Import Celery configuration and manager # Import Celery configuration and manager
from routes.utils.celery_manager import celery_manager from routes.utils.celery_manager import celery_manager
from routes.utils.celery_config import REDIS_URL from routes.utils.celery_config import REDIS_URL
# Import and initialize routes (this will start the watch manager)
import routes
# Configure application-wide logging # Configure application-wide logging
def setup_logging(): def setup_logging():
@@ -66,175 +74,178 @@ def setup_logging():
root_logger.addHandler(console_handler) root_logger.addHandler(console_handler)
# Set up specific loggers # Set up specific loggers
for logger_name in ["werkzeug", "celery", "routes", "flask", "waitress"]: for logger_name in [
module_logger = logging.getLogger(logger_name) "routes",
module_logger.setLevel(logging.INFO) "routes.utils",
# Handlers are inherited from root logger "routes.utils.celery_manager",
"routes.utils.celery_tasks",
"routes.utils.watch",
]:
logger = logging.getLogger(logger_name)
logger.setLevel(logging.INFO)
logger.propagate = True # Propagate to root logger
# Enable propagation for all loggers logging.info("Logging system initialized")
logging.getLogger("celery").propagate = True
# Notify successful setup
root_logger.info("Logging system initialized")
# Return the main file handler for permissions adjustment
return file_handler
def check_redis_connection(): def check_redis_connection():
"""Check if Redis is reachable and retry with exponential backoff if not""" """Check if Redis is available and accessible"""
max_retries = 5 if not REDIS_URL:
retry_count = 0 logging.error("REDIS_URL is not configured. Please check your environment.")
retry_delay = 1 # start with 1 second return False
# Extract host and port from REDIS_URL
redis_host = "redis" # default
redis_port = 6379 # default
# Parse from REDIS_URL if possible
if REDIS_URL:
# parse hostname and port (handles optional auth)
try: try:
parsed = urlparse(REDIS_URL) # Parse Redis URL
if parsed.hostname: parsed_url = urlparse(REDIS_URL)
redis_host = parsed.hostname host = parsed_url.hostname or "localhost"
if parsed.port: port = parsed_url.port or 6379
redis_port = parsed.port
except Exception:
pass
# Log Redis connection details logging.info(f"Testing Redis connection to {host}:{port}...")
logging.info(f"Checking Redis connection to {redis_host}:{redis_port}")
while retry_count < max_retries: # Test socket connection first
try:
# First try socket connection to check if Redis port is open
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2) sock.settimeout(5)
result = sock.connect_ex((redis_host, redis_port)) result = sock.connect_ex((host, port))
sock.close() sock.close()
if result != 0: if result != 0:
raise ConnectionError( logging.error(f"Cannot connect to Redis at {host}:{port}")
f"Cannot connect to Redis at {redis_host}:{redis_port}" return False
)
# If socket connection successful, try Redis ping # Test Redis client connection
r = redis.Redis.from_url(REDIS_URL) r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
r.ping() r.ping()
logging.info("Successfully connected to Redis") logging.info("Redis connection successful")
return True return True
except redis.ConnectionError as e:
logging.error(f"Redis connection error: {e}")
return False
except redis.TimeoutError as e:
logging.error(f"Redis timeout error: {e}")
return False
except Exception as e: except Exception as e:
retry_count += 1 logging.error(f"Unexpected error checking Redis connection: {e}")
if retry_count >= max_retries:
logging.error(
f"Failed to connect to Redis after {max_retries} attempts: {e}"
)
logging.error(
f"Make sure Redis is running at {redis_host}:{redis_port}"
)
return False return False
logging.warning(f"Redis connection attempt {retry_count} failed: {e}")
logging.info(f"Retrying in {retry_delay} seconds...")
time.sleep(retry_delay)
retry_delay *= 2 # exponential backoff
return False @asynccontextmanager
async def lifespan(app: FastAPI):
"""Handle application startup and shutdown"""
# Startup
setup_logging()
# Check Redis connection
if not check_redis_connection():
logging.error("Failed to connect to Redis. Please ensure Redis is running and accessible.")
# Don't exit, but warn - some functionality may not work
# Start Celery workers
try:
celery_manager.start()
logging.info("Celery workers started successfully")
except Exception as e:
logging.error(f"Failed to start Celery workers: {e}")
yield
# Shutdown
try:
celery_manager.stop()
logging.info("Celery workers stopped")
except Exception as e:
logging.error(f"Error stopping Celery workers: {e}")
def create_app(): def create_app():
app = Flask(__name__, static_folder="spotizerr-ui/dist", static_url_path="/") app = FastAPI(
title="Spotizerr API",
description="Music download service API",
version="1.0.0",
lifespan=lifespan
)
# Set up CORS # Set up CORS
CORS(app) app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Register blueprints # Register routers with URL prefixes
app.register_blueprint(config_bp, url_prefix="/api") app.include_router(config_router, prefix="/api", tags=["config"])
app.register_blueprint(search_bp, url_prefix="/api") app.include_router(search_router, prefix="/api", tags=["search"])
app.register_blueprint(credentials_bp, url_prefix="/api/credentials") app.include_router(credentials_router, prefix="/api/credentials", tags=["credentials"])
app.register_blueprint(album_bp, url_prefix="/api/album") app.include_router(album_router, prefix="/api/album", tags=["album"])
app.register_blueprint(track_bp, url_prefix="/api/track") app.include_router(track_router, prefix="/api/track", tags=["track"])
app.register_blueprint(playlist_bp, url_prefix="/api/playlist") app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
app.register_blueprint(artist_bp, url_prefix="/api/artist") app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
app.register_blueprint(prgs_bp, url_prefix="/api/prgs") app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
app.register_blueprint(history_bp, url_prefix="/api/history") app.include_router(history_router, prefix="/api/history", tags=["history"])
# Serve React App
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>")
def serve_react_app(path):
if path != "" and os.path.exists(os.path.join(app.static_folder, path)):
return send_from_directory(app.static_folder, path)
else:
return send_from_directory(app.static_folder, "index.html")
# Add request logging middleware # Add request logging middleware
@app.before_request @app.middleware("http")
def log_request(): async def log_requests(request: Request, call_next):
request.start_time = time.time() start_time = time.time()
app.logger.debug(f"Request: {request.method} {request.path}")
# Log request
logger = logging.getLogger("uvicorn.access")
logger.debug(f"Request: {request.method} {request.url.path}")
try:
response = await call_next(request)
# Log response
duration = round((time.time() - start_time) * 1000, 2)
logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")
@app.after_request
def log_response(response):
if hasattr(request, "start_time"):
duration = round((time.time() - request.start_time) * 1000, 2)
app.logger.debug(f"Response: {response.status} | Duration: {duration}ms")
return response return response
except Exception as e:
# Log errors
logger.error(f"Server error: {str(e)}", exc_info=True)
raise HTTPException(status_code=500, detail="Internal Server Error")
# Error logging # Mount static files for React app
@app.errorhandler(Exception) if os.path.exists("spotizerr-ui/dist"):
def handle_exception(e): app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")
app.logger.error(f"Server error: {str(e)}", exc_info=True)
return "Internal Server Error", 500 # Serve React App - catch-all route for SPA
@app.get("/{full_path:path}")
async def serve_react_app(full_path: str):
"""Serve React app with fallback to index.html for SPA routing"""
static_dir = "spotizerr-ui/dist"
# If it's a file that exists, serve it
if full_path and os.path.exists(os.path.join(static_dir, full_path)):
return FileResponse(os.path.join(static_dir, full_path))
else:
# Fallback to index.html for SPA routing
return FileResponse(os.path.join(static_dir, "index.html"))
else:
logging.warning("React app build directory not found at spotizerr-ui/dist")
return app return app
def start_celery_workers(): def start_celery_workers():
"""Start Celery workers with dynamic configuration""" """Start Celery workers with dynamic configuration"""
logging.info("Starting Celery workers with dynamic configuration") # This function is now handled by the lifespan context manager
celery_manager.start() # and the celery_manager.start() call
pass
# Register shutdown handler
atexit.register(celery_manager.stop)
if __name__ == "__main__": if __name__ == "__main__":
# Configure application logging import uvicorn
log_handler = setup_logging()
# Set permissions for log file
try:
if os.name != "nt": # Not Windows
os.chmod(log_handler.baseFilename, 0o666)
except Exception as e:
logging.warning(f"Could not set permissions on log file: {e}")
# Check Redis connection before starting
if not check_redis_connection():
logging.error("Exiting: Could not establish Redis connection.")
sys.exit(1)
# Start Celery workers in a separate thread
start_celery_workers()
# Clean up Celery workers on exit
atexit.register(celery_manager.stop)
# Create Flask app
app = create_app() app = create_app()
# Get host and port from environment variables or use defaults # Run with uvicorn
host = os.environ.get("HOST", "0.0.0.0") uvicorn.run(
port = int(os.environ.get("PORT", 7171)) app,
host="0.0.0.0",
# Use Flask's built-in server for development port=7171,
# logging.info(f"Starting Flask development server on http://{host}:{port}") log_level="info",
# app.run(host=host, port=port, debug=True) access_log=True
)
# The following uses Waitress, a production-ready server.
# To use it, comment out the app.run() line above and uncomment the lines below.
logging.info(f"Starting server with Waitress on http://{host}:{port}")
from waitress import serve
serve(app, host=host, port=port)
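
For orientation, here is a condensed sketch of the application shape the new app.py converges on: startup and shutdown side effects (logging setup, the Redis check, starting and stopping the Celery workers) move into a lifespan context manager, CORS becomes middleware, and a catch-all route serves the built SPA with an index.html fallback. The skeleton below mirrors the diff but trims the bodies; it is a sketch, not the actual file.

import os
from contextlib import asynccontextmanager

import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup work (logging, Redis connectivity check, Celery workers) runs
    # before the yield; shutdown work (stopping the workers) runs after it.
    yield

def create_app() -> FastAPI:
    app = FastAPI(title="Spotizerr API", lifespan=lifespan)
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    # app.include_router(...) calls for the nine routers go here.

    static_dir = "spotizerr-ui/dist"

    @app.get("/{full_path:path}")
    async def serve_react_app(full_path: str):
        # Serve real files when they exist, otherwise fall back to index.html
        # so client-side routing survives hard refreshes and deep links.
        candidate = os.path.join(static_dir, full_path)
        if full_path and os.path.exists(candidate):
            return FileResponse(candidate)
        return FileResponse(os.path.join(static_dir, "index.html"))

    return app

if __name__ == "__main__":
    app = create_app()
    uvicorn.run(app, host="0.0.0.0", port=7171, log_level="info")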


@@ -1,5 +1,5 @@
-waitress==3.0.2
+fastapi==0.115.6
+uvicorn[standard]==0.32.1
 celery==5.5.3
-Flask==3.1.1
-flask_cors==6.0.0
 deezspot-spotizerr==2.2.0
+httpx
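
The dependency swap matches the code change: waitress, Flask, and flask_cors drop out; fastapi and uvicorn come in. The diff does not show how the newly added httpx is used; one plausible use, sketched below purely as an assumption, is async outbound HTTP from the new async handlers (httpx also backs Starlette's TestClient in recent versions).

import httpx

async def fetch_json(url: str) -> dict:
    # Non-blocking HTTP call suitable for use inside an async FastAPI handler.
    async with httpx.AsyncClient(timeout=10) as client:
        resp = await client.get(url)
        resp.raise_for_status()
        return resp.json()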


@@ -1,4 +1,5 @@
-from flask import Blueprint, Response, request
+from fastapi import APIRouter, HTTPException, Request
+from fastapi.responses import JSONResponse
 import json
 import traceback
 import uuid
@@ -8,7 +9,7 @@ from routes.utils.celery_tasks import store_task_info, store_task_status, Progre
from routes.utils.get_info import get_spotify_info from routes.utils.get_info import get_spotify_info
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
album_bp = Blueprint("album", __name__) router = APIRouter()
def construct_spotify_url(item_id: str, item_type: str = "track") -> str: def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
@@ -16,8 +17,8 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
return f"https://open.spotify.com/{item_type}/{item_id}" return f"https://open.spotify.com/{item_type}/{item_id}"
@album_bp.route("/download/<album_id>", methods=["GET"]) @router.get("/download/{album_id}")
def handle_download(album_id): async def handle_download(album_id: str, request: Request):
# Retrieve essential parameters from the request. # Retrieve essential parameters from the request.
# name = request.args.get('name') # name = request.args.get('name')
# artist = request.args.get('artist') # artist = request.args.get('artist')
@@ -33,12 +34,9 @@ def handle_download(album_id):
or not album_info.get("name") or not album_info.get("name")
or not album_info.get("artists") or not album_info.get("artists")
): ):
return Response( return JSONResponse(
json.dumps( content={"error": f"Could not retrieve metadata for album ID: {album_id}"},
{"error": f"Could not retrieve metadata for album ID: {album_id}"} status_code=404
),
status=404,
mimetype="application/json",
) )
name_from_spotify = album_info.get("name") name_from_spotify = album_info.get("name")
@@ -49,27 +47,23 @@ def handle_download(album_id):
) )
except Exception as e: except Exception as e:
return Response( return JSONResponse(
json.dumps( content={"error": f"Failed to fetch metadata for album {album_id}: {str(e)}"},
{"error": f"Failed to fetch metadata for album {album_id}: {str(e)}"} status_code=500
),
status=500,
mimetype="application/json",
) )
# Validate required parameters # Validate required parameters
if not url: if not url:
return Response( return JSONResponse(
json.dumps({"error": "Missing required parameter: url"}), content={"error": "Missing required parameter: url"},
status=400, status_code=400
mimetype="application/json",
) )
# Add the task to the queue with only essential parameters # Add the task to the queue with only essential parameters
# The queue manager will now handle all config parameters # The queue manager will now handle all config parameters
# Include full original request URL in metadata # Include full original request URL in metadata
orig_params = request.args.to_dict() orig_params = dict(request.query_params)
orig_params["original_url"] = request.url orig_params["original_url"] = str(request.url)
try: try:
task_id = download_queue_manager.add_task( task_id = download_queue_manager.add_task(
{ {
@@ -81,15 +75,12 @@ def handle_download(album_id):
} }
) )
except DuplicateDownloadError as e: except DuplicateDownloadError as e:
return Response( return JSONResponse(
json.dumps( content={
{
"error": "Duplicate download detected.", "error": "Duplicate download detected.",
"existing_task": e.existing_task, "existing_task": e.existing_task,
} },
), status_code=409
status=409,
mimetype="application/json",
) )
except Exception as e: except Exception as e:
# Generic error handling for other issues during task submission # Generic error handling for other issues during task submission
@@ -116,61 +107,57 @@ def handle_download(album_id):
"timestamp": time.time(), "timestamp": time.time(),
}, },
) )
return Response( return JSONResponse(
json.dumps( content={
{
"error": f"Failed to queue album download: {str(e)}", "error": f"Failed to queue album download: {str(e)}",
"task_id": error_task_id, "task_id": error_task_id,
} },
), status_code=500
status=500,
mimetype="application/json",
) )
return Response( return JSONResponse(
json.dumps({"task_id": task_id}), status=202, mimetype="application/json" content={"task_id": task_id},
status_code=202
) )
@album_bp.route("/download/cancel", methods=["GET"]) @router.get("/download/cancel")
def cancel_download(): async def cancel_download(request: Request):
""" """
Cancel a running download process by its task id. Cancel a running download process by its task id.
""" """
task_id = request.args.get("task_id") task_id = request.query_params.get("task_id")
if not task_id: if not task_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing process id (task_id) parameter"}), content={"error": "Missing process id (task_id) parameter"},
status=400, status_code=400
mimetype="application/json",
) )
# Use the queue manager's cancellation method. # Use the queue manager's cancellation method.
result = download_queue_manager.cancel_task(task_id) result = download_queue_manager.cancel_task(task_id)
status_code = 200 if result.get("status") == "cancelled" else 404 status_code = 200 if result.get("status") == "cancelled" else 404
return Response(json.dumps(result), status=status_code, mimetype="application/json") return JSONResponse(content=result, status_code=status_code)
@album_bp.route("/info", methods=["GET"]) @router.get("/info")
def get_album_info(): async def get_album_info(request: Request):
""" """
Retrieve Spotify album metadata given a Spotify album ID. Retrieve Spotify album metadata given a Spotify album ID.
Expects a query parameter 'id' that contains the Spotify album ID. Expects a query parameter 'id' that contains the Spotify album ID.
""" """
spotify_id = request.args.get("id") spotify_id = request.query_params.get("id")
if not spotify_id: if not spotify_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing parameter: id"}), content={"error": "Missing parameter: id"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
# Use the get_spotify_info function (already imported at top) # Use the get_spotify_info function (already imported at top)
album_info = get_spotify_info(spotify_id, "album") album_info = get_spotify_info(spotify_id, "album")
return Response(json.dumps(album_info), status=200, mimetype="application/json") return JSONResponse(content=album_info, status_code=200)
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return Response(json.dumps(error_data), status=500, mimetype="application/json") return JSONResponse(content=error_data, status_code=500)
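
The edits above follow one mechanical pattern, restated here in clean form because the merged diff columns are hard to read: the Blueprint becomes an APIRouter, handlers become async, request.args becomes request.query_params, and Response(json.dumps(...), status=..., mimetype="application/json") becomes JSONResponse(content=..., status_code=...). The handler body below is a trimmed stand-in for the real one, which calls get_spotify_info.

from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse

router = APIRouter()

@router.get("/info")
async def get_album_info(request: Request):
    spotify_id = request.query_params.get("id")
    if not spotify_id:
        return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
    album_info = {"id": spotify_id}  # stand-in for get_spotify_info(spotify_id, "album")
    return JSONResponse(content=album_info, status_code=200)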


@@ -1,8 +1,9 @@
 """
-Artist endpoint blueprint.
+Artist endpoint router.
 """
 
-from flask import Blueprint, Response, request, jsonify
+from fastapi import APIRouter, HTTPException, Request
+from fastapi.responses import JSONResponse
 import json
 import traceback
 from routes.utils.artist import download_artist_albums
@@ -22,7 +23,7 @@ from routes.utils.watch.db import (
from routes.utils.watch.manager import check_watched_artists, get_watch_config from routes.utils.watch.manager import check_watched_artists, get_watch_config
from routes.utils.get_info import get_spotify_info from routes.utils.get_info import get_spotify_info
artist_bp = Blueprint("artist", __name__, url_prefix="/api/artist") router = APIRouter()
# Existing log_json can be used, or a logger instance. # Existing log_json can be used, or a logger instance.
# Let's initialize a logger for consistency with merged code. # Let's initialize a logger for consistency with merged code.
@@ -38,8 +39,8 @@ def log_json(message_dict):
print(json.dumps(message_dict)) print(json.dumps(message_dict))
@artist_bp.route("/download/<artist_id>", methods=["GET"]) @router.get("/download/{artist_id}")
def handle_artist_download(artist_id): async def handle_artist_download(artist_id: str, request: Request):
""" """
Enqueues album download tasks for the given artist. Enqueues album download tasks for the given artist.
Expected query parameters: Expected query parameters:
@@ -49,14 +50,13 @@ def handle_artist_download(artist_id):
url = construct_spotify_url(artist_id, "artist") url = construct_spotify_url(artist_id, "artist")
# Retrieve essential parameters from the request. # Retrieve essential parameters from the request.
album_type = request.args.get("album_type", "album,single,compilation") album_type = request.query_params.get("album_type", "album,single,compilation")
# Validate required parameters # Validate required parameters
if not url: # This check is mostly for safety, as url is constructed if not url: # This check is mostly for safety, as url is constructed
return Response( return JSONResponse(
json.dumps({"error": "Missing required parameter: url"}), content={"error": "Missing required parameter: url"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
@@ -65,7 +65,7 @@ def handle_artist_download(artist_id):
# Delegate to the download_artist_albums function which will handle album filtering # Delegate to the download_artist_albums function which will handle album filtering
successfully_queued_albums, duplicate_albums = download_artist_albums( successfully_queued_albums, duplicate_albums = download_artist_albums(
url=url, album_type=album_type, request_args=request.args.to_dict() url=url, album_type=album_type, request_args=dict(request.query_params)
) )
# Return the list of album task IDs. # Return the list of album task IDs.
@@ -80,51 +80,45 @@ def handle_artist_download(artist_id):
f" {len(duplicate_albums)} albums were already in progress or queued." f" {len(duplicate_albums)} albums were already in progress or queued."
) )
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=202, # Still 202 Accepted as some operations may have succeeded status_code=202 # Still 202 Accepted as some operations may have succeeded
mimetype="application/json",
) )
except Exception as e: except Exception as e:
return Response( return JSONResponse(
json.dumps( content={
{
"status": "error", "status": "error",
"message": str(e), "message": str(e),
"traceback": traceback.format_exc(), "traceback": traceback.format_exc(),
} },
), status_code=500
status=500,
mimetype="application/json",
) )
@artist_bp.route("/download/cancel", methods=["GET"]) @router.get("/download/cancel")
def cancel_artist_download(): async def cancel_artist_download():
""" """
Cancelling an artist download is not supported since the endpoint only enqueues album tasks. Cancelling an artist download is not supported since the endpoint only enqueues album tasks.
(Cancellation for individual album tasks can be implemented via the queue manager.) (Cancellation for individual album tasks can be implemented via the queue manager.)
""" """
return Response( return JSONResponse(
json.dumps({"error": "Artist download cancellation is not supported."}), content={"error": "Artist download cancellation is not supported."},
status=400, status_code=400
mimetype="application/json",
) )
@artist_bp.route("/info", methods=["GET"]) @router.get("/info")
def get_artist_info(): async def get_artist_info(request: Request):
""" """
Retrieves Spotify artist metadata given a Spotify artist ID. Retrieves Spotify artist metadata given a Spotify artist ID.
Expects a query parameter 'id' with the Spotify artist ID. Expects a query parameter 'id' with the Spotify artist ID.
""" """
spotify_id = request.args.get("id") spotify_id = request.query_params.get("id")
if not spotify_id: if not spotify_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing parameter: id"}), content={"error": "Missing parameter: id"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
@@ -158,33 +152,30 @@ def get_artist_info():
# If not watched, or no albums, is_locally_known will not be added. # If not watched, or no albums, is_locally_known will not be added.
# Frontend should handle absence of this key as false. # Frontend should handle absence of this key as false.
return Response( return JSONResponse(
json.dumps(artist_info), status=200, mimetype="application/json" content=artist_info, status_code=200
) )
except Exception as e: except Exception as e:
return Response( return JSONResponse(
json.dumps({"error": str(e), "traceback": traceback.format_exc()}), content={"error": str(e), "traceback": traceback.format_exc()},
status=500, status_code=500
mimetype="application/json",
) )
# --- Merged Artist Watch Routes --- # --- Merged Artist Watch Routes ---
@artist_bp.route("/watch/<string:artist_spotify_id>", methods=["PUT"]) @router.put("/watch/{artist_spotify_id}")
def add_artist_to_watchlist(artist_spotify_id): async def add_artist_to_watchlist(artist_spotify_id: str):
"""Adds an artist to the watchlist.""" """Adds an artist to the watchlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify({"error": "Watch feature is currently disabled globally."}), 403 raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
logger.info(f"Attempting to add artist {artist_spotify_id} to watchlist.") logger.info(f"Attempting to add artist {artist_spotify_id} to watchlist.")
try: try:
if get_watched_artist(artist_spotify_id): if get_watched_artist(artist_spotify_id):
return jsonify( return {"message": f"Artist {artist_spotify_id} is already being watched."}
{"message": f"Artist {artist_spotify_id} is already being watched."}
), 200
# Get artist metadata directly for name and basic info # Get artist metadata directly for name and basic info
artist_metadata = get_spotify_info(artist_spotify_id, "artist") artist_metadata = get_spotify_info(artist_spotify_id, "artist")
@@ -199,11 +190,12 @@ def add_artist_to_watchlist(artist_spotify_id):
logger.error( logger.error(
f"Could not fetch artist metadata for {artist_spotify_id} from Spotify." f"Could not fetch artist metadata for {artist_spotify_id} from Spotify."
) )
return jsonify( raise HTTPException(
{ status_code=404,
detail={
"error": f"Could not fetch artist metadata for {artist_spotify_id} to initiate watch." "error": f"Could not fetch artist metadata for {artist_spotify_id} to initiate watch."
} }
), 404 )
# Check if we got album data # Check if we got album data
if not artist_album_list_data or not isinstance( if not artist_album_list_data or not isinstance(
@@ -228,115 +220,118 @@ def add_artist_to_watchlist(artist_spotify_id):
logger.info( logger.info(
f"Artist {artist_spotify_id} ('{artist_metadata.get('name', 'Unknown Artist')}') added to watchlist. Their albums will be processed by the watch manager." f"Artist {artist_spotify_id} ('{artist_metadata.get('name', 'Unknown Artist')}') added to watchlist. Their albums will be processed by the watch manager."
) )
return jsonify( return {
{
"message": f"Artist {artist_spotify_id} added to watchlist. Albums will be processed shortly." "message": f"Artist {artist_spotify_id} added to watchlist. Albums will be processed shortly."
} }
), 201 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error adding artist {artist_spotify_id} to watchlist: {e}", exc_info=True f"Error adding artist {artist_spotify_id} to watchlist: {e}", exc_info=True
) )
return jsonify({"error": f"Could not add artist to watchlist: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not add artist to watchlist: {str(e)}"})
@artist_bp.route("/watch/<string:artist_spotify_id>/status", methods=["GET"]) @router.get("/watch/{artist_spotify_id}/status")
def get_artist_watch_status(artist_spotify_id): async def get_artist_watch_status(artist_spotify_id: str):
"""Checks if a specific artist is being watched.""" """Checks if a specific artist is being watched."""
logger.info(f"Checking watch status for artist {artist_spotify_id}.") logger.info(f"Checking watch status for artist {artist_spotify_id}.")
try: try:
artist = get_watched_artist(artist_spotify_id) artist = get_watched_artist(artist_spotify_id)
if artist: if artist:
return jsonify({"is_watched": True, "artist_data": dict(artist)}), 200 return {"is_watched": True, "artist_data": dict(artist)}
else: else:
return jsonify({"is_watched": False}), 200 return {"is_watched": False}
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error checking watch status for artist {artist_spotify_id}: {e}", f"Error checking watch status for artist {artist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not check watch status: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not check watch status: {str(e)}"})
@artist_bp.route("/watch/<string:artist_spotify_id>", methods=["DELETE"]) @router.delete("/watch/{artist_spotify_id}")
def remove_artist_from_watchlist(artist_spotify_id): async def remove_artist_from_watchlist(artist_spotify_id: str):
"""Removes an artist from the watchlist.""" """Removes an artist from the watchlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify({"error": "Watch feature is currently disabled globally."}), 403 raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
logger.info(f"Attempting to remove artist {artist_spotify_id} from watchlist.") logger.info(f"Attempting to remove artist {artist_spotify_id} from watchlist.")
try: try:
if not get_watched_artist(artist_spotify_id): if not get_watched_artist(artist_spotify_id):
return jsonify( raise HTTPException(
{"error": f"Artist {artist_spotify_id} not found in watchlist."} status_code=404,
), 404 detail={"error": f"Artist {artist_spotify_id} not found in watchlist."}
)
remove_artist_db(artist_spotify_id) remove_artist_db(artist_spotify_id)
logger.info(f"Artist {artist_spotify_id} removed from watchlist successfully.") logger.info(f"Artist {artist_spotify_id} removed from watchlist successfully.")
return jsonify( return {"message": f"Artist {artist_spotify_id} removed from watchlist."}
{"message": f"Artist {artist_spotify_id} removed from watchlist."} except HTTPException:
), 200 raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error removing artist {artist_spotify_id} from watchlist: {e}", f"Error removing artist {artist_spotify_id} from watchlist: {e}",
exc_info=True, exc_info=True,
) )
return jsonify( raise HTTPException(
{"error": f"Could not remove artist from watchlist: {str(e)}"} status_code=500,
), 500 detail={"error": f"Could not remove artist from watchlist: {str(e)}"}
)
@artist_bp.route("/watch/list", methods=["GET"]) @router.get("/watch/list")
def list_watched_artists_endpoint(): async def list_watched_artists_endpoint():
"""Lists all artists currently in the watchlist.""" """Lists all artists currently in the watchlist."""
try: try:
artists = get_watched_artists() artists = get_watched_artists()
return jsonify([dict(artist) for artist in artists]), 200 return [dict(artist) for artist in artists]
except Exception as e: except Exception as e:
logger.error(f"Error listing watched artists: {e}", exc_info=True) logger.error(f"Error listing watched artists: {e}", exc_info=True)
return jsonify({"error": f"Could not list watched artists: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not list watched artists: {str(e)}"})
@artist_bp.route("/watch/trigger_check", methods=["POST"]) @router.post("/watch/trigger_check")
def trigger_artist_check_endpoint(): async def trigger_artist_check_endpoint():
"""Manually triggers the artist checking mechanism for all watched artists.""" """Manually triggers the artist checking mechanism for all watched artists."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check." "error": "Watch feature is currently disabled globally. Cannot trigger check."
} }
), 403 )
logger.info("Manual trigger for artist check received for all artists.") logger.info("Manual trigger for artist check received for all artists.")
try: try:
thread = threading.Thread(target=check_watched_artists, args=(None,)) thread = threading.Thread(target=check_watched_artists, args=(None,))
thread.start() thread.start()
return jsonify( return {
{
"message": "Artist check triggered successfully in the background for all artists." "message": "Artist check triggered successfully in the background for all artists."
} }
), 202
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error manually triggering artist check for all: {e}", exc_info=True f"Error manually triggering artist check for all: {e}", exc_info=True
) )
return jsonify( raise HTTPException(
{"error": f"Could not trigger artist check for all: {str(e)}"} status_code=500,
), 500 detail={"error": f"Could not trigger artist check for all: {str(e)}"}
)
@artist_bp.route("/watch/trigger_check/<string:artist_spotify_id>", methods=["POST"]) @router.post("/watch/trigger_check/{artist_spotify_id}")
def trigger_specific_artist_check_endpoint(artist_spotify_id: str): async def trigger_specific_artist_check_endpoint(artist_spotify_id: str):
"""Manually triggers the artist checking mechanism for a specific artist.""" """Manually triggers the artist checking mechanism for a specific artist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check." "error": "Watch feature is currently disabled globally. Cannot trigger check."
} }
), 403 )
logger.info( logger.info(
f"Manual trigger for specific artist check received for ID: {artist_spotify_id}" f"Manual trigger for specific artist check received for ID: {artist_spotify_id}"
@@ -347,11 +342,12 @@ def trigger_specific_artist_check_endpoint(artist_spotify_id: str):
logger.warning( logger.warning(
f"Trigger specific check: Artist ID {artist_spotify_id} not found in watchlist." f"Trigger specific check: Artist ID {artist_spotify_id} not found in watchlist."
) )
return jsonify( raise HTTPException(
{ status_code=404,
detail={
"error": f"Artist {artist_spotify_id} is not in the watchlist. Add it first." "error": f"Artist {artist_spotify_id} is not in the watchlist. Add it first."
} }
), 404 )
thread = threading.Thread( thread = threading.Thread(
target=check_watched_artists, args=(artist_spotify_id,) target=check_watched_artists, args=(artist_spotify_id,)
@@ -360,50 +356,54 @@ def trigger_specific_artist_check_endpoint(artist_spotify_id: str):
logger.info( logger.info(
f"Artist check triggered in background for specific artist ID: {artist_spotify_id}" f"Artist check triggered in background for specific artist ID: {artist_spotify_id}"
) )
return jsonify( return {
{
"message": f"Artist check triggered successfully in the background for {artist_spotify_id}." "message": f"Artist check triggered successfully in the background for {artist_spotify_id}."
} }
), 202 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error manually triggering specific artist check for {artist_spotify_id}: {e}", f"Error manually triggering specific artist check for {artist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify( raise HTTPException(
{ status_code=500,
detail={
"error": f"Could not trigger artist check for {artist_spotify_id}: {str(e)}" "error": f"Could not trigger artist check for {artist_spotify_id}: {str(e)}"
} }
), 500 )
@artist_bp.route("/watch/<string:artist_spotify_id>/albums", methods=["POST"]) @router.post("/watch/{artist_spotify_id}/albums")
def mark_albums_as_known_for_artist(artist_spotify_id): async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Request):
"""Fetches details for given album IDs and adds/updates them in the artist's local DB table.""" """Fetches details for given album IDs and adds/updates them in the artist's local DB table."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot mark albums." "error": "Watch feature is currently disabled globally. Cannot mark albums."
} }
), 403 )
logger.info(f"Attempting to mark albums as known for artist {artist_spotify_id}.") logger.info(f"Attempting to mark albums as known for artist {artist_spotify_id}.")
try: try:
album_ids = request.json album_ids = await request.json()
if not isinstance(album_ids, list) or not all( if not isinstance(album_ids, list) or not all(
isinstance(aid, str) for aid in album_ids isinstance(aid, str) for aid in album_ids
): ):
return jsonify( raise HTTPException(
{ status_code=400,
detail={
"error": "Invalid request body. Expecting a JSON array of album Spotify IDs." "error": "Invalid request body. Expecting a JSON array of album Spotify IDs."
} }
), 400 )
if not get_watched_artist(artist_spotify_id): if not get_watched_artist(artist_spotify_id):
return jsonify( raise HTTPException(
{"error": f"Artist {artist_spotify_id} is not being watched."} status_code=404,
), 404 detail={"error": f"Artist {artist_spotify_id} is not being watched."}
)
fetched_albums_details = [] fetched_albums_details = []
for album_id in album_ids: for album_id in album_ids:
@@ -422,12 +422,10 @@ def mark_albums_as_known_for_artist(artist_spotify_id):
) )
if not fetched_albums_details: if not fetched_albums_details:
return jsonify( return {
{
"message": "No valid album details could be fetched to mark as known.", "message": "No valid album details could be fetched to mark as known.",
"processed_count": 0, "processed_count": 0,
} }
), 200
processed_count = add_specific_albums_to_artist_table( processed_count = add_specific_albums_to_artist_table(
artist_spotify_id, fetched_albums_details artist_spotify_id, fetched_albums_details
@@ -435,48 +433,51 @@ def mark_albums_as_known_for_artist(artist_spotify_id):
logger.info( logger.info(
f"Successfully marked/updated {processed_count} albums as known for artist {artist_spotify_id}." f"Successfully marked/updated {processed_count} albums as known for artist {artist_spotify_id}."
) )
return jsonify( return {
{
"message": f"Successfully processed {processed_count} albums for artist {artist_spotify_id}." "message": f"Successfully processed {processed_count} albums for artist {artist_spotify_id}."
} }
), 200 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error marking albums as known for artist {artist_spotify_id}: {e}", f"Error marking albums as known for artist {artist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not mark albums as known: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not mark albums as known: {str(e)}"})
@artist_bp.route("/watch/<string:artist_spotify_id>/albums", methods=["DELETE"]) @router.delete("/watch/{artist_spotify_id}/albums")
def mark_albums_as_missing_locally_for_artist(artist_spotify_id): async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, request: Request):
"""Removes specified albums from the artist's local DB table.""" """Removes specified albums from the artist's local DB table."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot mark albums." "error": "Watch feature is currently disabled globally. Cannot mark albums."
} }
), 403 )
logger.info( logger.info(
f"Attempting to mark albums as missing (delete locally) for artist {artist_spotify_id}." f"Attempting to mark albums as missing (delete locally) for artist {artist_spotify_id}."
) )
try: try:
album_ids = request.json album_ids = await request.json()
if not isinstance(album_ids, list) or not all( if not isinstance(album_ids, list) or not all(
isinstance(aid, str) for aid in album_ids isinstance(aid, str) for aid in album_ids
): ):
return jsonify( raise HTTPException(
{ status_code=400,
detail={
"error": "Invalid request body. Expecting a JSON array of album Spotify IDs." "error": "Invalid request body. Expecting a JSON array of album Spotify IDs."
} }
), 400 )
if not get_watched_artist(artist_spotify_id): if not get_watched_artist(artist_spotify_id):
return jsonify( raise HTTPException(
{"error": f"Artist {artist_spotify_id} is not being watched."} status_code=404,
), 404 detail={"error": f"Artist {artist_spotify_id} is not being watched."}
)
deleted_count = remove_specific_albums_from_artist_table( deleted_count = remove_specific_albums_from_artist_table(
artist_spotify_id, album_ids artist_spotify_id, album_ids
@@ -484,14 +485,14 @@ def mark_albums_as_missing_locally_for_artist(artist_spotify_id):
logger.info( logger.info(
f"Successfully removed {deleted_count} albums locally for artist {artist_spotify_id}." f"Successfully removed {deleted_count} albums locally for artist {artist_spotify_id}."
) )
return jsonify( return {
{
"message": f"Successfully removed {deleted_count} albums locally for artist {artist_spotify_id}." "message": f"Successfully removed {deleted_count} albums locally for artist {artist_spotify_id}."
} }
), 200 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error marking albums as missing (deleting locally) for artist {artist_spotify_id}: {e}", f"Error marking albums as missing (deleting locally) for artist {artist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not mark albums as missing: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not mark albums as missing: {str(e)}"})
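
A second idiom worth calling out from this file: expected client errors are now raised as HTTPException with a dict detail, and each handler re-raises HTTPException before its broad "except Exception" block so a deliberate 400/403/404 is not converted into a 500. A minimal sketch of the idiom, with the watchlist lookup stubbed out:

from fastapi import APIRouter, HTTPException

router = APIRouter()

@router.delete("/watch/{artist_id}")
async def remove_from_watchlist(artist_id: str):
    try:
        found = False  # stand-in for get_watched_artist(artist_id)
        if not found:
            raise HTTPException(status_code=404, detail={"error": f"Artist {artist_id} not watched."})
        return {"message": f"Artist {artist_id} removed from watchlist."}
    except HTTPException:
        raise  # preserve the intended status code
    except Exception as e:
        raise HTTPException(status_code=500, detail={"error": str(e)})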


@@ -1,4 +1,5 @@
-from flask import Blueprint, jsonify, request
+from fastapi import APIRouter, HTTPException, Request
+from fastapi.responses import JSONResponse
 import json
 import logging
 import os
@@ -18,7 +19,7 @@ from routes.utils.watch.manager import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
config_bp = Blueprint("config", __name__) router = APIRouter()
# Flag for config change notifications # Flag for config change notifications
@@ -108,26 +109,28 @@ def save_watch_config_http(watch_config_data): # Renamed
return False, str(e) return False, str(e)
@config_bp.route("/config", methods=["GET"]) @router.get("/config")
def handle_config(): async def handle_config():
"""Handles GET requests for the main configuration.""" """Handles GET requests for the main configuration."""
try: try:
config = get_config() config = get_config()
return jsonify(config) return config
except Exception as e: except Exception as e:
logger.error(f"Error in GET /config: {e}", exc_info=True) logger.error(f"Error in GET /config: {e}", exc_info=True)
return jsonify( raise HTTPException(
{"error": "Failed to retrieve configuration", "details": str(e)} status_code=500,
), 500 detail={"error": "Failed to retrieve configuration", "details": str(e)}
)
@config_bp.route("/config", methods=["POST", "PUT"]) @router.post("/config")
def update_config(): @router.put("/config")
async def update_config(request: Request):
"""Handles POST/PUT requests to update the main configuration.""" """Handles POST/PUT requests to update the main configuration."""
try: try:
new_config = request.get_json() new_config = await request.json()
if not isinstance(new_config, dict): if not isinstance(new_config, dict):
return jsonify({"error": "Invalid config format"}), 400 raise HTTPException(status_code=400, detail={"error": "Invalid config format"})
# Preserve the explicitFilter setting from environment # Preserve the explicitFilter setting from environment
explicit_filter_env = os.environ.get("EXPLICIT_FILTER", "false").lower() explicit_filter_env = os.environ.get("EXPLICIT_FILTER", "false").lower()
@@ -140,73 +143,83 @@ def update_config():
if updated_config_values is None: if updated_config_values is None:
# This case should ideally not be reached if save_config succeeded # This case should ideally not be reached if save_config succeeded
# and get_config handles errors by returning a default or None. # and get_config handles errors by returning a default or None.
return jsonify( raise HTTPException(
{"error": "Failed to retrieve configuration after saving"} status_code=500,
), 500 detail={"error": "Failed to retrieve configuration after saving"}
)
return jsonify(updated_config_values) return updated_config_values
else: else:
return jsonify( raise HTTPException(
{"error": "Failed to update configuration", "details": error_msg} status_code=500,
), 500 detail={"error": "Failed to update configuration", "details": error_msg}
)
except json.JSONDecodeError: except json.JSONDecodeError:
return jsonify({"error": "Invalid JSON data"}), 400 raise HTTPException(status_code=400, detail={"error": "Invalid JSON data"})
except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error(f"Error in POST/PUT /config: {e}", exc_info=True) logger.error(f"Error in POST/PUT /config: {e}", exc_info=True)
return jsonify( raise HTTPException(
{"error": "Failed to update configuration", "details": str(e)} status_code=500,
), 500 detail={"error": "Failed to update configuration", "details": str(e)}
)
@config_bp.route("/config/check", methods=["GET"]) @router.get("/config/check")
def check_config_changes(): async def check_config_changes():
# This endpoint seems more related to dynamically checking if config changed # This endpoint seems more related to dynamically checking if config changed
# on disk, which might not be necessary if settings are applied on restart # on disk, which might not be necessary if settings are applied on restart
# or by a dedicated manager. For now, just return current config. # or by a dedicated manager. For now, just return current config.
try: try:
config = get_config() config = get_config()
return jsonify( return {"message": "Current configuration retrieved.", "config": config}
{"message": "Current configuration retrieved.", "config": config}
)
except Exception as e: except Exception as e:
logger.error(f"Error in GET /config/check: {e}", exc_info=True) logger.error(f"Error in GET /config/check: {e}", exc_info=True)
return jsonify( raise HTTPException(
{"error": "Failed to check configuration", "details": str(e)} status_code=500,
), 500 detail={"error": "Failed to check configuration", "details": str(e)}
)
@config_bp.route("/config/watch", methods=["GET"]) @router.get("/config/watch")
def handle_watch_config(): async def handle_watch_config():
"""Handles GET requests for the watch configuration.""" """Handles GET requests for the watch configuration."""
try: try:
watch_config = get_watch_config_http() watch_config = get_watch_config_http()
return jsonify(watch_config) return watch_config
except Exception as e: except Exception as e:
logger.error(f"Error in GET /config/watch: {e}", exc_info=True) logger.error(f"Error in GET /config/watch: {e}", exc_info=True)
return jsonify( raise HTTPException(
{"error": "Failed to retrieve watch configuration", "details": str(e)} status_code=500,
), 500 detail={"error": "Failed to retrieve watch configuration", "details": str(e)}
)
@config_bp.route("/config/watch", methods=["POST", "PUT"]) @router.post("/config/watch")
def update_watch_config(): @router.put("/config/watch")
async def update_watch_config(request: Request):
"""Handles POST/PUT requests to update the watch configuration.""" """Handles POST/PUT requests to update the watch configuration."""
try: try:
new_watch_config = request.get_json() new_watch_config = await request.json()
if not isinstance(new_watch_config, dict): if not isinstance(new_watch_config, dict):
return jsonify({"error": "Invalid watch config format"}), 400 raise HTTPException(status_code=400, detail={"error": "Invalid watch config format"})
success, error_msg = save_watch_config_http(new_watch_config) success, error_msg = save_watch_config_http(new_watch_config)
if success: if success:
return jsonify({"message": "Watch configuration updated successfully"}), 200 return {"message": "Watch configuration updated successfully"}
else: else:
return jsonify( raise HTTPException(
{"error": "Failed to update watch configuration", "details": error_msg} status_code=500,
), 500 detail={"error": "Failed to update watch configuration", "details": error_msg}
)
except json.JSONDecodeError: except json.JSONDecodeError:
return jsonify({"error": "Invalid JSON data for watch config"}), 400 raise HTTPException(status_code=400, detail={"error": "Invalid JSON data for watch config"})
except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error(f"Error in POST/PUT /config/watch: {e}", exc_info=True) logger.error(f"Error in POST/PUT /config/watch: {e}", exc_info=True)
return jsonify( raise HTTPException(
{"error": "Failed to update watch configuration", "details": str(e)} status_code=500,
), 500 detail={"error": "Failed to update watch configuration", "details": str(e)}
)
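
The config routes show two more conversion details: the same coroutine is registered for both POST and PUT by stacking route decorators, and the JSON body is read with "await request.json()" and validated by hand. A minimal sketch, with persistence stubbed out:

from fastapi import APIRouter, HTTPException, Request

router = APIRouter()

@router.post("/config")
@router.put("/config")
async def update_config(request: Request):
    new_config = await request.json()
    if not isinstance(new_config, dict):
        raise HTTPException(status_code=400, detail={"error": "Invalid config format"})
    # stand-in for save_config(new_config) followed by get_config()
    return new_config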


@@ -1,4 +1,6 @@
-from flask import Blueprint, request, jsonify
+from fastapi import APIRouter, HTTPException, Request
+import json
+import logging
 from routes.utils.credentials import (
     get_credential,
     list_credentials,
@@ -10,159 +12,210 @@ from routes.utils.credentials import (
_get_global_spotify_api_creds, _get_global_spotify_api_creds,
save_global_spotify_api_creds, save_global_spotify_api_creds,
) )
import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
credentials_bp = Blueprint("credentials", __name__) router = APIRouter()
# Initialize the database and tables when the blueprint is loaded # Initialize the database and tables when the router is loaded
init_credentials_db() init_credentials_db()
@credentials_bp.route("/spotify_api_config", methods=["GET", "PUT"]) @router.get("/spotify_api_config")
def handle_spotify_api_config(): @router.put("/spotify_api_config")
async def handle_spotify_api_config(request: Request):
"""Handles GET and PUT requests for the global Spotify API client_id and client_secret.""" """Handles GET and PUT requests for the global Spotify API client_id and client_secret."""
try: try:
if request.method == "GET": if request.method == "GET":
client_id, client_secret = _get_global_spotify_api_creds() client_id, client_secret = _get_global_spotify_api_creds()
if client_id is not None and client_secret is not None: if client_id is not None and client_secret is not None:
return jsonify( return {"client_id": client_id, "client_secret": client_secret}
{"client_id": client_id, "client_secret": client_secret}
), 200
else: else:
# If search.json exists but is empty/incomplete, or doesn't exist # If search.json exists but is empty/incomplete, or doesn't exist
return jsonify( return {
{
"warning": "Global Spotify API credentials are not fully configured or file is missing.", "warning": "Global Spotify API credentials are not fully configured or file is missing.",
"client_id": client_id or "", "client_id": client_id or "",
"client_secret": client_secret or "", "client_secret": client_secret or "",
} }
), 200
elif request.method == "PUT": elif request.method == "PUT":
data = request.get_json() data = await request.json()
if not data or "client_id" not in data or "client_secret" not in data: if not data or "client_id" not in data or "client_secret" not in data:
return jsonify( raise HTTPException(
{ status_code=400,
"error": "Request body must contain 'client_id' and 'client_secret'" detail={"error": "Request body must contain 'client_id' and 'client_secret'"}
} )
), 400
client_id = data["client_id"] client_id = data["client_id"]
client_secret = data["client_secret"] client_secret = data["client_secret"]
if not isinstance(client_id, str) or not isinstance(client_secret, str): if not isinstance(client_id, str) or not isinstance(client_secret, str):
return jsonify( raise HTTPException(
{"error": "'client_id' and 'client_secret' must be strings"} status_code=400,
), 400 detail={"error": "'client_id' and 'client_secret' must be strings"}
)
if save_global_spotify_api_creds(client_id, client_secret): if save_global_spotify_api_creds(client_id, client_secret):
return jsonify( return {"message": "Global Spotify API credentials updated successfully."}
{"message": "Global Spotify API credentials updated successfully."}
), 200
else: else:
return jsonify( raise HTTPException(
{"error": "Failed to save global Spotify API credentials."} status_code=500,
), 500 detail={"error": "Failed to save global Spotify API credentials."}
)
except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error(f"Error in /spotify_api_config: {e}", exc_info=True) logger.error(f"Error in /spotify_api_config: {e}", exc_info=True)
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
@credentials_bp.route("/<service>", methods=["GET"]) @router.get("/{service}")
def handle_list_credentials(service): async def handle_list_credentials(service: str):
try: try:
if service not in ["spotify", "deezer"]: if service not in ["spotify", "deezer"]:
return jsonify( raise HTTPException(
{"error": "Invalid service. Must be 'spotify' or 'deezer'"} status_code=400,
), 400 detail={"error": "Invalid service. Must be 'spotify' or 'deezer'"}
return jsonify(list_credentials(service)) )
return list_credentials(service)
except ValueError as e: # Should not happen with service check above except ValueError as e: # Should not happen with service check above
return jsonify({"error": str(e)}), 400 raise HTTPException(status_code=400, detail={"error": str(e)})
except Exception as e: except Exception as e:
logger.error(f"Error listing credentials for {service}: {e}", exc_info=True) logger.error(f"Error listing credentials for {service}: {e}", exc_info=True)
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
@credentials_bp.route("/<service>/<name>", methods=["GET", "POST", "PUT", "DELETE"]) @router.get("/{service}/{name}")
def handle_single_credential(service, name): async def handle_get_credential(service: str, name: str):
try: try:
if service not in ["spotify", "deezer"]: if service not in ["spotify", "deezer"]:
return jsonify( raise HTTPException(
{"error": "Invalid service. Must be 'spotify' or 'deezer'"} status_code=400,
), 400 detail={"error": "Invalid service. Must be 'spotify' or 'deezer'"}
)
# cred_type logic is removed for Spotify as API keys are global.
# For Deezer, it's always 'credentials' type implicitly.
if request.method == "GET":
# get_credential for Spotify now only returns region and blob_file_path # get_credential for Spotify now only returns region and blob_file_path
return jsonify(get_credential(service, name)) return get_credential(service, name)
elif request.method == "POST":
data = request.get_json()
if not data:
return jsonify({"error": "Request body cannot be empty."}), 400
# create_credential for Spotify now expects 'region' and 'blob_content'
# For Deezer, it expects 'arl' and 'region'
# Validation is handled within create_credential utility function
result = create_credential(service, name, data)
return jsonify(
{
"message": f"Credential for '{name}' ({service}) created successfully.",
"details": result,
}
), 201
elif request.method == "PUT":
data = request.get_json()
if not data:
return jsonify({"error": "Request body cannot be empty."}), 400
# edit_credential for Spotify now handles updates to 'region', 'blob_content'
# For Deezer, 'arl', 'region'
result = edit_credential(service, name, data)
return jsonify(
{
"message": f"Credential for '{name}' ({service}) updated successfully.",
"details": result,
}
)
elif request.method == "DELETE":
# delete_credential for Spotify also handles deleting the blob directory
result = delete_credential(service, name)
return jsonify(
{
"message": f"Credential for '{name}' ({service}) deleted successfully.",
"details": result,
}
)
except (ValueError, FileNotFoundError, FileExistsError) as e: except (ValueError, FileNotFoundError, FileExistsError) as e:
status_code = 400 status_code = 400
if isinstance(e, FileNotFoundError): if isinstance(e, FileNotFoundError):
status_code = 404 status_code = 404
elif isinstance(e, FileExistsError): elif isinstance(e, FileExistsError):
status_code = 409 status_code = 409
logger.warning(f"Client error in /<{service}>/<{name}>: {str(e)}") logger.warning(f"Client error in /{service}/{name}: {str(e)}")
return jsonify({"error": str(e)}), status_code raise HTTPException(status_code=status_code, detail={"error": str(e)})
except Exception as e: except Exception as e:
logger.error(f"Server error in /<{service}>/<{name}>: {e}", exc_info=True) logger.error(f"Server error in /{service}/{name}: {e}", exc_info=True)
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
@router.post("/{service}/{name}")
async def handle_create_credential(service: str, name: str, request: Request):
try:
if service not in ["spotify", "deezer"]:
raise HTTPException(
status_code=400,
detail={"error": "Invalid service. Must be 'spotify' or 'deezer'"}
)
data = await request.json()
if not data:
raise HTTPException(status_code=400, detail={"error": "Request body cannot be empty."})
# create_credential for Spotify now expects 'region' and 'blob_content'
# For Deezer, it expects 'arl' and 'region'
# Validation is handled within create_credential utility function
result = create_credential(service, name, data)
return {
"message": f"Credential for '{name}' ({service}) created successfully.",
"details": result,
}
except (ValueError, FileNotFoundError, FileExistsError) as e:
status_code = 400
if isinstance(e, FileNotFoundError):
status_code = 404
elif isinstance(e, FileExistsError):
status_code = 409
logger.warning(f"Client error in /{service}/{name}: {str(e)}")
raise HTTPException(status_code=status_code, detail={"error": str(e)})
except Exception as e:
logger.error(f"Server error in /{service}/{name}: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
@router.put("/{service}/{name}")
async def handle_update_credential(service: str, name: str, request: Request):
try:
if service not in ["spotify", "deezer"]:
raise HTTPException(
status_code=400,
detail={"error": "Invalid service. Must be 'spotify' or 'deezer'"}
)
data = await request.json()
if not data:
raise HTTPException(status_code=400, detail={"error": "Request body cannot be empty."})
# edit_credential for Spotify now handles updates to 'region', 'blob_content'
# For Deezer, 'arl', 'region'
result = edit_credential(service, name, data)
return {
"message": f"Credential for '{name}' ({service}) updated successfully.",
"details": result,
}
except (ValueError, FileNotFoundError, FileExistsError) as e:
status_code = 400
if isinstance(e, FileNotFoundError):
status_code = 404
elif isinstance(e, FileExistsError):
status_code = 409
logger.warning(f"Client error in /{service}/{name}: {str(e)}")
raise HTTPException(status_code=status_code, detail={"error": str(e)})
except Exception as e:
logger.error(f"Server error in /{service}/{name}: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
@router.delete("/{service}/{name}")
async def handle_delete_credential(service: str, name: str):
try:
if service not in ["spotify", "deezer"]:
raise HTTPException(
status_code=400,
detail={"error": "Invalid service. Must be 'spotify' or 'deezer'"}
)
# delete_credential for Spotify also handles deleting the blob directory
result = delete_credential(service, name)
return {
"message": f"Credential for '{name}' ({service}) deleted successfully.",
"details": result,
}
except (ValueError, FileNotFoundError, FileExistsError) as e:
status_code = 400
if isinstance(e, FileNotFoundError):
status_code = 404
elif isinstance(e, FileExistsError):
status_code = 409
logger.warning(f"Client error in /{service}/{name}: {str(e)}")
raise HTTPException(status_code=status_code, detail={"error": str(e)})
except Exception as e:
logger.error(f"Server error in /{service}/{name}: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
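The create, update, and delete handlers above each repeat the same ValueError/FileNotFoundError/FileExistsError to 400/404/409 mapping. If the per-route try/except blocks were ever dropped, FastAPI's application-level exception handlers could centralize that mapping; a minimal sketch under that assumption (this is not something the commit does):

from fastapi import FastAPI
from fastapi.responses import JSONResponse

app = FastAPI()

def _register(exc_type: type, status: int) -> None:
    @app.exception_handler(exc_type)
    async def _handler(request, exc):
        # One place for the status mapping instead of per-route branches.
        return JSONResponse(status_code=status, content={"error": str(exc)})

for exc_type, status in ((ValueError, 400), (FileNotFoundError, 404), (FileExistsError, 409)):
    _register(exc_type, status)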
# The '/search/<service>/<name>' route is now obsolete for Spotify and has been removed. # The '/search/<service>/<name>' route is now obsolete for Spotify and has been removed.
@credentials_bp.route("/all/<service>", methods=["GET"]) @router.get("/all/{service}")
def handle_all_credentials(service): async def handle_all_credentials(service: str):
"""Lists all credentials for a given service. For Spotify, API keys are global and not listed per account.""" """Lists all credentials for a given service. For Spotify, API keys are global and not listed per account."""
try: try:
if service not in ["spotify", "deezer"]: if service not in ["spotify", "deezer"]:
return jsonify( raise HTTPException(
{"error": "Invalid service. Must be 'spotify' or 'deezer'"} status_code=400,
), 400 detail={"error": "Invalid service. Must be 'spotify' or 'deezer'"}
)
credentials_list = [] credentials_list = []
account_names = list_credentials(service) # This lists names from DB account_names = list_credentials(service) # This lists names from DB
@@ -190,14 +243,14 @@ def handle_all_credentials(service):
} }
) )
return jsonify(credentials_list) return credentials_list
except Exception as e: except Exception as e:
logger.error(f"Error in /all/{service}: {e}", exc_info=True) logger.error(f"Error in /all/{service}: {e}", exc_info=True)
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
@credentials_bp.route("/markets", methods=["GET"]) @router.get("/markets")
def handle_markets(): async def handle_markets():
""" """
Returns a list of unique market regions for Deezer and Spotify accounts. Returns a list of unique market regions for Deezer and Spotify accounts.
""" """
@@ -229,13 +282,11 @@ def handle_markets():
f"Could not retrieve region for spotify account {name}: {e}" f"Could not retrieve region for spotify account {name}: {e}"
) )
return jsonify( return {
{
"deezer": sorted(list(deezer_regions)), "deezer": sorted(list(deezer_regions)),
"spotify": sorted(list(spotify_regions)), "spotify": sorted(list(spotify_regions)),
} }
), 200
except Exception as e: except Exception as e:
logger.error(f"Error in /markets: {e}", exc_info=True) logger.error(f"Error in /markets: {e}", exc_info=True)
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"An unexpected error occurred: {str(e)}"})
routes/history.py
@@ -1,4 +1,5 @@
from flask import Blueprint, Response, request, jsonify from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import JSONResponse
import json import json
import traceback import traceback
import logging import logging
@@ -6,11 +7,11 @@ from routes.utils.history_manager import history_manager
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
history_bp = Blueprint("history", __name__) router = APIRouter()
@history_bp.route("/", methods=["GET"]) @router.get("/")
def get_history(): async def get_history(request: Request):
""" """
Retrieve download history with optional filtering and pagination. Retrieve download history with optional filtering and pagination.
@@ -22,27 +23,25 @@ def get_history():
""" """
try: try:
# Parse query parameters # Parse query parameters
limit = min(int(request.args.get("limit", 100)), 500) # Cap at 500 limit = min(int(request.query_params.get("limit", 100)), 500) # Cap at 500
offset = max(int(request.args.get("offset", 0)), 0) offset = max(int(request.query_params.get("offset", 0)), 0)
download_type = request.args.get("download_type") download_type = request.query_params.get("download_type")
status = request.args.get("status") status = request.query_params.get("status")
# Validate download_type if provided # Validate download_type if provided
valid_types = ["track", "album", "playlist"] valid_types = ["track", "album", "playlist"]
if download_type and download_type not in valid_types: if download_type and download_type not in valid_types:
return Response( return JSONResponse(
json.dumps({"error": f"Invalid download_type. Must be one of: {valid_types}"}), content={"error": f"Invalid download_type. Must be one of: {valid_types}"},
status=400, status_code=400
mimetype="application/json",
) )
# Validate status if provided # Validate status if provided
valid_statuses = ["completed", "failed", "skipped", "in_progress"] valid_statuses = ["completed", "failed", "skipped", "in_progress"]
if status and status not in valid_statuses: if status and status not in valid_statuses:
return Response( return JSONResponse(
json.dumps({"error": f"Invalid status. Must be one of: {valid_statuses}"}), content={"error": f"Invalid status. Must be one of: {valid_statuses}"},
status=400, status_code=400
mimetype="application/json",
) )
# Get history from manager # Get history from manager
@@ -70,29 +69,26 @@ def get_history():
response_data["filters"] = {} response_data["filters"] = {}
response_data["filters"]["status"] = status response_data["filters"]["status"] = status
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=200, status_code=200
mimetype="application/json"
) )
except ValueError as e: except ValueError as e:
return Response( return JSONResponse(
json.dumps({"error": f"Invalid parameter value: {str(e)}"}), content={"error": f"Invalid parameter value: {str(e)}"},
status=400, status_code=400
mimetype="application/json",
) )
except Exception as e: except Exception as e:
logger.error(f"Error retrieving download history: {e}", exc_info=True) logger.error(f"Error retrieving download history: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to retrieve download history", "details": str(e)}), content={"error": "Failed to retrieve download history", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
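The ported handler reads request.query_params and casts by hand, which keeps the Flask shape but bypasses FastAPI's declarative validation. Typed query parameters would let the framework parse and cap the values; a sketch assuming the same parameter names and limits:

from typing import Optional
from fastapi import APIRouter, Query

router = APIRouter()

@router.get("/")
async def get_history(
    limit: int = Query(100, ge=1, le=500),   # cap enforced by the framework
    offset: int = Query(0, ge=0),
    download_type: Optional[str] = None,
    status: Optional[str] = None,
):
    # Malformed values now produce a 422 from FastAPI instead of a manual 400.
    return {"limit": limit, "offset": offset, "download_type": download_type, "status": status}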
@history_bp.route("/<task_id>", methods=["GET"]) @router.get("/{task_id}")
def get_download_by_task_id(task_id): async def get_download_by_task_id(task_id: str):
""" """
Retrieve specific download history by task ID. Retrieve specific download history by task ID.
@@ -103,29 +99,26 @@ def get_download_by_task_id(task_id):
download = history_manager.get_download_by_task_id(task_id) download = history_manager.get_download_by_task_id(task_id)
if not download: if not download:
return Response( return JSONResponse(
json.dumps({"error": f"Download with task ID '{task_id}' not found"}), content={"error": f"Download with task ID '{task_id}' not found"},
status=404, status_code=404
mimetype="application/json",
) )
return Response( return JSONResponse(
json.dumps(download), content=download,
status=200, status_code=200
mimetype="application/json"
) )
except Exception as e: except Exception as e:
logger.error(f"Error retrieving download for task {task_id}: {e}", exc_info=True) logger.error(f"Error retrieving download for task {task_id}: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to retrieve download", "details": str(e)}), content={"error": "Failed to retrieve download", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
@history_bp.route("/<task_id>/children", methods=["GET"]) @router.get("/{task_id}/children")
def get_download_children(task_id): async def get_download_children(task_id: str):
""" """
Retrieve children tracks for an album or playlist download. Retrieve children tracks for an album or playlist download.
@@ -137,18 +130,16 @@ def get_download_children(task_id):
download = history_manager.get_download_by_task_id(task_id) download = history_manager.get_download_by_task_id(task_id)
if not download: if not download:
return Response( return JSONResponse(
json.dumps({"error": f"Download with task ID '{task_id}' not found"}), content={"error": f"Download with task ID '{task_id}' not found"},
status=404, status_code=404
mimetype="application/json",
) )
children_table = download.get("children_table") children_table = download.get("children_table")
if not children_table: if not children_table:
return Response( return JSONResponse(
json.dumps({"error": f"Download '{task_id}' has no children tracks"}), content={"error": f"Download '{task_id}' has no children tracks"},
status=404, status_code=404
mimetype="application/json",
) )
# Get children tracks # Get children tracks
@@ -163,46 +154,42 @@ def get_download_children(task_id):
"track_count": len(children) "track_count": len(children)
} }
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=200, status_code=200
mimetype="application/json"
) )
except Exception as e: except Exception as e:
logger.error(f"Error retrieving children for task {task_id}: {e}", exc_info=True) logger.error(f"Error retrieving children for task {task_id}: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to retrieve download children", "details": str(e)}), content={"error": "Failed to retrieve download children", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
@history_bp.route("/stats", methods=["GET"]) @router.get("/stats")
def get_download_stats(): async def get_download_stats():
""" """
Get download statistics and summary information. Get download statistics and summary information.
""" """
try: try:
stats = history_manager.get_download_stats() stats = history_manager.get_download_stats()
return Response( return JSONResponse(
json.dumps(stats), content=stats,
status=200, status_code=200
mimetype="application/json"
) )
except Exception as e: except Exception as e:
logger.error(f"Error retrieving download stats: {e}", exc_info=True) logger.error(f"Error retrieving download stats: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to retrieve download statistics", "details": str(e)}), content={"error": "Failed to retrieve download statistics", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
@history_bp.route("/search", methods=["GET"]) @router.get("/search")
def search_history(): async def search_history(request: Request):
""" """
Search download history by title or artist. Search download history by title or artist.
@@ -211,15 +198,14 @@ def search_history():
- limit: Maximum number of results (default: 50, max: 200) - limit: Maximum number of results (default: 50, max: 200)
""" """
try: try:
query = request.args.get("q") query = request.query_params.get("q")
if not query: if not query:
return Response( return JSONResponse(
json.dumps({"error": "Missing required parameter: q (search query)"}), content={"error": "Missing required parameter: q (search query)"},
status=400, status_code=400
mimetype="application/json",
) )
limit = min(int(request.args.get("limit", 50)), 200) # Cap at 200 limit = min(int(request.query_params.get("limit", 50)), 200) # Cap at 200
# Search history # Search history
results = history_manager.search_history(query, limit) results = history_manager.search_history(query, limit)
@@ -231,29 +217,26 @@ def search_history():
"limit": limit "limit": limit
} }
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=200, status_code=200
mimetype="application/json"
) )
except ValueError as e: except ValueError as e:
return Response( return JSONResponse(
json.dumps({"error": f"Invalid parameter value: {str(e)}"}), content={"error": f"Invalid parameter value: {str(e)}"},
status=400, status_code=400
mimetype="application/json",
) )
except Exception as e: except Exception as e:
logger.error(f"Error searching download history: {e}", exc_info=True) logger.error(f"Error searching download history: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to search download history", "details": str(e)}), content={"error": "Failed to search download history", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
@history_bp.route("/recent", methods=["GET"]) @router.get("/recent")
def get_recent_downloads(): async def get_recent_downloads(request: Request):
""" """
Get most recent downloads. Get most recent downloads.
@@ -261,7 +244,7 @@ def get_recent_downloads():
- limit: Maximum number of results (default: 20, max: 100) - limit: Maximum number of results (default: 20, max: 100)
""" """
try: try:
limit = min(int(request.args.get("limit", 20)), 100) # Cap at 100 limit = min(int(request.query_params.get("limit", 20)), 100) # Cap at 100
recent = history_manager.get_recent_downloads(limit) recent = history_manager.get_recent_downloads(limit)
@@ -271,29 +254,26 @@ def get_recent_downloads():
"limit": limit "limit": limit
} }
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=200, status_code=200
mimetype="application/json"
) )
except ValueError as e: except ValueError as e:
return Response( return JSONResponse(
json.dumps({"error": f"Invalid parameter value: {str(e)}"}), content={"error": f"Invalid parameter value: {str(e)}"},
status=400, status_code=400
mimetype="application/json",
) )
except Exception as e: except Exception as e:
logger.error(f"Error retrieving recent downloads: {e}", exc_info=True) logger.error(f"Error retrieving recent downloads: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to retrieve recent downloads", "details": str(e)}), content={"error": "Failed to retrieve recent downloads", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
@history_bp.route("/failed", methods=["GET"]) @router.get("/failed")
def get_failed_downloads(): async def get_failed_downloads(request: Request):
""" """
Get failed downloads. Get failed downloads.
@@ -301,7 +281,7 @@ def get_failed_downloads():
- limit: Maximum number of results (default: 50, max: 200) - limit: Maximum number of results (default: 50, max: 200)
""" """
try: try:
limit = min(int(request.args.get("limit", 50)), 200) # Cap at 200 limit = min(int(request.query_params.get("limit", 50)), 200) # Cap at 200
failed = history_manager.get_failed_downloads(limit) failed = history_manager.get_failed_downloads(limit)
@@ -311,29 +291,26 @@ def get_failed_downloads():
"limit": limit "limit": limit
} }
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=200, status_code=200
mimetype="application/json"
) )
except ValueError as e: except ValueError as e:
return Response( return JSONResponse(
json.dumps({"error": f"Invalid parameter value: {str(e)}"}), content={"error": f"Invalid parameter value: {str(e)}"},
status=400, status_code=400
mimetype="application/json",
) )
except Exception as e: except Exception as e:
logger.error(f"Error retrieving failed downloads: {e}", exc_info=True) logger.error(f"Error retrieving failed downloads: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to retrieve failed downloads", "details": str(e)}), content={"error": "Failed to retrieve failed downloads", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
@history_bp.route("/cleanup", methods=["POST"]) @router.post("/cleanup")
def cleanup_old_history(): async def cleanup_old_history(request: Request):
""" """
Clean up old download history. Clean up old download history.
@@ -341,14 +318,13 @@ def cleanup_old_history():
- days_old: Number of days old to keep (default: 30) - days_old: Number of days old to keep (default: 30)
""" """
try: try:
data = request.get_json() or {} data = await request.json() if request.headers.get("content-type") == "application/json" else {}
days_old = data.get("days_old", 30) days_old = data.get("days_old", 30)
if not isinstance(days_old, int) or days_old <= 0: if not isinstance(days_old, int) or days_old <= 0:
return Response( return JSONResponse(
json.dumps({"error": "days_old must be a positive integer"}), content={"error": "days_old must be a positive integer"},
status=400, status_code=400
mimetype="application/json",
) )
deleted_count = history_manager.clear_old_history(days_old) deleted_count = history_manager.clear_old_history(days_old)
@@ -359,16 +335,14 @@ def cleanup_old_history():
"days_old": days_old "days_old": days_old
} }
return Response( return JSONResponse(
json.dumps(response_data), content=response_data,
status=200, status_code=200
mimetype="application/json"
) )
except Exception as e: except Exception as e:
logger.error(f"Error cleaning up old history: {e}", exc_info=True) logger.error(f"Error cleaning up old history: {e}", exc_info=True)
return Response( return JSONResponse(
json.dumps({"error": "Failed to cleanup old history", "details": str(e)}), content={"error": "Failed to cleanup old history", "details": str(e)},
status=500, status_code=500
mimetype="application/json",
) )
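The cleanup endpoint guards request.json() behind a content-type check and then validates days_old by hand; a Pydantic body model would cover both concerns. A sketch under that assumption (the original also accepts an empty body, which would additionally need an optional body default):

from fastapi import APIRouter
from pydantic import BaseModel, Field

router = APIRouter()

class CleanupRequest(BaseModel):
    days_old: int = Field(30, gt=0)   # positive integer, defaults to 30

@router.post("/cleanup")
async def cleanup_old_history(body: CleanupRequest):
    # days_old arrives already validated; bad input is rejected with 422.
    return {"days_old": body.days_old}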
routes/playlist.py
@@ -1,4 +1,5 @@
from flask import Blueprint, Response, request, jsonify from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import JSONResponse
import json import json
import traceback import traceback
import logging # Added logging import import logging # Added logging import
@@ -30,7 +31,7 @@ from routes.utils.watch.manager import (
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
logger = logging.getLogger(__name__) # Added logger initialization logger = logging.getLogger(__name__) # Added logger initialization
playlist_bp = Blueprint("playlist", __name__, url_prefix="/api/playlist") router = APIRouter()
def construct_spotify_url(item_id: str, item_type: str = "track") -> str: def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
@@ -38,18 +39,16 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
return f"https://open.spotify.com/{item_type}/{item_id}" return f"https://open.spotify.com/{item_type}/{item_id}"
@playlist_bp.route("/download/<playlist_id>", methods=["GET"]) @router.get("/download/{playlist_id}")
def handle_download(playlist_id): async def handle_download(playlist_id: str, request: Request):
# Retrieve essential parameters from the request. # Retrieve essential parameters from the request.
# name = request.args.get('name') # Removed # name = request.args.get('name') # Removed
# artist = request.args.get('artist') # Removed # artist = request.args.get('artist') # Removed
orig_params = request.args.to_dict() orig_params = dict(request.query_params)
# Construct the URL from playlist_id # Construct the URL from playlist_id
url = construct_spotify_url(playlist_id, "playlist") url = construct_spotify_url(playlist_id, "playlist")
orig_params["original_url"] = ( orig_params["original_url"] = str(request.url) # Update original_url to the constructed one
request.url
) # Update original_url to the constructed one
# Fetch metadata from Spotify using optimized function # Fetch metadata from Spotify using optimized function
try: try:
@@ -60,14 +59,11 @@ def handle_download(playlist_id):
or not playlist_info.get("name") or not playlist_info.get("name")
or not playlist_info.get("owner") or not playlist_info.get("owner")
): ):
return Response( return JSONResponse(
json.dumps( content={
{
"error": f"Could not retrieve metadata for playlist ID: {playlist_id}" "error": f"Could not retrieve metadata for playlist ID: {playlist_id}"
} },
), status_code=404
status=404,
mimetype="application/json",
) )
name_from_spotify = playlist_info.get("name") name_from_spotify = playlist_info.get("name")
@@ -76,22 +72,18 @@ def handle_download(playlist_id):
artist_from_spotify = owner_info.get("display_name", "Unknown Owner") artist_from_spotify = owner_info.get("display_name", "Unknown Owner")
except Exception as e: except Exception as e:
return Response( return JSONResponse(
json.dumps( content={
{
"error": f"Failed to fetch metadata for playlist {playlist_id}: {str(e)}" "error": f"Failed to fetch metadata for playlist {playlist_id}: {str(e)}"
} },
), status_code=500
status=500,
mimetype="application/json",
) )
# Validate required parameters # Validate required parameters
if not url: # This check might be redundant now but kept for safety if not url: # This check might be redundant now but kept for safety
return Response( return JSONResponse(
json.dumps({"error": "Missing required parameter: url"}), content={"error": "Missing required parameter: url"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
@@ -105,15 +97,12 @@ def handle_download(playlist_id):
} }
) )
except DuplicateDownloadError as e: except DuplicateDownloadError as e:
return Response( return JSONResponse(
json.dumps( content={
{
"error": "Duplicate download detected.", "error": "Duplicate download detected.",
"existing_task": e.existing_task, "existing_task": e.existing_task,
} },
), status_code=409
status=409,
mimetype="application/json",
) )
except Exception as e: except Exception as e:
# Generic error handling for other issues during task submission # Generic error handling for other issues during task submission
@@ -138,58 +127,52 @@ def handle_download(playlist_id):
"timestamp": time.time(), "timestamp": time.time(),
}, },
) )
return Response( return JSONResponse(
json.dumps( content={
{
"error": f"Failed to queue playlist download: {str(e)}", "error": f"Failed to queue playlist download: {str(e)}",
"task_id": error_task_id, "task_id": error_task_id,
} },
), status_code=500
status=500,
mimetype="application/json",
) )
return Response( return JSONResponse(
json.dumps({"task_id": task_id}), content={"task_id": task_id},
status=202, status_code=202
mimetype="application/json",
) )
@playlist_bp.route("/download/cancel", methods=["GET"]) @router.get("/download/cancel")
def cancel_download(): async def cancel_download(request: Request):
""" """
Cancel a running playlist download process by its task id. Cancel a running playlist download process by its task id.
""" """
task_id = request.args.get("task_id") task_id = request.query_params.get("task_id")
if not task_id: if not task_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing task id (task_id) parameter"}), content={"error": "Missing task id (task_id) parameter"},
status=400, status_code=400
mimetype="application/json",
) )
# Use the queue manager's cancellation method. # Use the queue manager's cancellation method.
result = download_queue_manager.cancel_task(task_id) result = download_queue_manager.cancel_task(task_id)
status_code = 200 if result.get("status") == "cancelled" else 404 status_code = 200 if result.get("status") == "cancelled" else 404
return Response(json.dumps(result), status=status_code, mimetype="application/json") return JSONResponse(content=result, status_code=status_code)
@playlist_bp.route("/info", methods=["GET"]) @router.get("/info")
def get_playlist_info(): async def get_playlist_info(request: Request):
""" """
Retrieve Spotify playlist metadata given a Spotify playlist ID. Retrieve Spotify playlist metadata given a Spotify playlist ID.
Expects a query parameter 'id' that contains the Spotify playlist ID. Expects a query parameter 'id' that contains the Spotify playlist ID.
""" """
spotify_id = request.args.get("id") spotify_id = request.query_params.get("id")
include_tracks = request.args.get("include_tracks", "false").lower() == "true" include_tracks = request.query_params.get("include_tracks", "false").lower() == "true"
if not spotify_id: if not spotify_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing parameter: id"}), content={"error": "Missing parameter: id"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
@@ -216,27 +199,26 @@ def get_playlist_info():
# If not watched, or no tracks, is_locally_known will not be added, or tracks won't exist to add it to. # If not watched, or no tracks, is_locally_known will not be added, or tracks won't exist to add it to.
# Frontend should handle absence of this key as false. # Frontend should handle absence of this key as false.
return Response( return JSONResponse(
json.dumps(playlist_info), status=200, mimetype="application/json" content=playlist_info, status_code=200
) )
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return Response(json.dumps(error_data), status=500, mimetype="application/json") return JSONResponse(content=error_data, status_code=500)
@playlist_bp.route("/metadata", methods=["GET"]) @router.get("/metadata")
def get_playlist_metadata(): async def get_playlist_metadata(request: Request):
""" """
Retrieve only Spotify playlist metadata (no tracks) to avoid rate limiting. Retrieve only Spotify playlist metadata (no tracks) to avoid rate limiting.
Expects a query parameter 'id' that contains the Spotify playlist ID. Expects a query parameter 'id' that contains the Spotify playlist ID.
""" """
spotify_id = request.args.get("id") spotify_id = request.query_params.get("id")
if not spotify_id: if not spotify_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing parameter: id"}), content={"error": "Missing parameter: id"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
@@ -244,29 +226,28 @@ def get_playlist_metadata():
from routes.utils.get_info import get_playlist_metadata from routes.utils.get_info import get_playlist_metadata
playlist_metadata = get_playlist_metadata(spotify_id) playlist_metadata = get_playlist_metadata(spotify_id)
return Response( return JSONResponse(
json.dumps(playlist_metadata), status=200, mimetype="application/json" content=playlist_metadata, status_code=200
) )
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return Response(json.dumps(error_data), status=500, mimetype="application/json") return JSONResponse(content=error_data, status_code=500)
@playlist_bp.route("/tracks", methods=["GET"]) @router.get("/tracks")
def get_playlist_tracks(): async def get_playlist_tracks(request: Request):
""" """
Retrieve playlist tracks with pagination support for progressive loading. Retrieve playlist tracks with pagination support for progressive loading.
Expects query parameters: 'id' (playlist ID), 'limit' (optional), 'offset' (optional). Expects query parameters: 'id' (playlist ID), 'limit' (optional), 'offset' (optional).
""" """
spotify_id = request.args.get("id") spotify_id = request.query_params.get("id")
limit = request.args.get("limit", 50, type=int) limit = int(request.query_params.get("limit", 50))
offset = request.args.get("offset", 0, type=int) offset = int(request.query_params.get("offset", 0))
if not spotify_id: if not spotify_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing parameter: id"}), content={"error": "Missing parameter: id"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
@@ -274,28 +255,26 @@ def get_playlist_tracks():
from routes.utils.get_info import get_playlist_tracks from routes.utils.get_info import get_playlist_tracks
tracks_data = get_playlist_tracks(spotify_id, limit=limit, offset=offset) tracks_data = get_playlist_tracks(spotify_id, limit=limit, offset=offset)
return Response( return JSONResponse(
json.dumps(tracks_data), status=200, mimetype="application/json" content=tracks_data, status_code=200
) )
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return Response(json.dumps(error_data), status=500, mimetype="application/json") return JSONResponse(content=error_data, status_code=500)
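One behavioral difference: Flask's request.args.get("limit", 50, type=int) silently falls back to the default when the value is not numeric, while int(request.query_params.get("limit", 50)) raises ValueError and surfaces as an unhandled 500. Typed parameters would turn that into a validation error; a short sketch with assumed defaults:

from fastapi import APIRouter, Query

router = APIRouter()

@router.get("/tracks")
async def get_playlist_tracks(id: str, limit: int = Query(50, ge=1), offset: int = Query(0, ge=0)):
    # Non-numeric limit/offset now yields a 422 instead of a 500.
    return {"id": id, "limit": limit, "offset": offset}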
@playlist_bp.route("/watch/<string:playlist_spotify_id>", methods=["PUT"]) @router.put("/watch/{playlist_spotify_id}")
def add_to_watchlist(playlist_spotify_id): async def add_to_watchlist(playlist_spotify_id: str):
"""Adds a playlist to the watchlist.""" """Adds a playlist to the watchlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify({"error": "Watch feature is currently disabled globally."}), 403 raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
logger.info(f"Attempting to add playlist {playlist_spotify_id} to watchlist.") logger.info(f"Attempting to add playlist {playlist_spotify_id} to watchlist.")
try: try:
# Check if already watched # Check if already watched
if get_watched_playlist(playlist_spotify_id): if get_watched_playlist(playlist_spotify_id):
return jsonify( return {"message": f"Playlist {playlist_spotify_id} is already being watched."}
{"message": f"Playlist {playlist_spotify_id} is already being watched."}
), 200
# Fetch playlist details from Spotify to populate our DB # Fetch playlist details from Spotify to populate our DB
from routes.utils.get_info import get_playlist_metadata from routes.utils.get_info import get_playlist_metadata
@@ -304,11 +283,12 @@ def add_to_watchlist(playlist_spotify_id):
logger.error( logger.error(
f"Could not fetch details for playlist {playlist_spotify_id} from Spotify." f"Could not fetch details for playlist {playlist_spotify_id} from Spotify."
) )
return jsonify( raise HTTPException(
{ status_code=404,
detail={
"error": f"Could not fetch details for playlist {playlist_spotify_id} from Spotify." "error": f"Could not fetch details for playlist {playlist_spotify_id} from Spotify."
} }
), 404 )
add_playlist_db(playlist_data) # This also creates the tracks table add_playlist_db(playlist_data) # This also creates the tracks table
@@ -323,99 +303,104 @@ def add_to_watchlist(playlist_spotify_id):
logger.info( logger.info(
f"Playlist {playlist_spotify_id} added to watchlist. Its tracks will be processed by the watch manager." f"Playlist {playlist_spotify_id} added to watchlist. Its tracks will be processed by the watch manager."
) )
return jsonify( return {
{
"message": f"Playlist {playlist_spotify_id} added to watchlist. Tracks will be processed shortly." "message": f"Playlist {playlist_spotify_id} added to watchlist. Tracks will be processed shortly."
} }
), 201 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error adding playlist {playlist_spotify_id} to watchlist: {e}", f"Error adding playlist {playlist_spotify_id} to watchlist: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not add playlist to watchlist: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not add playlist to watchlist: {str(e)}"})
@playlist_bp.route("/watch/<string:playlist_spotify_id>/status", methods=["GET"]) @router.get("/watch/{playlist_spotify_id}/status")
def get_playlist_watch_status(playlist_spotify_id): async def get_playlist_watch_status(playlist_spotify_id: str):
"""Checks if a specific playlist is being watched.""" """Checks if a specific playlist is being watched."""
logger.info(f"Checking watch status for playlist {playlist_spotify_id}.") logger.info(f"Checking watch status for playlist {playlist_spotify_id}.")
try: try:
playlist = get_watched_playlist(playlist_spotify_id) playlist = get_watched_playlist(playlist_spotify_id)
if playlist: if playlist:
return jsonify({"is_watched": True, "playlist_data": playlist}), 200 return {"is_watched": True, "playlist_data": playlist}
else: else:
# Return 200 with is_watched: false, so frontend can clearly distinguish # Return 200 with is_watched: false, so frontend can clearly distinguish
# between "not watched" and an actual error fetching status. # between "not watched" and an actual error fetching status.
return jsonify({"is_watched": False}), 200 return {"is_watched": False}
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error checking watch status for playlist {playlist_spotify_id}: {e}", f"Error checking watch status for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not check watch status: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not check watch status: {str(e)}"})
@playlist_bp.route("/watch/<string:playlist_spotify_id>", methods=["DELETE"]) @router.delete("/watch/{playlist_spotify_id}")
def remove_from_watchlist(playlist_spotify_id): async def remove_from_watchlist(playlist_spotify_id: str):
"""Removes a playlist from the watchlist.""" """Removes a playlist from the watchlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify({"error": "Watch feature is currently disabled globally."}), 403 raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
logger.info(f"Attempting to remove playlist {playlist_spotify_id} from watchlist.") logger.info(f"Attempting to remove playlist {playlist_spotify_id} from watchlist.")
try: try:
if not get_watched_playlist(playlist_spotify_id): if not get_watched_playlist(playlist_spotify_id):
return jsonify( raise HTTPException(
{"error": f"Playlist {playlist_spotify_id} not found in watchlist."} status_code=404,
), 404 detail={"error": f"Playlist {playlist_spotify_id} not found in watchlist."}
)
remove_playlist_db(playlist_spotify_id) remove_playlist_db(playlist_spotify_id)
logger.info( logger.info(
f"Playlist {playlist_spotify_id} removed from watchlist successfully." f"Playlist {playlist_spotify_id} removed from watchlist successfully."
) )
return jsonify( return {"message": f"Playlist {playlist_spotify_id} removed from watchlist."}
{"message": f"Playlist {playlist_spotify_id} removed from watchlist."} except HTTPException:
), 200 raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error removing playlist {playlist_spotify_id} from watchlist: {e}", f"Error removing playlist {playlist_spotify_id} from watchlist: {e}",
exc_info=True, exc_info=True,
) )
return jsonify( raise HTTPException(
{"error": f"Could not remove playlist from watchlist: {str(e)}"} status_code=500,
), 500 detail={"error": f"Could not remove playlist from watchlist: {str(e)}"}
)
@playlist_bp.route("/watch/<string:playlist_spotify_id>/tracks", methods=["POST"]) @router.post("/watch/{playlist_spotify_id}/tracks")
def mark_tracks_as_known(playlist_spotify_id): async def mark_tracks_as_known(playlist_spotify_id: str, request: Request):
"""Fetches details for given track IDs and adds/updates them in the playlist's local DB table.""" """Fetches details for given track IDs and adds/updates them in the playlist's local DB table."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot mark tracks." "error": "Watch feature is currently disabled globally. Cannot mark tracks."
} }
), 403 )
logger.info( logger.info(
f"Attempting to mark tracks as known for playlist {playlist_spotify_id}." f"Attempting to mark tracks as known for playlist {playlist_spotify_id}."
) )
try: try:
track_ids = request.json track_ids = await request.json()
if not isinstance(track_ids, list) or not all( if not isinstance(track_ids, list) or not all(
isinstance(tid, str) for tid in track_ids isinstance(tid, str) for tid in track_ids
): ):
return jsonify( raise HTTPException(
{ status_code=400,
detail={
"error": "Invalid request body. Expecting a JSON array of track Spotify IDs." "error": "Invalid request body. Expecting a JSON array of track Spotify IDs."
} }
), 400 )
if not get_watched_playlist(playlist_spotify_id): if not get_watched_playlist(playlist_spotify_id):
return jsonify( raise HTTPException(
{"error": f"Playlist {playlist_spotify_id} is not being watched."} status_code=404,
), 404 detail={"error": f"Playlist {playlist_spotify_id} is not being watched."}
)
fetched_tracks_details = [] fetched_tracks_details = []
for track_id in track_ids: for track_id in track_ids:
@@ -433,12 +418,10 @@ def mark_tracks_as_known(playlist_spotify_id):
) )
if not fetched_tracks_details: if not fetched_tracks_details:
return jsonify( return {
{
"message": "No valid track details could be fetched to mark as known.", "message": "No valid track details could be fetched to mark as known.",
"processed_count": 0, "processed_count": 0,
} }
), 200
add_specific_tracks_to_playlist_table( add_specific_tracks_to_playlist_table(
playlist_spotify_id, fetched_tracks_details playlist_spotify_id, fetched_tracks_details
@@ -446,48 +429,51 @@ def mark_tracks_as_known(playlist_spotify_id):
logger.info( logger.info(
f"Successfully marked/updated {len(fetched_tracks_details)} tracks as known for playlist {playlist_spotify_id}." f"Successfully marked/updated {len(fetched_tracks_details)} tracks as known for playlist {playlist_spotify_id}."
) )
return jsonify( return {
{
"message": f"Successfully processed {len(fetched_tracks_details)} tracks for playlist {playlist_spotify_id}." "message": f"Successfully processed {len(fetched_tracks_details)} tracks for playlist {playlist_spotify_id}."
} }
), 200 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error marking tracks as known for playlist {playlist_spotify_id}: {e}", f"Error marking tracks as known for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not mark tracks as known: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not mark tracks as known: {str(e)}"})
@playlist_bp.route("/watch/<string:playlist_spotify_id>/tracks", methods=["DELETE"]) @router.delete("/watch/{playlist_spotify_id}/tracks")
def mark_tracks_as_missing_locally(playlist_spotify_id): async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Request):
"""Removes specified tracks from the playlist's local DB table.""" """Removes specified tracks from the playlist's local DB table."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot mark tracks." "error": "Watch feature is currently disabled globally. Cannot mark tracks."
} }
), 403 )
logger.info( logger.info(
f"Attempting to mark tracks as missing (remove locally) for playlist {playlist_spotify_id}." f"Attempting to mark tracks as missing (remove locally) for playlist {playlist_spotify_id}."
) )
try: try:
track_ids = request.json track_ids = await request.json()
if not isinstance(track_ids, list) or not all( if not isinstance(track_ids, list) or not all(
isinstance(tid, str) for tid in track_ids isinstance(tid, str) for tid in track_ids
): ):
return jsonify( raise HTTPException(
{ status_code=400,
detail={
"error": "Invalid request body. Expecting a JSON array of track Spotify IDs." "error": "Invalid request body. Expecting a JSON array of track Spotify IDs."
} }
), 400 )
if not get_watched_playlist(playlist_spotify_id): if not get_watched_playlist(playlist_spotify_id):
return jsonify( raise HTTPException(
{"error": f"Playlist {playlist_spotify_id} is not being watched."} status_code=404,
), 404 detail={"error": f"Playlist {playlist_spotify_id} is not being watched."}
)
deleted_count = remove_specific_tracks_from_playlist_table( deleted_count = remove_specific_tracks_from_playlist_table(
playlist_spotify_id, track_ids playlist_spotify_id, track_ids
@@ -495,72 +481,71 @@ def mark_tracks_as_missing_locally(playlist_spotify_id):
logger.info( logger.info(
f"Successfully removed {deleted_count} tracks locally for playlist {playlist_spotify_id}." f"Successfully removed {deleted_count} tracks locally for playlist {playlist_spotify_id}."
) )
return jsonify( return {
{
"message": f"Successfully removed {deleted_count} tracks locally for playlist {playlist_spotify_id}." "message": f"Successfully removed {deleted_count} tracks locally for playlist {playlist_spotify_id}."
} }
), 200 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error marking tracks as missing (deleting locally) for playlist {playlist_spotify_id}: {e}", f"Error marking tracks as missing (deleting locally) for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify({"error": f"Could not mark tracks as missing: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not mark tracks as missing: {str(e)}"})
@playlist_bp.route("/watch/list", methods=["GET"]) @router.get("/watch/list")
def list_watched_playlists_endpoint(): async def list_watched_playlists_endpoint():
"""Lists all playlists currently in the watchlist.""" """Lists all playlists currently in the watchlist."""
try: try:
playlists = get_watched_playlists() playlists = get_watched_playlists()
return jsonify(playlists), 200 return playlists
except Exception as e: except Exception as e:
logger.error(f"Error listing watched playlists: {e}", exc_info=True) logger.error(f"Error listing watched playlists: {e}", exc_info=True)
return jsonify({"error": f"Could not list watched playlists: {str(e)}"}), 500 raise HTTPException(status_code=500, detail={"error": f"Could not list watched playlists: {str(e)}"})
@playlist_bp.route("/watch/trigger_check", methods=["POST"]) @router.post("/watch/trigger_check")
def trigger_playlist_check_endpoint(): async def trigger_playlist_check_endpoint():
"""Manually triggers the playlist checking mechanism for all watched playlists.""" """Manually triggers the playlist checking mechanism for all watched playlists."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check." "error": "Watch feature is currently disabled globally. Cannot trigger check."
} }
), 403 )
logger.info("Manual trigger for playlist check received for all playlists.") logger.info("Manual trigger for playlist check received for all playlists.")
try: try:
# Run check_watched_playlists without an ID to check all # Run check_watched_playlists without an ID to check all
thread = threading.Thread(target=check_watched_playlists, args=(None,)) thread = threading.Thread(target=check_watched_playlists, args=(None,))
thread.start() thread.start()
return jsonify( return {
{
"message": "Playlist check triggered successfully in the background for all playlists." "message": "Playlist check triggered successfully in the background for all playlists."
} }
), 202
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error manually triggering playlist check for all: {e}", exc_info=True f"Error manually triggering playlist check for all: {e}", exc_info=True
) )
return jsonify( raise HTTPException(
{"error": f"Could not trigger playlist check for all: {str(e)}"} status_code=500,
), 500 detail={"error": f"Could not trigger playlist check for all: {str(e)}"}
@playlist_bp.route(
"/watch/trigger_check/<string:playlist_spotify_id>", methods=["POST"]
) )
def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str):
@router.post("/watch/trigger_check/{playlist_spotify_id}")
async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str):
"""Manually triggers the playlist checking mechanism for a specific playlist.""" """Manually triggers the playlist checking mechanism for a specific playlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
return jsonify( raise HTTPException(
{ status_code=403,
detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check." "error": "Watch feature is currently disabled globally. Cannot trigger check."
} }
), 403 )
logger.info( logger.info(
f"Manual trigger for specific playlist check received for ID: {playlist_spotify_id}" f"Manual trigger for specific playlist check received for ID: {playlist_spotify_id}"
@@ -572,11 +557,12 @@ def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str):
logger.warning( logger.warning(
f"Trigger specific check: Playlist ID {playlist_spotify_id} not found in watchlist." f"Trigger specific check: Playlist ID {playlist_spotify_id} not found in watchlist."
) )
return jsonify( raise HTTPException(
{ status_code=404,
detail={
"error": f"Playlist {playlist_spotify_id} is not in the watchlist. Add it first." "error": f"Playlist {playlist_spotify_id} is not in the watchlist. Add it first."
} }
), 404 )
# Run check_watched_playlists with the specific ID # Run check_watched_playlists with the specific ID
thread = threading.Thread( thread = threading.Thread(
@@ -586,18 +572,19 @@ def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str):
logger.info( logger.info(
f"Playlist check triggered in background for specific playlist ID: {playlist_spotify_id}" f"Playlist check triggered in background for specific playlist ID: {playlist_spotify_id}"
) )
return jsonify( return {
{
"message": f"Playlist check triggered successfully in the background for {playlist_spotify_id}." "message": f"Playlist check triggered successfully in the background for {playlist_spotify_id}."
} }
), 202 except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error manually triggering specific playlist check for {playlist_spotify_id}: {e}", f"Error manually triggering specific playlist check for {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
return jsonify( raise HTTPException(
{ status_code=500,
detail={
"error": f"Could not trigger playlist check for {playlist_spotify_id}: {str(e)}" "error": f"Could not trigger playlist check for {playlist_spotify_id}: {str(e)}"
} }
), 500 )
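Both trigger endpoints previously answered 202 Accepted and still launch the check on a raw threading.Thread. FastAPI's BackgroundTasks covers the same fire-and-forget case, and the decorator can keep the 202; a sketch, assuming check_watched_playlists keeps its current signature (the import path mirrors the one already used in this module):

from fastapi import APIRouter, BackgroundTasks
from routes.utils.watch.manager import check_watched_playlists

router = APIRouter()

@router.post("/watch/trigger_check/{playlist_spotify_id}", status_code=202)
async def trigger_specific_playlist_check(playlist_spotify_id: str, background_tasks: BackgroundTasks):
    # Runs after the response is sent, comparable to the thread-based version.
    background_tasks.add_task(check_watched_playlists, playlist_spotify_id)
    return {"message": f"Playlist check triggered for {playlist_spotify_id}."}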
routes/prgs.py
@@ -1,6 +1,9 @@
from flask import Blueprint, abort, jsonify, request from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import JSONResponse, StreamingResponse
import logging import logging
import time import time
import json
import asyncio
from routes.utils.celery_tasks import ( from routes.utils.celery_tasks import (
get_task_info, get_task_info,
@@ -14,9 +17,7 @@ from routes.utils.celery_tasks import (
# Configure logging # Configure logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
prgs_bp = Blueprint("prgs", __name__, url_prefix="/api/prgs") router = APIRouter()
# (Old .prg file system removed. Using new task system only.)
# Define active task states using ProgressState constants # Define active task states using ProgressState constants
ACTIVE_TASK_STATES = { ACTIVE_TASK_STATES = {
@@ -119,7 +120,7 @@ def _build_error_callback_object(last_status):
return callback_object return callback_object
def _build_task_response(task_info, last_status, task_id, current_time): def _build_task_response(task_info, last_status, task_id, current_time, request: Request):
""" """
Helper function to build a standardized task response object. Helper function to build a standardized task response object.
""" """
@@ -132,7 +133,7 @@ def _build_task_response(task_info, last_status, task_id, current_time):
try: try:
item_id = item_url.split("/")[-1] item_id = item_url.split("/")[-1]
if item_id: if item_id:
base_url = request.host_url.rstrip("/") base_url = str(request.base_url).rstrip("/")
dynamic_original_url = ( dynamic_original_url = (
f"{base_url}/api/{download_type}/download/{item_id}" f"{base_url}/api/{download_type}/download/{item_id}"
) )
@@ -184,7 +185,7 @@ def _build_task_response(task_info, last_status, task_id, current_time):
return task_response return task_response
def get_paginated_tasks(page=1, limit=20, active_only=False): async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Request = None):
""" """
Get paginated list of tasks. Get paginated list of tasks.
""" """
@@ -233,7 +234,7 @@ def get_paginated_tasks(page=1, limit=20, active_only=False):
elif is_active_task: elif is_active_task:
task_counts["active"] += 1 task_counts["active"] += 1
task_response = _build_task_response(task_info, last_status, task_id, time.time()) task_response = _build_task_response(task_info, last_status, task_id, time.time(), request)
if is_active_task: if is_active_task:
active_tasks.append(task_response) active_tasks.append(task_response)
@@ -277,117 +278,18 @@ def get_paginated_tasks(page=1, limit=20, active_only=False):
"pagination": pagination_info "pagination": pagination_info
} }
return jsonify(response) return response
except Exception as e: except Exception as e:
logger.error(f"Error in get_paginated_tasks: {e}", exc_info=True) logger.error(f"Error in get_paginated_tasks: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve paginated tasks"}), 500 raise HTTPException(status_code=500, detail={"error": "Failed to retrieve paginated tasks"})
@prgs_bp.route("/<task_id>", methods=["GET"]) # IMPORTANT: Specific routes MUST come before parameterized routes in FastAPI
def get_task_details(task_id): # Otherwise "updates" gets matched as a {task_id} parameter!
"""
Return a JSON object with the resource type, its name (title),
the last progress update, and, if available, the original request parameters.
This function works with the new task ID based system. @router.get("/list")
async def list_tasks(request: Request):
Args:
task_id: A task UUID from Celery
"""
# Only support new task IDs
task_info = get_task_info(task_id)
if not task_info:
abort(404, "Task not found")
# Dynamically construct original_url
dynamic_original_url = ""
download_type = task_info.get("download_type")
# The 'url' field in task_info stores the Spotify/Deezer URL of the item
# e.g., https://open.spotify.com/album/albumId or https://www.deezer.com/track/trackId
item_url = task_info.get("url")
if download_type and item_url:
try:
# Extract the ID from the item_url (last part of the path)
item_id = item_url.split("/")[-1]
if item_id: # Ensure item_id is not empty
base_url = request.host_url.rstrip("/")
dynamic_original_url = (
f"{base_url}/api/{download_type}/download/{item_id}"
)
else:
logger.warning(
f"Could not extract item ID from URL: {item_url} for task {task_id}. Falling back for original_url."
)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")
except Exception as e:
logger.error(
f"Error constructing dynamic original_url for task {task_id}: {e}",
exc_info=True,
)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get(
"original_url", ""
) # Fallback on any error
else:
logger.warning(
f"Missing download_type ('{download_type}') or item_url ('{item_url}') in task_info for task {task_id}. Falling back for original_url."
)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")
last_status = get_last_task_status(task_id)
status_count = len(get_task_status(task_id))
# Determine last_line content
if last_status and "raw_callback" in last_status:
last_line_content = last_status["raw_callback"]
elif last_status and get_task_status_from_last_status(last_status) == "error":
last_line_content = _build_error_callback_object(last_status)
else:
# Fallback for non-error, no raw_callback, or if last_status is None
last_line_content = last_status
response = {
"original_url": dynamic_original_url,
"last_line": last_line_content,
"timestamp": last_status.get("timestamp") if last_status else time.time(),
"task_id": task_id,
"status_count": status_count,
"created_at": task_info.get("created_at"),
"name": task_info.get("name"),
"artist": task_info.get("artist"),
"type": task_info.get("type"),
"download_type": task_info.get("download_type"),
}
if last_status and last_status.get("summary"):
response["summary"] = last_status["summary"]
return jsonify(response)
@prgs_bp.route("/delete/<task_id>", methods=["DELETE"])
def delete_task(task_id):
"""
Delete a task's information and history.
Args:
task_id: A task UUID from Celery
"""
# Only support new task IDs
task_info = get_task_info(task_id)
if not task_info:
abort(404, "Task not found")
# First, cancel the task if it's running
cancel_task(task_id)
return {"message": f"Task {task_id} deleted successfully"}, 200
@prgs_bp.route("/list", methods=["GET"])
def list_tasks():
""" """
Retrieve a paginated list of all tasks in the system. Retrieve a paginated list of all tasks in the system.
Returns a detailed list of task objects including status and metadata. Returns a detailed list of task objects including status and metadata.
@@ -399,9 +301,9 @@ def list_tasks():
""" """
try: try:
# Get query parameters # Get query parameters
page = int(request.args.get('page', 1)) page = int(request.query_params.get('page', 1))
limit = min(int(request.args.get('limit', 50)), 100) # Cap at 100 limit = min(int(request.query_params.get('limit', 50)), 100) # Cap at 100
active_only = request.args.get('active_only', '').lower() == 'true' active_only = request.query_params.get('active_only', '').lower() == 'true'
tasks = get_all_tasks() tasks = get_all_tasks()
active_tasks = [] active_tasks = []
@@ -447,7 +349,7 @@ def list_tasks():
elif is_active_task: elif is_active_task:
task_counts["active"] += 1 task_counts["active"] += 1
task_response = _build_task_response(task_info, last_status, task_id, time.time()) task_response = _build_task_response(task_info, last_status, task_id, time.time(), request)
if is_active_task: if is_active_task:
active_tasks.append(task_response) active_tasks.append(task_response)
@@ -509,75 +411,14 @@ def list_tasks():
"timestamp": time.time() "timestamp": time.time()
} }
return jsonify(response) return response
except Exception as e: except Exception as e:
logger.error(f"Error in /api/prgs/list: {e}", exc_info=True) logger.error(f"Error in /api/prgs/list: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve task list"}), 500 raise HTTPException(status_code=500, detail={"error": "Failed to retrieve task list"})
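For reference, a minimal client sketch for this paginated endpoint; the /api/prgs prefix comes from the log messages above, while the host and port (localhost:7171) are assumptions to adjust for your deployment:

import requests

BASE = "http://localhost:7171"  # assumed host and port

resp = requests.get(
    f"{BASE}/api/prgs/list",
    params={"page": 1, "limit": 50, "active_only": "false"},
    timeout=10,
)
resp.raise_for_status()
data = resp.json()
# Field names follow the response dictionary assembled in list_tasks above.
for task in data.get("tasks", []):
    print(task.get("task_id"), task.get("download_type"), task.get("name"))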
@prgs_bp.route("/cancel/<task_id>", methods=["POST"])
def cancel_task_endpoint(task_id):
"""
Cancel a running or queued task.
Args:
task_id: The ID of the task to cancel
"""
try:
# First check if this is a task ID in the new system
task_info = get_task_info(task_id)
if task_info:
# This is a task ID in the new system
result = cancel_task(task_id)
return jsonify(result)
# If not found in new system, we need to handle the old system cancellation
# For now, return an error as we're transitioning to the new system
return jsonify(
{
"status": "error",
"message": "Cancellation for old system is not supported in the new API. Please use the new task ID format.",
}
), 400
except Exception as e:
abort(500, f"An error occurred: {e}")
@prgs_bp.route("/cancel/all", methods=["POST"]) @router.get("/updates")
def cancel_all_tasks(): async def get_task_updates(request: Request):
"""
Cancel all active (running or queued) tasks.
"""
try:
tasks_to_cancel = get_all_tasks()
cancelled_count = 0
errors = []
for task_summary in tasks_to_cancel:
task_id = task_summary.get("task_id")
if not task_id:
continue
try:
cancel_task(task_id)
cancelled_count += 1
except Exception as e:
error_message = f"Failed to cancel task {task_id}: {e}"
logger.error(error_message)
errors.append(error_message)
response = {
"message": f"Attempted to cancel all active tasks. {cancelled_count} tasks cancelled.",
"cancelled_count": cancelled_count,
"errors": errors,
}
return jsonify(response), 200
except Exception as e:
logger.error(f"Error in /api/prgs/cancel/all: {e}", exc_info=True)
return jsonify({"error": "Failed to cancel all tasks"}), 500
@prgs_bp.route("/updates", methods=["GET"])
def get_task_updates():
""" """
Retrieve only tasks that have been updated since the specified timestamp. Retrieve only tasks that have been updated since the specified timestamp.
This endpoint is optimized for polling to reduce unnecessary data transfer. This endpoint is optimized for polling to reduce unnecessary data transfer.
@@ -598,19 +439,20 @@ def get_task_updates():
""" """
try: try:
# Get query parameters # Get query parameters
since_param = request.args.get('since') since_param = request.query_params.get('since')
page = int(request.args.get('page', 1)) page = int(request.query_params.get('page', 1))
limit = min(int(request.args.get('limit', 20)), 100) # Cap at 100 limit = min(int(request.query_params.get('limit', 20)), 100) # Cap at 100
active_only = request.args.get('active_only', '').lower() == 'true' active_only = request.query_params.get('active_only', '').lower() == 'true'
if not since_param: if not since_param:
# If no 'since' parameter, return paginated tasks (fallback behavior) # If no 'since' parameter, return paginated tasks (fallback behavior)
return get_paginated_tasks(page, limit, active_only) response = await get_paginated_tasks(page, limit, active_only, request)
return response
try: try:
since_timestamp = float(since_param) since_timestamp = float(since_param)
except (ValueError, TypeError): except (ValueError, TypeError):
return jsonify({"error": "Invalid 'since' timestamp format"}), 400 raise HTTPException(status_code=400, detail={"error": "Invalid 'since' timestamp format"})
# Get all tasks # Get all tasks
all_tasks = get_all_tasks() all_tasks = get_all_tasks()
@@ -668,7 +510,7 @@ def get_task_updates():
if should_include: if should_include:
# Construct the same detailed task object as in list_tasks() # Construct the same detailed task object as in list_tasks()
task_response = _build_task_response(task_info, last_status, task_id, current_time) task_response = _build_task_response(task_info, last_status, task_id, current_time, request)
if is_active_task: if is_active_task:
active_tasks.append(task_response) active_tasks.append(task_response)
@@ -707,8 +549,437 @@ def get_task_updates():
} }
logger.debug(f"Returning {len(active_tasks)} active + {len(paginated_updated_tasks)} paginated tasks out of {len(all_tasks)} total") logger.debug(f"Returning {len(active_tasks)} active + {len(paginated_updated_tasks)} paginated tasks out of {len(all_tasks)} total")
return jsonify(response) return response
except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error(f"Error in /api/prgs/updates: {e}", exc_info=True) logger.error(f"Error in /api/prgs/updates: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve task updates"}), 500 raise HTTPException(status_code=500, detail={"error": "Failed to retrieve task updates"})
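A minimal delta-polling sketch against this endpoint (host and port assumed; the 2-second interval matches the polling cadence the frontend used before SSE):

import time
import requests

BASE = "http://localhost:7171"  # assumed host and port
since = 0.0  # the first call effectively returns everything

while True:
    resp = requests.get(
        f"{BASE}/api/prgs/updates",
        params={"since": since, "active_only": "true", "limit": 20},
        timeout=10,
    )
    resp.raise_for_status()
    data = resp.json()
    for task in data.get("tasks", []):
        print(task.get("task_id"), task.get("name"))
    # Reuse the server-reported timestamp so the next poll only returns changes.
    since = data.get("current_timestamp", time.time())
    time.sleep(2)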
@router.post("/cancel/all")
async def cancel_all_tasks():
"""
Cancel all active (running or queued) tasks.
"""
try:
tasks_to_cancel = get_all_tasks()
cancelled_count = 0
errors = []
for task_summary in tasks_to_cancel:
task_id = task_summary.get("task_id")
if not task_id:
continue
try:
cancel_task(task_id)
cancelled_count += 1
except Exception as e:
error_message = f"Failed to cancel task {task_id}: {e}"
logger.error(error_message)
errors.append(error_message)
response = {
"message": f"Attempted to cancel all active tasks. {cancelled_count} tasks cancelled.",
"cancelled_count": cancelled_count,
"errors": errors,
}
return response
except Exception as e:
logger.error(f"Error in /api/prgs/cancel/all: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": "Failed to cancel all tasks"})
@router.post("/cancel/{task_id}")
async def cancel_task_endpoint(task_id: str):
"""
Cancel a running or queued task.
Args:
task_id: The ID of the task to cancel
"""
try:
# First check if this is a task ID in the new system
task_info = get_task_info(task_id)
if task_info:
# This is a task ID in the new system
result = cancel_task(task_id)
return result
# If not found in new system, we need to handle the old system cancellation
# For now, return an error as we're transitioning to the new system
raise HTTPException(
status_code=400,
detail={
"status": "error",
"message": "Cancellation for old system is not supported in the new API. Please use the new task ID format.",
}
)
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"An error occurred: {e}")
@router.delete("/delete/{task_id}")
async def delete_task(task_id: str):
"""
Delete a task's information and history.
Args:
task_id: A task UUID from Celery
"""
# Only support new task IDs
task_info = get_task_info(task_id)
if not task_info:
raise HTTPException(status_code=404, detail="Task not found")
# First, cancel the task if it's running
cancel_task(task_id)
return {"message": f"Task {task_id} deleted successfully"}
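A minimal client sketch covering the cancel and delete endpoints above (host, port, and the task UUID are placeholders):

import requests

BASE = "http://localhost:7171"  # assumed host and port
task_id = "00000000-0000-0000-0000-000000000000"  # placeholder task UUID

# Cancel a single task, then remove its history.
requests.post(f"{BASE}/api/prgs/cancel/{task_id}", timeout=10)
requests.delete(f"{BASE}/api/prgs/delete/{task_id}", timeout=10)

# Or cancel everything that is still queued or running.
resp = requests.post(f"{BASE}/api/prgs/cancel/all", timeout=10)
print(resp.json().get("message"))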
@router.get("/stream")
async def stream_task_updates(request: Request):
"""
Stream real-time task updates via Server-Sent Events (SSE).
This endpoint provides continuous updates for task status changes without polling.
Clients can connect and receive instant notifications when tasks update.
Query parameters:
active_only (bool): If true, only stream active tasks (downloading, processing, etc.)
Returns:
Server-Sent Events stream with task update data in JSON format
"""
# Get query parameters
active_only = request.query_params.get('active_only', '').lower() == 'true'
async def event_generator():
# Track last update timestamp for this client connection
last_update_timestamp = time.time()
try:
# Send initial data immediately upon connection
yield await generate_task_update_event(last_update_timestamp, active_only, request)
last_update_timestamp = time.time()
# Continuous monitoring loop
while True:
try:
# Check for updates since last timestamp
current_time = time.time()
# Get all tasks and check for updates
all_tasks = get_all_tasks()
updated_tasks = []
active_tasks = []
# Task categorization counters
task_counts = {
"active": 0,
"queued": 0,
"completed": 0,
"error": 0,
"cancelled": 0,
"retrying": 0,
"skipped": 0
}
has_updates = False
for task_summary in all_tasks:
task_id = task_summary.get("task_id")
if not task_id:
continue
task_info = get_task_info(task_id)
if not task_info:
continue
last_status = get_last_task_status(task_id)
# Check if task has been updated since the given timestamp
task_timestamp = last_status.get("timestamp") if last_status else task_info.get("created_at", 0)
# Determine task status and categorize
task_status = get_task_status_from_last_status(last_status)
is_active_task = is_task_active(task_status)
# Categorize tasks by status using ProgressState constants
if task_status == ProgressState.RETRYING:
task_counts["retrying"] += 1
elif task_status in {ProgressState.QUEUED, "pending"}:
task_counts["queued"] += 1
elif task_status in {ProgressState.COMPLETE, ProgressState.DONE}:
task_counts["completed"] += 1
elif task_status == ProgressState.ERROR:
task_counts["error"] += 1
elif task_status == ProgressState.CANCELLED:
task_counts["cancelled"] += 1
elif task_status == ProgressState.SKIPPED:
task_counts["skipped"] += 1
elif is_active_task:
task_counts["active"] += 1
# Always include active tasks in updates, apply filtering to others
should_include = is_active_task or (task_timestamp > last_update_timestamp and not active_only)
if should_include:
has_updates = True
# Construct the same detailed task object as in updates endpoint
task_response = _build_task_response(task_info, last_status, task_id, current_time, request)
if is_active_task:
active_tasks.append(task_response)
else:
updated_tasks.append(task_response)
# Only send update if there are changes
if has_updates:
# Combine active tasks (always shown) with updated tasks
all_returned_tasks = active_tasks + updated_tasks
# Sort by priority (active first, then by creation time)
all_returned_tasks.sort(key=lambda x: (
0 if x.get("task_id") in [t["task_id"] for t in active_tasks] else 1,
-x.get("created_at", 0)
))
update_data = {
"tasks": all_returned_tasks,
"current_timestamp": current_time,
"total_tasks": task_counts["active"] + task_counts["retrying"],
"all_tasks_count": len(all_tasks),
"task_counts": task_counts,
"active_tasks": len(active_tasks),
"updated_count": len(updated_tasks),
"since_timestamp": last_update_timestamp,
}
# Send SSE event with update data
event_data = json.dumps(update_data)
yield f"data: {event_data}\n\n"
logger.debug(f"SSE: Sent {len(active_tasks)} active + {len(updated_tasks)} updated tasks")
# Update last timestamp
last_update_timestamp = current_time
# Wait before next check (much shorter than polling interval)
await asyncio.sleep(0.5) # Check every 500ms for real-time feel
except Exception as e:
logger.error(f"Error in SSE event generation: {e}", exc_info=True)
# Send error event and continue
error_data = json.dumps({"error": "Internal server error", "timestamp": time.time()})
yield f"data: {error_data}\n\n"
await asyncio.sleep(1) # Wait longer on error
except asyncio.CancelledError:
logger.info("SSE client disconnected")
return
except Exception as e:
logger.error(f"SSE connection error: {e}", exc_info=True)
return
return StreamingResponse(
event_generator(),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Headers": "Cache-Control"
}
)
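A minimal Python consumer for this stream; it assumes the router is mounted under /api/prgs (as the log messages above indicate) and parses one JSON document per "data:" line:

import json
import requests

BASE = "http://localhost:7171"  # assumed host and port

# Stream task updates; each SSE event arrives as a single "data: <json>" line.
with requests.get(f"{BASE}/api/prgs/stream", params={"active_only": "true"}, stream=True) as resp:
    resp.raise_for_status()
    for raw in resp.iter_lines(decode_unicode=True):
        if not raw or not raw.startswith("data: "):
            continue  # blank lines separate events
        event = json.loads(raw[len("data: "):])
        if "error" in event:
            print("server reported:", event["error"])
            continue
        print(f"{event.get('active_tasks', 0)} active, {event.get('updated_count', 0)} updated")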
async def generate_task_update_event(since_timestamp: float, active_only: bool, request: Request) -> str:
"""
Generate initial task update event for SSE connection.
This replicates the logic from get_task_updates but for SSE format.
"""
try:
# Get all tasks
all_tasks = get_all_tasks()
updated_tasks = []
active_tasks = []
current_time = time.time()
# Task categorization counters
task_counts = {
"active": 0,
"queued": 0,
"completed": 0,
"error": 0,
"cancelled": 0,
"retrying": 0,
"skipped": 0
}
for task_summary in all_tasks:
task_id = task_summary.get("task_id")
if not task_id:
continue
task_info = get_task_info(task_id)
if not task_info:
continue
last_status = get_last_task_status(task_id)
# Check if task has been updated since the given timestamp
task_timestamp = last_status.get("timestamp") if last_status else task_info.get("created_at", 0)
# Determine task status and categorize
task_status = get_task_status_from_last_status(last_status)
is_active_task = is_task_active(task_status)
# Categorize tasks by status using ProgressState constants
if task_status == ProgressState.RETRYING:
task_counts["retrying"] += 1
elif task_status in {ProgressState.QUEUED, "pending"}:
task_counts["queued"] += 1
elif task_status in {ProgressState.COMPLETE, ProgressState.DONE}:
task_counts["completed"] += 1
elif task_status == ProgressState.ERROR:
task_counts["error"] += 1
elif task_status == ProgressState.CANCELLED:
task_counts["cancelled"] += 1
elif task_status == ProgressState.SKIPPED:
task_counts["skipped"] += 1
elif is_active_task:
task_counts["active"] += 1
# Always include active tasks in updates, apply filtering to others
should_include = is_active_task or (task_timestamp > since_timestamp and not active_only)
if should_include:
# Construct the same detailed task object as in updates endpoint
task_response = _build_task_response(task_info, last_status, task_id, current_time, request)
if is_active_task:
active_tasks.append(task_response)
else:
updated_tasks.append(task_response)
# Combine active tasks (always shown) with updated tasks
all_returned_tasks = active_tasks + updated_tasks
# Sort by priority (active first, then by creation time)
all_returned_tasks.sort(key=lambda x: (
0 if x.get("task_id") in [t["task_id"] for t in active_tasks] else 1,
-x.get("created_at", 0)
))
initial_data = {
"tasks": all_returned_tasks,
"current_timestamp": current_time,
"total_tasks": task_counts["active"] + task_counts["retrying"],
"all_tasks_count": len(all_tasks),
"task_counts": task_counts,
"active_tasks": len(active_tasks),
"updated_count": len(updated_tasks),
"since_timestamp": since_timestamp,
"initial": True # Mark as initial load
}
event_data = json.dumps(initial_data)
return f"data: {event_data}\n\n"
except Exception as e:
logger.error(f"Error generating initial SSE event: {e}", exc_info=True)
error_data = json.dumps({"error": "Failed to load initial data", "timestamp": time.time()})
return f"data: {error_data}\n\n"
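On the consuming side, the payload assembled here (and in the streaming loop above) can be mirrored by a small typed container; a sketch with field names copied from the dictionaries above, the class name itself being illustrative:

from dataclasses import dataclass, field
from typing import Any

@dataclass
class TaskUpdateEvent:
    """Shape of one SSE payload as assembled above; all fields optional on the wire."""
    tasks: list[dict[str, Any]] = field(default_factory=list)
    current_timestamp: float = 0.0
    total_tasks: int = 0
    all_tasks_count: int = 0
    task_counts: dict[str, int] = field(default_factory=dict)
    active_tasks: int = 0
    updated_count: int = 0
    since_timestamp: float = 0.0
    initial: bool = False  # present only on the first event after connecting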
# IMPORTANT: This parameterized route MUST come AFTER all specific routes
# Otherwise FastAPI will match specific routes like "/updates" as task_id parameters
@router.get("/{task_id}")
async def get_task_details(task_id: str, request: Request):
"""
Return a JSON object with the resource type, its name (title),
the last progress update, and, if available, the original request parameters.
This function works with the new task ID based system.
Args:
task_id: A task UUID from Celery
"""
# Only support new task IDs
task_info = get_task_info(task_id)
if not task_info:
raise HTTPException(status_code=404, detail="Task not found")
# Dynamically construct original_url
dynamic_original_url = ""
download_type = task_info.get("download_type")
# The 'url' field in task_info stores the Spotify/Deezer URL of the item
# e.g., https://open.spotify.com/album/albumId or https://www.deezer.com/track/trackId
item_url = task_info.get("url")
if download_type and item_url:
try:
# Extract the ID from the item_url (last part of the path)
item_id = item_url.split("/")[-1]
if item_id: # Ensure item_id is not empty
base_url = str(request.base_url).rstrip("/")
dynamic_original_url = (
f"{base_url}/api/{download_type}/download/{item_id}"
)
else:
logger.warning(
f"Could not extract item ID from URL: {item_url} for task {task_id}. Falling back for original_url."
)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")
except Exception as e:
logger.error(
f"Error constructing dynamic original_url for task {task_id}: {e}",
exc_info=True,
)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get(
"original_url", ""
) # Fallback on any error
else:
logger.warning(
f"Missing download_type ('{download_type}') or item_url ('{item_url}') in task_info for task {task_id}. Falling back for original_url."
)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")
last_status = get_last_task_status(task_id)
status_count = len(get_task_status(task_id))
# Determine last_line content
if last_status and "raw_callback" in last_status:
last_line_content = last_status["raw_callback"]
elif last_status and get_task_status_from_last_status(last_status) == "error":
last_line_content = _build_error_callback_object(last_status)
else:
# Fallback for non-error, no raw_callback, or if last_status is None
last_line_content = last_status
response = {
"original_url": dynamic_original_url,
"last_line": last_line_content,
"timestamp": last_status.get("timestamp") if last_status else time.time(),
"task_id": task_id,
"status_count": status_count,
"created_at": task_info.get("created_at"),
"name": task_info.get("name"),
"artist": task_info.get("artist"),
"type": task_info.get("type"),
"download_type": task_info.get("download_type"),
}
if last_status and last_status.get("summary"):
response["summary"] = last_status["summary"]
return response
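The route-ordering constraint called out above is easy to reproduce in isolation; a minimal standalone sketch (not this project's router):

from fastapi import FastAPI, APIRouter

router = APIRouter()

# Declared first, so GET /api/prgs/updates is answered here...
@router.get("/updates")
async def updates():
    return {"route": "updates"}

# ...and only paths that did not match a specific route fall through here.
# Registered the other way round, "/updates" would arrive as task_id="updates".
@router.get("/{task_id}")
async def task_details(task_id: str):
    return {"route": "task_details", "task_id": task_id}

app = FastAPI()
app.include_router(router, prefix="/api/prgs")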


@@ -1,71 +1,65 @@
from flask import Blueprint, jsonify, request from fastapi import APIRouter, HTTPException, Request
from routes.utils.search import search # Corrected import import json
from routes.config import get_config # Import get_config function
search_bp = Blueprint("search", __name__)
@search_bp.route("/search", methods=["GET"])
def handle_search():
try:
# Get query parameters
query = request.args.get("q", "")
search_type = request.args.get("search_type", "")
limit = int(request.args.get("limit", 10))
main = request.args.get(
"main", ""
) # Get the main parameter for account selection
# If main parameter is not provided in the request, get it from config
if not main:
config = get_config()
if config and "spotify" in config:
main = config["spotify"]
print(f"Using main from config: {main}")
# Validate parameters
if not query:
return jsonify({"error": "Missing search query"}), 400
valid_types = ["track", "album", "artist", "playlist", "episode"]
if search_type not in valid_types:
return jsonify({"error": "Invalid search type"}), 400
# Perform the search with corrected parameter name
raw_results = search(
query=query,
search_type=search_type, # Fixed parameter name
limit=limit,
main=main, # Pass the main parameter
)
# Extract items from the appropriate section of the response based on search_type
items = []
if raw_results and search_type + "s" in raw_results:
type_key = search_type + "s"
items = raw_results[type_key].get("items", [])
elif raw_results and search_type in raw_results:
items = raw_results[search_type].get("items", [])
# Filter out any null items from the results
if items:
items = [item for item in items if item is not None]
# Return both the items array and the full data for debugging
return jsonify(
{
"items": items,
"data": raw_results, # Include full data for debugging
"error": None,
}
)
except ValueError as e:
print(f"ValueError in search: {str(e)}")
return jsonify({"error": str(e)}), 400
except Exception as e:
import traceback import traceback
import logging
from routes.utils.search import search
print(f"Exception in search: {str(e)}") logger = logging.getLogger(__name__)
print(traceback.format_exc()) router = APIRouter()
return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@router.get("/search")
async def handle_search(request: Request):
"""
Handle search requests for tracks, albums, playlists, or artists.
Frontend-compatible endpoint that returns results in the { items: [] } format.
"""
query = request.query_params.get("q")
# Frontend sends 'search_type', so check both 'search_type' and 'type'
search_type = request.query_params.get("search_type") or request.query_params.get("type", "track")
limit = request.query_params.get("limit", "20")
main = request.query_params.get("main") # Account context
if not query:
raise HTTPException(status_code=400, detail={"error": "Missing parameter: q"})
try:
limit = int(limit)
except ValueError:
raise HTTPException(status_code=400, detail={"error": "limit must be an integer"})
try:
# Use the single search_type (not multiple types like before)
result = search(
query=query,
search_type=search_type,
limit=limit,
main=main
)
# Extract items from the Spotify API response based on search type
# Spotify API returns results in format like { "tracks": { "items": [...] } }
items = []
# Map search types to their plural forms in Spotify response
type_mapping = {
"track": "tracks",
"album": "albums",
"artist": "artists",
"playlist": "playlists",
"episode": "episodes",
"show": "shows"
}
response_key = type_mapping.get(search_type.lower(), "tracks")
if result and response_key in result:
items = result[response_key].get("items", [])
# Return in the format expected by frontend: { items: [] }
return {"items": items}
except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()}
logger.error(f"Error in search: {error_data}")
raise HTTPException(status_code=500, detail=error_data)
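A minimal client sketch for this endpoint, assuming the search router is mounted under /api (an assumption) and the service listens on localhost:7171:

import requests

BASE = "http://localhost:7171"  # assumed host and port

resp = requests.get(
    f"{BASE}/api/search",
    params={"q": "daft punk", "search_type": "album", "limit": 10},
    timeout=10,
)
resp.raise_for_status()
# The endpoint returns { "items": [...] } regardless of search type.
for item in resp.json().get("items", []):
    print(item.get("name"))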


@@ -1,21 +1,15 @@
from flask import Blueprint, Response, request from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import JSONResponse
import json import json
import traceback import traceback
import uuid # For generating error task IDs import uuid
import time # For timestamps import time
from routes.utils.celery_queue_manager import ( from routes.utils.celery_queue_manager import download_queue_manager
download_queue_manager, from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState
get_existing_task_id, from routes.utils.get_info import get_spotify_info
) from routes.utils.errors import DuplicateDownloadError
from routes.utils.celery_tasks import (
store_task_info,
store_task_status,
ProgressState,
) # For error task creation
from urllib.parse import urlparse # for URL validation
from routes.utils.get_info import get_spotify_info # Added import
track_bp = Blueprint("track", __name__) router = APIRouter()
def construct_spotify_url(item_id: str, item_type: str = "track") -> str: def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
@@ -23,16 +17,14 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
return f"https://open.spotify.com/{item_type}/{item_id}" return f"https://open.spotify.com/{item_type}/{item_id}"
@track_bp.route("/download/<track_id>", methods=["GET"]) @router.get("/download/{track_id}")
def handle_download(track_id): async def handle_download(track_id: str, request: Request):
# Retrieve essential parameters from the request. # Retrieve essential parameters from the request.
# name = request.args.get('name') # Removed # name = request.args.get('name') # Removed
# artist = request.args.get('artist') # Removed # artist = request.args.get('artist') # Removed
orig_params = request.args.to_dict()
# Construct the URL from track_id # Construct the URL from track_id
url = construct_spotify_url(track_id, "track") url = construct_spotify_url(track_id, "track")
orig_params["original_url"] = url # Update original_url to the constructed one
# Fetch metadata from Spotify # Fetch metadata from Spotify
try: try:
@@ -42,12 +34,9 @@ def handle_download(track_id):
or not track_info.get("name") or not track_info.get("name")
or not track_info.get("artists") or not track_info.get("artists")
): ):
return Response( return JSONResponse(
json.dumps( content={"error": f"Could not retrieve metadata for track ID: {track_id}"},
{"error": f"Could not retrieve metadata for track ID: {track_id}"} status_code=404
),
status=404,
mimetype="application/json",
) )
name_from_spotify = track_info.get("name") name_from_spotify = track_info.get("name")
@@ -58,72 +47,53 @@ def handle_download(track_id):
) )
except Exception as e: except Exception as e:
return Response( return JSONResponse(
json.dumps( content={"error": f"Failed to fetch metadata for track {track_id}: {str(e)}"},
{"error": f"Failed to fetch metadata for track {track_id}: {str(e)}"} status_code=500
),
status=500,
mimetype="application/json",
) )
# Validate required parameters # Validate required parameters
if not url: if not url:
return Response( return JSONResponse(
json.dumps( content={"error": "Missing required parameter: url"},
{"error": "Missing required parameter: url", "original_url": url} status_code=400
),
status=400,
mimetype="application/json",
)
# Validate URL domain
parsed = urlparse(url)
host = parsed.netloc.lower()
if not (
host.endswith("deezer.com")
or host.endswith("open.spotify.com")
or host.endswith("spotify.com")
):
return Response(
json.dumps({"error": f"Invalid Link {url} :(", "original_url": url}),
status=400,
mimetype="application/json",
)
# Check for existing task before adding to the queue
existing_task = get_existing_task_id(url)
if existing_task:
return Response(
json.dumps(
{
"error": "Duplicate download detected.",
"existing_task": existing_task,
}
),
status=409,
mimetype="application/json",
) )
# Add the task to the queue with only essential parameters
# The queue manager will now handle all config parameters
# Include full original request URL in metadata
orig_params = dict(request.query_params)
orig_params["original_url"] = str(request.url)
try: try:
task_id = download_queue_manager.add_task( task_id = download_queue_manager.add_task(
{ {
"download_type": "track", "download_type": "track",
"url": url, "url": url,
"name": name_from_spotify, # Use fetched name "name": name_from_spotify,
"artist": artist_from_spotify, # Use fetched artist "artist": artist_from_spotify,
"orig_request": orig_params, "orig_request": orig_params,
} }
) )
# Removed DuplicateDownloadError handling, add_task now manages this by creating an error task. except DuplicateDownloadError as e:
return JSONResponse(
content={
"error": "Duplicate download detected.",
"existing_task": e.existing_task,
},
status_code=409
)
except Exception as e: except Exception as e:
# Generic error handling for other issues during task submission # Generic error handling for other issues during task submission
# Create an error task ID if add_task itself fails before returning an ID
error_task_id = str(uuid.uuid4()) error_task_id = str(uuid.uuid4())
store_task_info( store_task_info(
error_task_id, error_task_id,
{ {
"download_type": "track", "download_type": "track",
"url": url, "url": url,
"name": name_from_spotify, # Use fetched name "name": name_from_spotify,
"artist": artist_from_spotify, # Use fetched artist "artist": artist_from_spotify,
"original_request": orig_params, "original_request": orig_params,
"created_at": time.time(), "created_at": time.time(),
"is_submission_error_task": True, "is_submission_error_task": True,
@@ -137,65 +107,57 @@ def handle_download(track_id):
"timestamp": time.time(), "timestamp": time.time(),
}, },
) )
return Response( return JSONResponse(
json.dumps( content={
{
"error": f"Failed to queue track download: {str(e)}", "error": f"Failed to queue track download: {str(e)}",
"task_id": error_task_id, "task_id": error_task_id,
} },
), status_code=500
status=500,
mimetype="application/json",
) )
return Response( return JSONResponse(
json.dumps({"task_id": task_id}), content={"task_id": task_id},
status=202, status_code=202
mimetype="application/json",
) )
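A minimal client sketch for the download endpoint, assuming the router is mounted under /api/track and the service listens on localhost:7171 (both assumptions); the 409 branch mirrors the DuplicateDownloadError handling above:

import requests

BASE = "http://localhost:7171"  # assumed host and port
track_id = "4iV5W9uYEdYUVa79Axb7Rh"  # placeholder Spotify track ID

resp = requests.get(f"{BASE}/api/track/download/{track_id}", timeout=30)
if resp.status_code == 202:
    print("queued as task", resp.json()["task_id"])
elif resp.status_code == 409:
    print("already queued as task", resp.json().get("existing_task"))
else:
    print("failed:", resp.status_code, resp.json().get("error"))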
@track_bp.route("/download/cancel", methods=["GET"]) @router.get("/download/cancel")
def cancel_download(): async def cancel_download(request: Request):
""" """
Cancel a running track download process by its task id. Cancel a running download process by its task id.
""" """
task_id = request.args.get("task_id") task_id = request.query_params.get("task_id")
if not task_id: if not task_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing task id (task_id) parameter"}), content={"error": "Missing task id (task_id) parameter"},
status=400, status_code=400
mimetype="application/json",
) )
# Use the queue manager's cancellation method. # Use the queue manager's cancellation method.
result = download_queue_manager.cancel_task(task_id) result = download_queue_manager.cancel_task(task_id)
status_code = 200 if result.get("status") == "cancelled" else 404 status_code = 200 if result.get("status") == "cancelled" else 404
return Response(json.dumps(result), status=status_code, mimetype="application/json") return JSONResponse(content=result, status_code=status_code)
@track_bp.route("/info", methods=["GET"]) @router.get("/info")
def get_track_info(): async def get_track_info(request: Request):
""" """
Retrieve Spotify track metadata given a Spotify track ID. Retrieve Spotify track metadata given a Spotify track ID.
Expects a query parameter 'id' that contains the Spotify track ID. Expects a query parameter 'id' that contains the Spotify track ID.
""" """
spotify_id = request.args.get("id") spotify_id = request.query_params.get("id")
if not spotify_id: if not spotify_id:
return Response( return JSONResponse(
json.dumps({"error": "Missing parameter: id"}), content={"error": "Missing parameter: id"},
status=400, status_code=400
mimetype="application/json",
) )
try: try:
# Import and use the get_spotify_info function from the utility module. # Use the get_spotify_info function (already imported at top)
from routes.utils.get_info import get_spotify_info
track_info = get_spotify_info(spotify_id, "track") track_info = get_spotify_info(spotify_id, "track")
return Response(json.dumps(track_info), status=200, mimetype="application/json") return JSONResponse(content=track_info, status_code=200)
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return Response(json.dumps(error_data), status=500, mimetype="application/json") return JSONResponse(content=error_data, status_code=500)
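A companion sketch for the two remaining endpoints, again assuming the /api/track mount prefix; the track ID and task UUID are placeholders supplied by the caller:

import requests

BASE = "http://localhost:7171"  # assumed host and port

# Look up metadata for a track, then cancel a previously queued download.
info = requests.get(f"{BASE}/api/track/info", params={"id": "4iV5W9uYEdYUVa79Axb7Rh"}, timeout=10)
print(info.json().get("name"))

cancel = requests.get(f"{BASE}/api/track/download/cancel", params={"task_id": "<task uuid>"}, timeout=10)
print(cancel.status_code, cancel.json())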


@@ -43,10 +43,12 @@ export function QueueProvider({ children }: { children: ReactNode }) {
const pollingIntervals = useRef<Record<string, number>>({}); const pollingIntervals = useRef<Record<string, number>>({});
const cancelledRemovalTimers = useRef<Record<string, number>>({}); const cancelledRemovalTimers = useRef<Record<string, number>>({});
// Smart polling state // SSE connection state
const smartPollingInterval = useRef<number | null>(null); const sseConnection = useRef<EventSource | null>(null);
const lastUpdateTimestamp = useRef<number>(0);
const isInitialized = useRef<boolean>(false); const isInitialized = useRef<boolean>(false);
const reconnectTimeoutRef = useRef<number | null>(null);
const maxReconnectAttempts = 5;
const reconnectAttempts = useRef<number>(0);
// Pagination state // Pagination state
const [currentPage, setCurrentPage] = useState(1); const [currentPage, setCurrentPage] = useState(1);
@@ -150,33 +152,33 @@ export function QueueProvider({ children }: { children: ReactNode }) {
}, [scheduleCancelledTaskRemoval]); }, [scheduleCancelledTaskRemoval]);
const startSmartPolling = useCallback(() => { const startSmartPolling = useCallback(() => {
if (smartPollingInterval.current) return; // Already polling if (sseConnection.current) return; // Already connected
console.log("Starting smart polling"); console.log("Starting SSE connection");
const intervalId = window.setInterval(async () => { const connectSSE = () => {
try { try {
const response = await apiClient.get<{ // Create SSE connection
tasks: any[]; const eventSource = new EventSource(`/api/prgs/stream?active_only=true`);
current_timestamp: number; sseConnection.current = eventSource;
total_tasks: number;
active_tasks: number; eventSource.onopen = () => {
updated_count: number; console.log("SSE connection established");
task_counts?: { reconnectAttempts.current = 0; // Reset reconnect attempts on successful connection
active: number;
queued: number;
retrying: number;
completed: number;
error: number;
cancelled: number;
skipped: number;
}; };
}>(`/prgs/updates?since=${lastUpdateTimestamp.current}&active_only=true`);
const { tasks: updatedTasks, current_timestamp, total_tasks, task_counts } = response.data; eventSource.onmessage = (event) => {
try {
const data = JSON.parse(event.data);
// Update the last timestamp for next poll // Handle error events
lastUpdateTimestamp.current = current_timestamp; if (data.error) {
console.error("SSE error event:", data.error);
toast.error("Connection error: " + data.error);
return;
}
const { tasks: updatedTasks, current_timestamp, total_tasks, task_counts } = data;
// Update total tasks count - use active + queued if task_counts available // Update total tasks count - use active + queued if task_counts available
const calculatedTotal = task_counts ? const calculatedTotal = task_counts ?
@@ -184,11 +186,11 @@ export function QueueProvider({ children }: { children: ReactNode }) {
(total_tasks || 0); (total_tasks || 0);
setTotalTasks(calculatedTotal); setTotalTasks(calculatedTotal);
if (updatedTasks.length > 0) { if (updatedTasks && updatedTasks.length > 0) {
console.log(`Smart polling: ${updatedTasks.length} tasks updated (${response.data.active_tasks} active) out of ${response.data.total_tasks} total`); console.log(`SSE: ${updatedTasks.length} tasks updated (${data.active_tasks} active) out of ${data.total_tasks} total`);
// Create a map of updated tasks by task_id for efficient lookup // Create a map of updated tasks by task_id for efficient lookup
const updatedTasksMap = new Map(updatedTasks.map(task => [task.task_id, task])); const updatedTasksMap = new Map(updatedTasks.map((task: any) => [task.task_id, task]));
setItems(prev => { setItems(prev => {
// Update existing items with new data, and add any new active tasks // Update existing items with new data, and add any new active tasks
@@ -203,14 +205,14 @@ export function QueueProvider({ children }: { children: ReactNode }) {
// Only add new active tasks that aren't in our current items and aren't in terminal state // Only add new active tasks that aren't in our current items and aren't in terminal state
const currentTaskIds = new Set(prev.map(item => item.taskId || item.id)); const currentTaskIds = new Set(prev.map(item => item.taskId || item.id));
const newActiveTasks = updatedTasks const newActiveTasks = updatedTasks
.filter(task => { .filter((task: any) => {
const isNew = !currentTaskIds.has(task.task_id); const isNew = !currentTaskIds.has(task.task_id);
const status = task.last_line?.status_info?.status || task.last_line?.status || "unknown"; const status = task.last_line?.status_info?.status || task.last_line?.status || "unknown";
const isActive = isActiveTaskStatus(status); const isActive = isActiveTaskStatus(status);
const isTerminal = ["completed", "error", "cancelled", "skipped", "done"].includes(status); const isTerminal = ["completed", "error", "cancelled", "skipped", "done"].includes(status);
return isNew && isActive && !isTerminal; return isNew && isActive && !isTerminal;
}) })
.map(task => { .map((task: any) => {
const spotifyId = task.original_url?.split("/").pop() || ""; const spotifyId = task.original_url?.split("/").pop() || "";
const baseItem: QueueItem = { const baseItem: QueueItem = {
id: task.task_id, id: task.task_id,
@@ -228,19 +230,57 @@ export function QueueProvider({ children }: { children: ReactNode }) {
}); });
} }
} catch (error) { } catch (error) {
console.error("Smart polling failed:", error); console.error("Failed to parse SSE message:", error);
} }
}, 2000); // Poll every 2 seconds };
smartPollingInterval.current = intervalId; eventSource.onerror = (error) => {
console.error("SSE connection error:", error);
// Close the connection
eventSource.close();
sseConnection.current = null;
// Attempt to reconnect with exponential backoff
if (reconnectAttempts.current < maxReconnectAttempts) {
reconnectAttempts.current++;
const delay = Math.min(1000 * Math.pow(2, reconnectAttempts.current - 1), 30000); // Max 30 seconds
console.log(`SSE reconnecting in ${delay}ms (attempt ${reconnectAttempts.current}/${maxReconnectAttempts})`);
reconnectTimeoutRef.current = window.setTimeout(() => {
connectSSE();
}, delay);
} else {
console.error("SSE max reconnection attempts reached");
toast.error("Connection lost. Please refresh the page.");
}
};
} catch (error) {
console.error("Failed to create SSE connection:", error);
toast.error("Failed to establish real-time connection");
}
};
connectSSE();
}, [updateItemFromPrgs]); }, [updateItemFromPrgs]);
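The reconnect logic above uses a capped exponential backoff; the same schedule, sketched in Python for clarity (milliseconds, capped at 30 s, five attempts as in the frontend code):

def backoff_delays(max_attempts: int = 5, cap_ms: int = 30_000) -> list[int]:
    """Mirror of the schedule above: 1s, 2s, 4s, 8s, 16s, never exceeding the cap."""
    return [min(1000 * 2 ** (attempt - 1), cap_ms) for attempt in range(1, max_attempts + 1)]

print(backoff_delays())  # [1000, 2000, 4000, 8000, 16000]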
const stopSmartPolling = useCallback(() => { const stopSmartPolling = useCallback(() => {
if (smartPollingInterval.current) { if (sseConnection.current) {
console.log("Stopping smart polling"); console.log("Closing SSE connection");
clearInterval(smartPollingInterval.current); sseConnection.current.close();
smartPollingInterval.current = null; sseConnection.current = null;
} }
// Clear any pending reconnection timeout
if (reconnectTimeoutRef.current) {
clearTimeout(reconnectTimeoutRef.current);
reconnectTimeoutRef.current = null;
}
// Reset reconnection attempts
reconnectAttempts.current = 0;
}, []); }, []);
const loadMoreTasks = useCallback(async () => { const loadMoreTasks = useCallback(async () => {
@@ -312,7 +352,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
const startPolling = useCallback( const startPolling = useCallback(
(taskId: string) => { (taskId: string) => {
// Legacy function - now just ensures smart polling is active // Legacy function - now just ensures SSE connection is active
startSmartPolling(); startSmartPolling();
}, },
[startSmartPolling], [startSmartPolling],
@@ -373,10 +413,9 @@ export function QueueProvider({ children }: { children: ReactNode }) {
setTotalTasks(calculatedTotal); setTotalTasks(calculatedTotal);
// Set initial timestamp to current time // Set initial timestamp to current time
lastUpdateTimestamp.current = timestamp;
isInitialized.current = true; isInitialized.current = true;
// Start smart polling for real-time updates // Start SSE connection for real-time updates
startSmartPolling(); startSmartPolling();
} catch (error) { } catch (error) {
console.error("Failed to fetch queue from backend:", error); console.error("Failed to fetch queue from backend:", error);
@@ -386,7 +425,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
fetchQueue(); fetchQueue();
// Cleanup function to stop polling when component unmounts // Cleanup function to stop SSE connection when component unmounts
return () => { return () => {
stopSmartPolling(); stopSmartPolling();
// Clean up any remaining individual polling intervals (legacy cleanup) // Clean up any remaining individual polling intervals (legacy cleanup)