Xoconoch
2025-08-19 09:36:22 -06:00
parent 015ae024a6
commit 6538cde022
2 changed files with 393 additions and 350 deletions

app.py

@@ -8,7 +8,6 @@ import logging.handlers
import time
from pathlib import Path
import os
-import atexit
import sys
import redis
import socket
@@ -16,11 +15,16 @@ from urllib.parse import urlparse
# Run DB migrations as early as possible, before importing any routers that may touch DBs
try:
    from routes.migrations import run_migrations_if_needed

    run_migrations_if_needed()
    logging.getLogger(__name__).info(
        "Database migrations executed (if needed) early in startup."
    )
except Exception as e:
    logging.getLogger(__name__).error(
        f"Database migration step failed early in startup: {e}", exc_info=True
    )

# Import route routers (to be created)
from routes.auth.credentials import router as credentials_router
@@ -44,251 +48,299 @@ from routes.auth import AUTH_ENABLED
from routes.auth.middleware import AuthMiddleware

# Import and initialize routes (this will start the watch manager)
import routes


# Configure application-wide logging
def setup_logging():
    """Configure application-wide logging with rotation"""
    # Create logs directory if it doesn't exist
    logs_dir = Path("logs")
    logs_dir.mkdir(exist_ok=True)

    # Set up log file paths
    main_log = logs_dir / "spotizerr.log"

    # Configure root logger
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)

    # Clear any existing handlers from the root logger
    if root_logger.hasHandlers():
        root_logger.handlers.clear()

    # Log formatting
    log_format = logging.Formatter(
        "%(asctime)s [%(levelname)s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # File handler with rotation (10 MB max, keep 5 backups)
    file_handler = logging.handlers.RotatingFileHandler(
        main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
    )
    file_handler.setFormatter(log_format)
    file_handler.setLevel(logging.INFO)

    # Console handler for stderr
    console_handler = logging.StreamHandler(sys.stderr)
    console_handler.setFormatter(log_format)
    console_handler.setLevel(logging.INFO)

    # Add handlers to root logger
    root_logger.addHandler(file_handler)
    root_logger.addHandler(console_handler)

    # Set up specific loggers
    for logger_name in [
        "routes",
        "routes.utils",
        "routes.utils.celery_manager",
        "routes.utils.celery_tasks",
        "routes.utils.watch",
    ]:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.INFO)
        logger.propagate = True  # Propagate to root logger

    logging.info("Logging system initialized")


def check_redis_connection():
    """Check if Redis is available and accessible"""
    if not REDIS_URL:
        logging.error("REDIS_URL is not configured. Please check your environment.")
        return False

    try:
        # Parse Redis URL
        parsed_url = urlparse(REDIS_URL)
        host = parsed_url.hostname or "localhost"
        port = parsed_url.port or 6379

        logging.info(f"Testing Redis connection to {host}:{port}...")

        # Test socket connection first
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(5)
        result = sock.connect_ex((host, port))
        sock.close()

        if result != 0:
            logging.error(f"Cannot connect to Redis at {host}:{port}")
            return False

        # Test Redis client connection
        r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
        r.ping()
        logging.info("Redis connection successful")
        return True

    except redis.ConnectionError as e:
        logging.error(f"Redis connection error: {e}")
        return False
    except redis.TimeoutError as e:
        logging.error(f"Redis timeout error: {e}")
        return False
    except Exception as e:
        logging.error(f"Unexpected error checking Redis connection: {e}")
        return False


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Handle application startup and shutdown"""
    # Startup
    setup_logging()

    # Check Redis connection
    if not check_redis_connection():
        logging.error(
            "Failed to connect to Redis. Please ensure Redis is running and accessible."
        )
        # Don't exit, but warn - some functionality may not work

    # Start Celery workers
    try:
        celery_manager.start()
        logging.info("Celery workers started successfully")
    except Exception as e:
        logging.error(f"Failed to start Celery workers: {e}")

    yield

    # Shutdown
    try:
        celery_manager.stop()
        logging.info("Celery workers stopped")
    except Exception as e:
        logging.error(f"Error stopping Celery workers: {e}")
def create_app():
    app = FastAPI(
        title="Spotizerr API",
        description="Music download service API",
        version="3.0.0",
        lifespan=lifespan,
        redirect_slashes=True,  # Enable automatic trailing slash redirects
    )

    # Set up CORS
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Add authentication middleware (only if auth is enabled)
    if AUTH_ENABLED:
        app.add_middleware(AuthMiddleware)
        logging.info("Authentication system enabled")
    else:
        logging.info("Authentication system disabled")

    # Register routers with URL prefixes
    app.include_router(auth_router, prefix="/api/auth", tags=["auth"])

    # Include SSO router if available
    try:
        from routes.auth.sso import router as sso_router

        app.include_router(sso_router, prefix="/api/auth", tags=["sso"])
        logging.info("SSO functionality enabled")
    except ImportError as e:
        logging.warning(f"SSO functionality not available: {e}")

    app.include_router(config_router, prefix="/api/config", tags=["config"])
    app.include_router(search_router, prefix="/api/search", tags=["search"])
    app.include_router(
        credentials_router, prefix="/api/credentials", tags=["credentials"]
    )
    app.include_router(album_router, prefix="/api/album", tags=["album"])
    app.include_router(track_router, prefix="/api/track", tags=["track"])
    app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
    app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
    app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
    app.include_router(history_router, prefix="/api/history", tags=["history"])

    # Add request logging middleware
    @app.middleware("http")
    async def log_requests(request: Request, call_next):
        start_time = time.time()

        # Log request
        logger = logging.getLogger("uvicorn.access")
        logger.debug(f"Request: {request.method} {request.url.path}")

        try:
            response = await call_next(request)

            # Log response
            duration = round((time.time() - start_time) * 1000, 2)
            logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")

            return response
        except Exception as e:
            # Log errors
            logger.error(f"Server error: {str(e)}", exc_info=True)
            raise HTTPException(status_code=500, detail="Internal Server Error")

    # Mount static files for React app
    if os.path.exists("spotizerr-ui/dist"):
        app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")
        # Serve React App - catch-all route for SPA (but not for API routes)
        @app.get("/{full_path:path}")
        async def serve_react_app(full_path: str):
            """Serve React app with fallback to index.html for SPA routing. Prevent directory traversal."""
            static_dir = "spotizerr-ui/dist"
            static_dir_path = Path(static_dir).resolve()
            index_path = static_dir_path / "index.html"
            allowed_exts = {
                ".html",
                ".js",
                ".css",
                ".map",
                ".png",
                ".jpg",
                ".jpeg",
                ".svg",
                ".webp",
                ".ico",
                ".json",
                ".txt",
                ".woff",
                ".woff2",
                ".ttf",
                ".eot",
                ".mp3",
                ".ogg",
                ".mp4",
                ".webm",
            }

            # Don't serve React app for API routes (more specific check)
            if full_path.startswith("api") or full_path.startswith("api/"):
                raise HTTPException(status_code=404, detail="API endpoint not found")

            # Reject null bytes early
            if "\x00" in full_path:
                return FileResponse(str(index_path))

            # Sanitize path: normalize backslashes and strip URL schemes
            sanitized = full_path.replace("\\", "/").lstrip("/")
            if sanitized.startswith("http://") or sanitized.startswith("https://"):
                return FileResponse(str(index_path))

            # Resolve requested path safely and ensure it stays within static_dir
            try:
                requested_path = (static_dir_path / sanitized).resolve()
            except Exception:
                requested_path = index_path

            # If traversal attempted or non-file within static dir, fall back to index.html for SPA routing
            if not str(requested_path).startswith(str(static_dir_path)):
                return FileResponse(str(index_path))

            # Disallow hidden files (starting with dot) and enforce safe extensions
            if requested_path.is_file():
                name = requested_path.name
                if name.startswith("."):
                    return FileResponse(str(index_path))
                suffix = requested_path.suffix.lower()
                if suffix in allowed_exts:
                    return FileResponse(str(requested_path))
                # Not an allowed asset; fall back to SPA index
                return FileResponse(str(index_path))
            else:
                # Fallback to index.html for SPA routing
                return FileResponse(str(index_path))
    else:
        logging.warning("React app build directory not found at spotizerr-ui/dist")

    return app
def start_celery_workers():
    """Start Celery workers with dynamic configuration"""
    # This function is now handled by the lifespan context manager
    # and the celery_manager.start() call
    pass


if __name__ == "__main__":
    import uvicorn

    app = create_app()

    # Use HOST environment variable if present, otherwise fall back to IPv4 wildcard
    host = os.getenv("HOST", "0.0.0.0")
    # Allow overriding port via PORT env var, with default 7171
    try:
        port = int(os.getenv("PORT", "7171"))
    except ValueError:
        port = 7171

    uvicorn.run(app, host=host, port=port, log_level="info", access_log=True)
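For context, a minimal smoke test of the hardened SPA catch-all above might look like the sketch below. It is not part of the commit: it assumes app.py is importable as `app`, that a built spotizerr-ui/dist directory exists (otherwise the catch-all route is never registered), and that authentication is disabled.

# Hypothetical check of the catch-all behaviour above; not from the commit.
from fastapi.testclient import TestClient

from app import create_app  # assumes this file is importable as app.py

client = TestClient(create_app())

# Unknown API paths are rejected rather than falling back to index.html.
assert client.get("/api/does-not-exist").status_code == 404

# Client-side routes and dotfiles both resolve to the SPA index page.
assert client.get("/some/client/route").status_code == 200
assert client.get("/.env").status_code == 200  # served as index.html, never the dotfile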


@@ -1,9 +1,8 @@
from fastapi import APIRouter, Request, Depends
from fastapi.responses import JSONResponse
-import json
-import traceback
import logging
from routes.utils.history_manager import history_manager
from typing import Any, Dict

# Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, User
@@ -15,10 +14,12 @@ router = APIRouter()
@router.get("/")
@router.get("")
async def get_history(
    request: Request, current_user: User = Depends(require_auth_from_state)
):
    """
    Retrieve download history with optional filtering and pagination.

    Query parameters:
    - limit: Maximum number of records (default: 100, max: 500)
    - offset: Number of records to skip (default: 0)
@@ -31,143 +32,144 @@ async def get_history(
        offset = max(int(request.query_params.get("offset", 0)), 0)
        download_type = request.query_params.get("download_type")
        status = request.query_params.get("status")

        # Validate download_type if provided
        valid_types = ["track", "album", "playlist"]
        if download_type and download_type not in valid_types:
            return JSONResponse(
                content={
                    "error": f"Invalid download_type. Must be one of: {valid_types}"
                },
                status_code=400,
            )

        # Validate status if provided
        valid_statuses = ["completed", "failed", "skipped", "in_progress"]
        if status and status not in valid_statuses:
            return JSONResponse(
                content={"error": f"Invalid status. Must be one of: {valid_statuses}"},
                status_code=400,
            )

        # Get history from manager
        history = history_manager.get_download_history(
            limit=limit, offset=offset, download_type=download_type, status=status
        )

        # Add pagination info
        response_data: Dict[str, Any] = {
            "downloads": history,
            "pagination": {
                "limit": limit,
                "offset": offset,
                "returned_count": len(history),
            },
        }

        filters: Dict[str, Any] = {}
        if download_type:
            filters["download_type"] = download_type
        if status:
            filters["status"] = status
        if filters:
            response_data["filters"] = filters

        return JSONResponse(content=response_data, status_code=200)

    except ValueError as e:
        return JSONResponse(
            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
        )
    except Exception as e:
        logger.error(f"Error retrieving download history: {e}", exc_info=True)
        return JSONResponse(
            content={"error": "Failed to retrieve download history", "details": str(e)},
            status_code=500,
        )
@router.get("/{task_id}")
async def get_download_by_task_id(
    task_id: str, current_user: User = Depends(require_auth_from_state)
):
    """
    Retrieve specific download history by task ID.

    Args:
        task_id: Celery task ID
    """
    try:
        download = history_manager.get_download_by_task_id(task_id)

        if not download:
            return JSONResponse(
                content={"error": f"Download with task ID '{task_id}' not found"},
                status_code=404,
            )

        return JSONResponse(content=download, status_code=200)

    except Exception as e:
        logger.error(
            f"Error retrieving download for task {task_id}: {e}", exc_info=True
        )
        return JSONResponse(
            content={"error": "Failed to retrieve download", "details": str(e)},
            status_code=500,
        )


@router.get("/{task_id}/children")
async def get_download_children(
    task_id: str, current_user: User = Depends(require_auth_from_state)
):
    """
    Retrieve children tracks for an album or playlist download.

    Args:
        task_id: Celery task ID
    """
    try:
        # First get the main download to find the children table
        download = history_manager.get_download_by_task_id(task_id)

        if not download:
            return JSONResponse(
                content={"error": f"Download with task ID '{task_id}' not found"},
                status_code=404,
            )

        children_table = download.get("children_table")
        if not children_table:
            return JSONResponse(
                content={"error": f"Download '{task_id}' has no children tracks"},
                status_code=404,
            )

        # Get children tracks
        children = history_manager.get_children_history(children_table)

        response_data = {
            "task_id": task_id,
            "download_type": download.get("download_type"),
            "title": download.get("title"),
            "children_table": children_table,
            "tracks": children,
            "track_count": len(children),
        }

        return JSONResponse(content=response_data, status_code=200)

    except Exception as e:
        logger.error(
            f"Error retrieving children for task {task_id}: {e}", exc_info=True
        )
        return JSONResponse(
            content={
                "error": "Failed to retrieve download children",
                "details": str(e),
            },
            status_code=500,
        )
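As a point of reference, a hypothetical client call against the children endpoint above could look like this; the base URL and task id are placeholders, and authentication is assumed to be disabled.

# Illustrative only; task_id and host are placeholders, not values from the commit.
import httpx

BASE = "http://localhost:7171/api/history"
task_id = "some-celery-task-id"

resp = httpx.get(f"{BASE}/{task_id}/children")
if resp.status_code == 200:
    payload = resp.json()
    # Keys mirror the response_data dict built in the handler above.
    print(payload["title"], payload["track_count"])
elif resp.status_code == 404:
    print("Unknown task id, or this download has no children table.")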
@@ -178,25 +180,27 @@ async def get_download_stats(
    """
    try:
        stats = history_manager.get_download_stats()
        return JSONResponse(content=stats, status_code=200)

    except Exception as e:
        logger.error(f"Error retrieving download stats: {e}", exc_info=True)
        return JSONResponse(
            content={
                "error": "Failed to retrieve download statistics",
                "details": str(e),
            },
            status_code=500,
        )


@router.get("/search")
async def search_history(
    request: Request, current_user: User = Depends(require_auth_from_state)
):
    """
    Search download history by title or artist.

    Query parameters:
    - q: Search query (required)
    - limit: Maximum number of results (default: 50, max: 200)
@@ -206,147 +210,134 @@ async def search_history(
        if not query:
            return JSONResponse(
                content={"error": "Missing required parameter: q (search query)"},
                status_code=400,
            )

        limit = min(int(request.query_params.get("limit", 50)), 200)  # Cap at 200

        # Search history
        results = history_manager.search_history(query, limit)

        response_data = {
            "query": query,
            "results": results,
            "result_count": len(results),
            "limit": limit,
        }

        return JSONResponse(content=response_data, status_code=200)

    except ValueError as e:
        return JSONResponse(
            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
        )
    except Exception as e:
        logger.error(f"Error searching download history: {e}", exc_info=True)
        return JSONResponse(
            content={"error": "Failed to search download history", "details": str(e)},
            status_code=500,
        )


@router.get("/recent")
async def get_recent_downloads(
    request: Request, current_user: User = Depends(require_auth_from_state)
):
    """
    Get most recent downloads.

    Query parameters:
    - limit: Maximum number of results (default: 20, max: 100)
    """
    try:
        limit = min(int(request.query_params.get("limit", 20)), 100)  # Cap at 100

        recent = history_manager.get_recent_downloads(limit)

        response_data = {"downloads": recent, "count": len(recent), "limit": limit}

        return JSONResponse(content=response_data, status_code=200)

    except ValueError as e:
        return JSONResponse(
            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
        )
    except Exception as e:
        logger.error(f"Error retrieving recent downloads: {e}", exc_info=True)
        return JSONResponse(
            content={"error": "Failed to retrieve recent downloads", "details": str(e)},
            status_code=500,
        )
@router.get("/failed")
async def get_failed_downloads(
    request: Request, current_user: User = Depends(require_auth_from_state)
):
    """
    Get failed downloads.

    Query parameters:
    - limit: Maximum number of results (default: 50, max: 200)
    """
    try:
        limit = min(int(request.query_params.get("limit", 50)), 200)  # Cap at 200

        failed = history_manager.get_failed_downloads(limit)

        response_data = {"downloads": failed, "count": len(failed), "limit": limit}

        return JSONResponse(content=response_data, status_code=200)

    except ValueError as e:
        return JSONResponse(
            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
        )
    except Exception as e:
        logger.error(f"Error retrieving failed downloads: {e}", exc_info=True)
        return JSONResponse(
            content={"error": "Failed to retrieve failed downloads", "details": str(e)},
            status_code=500,
        )


@router.post("/cleanup")
async def cleanup_old_history(
    request: Request, current_user: User = Depends(require_auth_from_state)
):
    """
    Clean up old download history.

    JSON body:
    - days_old: Number of days old to keep (default: 30)
    """
    try:
        data = (
            await request.json()
            if request.headers.get("content-type") == "application/json"
            else {}
        )
        days_old = data.get("days_old", 30)

        if not isinstance(days_old, int) or days_old <= 0:
            return JSONResponse(
                content={"error": "days_old must be a positive integer"},
                status_code=400,
            )

        deleted_count = history_manager.clear_old_history(days_old)

        response_data = {
            "message": "Successfully cleaned up old download history",
            "deleted_records": deleted_count,
            "days_old": days_old,
        }

        return JSONResponse(content=response_data, status_code=200)

    except Exception as e:
        logger.error(f"Error cleaning up old history: {e}", exc_info=True)
        return JSONResponse(
            content={"error": "Failed to cleanup old history", "details": str(e)},
            status_code=500,
        )
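The reworked handlers above keep the documented query parameters and JSON shapes; a hedged usage sketch (authentication assumed disabled, host and values illustrative) might be:

# Illustrative client usage of the history API above; not part of the commit.
import httpx

BASE = "http://localhost:7171/api/history"

# Page through failed album downloads, 50 records at a time.
page = httpx.get(
    BASE,
    params={"limit": 50, "offset": 0, "download_type": "album", "status": "failed"},
).json()
print(page["pagination"], len(page["downloads"]))

# Remove records older than 90 days; the handler expects a JSON body.
cleaned = httpx.post(f"{BASE}/cleanup", json={"days_old": 90}).json()
print(cleaned["deleted_records"], "records removed")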