From 7025b273b3fd276993adea6a5e5e310c55d10d06 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Fri, 22 Aug 2025 20:20:51 +0200 Subject: [PATCH 01/32] fixup: NoneType Float Issue --- routes/system/progress.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/routes/system/progress.py b/routes/system/progress.py index 465407d..c6cbc58 100755 --- a/routes/system/progress.py +++ b/routes/system/progress.py @@ -4,7 +4,7 @@ import logging import time import json import asyncio -from typing import Dict, Set +from typing import Dict, Set, Optional from routes.utils.celery_tasks import ( get_task_info, @@ -141,7 +141,7 @@ def start_sse_redis_subscriber(): thread.start() logger.info("SSE Redis Subscriber: Background thread started") -async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict: +async def transform_callback_to_task_format(task_id: str, event_data: dict) -> Optional[dict]: """Transform callback event data into the task format expected by frontend""" try: # Import here to avoid circular imports @@ -646,7 +646,7 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth other_tasks.append(task_response) # Sort other tasks by creation time (newest first) - other_tasks.sort(key=lambda x: x.get("created_at", 0), reverse=True) + other_tasks.sort(key=lambda x: x.get("created_at") or 0.0, reverse=True) if active_only: # Return only active tasks without pagination @@ -876,7 +876,7 @@ async def cancel_task_endpoint(task_id: str, current_user: User = Depends(requir try: # Push an immediate SSE update so clients reflect cancellation and partial summary await trigger_sse_update(task_id, "cancelled") - result["sse_notified"] = True + result["sse_notified"] = "true" except Exception as e: logger.error(f"SSE notify after cancel failed for {task_id}: {e}") return result From 13680ddd2623063922d16ccf596d0cf0fc9e99a6 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Sat, 23 Aug 2025 19:37:42 +0200 Subject: [PATCH 02/32] fix: global logging level --- app.py | 34 ++++++++++++++++++++++------ routes/__init__.py | 4 ---- routes/system/progress.py | 30 ++++++++++-------------- routes/utils/celery_manager.py | 15 ++++++------ routes/utils/celery_queue_manager.py | 2 +- 5 files changed, 48 insertions(+), 37 deletions(-) diff --git a/app.py b/app.py index c24c822..0036763 100755 --- a/app.py +++ b/app.py @@ -13,6 +13,16 @@ import redis import socket from urllib.parse import urlparse +# Define a mapping from string log levels to logging constants +LOG_LEVELS = { + "CRITICAL": logging.CRITICAL, + "ERROR": logging.ERROR, + "WARNING": logging.WARNING, + "INFO": logging.INFO, + "DEBUG": logging.DEBUG, + "NOTSET": logging.NOTSET, +} + # Run DB migrations as early as possible, before importing any routers that may touch DBs try: from routes.migrations import run_migrations_if_needed @@ -27,13 +37,18 @@ except Exception as e: ) sys.exit(1) +# Get log level from environment variable; defaults to WARNING, unrecognized values fall back to INFO +log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper() +log_level = LOG_LEVELS.get(log_level_str, logging.INFO) + # Import route routers (to be created) from routes.auth.credentials import router as credentials_router from routes.auth.auth import router as auth_router -from routes.content.artist import router as artist_router from routes.content.album import router as album_router +from routes.content.artist import router as artist_router from routes.content.track import router as track_router from routes.content.playlist import router as playlist_router from 
routes.content.bulk_add import router as bulk_add_router from routes.core.search import router as search_router from routes.core.history import router as history_router from routes.system.progress import router as prgs_router @@ -66,7 +81,7 @@ def setup_logging(): # Configure root logger root_logger = logging.getLogger() - root_logger.setLevel(logging.DEBUG) + root_logger.setLevel(log_level) # Clear any existing handlers from the root logger if root_logger.hasHandlers(): @@ -83,12 +98,12 @@ def setup_logging(): main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8" ) file_handler.setFormatter(log_format) - file_handler.setLevel(logging.INFO) + file_handler.setLevel(log_level) # Console handler for stderr console_handler = logging.StreamHandler(sys.stderr) console_handler.setFormatter(log_format) - console_handler.setLevel(logging.INFO) + console_handler.setLevel(log_level) # Add handlers to root logger root_logger.addHandler(file_handler) @@ -101,10 +116,15 @@ def setup_logging(): "routes.utils.celery_manager", "routes.utils.celery_tasks", "routes.utils.watch", + "uvicorn", # General Uvicorn logger + "uvicorn.access", # Uvicorn access logs + "uvicorn.error", # Uvicorn error logs ]: logger = logging.getLogger(logger_name) - logger.setLevel(logging.INFO) - logger.propagate = True # Propagate to root logger + logger.setLevel(log_level) + # For uvicorn.access, we explicitly set propagate to False to prevent duplicate logging + # if access_log=False is used in uvicorn.run, and to ensure our middleware handles it. + logger.propagate = False if logger_name == "uvicorn.access" else True logging.info("Logging system initialized") @@ -363,4 +383,4 @@ if __name__ == "__main__": except ValueError: port = 7171 - uvicorn.run(app, host=host, port=port, log_level="info", access_log=True) + uvicorn.run(app, host=host, port=port, log_level=log_level_str.lower(), access_log=False) diff --git a/routes/__init__.py b/routes/__init__.py index 2fa27c9..eea436a 100755 --- a/routes/__init__.py +++ b/routes/__init__.py @@ -1,7 +1,3 @@ import logging -# Configure basic logging for the application if not already configured -# This remains safe to execute on import -logging.basicConfig(level=logging.INFO, format="%(message)s") - logger = logging.getLogger(__name__) diff --git a/routes/system/progress.py b/routes/system/progress.py index c6cbc58..d8c242c 100755 --- a/routes/system/progress.py +++ b/routes/system/progress.py @@ -31,12 +31,12 @@ class SSEBroadcaster: async def add_client(self, queue: asyncio.Queue): """Add a new SSE client""" self.clients.add(queue) - logger.info(f"SSE: Client connected (total: {len(self.clients)})") + logger.debug(f"SSE: Client connected (total: {len(self.clients)})") async def remove_client(self, queue: asyncio.Queue): """Remove an SSE client""" self.clients.discard(queue) - logger.info(f"SSE: Client disconnected (total: {len(self.clients)})") + logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})") async def broadcast_event(self, event_data: dict): """Broadcast an event to all connected clients""" @@ -69,7 +69,7 @@ class SSEBroadcaster: for client in disconnected: self.clients.discard(client) - logger.info(f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients") + logger.debug(f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients") # Global broadcaster instance sse_broadcaster = SSEBroadcaster() @@ -139,7 +139,7 @@ def 
start_sse_redis_subscriber(): # Start Redis subscriber in background thread thread = threading.Thread(target=redis_subscriber_thread, daemon=True) thread.start() - logger.info("SSE Redis Subscriber: Background thread started") + logger.debug("SSE Redis Subscriber: Background thread started") async def transform_callback_to_task_format(task_id: str, event_data: dict) -> Optional[dict]: """Transform callback event data into the task format expected by frontend""" @@ -200,13 +200,7 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"): last_status = get_last_task_status(task_id) # Create a dummy request for the _build_task_response function - from fastapi import Request - class DummyRequest: - def __init__(self): - self.base_url = "http://localhost:7171" - - dummy_request = DummyRequest() - task_response = _build_task_response(task_info, last_status, task_id, current_time, dummy_request) + task_response = _build_task_response(task_info, last_status, task_id, current_time, request=None) # Create minimal event data - global counts will be added at broadcast time event_data = { @@ -431,7 +425,7 @@ def _build_error_callback_object(last_status): return callback_object -def _build_task_response(task_info, last_status, task_id, current_time, request: Request): +def _build_task_response(task_info, last_status, task_id, current_time, request: Optional[Request] = None): """ Helper function to build a standardized task response object. """ @@ -444,7 +438,7 @@ def _build_task_response(task_info, last_status, task_id, current_time, request: try: item_id = item_url.split("/")[-1] if item_id: - base_url = str(request.base_url).rstrip("/") + base_url = str(request.base_url).rstrip("/") if request else "http://localhost:7171" dynamic_original_url = ( f"{base_url}/api/{download_type}/download/{item_id}" ) @@ -496,7 +490,7 @@ def _build_task_response(task_info, last_status, task_id, current_time, request: return task_response -async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Request = None): +async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Optional[Request] = None): """ Get paginated list of tasks. 
""" @@ -938,9 +932,9 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get try: # Register this client with the broadcaster - logger.info(f"SSE Stream: New client connecting...") + logger.debug(f"SSE Stream: New client connecting...") await sse_broadcaster.add_client(client_queue) - logger.info(f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}") + logger.debug(f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}") # Send initial data immediately upon connection initial_data = await generate_task_update_event(time.time(), active_only, request) @@ -973,7 +967,7 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get } event_json = json.dumps(callback_event) yield f"data: {event_json}\n\n" - logger.info(f"SSE Stream: Sent replay callback for task {task_id}") + logger.debug(f"SSE Stream: Sent replay callback for task {task_id}") # Send periodic heartbeats and listen for real-time events last_heartbeat = time.time() @@ -1039,7 +1033,7 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get await asyncio.sleep(1) except asyncio.CancelledError: - logger.info("SSE client disconnected") + logger.debug("SSE client disconnected") return except Exception as e: logger.error(f"SSE connection error: {e}", exc_info=True) diff --git a/routes/utils/celery_manager.py b/routes/utils/celery_manager.py index faebe95..9ce27b4 100644 --- a/routes/utils/celery_manager.py +++ b/routes/utils/celery_manager.py @@ -2,6 +2,7 @@ import subprocess import logging import time import threading +import os # Import Celery task utilities from .celery_config import get_config_params, MAX_CONCURRENT_DL @@ -40,8 +41,10 @@ class CeleryManager: ) def _get_worker_command( - self, queues, concurrency, worker_name_suffix, log_level="INFO" + self, queues, concurrency, worker_name_suffix, log_level_env=None ): + # Use LOG_LEVEL from environment if provided, otherwise default to INFO + log_level = log_level_env if log_level_env else os.getenv("LOG_LEVEL", "WARNING").upper() # Use a unique worker name to avoid conflicts. # %h is replaced by celery with the actual hostname. 
hostname = f"worker_{worker_name_suffix}@%h" @@ -117,6 +120,7 @@ class CeleryManager: queues="downloads", concurrency=self.concurrency, worker_name_suffix="dlw", # Download Worker + log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(), ) logger.info( f"Starting Celery Download Worker with command: {' '.join(download_cmd)}" @@ -151,7 +155,7 @@ class CeleryManager: queues="utility_tasks,default", # Listen to utility and default concurrency=5, # Increased concurrency for SSE updates and utility tasks worker_name_suffix="utw", # Utility Worker - log_level="ERROR" # Reduce log verbosity for utility worker (only errors) + log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(), ) logger.info( f"Starting Celery Utility Worker with command: {' '.join(utility_cmd)}" @@ -250,7 +254,7 @@ class CeleryManager: # Restart only the download worker download_cmd = self._get_worker_command( - "downloads", self.concurrency, "dlw" + "downloads", self.concurrency, "dlw", log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper() ) logger.info( f"Restarting Celery Download Worker with command: {' '.join(download_cmd)}" @@ -366,10 +370,7 @@ celery_manager = CeleryManager() # Example of how to use the manager (typically called from your main app script) if __name__ == "__main__": - logging.basicConfig( - level=logging.INFO, - format="%(message)s", - ) + # Removed logging.basicConfig as it's handled by the main app's setup_logging logger.info("Starting Celery Manager example...") celery_manager.start() try: diff --git a/routes/utils/celery_queue_manager.py b/routes/utils/celery_queue_manager.py index 10b47f1..4d0a378 100644 --- a/routes/utils/celery_queue_manager.py +++ b/routes/utils/celery_queue_manager.py @@ -246,7 +246,7 @@ class CeleryDownloadQueueManager: """Initialize the Celery-based download queue manager""" self.max_concurrent = MAX_CONCURRENT_DL self.paused = False - print( + logger.info( f"Celery Download Queue Manager initialized with max_concurrent={self.max_concurrent}" ) From dbfaba2eb8cf666e242facef68cff98cb7c673e4 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Sat, 23 Aug 2025 20:48:51 +0200 Subject: [PATCH 03/32] add global log info and how to run in dev mode --- README.md | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5c6ecc6..77f0279 100644 --- a/README.md +++ b/README.md @@ -211,7 +211,7 @@ Access logs via Docker: docker logs spotizerr ``` -**Log Locations:** +**Log and File Locations:** - Application Logs: `docker logs spotizerr` (main app and Celery workers) - Individual Task Logs: `./logs/tasks/` (inside container, maps to your volume) - Credentials: `./data/creds/` @@ -221,6 +221,12 @@ docker logs spotizerr - Download History Database: `./data/history/` - Spotify Token Cache: `./.cache/` (if `SPOTIPY_CACHE_PATH` is mapped) +**Global Logging Level:** +The application's global logging level can be controlled via the `LOG_LEVEL` environment variable. +Supported values (case-insensitive): `CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`, `NOTSET`. +If not set, the default logging level is `WARNING`. +Example in `.env` file: `LOG_LEVEL=DEBUG` + ## 🤝 Contributing 1. Fork the repository @@ -228,6 +234,21 @@ docker logs spotizerr 3. Make your changes 4. Submit a pull request +Here is the text to add to your `README.md` file, preferably after the "Quick Start" section: + +## 💻 Development Setup + +To run Spotizerr in development mode: + +1. 
**Backend (API):** + * Ensure Python dependencies are installed (e.g., using `uv pip install -r requirements.txt`). + * Start a Redis server. + * Run the app inside your activated virtual env: `python3 app.py` +2. **Frontend (UI):** + * Navigate to `spotizerr-ui/`. + * Install dependencies: `pnpm install`. + * Start the development server: `pnpm dev`. + ## 📄 License This project is licensed under the GPL yada yada, see [LICENSE](LICENSE) file for details. From 5482128d79175d0fdbb9f815ba72bece94f36eae Mon Sep 17 00:00:00 2001 From: Phlogi Date: Sat, 23 Aug 2025 20:49:56 +0200 Subject: [PATCH 04/32] Revert "fixup: NoneType Float Issue" This reverts commit 7025b273b3fd276993adea6a5e5e310c55d10d06. --- routes/system/progress.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/routes/system/progress.py b/routes/system/progress.py index d8c242c..224c70e 100755 --- a/routes/system/progress.py +++ b/routes/system/progress.py @@ -4,7 +4,7 @@ import logging import time import json import asyncio -from typing import Dict, Set, Optional +from typing import Dict, Set from routes.utils.celery_tasks import ( get_task_info, @@ -141,7 +141,7 @@ def start_sse_redis_subscriber(): thread.start() logger.debug("SSE Redis Subscriber: Background thread started") -async def transform_callback_to_task_format(task_id: str, event_data: dict) -> Optional[dict]: +async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict: """Transform callback event data into the task format expected by frontend""" try: # Import here to avoid circular imports @@ -640,7 +640,7 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth other_tasks.append(task_response) # Sort other tasks by creation time (newest first) - other_tasks.sort(key=lambda x: x.get("created_at") or 0.0, reverse=True) + other_tasks.sort(key=lambda x: x.get("created_at", 0), reverse=True) if active_only: # Return only active tasks without pagination @@ -870,7 +870,7 @@ async def cancel_task_endpoint(task_id: str, current_user: User = Depends(requir try: # Push an immediate SSE update so clients reflect cancellation and partial summary await trigger_sse_update(task_id, "cancelled") - result["sse_notified"] = "true" + result["sse_notified"] = True except Exception as e: logger.error(f"SSE notify after cancel failed for {task_id}: {e}") return result From 2f11233ea1443c77f343f320c449fbec636b74a8 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Sat, 23 Aug 2025 21:35:29 +0200 Subject: [PATCH 05/32] feat: add bulk add mode for download and watch --- app.py | 2 + routes/content/bulk_add.py | 108 ++++++++ .../src/components/config/WatchTab.tsx | 1 + spotizerr-ui/src/lib/spotify-utils.ts | 15 + spotizerr-ui/src/routes/home.tsx | 256 +++++++++++++++--- 5 files changed, 351 insertions(+), 31 deletions(-) create mode 100644 routes/content/bulk_add.py create mode 100644 spotizerr-ui/src/lib/spotify-utils.ts diff --git a/app.py b/app.py index c2449b4..4d182ab 100755 --- a/app.py +++ b/app.py @@ -241,6 +241,7 @@ def create_app(): from routes.content.album import router as album_router from routes.content.track import router as track_router from routes.content.playlist import router as playlist_router + from routes.content.bulk_add import router as bulk_add_router from routes.content.artist import router as artist_router from routes.system.progress import router as prgs_router from routes.core.history import router as history_router @@ -263,6 +264,7 @@ def create_app(): app.include_router(album_router, 
prefix="/api/album", tags=["album"]) app.include_router(track_router, prefix="/api/track", tags=["track"]) app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"]) + app.include_router(bulk_add_router, prefix="/api/bulk", tags=["bulk"]) app.include_router(artist_router, prefix="/api/artist", tags=["artist"]) app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"]) app.include_router(history_router, prefix="/api/history", tags=["history"]) diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py new file mode 100644 index 0000000..f0480e6 --- /dev/null +++ b/routes/content/bulk_add.py @@ -0,0 +1,108 @@ +import re +from typing import List, Dict, Any +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel +import logging + +# Assuming these imports are available for queue management and Spotify info +from routes.utils.get_info import get_spotify_info +from routes.utils.celery_tasks import download_track, download_album, download_playlist + +router = APIRouter() +logger = logging.getLogger(__name__) + +class BulkAddLinksRequest(BaseModel): + links: List[str] + +@router.post("/bulk-add-spotify-links") +async def bulk_add_spotify_links(request: BulkAddLinksRequest): + added_count = 0 + failed_links = [] + total_links = len(request.links) + + for link in request.links: + # Assuming links are pre-filtered by the frontend, + # but still handle potential errors during info retrieval or unsupported types + # Extract type and ID from the link directly using regex + match = re.match(r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)", link) + if not match: + logger.warning(f"Could not parse Spotify link (unexpected format after frontend filter): {link}") + failed_links.append(link) + continue + + spotify_type = match.group(1) + spotify_id = match.group(2) + + try: + # Get basic info to confirm existence and get name/artist + # For playlists, we might want to get full info later when adding to queue + if spotify_type == "playlist": + item_info = get_spotify_info(spotify_id, "playlist_metadata") + else: + item_info = get_spotify_info(spotify_id, spotify_type) + + item_name = item_info.get("name", "Unknown Name") + artist_name = "" + if spotify_type in ["track", "album"]: + artists = item_info.get("artists", []) + if artists: + artist_name = ", ".join([a.get("name", "Unknown Artist") for a in artists]) + elif spotify_type == "playlist": + owner = item_info.get("owner", {}) + artist_name = owner.get("display_name", "Unknown Owner") + + # Construct URL for the download task + spotify_url = f"https://open.spotify.com/{spotify_type}/{spotify_id}" + + # Add to Celery queue based on type + if spotify_type == "track": + download_track.delay( + url=spotify_url, + spotify_id=spotify_id, + type=spotify_type, + name=item_name, + artist=artist_name, + download_type="track", + ) + elif spotify_type == "album": + download_album.delay( + url=spotify_url, + spotify_id=spotify_id, + type=spotify_type, + name=item_name, + artist=artist_name, + download_type="album", + ) + elif spotify_type == "playlist": + download_playlist.delay( + url=spotify_url, + spotify_id=spotify_id, + type=spotify_type, + name=item_name, + artist=artist_name, + download_type="playlist", + ) + else: + logger.warning(f"Unsupported Spotify type for download: {spotify_type} for link: {link}") + failed_links.append(link) + continue + + added_count += 1 + logger.debug(f"Added {added_count+1}/{total_links} {spotify_type} '{item_name}' 
({spotify_id}) to queue.") + + except Exception as e: + logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True) + failed_links.append(link) + + message = f"Successfully added {added_count}/{total_links} links to queue." + if failed_links: + message += f" Failed to add {len(failed_links)} links." + logger.warning(f"Bulk add completed with {len(failed_links)} failures.") + else: + logger.info(f"Bulk add completed successfully. Added {added_count} links.") + + return { + "message": message, + "count": added_count, + "failed_links": failed_links, + } \ No newline at end of file diff --git a/spotizerr-ui/src/components/config/WatchTab.tsx b/spotizerr-ui/src/components/config/WatchTab.tsx index f75d9d8..2722293 100644 --- a/spotizerr-ui/src/components/config/WatchTab.tsx +++ b/spotizerr-ui/src/components/config/WatchTab.tsx @@ -89,6 +89,7 @@ export function WatchTab() { onSuccess: () => { toast.success("Watch settings saved successfully!"); queryClient.invalidateQueries({ queryKey: ["watchConfig"] }); + queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate main config to refresh watch.enabled in SettingsProvider }, onError: (error: any) => { const message = error?.response?.data?.error || error?.message || "Unknown error"; diff --git a/spotizerr-ui/src/lib/spotify-utils.ts b/spotizerr-ui/src/lib/spotify-utils.ts new file mode 100644 index 0000000..96f0f03 --- /dev/null +++ b/spotizerr-ui/src/lib/spotify-utils.ts @@ -0,0 +1,15 @@ +export interface ParsedSpotifyUrl { + type: "track" | "album" | "playlist" | "artist" | "unknown"; + id: string; +} + +export const parseSpotifyUrl = (url: string): ParsedSpotifyUrl => { + const match = url.match(/https:\/\/open\.spotify\.com(?:\/intl-[a-z]{2})?\/(track|album|playlist|artist)\/([a-zA-Z0-9]+)(?:\?.*)?/); + if (match) { + return { + type: match[1] as ParsedSpotifyUrl["type"], + id: match[2], + }; + } + return { type: "unknown", id: "" }; +}; \ No newline at end of file diff --git a/spotizerr-ui/src/routes/home.tsx b/spotizerr-ui/src/routes/home.tsx index f969a5e..23064c7 100644 --- a/spotizerr-ui/src/routes/home.tsx +++ b/spotizerr-ui/src/routes/home.tsx @@ -3,9 +3,13 @@ import { useNavigate, useSearch, useRouterState } from "@tanstack/react-router"; import { useDebounce } from "use-debounce"; import { toast } from "sonner"; import type { TrackType, AlbumType, SearchResult } from "@/types/spotify"; +import { parseSpotifyUrl} from "@/lib/spotify-utils"; import { QueueContext } from "@/contexts/queue-context"; import { SearchResultCard } from "@/components/SearchResultCard"; import { indexRoute } from "@/router"; +import { authApiClient } from "@/lib/api-client"; +import { useSettings } from "@/contexts/settings-context"; +import { FaEye } from "react-icons/fa"; // Utility function to safely get properties from search results const safelyGetProperty = (obj: any, path: string[], fallback: T): T => { @@ -30,10 +34,15 @@ export const Home = () => { const { q, type } = useSearch({ from: "/" }); const { items: allResults } = indexRoute.useLoaderData(); const isLoading = useRouterState({ select: (s) => s.status === "pending" }); + const { settings } = useSettings(); const [query, setQuery] = useState(q || ""); const [searchType, setSearchType] = useState<"track" | "album" | "artist" | "playlist">(type || "track"); const [debouncedQuery] = useDebounce(query, 500); + const [activeTab, setActiveTab] = useState<"search" | "bulkAdd">("search"); + const [linksInput, setLinksInput] = useState(""); + const [isBulkAdding, 
setIsBulkAdding] = useState(false); + const [isBulkWatching, setIsBulkWatching] = useState(false); const [displayedResults, setDisplayedResults] = useState([]); const [isLoadingMore, setIsLoadingMore] = useState(false); @@ -55,6 +64,121 @@ export const Home = () => { } const { addItem } = context; + const handleAddBulkLinks = useCallback(async () => { + const allLinks = linksInput.split("\n").map((link) => link.trim()).filter(Boolean); + if (allLinks.length === 0) { + toast.info("No links provided to add."); + return; + } + + const supportedLinks: string[] = []; + const unsupportedLinks: string[] = []; + + allLinks.forEach((link) => { + const parsed = parseSpotifyUrl(link); + if (parsed.type !== "unknown") { + supportedLinks.push(link); + } else { + unsupportedLinks.push(link); + } + }); + + if (unsupportedLinks.length > 0) { + toast.warning("Some links are not supported and will be skipped.", { + description: `Unsupported: ${unsupportedLinks.join(", ")}`, + }); + } + + if (supportedLinks.length === 0) { + toast.info("No supported links to add."); + return; + } + + setIsBulkAdding(true); + try { + const response = await authApiClient.client.post("/bulk/bulk-add-spotify-links", { links: supportedLinks }); + const { message, count, failed_links } = response.data; + + if (failed_links && failed_links.length > 0) { + toast.warning("Bulk Add Completed with Warnings", { + description: `${count} links added. Failed to add ${failed_links.length} links: ${failed_links.join(", ")}`, + }); + } else { + toast.success("Bulk Add Successful", { + description: `${count} links added to queue.`, + }); + } + setLinksInput(""); // Clear input after successful add + } catch (error: any) { + const errorMessage = error.response?.data?.detail?.message || error.message; + const failedLinks = error.response?.data?.detail?.failed_links || []; + + let description = errorMessage; + if (failedLinks.length > 0) { + description += ` Failed links: ${failedLinks.join(", ")}`; + } + + toast.error("Bulk Add Failed", { + description: description, + }); + if (failedLinks.length > 0) { + console.error("Failed links:", failedLinks); + } + } finally { + setIsBulkAdding(false); + } + }, [linksInput]); + + const handleWatchBulkLinks = useCallback(async () => { + const links = linksInput.split("\n").map((link) => link.trim()).filter(Boolean); + if (links.length === 0) { + toast.info("No links provided to watch."); + return; + } + + const supportedLinks: { type: "artist" | "playlist"; id: string }[] = []; + const unsupportedLinks: string[] = []; + + links.forEach((link) => { + const parsed = parseSpotifyUrl(link); + if (parsed.type === "artist" || parsed.type === "playlist") { + supportedLinks.push({ type: parsed.type, id: parsed.id }); + } else { + unsupportedLinks.push(link); + } + }); + + if (unsupportedLinks.length > 0) { + toast.warning("Some links are not supported for watching.", { + description: `Unsupported: ${unsupportedLinks.join(", ")}`, + }); + } + + if (supportedLinks.length === 0) { + toast.info("No supported links to watch."); + return; + } + + setIsBulkWatching(true); + try { + const watchPromises = supportedLinks.map((item) => + authApiClient.client.put(`/${item.type}/watch/${item.id}`) + ); + await Promise.all(watchPromises); + toast.success("Bulk Watch Successful", { + description: `${supportedLinks.length} supported links added to watchlist.`, + }); + setLinksInput(""); // Clear input after successful add + } catch (error: any) { + const errorMessage = error.response?.data?.detail?.message || error.message; + 
toast.error("Bulk Watch Failed", { + description: errorMessage, + }); + } finally { + setIsBulkWatching(false); + } + }, [linksInput]); + const loadMore = useCallback(() => { setIsLoadingMore(true); setTimeout(() => { @@ -159,39 +283,109 @@ export const Home = () => {

Spotizerr

-
- setQuery(e.target.value)} - placeholder="Search for a track, album, or artist" - className="flex-1 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" - /> - -
-
0 ? 'overflow-y-auto md:overflow-visible' : '' - }`}> - {isLoading ? ( -

Loading results...

- ) : ( - <> - {resultComponent} -
- {isLoadingMore &&

Loading more results...

} - - )} + Search + +
+ + {activeTab === "search" && ( + <> +
+
+ setQuery(e.target.value)} + placeholder="Search for a track, album, or artist" + className="flex-1 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" + /> + +
+
+
0 ? 'overflow-y-auto md:overflow-visible' : '' + }`}> + {isLoading ? ( +

Loading results...

+ ) : ( + <> + {resultComponent} +
+ {isLoadingMore &&

Loading more results...

} + + )} +
+ + )} + + {activeTab === "bulkAdd" && ( +
+ +
+ + + {settings?.watch?.enabled && ( + + )} +
+
+ )}
); }; From 4049bea29e4afed63b61bd094e35eb2fce8d34be Mon Sep 17 00:00:00 2001 From: Phlogi Date: Sat, 23 Aug 2025 21:48:10 +0200 Subject: [PATCH 06/32] match the more flexible regexp allowing more URLs --- routes/content/bulk_add.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py index f0480e6..b5471ea 100644 --- a/routes/content/bulk_add.py +++ b/routes/content/bulk_add.py @@ -24,7 +24,7 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): # Assuming links are pre-filtered by the frontend, # but still handle potential errors during info retrieval or unsupported types # Extract type and ID from the link directly using regex - match = re.match(r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)", link) + match = re.match(r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?", link) if not match: logger.warning(f"Could not parse Spotify link (unexpected format after frontend filter): {link}") failed_links.append(link) From 965362cddcb1548dc7d13794b63a937a98c79251 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Sat, 23 Aug 2025 21:54:43 +0200 Subject: [PATCH 07/32] add download icon --- spotizerr-ui/src/routes/home.tsx | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/spotizerr-ui/src/routes/home.tsx b/spotizerr-ui/src/routes/home.tsx index 23064c7..1f0cf81 100644 --- a/spotizerr-ui/src/routes/home.tsx +++ b/spotizerr-ui/src/routes/home.tsx @@ -9,7 +9,7 @@ import { SearchResultCard } from "@/components/SearchResultCard"; import { indexRoute } from "@/router"; import { authApiClient } from "@/lib/api-client"; import { useSettings } from "@/contexts/settings-context"; -import { FaEye } from "react-icons/fa"; +import { FaEye, FaDownload } from "react-icons/fa"; // Utility function to safely get properties from search results const safelyGetProperty = (obj: any, path: string[], fallback: T): T => { @@ -365,9 +365,13 @@ export const Home = () => { {settings?.watch?.enabled && ( + ))} +
{ {isLoading ? (

Loading results...

) : ( - <> - {resultComponent} -
- {isLoadingMore &&

Loading more results...

} - + <> + {resultComponent} +
+ {isLoadingMore &&

Loading more results...

} + )}
From 0661865d1624b7041f91eb3f4fa8a628eb7badc9 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Sat, 23 Aug 2025 23:00:11 -0600 Subject: [PATCH 12/32] fix(ui): Queue and deezspot callbacks --- routes/migrations/runner.py | 363 +--------- routes/migrations/v3_2_0.py | 100 --- routes/migrations/v3_2_1.py | 41 -- routes/migrations/v3_3_0.py | 69 ++ routes/system/progress.py | 212 ++++-- routes/utils/album.py | 8 +- routes/utils/celery_config.py | 2 +- routes/utils/playlist.py | 8 +- routes/utils/track.py | 8 +- routes/utils/watch/manager.py | 8 +- spotizerr-ui/package.json | 2 +- spotizerr-ui/src/components/Queue.tsx | 2 +- .../src/components/config/AccountsTab.tsx | 1 + spotizerr-ui/src/contexts/QueueProvider.tsx | 456 +++++++------ spotizerr-ui/src/contexts/queue-context.ts | 37 +- spotizerr-ui/src/routes/root.tsx | 7 + spotizerr-ui/src/types/callbacks.ts | 11 + tests/migration/__init__.py | 1 - tests/migration/test_v3_0_6.py | 633 ------------------ tests/migration/test_v3_1_0.py | 65 -- tests/migration/test_v3_1_1.py | 135 ---- 21 files changed, 509 insertions(+), 1660 deletions(-) delete mode 100644 routes/migrations/v3_2_0.py delete mode 100644 routes/migrations/v3_2_1.py create mode 100644 routes/migrations/v3_3_0.py delete mode 100644 tests/migration/__init__.py delete mode 100644 tests/migration/test_v3_0_6.py delete mode 100644 tests/migration/test_v3_1_0.py delete mode 100644 tests/migration/test_v3_1_1.py diff --git a/routes/migrations/runner.py b/routes/migrations/runner.py index 4981ac9..6a12991 100644 --- a/routes/migrations/runner.py +++ b/routes/migrations/runner.py @@ -3,16 +3,11 @@ import sqlite3 from pathlib import Path from typing import Optional -from .v3_2_0 import MigrationV3_2_0 -from .v3_2_1 import log_noop_migration_detected +from .v3_3_0 import MigrationV3_3_0 logger = logging.getLogger(__name__) DATA_DIR = Path("./data") -HISTORY_DB = DATA_DIR / "history" / "download_history.db" -WATCH_DIR = DATA_DIR / "watch" -PLAYLISTS_DB = WATCH_DIR / "playlists.db" -ARTISTS_DB = WATCH_DIR / "artists.db" # Credentials CREDS_DIR = DATA_DIR / "creds" @@ -20,89 +15,6 @@ ACCOUNTS_DB = CREDS_DIR / "accounts.db" BLOBS_DIR = CREDS_DIR / "blobs" SEARCH_JSON = CREDS_DIR / "search.json" -# Expected children table columns for history (album_/playlist_) -CHILDREN_EXPECTED_COLUMNS: dict[str, str] = { - "id": "INTEGER PRIMARY KEY AUTOINCREMENT", - "title": "TEXT NOT NULL", - "artists": "TEXT", - "album_title": "TEXT", - "duration_ms": "INTEGER", - "track_number": "INTEGER", - "disc_number": "INTEGER", - "explicit": "BOOLEAN", - "status": "TEXT NOT NULL", - "external_ids": "TEXT", - "genres": "TEXT", - "isrc": "TEXT", - "timestamp": "REAL NOT NULL", - "position": "INTEGER", - "metadata": "TEXT", -} - -# 3.2.0 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects) -EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = { - "spotify_id": "TEXT PRIMARY KEY", - "name": "TEXT", - "owner_id": "TEXT", - "owner_name": "TEXT", - "total_tracks": "INTEGER", - "link": "TEXT", - "snapshot_id": "TEXT", - "last_checked": "INTEGER", - "added_at": "INTEGER", - "is_active": "INTEGER DEFAULT 1", -} - -EXPECTED_PLAYLIST_TRACKS_COLUMNS: dict[str, str] = { - "spotify_track_id": "TEXT PRIMARY KEY", - "title": "TEXT", - "artist_names": "TEXT", - "album_name": "TEXT", - "album_artist_names": "TEXT", - "track_number": "INTEGER", - "album_spotify_id": "TEXT", - "duration_ms": "INTEGER", - "added_at_playlist": "TEXT", - "added_to_db": "INTEGER", - "is_present_in_spotify": "INTEGER DEFAULT 1", 
- "last_seen_in_spotify": "INTEGER", - "snapshot_id": "TEXT", - "final_path": "TEXT", -} - -EXPECTED_WATCHED_ARTISTS_COLUMNS: dict[str, str] = { - "spotify_id": "TEXT PRIMARY KEY", - "name": "TEXT", - "link": "TEXT", - "total_albums_on_spotify": "INTEGER", - "last_checked": "INTEGER", - "added_at": "INTEGER", - "is_active": "INTEGER DEFAULT 1", - "genres": "TEXT", - "popularity": "INTEGER", - "image_url": "TEXT", -} - -EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = { - "album_spotify_id": "TEXT PRIMARY KEY", - "artist_spotify_id": "TEXT", - "name": "TEXT", - "album_group": "TEXT", - "album_type": "TEXT", - "release_date": "TEXT", - "release_date_precision": "TEXT", - "total_tracks": "INTEGER", - "link": "TEXT", - "image_url": "TEXT", - "added_to_db": "INTEGER", - "last_seen_on_spotify": "INTEGER", - "download_task_id": "TEXT", - "download_status": "INTEGER DEFAULT 0", - "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0", -} - -m320 = MigrationV3_2_0() - def _safe_connect(path: Path) -> Optional[sqlite3.Connection]: try: @@ -115,245 +27,6 @@ def _safe_connect(path: Path) -> Optional[sqlite3.Connection]: return None -def _ensure_table_schema( - conn: sqlite3.Connection, - table_name: str, - expected_columns: dict[str, str], - table_description: str, -) -> None: - try: - cur = conn.execute(f"PRAGMA table_info({table_name})") - existing_info = cur.fetchall() - existing_names = {row[1] for row in existing_info} - for col_name, col_type in expected_columns.items(): - if col_name in existing_names: - continue - col_type_for_add = ( - col_type.replace("PRIMARY KEY", "") - .replace("AUTOINCREMENT", "") - .replace("NOT NULL", "") - .strip() - ) - try: - conn.execute( - f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}" - ) - logger.info( - f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'." 
- ) - except sqlite3.OperationalError as e: - logger.warning( - f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}" - ) - except Exception as e: - logger.error( - f"Error ensuring schema for {table_description} table '{table_name}': {e}", - exc_info=True, - ) - - -def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None: - conn.execute( - f""" - CREATE TABLE IF NOT EXISTS {table_name} ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - title TEXT NOT NULL, - artists TEXT, - album_title TEXT, - duration_ms INTEGER, - track_number INTEGER, - disc_number INTEGER, - explicit BOOLEAN, - status TEXT NOT NULL, - external_ids TEXT, - genres TEXT, - isrc TEXT, - timestamp REAL NOT NULL, - position INTEGER, - metadata TEXT - ) - """ - ) - _ensure_table_schema( - conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history" - ) - - -# --- Helper to validate instance is at least 3.1.2 on history DB --- - - -def _history_children_tables(conn: sqlite3.Connection) -> list[str]: - tables: set[str] = set() - try: - cur = conn.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'" - ) - for row in cur.fetchall(): - if row and row[0]: - tables.add(row[0]) - except sqlite3.Error as e: - logger.warning(f"Failed to scan sqlite_master for children tables: {e}") - - try: - cur = conn.execute( - "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''" - ) - for row in cur.fetchall(): - t = row[0] - if t: - tables.add(t) - except sqlite3.Error as e: - logger.warning(f"Failed to scan download_history for children tables: {e}") - - return sorted(tables) - - -def _is_history_at_least_3_2_0(conn: sqlite3.Connection) -> bool: - required_cols = {"service", "quality_format", "quality_bitrate"} - tables = _history_children_tables(conn) - if not tables: - # Nothing to migrate implies OK - return True - for t in tables: - try: - cur = conn.execute(f"PRAGMA table_info({t})") - cols = {row[1] for row in cur.fetchall()} - if not required_cols.issubset(cols): - return False - except sqlite3.OperationalError: - return False - return True - - -# --- 3.2.0 verification helpers for Watch DBs --- - - -def _update_watch_playlists_db(conn: sqlite3.Connection) -> None: - try: - # Ensure core watched_playlists table exists and has expected schema - conn.execute( - """ - CREATE TABLE IF NOT EXISTS watched_playlists ( - spotify_id TEXT PRIMARY KEY, - name TEXT, - owner_id TEXT, - owner_name TEXT, - total_tracks INTEGER, - link TEXT, - snapshot_id TEXT, - last_checked INTEGER, - added_at INTEGER, - is_active INTEGER DEFAULT 1 - ) - """ - ) - _ensure_table_schema( - conn, - "watched_playlists", - EXPECTED_WATCHED_PLAYLISTS_COLUMNS, - "watched playlists", - ) - - # Upgrade all dynamic playlist_ tables - cur = conn.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'" - ) - for row in cur.fetchall(): - table_name = row[0] - conn.execute( - f""" - CREATE TABLE IF NOT EXISTS {table_name} ( - spotify_track_id TEXT PRIMARY KEY, - title TEXT, - artist_names TEXT, - album_name TEXT, - album_artist_names TEXT, - track_number INTEGER, - album_spotify_id TEXT, - duration_ms INTEGER, - added_at_playlist TEXT, - added_to_db INTEGER, - is_present_in_spotify INTEGER DEFAULT 1, - last_seen_in_spotify INTEGER, - snapshot_id TEXT, - final_path TEXT - ) - """ - ) - _ensure_table_schema( - conn, - table_name, - 
EXPECTED_PLAYLIST_TRACKS_COLUMNS, - f"playlist tracks ({table_name})", - ) - except Exception: - logger.error( - "Failed to upgrade watch playlists DB to 3.2.0 base schema", exc_info=True - ) - - -def _update_watch_artists_db(conn: sqlite3.Connection) -> None: - try: - # Ensure core watched_artists table exists and has expected schema - conn.execute( - """ - CREATE TABLE IF NOT EXISTS watched_artists ( - spotify_id TEXT PRIMARY KEY, - name TEXT, - link TEXT, - total_albums_on_spotify INTEGER, - last_checked INTEGER, - added_at INTEGER, - is_active INTEGER DEFAULT 1, - genres TEXT, - popularity INTEGER, - image_url TEXT - ) - """ - ) - _ensure_table_schema( - conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists" - ) - - # Upgrade all dynamic artist_ tables - cur = conn.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'" - ) - for row in cur.fetchall(): - table_name = row[0] - conn.execute( - f""" - CREATE TABLE IF NOT EXISTS {table_name} ( - album_spotify_id TEXT PRIMARY KEY, - artist_spotify_id TEXT, - name TEXT, - album_group TEXT, - album_type TEXT, - release_date TEXT, - release_date_precision TEXT, - total_tracks INTEGER, - link TEXT, - image_url TEXT, - added_to_db INTEGER, - last_seen_on_spotify INTEGER, - download_task_id TEXT, - download_status INTEGER DEFAULT 0, - is_fully_downloaded_managed_by_app INTEGER DEFAULT 0 - ) - """ - ) - _ensure_table_schema( - conn, - table_name, - EXPECTED_ARTIST_ALBUMS_COLUMNS, - f"artist albums ({table_name})", - ) - except Exception: - logger.error( - "Failed to upgrade watch artists DB to 3.2.0 base schema", exc_info=True - ) - - def _ensure_creds_filesystem() -> None: try: BLOBS_DIR.mkdir(parents=True, exist_ok=True) @@ -374,35 +47,10 @@ def run_migrations_if_needed(): return try: - # Require instance to be at least 3.2.0 on history DB; otherwise abort - with _safe_connect(HISTORY_DB) as history_conn: - if history_conn and not _is_history_at_least_3_2_0(history_conn): - logger.error( - "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0." - ) - raise RuntimeError( - "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0." 
- ) + # Validate configuration version strictly at 3.3.0 + MigrationV3_3_0.assert_config_version_is_3_3_0() - # Watch playlists DB - with _safe_connect(PLAYLISTS_DB) as conn: - if conn: - _update_watch_playlists_db(conn) - # Apply 3.2.0 additions (batch progress columns) - if not m320.check_watch_playlists(conn): - m320.update_watch_playlists(conn) - conn.commit() - - # Watch artists DB (if exists) - if ARTISTS_DB.exists(): - with _safe_connect(ARTISTS_DB) as conn: - if conn: - _update_watch_artists_db(conn) - if not m320.check_watch_artists(conn): - m320.update_watch_artists(conn) - conn.commit() - - # Accounts DB (no changes for this migration path) + # No schema changes in 3.3.0 path; just ensure Accounts DB can be opened with _safe_connect(ACCOUNTS_DB) as conn: if conn: conn.commit() @@ -412,5 +60,4 @@ def run_migrations_if_needed(): raise else: _ensure_creds_filesystem() - log_noop_migration_detected() - logger.info("Database migrations check completed (3.2.0 -> 3.3.0 path)") + logger.info("Migration validation completed (3.3.0 gate)") diff --git a/routes/migrations/v3_2_0.py b/routes/migrations/v3_2_0.py deleted file mode 100644 index 3849210..0000000 --- a/routes/migrations/v3_2_0.py +++ /dev/null @@ -1,100 +0,0 @@ -import sqlite3 -import logging - -logger = logging.getLogger(__name__) - - -class MigrationV3_2_0: - """ - Migration for version 3.2.0 (upgrade path 3.2.0 -> 3.3.0). - - Adds per-item batch progress columns to Watch DBs to support page-by-interval processing. - - Enforces prerequisite: previous instance version must be 3.1.2 (validated by runner). - """ - - # New columns to add to watched tables - PLAYLISTS_ADDED_COLUMNS: dict[str, str] = { - "batch_next_offset": "INTEGER DEFAULT 0", - "batch_processing_snapshot_id": "TEXT", - } - - ARTISTS_ADDED_COLUMNS: dict[str, str] = { - "batch_next_offset": "INTEGER DEFAULT 0", - } - - # --- No-op for history/accounts in 3.3.0 --- - - def check_history(self, conn: sqlite3.Connection) -> bool: - return True - - def update_history(self, conn: sqlite3.Connection) -> None: - pass - - def check_accounts(self, conn: sqlite3.Connection) -> bool: - return True - - def update_accounts(self, conn: sqlite3.Connection) -> None: - pass - - # --- Watch: playlists --- - - def check_watch_playlists(self, conn: sqlite3.Connection) -> bool: - try: - cur = conn.execute("PRAGMA table_info(watched_playlists)") - cols = {row[1] for row in cur.fetchall()} - return set(self.PLAYLISTS_ADDED_COLUMNS.keys()).issubset(cols) - except sqlite3.OperationalError: - # Table missing means not ready - return False - - def update_watch_playlists(self, conn: sqlite3.Connection) -> None: - # Add new columns if missing - try: - cur = conn.execute("PRAGMA table_info(watched_playlists)") - existing = {row[1] for row in cur.fetchall()} - for col_name, col_type in self.PLAYLISTS_ADDED_COLUMNS.items(): - if col_name in existing: - continue - try: - conn.execute( - f"ALTER TABLE watched_playlists ADD COLUMN {col_name} {col_type}" - ) - logger.info( - f"Added column '{col_name} {col_type}' to watched_playlists for 3.3.0 batch progress." 
- ) - except sqlite3.OperationalError as e: - logger.warning( - f"Could not add column '{col_name}' to watched_playlists: {e}" - ) - except Exception: - logger.error("Failed to update watched_playlists for 3.3.0", exc_info=True) - - # --- Watch: artists --- - - def check_watch_artists(self, conn: sqlite3.Connection) -> bool: - try: - cur = conn.execute("PRAGMA table_info(watched_artists)") - cols = {row[1] for row in cur.fetchall()} - return set(self.ARTISTS_ADDED_COLUMNS.keys()).issubset(cols) - except sqlite3.OperationalError: - return False - - def update_watch_artists(self, conn: sqlite3.Connection) -> None: - try: - cur = conn.execute("PRAGMA table_info(watched_artists)") - existing = {row[1] for row in cur.fetchall()} - for col_name, col_type in self.ARTISTS_ADDED_COLUMNS.items(): - if col_name in existing: - continue - try: - conn.execute( - f"ALTER TABLE watched_artists ADD COLUMN {col_name} {col_type}" - ) - logger.info( - f"Added column '{col_name} {col_type}' to watched_artists for 3.3.0 batch progress." - ) - except sqlite3.OperationalError as e: - logger.warning( - f"Could not add column '{col_name}' to watched_artists: {e}" - ) - except Exception: - logger.error("Failed to update watched_artists for 3.3.0", exc_info=True) diff --git a/routes/migrations/v3_2_1.py b/routes/migrations/v3_2_1.py deleted file mode 100644 index d8cad20..0000000 --- a/routes/migrations/v3_2_1.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging -import sqlite3 - -logger = logging.getLogger(__name__) - - -class MigrationV3_2_1: - """ - No-op migration for version 3.2.1 (upgrade path 3.2.1 -> 3.3.0). - No database schema changes are required. - """ - - def check_history(self, conn: sqlite3.Connection) -> bool: - return True - - def update_history(self, conn: sqlite3.Connection) -> None: - pass - - def check_accounts(self, conn: sqlite3.Connection) -> bool: - return True - - def update_accounts(self, conn: sqlite3.Connection) -> None: - pass - - def check_watch_playlists(self, conn: sqlite3.Connection) -> bool: - return True - - def update_watch_playlists(self, conn: sqlite3.Connection) -> None: - pass - - def check_watch_artists(self, conn: sqlite3.Connection) -> bool: - return True - - def update_watch_artists(self, conn: sqlite3.Connection) -> None: - pass - - -def log_noop_migration_detected() -> None: - logger.info( - "No migration performed: detected schema for 3.2.1; no changes needed for 3.2.1 -> 3.3.0." - ) diff --git a/routes/migrations/v3_3_0.py b/routes/migrations/v3_3_0.py new file mode 100644 index 0000000..b36a4b0 --- /dev/null +++ b/routes/migrations/v3_3_0.py @@ -0,0 +1,69 @@ +import json +import logging +from pathlib import Path +from typing import Optional + +logger = logging.getLogger(__name__) + + +CONFIG_PATH = Path("./data/config/main.json") +REQUIRED_VERSION = "3.3.0" +TARGET_VERSION = "3.3.1" + + +def _load_config(config_path: Path) -> Optional[dict]: + try: + if not config_path.exists(): + logger.error(f"Configuration file not found at {config_path}") + return None + content = config_path.read_text(encoding="utf-8") + return json.loads(content) + except Exception: + logger.error("Failed to read configuration file for migration", exc_info=True) + return None + + +def _save_config(config_path: Path, cfg: dict) -> None: + config_path.parent.mkdir(parents=True, exist_ok=True) + config_path.write_text(json.dumps(cfg, indent=4) + "\n", encoding="utf-8") + + +class MigrationV3_3_0: + """ + 3.3.0 migration gate. 
This migration verifies the configuration indicates + version 3.3.0, then bumps it to 3.3.1. + + If the `version` key is missing or not equal to 3.3.0, execution aborts and + prompts the user to update their instance to 3.3.0. + """ + + @staticmethod + def assert_config_version_is_3_3_0() -> None: + cfg = _load_config(CONFIG_PATH) + if not cfg or "version" not in cfg: + raise RuntimeError( + "Missing 'version' in data/config/main.json. Please update your configuration to 3.3.0." + ) + version = str(cfg.get("version", "")).strip() + # Case 1: exactly 3.3.0 -> bump to 3.3.1 + if version == REQUIRED_VERSION: + cfg["version"] = TARGET_VERSION + try: + _save_config(CONFIG_PATH, cfg) + logger.info( + f"Configuration version bumped from {REQUIRED_VERSION} to {TARGET_VERSION}." + ) + except Exception: + logger.error( + "Failed to bump configuration version to 3.3.1", exc_info=True + ) + raise + return + # Case 2: already 3.3.1 -> OK + if version == TARGET_VERSION: + logger.info("Configuration version 3.3.1 detected. Proceeding.") + return + # Case 3: anything else -> abort and instruct to update to 3.3.0 first + raise RuntimeError( + f"Unsupported configuration version '{version}'. Please update to {REQUIRED_VERSION}." + ) diff --git a/routes/system/progress.py b/routes/system/progress.py index a2b1a66..104a617 100755 --- a/routes/system/progress.py +++ b/routes/system/progress.py @@ -4,7 +4,7 @@ import logging import time import json import asyncio -from typing import Set +from typing import Set, Optional import redis import threading @@ -42,12 +42,12 @@ class SSEBroadcaster: """Add a new SSE client""" self.clients.add(queue) logger.debug(f"SSE: Client connected (total: {len(self.clients)})") - + async def remove_client(self, queue: asyncio.Queue): """Remove an SSE client""" self.clients.discard(queue) logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})") - + async def broadcast_event(self, event_data: dict): """Broadcast an event to all connected clients""" logger.debug( @@ -118,26 +118,22 @@ def start_sse_redis_subscriber(): # Handle different event types if event_type == "progress_update": - # Transform callback data into task format expected by frontend - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - try: - broadcast_data = loop.run_until_complete( - transform_callback_to_task_format( - task_id, event_data - ) - ) - if broadcast_data: + # Transform callback data into standardized update format expected by frontend + standardized = standardize_incoming_event(event_data) + if standardized: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: loop.run_until_complete( - sse_broadcaster.broadcast_event(broadcast_data) + sse_broadcaster.broadcast_event(standardized) ) logger.debug( - f"SSE Redis Subscriber: Broadcasted callback to {len(sse_broadcaster.clients)} clients" + f"SSE Redis Subscriber: Broadcasted standardized progress update to {len(sse_broadcaster.clients)} clients" ) - finally: - loop.close() + finally: + loop.close() elif event_type == "summary_update": - # Task summary update - use existing trigger_sse_update logic + # Task summary update - use standardized trigger loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) try: @@ -152,18 +148,20 @@ def start_sse_redis_subscriber(): finally: loop.close() else: - # Unknown event type - broadcast as-is - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - try: - loop.run_until_complete( - sse_broadcaster.broadcast_event(event_data) - ) - logger.debug( - f"SSE Redis 
Subscriber: Broadcasted {event_type} to {len(sse_broadcaster.clients)} clients" - ) - finally: - loop.close() + # Unknown event type - attempt to standardize and broadcast + standardized = standardize_incoming_event(event_data) + if standardized: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: + loop.run_until_complete( + sse_broadcaster.broadcast_event(standardized) + ) + logger.debug( + f"SSE Redis Subscriber: Broadcasted standardized {event_type} to {len(sse_broadcaster.clients)} clients" + ) + finally: + loop.close() except Exception as e: logger.error( @@ -180,6 +178,85 @@ def start_sse_redis_subscriber(): logger.debug("SSE Redis Subscriber: Background thread started") +def build_task_object_from_callback( + task_id: str, callback_data: dict +) -> Optional[dict]: + """Build a standardized task object from callback payload and task info.""" + try: + task_info = get_task_info(task_id) + if not task_info: + return None + return { + "task_id": task_id, + "original_url": f"http://localhost:7171/api/{task_info.get('download_type', 'track')}/download/{task_info.get('url', '').split('/')[-1] if task_info.get('url') else ''}", + "last_line": callback_data, + "timestamp": time.time(), + "download_type": task_info.get("download_type", "track"), + "type": task_info.get("type", task_info.get("download_type", "track")), + "name": task_info.get("name", "Unknown"), + "artist": task_info.get("artist", ""), + "created_at": task_info.get("created_at"), + } + except Exception as e: + logger.error( + f"Error building task object from callback for {task_id}: {e}", + exc_info=True, + ) + return None + + +def standardize_incoming_event(event_data: dict) -> Optional[dict]: + """ + Convert various incoming event shapes into a standardized SSE payload: + { + 'change_type': 'update' | 'heartbeat', + 'tasks': [...], + 'current_timestamp': float, + 'trigger_reason': str (optional) + } + """ + try: + # Heartbeat passthrough (ensure tasks array exists) + if event_data.get("change_type") == "heartbeat": + return { + "change_type": "heartbeat", + "tasks": [], + "current_timestamp": time.time(), + } + + # If already has tasks, just coerce change_type + if isinstance(event_data.get("tasks"), list): + return { + "change_type": event_data.get("change_type", "update"), + "tasks": event_data["tasks"], + "current_timestamp": time.time(), + "trigger_reason": event_data.get("trigger_reason"), + } + + # If it's a callback-shaped event + callback_data = event_data.get("callback_data") + task_id = event_data.get("task_id") + if callback_data and task_id: + task_obj = build_task_object_from_callback(task_id, callback_data) + if task_obj: + return { + "change_type": "update", + "tasks": [task_obj], + "current_timestamp": time.time(), + "trigger_reason": event_data.get("event_type", "callback_update"), + } + + # Fallback to empty update + return { + "change_type": "update", + "tasks": [], + "current_timestamp": time.time(), + } + except Exception as e: + logger.error(f"Failed to standardize incoming event: {e}", exc_info=True) + return None + + async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict: """Transform callback event data into the task format expected by frontend""" try: @@ -210,7 +287,7 @@ async def transform_callback_to_task_format(task_id: str, event_data: dict) -> d # Build minimal event data - global counts will be added at broadcast time return { - "change_type": "update", # Use "update" so it gets processed by existing frontend logic + "change_type": "update", 
"tasks": [task_object], # Frontend expects tasks array "current_timestamp": time.time(), "updated_count": 1, @@ -253,12 +330,12 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"): task_info, last_status, task_id, current_time, dummy_request ) - # Create minimal event data - global counts will be added at broadcast time + # Create standardized event data - global counts will be added at broadcast time event_data = { "tasks": [task_response], "current_timestamp": current_time, "since_timestamp": current_time, - "change_type": "realtime", + "change_type": "update", "trigger_reason": reason, } @@ -419,6 +496,14 @@ def add_global_task_counts_to_event(event_data): event_data["active_tasks"] = global_task_counts["active"] event_data["all_tasks_count"] = sum(global_task_counts.values()) + # Ensure tasks array is present for schema consistency + if "tasks" not in event_data: + event_data["tasks"] = [] + + # Ensure change_type is present + if "change_type" not in event_data: + event_data["change_type"] = "update" + return event_data except Exception as e: @@ -495,7 +580,11 @@ def _build_task_response( try: item_id = item_url.split("/")[-1] if item_id: - base_url = str(request.base_url).rstrip("/") if request else "http://localhost:7171" + base_url = ( + str(request.base_url).rstrip("/") + if request + else "http://localhost:7171" + ) dynamic_original_url = ( f"{base_url}/api/{download_type}/download/{item_id}" ) @@ -573,7 +662,9 @@ def _build_task_response( return task_response -async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Optional[Request] = None): +async def get_paginated_tasks( + page=1, limit=20, active_only=False, request: Optional[Request] = None +): """ Get paginated list of tasks. """ @@ -1066,47 +1157,18 @@ async def stream_task_updates( try: # Register this client with the broadcaster - logger.debug(f"SSE Stream: New client connecting...") + logger.debug("SSE Stream: New client connecting...") await sse_broadcaster.add_client(client_queue) - logger.debug(f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}") - - # Send initial data immediately upon connection + logger.debug( + f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}" + ) + + # Send initial data immediately upon connection (standardized 'update') initial_data = await generate_task_update_event( time.time(), active_only, request ) yield initial_data - # Also send any active tasks as callback-style events to newly connected clients - all_tasks = get_all_tasks() - for task_summary in all_tasks: - task_id = task_summary.get("task_id") - if not task_id: - continue - - task_info = get_task_info(task_id) - if not task_info: - continue - - last_status = get_last_task_status(task_id) - task_status = get_task_status_from_last_status(last_status) - - # Send recent callback data for active or recently completed tasks - if is_task_active(task_status) or ( - last_status and last_status.get("timestamp", 0) > time.time() - 30 - ): - if last_status and "raw_callback" in last_status: - callback_event = { - "task_id": task_id, - "callback_data": last_status["raw_callback"], - "timestamp": last_status.get("timestamp", time.time()), - "change_type": "callback", - "event_type": "progress_update", - "replay": True, # Mark as replay for client - } - event_json = json.dumps(callback_event) - yield f"data: {event_json}\n\n" - logger.debug(f"SSE Stream: Sent replay callback for task {task_id}") - # Send periodic heartbeats 
and listen for real-time events last_heartbeat = time.time() heartbeat_interval = 30.0 @@ -1173,6 +1235,7 @@ async def stream_task_updates( + task_counts["retrying"], "task_counts": task_counts, "change_type": "heartbeat", + "tasks": [], } event_json = json.dumps(heartbeat_data) @@ -1187,6 +1250,7 @@ async def stream_task_updates( "error": "Internal server error", "timestamp": time.time(), "change_type": "error", + "tasks": [], } ) yield f"data: {error_data}\n\n" @@ -1289,6 +1353,7 @@ async def generate_task_update_event( "current_timestamp": current_time, "updated_count": len(updated_tasks), "since_timestamp": since_timestamp, + "change_type": "update", "initial": True, # Mark as initial load } @@ -1301,7 +1366,12 @@ async def generate_task_update_event( except Exception as e: logger.error(f"Error generating initial SSE event: {e}", exc_info=True) error_data = json.dumps( - {"error": "Failed to load initial data", "timestamp": time.time()} + { + "error": "Failed to load initial data", + "timestamp": time.time(), + "tasks": [], + "change_type": "error", + } ) return f"data: {error_data}\n\n" diff --git a/routes/utils/album.py b/routes/utils/album.py index b6fb6e5..be67078 100755 --- a/routes/utils/album.py +++ b/routes/utils/album.py @@ -101,7 +101,7 @@ def download_album( ) dl.download_albumspo( link_album=url, # Spotify URL - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, # Deezer quality recursive_quality=recursive_quality, recursive_download=False, @@ -159,7 +159,7 @@ def download_album( ) spo.download_album( link_album=url, # Spotify URL - output_dir="/app/downloads", + output_dir="./downloads", quality_download=fall_quality, # Spotify quality recursive_quality=recursive_quality, recursive_download=False, @@ -216,7 +216,7 @@ def download_album( ) spo.download_album( link_album=url, - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, recursive_quality=recursive_quality, recursive_download=False, @@ -260,7 +260,7 @@ def download_album( ) dl.download_albumdee( # Deezer URL, download via Deezer link_album=url, - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, recursive_quality=recursive_quality, recursive_download=False, diff --git a/routes/utils/celery_config.py b/routes/utils/celery_config.py index 7cd852b..83814fd 100644 --- a/routes/utils/celery_config.py +++ b/routes/utils/celery_config.py @@ -28,7 +28,7 @@ CONFIG_FILE_PATH = Path("./data/config/main.json") DEFAULT_MAIN_CONFIG = { "service": "spotify", - "version": "3.3.0", + "version": "3.3.1", "spotify": "", "deezer": "", "fallback": False, diff --git a/routes/utils/playlist.py b/routes/utils/playlist.py index b19bd7c..efdec27 100755 --- a/routes/utils/playlist.py +++ b/routes/utils/playlist.py @@ -98,7 +98,7 @@ def download_playlist( ) dl.download_playlistspo( link_playlist=url, # Spotify URL - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, # Deezer quality recursive_quality=recursive_quality, recursive_download=False, @@ -161,7 +161,7 @@ def download_playlist( ) spo.download_playlist( link_playlist=url, # Spotify URL - output_dir="/app/downloads", + output_dir="./downloads", quality_download=fall_quality, # Spotify quality recursive_quality=recursive_quality, recursive_download=False, @@ -224,7 +224,7 @@ def download_playlist( ) spo.download_playlist( link_playlist=url, - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, recursive_quality=recursive_quality, 
recursive_download=False, @@ -268,7 +268,7 @@ def download_playlist( ) dl.download_playlistdee( # Deezer URL, download via Deezer link_playlist=url, - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, recursive_quality=recursive_quality, # Usually False for playlists to get individual track qualities recursive_download=False, diff --git a/routes/utils/track.py b/routes/utils/track.py index 7499d31..6259482 100755 --- a/routes/utils/track.py +++ b/routes/utils/track.py @@ -94,7 +94,7 @@ def download_track( # download_trackspo means: Spotify URL, download via Deezer dl.download_trackspo( link_track=url, # Spotify URL - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, # Deezer quality recursive_quality=recursive_quality, recursive_download=False, @@ -153,7 +153,7 @@ def download_track( ) spo.download_track( link_track=url, # Spotify URL - output_dir="/app/downloads", + output_dir="./downloads", quality_download=fall_quality, # Spotify quality recursive_quality=recursive_quality, recursive_download=False, @@ -211,7 +211,7 @@ def download_track( ) spo.download_track( link_track=url, - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, recursive_quality=recursive_quality, recursive_download=False, @@ -254,7 +254,7 @@ def download_track( ) dl.download_trackdee( # Deezer URL, download via Deezer link_track=url, - output_dir="/app/downloads", + output_dir="./downloads", quality_download=quality, recursive_quality=recursive_quality, recursive_download=False, diff --git a/routes/utils/watch/manager.py b/routes/utils/watch/manager.py index a4e7aa6..4a10e78 100644 --- a/routes/utils/watch/manager.py +++ b/routes/utils/watch/manager.py @@ -1098,7 +1098,7 @@ def update_playlist_m3u_file(playlist_spotify_id: str): # Get configuration settings output_dir = ( - "/app/downloads" # This matches the output_dir used in download functions + "./downloads" # This matches the output_dir used in download functions ) # Get all tracks for the playlist @@ -1125,14 +1125,14 @@ def update_playlist_m3u_file(playlist_spotify_id: str): skipped_missing_final_path = 0 for track in tracks: - # Use final_path from deezspot summary and convert from /app/downloads to ../ relative path + # Use final_path from deezspot summary and convert from ./downloads to ../ relative path final_path = track.get("final_path") if not final_path: skipped_missing_final_path += 1 continue normalized = str(final_path).replace("\\", "/") - if normalized.startswith("/app/downloads/"): - relative_path = normalized.replace("/app/downloads/", "../", 1) + if normalized.startswith("./downloads/"): + relative_path = normalized.replace("./downloads/", "../", 1) elif "/downloads/" in normalized.lower(): idx = normalized.lower().rfind("/downloads/") relative_path = "../" + normalized[idx + len("/downloads/") :] diff --git a/spotizerr-ui/package.json b/spotizerr-ui/package.json index 66fe2e1..8cc0b17 100644 --- a/spotizerr-ui/package.json +++ b/spotizerr-ui/package.json @@ -1,7 +1,7 @@ { "name": "spotizerr-ui", "private": true, - "version": "3.3.0", + "version": "3.3.1", "type": "module", "scripts": { "dev": "vite", diff --git a/spotizerr-ui/src/components/Queue.tsx b/spotizerr-ui/src/components/Queue.tsx index 19cbb1f..b62f7c2 100644 --- a/spotizerr-ui/src/components/Queue.tsx +++ b/spotizerr-ui/src/components/Queue.tsx @@ -772,7 +772,7 @@ export const Queue = () => { const priorities = { "real-time": 1, downloading: 2, processing: 3, initializing: 4, retrying: 5, 
queued: 6, done: 7, completed: 7, error: 8, cancelled: 9, skipped: 10 - }; + } as Record<string, number>; return priorities[status as keyof typeof priorities] || 10; }; diff --git a/spotizerr-ui/src/components/config/AccountsTab.tsx b/spotizerr-ui/src/components/config/AccountsTab.tsx index 29f8f94..c8297c7 100644 --- a/spotizerr-ui/src/components/config/AccountsTab.tsx +++ b/spotizerr-ui/src/components/config/AccountsTab.tsx @@ -103,6 +103,7 @@ export function AccountsTab() { }, onError: (error) => { const msg = extractApiErrorMessage(error); + toast.error(msg); }, }); diff --git a/spotizerr-ui/src/contexts/QueueProvider.tsx b/spotizerr-ui/src/contexts/QueueProvider.tsx index 9c793c4..17d8555 100644 --- a/spotizerr-ui/src/contexts/QueueProvider.tsx +++ b/spotizerr-ui/src/contexts/QueueProvider.tsx @@ -10,7 +10,7 @@ import { } from "./queue-context"; import { toast } from "sonner"; import { v4 as uuidv4 } from "uuid"; -import type { CallbackObject } from "@/types/callbacks"; +import type { CallbackObject, SummaryObject, IDs } from "@/types/callbacks"; import { useAuth } from "@/contexts/auth-context"; export function QueueProvider({ children }: { children: ReactNode }) { @@ -43,54 +43,89 @@ export function QueueProvider({ children }: { children: ReactNode }) { return items.filter(item => isActiveStatus(getStatus(item))).length; }, [items]); - // Improved deduplication - check both id and taskId fields - const itemExists = useCallback((taskId: string, items: QueueItem[]): boolean => { - return items.some(item => - item.id === taskId || - item.taskId === taskId || - // Also check spotify ID to prevent same track being added multiple times - (item.spotifyId && item.spotifyId === taskId) - ); + const extractIDs = useCallback((cb?: CallbackObject): IDs | undefined => { + if (!cb) return undefined; + if ((cb as any).track) return (cb as any).track.ids as IDs; + if ((cb as any).album) return (cb as any).album.ids as IDs; + if ((cb as any).playlist) return (cb as any).playlist.ids as IDs; + return undefined; }, []); // Convert SSE task data to QueueItem const createQueueItemFromTask = useCallback((task: any): QueueItem => { - const spotifyId = task.original_url?.split("/").pop() || ""; + const lastCallback = task.last_line as CallbackObject | undefined; + const ids = extractIDs(lastCallback); + + // Determine container type up-front + const downloadType = (task.download_type || task.type || "track") as DownloadType; + + // Compute spotifyId fallback chain + const fallbackFromUrl = task.original_url?.split("/").pop() || ""; + const spotifyId = ids?.spotify || fallbackFromUrl || ""; // Extract display info from callback - let name = task.name || "Unknown"; - let artist = task.artist || ""; + let name: string = task.name || "Unknown"; + let artist: string = task.artist || ""; - // Handle different callback structures - if (task.last_line) { - try { - if ("track" in task.last_line) { - name = task.last_line.track.title || name; - artist = task.last_line.track.artists?.[0]?.name || artist; - } else if ("album" in task.last_line) { - name = task.last_line.album.title || name; - artist = task.last_line.album.artists?.map((a: any) => a.name).join(", ") || artist; - } else if ("playlist" in task.last_line) { - name = task.last_line.playlist.title || name; - artist = task.last_line.playlist.owner?.name || artist; + try { + if (lastCallback) { + if ((lastCallback as any).track) { + // Prefer parent container title if this is an album/playlist operation + const parent = (lastCallback as any).parent; + if (downloadType === 
"playlist" && parent && (parent as any).title) { + name = (parent as any).title || name; + artist = (parent as any).owner?.name || artist; + } else if (downloadType === "album" && parent && (parent as any).title) { + name = (parent as any).title || name; + const arts = (parent as any).artists || []; + artist = Array.isArray(arts) && arts.length > 0 ? (arts.map((a: any) => a.name).filter(Boolean).join(", ")) : artist; + } else { + // Fallback to the current track's info for standalone track downloads + name = (lastCallback as any).track.title || name; + const arts = (lastCallback as any).track.artists || []; + artist = Array.isArray(arts) && arts.length > 0 ? (arts.map((a: any) => a.name).filter(Boolean).join(", ")) : artist; + } + } else if ((lastCallback as any).album) { + name = (lastCallback as any).album.title || name; + const arts = (lastCallback as any).album.artists || []; + artist = Array.isArray(arts) && arts.length > 0 ? (arts.map((a: any) => a.name).filter(Boolean).join(", ")) : artist; + } else if ((lastCallback as any).playlist) { + name = (lastCallback as any).playlist.title || name; + artist = (lastCallback as any).playlist.owner?.name || artist; + } else if ((lastCallback as any).status === "processing") { + name = (lastCallback as any).name || name; + artist = (lastCallback as any).artist || artist; } - } catch (error) { - console.warn(`createQueueItemFromTask: Error parsing callback for task ${task.task_id}:`, error); } + } catch (error) { + console.warn(`createQueueItemFromTask: Error parsing callback for task ${task.task_id}:`, error); + } + + // Prefer summary from callback status_info if present; fallback to task.summary + let summary: SummaryObject | undefined = undefined; + try { + const statusInfo = (lastCallback as any)?.status_info; + if (statusInfo && typeof statusInfo === "object" && "summary" in statusInfo) { + summary = (statusInfo as any).summary || undefined; + } + } catch {} + if (!summary && task.summary) { + summary = task.summary as SummaryObject; } const queueItem: QueueItem = { id: task.task_id, taskId: task.task_id, - downloadType: task.download_type || task.type || "track", + downloadType, spotifyId, - lastCallback: task.last_line as CallbackObject, + ids, + lastCallback: lastCallback as CallbackObject, name, artist, - summary: task.summary, + summary, error: task.error, }; - + // Debug log for status detection issues const status = getStatus(queueItem); if (status === "unknown" || !status) { @@ -98,7 +133,7 @@ export function QueueProvider({ children }: { children: ReactNode }) { } return queueItem; - }, []); + }, [extractIDs]); // Schedule auto-removal for completed tasks const scheduleRemoval = useCallback((taskId: string, delay: number = 10000) => { @@ -162,7 +197,7 @@ export function QueueProvider({ children }: { children: ReactNode }) { sseConnection.current = eventSource; - eventSource.onopen = () => { + eventSource.onopen = () => { console.log("SSE connected successfully"); reconnectAttempts.current = 0; lastHeartbeat.current = Date.now(); @@ -172,47 +207,47 @@ export function QueueProvider({ children }: { children: ReactNode }) { clearTimeout(reconnectTimeoutRef.current); reconnectTimeoutRef.current = null; } - }; + }; - eventSource.onmessage = (event) => { - try { - const data = JSON.parse(event.data); - - // Debug logging for all SSE events - console.log("🔄 SSE Event Received:", { - timestamp: new Date().toISOString(), - changeType: data.change_type || "update", - totalTasks: data.total_tasks, - taskCounts: data.task_counts, - tasksCount: 
data.tasks?.length || 0, - taskIds: data.tasks?.map((t: any) => { - const tempItem = createQueueItemFromTask(t); - const status = getStatus(tempItem); - // Special logging for playlist/album track progress - if (t.last_line?.current_track && t.last_line?.total_tracks) { - return { - id: t.task_id, - status, - type: t.download_type, - track: `${t.last_line.current_track}/${t.last_line.total_tracks}`, - trackStatus: t.last_line.status_info?.status - }; - } - return { id: t.task_id, status, type: t.download_type }; - }) || [], - rawData: data - }); - - if (data.error) { + eventSource.onmessage = (event) => { + try { + const data = JSON.parse(event.data); + + // Debug logging for all SSE events + console.log("🔄 SSE Event Received:", { + timestamp: new Date().toISOString(), + changeType: data.change_type || "update", + totalTasks: data.total_tasks, + taskCounts: data.task_counts, + tasksCount: data.tasks?.length || 0, + taskIds: data.tasks?.map((t: any) => { + const tempItem = createQueueItemFromTask(t); + const status = getStatus(tempItem); + // Special logging for playlist/album track progress + if (t.last_line?.current_track && t.last_line?.total_tracks) { + return { + id: t.task_id, + status, + type: t.download_type, + track: `${t.last_line.current_track}/${t.last_line.total_tracks}`, + trackStatus: t.last_line.status_info?.status + }; + } + return { id: t.task_id, status, type: t.download_type }; + }) || [], + rawData: data + }); + + if (data.error) { console.error("SSE error:", data.error); toast.error("Connection error"); - return; - } + return; + } - // Handle different message types from optimized backend + // Handle message types from backend const changeType = data.change_type || "update"; const triggerReason = data.trigger_reason || ""; - + if (changeType === "heartbeat") { // Heartbeat - just update counts, no task processing const { total_tasks, task_counts } = data; @@ -221,7 +256,6 @@ export function QueueProvider({ children }: { children: ReactNode }) { (total_tasks || 0); setTotalTasks(calculatedTotal); lastHeartbeat.current = Date.now(); - // Reduce heartbeat logging noise - only log every 10th heartbeat if (Math.random() < 0.1) { console.log("SSE: Connection active (heartbeat)"); } @@ -249,9 +283,10 @@ export function QueueProvider({ children }: { children: ReactNode }) { setItems(prev => { // Create improved deduplication maps - const existingTaskIds = new Set(); - const existingSpotifyIds = new Set(); - const existingItemsMap = new Map(); + const existingTaskIds = new Set(); + const existingSpotifyIds = new Set(); + const existingDeezerIds = new Set(); + const existingItemsMap = new Map(); prev.forEach(item => { if (item.id) { @@ -263,6 +298,7 @@ export function QueueProvider({ children }: { children: ReactNode }) { existingItemsMap.set(item.taskId, item); } if (item.spotifyId) existingSpotifyIds.add(item.spotifyId); + if (item.ids?.deezer) existingDeezerIds.add(item.ids.deezer); }); // Process each updated task @@ -271,33 +307,37 @@ export function QueueProvider({ children }: { children: ReactNode }) { const newTasksToAdd: QueueItem[] = []; for (const task of updatedTasks) { - const taskId = task.task_id; - const spotifyId = task.original_url?.split("/").pop(); + const taskId = task.task_id as string; // Skip if already processed (shouldn't happen but safety check) if (processedTaskIds.has(taskId)) continue; processedTaskIds.add(taskId); // Check if this task exists in current queue - const existingItem = existingItemsMap.get(taskId) || - 
Array.from(existingItemsMap.values()).find(item => - item.spotifyId === spotifyId - ); + const existingItem = existingItemsMap.get(taskId); + const newItemCandidate = createQueueItemFromTask(task); + const candidateSpotify = newItemCandidate.spotifyId; + const candidateDeezer = newItemCandidate.ids?.deezer; - if (existingItem) { + // If not found by id, try to match by identifiers + const existingById = existingItem || Array.from(existingItemsMap.values()).find(item => + (candidateSpotify && item.spotifyId === candidateSpotify) || + (candidateDeezer && item.ids?.deezer === candidateDeezer) + ); + + if (existingById) { // Skip SSE updates for items that are already cancelled by user action - const existingStatus = getStatus(existingItem); - if (existingStatus === "cancelled" && existingItem.error === "Cancelled by user") { + const existingStatus = getStatus(existingById); + if (existingStatus === "cancelled" && existingById.error === "Cancelled by user") { console.log(`SSE: Skipping update for user-cancelled task ${taskId}`); continue; } // Update existing item - const updatedItem = createQueueItemFromTask(task); + const updatedItem = newItemCandidate; const status = getStatus(updatedItem); - const previousStatus = getStatus(existingItem); + const previousStatus = getStatus(existingById); - // Only log significant status changes if (previousStatus !== status) { console.log(`SSE: Status change ${taskId}: ${previousStatus} → ${status}`); } @@ -305,33 +345,32 @@ export function QueueProvider({ children }: { children: ReactNode }) { // Schedule removal for terminal states if (isTerminalStatus(status)) { const delay = status === "cancelled" ? 5000 : 10000; - scheduleRemoval(existingItem.id, delay); + scheduleRemoval(existingById.id, delay); console.log(`SSE: Scheduling removal for terminal task ${taskId} (${status}) in ${delay}ms`); } updatedItems.push(updatedItem); } else { // This is a new task from SSE - const newItem = createQueueItemFromTask(task); + const newItem = newItemCandidate; const status = getStatus(newItem); - // Check for duplicates by spotify ID - if (spotifyId && existingSpotifyIds.has(spotifyId)) { - console.log(`SSE: Skipping duplicate by spotify ID: ${spotifyId}`); + // Check for duplicates by identifiers + if ((candidateSpotify && existingSpotifyIds.has(candidateSpotify)) || + (candidateDeezer && existingDeezerIds.has(candidateDeezer))) { + console.log(`SSE: Skipping duplicate by identifier: ${candidateSpotify || candidateDeezer}`); continue; } - // Check if this is a pending download - if (pendingDownloads.current.has(spotifyId || taskId)) { + // Check if this is a pending download (by spotify id for now) + if (pendingDownloads.current.has(candidateSpotify || newItem.id)) { console.log(`SSE: Skipping pending download: ${taskId}`); continue; } - // For terminal tasks from SSE, these should be tasks that just transitioned - // (backend now filters out already-terminal tasks) + // For terminal tasks from SSE if (isTerminalStatus(status)) { console.log(`SSE: Adding recently completed task: ${taskId} (${status})`); - // Schedule immediate removal for terminal tasks const delay = status === "cancelled" ? 
5000 : 10000; scheduleRemoval(newItem.id, delay); } else if (isActiveStatus(status)) { @@ -349,7 +388,9 @@ export function QueueProvider({ children }: { children: ReactNode }) { const finalItems = prev.map(item => { const updated = updatedItems.find(u => u.id === item.id || u.taskId === item.id || - u.id === item.taskId || u.taskId === item.taskId + u.id === item.taskId || u.taskId === item.taskId || + (u.spotifyId && u.spotifyId === item.spotifyId) || + (u.ids?.deezer && u.ids.deezer === item.ids?.deezer) ); return updated || item; }); @@ -360,69 +401,69 @@ export function QueueProvider({ children }: { children: ReactNode }) { } else if (changeType === "update") { // Update received but no tasks - might be count updates only console.log("SSE: Received update with count changes only"); - } - } catch (error) { - console.error("Failed to parse SSE message:", error, event.data); } - }; + } catch (error) { + console.error("Failed to parse SSE message:", error, event.data); + } + }; eventSource.onerror = (error) => { - // Use appropriate logging level - first attempt failures are common and expected - if (reconnectAttempts.current === 0) { - console.log("SSE initial connection failed, will retry shortly..."); - } else { - console.warn("SSE connection error:", error); - } - - // Only check for auth errors if auth is enabled - if (authEnabled) { - const token = authApiClient.getToken(); - if (!token) { - console.warn("SSE: Connection error and no auth token - stopping reconnection attempts"); - eventSource.close(); - sseConnection.current = null; - stopHealthCheck(); - return; - } - } - - eventSource.close(); - sseConnection.current = null; - - if (reconnectAttempts.current < maxReconnectAttempts) { - reconnectAttempts.current++; - // Use shorter delays for faster recovery, especially on first attempts - const baseDelay = reconnectAttempts.current === 1 ? 100 : 1000; - const delay = Math.min(baseDelay * Math.pow(2, reconnectAttempts.current - 1), 15000); - - if (reconnectAttempts.current === 1) { - console.log("SSE: Retrying connection shortly..."); - } else { - console.log(`SSE: Reconnecting in ${delay}ms (attempt ${reconnectAttempts.current}/${maxReconnectAttempts})`); - } - - reconnectTimeoutRef.current = window.setTimeout(() => { - if (reconnectAttempts.current === 1) { - console.log("SSE: Attempting reconnection..."); - } else { - console.log("SSE: Attempting to reconnect..."); - } - connectSSE(); - }, delay); - } else { - console.error("SSE: Max reconnection attempts reached"); - toast.error("Connection lost. 
Please refresh the page."); - } - }; - - } catch (error) { - console.log("Initial SSE connection setup failed, will retry:", error); - // Don't show toast for initial connection failures since they often recover quickly - if (reconnectAttempts.current > 0) { - toast.error("Failed to establish connection"); + // Use appropriate logging level - first attempt failures are common and expected + if (reconnectAttempts.current === 0) { + console.log("SSE initial connection failed, will retry shortly..."); + } else { + console.warn("SSE connection error:", error); } + + // Only check for auth errors if auth is enabled + if (authEnabled) { + const token = authApiClient.getToken(); + if (!token) { + console.warn("SSE: Connection error and no auth token - stopping reconnection attempts"); + eventSource.close(); + sseConnection.current = null; + stopHealthCheck(); + return; + } + } + + eventSource.close(); + sseConnection.current = null; + + if (reconnectAttempts.current < maxReconnectAttempts) { + reconnectAttempts.current++; + // Use shorter delays for faster recovery, especially on first attempts + const baseDelay = reconnectAttempts.current === 1 ? 100 : 1000; + const delay = Math.min(baseDelay * Math.pow(2, reconnectAttempts.current - 1), 15000); + + if (reconnectAttempts.current === 1) { + console.log("SSE: Retrying connection shortly..."); + } else { + console.log(`SSE: Reconnecting in ${delay}ms (attempt ${reconnectAttempts.current}/${maxReconnectAttempts})`); + } + + reconnectTimeoutRef.current = window.setTimeout(() => { + if (reconnectAttempts.current === 1) { + console.log("SSE: Attempting reconnection..."); + } else { + console.log("SSE: Attempting to reconnect..."); + } + connectSSE(); + }, delay); + } else { + console.error("SSE: Max reconnection attempts reached"); + toast.error("Connection lost. 
Please refresh the page."); + } + }; + + } catch (error) { + console.log("Initial SSE connection setup failed, will retry:", error); + // Don't show toast for initial connection failures since they often recover quickly + if (reconnectAttempts.current > 0) { + toast.error("Failed to establish connection"); } - }, [createQueueItemFromTask, scheduleRemoval, startHealthCheck, authEnabled]); + } + }, [createQueueItemFromTask, startHealthCheck, authEnabled, stopHealthCheck]); const disconnectSSE = useCallback(() => { if (sseConnection.current) { @@ -449,17 +490,19 @@ export function QueueProvider({ children }: { children: ReactNode }) { if (newTasks.length > 0) { setItems(prev => { - const uniqueNewTasks = newTasks - .filter((task: any) => !itemExists(task.task_id, prev)) - .filter((task: any) => { - const tempItem = createQueueItemFromTask(task); - const status = getStatus(tempItem); + const extended = newTasks + .map((task: any) => createQueueItemFromTask(task)) + .filter((qi: QueueItem) => { + const status = getStatus(qi); // Consistent filtering - exclude all terminal state tasks in pagination too - return !isTerminalStatus(status); - }) - .map((task: any) => createQueueItemFromTask(task)); - - return [...prev, ...uniqueNewTasks]; + if (isTerminalStatus(status)) return false; + // Dedupe by task id or identifiers + if (prev.some(p => p.id === qi.id || p.taskId === qi.id)) return false; + if (qi.spotifyId && prev.some(p => p.spotifyId === qi.spotifyId)) return false; + if (qi.ids?.deezer && prev.some(p => p.ids?.deezer === qi.ids?.deezer)) return false; + return true; + }); + return [...prev, ...extended]; }); setCurrentPage(nextPage); } @@ -471,7 +514,7 @@ export function QueueProvider({ children }: { children: ReactNode }) { } finally { setIsLoadingMore(false); } - }, [hasMore, isLoadingMore, currentPage, createQueueItemFromTask, itemExists]); + }, [hasMore, isLoadingMore, currentPage, createQueueItemFromTask]); // Note: SSE connection state is managed through the initialize effect and restartSSE method // The auth context should call restartSSE() when login/logout occurs @@ -496,13 +539,11 @@ export function QueueProvider({ children }: { children: ReactNode }) { const { tasks, pagination, total_tasks, task_counts } = response.data; const queueItems = tasks - .filter((task: any) => { - const tempItem = createQueueItemFromTask(task); - const status = getStatus(tempItem); - // On refresh, exclude all terminal state tasks to start with a clean queue + .map((task: any) => createQueueItemFromTask(task)) + .filter((qi: QueueItem) => { + const status = getStatus(qi); return !isTerminalStatus(status); - }) - .map((task: any) => createQueueItemFromTask(task)); + }); console.log(`Queue initialized: ${queueItems.length} items (filtered out terminal state tasks)`); setItems(queueItems); @@ -542,8 +583,8 @@ export function QueueProvider({ children }: { children: ReactNode }) { return; } - // Check if item already exists in queue - if (itemExists(item.spotifyId, items)) { + // Check if item already exists in queue (by spotify id or identifiers on items) + if (items.some(i => i.spotifyId === item.spotifyId || i.ids?.spotify === item.spotifyId)) { toast.info("Item already in queue"); return; } @@ -551,22 +592,22 @@ export function QueueProvider({ children }: { children: ReactNode }) { const tempId = uuidv4(); pendingDownloads.current.add(item.spotifyId); - const newItem: QueueItem = { + const newItem: QueueItem = { id: tempId, downloadType: item.type, spotifyId: item.spotifyId, name: item.name, artist: 
item.artist || "", - }; + } as QueueItem; - setItems(prev => [newItem, ...prev]); + setItems(prev => [newItem, ...prev]); - try { + try { const response = await authApiClient.client.get(`/${item.type}/download/${item.spotifyId}`); - const { task_id: taskId } = response.data; + const { task_id: taskId } = response.data; - setItems(prev => - prev.map(i => + setItems(prev => + prev.map(i => i.id === tempId ? { ...i, id: taskId, taskId } : i ) ); @@ -575,15 +616,15 @@ export function QueueProvider({ children }: { children: ReactNode }) { pendingDownloads.current.delete(item.spotifyId); connectSSE(); // Ensure connection is active - } catch (error: any) { + } catch (error: any) { console.error(`Failed to start download:`, error); - toast.error(`Failed to start download for ${item.name}`); + toast.error(`Failed to start download for ${item.name}`); // Remove failed item and clear from pending setItems(prev => prev.filter(i => i.id !== tempId)); pendingDownloads.current.delete(item.spotifyId); } - }, [connectSSE, itemExists, items]); + }, [connectSSE, items]); const removeItem = useCallback((id: string) => { const item = items.find(i => i.id === id); @@ -604,32 +645,18 @@ export function QueueProvider({ children }: { children: ReactNode }) { }, [items]); const cancelItem = useCallback(async (id: string) => { - const item = items.find(i => i.id === id); + const item = items.find(i => i.id === id); if (!item?.taskId) return; - try { - await authApiClient.client.post(`/prgs/cancel/${item.taskId}`); - - setItems(prev => - prev.map(i => - i.id === id ? { - ...i, - error: "Cancelled by user", - lastCallback: { - status: "cancelled", - timestamp: Date.now() / 1000, - type: item.downloadType, - name: item.name, - artist: item.artist - } as unknown as CallbackObject - } : i - ) - ); + try { + await authApiClient.client.post(`/prgs/cancel/${item.taskId}`); - // Remove immediately after showing cancelled state briefly + // Mark as cancelled via error field to preserve type safety + setItems(prev => prev.map(i => i.id === id ? { ...i, error: "Cancelled by user" } : i)); + + // Remove shortly after showing cancelled state setTimeout(() => { setItems(prev => prev.filter(i => i.id !== id)); - // Clean up any existing removal timer if (removalTimers.current[id]) { clearTimeout(removalTimers.current[id]); delete removalTimers.current[id]; @@ -637,11 +664,11 @@ export function QueueProvider({ children }: { children: ReactNode }) { }, 500); toast.info(`Cancelled: ${item.name}`); - } catch (error) { + } catch (error) { console.error("Failed to cancel task:", error); toast.error(`Failed to cancel: ${item.name}`); } - }, [items, scheduleRemoval]); + }, [items]); const cancelAll = useCallback(async () => { const activeItems = items.filter(item => { @@ -657,26 +684,11 @@ export function QueueProvider({ children }: { children: ReactNode }) { try { await authApiClient.client.post("/prgs/cancel/all"); + // Mark each active item as cancelled via error field activeItems.forEach(item => { - setItems(prev => - prev.map(i => - i.id === item.id ? { - ...i, - error: "Cancelled by user", - lastCallback: { - status: "cancelled", - timestamp: Date.now() / 1000, - type: item.downloadType, - name: item.name, - artist: item.artist - } as unknown as CallbackObject - } : i - ) - ); - // Remove immediately after showing cancelled state briefly + setItems(prev => prev.map(i => i.id === item.id ? 
{ ...i, error: "Cancelled by user" } : i)); setTimeout(() => { setItems(prev => prev.filter(i => i.id !== item.id)); - // Clean up any existing removal timer if (removalTimers.current[item.id]) { clearTimeout(removalTimers.current[item.id]); delete removalTimers.current[item.id]; @@ -689,7 +701,7 @@ export function QueueProvider({ children }: { children: ReactNode }) { console.error("Failed to cancel all:", error); toast.error("Failed to cancel downloads"); } - }, [items, scheduleRemoval]); + }, [items]); const clearCompleted = useCallback(() => { setItems(prev => prev.filter(item => { diff --git a/spotizerr-ui/src/contexts/queue-context.ts b/spotizerr-ui/src/contexts/queue-context.ts index 22da227..15f54da 100644 --- a/spotizerr-ui/src/contexts/queue-context.ts +++ b/spotizerr-ui/src/contexts/queue-context.ts @@ -1,23 +1,23 @@ import { createContext, useContext } from "react"; -import type { SummaryObject, CallbackObject, TrackCallbackObject, AlbumCallbackObject, PlaylistCallbackObject, ProcessingCallbackObject } from "@/types/callbacks"; +import type { SummaryObject, CallbackObject, TrackCallbackObject, AlbumCallbackObject, PlaylistCallbackObject, ProcessingCallbackObject, IDs } from "@/types/callbacks"; export type DownloadType = "track" | "album" | "playlist"; // Type guards for callback objects const isProcessingCallback = (obj: CallbackObject): obj is ProcessingCallbackObject => { - return "status" in obj && typeof obj.status === "string"; + return "status" in obj && typeof (obj as ProcessingCallbackObject).status === "string" && (obj as any).name !== undefined; }; const isTrackCallback = (obj: CallbackObject): obj is TrackCallbackObject => { - return "track" in obj && "status_info" in obj; + return (obj as any).track !== undefined && (obj as any).status_info !== undefined; }; const isAlbumCallback = (obj: CallbackObject): obj is AlbumCallbackObject => { - return "album" in obj && "status_info" in obj; + return (obj as any).album !== undefined && (obj as any).status_info !== undefined; }; const isPlaylistCallback = (obj: CallbackObject): obj is PlaylistCallbackObject => { - return "playlist" in obj && "status_info" in obj; + return (obj as any).playlist !== undefined && (obj as any).status_info !== undefined; }; // Simplified queue item that works directly with callback objects @@ -27,6 +27,9 @@ export interface QueueItem { downloadType: DownloadType; spotifyId: string; + // Primary identifiers from callback (spotify/deezer/isrc/upc) + ids?: IDs; + // Current callback data - this is the source of truth lastCallback?: CallbackObject; @@ -43,6 +46,11 @@ export interface QueueItem { // Status extraction utilities export const getStatus = (item: QueueItem): string => { + // If user locally cancelled the task, reflect it without fabricating a callback + if (item.error === "Cancelled by user") { + return "cancelled"; + } + if (!item.lastCallback) { // Only log if this seems problematic (task has been around for a while) return "initializing"; @@ -57,32 +65,30 @@ export const getStatus = (item: QueueItem): string => { if (item.downloadType === "album" || item.downloadType === "playlist") { const currentTrack = item.lastCallback.current_track || 1; const totalTracks = item.lastCallback.total_tracks || 1; - const trackStatus = item.lastCallback.status_info.status; + const trackStatus = item.lastCallback.status_info.status as string; // If this is the last track and it's in a terminal state, the parent is done if (currentTrack >= totalTracks && ["done", "skipped", 
"error"].includes(trackStatus)) { - console.log(`🎵 Playlist/Album completed: ${item.name} (track ${currentTrack}/${totalTracks}, status: ${trackStatus})`); return "completed"; } // If track is in terminal state but not the last track, parent is still downloading if (["done", "skipped", "error"].includes(trackStatus)) { - console.log(`🎵 Playlist/Album progress: ${item.name} (track ${currentTrack}/${totalTracks}, status: ${trackStatus}) - continuing...`); return "downloading"; } // Track is actively being processed return "downloading"; } - return item.lastCallback.status_info.status; + return item.lastCallback.status_info.status as string; } if (isAlbumCallback(item.lastCallback)) { - return item.lastCallback.status_info.status; + return item.lastCallback.status_info.status as string; } if (isPlaylistCallback(item.lastCallback)) { - return item.lastCallback.status_info.status; + return item.lastCallback.status_info.status as string; } console.warn(`getStatus: Unknown callback type for item ${item.id}:`, item.lastCallback); @@ -104,8 +110,8 @@ export const getProgress = (item: QueueItem): number | undefined => { // For individual tracks if (item.downloadType === "track" && isTrackCallback(item.lastCallback)) { - if (item.lastCallback.status_info.status === "real-time" && "progress" in item.lastCallback.status_info) { - return item.lastCallback.status_info.progress; + if ((item.lastCallback.status_info as any).status === "real-time" && "progress" in (item.lastCallback.status_info as any)) { + return (item.lastCallback.status_info as any).progress as number; } return undefined; } @@ -115,8 +121,9 @@ export const getProgress = (item: QueueItem): number | undefined => { const callback = item.lastCallback; const currentTrack = callback.current_track || 1; const totalTracks = callback.total_tracks || 1; - const trackProgress = (callback.status_info.status === "real-time" && "progress" in callback.status_info) - ? callback.status_info.progress : 0; + const statusInfo: any = callback.status_info; + const trackProgress = (statusInfo.status === "real-time" && "progress" in statusInfo) + ? statusInfo.progress : 0; // Formula: ((completed tracks) + (current track progress / 100)) / total tracks * 100 const completedTracks = currentTrack - 1; diff --git a/spotizerr-ui/src/routes/root.tsx b/spotizerr-ui/src/routes/root.tsx index f39cc2b..ca25246 100644 --- a/spotizerr-ui/src/routes/root.tsx +++ b/spotizerr-ui/src/routes/root.tsx @@ -7,6 +7,7 @@ import { ProtectedRoute } from "@/components/auth/ProtectedRoute"; import { UserMenu } from "@/components/auth/UserMenu"; import { useContext, useState, useEffect } from "react"; import { getTheme, toggleTheme } from "@/lib/theme"; +import { useSettings } from "@/contexts/settings-context"; function ThemeToggle() { const [currentTheme, setCurrentTheme] = useState<'light' | 'dark' | 'system'>('system'); @@ -80,6 +81,8 @@ function ThemeToggle() { function AppLayout() { const { toggleVisibility, totalTasks } = useContext(QueueContext) || {}; + const { settings } = useSettings(); + const watchEnabled = !!settings?.watch?.enabled; return (
@@ -92,9 +95,11 @@ function AppLayout() {
+ {watchEnabled && ( Watchlist + )} History @@ -144,9 +149,11 @@ function AppLayout() { Home + {watchEnabled && ( Watchlist + )} History diff --git a/spotizerr-ui/src/types/callbacks.ts b/spotizerr-ui/src/types/callbacks.ts index 5e88286..98cfc91 100644 --- a/spotizerr-ui/src/types/callbacks.ts +++ b/spotizerr-ui/src/types/callbacks.ts @@ -222,11 +222,22 @@ export interface SummaryObject { total_successful: number; total_skipped: number; total_failed: number; + // Optional metadata present in deezspot summaries (album/playlist and sometimes single-track) + service: "spotify" | "deezer"; + quality: string; // e.g., "ogg", "flac" + bitrate: string; // e.g., "320k" + m3u_path?: string; // playlist convenience output + // Convenience fields that may appear for single-track flows + final_path?: string; + download_quality?: string; // e.g., "OGG_320" } export interface DoneObject extends BaseStatusObject { status: "done"; summary?: SummaryObject; + // Convenience fields often present on done for tracks + final_path?: string; + download_quality?: string; } export type StatusInfo = diff --git a/tests/migration/__init__.py b/tests/migration/__init__.py deleted file mode 100644 index 0519ecb..0000000 --- a/tests/migration/__init__.py +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/tests/migration/test_v3_0_6.py b/tests/migration/test_v3_0_6.py deleted file mode 100644 index 17cdfcb..0000000 --- a/tests/migration/test_v3_0_6.py +++ /dev/null @@ -1,633 +0,0 @@ -import sqlite3 -from pathlib import Path -import pytest -import json - -# Override the autouse credentials fixture from conftest for this module -@pytest.fixture(scope="session", autouse=True) -def setup_credentials_for_tests(): - # No-op to avoid external API calls; this shadows the session autouse fixture in conftest.py - yield - - -def _create_306_history_db(db_path: Path) -> None: - db_path.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(str(db_path)) as conn: - conn.executescript( - """ - CREATE TABLE IF NOT EXISTS download_history ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - download_type TEXT NOT NULL, - title TEXT NOT NULL, - artists TEXT, - timestamp REAL NOT NULL, - status TEXT NOT NULL, - service TEXT, - quality_format TEXT, - quality_bitrate TEXT, - total_tracks INTEGER, - successful_tracks INTEGER, - failed_tracks INTEGER, - skipped_tracks INTEGER, - children_table TEXT, - task_id TEXT, - external_ids TEXT, - metadata TEXT, - release_date TEXT, - genres TEXT, - images TEXT, - owner TEXT, - album_type TEXT, - duration_total_ms INTEGER, - explicit BOOLEAN - ); - CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp); - CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status); - CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id); - CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids); - """ - ) - # Insert rows that reference non-existent children tables - conn.execute( - """ - INSERT INTO download_history ( - download_type, title, artists, timestamp, status, service, - quality_format, quality_bitrate, total_tracks, successful_tracks, - failed_tracks, skipped_tracks, children_table, task_id, - external_ids, metadata, release_date, genres, images, owner, - album_type, duration_total_ms, explicit - ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- """, - ( - "album", - "Test Album", - "[]", - "completed", - "spotify", - "FLAC", - "1411kbps", - 10, - 8, - 1, - 1, - "album_test1", - "task-album-1", - "{}", - "{}", - "{}", - "[]", - "[]", - "{}", - "album", - 123456, - 0, - ), - ) - conn.execute( - """ - INSERT INTO download_history ( - download_type, title, artists, timestamp, status, service, - quality_format, quality_bitrate, total_tracks, successful_tracks, - failed_tracks, skipped_tracks, children_table, task_id, - external_ids, metadata, release_date, genres, images, owner, - album_type, duration_total_ms, explicit - ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - """, - ( - "playlist", - "Test Playlist", - "[]", - "partial", - "spotify", - "MP3", - "320kbps", - 20, - 15, - 3, - 2, - "playlist_test2", - "task-playlist-1", - "{}", - "{}", - "{}", - "[]", - "[]", - "{}", - "", - 654321, - 0, - ), - ) - # Create a legacy children table with too-few columns to test schema upgrade - conn.execute( - "CREATE TABLE IF NOT EXISTS album_legacy (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)" - ) - # Create a fully-specified children table from docs and add rows - conn.execute( - """ - CREATE TABLE IF NOT EXISTS album_f9e8d7c6b5 ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - title TEXT NOT NULL, - artists TEXT, - album_title TEXT, - duration_ms INTEGER, - track_number INTEGER, - disc_number INTEGER, - explicit BOOLEAN, - status TEXT NOT NULL, - external_ids TEXT, - genres TEXT, - isrc TEXT, - timestamp REAL NOT NULL, - position INTEGER, - metadata TEXT - ) - """ - ) - conn.execute( - """ - INSERT INTO download_history ( - download_type, title, artists, timestamp, status, service, - quality_format, quality_bitrate, total_tracks, successful_tracks, - failed_tracks, skipped_tracks, children_table, task_id, - external_ids, metadata, release_date, genres, images, owner, - album_type, duration_total_ms, explicit - ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - """, - ( - "album", - "Random Access Memories", - "[\"Daft Punk\"]", - "partial", - "spotify", - "FLAC", - "1411", - 13, - 12, - 1, - 0, - "album_f9e8d7c6b5", - "celery-task-id-789", - "{\"spotify\": \"4m2880jivSbbyEGAKfITCa\"}", - "{\"callback_type\": \"album\"}", - "{\"year\": 2013, \"month\": 5, \"day\": 17}", - "[\"disco\", \"funk\"]", - "[{\"url\": \"https://i.scdn.co/image/...\"}]", - None, - "album", - 4478293, - 0 - ), - ) - conn.executemany( - """ - INSERT INTO album_f9e8d7c6b5 ( - title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, - external_ids, genres, isrc, timestamp, position, metadata - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, strftime('%s','now'), ?, ?) - """, - [ - ( - "Get Lucky (feat. Pharrell Williams & Nile Rodgers)", - "[\"Daft Punk\", \"Pharrell Williams\", \"Nile Rodgers\"]", - "Random Access Memories", - 369626, - 8, - 1, - 0, - "completed", - "{\"spotify\": \"69kOkLUCdZlE8ApD28j1JG\", \"isrc\": \"GBUJH1300019\"}", - "[]", - "GBUJH1300019", - 0, - "{\"album\": {...}, \"type\": \"track\"}", - ), - ( - "Lose Yourself to Dance (feat. 
Pharrell Williams)", - "[\"Daft Punk\", \"Pharrell Williams\"]", - "Random Access Memories", - 353893, - 6, - 1, - 0, - "failed", - "{\"spotify\": \"5L95vS64r8PAj5M8H1oYkm\", \"isrc\": \"GBUJH1300017\"}", - "[]", - "GBUJH1300017", - 0, - "{\"album\": {...}, \"failure_reason\": \"Could not find matching track on Deezer.\"}", - ), - ] - ) - - -def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None: - playlists_db.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(str(playlists_db)) as pconn: - pconn.executescript( - """ - CREATE TABLE IF NOT EXISTS watched_playlists ( - spotify_id TEXT PRIMARY KEY, - name TEXT, - owner_id TEXT, - owner_name TEXT, - total_tracks INTEGER, - link TEXT, - snapshot_id TEXT, - last_checked INTEGER, - added_at INTEGER, - is_active INTEGER DEFAULT 1 - ); - """ - ) - # Insert a sample watched playlist row (docs example) - pconn.execute( - """ - INSERT OR REPLACE INTO watched_playlists ( - spotify_id, name, owner_id, owner_name, total_tracks, link, snapshot_id, last_checked, added_at, is_active - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - """, - ( - "37i9dQZF1DXcBWIGoYBM5M", - "Today's Top Hits", - "spotify", - "Spotify", - 50, - "https://open.spotify.com/playlist/37i9dQZF1DXcBWIGoYBM5M", - "MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy", - 1677187000, - 1677186950, - 1, - ), - ) - # Create a legacy/minimal playlist dynamic table to test schema upgrade - pconn.execute( - "CREATE TABLE IF NOT EXISTS playlist_legacy (spotify_track_id TEXT PRIMARY KEY, title TEXT)" - ) - # Create a fully-specified playlist dynamic table (docs example) and add rows - pconn.execute( - """ - CREATE TABLE IF NOT EXISTS playlist_37i9dQZF1DXcBWIGoYBM5M ( - spotify_track_id TEXT PRIMARY KEY, - title TEXT, - artist_names TEXT, - album_name TEXT, - album_artist_names TEXT, - track_number INTEGER, - album_spotify_id TEXT, - duration_ms INTEGER, - added_at_playlist TEXT, - added_to_db INTEGER, - is_present_in_spotify INTEGER, - last_seen_in_spotify INTEGER, - snapshot_id TEXT, - final_path TEXT - ) - """ - ) - pconn.executemany( - """ - INSERT OR REPLACE INTO playlist_37i9dQZF1DXcBWIGoYBM5M ( - spotify_track_id, title, artist_names, album_name, album_artist_names, track_number, album_spotify_id, - duration_ms, added_at_playlist, added_to_db, is_present_in_spotify, last_seen_in_spotify, snapshot_id, final_path - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- """, - [ - ( - "4k6Uh1HXdhtusDW5y80vNN", - "As It Was", - "Harry Styles", - "Harry's House", - "Harry Styles", - 4, - "5r36AJ6VOJtp00oxSkNaAO", - 167303, - "2023-02-20T10:00:00Z", - 1677186980, - 1, - 1677187000, - "MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy", - "/downloads/music/Harry Styles/Harry's House/04 - As It Was.flac", - ), - ( - "5ww2BF9slyYgAno5EAsoOJ", - "Flowers", - "Miley Cyrus", - "Endless Summer Vacation", - "Miley Cyrus", - 1, - "1lw0K2sIKi84gav3e4pG3c", - 194952, - "2023-02-23T12:00:00Z", - 1677186995, - 1, - 1677187000, - "MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy", - None, - ), - ] - ) - with sqlite3.connect(str(artists_db)) as aconn: - aconn.executescript( - """ - CREATE TABLE IF NOT EXISTS watched_artists ( - spotify_id TEXT PRIMARY KEY, - name TEXT, - link TEXT, - total_albums_on_spotify INTEGER, - last_checked INTEGER, - added_at INTEGER, - is_active INTEGER DEFAULT 1, - genres TEXT, - popularity INTEGER, - image_url TEXT - ); - """ - ) - # Insert a sample watched artist row (docs example) - aconn.execute( - """ - INSERT OR REPLACE INTO watched_artists ( - spotify_id, name, link, total_albums_on_spotify, last_checked, added_at, is_active, genres, popularity, image_url - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - """, - ( - "4oLeXFyACqeem2VImYeBFe", - "Madeon", - "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe", - 45, - 1677188000, - 1677187900, - 1, - "electro house, filter house, french house", - 65, - "https://i.scdn.co/image/ab6761610000e5eb...", - ), - ) - # Create a legacy/minimal artist dynamic table to test schema upgrade - aconn.execute( - "CREATE TABLE IF NOT EXISTS artist_legacy (album_spotify_id TEXT PRIMARY KEY, name TEXT)" - ) - # Create a fully-specified artist dynamic table (docs example) and add rows - aconn.execute( - """ - CREATE TABLE IF NOT EXISTS artist_4oLeXFyACqeem2VImYeBFe ( - album_spotify_id TEXT PRIMARY KEY, - artist_spotify_id TEXT, - name TEXT, - album_group TEXT, - album_type TEXT, - release_date TEXT, - release_date_precision TEXT, - total_tracks INTEGER, - link TEXT, - image_url TEXT, - added_to_db INTEGER, - last_seen_on_spotify INTEGER, - download_task_id TEXT, - download_status INTEGER, - is_fully_downloaded_managed_by_app INTEGER - ) - """ - ) - aconn.executemany( - """ - INSERT OR REPLACE INTO artist_4oLeXFyACqeem2VImYeBFe ( - album_spotify_id, artist_spotify_id, name, album_group, album_type, release_date, release_date_precision, - total_tracks, link, image_url, added_to_db, last_seen_on_spotify, download_task_id, download_status, is_fully_downloaded_managed_by_app - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- """, - [ - ( - "2GWMnf2ltOQd2v2T62a2m8", - "4oLeXFyACqeem2VImYeBFe", - "Good Faith", - "album", - "album", - "2019-11-15", - "day", - 10, - "https://open.spotify.com/album/2GWMnf2ltOQd2v2T62a2m8", - "https://i.scdn.co/image/ab67616d0000b273...", - 1677187950, - 1677188000, - "celery-task-id-123", - 2, - 1, - ), - ( - "2smfe2S0AVaxH2I1a5p55n", - "4oLeXFyACqeem2VImYeBFe", - "Gonna Be Good", - "single", - "single", - "2023-01-19", - "day", - 1, - "https://open.spotify.com/album/2smfe2S0AVaxH2I1a5p55n", - "https://i.scdn.co/image/ab67616d0000b273...", - 1677187960, - 1677188000, - "celery-task-id-456", - 1, - 0, - ), - ] - ) - - -def _create_306_accounts(creds_dir: Path, accounts_db: Path) -> None: - creds_dir.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(str(accounts_db)) as conn: - conn.executescript( - """ - CREATE TABLE IF NOT EXISTS spotify ( - name TEXT PRIMARY KEY, - region TEXT, - created_at REAL, - updated_at REAL - ); - CREATE TABLE IF NOT EXISTS deezer ( - name TEXT PRIMARY KEY, - arl TEXT, - region TEXT, - created_at REAL, - updated_at REAL - ); - """ - ) - conn.execute( - "INSERT OR REPLACE INTO spotify (name, region, created_at, updated_at) VALUES (?, ?, ?, ?)", - ("my_main_spotify", "US", 1677190000.0, 1677190000.0), - ) - conn.execute( - "INSERT OR REPLACE INTO deezer (name, arl, region, created_at, updated_at) VALUES (?, ?, ?, ?, ?)", - ("my_hifi_deezer", "a1b2c3d4e5f6a1b2c3d4e5f6...", "FR", 1677190100.0, 1677190100.0), - ) - # Pre-create creds filesystem - search_json = creds_dir / "search.json" - if not search_json.exists(): - search_json.write_text('{"client_id":"your_global_spotify_client_id","client_secret":"your_global_spotify_client_secret"}\n', encoding="utf-8") - blobs_dir = creds_dir / "blobs" / "my_main_spotify" - blobs_dir.mkdir(parents=True, exist_ok=True) - creds_blob = blobs_dir / "credentials.json" - if not creds_blob.exists(): - creds_blob.write_text( - '{"version":"v1","access_token":"...","expires_at":1677193600,"refresh_token":"...","scope":"user-read-private user-read-email playlist-read-private"}\n', - encoding="utf-8", - ) - - -def _get_columns(db_path: Path, table: str) -> set[str]: - with sqlite3.connect(str(db_path)) as conn: - cur = conn.execute(f"PRAGMA table_info({table})") - return {row[1] for row in cur.fetchall()} - - -def _get_count(db_path: Path, table: str) -> int: - with sqlite3.connect(str(db_path)) as conn: - cur = conn.execute(f"SELECT COUNT(*) FROM {table}") - return cur.fetchone()[0] - - -def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypatch: pytest.MonkeyPatch): - # Arrange temp paths - data_dir = tmp_path / "data" - history_db = data_dir / "history" / "download_history.db" - playlists_db = data_dir / "watch" / "playlists.db" - artists_db = data_dir / "watch" / "artists.db" - creds_dir = data_dir / "creds" - accounts_db = creds_dir / "accounts.db" - blobs_dir = creds_dir / "blobs" - search_json = creds_dir / "search.json" - - # Create 3.0.6 base schemas and sample data (full simulation) - _create_306_history_db(history_db) - _create_306_watch_dbs(playlists_db, artists_db) - _create_306_accounts(creds_dir, accounts_db) - - # Point the migration runner to our temp DBs - from routes.migrations import runner - monkeypatch.setattr(runner, "DATA_DIR", data_dir) - monkeypatch.setattr(runner, "HISTORY_DB", history_db) - monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch") - monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db) - monkeypatch.setattr(runner, "ARTISTS_DB", artists_db) - 
monkeypatch.setattr(runner, "CREDS_DIR", creds_dir) - monkeypatch.setattr(runner, "ACCOUNTS_DB", accounts_db) - monkeypatch.setattr(runner, "BLOBS_DIR", blobs_dir) - monkeypatch.setattr(runner, "SEARCH_JSON", search_json) - - # Act: run migrations - runner.run_migrations_if_needed() - # Run twice to ensure idempotency - runner.run_migrations_if_needed() - - # Assert: referenced children tables exist with expected columns - expected_children_cols = { - "id", - "title", - "artists", - "album_title", - "duration_ms", - "track_number", - "disc_number", - "explicit", - "status", - "external_ids", - "genres", - "isrc", - "timestamp", - "position", - "metadata", - } - assert _get_columns(history_db, "album_test1").issuperset(expected_children_cols) - assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols) - # Legacy table upgraded - assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols) - # Pre-existing children table preserved and correct - assert _get_columns(history_db, "album_f9e8d7c6b5").issuperset(expected_children_cols) - assert _get_count(history_db, "album_f9e8d7c6b5") == 2 - - # Assert: accounts DB created/preserved with expected tables and columns - assert accounts_db.exists() - spotify_cols = _get_columns(accounts_db, "spotify") - deezer_cols = _get_columns(accounts_db, "deezer") - assert {"name", "region", "created_at", "updated_at"}.issubset(spotify_cols) - assert {"name", "arl", "region", "created_at", "updated_at"}.issubset(deezer_cols) - - # Assert: creds filesystem and pre-existing blob preserved - assert blobs_dir.exists() and blobs_dir.is_dir() - assert search_json.exists() - data = json.loads(search_json.read_text()) - assert set(data.keys()) == {"client_id", "client_secret"} - assert (blobs_dir / "my_main_spotify" / "credentials.json").exists() - - # Assert: watch playlists core and dynamic tables upgraded to/at 3.1.2 schema - watched_playlists_cols = _get_columns(playlists_db, "watched_playlists") - assert { - "spotify_id", - "name", - "owner_id", - "owner_name", - "total_tracks", - "link", - "snapshot_id", - "last_checked", - "added_at", - "is_active", - }.issubset(watched_playlists_cols) - playlist_dynamic_expected = { - "spotify_track_id", - "title", - "artist_names", - "album_name", - "album_artist_names", - "track_number", - "album_spotify_id", - "duration_ms", - "added_at_playlist", - "added_to_db", - "is_present_in_spotify", - "last_seen_in_spotify", - "snapshot_id", - "final_path", - } - assert _get_columns(playlists_db, "playlist_legacy").issuperset(playlist_dynamic_expected) - assert _get_columns(playlists_db, "playlist_37i9dQZF1DXcBWIGoYBM5M").issuperset(playlist_dynamic_expected) - assert _get_count(playlists_db, "playlist_37i9dQZF1DXcBWIGoYBM5M") == 2 - - # Assert: watch artists core and dynamic tables upgraded to/at 3.1.2 schema - watched_artists_cols = _get_columns(artists_db, "watched_artists") - assert { - "spotify_id", - "name", - "link", - "total_albums_on_spotify", - "last_checked", - "added_at", - "is_active", - "genres", - "popularity", - "image_url", - }.issubset(watched_artists_cols) - artist_dynamic_expected = { - "album_spotify_id", - "artist_spotify_id", - "name", - "album_group", - "album_type", - "release_date", - "release_date_precision", - "total_tracks", - "link", - "image_url", - "added_to_db", - "last_seen_on_spotify", - "download_task_id", - "download_status", - "is_fully_downloaded_managed_by_app", - } - assert _get_columns(artists_db, 
"artist_legacy").issuperset(artist_dynamic_expected) - assert _get_columns(artists_db, "artist_4oLeXFyACqeem2VImYeBFe").issuperset(artist_dynamic_expected) - assert _get_count(artists_db, "artist_4oLeXFyACqeem2VImYeBFe") == 2 \ No newline at end of file diff --git a/tests/migration/test_v3_1_0.py b/tests/migration/test_v3_1_0.py deleted file mode 100644 index 3447fe1..0000000 --- a/tests/migration/test_v3_1_0.py +++ /dev/null @@ -1,65 +0,0 @@ -import sqlite3 -from pathlib import Path -import pytest - -import sqlite3 -from pathlib import Path -import pytest - -from routes.migrations.v3_1_0 import MigrationV3_1_0 - -# Override the autouse credentials fixture from conftest for this module -@pytest.fixture(scope="session", autouse=True) -def setup_credentials_for_tests(): - # No-op to avoid external API calls - yield - - -def _create_310_watch_artists_db(db_path: Path) -> None: - db_path.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(str(db_path)) as conn: - conn.executescript( - """ - CREATE TABLE watched_artists ( - spotify_id TEXT PRIMARY KEY, - name TEXT - ); - CREATE TABLE "artist_a1b2c3" ( - album_spotify_id TEXT PRIMARY KEY, - artist_spotify_id TEXT, - name TEXT, - album_type TEXT, - release_date TEXT, - total_tracks INTEGER, - link TEXT, - image_url TEXT, - added_to_db INTEGER, - last_seen_on_spotify INTEGER - ); - """ - ) - conn.execute("INSERT INTO watched_artists (spotify_id) VALUES (?)", ('a1b2c3',)) - - -def test_watch_artists_migration(tmp_path): - # 1. Setup mock v3.1.0 database - db_path = tmp_path / "artists.db" - _create_310_watch_artists_db(db_path) - - # 2. Run the migration - migration = MigrationV3_1_0() - with sqlite3.connect(db_path) as conn: - # Sanity check before migration - cur = conn.execute('PRAGMA table_info("artist_a1b2c3")') - columns_before = {row[1] for row in cur.fetchall()} - assert 'download_status' not in columns_before - - # Apply migration - migration.update_watch_artists(conn) - - # 3. Assert migration was successful - cur = conn.execute('PRAGMA table_info("artist_a1b2c3")') - columns_after = {row[1] for row in cur.fetchall()} - - expected_columns = migration.ARTIST_ALBUMS_EXPECTED_COLUMNS.keys() - assert set(expected_columns).issubset(columns_after) diff --git a/tests/migration/test_v3_1_1.py b/tests/migration/test_v3_1_1.py deleted file mode 100644 index ac90fde..0000000 --- a/tests/migration/test_v3_1_1.py +++ /dev/null @@ -1,135 +0,0 @@ -import sqlite3 -import unittest -from pathlib import Path -from tempfile import mkdtemp -from shutil import rmtree -import pytest - -from routes.migrations.v3_1_1 import MigrationV3_1_1 - -# Override the autouse credentials fixture from conftest for this module -@pytest.fixture(scope="session", autouse=True) -def setup_credentials_for_tests(): - # No-op to avoid external API calls; this shadows the session autouse fixture in conftest.py - yield - - -class TestMigrationV3_1_1(unittest.TestCase): - """ - Tests the dummy migration from 3.1.1 to 3.1.2, ensuring no changes are made. 
- """ - - def setUp(self): - self.temp_dir = Path(mkdtemp()) - self.history_db_path = self.temp_dir / "history" / "download_history.db" - self.artists_db_path = self.temp_dir / "watch" / "artists.db" - self.playlists_db_path = self.temp_dir / "watch" / "playlists.db" - self.accounts_db_path = self.temp_dir / "creds" / "accounts.db" - self._create_mock_databases() - - def tearDown(self): - rmtree(self.temp_dir) - - def _get_db_schema(self, db_path: Path) -> dict: - """Helper to get the schema of a database.""" - schema = {} - with sqlite3.connect(db_path) as conn: - cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table';") - tables = [row[0] for row in cursor.fetchall() if not row[0].startswith("sqlite_")] - for table_name in tables: - info_cursor = conn.execute(f'PRAGMA table_info("{table_name}")') - schema[table_name] = {row[1] for row in info_cursor.fetchall()} - return schema - - def _create_mock_databases(self): - """Creates a set of mock databases with the 3.1.1 schema.""" - # History DB - self.history_db_path.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(self.history_db_path) as conn: - conn.executescript( - """ - CREATE TABLE download_history ( - id INTEGER PRIMARY KEY, download_type TEXT, title TEXT, artists TEXT, - timestamp REAL, status TEXT, service TEXT, quality_format TEXT, - quality_bitrate TEXT, total_tracks INTEGER, successful_tracks INTEGER, - failed_tracks INTEGER, skipped_tracks INTEGER, children_table TEXT, - task_id TEXT, external_ids TEXT, metadata TEXT, release_date TEXT, - genres TEXT, images TEXT, owner TEXT, album_type TEXT, - duration_total_ms INTEGER, explicit BOOLEAN - ); - CREATE TABLE playlist_p1l2a3 ( - id INTEGER PRIMARY KEY, title TEXT, artists TEXT, album_title TEXT, - duration_ms INTEGER, track_number INTEGER, disc_number INTEGER, - explicit BOOLEAN, status TEXT, external_ids TEXT, genres TEXT, - isrc TEXT, timestamp REAL, position INTEGER, metadata TEXT - ); - """ - ) - - # Watch Artists DB - self.artists_db_path.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(self.artists_db_path) as conn: - conn.executescript( - """ - CREATE TABLE watched_artists (id TEXT PRIMARY KEY, children_table TEXT); - INSERT INTO watched_artists (id, children_table) VALUES ('a1b2c3d4', 'artist_a1b2c3d4'); - CREATE TABLE artist_a1b2c3d4 ( - id TEXT PRIMARY KEY, title TEXT, artists TEXT, album_type TEXT, - release_date TEXT, total_tracks INTEGER, external_ids TEXT, - images TEXT, album_group TEXT, release_date_precision TEXT, - download_task_id TEXT, download_status TEXT, - is_fully_downloaded_managed_by_app BOOLEAN - ); - """ - ) - - # Watch Playlists DB - self.playlists_db_path.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(self.playlists_db_path) as conn: - conn.executescript( - """ - CREATE TABLE watched_playlists (id TEXT PRIMARY KEY, children_table TEXT); - CREATE TABLE playlist_p1l2a3 (id TEXT PRIMARY KEY, title TEXT); - """ - ) - - # Accounts DB - self.accounts_db_path.parent.mkdir(parents=True, exist_ok=True) - with sqlite3.connect(self.accounts_db_path) as conn: - conn.execute("CREATE TABLE accounts (id TEXT PRIMARY KEY, service TEXT, details TEXT);") - - def test_migration_leaves_schema_unchanged(self): - """Asserts that the dummy migration makes no changes to any database.""" - # Get initial schemas - initial_schemas = { - "history": self._get_db_schema(self.history_db_path), - "artists": self._get_db_schema(self.artists_db_path), - "playlists": self._get_db_schema(self.playlists_db_path), - 
"accounts": self._get_db_schema(self.accounts_db_path), - } - - # Run the dummy migration - migration = MigrationV3_1_1() - with sqlite3.connect(self.history_db_path) as conn: - migration.update_history(conn) - with sqlite3.connect(self.artists_db_path) as conn: - migration.update_watch_artists(conn) - with sqlite3.connect(self.playlists_db_path) as conn: - migration.update_watch_playlists(conn) - with sqlite3.connect(self.accounts_db_path) as conn: - migration.update_accounts(conn) - - # Get final schemas - final_schemas = { - "history": self._get_db_schema(self.history_db_path), - "artists": self._get_db_schema(self.artists_db_path), - "playlists": self._get_db_schema(self.playlists_db_path), - "accounts": self._get_db_schema(self.accounts_db_path), - } - - # Assert schemas are identical - self.assertEqual(initial_schemas, final_schemas) - - -if __name__ == '__main__': - unittest.main() From 690e6b0a18b15ce2c39326ef996dd5b324fb6d36 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Sat, 23 Aug 2025 23:22:11 -0600 Subject: [PATCH 13/32] feat(ui): Add spinner for "Downloading..." element in UI and add save icon in config page --- spotizerr-ui/public/save.svg | 4 ++++ spotizerr-ui/public/spinner.svg | 2 ++ spotizerr-ui/src/components/AlbumCard.tsx | 2 +- .../src/components/SearchResultCard.tsx | 2 +- .../src/components/config/AccountsTab.tsx | 7 +++++- .../src/components/config/DownloadsTab.tsx | 15 ++++++++----- .../src/components/config/FormattingTab.tsx | 11 +++++++--- .../src/components/config/GeneralTab.tsx | 7 +++++- .../src/components/config/ProfileTab.tsx | 13 +++++++---- .../src/components/config/ServerTab.tsx | 22 ++++++++++++++----- .../components/config/UserManagementTab.tsx | 20 +++++++---------- .../src/components/config/WatchTab.tsx | 7 +++++- spotizerr-ui/src/routes/album.tsx | 2 +- spotizerr-ui/src/routes/artist.tsx | 4 ++-- spotizerr-ui/src/routes/playlist.tsx | 6 ++--- spotizerr-ui/src/routes/track.tsx | 6 ++--- 16 files changed, 86 insertions(+), 44 deletions(-) create mode 100644 spotizerr-ui/public/save.svg create mode 100644 spotizerr-ui/public/spinner.svg diff --git a/spotizerr-ui/public/save.svg b/spotizerr-ui/public/save.svg new file mode 100644 index 0000000..583bdb5 --- /dev/null +++ b/spotizerr-ui/public/save.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/spotizerr-ui/public/spinner.svg b/spotizerr-ui/public/spinner.svg new file mode 100644 index 0000000..93f03e9 --- /dev/null +++ b/spotizerr-ui/public/spinner.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/spotizerr-ui/src/components/AlbumCard.tsx b/spotizerr-ui/src/components/AlbumCard.tsx index 00026e9..23221c5 100644 --- a/spotizerr-ui/src/components/AlbumCard.tsx +++ b/spotizerr-ui/src/components/AlbumCard.tsx @@ -54,7 +54,7 @@ export const AlbumCard = ({ album, onDownload }: AlbumCardProps) => { ? "Queued." : status === "error" ? Download - : "Downloading..." + : Loading : Download } diff --git a/spotizerr-ui/src/components/SearchResultCard.tsx b/spotizerr-ui/src/components/SearchResultCard.tsx index 9add427..9b1c2b7 100644 --- a/spotizerr-ui/src/components/SearchResultCard.tsx +++ b/spotizerr-ui/src/components/SearchResultCard.tsx @@ -65,7 +65,7 @@ export const SearchResultCard = ({ id, name, subtitle, imageUrl, type, onDownloa ? "Queued." : status === "error" ? Download - : "Downloading..." 
+ : Loading : Download } diff --git a/spotizerr-ui/src/components/config/AccountsTab.tsx b/spotizerr-ui/src/components/config/AccountsTab.tsx index c8297c7..f0aa3c3 100644 --- a/spotizerr-ui/src/components/config/AccountsTab.tsx +++ b/spotizerr-ui/src/components/config/AccountsTab.tsx @@ -174,8 +174,13 @@ export function AccountsTab() { type="submit" disabled={addMutation.isPending} className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" + title="Save Account" > - {addMutation.isPending ? "Saving..." : "Save Account"} + {addMutation.isPending ? ( + Saving + ) : ( + Save + )}
@@ -359,7 +364,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { type="number" min="1" {...register("retryDelaySeconds")} - className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" + className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" />
@@ -369,7 +374,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { type="number" min="0" {...register("retryDelayIncrease")} - className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" + className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" />
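Taken together, `retryDelaySeconds` and `retryDelayIncrease` (with the max-attempts setting) describe a linear back-off between retries: the first retry waits the initial delay, and each later retry waits that much longer. A minimal sketch of that schedule, assuming the linear interpretation given in the configuration docs later in this series — `retry_delays` is an illustrative helper, not a function from the Spotizerr codebase:

```python
# Hypothetical helper illustrating the linear retry back-off implied by the
# "Initial Retry Delay" and "Retry Delay Increase" settings; not app code.
def retry_delays(max_attempts: int, initial_delay: float, delay_increase: float):
    """Yield the wait in seconds before each retry attempt."""
    for attempt in range(max_attempts):
        yield initial_delay + attempt * delay_increase


# Example: 3 attempts, 5s initial delay, 5s increase per failure.
print(list(retry_delays(3, 5.0, 5.0)))  # [5.0, 10.0, 15.0]
```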
diff --git a/spotizerr-ui/src/components/config/FormattingTab.tsx b/spotizerr-ui/src/components/config/FormattingTab.tsx index f1f0a8f..a86bc14 100644 --- a/spotizerr-ui/src/components/config/FormattingTab.tsx +++ b/spotizerr-ui/src/components/config/FormattingTab.tsx @@ -88,8 +88,8 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) { queryClient.invalidateQueries({ queryKey: ["config"] }); }, onError: (error) => { - console.error("Failed to save formatting settings:", error.message); - toast.error(`Failed to save settings: ${error.message}`); + console.error("Failed to save formatting settings:", (error as any).message); + toast.error(`Failed to save settings: ${(error as any).message}`); }, }); @@ -131,8 +131,13 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) { type="submit" disabled={mutation.isPending} className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" + title="Save Formatting Settings" > - {mutation.isPending ? "Saving..." : "Save Formatting Settings"} + {mutation.isPending ? ( + Saving + ) : ( + Save + )} diff --git a/spotizerr-ui/src/components/config/GeneralTab.tsx b/spotizerr-ui/src/components/config/GeneralTab.tsx index b3cc430..220f316 100644 --- a/spotizerr-ui/src/components/config/GeneralTab.tsx +++ b/spotizerr-ui/src/components/config/GeneralTab.tsx @@ -83,8 +83,13 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro type="submit" disabled={mutation.isPending} className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" + title="Save General Settings" > - {mutation.isPending ? "Saving..." : "Save General Settings"} + {mutation.isPending ? ( + Saving + ) : ( + Save + )} diff --git a/spotizerr-ui/src/components/config/ProfileTab.tsx b/spotizerr-ui/src/components/config/ProfileTab.tsx index 614ee35..a1cabfa 100644 --- a/spotizerr-ui/src/components/config/ProfileTab.tsx +++ b/spotizerr-ui/src/components/config/ProfileTab.tsx @@ -126,7 +126,7 @@ export function ProfileTab() {

-
-
@@ -119,7 +124,7 @@ function WebhookForm() { queryClient.invalidateQueries({ queryKey: ["webhookConfig"] }); }, onError: (e) => { - toast.error(`Failed to save: ${e.message}`); + toast.error(`Failed to save: ${(e as any).message}`); }, }); @@ -128,7 +133,7 @@ function WebhookForm() { onSuccess: () => { // No toast needed }, - onError: (e) => toast.error(`Webhook test failed: ${e.message}`), + onError: (e) => toast.error(`Webhook test failed: ${(e as any).message}`), }); useEffect(() => { @@ -147,8 +152,13 @@ function WebhookForm() { type="submit" disabled={mutation.isPending} className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" + title="Save Webhook" > - {mutation.isPending ? "Saving..." : "Save Webhook"} + {mutation.isPending ? ( Saving ) : ( Save )} diff --git a/spotizerr-ui/src/components/config/UserManagementTab.tsx index 6b4cc8c..0b11795 100644 --- a/spotizerr-ui/src/components/config/UserManagementTab.tsx +++ b/spotizerr-ui/src/components/config/UserManagementTab.tsx @@ -252,7 +252,7 @@ export function UserManagementTab() { errors.email ? "border-error focus:border-error" : "border-input-border dark:border-input-border-dark focus:border-primary" - } bg-input-background dark:bg-input-background-dark text-content-primary dark:text-content-primary-dark focus:outline-none focus:ring-2 focus:ring-primary/20`} placeholder="Enter email (optional)" disabled={isCreating} /> @@ -302,15 +302,13 @@ export function UserManagementTab() { <button @@ -474,14 +472,12 @@ type="submit" disabled={isResettingPassword} className="px-4 py-2 bg-primary hover:bg-primary-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2" + title="Save Password" > {isResettingPassword ? ( - <>
- Resetting... - + Saving ) : ( - "Reset Password" + Save )}
diff --git a/spotizerr-ui/src/components/config/WatchTab.tsx b/spotizerr-ui/src/components/config/WatchTab.tsx index 2722293..9b83ab4 100644 --- a/spotizerr-ui/src/components/config/WatchTab.tsx +++ b/spotizerr-ui/src/components/config/WatchTab.tsx @@ -181,8 +181,13 @@ export function WatchTab() { type="submit" disabled={mutation.isPending || !!validationError} className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" + title="Save Watch Settings" > - {mutation.isPending ? "Saving..." : "Save Watch Settings"} + {mutation.isPending ? ( + Saving + ) : ( + Save + )} diff --git a/spotizerr-ui/src/routes/album.tsx b/spotizerr-ui/src/routes/album.tsx index 992e844..4cc1f6e 100644 --- a/spotizerr-ui/src/routes/album.tsx +++ b/spotizerr-ui/src/routes/album.tsx @@ -205,7 +205,7 @@ export const Album = () => { ? "Queued." : albumStatus === "error" ? "Download Album" - : "Downloading..." + : Loading : "Download Album"} diff --git a/spotizerr-ui/src/routes/artist.tsx b/spotizerr-ui/src/routes/artist.tsx index ac75c34..89c9f2a 100644 --- a/spotizerr-ui/src/routes/artist.tsx +++ b/spotizerr-ui/src/routes/artist.tsx @@ -300,7 +300,7 @@ export const Artist = () => { ? artistStatus === "queued" ? "Queued." : artistStatus === "downloading" - ? "Downloading..." + ? Loading : <> Download All @@ -361,7 +361,7 @@ export const Artist = () => { ? "Queued." : trackStatuses[track.id] === "error" ? "Download" - : "Downloading..." + : Loading : "Download"} diff --git a/spotizerr-ui/src/routes/playlist.tsx b/spotizerr-ui/src/routes/playlist.tsx index 93096b5..7fdfe7b 100644 --- a/spotizerr-ui/src/routes/playlist.tsx +++ b/spotizerr-ui/src/routes/playlist.tsx @@ -239,7 +239,7 @@ export const Playlist = () => { ? "Queued." : playlistStatus === "error" ? "Download All" - : "Downloading..." + : Loading : "Download All"} {settings?.watch?.enabled && ( @@ -264,7 +264,7 @@ export const Playlist = () => { {/* Tracks Section */}
-
+

Tracks

{tracks.length > 0 && ( @@ -335,7 +335,7 @@ export const Playlist = () => { ? "Queued." : trackStatuses[track.id] === "error" ? Download - : "Downloading..." + : Loading : Download } diff --git a/spotizerr-ui/src/routes/track.tsx b/spotizerr-ui/src/routes/track.tsx index 387f7e4..9ee47e1 100644 --- a/spotizerr-ui/src/routes/track.tsx +++ b/spotizerr-ui/src/routes/track.tsx @@ -174,7 +174,7 @@ export const Track = () => { style={{ width: `${track.popularity}%` }} >
- + {track.popularity}%
@@ -193,14 +193,14 @@ export const Track = () => { ? "Queued." : trackStatus === "error" ? "Download" - : "Downloading..." + : Loading : "Download"} From fd9cf52170b957fe72d1ec0ec37320ca5e78cbf1 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Mon, 25 Aug 2025 07:46:51 -0600 Subject: [PATCH 14/32] fix: consider spotify's upc padding when spo->dee --- docs/user/configuration.md | 45 ++++++++++++++++++++++++++++---------- requirements.txt | 2 +- 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/docs/user/configuration.md b/docs/user/configuration.md index 4d71ed5..799a121 100644 --- a/docs/user/configuration.md +++ b/docs/user/configuration.md @@ -4,18 +4,39 @@ See also: [Environment variables](environment.md) Open Configuration in the web UI. Tabs: -- General (admin) - - App version, basic info -- Downloads (admin) - - Concurrent downloads, retry behavior - - Quality/format defaults and conversion - - Real-time mode: aligns download time with track length -- Formatting (admin) - - File/folder naming patterns (examples) - - `%artist%/%album%/%tracknum%. %title%` - - `%ar_album%/%album% (%year%)/%title%` -- Accounts (admin) - - Spotify: use `spotizerr-auth` to add credentials +# General + - **Default service:** Right now, the only one available is Spotify. Deezer-only mode coming soon! + - **Active accounts:** Accounts to use for API-related things with the respective service. + +# Downloads + - **Max Concurrent Downloads:** Sets the maximum number of download tasks that can run simultaneously. + - **Real-Time Downloading:** Matches the download duration to the actual track length, helping to avoid rate limits. + - **Real-Time Multiplier:** When real-time downloading is enabled, this multiplier adjusts how much faster (or slower) the download occurs compared to the track length. + - **Download Fallback:** Download from Deezer with a fallback to Spotify. + - **Recursive Quality:** When download fallback is enabled, try lower qualities if the specified Deezer quality is not available. + - **Separate Tracks by User:** When multi-user mode is enabled, separate downloads into individual users' folders. + - **Spotify/Deezer Quality:** Quality to request from the service being used to download (account tier limitations apply). + - **Convert to Format:** Format to convert every downloaded file to. + - **Bitrate:** When conversion to a lossy format is enabled, this sets the bitrate used for the transcoding. + - **Max Retry Attempts:** Maximum number of automatic retries to perform. + - **Initial Retry Delay:** Seconds between the first failure and the first retry. + - **Retry Delay Increase:** Seconds to add to the delay between retries after each failure. + + +# Formatting +- **Custom Directory Format:** Choose which metadata fields determine how directories are named. +- **Custom Track Format:** Choose which metadata fields determine how individual track files are named. +- **Track Number Padding:** Enable or disable leading zeros for number-based metadata (e.g., `%tracknum%`, `%playlistnum%`). +- **Track Number Padding Width:** Sets how many digits to use for padded numbers. For example: + + * `01. Track` (width: 2) + * `001. Track` (width: 3) +- **Artist Separator:** When a track has multiple artists (or album artists), this string will be used to separate them in both metadata and file/directory naming. +- **Save Album Cover:** Whether to save the cover as a separate `cover.jpg` file or not.
+- **Use Spotify Metadata in Deezer Fallback:** Whether to use Spotify metadata when downloading from Deezer or not. It generally is better to leave this enabled, since it has no added API cost and Spotify's metadata tends to be better. + +# Accounts (admin) + - **Spotify:** use `spotizerr-auth` to add credentials. - Deezer ARL (optional): - Chrome/Edge: DevTools → Application → Cookies → https://www.deezer.com → copy `arl` - Firefox: DevTools → Storage → Cookies → https://www.deezer.com → copy `arl` diff --git a/requirements.txt b/requirements.txt index 2ea3f67..7f0f7b6 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==2.7.6 +deezspot-spotizerr==2.7.7 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 From dc4a4f506f667c32d7009e0fe21add73b2b5d39d Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Mon, 25 Aug 2025 07:50:00 -0600 Subject: [PATCH 15/32] chore(docs): update configuration docs --- docs/user/configuration.md | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/docs/user/configuration.md b/docs/user/configuration.md index 799a121..3e24681 100644 --- a/docs/user/configuration.md +++ b/docs/user/configuration.md @@ -35,31 +35,32 @@ Open Configuration in the web UI. Tabs: - **Save Album Cover:** Whether to save the cover as a separate `cover.jpg` file or not. - **Use Spotify Metadata in Deezer Fallback:** Whether to use Spotify metadata when downloading from Deezer or not. It generally is better to leave this enabled, since it has no added API cost and Spotify's metadata tends to be better. -# Accounts (admin) +# Accounts - **Spotify:** use `spotizerr-auth` to add credentials. - - Deezer ARL (optional): - - Chrome/Edge: DevTools → Application → Cookies → https://www.deezer.com → copy `arl` - - Firefox: DevTools → Storage → Cookies → https://www.deezer.com → copy `arl` - - Paste ARL in Accounts - - Select main account when multiple exist -- Watch (admin) - - Enable/disable watch system - - Set check intervals - - Manually trigger checks (artists/playlists) -- Server (admin) + - **Deezer ARL (optional but recommended):** + - Chrome/Edge: DevTools → Application → Cookies → https://www.deezer.com → copy `arl`. + - Firefox: DevTools → Storage → Cookies → https://www.deezer.com → copy `arl`. + - Paste ARL in Accounts. + - Select main account when multiple exist. + +# Watch + - Enable/disable watch system. + - Set check intervals. + - Set check chunk size. + - Set album groups to consider for watched artists. 
+ + +# Server - System info and advanced settings -- Profile (all users when auth is enabled) + +# Profile - Change password, view role and email -Quality formats (reference): +# Quality formats (reference) - Spotify: OGG 96k/160k/320k (320k requires Premium) - Deezer: MP3 128k/320k (320k may require Premium), FLAC (Premium) - Conversion: MP3/FLAC/AAC/OGG/OPUS/WAV/ALAC with custom bitrate -Fallback system: -- Configure primary and fallback services -- Automatically switches if primary fails (useful for geo/account limits) - -Notes: +# Notes - Explicit content filter applies in pages (e.g., hides explicit tracks on album/playlist views) - Watch system must be enabled before adding items \ No newline at end of file From c54a441228bdd36c80a483ceaa1b29da97fd4287 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Mon, 25 Aug 2025 08:03:59 -0600 Subject: [PATCH 16/32] feat: implement generic oauth provider --- docs/user/environment.md | 18 ++ routes/auth/auth.py | 172 +++++++-------- routes/auth/sso.py | 464 +++++++++++++++++++++++++++++++++------ 3 files changed, 489 insertions(+), 165 deletions(-) diff --git a/docs/user/environment.md b/docs/user/environment.md index 7d0df4c..1d913f1 100644 --- a/docs/user/environment.md +++ b/docs/user/environment.md @@ -30,6 +30,24 @@ Location: project `.env`. Minimal reference for server admins. - FRONTEND_URL: Public UI base (e.g. `http://127.0.0.1:7171`) - GOOGLE_CLIENT_ID / GOOGLE_CLIENT_SECRET - GITHUB_CLIENT_ID / GITHUB_CLIENT_SECRET +- Custom/Generic OAuth (set all to enable a custom provider): + - CUSTOM_SSO_CLIENT_ID / CUSTOM_SSO_CLIENT_SECRET + - CUSTOM_SSO_AUTHORIZATION_ENDPOINT + - CUSTOM_SSO_TOKEN_ENDPOINT + - CUSTOM_SSO_USERINFO_ENDPOINT + - CUSTOM_SSO_SCOPE: Comma-separated scopes (optional) + - CUSTOM_SSO_NAME: Internal provider name (optional, default `custom`) + - CUSTOM_SSO_DISPLAY_NAME: UI name (optional, default `Custom`) +- Multiple Custom/Generic OAuth providers (up to 10): + - For provider index `i` (1..10), set: + - CUSTOM_SSO_CLIENT_ID_i / CUSTOM_SSO_CLIENT_SECRET_i + - CUSTOM_SSO_AUTHORIZATION_ENDPOINT_i + - CUSTOM_SSO_TOKEN_ENDPOINT_i + - CUSTOM_SSO_USERINFO_ENDPOINT_i + - CUSTOM_SSO_SCOPE_i (optional) + - CUSTOM_SSO_NAME_i (optional, default `custom{i}`) + - CUSTOM_SSO_DISPLAY_NAME_i (optional, default `Custom {i}`) + - Login URLs will be `/api/auth/sso/login/custom/i` and callback `/api/auth/sso/callback/custom/i`. ### Tips - If running behind a reverse proxy, set `FRONTEND_URL` and `SSO_BASE_REDIRECT_URI` to public URLs. 
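To make the indexed custom-provider variables concrete, a hypothetical `.env` fragment for one generic OIDC-style provider might look like the following; every endpoint, name, and secret below is a placeholder, not a value from the project:

```env
# Hypothetical example — substitute your identity provider's real values.
CUSTOM_SSO_CLIENT_ID_1=spotizerr
CUSTOM_SSO_CLIENT_SECRET_1=change-me
CUSTOM_SSO_AUTHORIZATION_ENDPOINT_1=https://sso.example.com/oauth2/authorize
CUSTOM_SSO_TOKEN_ENDPOINT_1=https://sso.example.com/oauth2/token
CUSTOM_SSO_USERINFO_ENDPOINT_1=https://sso.example.com/oauth2/userinfo
CUSTOM_SSO_SCOPE_1=openid,profile,email
CUSTOM_SSO_NAME_1=example
CUSTOM_SSO_DISPLAY_NAME_1=Example SSO
```

With this configuration, the documented URL scheme gives a login URL of `/api/auth/sso/login/custom/1` and a callback of `/api/auth/sso/callback/custom/1`.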
diff --git a/routes/auth/auth.py b/routes/auth/auth.py index fa41290..c29fb77 100644 --- a/routes/auth/auth.py +++ b/routes/auth/auth.py @@ -1,4 +1,4 @@ -from fastapi import APIRouter, HTTPException, Depends, Request +from fastapi import APIRouter, HTTPException, Depends from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from pydantic import BaseModel from typing import Optional, List @@ -14,6 +14,7 @@ security = HTTPBearer(auto_error=False) # Include SSO sub-router try: from .sso import router as sso_router + router.include_router(sso_router, tags=["sso"]) logging.info("SSO sub-router included in auth router") except ImportError as e: @@ -34,6 +35,7 @@ class RegisterRequest(BaseModel): class CreateUserRequest(BaseModel): """Admin-only request to create users when registration is disabled""" + username: str password: str email: Optional[str] = None @@ -42,17 +44,20 @@ class CreateUserRequest(BaseModel): class RoleUpdateRequest(BaseModel): """Request to update user role""" + role: str class PasswordChangeRequest(BaseModel): """Request to change user password""" + current_password: str new_password: str class AdminPasswordResetRequest(BaseModel): """Request for admin to reset user password""" + new_password: str @@ -87,20 +92,20 @@ class AuthStatusResponse(BaseModel): # Dependency to get current user async def get_current_user( - credentials: HTTPAuthorizationCredentials = Depends(security) + credentials: HTTPAuthorizationCredentials = Depends(security), ) -> Optional[User]: """Get current user from JWT token""" if not AUTH_ENABLED: # When auth is disabled, return a mock admin user return User(username="system", role="admin") - + if not credentials: return None - + payload = token_manager.verify_token(credentials.credentials) if not payload: return None - + user = user_manager.get_user(payload["username"]) return user @@ -109,25 +114,22 @@ async def require_auth(current_user: User = Depends(get_current_user)) -> User: """Require authentication - raises HTTPException if not authenticated""" if not AUTH_ENABLED: return User(username="system", role="admin") - + if not current_user: raise HTTPException( status_code=401, detail="Authentication required", headers={"WWW-Authenticate": "Bearer"}, ) - + return current_user async def require_admin(current_user: User = Depends(require_auth)) -> User: """Require admin role - raises HTTPException if not admin""" if current_user.role != "admin": - raise HTTPException( - status_code=403, - detail="Admin access required" - ) - + raise HTTPException(status_code=403, detail="Admin access required") + return current_user @@ -138,24 +140,33 @@ async def auth_status(current_user: Optional[User] = Depends(get_current_user)): # Check if SSO is enabled and get available providers sso_enabled = False sso_providers = [] - + try: from . 
import sso + sso_enabled = sso.SSO_ENABLED and AUTH_ENABLED if sso.google_sso: sso_providers.append("google") if sso.github_sso: sso_providers.append("github") + if getattr(sso, "custom_sso", None): + sso_providers.append("custom") + if getattr(sso, "custom_sso_providers", None): + if ( + len(getattr(sso, "custom_sso_providers", {})) > 0 + and "custom" not in sso_providers + ): + sso_providers.append("custom") except ImportError: pass # SSO module not available - + return AuthStatusResponse( auth_enabled=AUTH_ENABLED, authenticated=current_user is not None, user=UserResponse(**current_user.to_public_dict()) if current_user else None, registration_enabled=AUTH_ENABLED and not DISABLE_REGISTRATION, sso_enabled=sso_enabled, - sso_providers=sso_providers + sso_providers=sso_providers, ) @@ -163,23 +174,16 @@ async def auth_status(current_user: Optional[User] = Depends(get_current_user)): async def login(request: LoginRequest): """Authenticate user and return access token""" if not AUTH_ENABLED: - raise HTTPException( - status_code=400, - detail="Authentication is disabled" - ) - + raise HTTPException(status_code=400, detail="Authentication is disabled") + user = user_manager.authenticate_user(request.username, request.password) if not user: - raise HTTPException( - status_code=401, - detail="Invalid username or password" - ) - + raise HTTPException(status_code=401, detail="Invalid username or password") + access_token = token_manager.create_token(user) - + return LoginResponse( - access_token=access_token, - user=UserResponse(**user.to_public_dict()) + access_token=access_token, user=UserResponse(**user.to_public_dict()) ) @@ -187,31 +191,28 @@ async def login(request: LoginRequest): async def register(request: RegisterRequest): """Register a new user""" if not AUTH_ENABLED: - raise HTTPException( - status_code=400, - detail="Authentication is disabled" - ) - + raise HTTPException(status_code=400, detail="Authentication is disabled") + if DISABLE_REGISTRATION: raise HTTPException( status_code=403, - detail="Public registration is disabled. Contact an administrator to create an account." + detail="Public registration is disabled. 
Contact an administrator to create an account.", ) - + # Check if this is the first user (should be admin) existing_users = user_manager.list_users() role = "admin" if len(existing_users) == 0 else "user" - + success, message = user_manager.create_user( username=request.username, password=request.password, email=request.email, - role=role + role=role, ) - + if not success: raise HTTPException(status_code=400, detail=message) - + return MessageResponse(message=message) @@ -233,70 +234,57 @@ async def list_users(current_user: User = Depends(require_admin)): async def delete_user(username: str, current_user: User = Depends(require_admin)): """Delete a user (admin only)""" if username == current_user.username: - raise HTTPException( - status_code=400, - detail="Cannot delete your own account" - ) - + raise HTTPException(status_code=400, detail="Cannot delete your own account") + success, message = user_manager.delete_user(username) if not success: raise HTTPException(status_code=404, detail=message) - + return MessageResponse(message=message) @router.put("/users/{username}/role", response_model=MessageResponse) async def update_user_role( - username: str, - request: RoleUpdateRequest, - current_user: User = Depends(require_admin) + username: str, + request: RoleUpdateRequest, + current_user: User = Depends(require_admin), ): """Update user role (admin only)""" if request.role not in ["user", "admin"]: - raise HTTPException( - status_code=400, - detail="Role must be 'user' or 'admin'" - ) - + raise HTTPException(status_code=400, detail="Role must be 'user' or 'admin'") + if username == current_user.username: - raise HTTPException( - status_code=400, - detail="Cannot change your own role" - ) - + raise HTTPException(status_code=400, detail="Cannot change your own role") + success, message = user_manager.update_user_role(username, request.role) if not success: raise HTTPException(status_code=404, detail=message) - + return MessageResponse(message=message) @router.post("/users/create", response_model=MessageResponse) -async def create_user_admin(request: CreateUserRequest, current_user: User = Depends(require_admin)): +async def create_user_admin( + request: CreateUserRequest, current_user: User = Depends(require_admin) +): """Create a new user (admin only) - for use when registration is disabled""" if not AUTH_ENABLED: - raise HTTPException( - status_code=400, - detail="Authentication is disabled" - ) - + raise HTTPException(status_code=400, detail="Authentication is disabled") + # Validate role if request.role not in ["user", "admin"]: - raise HTTPException( - status_code=400, - detail="Role must be 'user' or 'admin'" - ) - + raise HTTPException(status_code=400, detail="Role must be 'user' or 'admin'") + success, message = user_manager.create_user( username=request.username, password=request.password, email=request.email, - role=request.role + role=request.role, ) - + if not success: raise HTTPException(status_code=400, detail=message) - + return MessageResponse(message=message) @@ -309,22 +297,18 @@ async def get_profile(current_user: User = Depends(require_auth)): @router.put("/profile/password", response_model=MessageResponse) async def change_password( - request: PasswordChangeRequest, - current_user: User = Depends(require_auth) + request: PasswordChangeRequest, current_user: User = Depends(require_auth) ): """Change current user's password""" if not AUTH_ENABLED: - raise HTTPException( - status_code=400, - detail="Authentication is disabled" - ) - + raise HTTPException(status_code=400, 
detail="Authentication is disabled") + success, message = user_manager.change_password( username=current_user.username, current_password=request.current_password, - new_password=request.new_password + new_password=request.new_password, ) - + if not success: # Determine appropriate HTTP status code based on error message if "Current password is incorrect" in message: @@ -333,9 +317,9 @@ async def change_password( status_code = 404 else: status_code = 400 - + raise HTTPException(status_code=status_code, detail=message) - + return MessageResponse(message=message) @@ -343,30 +327,26 @@ async def change_password( async def admin_reset_password( username: str, request: AdminPasswordResetRequest, - current_user: User = Depends(require_admin) + current_user: User = Depends(require_admin), ): """Admin reset user password (admin only)""" if not AUTH_ENABLED: - raise HTTPException( - status_code=400, - detail="Authentication is disabled" - ) - + raise HTTPException(status_code=400, detail="Authentication is disabled") + success, message = user_manager.admin_reset_password( - username=username, - new_password=request.new_password + username=username, new_password=request.new_password ) - + if not success: # Determine appropriate HTTP status code based on error message if "User not found" in message: status_code = 404 else: status_code = 400 - + raise HTTPException(status_code=status_code, detail=message) - + return MessageResponse(message=message) -# Note: SSO routes are included in the main app, not here to avoid circular imports \ No newline at end of file +# Note: SSO routes are included in the main app, not here to avoid circular imports diff --git a/routes/auth/sso.py b/routes/auth/sso.py index f7ae7e5..f5ad728 100644 --- a/routes/auth/sso.py +++ b/routes/auth/sso.py @@ -1,17 +1,19 @@ """ SSO (Single Sign-On) implementation for Google and GitHub authentication """ + import os import logging from typing import Optional, Dict, Any from datetime import datetime, timedelta -from fastapi import APIRouter, Request, HTTPException, Depends +from fastapi import APIRouter, Request, HTTPException from fastapi.responses import RedirectResponse from fastapi_sso.sso.google import GoogleSSO from fastapi_sso.sso.github import GithubSSO from fastapi_sso.sso.base import OpenID from pydantic import BaseModel +from fastapi_sso.sso.generic import create_provider from . 
import user_manager, token_manager, User, AUTH_ENABLED, DISABLE_REGISTRATION @@ -25,11 +27,14 @@ GOOGLE_CLIENT_ID = os.getenv("GOOGLE_CLIENT_ID") GOOGLE_CLIENT_SECRET = os.getenv("GOOGLE_CLIENT_SECRET") GITHUB_CLIENT_ID = os.getenv("GITHUB_CLIENT_ID") GITHUB_CLIENT_SECRET = os.getenv("GITHUB_CLIENT_SECRET") -SSO_BASE_REDIRECT_URI = os.getenv("SSO_BASE_REDIRECT_URI", "http://localhost:7171/api/auth/sso/callback") +SSO_BASE_REDIRECT_URI = os.getenv( + "SSO_BASE_REDIRECT_URI", "http://localhost:7171/api/auth/sso/callback" +) # Initialize SSO providers google_sso = None github_sso = None +custom_sso = None if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET: google_sso = GoogleSSO( @@ -47,6 +52,154 @@ if GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET: allow_insecure_http=True, # Set to False in production with HTTPS ) +# Custom/Generic OAuth provider configuration +CUSTOM_SSO_CLIENT_ID = os.getenv("CUSTOM_SSO_CLIENT_ID") +CUSTOM_SSO_CLIENT_SECRET = os.getenv("CUSTOM_SSO_CLIENT_SECRET") +CUSTOM_SSO_AUTHORIZATION_ENDPOINT = os.getenv("CUSTOM_SSO_AUTHORIZATION_ENDPOINT") +CUSTOM_SSO_TOKEN_ENDPOINT = os.getenv("CUSTOM_SSO_TOKEN_ENDPOINT") +CUSTOM_SSO_USERINFO_ENDPOINT = os.getenv("CUSTOM_SSO_USERINFO_ENDPOINT") +CUSTOM_SSO_SCOPE = os.getenv("CUSTOM_SSO_SCOPE") # comma-separated list +CUSTOM_SSO_NAME = os.getenv("CUSTOM_SSO_NAME", "custom") +CUSTOM_SSO_DISPLAY_NAME = os.getenv("CUSTOM_SSO_DISPLAY_NAME", "Custom") + + +def _default_custom_response_convertor( + userinfo: Dict[str, Any], _client=None +) -> OpenID: + """Best-effort convertor from generic userinfo to OpenID.""" + user_id = ( + userinfo.get("sub") + or userinfo.get("id") + or userinfo.get("user_id") + or userinfo.get("uid") + or userinfo.get("uuid") + ) + email = userinfo.get("email") + display_name = ( + userinfo.get("name") + or userinfo.get("preferred_username") + or userinfo.get("login") + or email + or (str(user_id) if user_id is not None else None) + ) + picture = userinfo.get("picture") or userinfo.get("avatar_url") + if not user_id and email: + user_id = email + return OpenID( + id=str(user_id) if user_id is not None else "", + email=email, + display_name=display_name, + picture=picture, + provider=CUSTOM_SSO_NAME, + ) + + +if all( + [ + CUSTOM_SSO_CLIENT_ID, + CUSTOM_SSO_CLIENT_SECRET, + CUSTOM_SSO_AUTHORIZATION_ENDPOINT, + CUSTOM_SSO_TOKEN_ENDPOINT, + CUSTOM_SSO_USERINFO_ENDPOINT, + ] +): + discovery = { + "authorization_endpoint": CUSTOM_SSO_AUTHORIZATION_ENDPOINT, + "token_endpoint": CUSTOM_SSO_TOKEN_ENDPOINT, + "userinfo_endpoint": CUSTOM_SSO_USERINFO_ENDPOINT, + } + default_scope = ( + [s.strip() for s in CUSTOM_SSO_SCOPE.split(",") if s.strip()] + if CUSTOM_SSO_SCOPE + else None + ) + CustomProvider = create_provider( + name=CUSTOM_SSO_NAME, + discovery_document=discovery, + response_convertor=_default_custom_response_convertor, + default_scope=default_scope, + ) + custom_sso = CustomProvider( + client_id=CUSTOM_SSO_CLIENT_ID, + client_secret=CUSTOM_SSO_CLIENT_SECRET, + redirect_uri=f"{SSO_BASE_REDIRECT_URI}/custom", + allow_insecure_http=True, # Set to False in production with HTTPS + ) + +# Support multiple indexed custom providers (CUSTOM_*_i), up to 10 +custom_sso_providers: Dict[int, Dict[str, Any]] = {} + + +def _make_response_convertor(provider_name: str): + def _convert(userinfo: Dict[str, Any], _client=None) -> OpenID: + user_id = ( + userinfo.get("sub") + or userinfo.get("id") + or userinfo.get("user_id") + or userinfo.get("uid") + or userinfo.get("uuid") + ) + email = userinfo.get("email") + display_name = ( + 
userinfo.get("name") + or userinfo.get("preferred_username") + or userinfo.get("login") + or email + or (str(user_id) if user_id is not None else None) + ) + picture = userinfo.get("picture") or userinfo.get("avatar_url") + if not user_id and email: + user_id = email + return OpenID( + id=str(user_id) if user_id is not None else "", + email=email, + display_name=display_name, + picture=picture, + provider=provider_name, + ) + + return _convert + + +for i in range(1, 11): + cid = os.getenv(f"CUSTOM_SSO_CLIENT_ID_{i}") + csecret = os.getenv(f"CUSTOM_SSO_CLIENT_SECRET_{i}") + auth_ep = os.getenv(f"CUSTOM_SSO_AUTHORIZATION_ENDPOINT_{i}") + token_ep = os.getenv(f"CUSTOM_SSO_TOKEN_ENDPOINT_{i}") + userinfo_ep = os.getenv(f"CUSTOM_SSO_USERINFO_ENDPOINT_{i}") + scope_raw = os.getenv(f"CUSTOM_SSO_SCOPE_{i}") + name_i = os.getenv(f"CUSTOM_SSO_NAME_{i}", f"custom{i}") + display_name_i = os.getenv(f"CUSTOM_SSO_DISPLAY_NAME_{i}", f"Custom {i}") + + if all([cid, csecret, auth_ep, token_ep, userinfo_ep]): + discovery_i = { + "authorization_endpoint": auth_ep, + "token_endpoint": token_ep, + "userinfo_endpoint": userinfo_ep, + } + default_scope_i = ( + [s.strip() for s in scope_raw.split(",") if s.strip()] + if scope_raw + else None + ) + ProviderClass = create_provider( + name=name_i, + discovery_document=discovery_i, + response_convertor=_make_response_convertor(name_i), + default_scope=default_scope_i, + ) + provider_instance = ProviderClass( + client_id=cid, + client_secret=csecret, + redirect_uri=f"{SSO_BASE_REDIRECT_URI}/custom/{i}", + allow_insecure_http=True, # Set to False in production with HTTPS + ) + custom_sso_providers[i] = { + "sso": provider_instance, + "name": name_i, + "display_name": display_name_i, + } + class MessageResponse(BaseModel): message: str @@ -70,21 +223,25 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User: # Generate username from email or use provider ID email = openid.email if not email: - raise HTTPException(status_code=400, detail="Email is required for SSO authentication") - + raise HTTPException( + status_code=400, detail="Email is required for SSO authentication" + ) + # Use email prefix as username, fallback to provider + id username = email.split("@")[0] if not username: username = f"{provider}_{openid.id}" - + # Check if user already exists by email existing_user = None users = user_manager.load_users() for user_data in users.values(): if user_data.get("email") == email: - existing_user = User(**{k: v for k, v in user_data.items() if k != "password_hash"}) + existing_user = User( + **{k: v for k, v in user_data.items() if k != "password_hash"} + ) break - + if existing_user: # Update last login for existing user (always allowed) users[existing_user.username]["last_login"] = datetime.utcnow().isoformat() @@ -96,10 +253,10 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User: # Check if registration is disabled before creating new user if DISABLE_REGISTRATION: raise HTTPException( - status_code=403, - detail="Registration is disabled. Contact an administrator to create an account." + status_code=403, + detail="Registration is disabled. 
Contact an administrator to create an account.", ) - + # Create new user # Ensure username is unique counter = 1 @@ -107,20 +264,20 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User: while username in users: username = f"{original_username}{counter}" counter += 1 - + user = User( username=username, email=email, - role="user" # Default role for SSO users + role="user", # Default role for SSO users ) - + users[username] = { **user.to_dict(), "sso_provider": provider, "sso_id": openid.id, - "password_hash": None # SSO users don't have passwords + "password_hash": None, # SSO users don't have passwords } - + user_manager.save_users(users) logger.info(f"Created SSO user: {username} via {provider}") return user @@ -130,27 +287,51 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User: async def sso_status(): """Get SSO status and available providers""" providers = [] - + if google_sso: - providers.append(SSOProvider( - name="google", - display_name="Google", - enabled=True, - login_url="/api/auth/sso/login/google" - )) - + providers.append( + SSOProvider( + name="google", + display_name="Google", + enabled=True, + login_url="/api/auth/sso/login/google", + ) + ) + if github_sso: - providers.append(SSOProvider( - name="github", - display_name="GitHub", - enabled=True, - login_url="/api/auth/sso/login/github" - )) - + providers.append( + SSOProvider( + name="github", + display_name="GitHub", + enabled=True, + login_url="/api/auth/sso/login/github", + ) + ) + + if custom_sso: + providers.append( + SSOProvider( + name="custom", + display_name=CUSTOM_SSO_DISPLAY_NAME, + enabled=True, + login_url="/api/auth/sso/login/custom", + ) + ) + + for idx, cfg in custom_sso_providers.items(): + providers.append( + SSOProvider( + name=cfg["name"], + display_name=cfg.get("display_name", cfg["name"]), + enabled=True, + login_url=f"/api/auth/sso/login/custom/{idx}", + ) + ) + return SSOStatusResponse( sso_enabled=SSO_ENABLED and AUTH_ENABLED, providers=providers, - registration_enabled=not DISABLE_REGISTRATION + registration_enabled=not DISABLE_REGISTRATION, ) @@ -159,12 +340,14 @@ async def google_login(): """Initiate Google SSO login""" if not SSO_ENABLED or not AUTH_ENABLED: raise HTTPException(status_code=400, detail="SSO is disabled") - + if not google_sso: raise HTTPException(status_code=400, detail="Google SSO is not configured") - + async with google_sso: - return await google_sso.get_login_redirect(params={"prompt": "consent", "access_type": "offline"}) + return await google_sso.get_login_redirect( + params={"prompt": "consent", "access_type": "offline"} + ) @router.get("/sso/login/github") @@ -172,37 +355,66 @@ async def github_login(): """Initiate GitHub SSO login""" if not SSO_ENABLED or not AUTH_ENABLED: raise HTTPException(status_code=400, detail="SSO is disabled") - + if not github_sso: raise HTTPException(status_code=400, detail="GitHub SSO is not configured") - + async with github_sso: return await github_sso.get_login_redirect() +@router.get("/sso/login/custom") +async def custom_login(): + """Initiate Custom SSO login""" + if not SSO_ENABLED or not AUTH_ENABLED: + raise HTTPException(status_code=400, detail="SSO is disabled") + + if not custom_sso: + raise HTTPException(status_code=400, detail="Custom SSO is not configured") + + async with custom_sso: + return await custom_sso.get_login_redirect() + + +@router.get("/sso/login/custom/{index}") +async def custom_login_indexed(index: int): + """Initiate indexed Custom SSO login""" + if not SSO_ENABLED or not 
AUTH_ENABLED: + raise HTTPException(status_code=400, detail="SSO is disabled") + + cfg = custom_sso_providers.get(index) + if not cfg: + raise HTTPException( + status_code=400, detail="Custom SSO provider not configured" + ) + + async with cfg["sso"]: + return await cfg["sso"].get_login_redirect() + + @router.get("/sso/callback/google") async def google_callback(request: Request): """Handle Google SSO callback""" if not SSO_ENABLED or not AUTH_ENABLED: raise HTTPException(status_code=400, detail="SSO is disabled") - + if not google_sso: raise HTTPException(status_code=400, detail="Google SSO is not configured") - + try: async with google_sso: openid = await google_sso.verify_and_process(request) - + # Create or update user user = create_or_update_sso_user(openid, "google") - + # Create JWT token access_token = token_manager.create_token(user) - + # Redirect to frontend with token (you might want to customize this) frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") response = RedirectResponse(url=f"{frontend_url}?token={access_token}") - + # Also set as HTTP-only cookie response.set_cookie( key="access_token", @@ -210,18 +422,18 @@ async def google_callback(request: Request): httponly=True, secure=False, # Set to True in production with HTTPS samesite="lax", - max_age=timedelta(hours=24).total_seconds() + max_age=timedelta(hours=24).total_seconds(), ) - + return response - + except HTTPException as e: # Handle specific HTTP exceptions (like registration disabled) frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") - error_msg = e.detail if hasattr(e, 'detail') else "Authentication failed" + error_msg = e.detail if hasattr(e, "detail") else "Authentication failed" logger.warning(f"Google SSO callback error: {error_msg}") return RedirectResponse(url=f"{frontend_url}?error={error_msg}") - + except Exception as e: logger.error(f"Google SSO callback error: {e}") frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") @@ -233,24 +445,24 @@ async def github_callback(request: Request): """Handle GitHub SSO callback""" if not SSO_ENABLED or not AUTH_ENABLED: raise HTTPException(status_code=400, detail="SSO is disabled") - + if not github_sso: raise HTTPException(status_code=400, detail="GitHub SSO is not configured") - + try: async with github_sso: openid = await github_sso.verify_and_process(request) - + # Create or update user user = create_or_update_sso_user(openid, "github") - + # Create JWT token access_token = token_manager.create_token(user) - + # Redirect to frontend with token (you might want to customize this) frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") response = RedirectResponse(url=f"{frontend_url}?token={access_token}") - + # Also set as HTTP-only cookie response.set_cookie( key="access_token", @@ -258,24 +470,123 @@ async def github_callback(request: Request): httponly=True, secure=False, # Set to True in production with HTTPS samesite="lax", - max_age=timedelta(hours=24).total_seconds() + max_age=timedelta(hours=24).total_seconds(), ) - + return response - + except HTTPException as e: # Handle specific HTTP exceptions (like registration disabled) frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") - error_msg = e.detail if hasattr(e, 'detail') else "Authentication failed" + error_msg = e.detail if hasattr(e, "detail") else "Authentication failed" logger.warning(f"GitHub SSO callback error: {error_msg}") return RedirectResponse(url=f"{frontend_url}?error={error_msg}") - + except Exception as e: 
logger.error(f"GitHub SSO callback error: {e}") frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") return RedirectResponse(url=f"{frontend_url}?error=Authentication failed") +@router.get("/sso/callback/custom") +async def custom_callback(request: Request): + """Handle Custom SSO callback""" + if not SSO_ENABLED or not AUTH_ENABLED: + raise HTTPException(status_code=400, detail="SSO is disabled") + + if not custom_sso: + raise HTTPException(status_code=400, detail="Custom SSO is not configured") + + try: + async with custom_sso: + openid = await custom_sso.verify_and_process(request) + + # Create or update user + user = create_or_update_sso_user(openid, "custom") + + # Create JWT token + access_token = token_manager.create_token(user) + + # Redirect to frontend with token (you might want to customize this) + frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") + response = RedirectResponse(url=f"{frontend_url}?token={access_token}") + + # Also set as HTTP-only cookie + response.set_cookie( + key="access_token", + value=access_token, + httponly=True, + secure=False, # Set to True in production with HTTPS + samesite="lax", + max_age=timedelta(hours=24).total_seconds(), + ) + + return response + + except HTTPException as e: + # Handle specific HTTP exceptions (like registration disabled) + frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") + error_msg = e.detail if hasattr(e, "detail") else "Authentication failed" + logger.warning(f"Custom SSO callback error: {error_msg}") + return RedirectResponse(url=f"{frontend_url}?error={error_msg}") + + except Exception as e: + logger.error(f"Custom SSO callback error: {e}") + frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") + return RedirectResponse(url=f"{frontend_url}?error=Authentication failed") + + +@router.get("/sso/callback/custom/{index}") +async def custom_callback_indexed(request: Request, index: int): + """Handle indexed Custom SSO callback""" + if not SSO_ENABLED or not AUTH_ENABLED: + raise HTTPException(status_code=400, detail="SSO is disabled") + + cfg = custom_sso_providers.get(index) + if not cfg: + raise HTTPException( + status_code=400, detail="Custom SSO provider not configured" + ) + + try: + async with cfg["sso"]: + openid = await cfg["sso"].verify_and_process(request) + + # Create or update user + user = create_or_update_sso_user(openid, cfg["name"]) + + # Create JWT token + access_token = token_manager.create_token(user) + + # Redirect to frontend with token (you might want to customize this) + frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") + response = RedirectResponse(url=f"{frontend_url}?token={access_token}") + + # Also set as HTTP-only cookie + response.set_cookie( + key="access_token", + value=access_token, + httponly=True, + secure=False, # Set to True in production with HTTPS + samesite="lax", + max_age=timedelta(hours=24).total_seconds(), + ) + + return response + + except HTTPException as e: + # Handle specific HTTP exceptions (like registration disabled) + frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") + error_msg = e.detail if hasattr(e, "detail") else "Authentication failed" + logger.warning(f"Custom[{index}] SSO callback error: {error_msg}") + return RedirectResponse(url=f"{frontend_url}?error={error_msg}") + + except Exception as e: + logger.error(f"Custom[{index}] SSO callback error: {e}") + frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") + return 
RedirectResponse(url=f"{frontend_url}?error=Authentication failed") + + @router.post("/sso/unlink/{provider}", response_model=MessageResponse) async def unlink_sso_provider( provider: str, @@ -284,27 +595,42 @@ async def unlink_sso_provider( """Unlink SSO provider from user account""" if not SSO_ENABLED or not AUTH_ENABLED: raise HTTPException(status_code=400, detail="SSO is disabled") - - if provider not in ["google", "github"]: + + available = [] + if google_sso: + available.append("google") + if github_sso: + available.append("github") + if custom_sso: + available.append("custom") + + for cfg in custom_sso_providers.values(): + available.append(cfg["name"]) + + if provider not in available: raise HTTPException(status_code=400, detail="Invalid SSO provider") - + # Get current user from request (avoiding circular imports) from .middleware import require_auth_from_state - + current_user = await require_auth_from_state(request) - + if not current_user.sso_provider: - raise HTTPException(status_code=400, detail="User is not linked to any SSO provider") - + raise HTTPException( + status_code=400, detail="User is not linked to any SSO provider" + ) + if current_user.sso_provider != provider: raise HTTPException(status_code=400, detail=f"User is not linked to {provider}") - + # Update user to remove SSO linkage users = user_manager.load_users() if current_user.username in users: users[current_user.username]["sso_provider"] = None users[current_user.username]["sso_id"] = None user_manager.save_users(users) - logger.info(f"Unlinked SSO provider {provider} from user {current_user.username}") - - return MessageResponse(message=f"SSO provider {provider} unlinked successfully") \ No newline at end of file + logger.info( + f"Unlinked SSO provider {provider} from user {current_user.username}" + ) + + return MessageResponse(message=f"SSO provider {provider} unlinked successfully") From da982e44b858da9ad12f127995ba20d1ddffee34 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Tue, 26 Aug 2025 16:55:00 +0200 Subject: [PATCH 17/32] feat(logging): load dotenv early and improve logging config --- app.py | 31 +++++++++++++++++-------------- routes/utils/celery_manager.py | 5 ++++- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/app.py b/app.py index 2a99d78..e523637 100755 --- a/app.py +++ b/app.py @@ -12,16 +12,20 @@ import sys import redis import socket from urllib.parse import urlparse +from dotenv import load_dotenv +load_dotenv() -# Define a mapping from string log levels to logging constants -LOG_LEVELS = { - "CRITICAL": logging.CRITICAL, - "ERROR": logging.ERROR, - "WARNING": logging.WARNING, - "INFO": logging.INFO, - "DEBUG": logging.DEBUG, - "NOTSET": logging.NOTSET, -} +# Parse log level from environment as early as possible, default to INFO for visibility +log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper() +log_level = getattr(logging, log_level_str, logging.INFO) + +# Set up a very basic logging config immediately, so early logs (including import/migration errors) are visible +logging.basicConfig( + level=log_level, + format="%(asctime)s [%(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + stream=sys.stderr, +) # Run DB migrations as early as possible, before importing any routers that may touch DBs try: @@ -37,10 +41,6 @@ except Exception as e: ) sys.exit(1) -# Get log level from environment variable, default to INFO -log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper() -log_level = LOG_LEVELS.get(log_level_str, logging.INFO) - # Apply process umask from 
environment as early as possible _umask_value = os.getenv("UMASK") if _umask_value: @@ -139,6 +139,7 @@ def setup_logging(): "uvicorn", # General Uvicorn logger "uvicorn.access", # Uvicorn access logs "uvicorn.error", # Uvicorn error logs + "spotizerr", ]: logger = logging.getLogger(logger_name) logger.setLevel(log_level) @@ -146,7 +147,7 @@ def setup_logging(): # if access_log=False is used in uvicorn.run, and to ensure our middleware handles it. logger.propagate = False if logger_name == "uvicorn.access" else True - logging.info("Logging system initialized") + logger.info("Logging system initialized") def check_redis_connection(): @@ -197,6 +198,8 @@ async def lifespan(app: FastAPI): """Handle application startup and shutdown""" # Startup setup_logging() + effective_level = logging.getLevelName(log_level) + logging.getLogger(__name__).info(f"Logging system fully initialized (lifespan startup). Effective log level: {effective_level}") # Run migrations before initializing services try: diff --git a/routes/utils/celery_manager.py b/routes/utils/celery_manager.py index d26d291..686dda9 100644 --- a/routes/utils/celery_manager.py +++ b/routes/utils/celery_manager.py @@ -5,6 +5,9 @@ import threading import os import sys +from dotenv import load_dotenv +load_dotenv() + # Import Celery task utilities from .celery_config import get_config_params, MAX_CONCURRENT_DL @@ -161,7 +164,7 @@ class CeleryManager: queues="utility_tasks,default", # Listen to utility and default concurrency=5, # Increased concurrency for SSE updates and utility tasks worker_name_suffix="utw", # Utility Worker - log_level_env=os.getenv("LOG_LEVEL", "ERROR").upper(), + log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(), ) logger.info( From a6bdf966a485efc3518f336b81d9b750cdea1f94 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Tue, 26 Aug 2025 16:56:33 +0200 Subject: [PATCH 18/32] chore(logging): switch SSEBroadcaster log level to debug --- routes/system/progress.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/routes/system/progress.py b/routes/system/progress.py index 104a617..f5d5d78 100755 --- a/routes/system/progress.py +++ b/routes/system/progress.py @@ -82,7 +82,7 @@ class SSEBroadcaster: # Clean up disconnected clients for client in disconnected: self.clients.discard(client) - logger.info( + logger.debug( f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients" ) From 9f834a67bce35b55525131cba311519232f4e590 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Tue, 26 Aug 2025 16:59:14 +0200 Subject: [PATCH 19/32] avoid double logging of celery --- routes/utils/celery_manager.py | 8 ++++++++ routes/utils/celery_tasks.py | 11 +++++++++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/routes/utils/celery_manager.py b/routes/utils/celery_manager.py index 686dda9..e75519b 100644 --- a/routes/utils/celery_manager.py +++ b/routes/utils/celery_manager.py @@ -73,6 +73,12 @@ class CeleryManager: logger.debug(f"Generated Celery command: {' '.join(command)}") return command + def _get_worker_env(self): + # Inherit current environment, but set NO_CONSOLE_LOG=1 for subprocess + env = os.environ.copy() + env["NO_CONSOLE_LOG"] = "1" + return env + def _process_output_reader(self, stream, log_prefix, error=False): logger.debug(f"Log reader thread started for {log_prefix}") try: @@ -141,6 +147,7 @@ class CeleryManager: text=True, bufsize=1, universal_newlines=True, + env=self._get_worker_env(), ) self.download_log_thread_stdout = threading.Thread( 
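+            # env above injects NO_CONSOLE_LOG=1 into the worker subprocess;
+            # setup_celery_logging() in celery_tasks.py reads that flag and strips
+            # console StreamHandlers, so worker output is not logged twice.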
target=self._process_output_reader, @@ -177,6 +184,7 @@ class CeleryManager: text=True, bufsize=1, universal_newlines=True, + env=self._get_worker_env(), ) self.utility_log_thread_stdout = threading.Thread( target=self._process_output_reader, diff --git a/routes/utils/celery_tasks.py b/routes/utils/celery_tasks.py index 52853f5..9bec287 100644 --- a/routes/utils/celery_tasks.py +++ b/routes/utils/celery_tasks.py @@ -285,9 +285,16 @@ def setup_celery_logging(**kwargs): """ This handler ensures Celery uses our application logging settings instead of its own. Prevents duplicate log configurations. + Also disables console logging if NO_CONSOLE_LOG=1 is set in the environment. """ - # Using the root logger's handlers and level preserves our config - return logging.getLogger() + root_logger = logging.getLogger() + import os + if os.environ.get("NO_CONSOLE_LOG") == "1": + # Remove all StreamHandlers (console handlers) from the root logger + handlers_to_remove = [h for h in root_logger.handlers if isinstance(h, logging.StreamHandler)] + for h in handlers_to_remove: + root_logger.removeHandler(h) + return root_logger # The initialization of a worker will log the worker configuration From be17ff95b0db8bafd39f60586e437e003abaa7bf Mon Sep 17 00:00:00 2001 From: Phlogi Date: Tue, 26 Aug 2025 21:09:38 +0200 Subject: [PATCH 20/32] fix(ui): improve spotify url regex to support more formats --- spotizerr-ui/src/router.tsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/spotizerr-ui/src/router.tsx b/spotizerr-ui/src/router.tsx index d025d2e..98d56b9 100644 --- a/spotizerr-ui/src/router.tsx +++ b/spotizerr-ui/src/router.tsx @@ -34,8 +34,7 @@ export const indexRoute = createRoute({ loader: async ({ deps: { q, type } }) => { if (!q || q.length < 3) return { items: [] }; - const spotifyUrlRegex = /https:\/\/open\.spotify\.com\/(playlist|album|artist|track)\/([a-zA-Z0-9]+)/; - const match = q.match(spotifyUrlRegex); + const spotifyUrlRegex = /https:\/\/open\.spotify\.com\/(?:[a-zA-Z0-9-]+\/)*(playlist|album|artist|track)\/([a-zA-Z0-9]+)/; const match = q.match(spotifyUrlRegex); if (match) { const [, urlType, id] = match; From bf3725b0e223abf671ba6bb1882aaf0e41cf5f9a Mon Sep 17 00:00:00 2001 From: Phlogi Date: Tue, 26 Aug 2025 21:11:11 +0200 Subject: [PATCH 21/32] format --- spotizerr-ui/src/router.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spotizerr-ui/src/router.tsx b/spotizerr-ui/src/router.tsx index 98d56b9..3a361a9 100644 --- a/spotizerr-ui/src/router.tsx +++ b/spotizerr-ui/src/router.tsx @@ -34,7 +34,8 @@ export const indexRoute = createRoute({ loader: async ({ deps: { q, type } }) => { if (!q || q.length < 3) return { items: [] }; - const spotifyUrlRegex = /https:\/\/open\.spotify\.com\/(?:[a-zA-Z0-9-]+\/)*(playlist|album|artist|track)\/([a-zA-Z0-9]+)/; const match = q.match(spotifyUrlRegex); + const spotifyUrlRegex = /https:\/\/open\.spotify\.com\/(?:[a-zA-Z0-9-]+\/)*(playlist|album|artist|track)\/([a-zA-Z0-9]+)/; + const match = q.match(spotifyUrlRegex); if (match) { const [, urlType, id] = match; From d83e320a82d3ad98626f074a5bc0abf661f652dd Mon Sep 17 00:00:00 2001 From: Phlogi Date: Wed, 27 Aug 2025 09:43:01 +0200 Subject: [PATCH 22/32] refactor(api): replace direct celery tasks with queue manager in bulk add --- routes/content/bulk_add.py | 68 ++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 39 deletions(-) diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py index b5471ea..38a2824 100644 --- 
a/routes/content/bulk_add.py +++ b/routes/content/bulk_add.py @@ -1,12 +1,15 @@ import re from typing import List, Dict, Any -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, Request, Depends from pydantic import BaseModel import logging -# Assuming these imports are available for queue management and Spotify info +# Import authentication dependencies +from routes.auth.middleware import require_auth_from_state, User + +# Import queue management and Spotify info from routes.utils.get_info import get_spotify_info -from routes.utils.celery_tasks import download_track, download_album, download_playlist +from routes.utils.celery_queue_manager import download_queue_manager router = APIRouter() logger = logging.getLogger(__name__) @@ -15,7 +18,7 @@ class BulkAddLinksRequest(BaseModel): links: List[str] @router.post("/bulk-add-spotify-links") -async def bulk_add_spotify_links(request: BulkAddLinksRequest): +async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)): added_count = 0 failed_links = [] total_links = len(request.links) @@ -32,6 +35,7 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): spotify_type = match.group(1) spotify_id = match.group(2) + logger.debug(f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}") try: # Get basic info to confirm existence and get name/artist @@ -54,41 +58,27 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): # Construct URL for the download task spotify_url = f"https://open.spotify.com/{spotify_type}/{spotify_id}" - # Add to Celery queue based on type - if spotify_type == "track": - download_track.delay( - url=spotify_url, - spotify_id=spotify_id, - type=spotify_type, - name=item_name, - artist=artist_name, - download_type="track", - ) - elif spotify_type == "album": - download_album.delay( - url=spotify_url, - spotify_id=spotify_id, - type=spotify_type, - name=item_name, - artist=artist_name, - download_type="album", - ) - elif spotify_type == "playlist": - download_playlist.delay( - url=spotify_url, - spotify_id=spotify_id, - type=spotify_type, - name=item_name, - artist=artist_name, - download_type="playlist", - ) - else: - logger.warning(f"Unsupported Spotify type for download: {spotify_type} for link: {link}") - failed_links.append(link) - continue + # Prepare task data for the queue manager + task_data = { + "download_type": spotify_type, + "url": spotify_url, + "name": item_name, + "artist": artist_name, + "spotify_id": spotify_id, + "type": spotify_type, + "username": current_user.username, + "orig_request": dict(req.query_params), + } - added_count += 1 - logger.debug(f"Added {added_count+1}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue.") + # Add to download queue using the queue manager + task_id = download_queue_manager.add_task(task_data) + + if task_id: + added_count += 1 + logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.") + else: + logger.warning(f"Failed to add {spotify_type} '{item_name}' ({spotify_id}) to queue.") + failed_links.append(link) except Exception as e: logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True) @@ -105,4 +95,4 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): "message": message, "count": added_count, "failed_links": failed_links, - } \ No newline at end of file + } From af1e74294cf5b2aa1b637a781a7072b0426f5c52 Mon Sep 17 
00:00:00 2001 From: Phlogi Date: Wed, 27 Aug 2025 14:20:05 +0200 Subject: [PATCH 23/32] feat(config): add logging configuration to .env.example - Add LOG_LEVEL environment variable with possible values and usage guidance - Improve redis host documentation for docker-compose compatibility --- .env.example | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 227381e..e229325 100644 --- a/.env.example +++ b/.env.example @@ -11,6 +11,7 @@ HOST=0.0.0.0 # Redis connection (external or internal). +# Host name 'redis' works with docker-compose.yml setup REDIS_HOST=redis REDIS_PORT=6379 REDIS_DB=0 @@ -56,4 +57,9 @@ GOOGLE_CLIENT_SECRET= # GitHub SSO (get from GitHub Developer Settings) GITHUB_CLIENT_ID= -GITHUB_CLIENT_SECRET= \ No newline at end of file +GITHUB_CLIENT_SECRET= + +# Log level for application logging. +# Possible values: debug, info, warning, error, critical +# Set to 'info' or 'warning' for general use. Use 'debug' for troubleshooting. +LOG_LEVEL=info \ No newline at end of file From 1e9271eac47ac30ed3521abf9bc847dae3a7cdb0 Mon Sep 17 00:00:00 2001 From: che-pj Date: Wed, 27 Aug 2025 16:30:00 +0200 Subject: [PATCH 24/32] fix(ui): improve watchlist loading with batching and skeletons --- spotizerr-ui/src/routes/watchlist.tsx | 100 ++++++++++++++++++++------ 1 file changed, 79 insertions(+), 21 deletions(-) diff --git a/spotizerr-ui/src/routes/watchlist.tsx b/spotizerr-ui/src/routes/watchlist.tsx index ddbbffb..88ab4ff 100644 --- a/spotizerr-ui/src/routes/watchlist.tsx +++ b/spotizerr-ui/src/routes/watchlist.tsx @@ -20,39 +20,77 @@ export const Watchlist = () => { const { settings, isLoading: settingsLoading } = useSettings(); const [items, setItems] = useState([]); const [isLoading, setIsLoading] = useState(true); + const [expectedCount, setExpectedCount] = useState(null); + + // Utility to batch fetch details + async function batchFetch( + ids: string[], + fetchFn: (id: string) => Promise, + batchSize: number, + onBatch: (results: T[]) => void + ) { + for (let i = 0; i < ids.length; i += batchSize) { + const batchIds = ids.slice(i, i + batchSize); + const batchResults = await Promise.all( + batchIds.map((id) => fetchFn(id).catch(() => null)) + ); + onBatch(batchResults.filter(Boolean) as T[]); + } + } const fetchWatchlist = useCallback(async () => { setIsLoading(true); + setItems([]); // Clear previous items + setExpectedCount(null); try { const [artistsRes, playlistsRes] = await Promise.all([ apiClient.get("/artist/watch/list"), apiClient.get("/playlist/watch/list"), ]); - const artistDetailsPromises = artistsRes.data.map((artist) => - apiClient.get(`/artist/info?id=${artist.spotify_id}`), - ); - const playlistDetailsPromises = playlistsRes.data.map((playlist) => - apiClient.get(`/playlist/info?id=${playlist.spotify_id}`), + // Prepare lists of IDs + const artistIds = artistsRes.data.map((artist) => artist.spotify_id); + const playlistIds = playlistsRes.data.map((playlist) => playlist.spotify_id); + setExpectedCount(artistIds.length + playlistIds.length); + + // Allow UI to render grid and skeletons immediately + setIsLoading(false); + + // Helper to update state incrementally + const appendItems = (newItems: WatchedItem[]) => { + setItems((prev) => [...prev, ...newItems]); + }; + + // Fetch artist details in batches + await batchFetch( + artistIds, + (id) => apiClient.get(`/artist/info?id=${id}`).then(res => res.data), + 5, // batch size + (results) => { + const items: WatchedArtist[] = results.map((data) => ({ + ...data, + 
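+              // spread the raw artist payload and tag it with itemType so the
+              // grid can tell artist cards apart from playlist cards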
itemType: "artist", + })); + appendItems(items); + } ); - const [artistDetailsRes, playlistDetailsRes] = await Promise.all([ - Promise.all(artistDetailsPromises), - Promise.all(playlistDetailsPromises), - ]); - - const artists: WatchedItem[] = artistDetailsRes.map((res) => ({ ...res.data, itemType: "artist" })); - const playlists: WatchedItem[] = playlistDetailsRes.map((res) => ({ - ...res.data, - itemType: "playlist", - spotify_id: res.data.id, - })); - - setItems([...artists, ...playlists]); + // Fetch playlist details in batches + await batchFetch( + playlistIds, + (id) => apiClient.get(`/playlist/info?id=${id}`).then(res => res.data), + 5, // batch size + (results) => { + const items: WatchedPlaylist[] = results.map((data) => ({ + ...data, + itemType: "playlist", + spotify_id: data.id, + })); + appendItems(items); + } + ); } catch { toast.error("Failed to load watchlist."); - } finally { - setIsLoading(false); } }, []); @@ -110,7 +148,8 @@ export const Watchlist = () => { ); } - if (items.length === 0) { + // Show "empty" only if not loading and nothing expected + if (!isLoading && items.length === 0 && (!expectedCount || expectedCount === 0)) { return (

        {/* empty-state wrapper markup omitted */}
        Watchlist is Empty
@@ -158,6 +197,25 @@ export const Watchlist = () => {
          ))}
+        {/* Skeletons for loading items */}
+        {isLoading && expectedCount && items.length < expectedCount &&
+          Array.from({ length: expectedCount - items.length }).map((_, idx) => (
+            <div key={idx}>
+              {/* skeleton card markup omitted */}
+            </div>
); From 2de323a75f683f7e01b54f50d25d0ec3a2b568c7 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Wed, 27 Aug 2025 10:06:33 -0600 Subject: [PATCH 25/32] BREAKING CHANGE: migrate api to librespot internal client --- requirements.txt | 2 +- routes/content/album.py | 29 +- routes/content/artist.py | 111 ++--- routes/content/bulk_add.py | 58 ++- routes/content/playlist.py | 304 ++++++------- routes/content/track.py | 58 +-- routes/utils/artist.py | 2 +- routes/utils/get_info.py | 496 +++++----------------- routes/utils/watch/manager.py | 108 ++++- spotizerr-ui/package.json | 2 +- spotizerr-ui/src/components/AlbumCard.tsx | 10 +- spotizerr-ui/src/index.css | 43 ++ spotizerr-ui/src/routes/album.tsx | 73 ++-- spotizerr-ui/src/routes/artist.tsx | 350 +++++++++------ spotizerr-ui/src/routes/playlist.tsx | 157 ++++--- spotizerr-ui/src/routes/track.tsx | 10 +- spotizerr-ui/src/types/librespot.ts | 155 +++++++ 17 files changed, 1035 insertions(+), 933 deletions(-) create mode 100644 spotizerr-ui/src/types/librespot.ts diff --git a/requirements.txt b/requirements.txt index 7f0f7b6..44feb72 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==2.7.7 +deezspot-spotizerr==3.1.0 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/routes/content/album.py b/routes/content/album.py index de98864..d9d8ca4 100755 --- a/routes/content/album.py +++ b/routes/content/album.py @@ -5,11 +5,12 @@ import uuid import time from routes.utils.celery_queue_manager import download_queue_manager from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState -from routes.utils.get_info import get_spotify_info +from routes.utils.get_info import get_client, get_album from routes.utils.errors import DuplicateDownloadError # Import authentication dependencies from routes.auth.middleware import require_auth_from_state, User +# Config and credentials helpers router = APIRouter() @@ -34,7 +35,8 @@ async def handle_download( # Fetch metadata from Spotify try: - album_info = get_spotify_info(album_id, "album") + client = get_client() + album_info = get_album(client, album_id) if ( not album_info or not album_info.get("name") @@ -155,6 +157,7 @@ async def get_album_info( """ Retrieve Spotify album metadata given a Spotify album ID. Expects a query parameter 'id' that contains the Spotify album ID. + Returns the raw JSON from get_album in routes.utils.get_info. 
""" spotify_id = request.query_params.get("id") @@ -162,27 +165,9 @@ async def get_album_info( return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) try: - # Optional pagination params for tracks - limit_param = request.query_params.get("limit") - offset_param = request.query_params.get("offset") - limit = int(limit_param) if limit_param is not None else None - offset = int(offset_param) if offset_param is not None else None - - # Fetch album metadata - album_info = get_spotify_info(spotify_id, "album") - # Fetch album tracks with pagination - album_tracks = get_spotify_info( - spotify_id, "album_tracks", limit=limit, offset=offset - ) - - # Merge tracks into album payload in the same shape Spotify returns on album - album_info["tracks"] = album_tracks - + client = get_client() + album_info = get_album(client, spotify_id) return JSONResponse(content=album_info, status_code=200) - except ValueError as ve: - return JSONResponse( - content={"error": f"Invalid limit/offset: {str(ve)}"}, status_code=400 - ) except Exception as e: error_data = {"error": str(e), "traceback": traceback.format_exc()} return JSONResponse(content=error_data, status_code=500) diff --git a/routes/content/artist.py b/routes/content/artist.py index b4c5302..4649d27 100644 --- a/routes/content/artist.py +++ b/routes/content/artist.py @@ -18,10 +18,9 @@ from routes.utils.watch.db import ( get_watched_artists, add_specific_albums_to_artist_table, remove_specific_albums_from_artist_table, - is_album_in_artist_db, ) from routes.utils.watch.manager import check_watched_artists, get_watch_config -from routes.utils.get_info import get_spotify_info +from routes.utils.get_info import get_client, get_artist, get_album # Import authentication dependencies from routes.auth.middleware import require_auth_from_state, User @@ -66,9 +65,6 @@ async def handle_artist_download( ) try: - # Import and call the updated download_artist_albums() function. - # from routes.utils.artist import download_artist_albums # Already imported at top - # Delegate to the download_artist_albums function which will handle album filtering successfully_queued_albums, duplicate_albums = download_artist_albums( url=url, @@ -118,13 +114,15 @@ async def cancel_artist_download(): @router.get("/info") async def get_artist_info( - request: Request, current_user: User = Depends(require_auth_from_state), - limit: int = Query(10, ge=1), # default=10, must be >=1 - offset: int = Query(0, ge=0) # default=0, must be >=0 + request: Request, + current_user: User = Depends(require_auth_from_state), + limit: int = Query(10, ge=1), # default=10, must be >=1 + offset: int = Query(0, ge=0), # default=0, must be >=0 ): """ Retrieves Spotify artist metadata given a Spotify artist ID. Expects a query parameter 'id' with the Spotify artist ID. + Returns the raw JSON from get_artist in routes.utils.get_info. 
""" spotify_id = request.query_params.get("id") @@ -132,37 +130,8 @@ async def get_artist_info( return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) try: - # Get artist metadata first - artist_metadata = get_spotify_info(spotify_id, "artist") - - # Get artist discography for albums - artist_discography = get_spotify_info(spotify_id, "artist_discography", limit=limit, offset=offset) - - # Combine metadata with discography - artist_info = {**artist_metadata, "albums": artist_discography} - - # If artist_info is successfully fetched and has albums, - # check if the artist is watched and augment album items with is_locally_known status - if ( - artist_info - and artist_info.get("albums") - and artist_info["albums"].get("items") - ): - watched_artist_details = get_watched_artist( - spotify_id - ) # spotify_id is the artist ID - if watched_artist_details: # Artist is being watched - for album_item in artist_info["albums"]["items"]: - if album_item and album_item.get("id"): - album_id = album_item["id"] - album_item["is_locally_known"] = is_album_in_artist_db( - spotify_id, album_id - ) - elif album_item: # Album object exists but no ID - album_item["is_locally_known"] = False - # If not watched, or no albums, is_locally_known will not be added. - # Frontend should handle absence of this key as false. - + client = get_client() + artist_info = get_artist(client, spotify_id) return JSONResponse(content=artist_info, status_code=200) except Exception as e: return JSONResponse( @@ -191,15 +160,9 @@ async def add_artist_to_watchlist( if get_watched_artist(artist_spotify_id): return {"message": f"Artist {artist_spotify_id} is already being watched."} - # Get artist metadata directly for name and basic info - artist_metadata = get_spotify_info(artist_spotify_id, "artist") + client = get_client() + artist_metadata = get_artist(client, artist_spotify_id) - # Get artist discography for album count - artist_album_list_data = get_spotify_info( - artist_spotify_id, "artist_discography" - ) - - # Check if we got artist metadata if not artist_metadata or not artist_metadata.get("name"): logger.error( f"Could not fetch artist metadata for {artist_spotify_id} from Spotify." @@ -211,24 +174,22 @@ async def add_artist_to_watchlist( }, ) - # Check if we got album data - if not artist_album_list_data or not isinstance( - artist_album_list_data.get("items"), list + # Derive a rough total album count from groups if present + total_albums = 0 + for key in ( + "album_group", + "single_group", + "compilation_group", + "appears_on_group", ): - logger.warning( - f"Could not fetch album list details for artist {artist_spotify_id} from Spotify. Proceeding with metadata only." 
- ) + grp = artist_metadata.get(key) + if isinstance(grp, list): + total_albums += len(grp) - # Construct the artist_data object expected by add_artist_db artist_data_for_db = { "id": artist_spotify_id, "name": artist_metadata.get("name", "Unknown Artist"), - "albums": { # Mimic structure if add_artist_db expects it for total_albums - "total": artist_album_list_data.get("total", 0) - if artist_album_list_data - else 0 - }, - # Add any other fields add_artist_db might expect from a true artist object if necessary + "albums": {"total": total_albums}, } add_artist_db(artist_data_for_db) @@ -446,21 +407,25 @@ async def mark_albums_as_known_for_artist( detail={"error": f"Artist {artist_spotify_id} is not being watched."}, ) + client = get_client() fetched_albums_details = [] - for album_id in album_ids: - try: - # We need full album details. get_spotify_info with type "album" should provide this. - album_detail = get_spotify_info(album_id, "album") - if album_detail and album_detail.get("id"): - fetched_albums_details.append(album_detail) - else: - logger.warning( - f"Could not fetch details for album {album_id} when marking as known for artist {artist_spotify_id}." + try: + for album_id in album_ids: + try: + album_detail = get_album(client, album_id) + if album_detail and album_detail.get("id"): + fetched_albums_details.append(album_detail) + else: + logger.warning( + f"Could not fetch details for album {album_id} when marking as known for artist {artist_spotify_id}." + ) + except Exception as e: + logger.error( + f"Failed to fetch Spotify details for album {album_id}: {e}" ) - except Exception as e: - logger.error( - f"Failed to fetch Spotify details for album {album_id}: {e}" - ) + finally: + # No need to close_client here, as get_client is shared + pass if not fetched_albums_details: return { diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py index b5471ea..2110fa3 100644 --- a/routes/content/bulk_add.py +++ b/routes/content/bulk_add.py @@ -1,32 +1,46 @@ import re -from typing import List, Dict, Any -from fastapi import APIRouter, HTTPException +from typing import List +from fastapi import APIRouter from pydantic import BaseModel import logging # Assuming these imports are available for queue management and Spotify info -from routes.utils.get_info import get_spotify_info +from routes.utils.get_info import ( + get_client, + get_track, + get_album, + get_playlist, + get_artist, +) from routes.utils.celery_tasks import download_track, download_album, download_playlist router = APIRouter() logger = logging.getLogger(__name__) + class BulkAddLinksRequest(BaseModel): links: List[str] + @router.post("/bulk-add-spotify-links") async def bulk_add_spotify_links(request: BulkAddLinksRequest): added_count = 0 failed_links = [] total_links = len(request.links) - + + client = get_client() for link in request.links: # Assuming links are pre-filtered by the frontend, # but still handle potential errors during info retrieval or unsupported types # Extract type and ID from the link directly using regex - match = re.match(r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?", link) + match = re.match( + r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?", + link, + ) if not match: - logger.warning(f"Could not parse Spotify link (unexpected format after frontend filter): {link}") + logger.warning( + f"Could not parse Spotify link (unexpected format after frontend filter): {link}" + ) 
failed_links.append(link) continue @@ -35,18 +49,30 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): try: # Get basic info to confirm existence and get name/artist - # For playlists, we might want to get full info later when adding to queue if spotify_type == "playlist": - item_info = get_spotify_info(spotify_id, "playlist_metadata") + item_info = get_playlist(client, spotify_id, expand_items=False) + elif spotify_type == "track": + item_info = get_track(client, spotify_id) + elif spotify_type == "album": + item_info = get_album(client, spotify_id) + elif spotify_type == "artist": + # Not queued below, but fetch to validate link and name if needed + item_info = get_artist(client, spotify_id) else: - item_info = get_spotify_info(spotify_id, spotify_type) - + logger.warning( + f"Unsupported Spotify type: {spotify_type} for link: {link}" + ) + failed_links.append(link) + continue + item_name = item_info.get("name", "Unknown Name") artist_name = "" if spotify_type in ["track", "album"]: artists = item_info.get("artists", []) if artists: - artist_name = ", ".join([a.get("name", "Unknown Artist") for a in artists]) + artist_name = ", ".join( + [a.get("name", "Unknown Artist") for a in artists] + ) elif spotify_type == "playlist": owner = item_info.get("owner", {}) artist_name = owner.get("display_name", "Unknown Owner") @@ -83,12 +109,16 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): download_type="playlist", ) else: - logger.warning(f"Unsupported Spotify type for download: {spotify_type} for link: {link}") + logger.warning( + f"Unsupported Spotify type for download: {spotify_type} for link: {link}" + ) failed_links.append(link) continue added_count += 1 - logger.debug(f"Added {added_count+1}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue.") + logger.debug( + f"Added {added_count + 1}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue." 
+ ) except Exception as e: logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True) @@ -105,4 +135,4 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest): "message": message, "count": added_count, "failed_links": failed_links, - } \ No newline at end of file + } diff --git a/routes/content/playlist.py b/routes/content/playlist.py index 066fafe..27881eb 100755 --- a/routes/content/playlist.py +++ b/routes/content/playlist.py @@ -1,6 +1,5 @@ from fastapi import APIRouter, HTTPException, Request, Depends from fastapi.responses import JSONResponse -import json import traceback import logging # Added logging import import uuid # For generating error task IDs @@ -20,10 +19,9 @@ from routes.utils.watch.db import ( get_watched_playlist, get_watched_playlists, add_specific_tracks_to_playlist_table, - remove_specific_tracks_from_playlist_table, - is_track_in_playlist_db, # Added import + remove_specific_tracks_from_playlist_table, # Added import ) -from routes.utils.get_info import get_spotify_info # Already used, but ensure it's here +from routes.utils.get_info import get_client, get_playlist, get_track from routes.utils.watch.manager import ( check_watched_playlists, get_watch_config, @@ -31,7 +29,9 @@ from routes.utils.watch.manager import ( from routes.utils.errors import DuplicateDownloadError # Import authentication dependencies -from routes.auth.middleware import require_auth_from_state, require_admin_from_state, User +from routes.auth.middleware import require_auth_from_state, User +from routes.utils.celery_config import get_config_params +from routes.utils.credentials import get_spotify_blob_path logger = logging.getLogger(__name__) # Added logger initialization router = APIRouter() @@ -43,7 +43,11 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str: @router.get("/download/{playlist_id}") -async def handle_download(playlist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): +async def handle_download( + playlist_id: str, + request: Request, + current_user: User = Depends(require_auth_from_state), +): # Retrieve essential parameters from the request. 
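+    # e.g. construct_spotify_url("37i9dQZF1DXcBWIGoYBM5M", "playlist") returns
+    #   "https://open.spotify.com/playlist/37i9dQZF1DXcBWIGoYBM5M" (illustrative ID)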
# name = request.args.get('name') # Removed # artist = request.args.get('artist') # Removed @@ -51,11 +55,14 @@ async def handle_download(playlist_id: str, request: Request, current_user: User # Construct the URL from playlist_id url = construct_spotify_url(playlist_id, "playlist") - orig_params["original_url"] = str(request.url) # Update original_url to the constructed one + orig_params["original_url"] = str( + request.url + ) # Update original_url to the constructed one # Fetch metadata from Spotify using optimized function try: from routes.utils.get_info import get_playlist_metadata + playlist_info = get_playlist_metadata(playlist_id) if ( not playlist_info @@ -66,7 +73,7 @@ async def handle_download(playlist_id: str, request: Request, current_user: User content={ "error": f"Could not retrieve metadata for playlist ID: {playlist_id}" }, - status_code=404 + status_code=404, ) name_from_spotify = playlist_info.get("name") @@ -79,14 +86,13 @@ async def handle_download(playlist_id: str, request: Request, current_user: User content={ "error": f"Failed to fetch metadata for playlist {playlist_id}: {str(e)}" }, - status_code=500 + status_code=500, ) # Validate required parameters if not url: # This check might be redundant now but kept for safety return JSONResponse( - content={"error": "Missing required parameter: url"}, - status_code=400 + content={"error": "Missing required parameter: url"}, status_code=400 ) try: @@ -106,7 +112,7 @@ async def handle_download(playlist_id: str, request: Request, current_user: User "error": "Duplicate download detected.", "existing_task": e.existing_task, }, - status_code=409 + status_code=409, ) except Exception as e: # Generic error handling for other issues during task submission @@ -136,25 +142,23 @@ async def handle_download(playlist_id: str, request: Request, current_user: User "error": f"Failed to queue playlist download: {str(e)}", "task_id": error_task_id, }, - status_code=500 + status_code=500, ) - return JSONResponse( - content={"task_id": task_id}, - status_code=202 - ) + return JSONResponse(content={"task_id": task_id}, status_code=202) @router.get("/download/cancel") -async def cancel_download(request: Request, current_user: User = Depends(require_auth_from_state)): +async def cancel_download( + request: Request, current_user: User = Depends(require_auth_from_state) +): """ Cancel a running playlist download process by its task id. """ task_id = request.query_params.get("task_id") if not task_id: return JSONResponse( - content={"error": "Missing task id (task_id) parameter"}, - status_code=400 + content={"error": "Missing task id (task_id) parameter"}, status_code=400 ) # Use the queue manager's cancellation method. @@ -165,124 +169,94 @@ async def cancel_download(request: Request, current_user: User = Depends(require @router.get("/info") -async def get_playlist_info(request: Request, current_user: User = Depends(require_auth_from_state)): +async def get_playlist_info( + request: Request, current_user: User = Depends(require_auth_from_state) +): """ Retrieve Spotify playlist metadata given a Spotify playlist ID. Expects a query parameter 'id' that contains the Spotify playlist ID. 
- """ - spotify_id = request.query_params.get("id") - include_tracks = request.query_params.get("include_tracks", "false").lower() == "true" - - if not spotify_id: - return JSONResponse( - content={"error": "Missing parameter: id"}, - status_code=400 - ) - - try: - # Use the optimized playlist info function - from routes.utils.get_info import get_playlist_info_optimized - playlist_info = get_playlist_info_optimized(spotify_id, include_tracks=include_tracks) - - # If playlist_info is successfully fetched, check if it's watched - # and augment track items with is_locally_known status - if playlist_info and playlist_info.get("id"): - watched_playlist_details = get_watched_playlist(playlist_info["id"]) - if watched_playlist_details: # Playlist is being watched - if playlist_info.get("tracks") and playlist_info["tracks"].get("items"): - for item in playlist_info["tracks"]["items"]: - if item and item.get("track") and item["track"].get("id"): - track_id = item["track"]["id"] - item["track"]["is_locally_known"] = is_track_in_playlist_db( - playlist_info["id"], track_id - ) - elif item and item.get( - "track" - ): # Track object exists but no ID - item["track"]["is_locally_known"] = False - # If not watched, or no tracks, is_locally_known will not be added, or tracks won't exist to add it to. - # Frontend should handle absence of this key as false. - - return JSONResponse( - content=playlist_info, status_code=200 - ) - except Exception as e: - error_data = {"error": str(e), "traceback": traceback.format_exc()} - return JSONResponse(content=error_data, status_code=500) - - -@router.get("/metadata") -async def get_playlist_metadata(request: Request, current_user: User = Depends(require_auth_from_state)): - """ - Retrieve only Spotify playlist metadata (no tracks) to avoid rate limiting. - Expects a query parameter 'id' that contains the Spotify playlist ID. + Always returns the raw JSON from get_playlist with expand_items=False. """ spotify_id = request.query_params.get("id") if not spotify_id: - return JSONResponse( - content={"error": "Missing parameter: id"}, - status_code=400 - ) + return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) try: - # Use the optimized playlist metadata function - from routes.utils.get_info import get_playlist_metadata - playlist_metadata = get_playlist_metadata(spotify_id) + # Resolve active account's credentials blob + cfg = get_config_params() or {} + active_account = cfg.get("spotify") + if not active_account: + return JSONResponse( + content={"error": "Active Spotify account not set in configuration."}, + status_code=500, + ) + blob_path = get_spotify_blob_path(active_account) + if not blob_path.exists(): + return JSONResponse( + content={ + "error": f"Spotify credentials blob not found for account '{active_account}'" + }, + status_code=500, + ) - return JSONResponse( - content=playlist_metadata, status_code=200 - ) - except Exception as e: - error_data = {"error": str(e), "traceback": traceback.format_exc()} - return JSONResponse(content=error_data, status_code=500) + client = get_client() + try: + playlist_info = get_playlist(client, spotify_id, expand_items=False) + finally: + pass - -@router.get("/tracks") -async def get_playlist_tracks(request: Request, current_user: User = Depends(require_auth_from_state)): - """ - Retrieve playlist tracks with pagination support for progressive loading. - Expects query parameters: 'id' (playlist ID), 'limit' (optional), 'offset' (optional). 
- """ - spotify_id = request.query_params.get("id") - limit = int(request.query_params.get("limit", 50)) - offset = int(request.query_params.get("offset", 0)) - - if not spotify_id: - return JSONResponse( - content={"error": "Missing parameter: id"}, - status_code=400 - ) - - try: - # Use the optimized playlist tracks function - from routes.utils.get_info import get_playlist_tracks - tracks_data = get_playlist_tracks(spotify_id, limit=limit, offset=offset) - - return JSONResponse( - content=tracks_data, status_code=200 - ) + return JSONResponse(content=playlist_info, status_code=200) except Exception as e: error_data = {"error": str(e), "traceback": traceback.format_exc()} return JSONResponse(content=error_data, status_code=500) @router.put("/watch/{playlist_spotify_id}") -async def add_to_watchlist(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): +async def add_to_watchlist( + playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state) +): """Adds a playlist to the watchlist.""" watch_config = get_watch_config() if not watch_config.get("enabled", False): - raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."}) + raise HTTPException( + status_code=403, + detail={"error": "Watch feature is currently disabled globally."}, + ) logger.info(f"Attempting to add playlist {playlist_spotify_id} to watchlist.") try: # Check if already watched if get_watched_playlist(playlist_spotify_id): - return {"message": f"Playlist {playlist_spotify_id} is already being watched."} + return { + "message": f"Playlist {playlist_spotify_id} is already being watched." + } + + # Fetch playlist details from Spotify to populate our DB (metadata only) + cfg = get_config_params() or {} + active_account = cfg.get("spotify") + if not active_account: + raise HTTPException( + status_code=500, + detail={"error": "Active Spotify account not set in configuration."}, + ) + blob_path = get_spotify_blob_path(active_account) + if not blob_path.exists(): + raise HTTPException( + status_code=500, + detail={ + "error": f"Spotify credentials blob not found for account '{active_account}'" + }, + ) + + client = get_client() + try: + playlist_data = get_playlist( + client, playlist_spotify_id, expand_items=False + ) + finally: + pass - # Fetch playlist details from Spotify to populate our DB - from routes.utils.get_info import get_playlist_metadata - playlist_data = get_playlist_metadata(playlist_spotify_id) if not playlist_data or "id" not in playlist_data: logger.error( f"Could not fetch details for playlist {playlist_spotify_id} from Spotify." @@ -291,19 +265,11 @@ async def add_to_watchlist(playlist_spotify_id: str, current_user: User = Depend status_code=404, detail={ "error": f"Could not fetch details for playlist {playlist_spotify_id} from Spotify." - } + }, ) add_playlist_db(playlist_data) # This also creates the tracks table - # REMOVED: Do not add initial tracks directly to DB. - # The playlist watch manager will pick them up as new and queue downloads. - # Tracks will be added to DB only after successful download via Celery task callback. - # initial_track_items = playlist_data.get('tracks', {}).get('items', []) - # if initial_track_items: - # from routes.utils.watch.db import add_tracks_to_playlist_db # Keep local import for clarity - # add_tracks_to_playlist_db(playlist_spotify_id, initial_track_items) - logger.info( f"Playlist {playlist_spotify_id} added to watchlist. Its tracks will be processed by the watch manager." 
) @@ -317,11 +283,16 @@ async def add_to_watchlist(playlist_spotify_id: str, current_user: User = Depend f"Error adding playlist {playlist_spotify_id} to watchlist: {e}", exc_info=True, ) - raise HTTPException(status_code=500, detail={"error": f"Could not add playlist to watchlist: {str(e)}"}) + raise HTTPException( + status_code=500, + detail={"error": f"Could not add playlist to watchlist: {str(e)}"}, + ) @router.get("/watch/{playlist_spotify_id}/status") -async def get_playlist_watch_status(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): +async def get_playlist_watch_status( + playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state) +): """Checks if a specific playlist is being watched.""" logger.info(f"Checking watch status for playlist {playlist_spotify_id}.") try: @@ -337,22 +308,31 @@ async def get_playlist_watch_status(playlist_spotify_id: str, current_user: User f"Error checking watch status for playlist {playlist_spotify_id}: {e}", exc_info=True, ) - raise HTTPException(status_code=500, detail={"error": f"Could not check watch status: {str(e)}"}) + raise HTTPException( + status_code=500, detail={"error": f"Could not check watch status: {str(e)}"} + ) @router.delete("/watch/{playlist_spotify_id}") -async def remove_from_watchlist(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): +async def remove_from_watchlist( + playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state) +): """Removes a playlist from the watchlist.""" watch_config = get_watch_config() if not watch_config.get("enabled", False): - raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."}) + raise HTTPException( + status_code=403, + detail={"error": "Watch feature is currently disabled globally."}, + ) logger.info(f"Attempting to remove playlist {playlist_spotify_id} from watchlist.") try: if not get_watched_playlist(playlist_spotify_id): raise HTTPException( status_code=404, - detail={"error": f"Playlist {playlist_spotify_id} not found in watchlist."} + detail={ + "error": f"Playlist {playlist_spotify_id} not found in watchlist." + }, ) remove_playlist_db(playlist_spotify_id) @@ -369,12 +349,16 @@ async def remove_from_watchlist(playlist_spotify_id: str, current_user: User = D ) raise HTTPException( status_code=500, - detail={"error": f"Could not remove playlist from watchlist: {str(e)}"} + detail={"error": f"Could not remove playlist from watchlist: {str(e)}"}, ) @router.post("/watch/{playlist_spotify_id}/tracks") -async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): +async def mark_tracks_as_known( + playlist_spotify_id: str, + request: Request, + current_user: User = Depends(require_auth_from_state), +): """Fetches details for given track IDs and adds/updates them in the playlist's local DB table.""" watch_config = get_watch_config() if not watch_config.get("enabled", False): @@ -382,7 +366,7 @@ async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, curre status_code=403, detail={ "error": "Watch feature is currently disabled globally. Cannot mark tracks." - } + }, ) logger.info( @@ -397,19 +381,22 @@ async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, curre status_code=400, detail={ "error": "Invalid request body. Expecting a JSON array of track Spotify IDs." 
- } + }, ) if not get_watched_playlist(playlist_spotify_id): raise HTTPException( status_code=404, - detail={"error": f"Playlist {playlist_spotify_id} is not being watched."} + detail={ + "error": f"Playlist {playlist_spotify_id} is not being watched." + }, ) fetched_tracks_details = [] + client = get_client() for track_id in track_ids: try: - track_detail = get_spotify_info(track_id, "track") + track_detail = get_track(client, track_id) if track_detail and track_detail.get("id"): fetched_tracks_details.append(track_detail) else: @@ -443,11 +430,18 @@ async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, curre f"Error marking tracks as known for playlist {playlist_spotify_id}: {e}", exc_info=True, ) - raise HTTPException(status_code=500, detail={"error": f"Could not mark tracks as known: {str(e)}"}) + raise HTTPException( + status_code=500, + detail={"error": f"Could not mark tracks as known: {str(e)}"}, + ) @router.delete("/watch/{playlist_spotify_id}/tracks") -async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): +async def mark_tracks_as_missing_locally( + playlist_spotify_id: str, + request: Request, + current_user: User = Depends(require_auth_from_state), +): """Removes specified tracks from the playlist's local DB table.""" watch_config = get_watch_config() if not watch_config.get("enabled", False): @@ -455,7 +449,7 @@ async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Requ status_code=403, detail={ "error": "Watch feature is currently disabled globally. Cannot mark tracks." - } + }, ) logger.info( @@ -470,13 +464,15 @@ async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Requ status_code=400, detail={ "error": "Invalid request body. Expecting a JSON array of track Spotify IDs." - } + }, ) if not get_watched_playlist(playlist_spotify_id): raise HTTPException( status_code=404, - detail={"error": f"Playlist {playlist_spotify_id} is not being watched."} + detail={ + "error": f"Playlist {playlist_spotify_id} is not being watched." 
+ }, ) deleted_count = remove_specific_tracks_from_playlist_table( @@ -495,22 +491,32 @@ async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Requ f"Error marking tracks as missing (deleting locally) for playlist {playlist_spotify_id}: {e}", exc_info=True, ) - raise HTTPException(status_code=500, detail={"error": f"Could not mark tracks as missing: {str(e)}"}) + raise HTTPException( + status_code=500, + detail={"error": f"Could not mark tracks as missing: {str(e)}"}, + ) @router.get("/watch/list") -async def list_watched_playlists_endpoint(current_user: User = Depends(require_auth_from_state)): +async def list_watched_playlists_endpoint( + current_user: User = Depends(require_auth_from_state), +): """Lists all playlists currently in the watchlist.""" try: playlists = get_watched_playlists() return playlists except Exception as e: logger.error(f"Error listing watched playlists: {e}", exc_info=True) - raise HTTPException(status_code=500, detail={"error": f"Could not list watched playlists: {str(e)}"}) + raise HTTPException( + status_code=500, + detail={"error": f"Could not list watched playlists: {str(e)}"}, + ) @router.post("/watch/trigger_check") -async def trigger_playlist_check_endpoint(current_user: User = Depends(require_auth_from_state)): +async def trigger_playlist_check_endpoint( + current_user: User = Depends(require_auth_from_state), +): """Manually triggers the playlist checking mechanism for all watched playlists.""" watch_config = get_watch_config() if not watch_config.get("enabled", False): @@ -518,7 +524,7 @@ async def trigger_playlist_check_endpoint(current_user: User = Depends(require_a status_code=403, detail={ "error": "Watch feature is currently disabled globally. Cannot trigger check." - } + }, ) logger.info("Manual trigger for playlist check received for all playlists.") @@ -535,12 +541,14 @@ async def trigger_playlist_check_endpoint(current_user: User = Depends(require_a ) raise HTTPException( status_code=500, - detail={"error": f"Could not trigger playlist check for all: {str(e)}"} + detail={"error": f"Could not trigger playlist check for all: {str(e)}"}, ) @router.post("/watch/trigger_check/{playlist_spotify_id}") -async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): +async def trigger_specific_playlist_check_endpoint( + playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state) +): """Manually triggers the playlist checking mechanism for a specific playlist.""" watch_config = get_watch_config() if not watch_config.get("enabled", False): @@ -548,7 +556,7 @@ async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, cur status_code=403, detail={ "error": "Watch feature is currently disabled globally. Cannot trigger check." - } + }, ) logger.info( @@ -565,7 +573,7 @@ async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, cur status_code=404, detail={ "error": f"Playlist {playlist_spotify_id} is not in the watchlist. Add it first." 
- } + }, ) # Run check_watched_playlists with the specific ID @@ -590,5 +598,5 @@ async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, cur status_code=500, detail={ "error": f"Could not trigger playlist check for {playlist_spotify_id}: {str(e)}" - } + }, ) diff --git a/routes/content/track.py b/routes/content/track.py index b3d6d3c..f3f1213 100755 --- a/routes/content/track.py +++ b/routes/content/track.py @@ -1,12 +1,11 @@ -from fastapi import APIRouter, HTTPException, Request, Depends +from fastapi import APIRouter, Request, Depends from fastapi.responses import JSONResponse -import json import traceback import uuid import time from routes.utils.celery_queue_manager import download_queue_manager from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState -from routes.utils.get_info import get_spotify_info +from routes.utils.get_info import get_client, get_track from routes.utils.errors import DuplicateDownloadError # Import authentication dependencies @@ -21,7 +20,11 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str: @router.get("/download/{track_id}") -async def handle_download(track_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): +async def handle_download( + track_id: str, + request: Request, + current_user: User = Depends(require_auth_from_state), +): # Retrieve essential parameters from the request. # name = request.args.get('name') # Removed # artist = request.args.get('artist') # Removed @@ -31,15 +34,18 @@ async def handle_download(track_id: str, request: Request, current_user: User = # Fetch metadata from Spotify try: - track_info = get_spotify_info(track_id, "track") + client = get_client() + track_info = get_track(client, track_id) if ( not track_info or not track_info.get("name") or not track_info.get("artists") ): return JSONResponse( - content={"error": f"Could not retrieve metadata for track ID: {track_id}"}, - status_code=404 + content={ + "error": f"Could not retrieve metadata for track ID: {track_id}" + }, + status_code=404, ) name_from_spotify = track_info.get("name") @@ -51,15 +57,16 @@ async def handle_download(track_id: str, request: Request, current_user: User = except Exception as e: return JSONResponse( - content={"error": f"Failed to fetch metadata for track {track_id}: {str(e)}"}, - status_code=500 + content={ + "error": f"Failed to fetch metadata for track {track_id}: {str(e)}" + }, + status_code=500, ) # Validate required parameters if not url: return JSONResponse( - content={"error": "Missing required parameter: url"}, - status_code=400 + content={"error": "Missing required parameter: url"}, status_code=400 ) # Add the task to the queue with only essential parameters @@ -84,7 +91,7 @@ async def handle_download(track_id: str, request: Request, current_user: User = "error": "Duplicate download detected.", "existing_task": e.existing_task, }, - status_code=409 + status_code=409, ) except Exception as e: # Generic error handling for other issues during task submission @@ -116,25 +123,23 @@ async def handle_download(track_id: str, request: Request, current_user: User = "error": f"Failed to queue track download: {str(e)}", "task_id": error_task_id, }, - status_code=500 + status_code=500, ) - return JSONResponse( - content={"task_id": task_id}, - status_code=202 - ) + return JSONResponse(content={"task_id": task_id}, status_code=202) @router.get("/download/cancel") -async def cancel_download(request: Request, current_user: User = 
Depends(require_auth_from_state)): +async def cancel_download( + request: Request, current_user: User = Depends(require_auth_from_state) +): """ Cancel a running download process by its task id. """ task_id = request.query_params.get("task_id") if not task_id: return JSONResponse( - content={"error": "Missing process id (task_id) parameter"}, - status_code=400 + content={"error": "Missing process id (task_id) parameter"}, status_code=400 ) # Use the queue manager's cancellation method. @@ -145,7 +150,9 @@ async def cancel_download(request: Request, current_user: User = Depends(require @router.get("/info") -async def get_track_info(request: Request, current_user: User = Depends(require_auth_from_state)): +async def get_track_info( + request: Request, current_user: User = Depends(require_auth_from_state) +): """ Retrieve Spotify track metadata given a Spotify track ID. Expects a query parameter 'id' that contains the Spotify track ID. @@ -153,14 +160,11 @@ async def get_track_info(request: Request, current_user: User = Depends(require_ spotify_id = request.query_params.get("id") if not spotify_id: - return JSONResponse( - content={"error": "Missing parameter: id"}, - status_code=400 - ) + return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) try: - # Use the get_spotify_info function (already imported at top) - track_info = get_spotify_info(spotify_id, "track") + client = get_client() + track_info = get_track(client, spotify_id) return JSONResponse(content=track_info, status_code=200) except Exception as e: error_data = {"error": str(e), "traceback": traceback.format_exc()} diff --git a/routes/utils/artist.py b/routes/utils/artist.py index e08474e..6cc2973 100644 --- a/routes/utils/artist.py +++ b/routes/utils/artist.py @@ -2,9 +2,9 @@ import json from routes.utils.watch.manager import get_watch_config import logging from routes.utils.celery_queue_manager import download_queue_manager -from routes.utils.get_info import get_spotify_info from routes.utils.credentials import get_credential, _get_global_spotify_api_creds from routes.utils.errors import DuplicateDownloadError +from routes.utils.get_info import get_spotify_info from deezspot.libutils.utils import get_ids, link_is_valid diff --git a/routes/utils/get_info.py b/routes/utils/get_info.py index df1384d..778e65f 100644 --- a/routes/utils/get_info.py +++ b/routes/utils/get_info.py @@ -1,422 +1,152 @@ -import spotipy -from spotipy.oauth2 import SpotifyClientCredentials -from routes.utils.credentials import _get_global_spotify_api_creds -import logging -import time -from typing import Dict, Optional, Any +import os +from typing import Any, Dict, Optional +import threading -# Import Deezer API and logging -from deezspot.deezloader.dee_api import API as DeezerAPI +from deezspot.libutils import LibrespotClient -# Initialize logger -logger = logging.getLogger(__name__) - -# Global Spotify client instance for reuse -_spotify_client = None -_last_client_init = 0 -_client_init_interval = 3600 # Reinitialize client every hour +# Config helpers to resolve active credentials +from routes.utils.celery_config import get_config_params +from routes.utils.credentials import get_spotify_blob_path -def _get_spotify_client(): - """ - Get or create a Spotify client with global credentials. - Implements client reuse and periodic reinitialization. 
- """ - global _spotify_client, _last_client_init +# -------- Shared Librespot client (process-wide) -------- - current_time = time.time() +_shared_client: Optional[LibrespotClient] = None +_shared_blob_path: Optional[str] = None +_client_lock = threading.RLock() - # Reinitialize client if it's been more than an hour or if client doesn't exist - if ( - _spotify_client is None - or current_time - _last_client_init > _client_init_interval - ): - client_id, client_secret = _get_global_spotify_api_creds() - if not client_id or not client_secret: - raise ValueError( - "Global Spotify API client_id or client_secret not configured in ./data/creds/search.json." - ) - - # Create new client - _spotify_client = spotipy.Spotify( - client_credentials_manager=SpotifyClientCredentials( - client_id=client_id, client_secret=client_secret - ) +def _resolve_blob_path() -> str: + cfg = get_config_params() or {} + active_account = cfg.get("spotify") + if not active_account: + raise RuntimeError("Active Spotify account not set in configuration.") + blob_path = get_spotify_blob_path(active_account) + abs_path = os.path.abspath(str(blob_path)) + if not os.path.isfile(abs_path): + raise FileNotFoundError( + f"Spotify credentials blob not found for account '{active_account}' at {abs_path}" ) - _last_client_init = current_time - logger.info("Spotify client initialized/reinitialized") - - return _spotify_client + return abs_path -def _rate_limit_handler(func): +def get_client() -> LibrespotClient: """ - Decorator to handle rate limiting with exponential backoff. + Return a shared LibrespotClient instance initialized from the active account blob. + Re-initializes if the active account changes. """ - - def wrapper(*args, **kwargs): - max_retries = 3 - base_delay = 1 - - for attempt in range(max_retries): + global _shared_client, _shared_blob_path + with _client_lock: + desired_blob = _resolve_blob_path() + if _shared_client is None or _shared_blob_path != desired_blob: try: - return func(*args, **kwargs) - except Exception as e: - if "429" in str(e) or "rate limit" in str(e).lower(): - if attempt < max_retries - 1: - delay = base_delay * (2**attempt) - logger.warning(f"Rate limited, retrying in {delay} seconds...") - time.sleep(delay) - continue - raise e - return func(*args, **kwargs) - - return wrapper + if _shared_client is not None: + _shared_client.close() + except Exception: + pass + _shared_client = LibrespotClient(stored_credentials_path=desired_blob) + _shared_blob_path = desired_blob + return _shared_client -@_rate_limit_handler -def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]: +# -------- Thin wrapper API (programmatic use) -------- + + +def create_client(credentials_path: str) -> LibrespotClient: """ - Get playlist metadata only (no tracks) to avoid rate limiting. - - Args: - playlist_id: The Spotify playlist ID - - Returns: - Dictionary with playlist metadata (name, description, owner, etc.) + Create a LibrespotClient from a librespot-generated credentials.json file. 
""" - client = _get_spotify_client() - - try: - # Get basic playlist info without tracks - playlist = client.playlist( - playlist_id, - fields="id,name,description,owner,images,snapshot_id,public,followers,tracks.total", - ) - - # Add a flag to indicate this is metadata only - playlist["_metadata_only"] = True - playlist["_tracks_loaded"] = False - - logger.debug( - f"Retrieved playlist metadata for {playlist_id}: {playlist.get('name', 'Unknown')}" - ) - return playlist - - except Exception as e: - logger.error(f"Error fetching playlist metadata for {playlist_id}: {e}") - raise + abs_path = os.path.abspath(credentials_path) + if not os.path.isfile(abs_path): + raise FileNotFoundError(f"Credentials file not found: {abs_path}") + return LibrespotClient(stored_credentials_path=abs_path) -@_rate_limit_handler -def get_playlist_tracks( - playlist_id: str, limit: int = 100, offset: int = 0 +def close_client(client: LibrespotClient) -> None: + """ + Dispose a LibrespotClient instance. + """ + client.close() + + +def get_track(client: LibrespotClient, track_in: str) -> Dict[str, Any]: + """Fetch a track object.""" + return client.get_track(track_in) + + +def get_album( + client: LibrespotClient, album_in: str, include_tracks: bool = False ) -> Dict[str, Any]: - """ - Get playlist tracks with pagination support to handle large playlists efficiently. - - Args: - playlist_id: The Spotify playlist ID - limit: Number of tracks to fetch per request (max 100) - offset: Starting position for pagination - - Returns: - Dictionary with tracks data - """ - client = _get_spotify_client() - - try: - # Get tracks with specified limit and offset - tracks_data = client.playlist_tracks( - playlist_id, - limit=min(limit, 100), # Spotify API max is 100 - offset=offset, - fields="items(track(id,name,artists,album,external_urls,preview_url,duration_ms,explicit,popularity)),total,limit,offset", - ) - - logger.debug( - f"Retrieved {len(tracks_data.get('items', []))} tracks for playlist {playlist_id} (offset: {offset})" - ) - return tracks_data - - except Exception as e: - logger.error(f"Error fetching playlist tracks for {playlist_id}: {e}") - raise + """Fetch an album object; optionally include expanded tracks.""" + return client.get_album(album_in, include_tracks=include_tracks) -@_rate_limit_handler -def get_playlist_full(playlist_id: str, batch_size: int = 100) -> Dict[str, Any]: - """ - Get complete playlist data with all tracks, using batched requests to avoid rate limiting. 
- - Args: - playlist_id: The Spotify playlist ID - batch_size: Number of tracks to fetch per batch (max 100) - - Returns: - Complete playlist data with all tracks - """ - try: - # First get metadata - playlist = get_playlist_metadata(playlist_id) - - # Get total track count - total_tracks = playlist.get("tracks", {}).get("total", 0) - - if total_tracks == 0: - playlist["tracks"] = {"items": [], "total": 0} - return playlist - - # Fetch all tracks in batches - all_tracks = [] - offset = 0 - - while offset < total_tracks: - batch = get_playlist_tracks(playlist_id, limit=batch_size, offset=offset) - batch_items = batch.get("items", []) - all_tracks.extend(batch_items) - - offset += len(batch_items) - - # Add small delay between batches to be respectful to API - if offset < total_tracks: - time.sleep(0.1) - - # Update playlist with complete tracks data - playlist["tracks"] = { - "items": all_tracks, - "total": total_tracks, - "limit": batch_size, - "offset": 0, - } - playlist["_metadata_only"] = False - playlist["_tracks_loaded"] = True - - logger.info( - f"Retrieved complete playlist {playlist_id} with {total_tracks} tracks" - ) - return playlist - - except Exception as e: - logger.error(f"Error fetching complete playlist {playlist_id}: {e}") - raise +def get_artist(client: LibrespotClient, artist_in: str) -> Dict[str, Any]: + """Fetch an artist object.""" + return client.get_artist(artist_in) -def check_playlist_updated(playlist_id: str, last_snapshot_id: str) -> bool: - """ - Check if playlist has been updated by comparing snapshot_id. - This is much more efficient than fetching all tracks. - - Args: - playlist_id: The Spotify playlist ID - last_snapshot_id: The last known snapshot_id - - Returns: - True if playlist has been updated, False otherwise - """ - try: - metadata = get_playlist_metadata(playlist_id) - current_snapshot_id = metadata.get("snapshot_id") - - return current_snapshot_id != last_snapshot_id - - except Exception as e: - logger.error(f"Error checking playlist update status for {playlist_id}: {e}") - raise +def get_playlist( + client: LibrespotClient, playlist_in: str, expand_items: bool = False +) -> Dict[str, Any]: + """Fetch a playlist object; optionally expand track items to full track objects.""" + return client.get_playlist(playlist_in, expand_items=expand_items) -@_rate_limit_handler def get_spotify_info( spotify_id: str, - spotify_type: str, - limit: Optional[int] = None, - offset: Optional[int] = None, + info_type: str, + limit: int = 50, + offset: int = 0, ) -> Dict[str, Any]: """ - Get info from Spotify API using Spotipy directly. - Optimized to prevent rate limiting by using appropriate endpoints. + Thin, typed wrapper around common Spotify info lookups using the shared client. - Args: - spotify_id: The Spotify ID of the entity - spotify_type: The type of entity (track, album, playlist, artist, artist_discography, episode, album_tracks) - limit (int, optional): The maximum number of items to return. Used for pagination. - offset (int, optional): The index of the first item to return. Used for pagination. + Currently supports: + - "artist_discography": returns a paginated view over the artist's releases + combined across album_group/single_group/compilation_group/appears_on_group. - Returns: - Dictionary with the entity information + Returns a mapping with at least: items, total, limit, offset. + Also includes a truthy "next" key when more pages are available. 
""" - client = _get_spotify_client() + client = get_client() - try: - if spotify_type == "track": - return client.track(spotify_id) + if info_type == "artist_discography": + artist = client.get_artist(spotify_id) + all_items = [] + for key in ( + "album_group", + "single_group", + "compilation_group", + "appears_on_group", + ): + grp = artist.get(key) + if isinstance(grp, list): + all_items.extend(grp) + elif isinstance(grp, dict): + items = grp.get("items") or grp.get("releases") or [] + if isinstance(items, list): + all_items.extend(items) + total = len(all_items) + start = max(0, offset or 0) + page_limit = max(1, limit or 50) + end = min(total, start + page_limit) + page_items = all_items[start:end] + has_more = end < total + return { + "items": page_items, + "total": total, + "limit": page_limit, + "offset": start, + "next": bool(has_more), + } - elif spotify_type == "album": - return client.album(spotify_id) - - elif spotify_type == "album_tracks": - # Fetch album's tracks with pagination support - return client.album_tracks( - spotify_id, limit=limit or 20, offset=offset or 0 - ) - - elif spotify_type == "playlist": - # Use optimized playlist fetching - return get_playlist_full(spotify_id) - - elif spotify_type == "playlist_metadata": - # Get only metadata for playlists - return get_playlist_metadata(spotify_id) - - elif spotify_type == "artist": - return client.artist(spotify_id) - - elif spotify_type == "artist_discography": - # Get artist's albums with pagination - albums = client.artist_albums( - spotify_id, - limit=limit or 20, - offset=offset or 0, - include_groups="single,album,appears_on", - ) - return albums - - elif spotify_type == "episode": - return client.episode(spotify_id) - - else: - raise ValueError(f"Unsupported Spotify type: {spotify_type}") - - except Exception as e: - logger.error(f"Error fetching {spotify_type} {spotify_id}: {e}") - raise + raise ValueError(f"Unsupported info_type: {info_type}") -# Cache for playlist metadata to reduce API calls -_playlist_metadata_cache: Dict[str, tuple[Dict[str, Any], float]] = {} -_cache_ttl = 300 # 5 minutes cache - - -def get_cached_playlist_metadata(playlist_id: str) -> Optional[Dict[str, Any]]: +def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]: """ - Get playlist metadata from cache if available and not expired. - - Args: - playlist_id: The Spotify playlist ID - - Returns: - Cached metadata or None if not available/expired + Fetch playlist metadata using the shared client without expanding items. """ - if playlist_id in _playlist_metadata_cache: - cached_data, timestamp = _playlist_metadata_cache[playlist_id] - if time.time() - timestamp < _cache_ttl: - return cached_data - - return None - - -def cache_playlist_metadata(playlist_id: str, metadata: Dict[str, Any]): - """ - Cache playlist metadata with timestamp. - - Args: - playlist_id: The Spotify playlist ID - metadata: The metadata to cache - """ - _playlist_metadata_cache[playlist_id] = (metadata, time.time()) - - -def get_playlist_info_optimized( - playlist_id: str, include_tracks: bool = False -) -> Dict[str, Any]: - """ - Optimized playlist info function that uses caching and selective loading. 
- - Args: - playlist_id: The Spotify playlist ID - include_tracks: Whether to include track data (default: False to save API calls) - - Returns: - Playlist data with or without tracks - """ - # Check cache first - cached_metadata = get_cached_playlist_metadata(playlist_id) - - if cached_metadata and not include_tracks: - logger.debug(f"Returning cached metadata for playlist {playlist_id}") - return cached_metadata - - if include_tracks: - # Get complete playlist data - playlist_data = get_playlist_full(playlist_id) - # Cache the metadata portion - metadata_only = {k: v for k, v in playlist_data.items() if k != "tracks"} - metadata_only["_metadata_only"] = True - metadata_only["_tracks_loaded"] = False - cache_playlist_metadata(playlist_id, metadata_only) - return playlist_data - else: - # Get metadata only - metadata = get_playlist_metadata(playlist_id) - cache_playlist_metadata(playlist_id, metadata) - return metadata - - -# Keep the existing Deezer functions unchanged -def get_deezer_info(deezer_id, deezer_type, limit=None): - """ - Get info from Deezer API. - - Args: - deezer_id: The Deezer ID of the entity. - deezer_type: The type of entity (track, album, playlist, artist, episode, - artist_top_tracks, artist_albums, artist_related, - artist_radio, artist_playlists). - limit (int, optional): The maximum number of items to return. Used for - artist_top_tracks, artist_albums, artist_playlists. - Deezer API methods usually have their own defaults (e.g., 25) - if limit is not provided or None is passed to them. - - Returns: - Dictionary with the entity information. - Raises: - ValueError: If deezer_type is unsupported. - Various exceptions from DeezerAPI (NoDataApi, QuotaExceeded, requests.exceptions.RequestException, etc.) - """ - logger.debug( - f"Fetching Deezer info for ID {deezer_id}, type {deezer_type}, limit {limit}" - ) - - # DeezerAPI uses class methods; its @classmethod __init__ handles setup. - # No specific ARL or account handling here as DeezerAPI seems to use general endpoints. 
- - if deezer_type == "track": - return DeezerAPI.get_track(deezer_id) - elif deezer_type == "album": - return DeezerAPI.get_album(deezer_id) - elif deezer_type == "playlist": - return DeezerAPI.get_playlist(deezer_id) - elif deezer_type == "artist": - return DeezerAPI.get_artist(deezer_id) - elif deezer_type == "episode": - return DeezerAPI.get_episode(deezer_id) - elif deezer_type == "artist_top_tracks": - if limit is not None: - return DeezerAPI.get_artist_top_tracks(deezer_id, limit=limit) - return DeezerAPI.get_artist_top_tracks(deezer_id) # Use API default limit - elif deezer_type == "artist_albums": # Maps to get_artist_top_albums - if limit is not None: - return DeezerAPI.get_artist_top_albums(deezer_id, limit=limit) - return DeezerAPI.get_artist_top_albums(deezer_id) # Use API default limit - elif deezer_type == "artist_related": - return DeezerAPI.get_artist_related(deezer_id) - elif deezer_type == "artist_radio": - return DeezerAPI.get_artist_radio(deezer_id) - elif deezer_type == "artist_playlists": - if limit is not None: - return DeezerAPI.get_artist_top_playlists(deezer_id, limit=limit) - return DeezerAPI.get_artist_top_playlists(deezer_id) # Use API default limit - else: - logger.error(f"Unsupported Deezer type: {deezer_type}") - raise ValueError(f"Unsupported Deezer type: {deezer_type}") + client = get_client() + return get_playlist(client, playlist_id, expand_items=False) diff --git a/routes/utils/watch/manager.py b/routes/utils/watch/manager.py index 4a10e78..32014c5 100644 --- a/routes/utils/watch/manager.py +++ b/routes/utils/watch/manager.py @@ -27,15 +27,9 @@ from routes.utils.watch.db import ( get_artist_batch_next_offset, set_artist_batch_next_offset, ) -from routes.utils.get_info import ( - get_spotify_info, - get_playlist_metadata, - get_playlist_tracks, -) # To fetch playlist, track, artist, and album details -from routes.utils.celery_queue_manager import download_queue_manager -# Added import to fetch base formatting config -from routes.utils.celery_queue_manager import get_config_params +from routes.utils.celery_queue_manager import download_queue_manager, get_config_params +from routes.utils.get_info import get_client logger = logging.getLogger(__name__) MAIN_CONFIG_FILE_PATH = Path("./data/config/main.json") @@ -358,7 +352,7 @@ def find_tracks_in_playlist( while not_found_tracks and offset < 10000: # Safety limit try: - tracks_batch = get_playlist_tracks( + tracks_batch = _fetch_playlist_tracks_page( playlist_spotify_id, limit=limit, offset=offset ) @@ -459,7 +453,9 @@ def check_watched_playlists(specific_playlist_id: str = None): ensure_playlist_table_schema(playlist_spotify_id) # First, get playlist metadata to check if it has changed - current_playlist_metadata = get_playlist_metadata(playlist_spotify_id) + current_playlist_metadata = _fetch_playlist_metadata( + playlist_spotify_id + ) if not current_playlist_metadata: logger.error( f"Playlist Watch Manager: Failed to fetch metadata from Spotify for playlist {playlist_spotify_id}." @@ -507,7 +503,7 @@ def check_watched_playlists(specific_playlist_id: str = None): progress_offset, _ = get_playlist_batch_progress( playlist_spotify_id ) - tracks_batch = get_playlist_tracks( + tracks_batch = _fetch_playlist_tracks_page( playlist_spotify_id, limit=batch_limit, offset=progress_offset, @@ -573,7 +569,7 @@ def check_watched_playlists(specific_playlist_id: str = None): logger.info( f"Playlist Watch Manager: Fetching one batch (limit={batch_limit}, offset={progress_offset}) for playlist '{playlist_name}'." 
) - tracks_batch = get_playlist_tracks( + tracks_batch = _fetch_playlist_tracks_page( playlist_spotify_id, limit=batch_limit, offset=progress_offset ) batch_items = tracks_batch.get("items", []) if tracks_batch else [] @@ -734,8 +730,8 @@ def check_watched_artists(specific_artist_id: str = None): logger.debug( f"Artist Watch Manager: Fetching albums for {artist_spotify_id}. Limit: {limit}, Offset: {offset}" ) - artist_albums_page = get_spotify_info( - artist_spotify_id, "artist_discography", limit=limit, offset=offset + artist_albums_page = _fetch_artist_discography_page( + artist_spotify_id, limit=limit, offset=offset ) current_page_albums = ( @@ -911,7 +907,8 @@ def run_playlist_check_over_intervals(playlist_spotify_id: str) -> None: # Determine if we are done: no active processing snapshot and no pending sync cfg = get_watch_config() interval = cfg.get("watchPollIntervalSeconds", 3600) - metadata = get_playlist_metadata(playlist_spotify_id) + # Use local helper that leverages Librespot client + metadata = _fetch_playlist_metadata(playlist_spotify_id) if not metadata: logger.warning( f"Manual Playlist Runner: Could not load metadata for {playlist_spotify_id}. Stopping." @@ -1167,3 +1164,84 @@ def update_playlist_m3u_file(playlist_spotify_id: str): f"Error updating m3u file for playlist {playlist_spotify_id}: {e}", exc_info=True, ) + + +# Helper to build a Librespot client from active account + + +def _build_librespot_client(): + try: + # Reuse shared client managed in routes.utils.get_info + return get_client() + except Exception as e: + raise RuntimeError(f"Failed to initialize Librespot client: {e}") + + +def _fetch_playlist_metadata(playlist_id: str) -> dict: + client = _build_librespot_client() + return client.get_playlist(playlist_id, expand_items=False) + + +def _fetch_playlist_tracks_page(playlist_id: str, limit: int, offset: int) -> dict: + client = _build_librespot_client() + # Fetch playlist with minimal items to avoid expanding all tracks unnecessarily + pl = client.get_playlist(playlist_id, expand_items=False) + items = (pl.get("tracks", {}) or {}).get("items", []) + total = (pl.get("tracks", {}) or {}).get("total", len(items)) + start = max(0, offset or 0) + end = start + max(1, limit or 50) + page_items_minimal = items[start:end] + + # Expand only the tracks in this page using client cache for efficiency + page_items_expanded = [] + for item in page_items_minimal: + track_stub = (item or {}).get("track") or {} + track_id = track_stub.get("id") + expanded_track = None + if track_id: + try: + expanded_track = client.get_track(track_id) + except Exception: + expanded_track = None + if expanded_track is None: + # Keep stub as fallback; ensure structure + expanded_track = { + k: v + for k, v in track_stub.items() + if k in ("id", "uri", "type", "external_urls") + } + # Propagate local flag onto track for downstream checks + if item and isinstance(item, dict) and item.get("is_local"): + expanded_track["is_local"] = True + # Rebuild item with expanded track + new_item = dict(item) + new_item["track"] = expanded_track + page_items_expanded.append(new_item) + + return { + "items": page_items_expanded, + "total": total, + "limit": end - start, + "offset": start, + } + + +def _fetch_artist_discography_page(artist_id: str, limit: int, offset: int) -> dict: + # LibrespotClient.get_artist returns a pruned mapping; flatten common discography groups + client = _build_librespot_client() + artist = client.get_artist(artist_id) + all_items = [] + # Collect from known groups; also support 
nested structures if present + for key in ("album_group", "single_group", "compilation_group", "appears_on_group"): + grp = artist.get(key) + if isinstance(grp, list): + all_items.extend(grp) + elif isinstance(grp, dict): + items = grp.get("items") or grp.get("releases") or [] + if isinstance(items, list): + all_items.extend(items) + total = len(all_items) + start = max(0, offset or 0) + end = start + max(1, limit or 50) + page_items = all_items[start:end] + return {"items": page_items, "total": total, "limit": limit, "offset": start} diff --git a/spotizerr-ui/package.json b/spotizerr-ui/package.json index 8cc0b17..93ccbb8 100644 --- a/spotizerr-ui/package.json +++ b/spotizerr-ui/package.json @@ -1,7 +1,7 @@ { "name": "spotizerr-ui", "private": true, - "version": "3.3.1", + "version": "4.0.0", "type": "module", "scripts": { "dev": "vite", diff --git a/spotizerr-ui/src/components/AlbumCard.tsx b/spotizerr-ui/src/components/AlbumCard.tsx index 23221c5..a97c8aa 100644 --- a/spotizerr-ui/src/components/AlbumCard.tsx +++ b/spotizerr-ui/src/components/AlbumCard.tsx @@ -2,10 +2,10 @@ import { Link } from "@tanstack/react-router"; import { useContext, useEffect } from "react"; import { toast } from "sonner"; import { QueueContext, getStatus } from "../contexts/queue-context"; -import type { AlbumType } from "../types/spotify"; +import type { LibrespotAlbumType } from "@/types/librespot"; interface AlbumCardProps { - album: AlbumType; + album: LibrespotAlbumType; onDownload?: () => void; } @@ -38,7 +38,7 @@ export const AlbumCard = ({ album, onDownload }: AlbumCardProps) => { onDownload(); }} disabled={!!status && status !== "error"} - className="absolute bottom-2 right-2 p-2 bg-button-success hover:bg-button-success-hover text-button-success-text rounded-full transition-opacity shadow-lg opacity-0 group-hover:opacity-100 duration-300 disabled:opacity-50 disabled:cursor-not-allowed" + className="absolute bottom-2 right-2 p-2 bg-button-success hover:bg-button-success-hover text-button-success-text rounded-full transition-opacity shadow-lg opacity-100 sm:opacity-0 sm:group-hover:opacity-100 duration-300 z-10 disabled:opacity-50 disabled:cursor-not-allowed" title={ status ? status === "queued" @@ -53,9 +53,9 @@ export const AlbumCard = ({ album, onDownload }: AlbumCardProps) => { ? status === "queued" ? "Queued." : status === "error" - ? Download + ? 
Download : Loading - : Download + : Download } )} diff --git a/spotizerr-ui/src/index.css b/spotizerr-ui/src/index.css index 31436db..3bd8dcc 100644 --- a/spotizerr-ui/src/index.css +++ b/spotizerr-ui/src/index.css @@ -235,3 +235,46 @@ } } + +@layer components { + /* Artist hero banner (Spotify-like) */ + .artist-hero { + position: relative; + height: clamp(220px, 40vh, 460px); + border-radius: 0.75rem; + overflow: hidden; + background-size: cover; + background-position: center center; + background-repeat: no-repeat; + box-shadow: 0 10px 30px rgba(0,0,0,0.35); + } + .artist-hero::after { + content: ""; + position: absolute; + inset: 0; + /* top vignette and bottom darkening for readable text */ + background: linear-gradient(180deg, rgba(0,0,0,0.25) 0%, rgba(0,0,0,0.45) 55%, rgba(0,0,0,0.70) 100%); + } + .dark .artist-hero::after { + background: linear-gradient(180deg, rgba(0,0,0,0.35) 0%, rgba(0,0,0,0.55) 55%, rgba(0,0,0,0.85) 100%); + } + .artist-hero-content { + position: absolute; + left: 0; + right: 0; + bottom: 0; + padding: 1rem 1.25rem 1.5rem 1.25rem; + color: var(--color-content-inverse); + display: flex; + flex-direction: column; + gap: 0.75rem; + z-index: 1; + } + .artist-hero-title { + font-size: clamp(2rem, 7vw, 5rem); + line-height: 1; + font-weight: 800; + letter-spacing: -0.02em; + text-shadow: 0 2px 24px rgba(0,0,0,0.45); + } +} diff --git a/spotizerr-ui/src/routes/album.tsx b/spotizerr-ui/src/routes/album.tsx index 4cc1f6e..c7de14f 100644 --- a/spotizerr-ui/src/routes/album.tsx +++ b/spotizerr-ui/src/routes/album.tsx @@ -3,14 +3,14 @@ import { useEffect, useState, useContext, useRef, useCallback } from "react"; import apiClient from "../lib/api-client"; import { QueueContext, getStatus } from "../contexts/queue-context"; import { useSettings } from "../contexts/settings-context"; -import type { AlbumType, TrackType } from "../types/spotify"; +import type { LibrespotAlbumType, LibrespotTrackType } from "@/types/librespot"; import { toast } from "sonner"; import { FaArrowLeft } from "react-icons/fa"; export const Album = () => { const { albumId } = useParams({ from: "/album/$albumId" }); - const [album, setAlbum] = useState(null); - const [tracks, setTracks] = useState([]); + const [album, setAlbum] = useState(null); + const [tracks, setTracks] = useState([]); const [offset, setOffset] = useState(0); const [isLoading, setIsLoading] = useState(false); const [isLoadingMore, setIsLoadingMore] = useState(false); @@ -19,7 +19,7 @@ export const Album = () => { const { settings } = useSettings(); const loadMoreRef = useRef(null); - const PAGE_SIZE = 50; + const PAGE_SIZE = 6; if (!context) { throw new Error("useQueue must be used within a QueueProvider"); @@ -48,11 +48,28 @@ export const Album = () => { setIsLoading(true); setError(null); try { - const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=0`); - const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data; + const response = await apiClient.get(`/album/info?id=${albumId}`); + const data: LibrespotAlbumType = response.data; setAlbum(data); - setTracks(data.tracks.items || []); - setOffset((data.tracks.items || []).length); + // Tracks may be string[] (ids) or expanded track objects depending on backend + const rawTracks = data.tracks; + if (Array.isArray(rawTracks) && rawTracks.length > 0) { + if (typeof rawTracks[0] === "string") { + // fetch first page of tracks by id + const ids = (rawTracks as string[]).slice(0, 
PAGE_SIZE); + const trackResponses = await Promise.all( + ids.map((id) => apiClient.get(`/track/info?id=${id}`).then(r => r.data).catch(() => null)) + ); + setTracks(trackResponses.filter(Boolean) as LibrespotTrackType[]); + setOffset(ids.length); + } else { + setTracks((rawTracks as LibrespotTrackType[]).slice(0, PAGE_SIZE)); + setOffset(Math.min(PAGE_SIZE, (rawTracks as LibrespotTrackType[]).length)); + } + } else { + setTracks([]); + setOffset(0); + } } catch (err) { setError("Failed to load album"); console.error("Error fetching album:", err); @@ -71,20 +88,31 @@ export const Album = () => { }, [albumId]); const loadMore = useCallback(async () => { - if (!albumId || isLoadingMore || !hasMore) return; + if (!albumId || isLoadingMore || !hasMore || !album) return; setIsLoadingMore(true); try { - const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=${offset}`); - const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data; - const newItems = data.tracks.items || []; - setTracks((prev) => [...prev, ...newItems]); - setOffset((prev) => prev + newItems.length); + // If album.tracks is a list of ids, continue fetching by ids + if (Array.isArray(album.tracks) && (album.tracks.length === 0 || typeof album.tracks[0] === "string")) { + const ids = (album.tracks as string[]).slice(offset, offset + PAGE_SIZE); + const trackResponses = await Promise.all( + ids.map((id) => apiClient.get(`/track/info?id=${id}`).then(r => r.data).catch(() => null)) + ); + const newItems = trackResponses.filter(Boolean) as LibrespotTrackType[]; + setTracks((prev) => [...prev, ...newItems]); + setOffset((prev) => prev + newItems.length); + } else { + // Already expanded; append next page from in-memory array + const raw = album.tracks as LibrespotTrackType[]; + const slice = raw.slice(offset, offset + PAGE_SIZE); + setTracks((prev) => [...prev, ...slice]); + setOffset((prev) => prev + slice.length); + } } catch (err) { console.error("Error fetching more tracks:", err); } finally { setIsLoadingMore(false); } - }, [albumId, offset, isLoadingMore, hasMore]); + }, [albumId, offset, isLoadingMore, hasMore, album]); // IntersectionObserver to trigger loadMore useEffect(() => { @@ -107,7 +135,7 @@ export const Album = () => { }; }, [loadMore]); - const handleDownloadTrack = (track: TrackType) => { + const handleDownloadTrack = (track: LibrespotTrackType) => { if (!track.id) return; toast.info(`Adding ${track.name} to queue...`); addItem({ spotifyId: track.id, type: "track", name: track.name }); @@ -129,16 +157,7 @@ export const Album = () => { const isExplicitFilterEnabled = settings?.explicitFilter ?? false; - // Show placeholder for an entirely explicit album - if (isExplicitFilterEnabled && album.explicit) { - return ( -
-        <h2>Explicit Content Filtered</h2>
-        <p>This album has been filtered based on your settings.</p>
-      </div>
- ); - } - + // Not provided by librespot directly; keep feature gated by settings const hasExplicitTrack = tracks.some((track) => track.explicit); return ( @@ -178,7 +197,7 @@ export const Album = () => {

         <p>
           {new Date(album.release_date).getFullYear()} • {album.total_tracks} songs
         </p>
-        <p>{album.label}</p>
+        {album.label && <p>{album.label}</p>}
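A note on the album.tsx changes above: `LibrespotAlbumType.tracks` is a `string[]` of base62 IDs when the backend calls `get_album` with `include_tracks=False`, and full track objects otherwise, so the page has to normalize both shapes. A minimal TypeScript sketch of that normalization, assuming the `apiClient` axios instance and the `@/types/librespot` types introduced in this patch; the helper name `resolveAlbumTracksPage` is hypothetical, not part of the diff:

import type { LibrespotAlbumType, LibrespotTrackType } from "@/types/librespot";
import apiClient from "../lib/api-client";

// Resolve one page of an album's tracks, whether `tracks` holds bare
// base62 IDs (include_tracks=False) or already-expanded track objects.
export async function resolveAlbumTracksPage(
  album: LibrespotAlbumType,
  offset: number,
  pageSize: number,
): Promise<LibrespotTrackType[]> {
  const raw = album.tracks;
  if (!Array.isArray(raw) || raw.length === 0) return [];
  if (typeof raw[0] === "string") {
    // ID list: fetch each track individually, tolerating per-track failures
    const ids = (raw as string[]).slice(offset, offset + pageSize);
    const fetched = await Promise.all(
      ids.map((id) =>
        apiClient.get(`/track/info?id=${id}`).then((r) => r.data).catch(() => null),
      ),
    );
    return fetched.filter(Boolean) as LibrespotTrackType[];
  }
  // Already expanded: page out of the in-memory array
  return (raw as LibrespotTrackType[]).slice(offset, offset + pageSize);
}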
diff --git a/spotizerr-ui/src/routes/artist.tsx b/spotizerr-ui/src/routes/artist.tsx index 89c9f2a..5fa2ecb 100644 --- a/spotizerr-ui/src/routes/artist.tsx +++ b/spotizerr-ui/src/routes/artist.tsx @@ -2,17 +2,30 @@ import { Link, useParams } from "@tanstack/react-router"; import { useEffect, useState, useContext, useRef, useCallback } from "react"; import { toast } from "sonner"; import apiClient from "../lib/api-client"; -import type { AlbumType, ArtistType, TrackType } from "../types/spotify"; +import type { LibrespotAlbumType, LibrespotArtistType, LibrespotTrackType, LibrespotImage } from "@/types/librespot"; import { QueueContext, getStatus } from "../contexts/queue-context"; import { useSettings } from "../contexts/settings-context"; import { FaArrowLeft, FaBookmark, FaRegBookmark, FaDownload } from "react-icons/fa"; import { AlbumCard } from "../components/AlbumCard"; +// Narrow type for the artist info response additions +type ArtistInfoResponse = LibrespotArtistType & { + biography?: Array<{ text?: string; portrait_group?: { image?: LibrespotImage[] } }>; + portrait_group?: { image?: LibrespotImage[] }; + top_track?: Array<{ country: string; track: string[] }>; + album_group?: string[]; + single_group?: string[]; + appears_on_group?: string[]; +}; + export const Artist = () => { const { artistId } = useParams({ from: "/artist/$artistId" }); - const [artist, setArtist] = useState(null); - const [albums, setAlbums] = useState([]); - const [topTracks, setTopTracks] = useState([]); + const [artist, setArtist] = useState(null); + const [artistAlbums, setArtistAlbums] = useState([]); + const [artistSingles, setArtistSingles] = useState([]); + const [artistAppearsOn, setArtistAppearsOn] = useState([]); + const [topTracks, setTopTracks] = useState([]); + const [bannerUrl, setBannerUrl] = useState(null); const [isWatched, setIsWatched] = useState(false); const [artistStatus, setArtistStatus] = useState(null); const [error, setError] = useState(null); @@ -22,8 +35,10 @@ export const Artist = () => { const sentinelRef = useRef(null); // Pagination state - const LIMIT = 20; // tune as you like - const [offset, setOffset] = useState(0); + const ALBUM_BATCH = 12; + const [albumOffset, setAlbumOffset] = useState(0); + const [singleOffset, setSingleOffset] = useState(0); + const [appearsOffset, setAppearsOffset] = useState(0); const [loading, setLoading] = useState(false); const [loadingMore, setLoadingMore] = useState(false); const [hasMore, setHasMore] = useState(true); // assume more until we learn otherwise @@ -33,26 +48,27 @@ export const Artist = () => { } const { addItem, items } = context; + // Preload commonly used icons ASAP (before first buttons need them) + useEffect(() => { + const i = new Image(); + i.src = "/download.svg"; + return () => { /* no-op */ }; + }, []); + // Track queue status mapping const trackStatuses = topTracks.reduce((acc, t) => { const qi = items.find(item => item.downloadType === "track" && item.spotifyId === t.id); acc[t.id] = qi ? getStatus(qi) : null; return acc; }, {} as Record); - - const applyFilters = useCallback( - (items: AlbumType[]) => { - return items.filter((item) => (settings?.explicitFilter ? 
!item.explicit : true)); - }, - [settings?.explicitFilter] - ); - - // Helper to dedupe albums by id - const dedupeAppendAlbums = (current: AlbumType[], incoming: AlbumType[]) => { - const seen = new Set(current.map((a) => a.id)); - const filtered = incoming.filter((a) => !seen.has(a.id)); - return current.concat(filtered); - }; + + // Helper: fetch a batch of albums by ids + const fetchAlbumsByIds = useCallback(async (ids: string[]): Promise => { + const results = await Promise.all( + ids.map((id) => apiClient.get(`/album/info?id=${id}`).then(r => r.data).catch(() => null)) + ); + return results.filter(Boolean) as LibrespotAlbumType[]; + }, []); // Fetch artist info & first page of albums useEffect(() => { @@ -63,48 +79,90 @@ export const Artist = () => { const fetchInitial = async () => { setLoading(true); setError(null); - setAlbums([]); - setOffset(0); + setArtistAlbums([]); + setArtistSingles([]); + setArtistAppearsOn([]); + setAlbumOffset(0); + setSingleOffset(0); + setAppearsOffset(0); setHasMore(true); + setBannerUrl(null); // reset hero; will lazy-load below try { - const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=0`); - const data = resp.data; + const resp = await apiClient.get(`/artist/info?id=${artistId}`); + const data: ArtistInfoResponse = resp.data; if (cancelled) return; if (data?.id && data?.name) { // set artist meta - setArtist({ - id: data.id, - name: data.name, - images: data.images || [], - external_urls: data.external_urls || { spotify: "" }, - followers: data.followers || { total: 0 }, - genres: data.genres || [], - popularity: data.popularity || 0, - type: data.type || "artist", - uri: data.uri || "", - }); + setArtist(data); + + // Lazy-load banner image after render + const bioEntry = Array.isArray(data.biography) && data.biography.length > 0 ? data.biography[0] : undefined; + const portraitImages = data.portrait_group?.image ?? bioEntry?.portrait_group?.image ?? []; + const allImages = [...(portraitImages ?? []), ...((data.images as LibrespotImage[] | undefined) ?? [])]; + const candidateBanner = allImages.sort((a, b) => (b?.width ?? 0) - (a?.width ?? 0))[0]?.url || "/placeholder.jpg"; + // Use async preload to avoid blocking initial paint + setTimeout(() => { + const img = new Image(); + img.src = candidateBanner; + img.onload = () => { if (!cancelled) setBannerUrl(candidateBanner); }; + }, 0); // top tracks (if provided) - if (Array.isArray(data.top_tracks)) { - setTopTracks(data.top_tracks); + const topTrackIds = Array.isArray(data.top_track) && data.top_track.length > 0 + ? data.top_track[0].track.slice(0, 10) + : []; + if (topTrackIds.length) { + const tracksFull = await Promise.all( + topTrackIds.map((id) => apiClient.get(`/track/info?id=${id}`).then(r => r.data).catch(() => null)) + ); + if (!cancelled) setTopTracks(tracksFull.filter(Boolean) as LibrespotTrackType[]); } else { - setTopTracks([]); + if (!cancelled) setTopTracks([]); } - // albums pagination info - const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || []; - const total: number | undefined = data?.albums?.total; + // Progressive album loading: album -> single -> appears_on + const albumIds = data.album_group ?? []; + const singleIds = data.single_group ?? []; + const appearsIds = data.appears_on_group ?? []; - setAlbums(items); - setOffset(items.length); - if (typeof total === "number") { - setHasMore(items.length < total); - } else { - // If server didn't return total, default behavior: stop when an empty page arrives. 
- setHasMore(items.length > 0); + // Determine initial number based on screen size: 4 on small screens + const isSmallScreen = typeof window !== "undefined" && !window.matchMedia("(min-width: 640px)").matches; + const initialTarget = isSmallScreen ? 4 : ALBUM_BATCH; + + // Load initial batch from albumIds, then if needed from singles, then appears + const initialBatch: LibrespotAlbumType[] = []; + let aOff = 0, sOff = 0, apOff = 0; + if (albumIds.length > 0) { + const take = albumIds.slice(0, initialTarget); + initialBatch.push(...await fetchAlbumsByIds(take)); + aOff = take.length; + } + if (initialBatch.length < initialTarget && singleIds.length > 0) { + const remaining = initialTarget - initialBatch.length; + const take = singleIds.slice(0, remaining); + initialBatch.push(...await fetchAlbumsByIds(take)); + sOff = take.length; + } + if (initialBatch.length < initialTarget && appearsIds.length > 0) { + const remaining = initialTarget - initialBatch.length; + const take = appearsIds.slice(0, remaining); + initialBatch.push(...await fetchAlbumsByIds(take)); + apOff = take.length; + } + + if (!cancelled) { + setArtistAlbums(initialBatch.filter(a => a.album_type === "album")); + setArtistSingles(initialBatch.filter(a => a.album_type === "single")); + setArtistAppearsOn([]); // placeholder; appears_on grouping not explicitly typed + // Store offsets for next loads + setAlbumOffset(aOff); + setSingleOffset(sOff); + setAppearsOffset(apOff); + // Determine if more remain + setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (appearsIds.length > apOff)); } } else { setError("Could not load artist data."); @@ -133,28 +191,44 @@ export const Artist = () => { return () => { cancelled = true; }; - }, [artistId, LIMIT]); + }, [artistId, fetchAlbumsByIds]); // Fetch more albums (next page) const fetchMoreAlbums = useCallback(async () => { - if (!artistId || loadingMore || loading || !hasMore) return; + if (!artistId || loadingMore || loading || !hasMore || !artist) return; setLoadingMore(true); try { - const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=${offset}`); - const data = resp.data; - const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || []; - const total: number | undefined = data?.albums?.total; + const albumIds = artist.album_group ?? []; + const singleIds = artist.single_group ?? []; + const appearsIds = artist.appears_on_group ?? 
[]; - setAlbums((cur) => dedupeAppendAlbums(cur, items)); - setOffset((cur) => cur + items.length); - - if (typeof total === "number") { - setHasMore((prev) => prev && offset + items.length < total); - } else { - // if server doesn't expose total, stop when we get fewer than LIMIT items - setHasMore(items.length === LIMIT); + const nextBatch: LibrespotAlbumType[] = []; + let aOff = albumOffset, sOff = singleOffset, apOff = appearsOffset; + if (aOff < albumIds.length) { + const take = albumIds.slice(aOff, aOff + ALBUM_BATCH - nextBatch.length); + nextBatch.push(...await fetchAlbumsByIds(take)); + aOff += take.length; } + if (nextBatch.length < ALBUM_BATCH && sOff < singleIds.length) { + const remaining = ALBUM_BATCH - nextBatch.length; + const take = singleIds.slice(sOff, sOff + remaining); + nextBatch.push(...await fetchAlbumsByIds(take)); + sOff += take.length; + } + if (nextBatch.length < ALBUM_BATCH && apOff < appearsIds.length) { + const remaining = ALBUM_BATCH - nextBatch.length; + const take = appearsIds.slice(apOff, apOff + remaining); + nextBatch.push(...await fetchAlbumsByIds(take)); + apOff += take.length; + } + + setArtistAlbums((cur) => cur.concat(nextBatch.filter(a => a.album_type === "album"))); + setArtistSingles((cur) => cur.concat(nextBatch.filter(a => a.album_type === "single"))); + setAppearsOffset(apOff); + setAlbumOffset(aOff); + setSingleOffset(sOff); + setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (appearsIds.length > apOff)); } catch (err) { console.error("Failed to load more albums", err); toast.error("Failed to load more albums"); @@ -162,7 +236,7 @@ export const Artist = () => { } finally { setLoadingMore(false); } - }, [artistId, offset, LIMIT, loadingMore, loading, hasMore]); + }, [artistId, loadingMore, loading, hasMore, artist, albumOffset, singleOffset, appearsOffset, fetchAlbumsByIds]); // IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible useEffect(() => { @@ -190,13 +264,13 @@ export const Artist = () => { }, [fetchMoreAlbums, hasMore]); // --- existing handlers (unchanged) --- - const handleDownloadTrack = (track: TrackType) => { + const handleDownloadTrack = (track: LibrespotTrackType) => { if (!track.id) return; toast.info(`Adding ${track.name} to queue...`); addItem({ spotifyId: track.id, type: "track", name: track.name }); }; - const handleDownloadAlbum = (album: AlbumType) => { + const handleDownloadAlbum = (album: LibrespotAlbumType) => { toast.info(`Adding ${album.name} to queue...`); addItem({ spotifyId: album.id, type: "album", name: album.name }); }; @@ -258,11 +332,6 @@ export const Artist = () => { return
<div>Artist data could not be fully loaded. Please try again later.</div>
; } - const artistAlbums = applyFilters(albums.filter((album) => (album.album_group ?? album.album_type) === "album")); - const artistSingles = applyFilters(albums.filter((album) => (album.album_group ?? album.album_type) === "single")); - const artistCompilations = applyFilters(albums.filter((album) => (album.album_group ?? album.album_type) === "compilation")); - const artistAppearsOn = applyFilters(albums.filter((album) => (album.album_group ?? "") === "appears_on")); - return (
@@ -274,64 +343,65 @@ export const Artist = () => { Back to results
-
- {artist.images && artist.images.length > 0 && ( - {artist.name} - )} -

{artist.name}

-
- - {settings?.watch?.enabled && ( - - )} + {settings?.watch?.enabled && ( + + )} +
@@ -339,10 +409,10 @@ export const Artist = () => {

Top Tracks

- {topTracks.map((track) => ( + {topTracks.map((track, index) => (
= 5 ? "hidden sm:flex" : ""}`} > {
))} @@ -394,18 +474,6 @@ export const Artist = () => {
        )}

-      {/* Compilations */}
-      {artistCompilations.length > 0 && (
-        <section>
-          <h2>Compilations</h2>
-          <div>
-            {artistCompilations.map((album) => (
-              <AlbumCard album={album} onDownload={() => handleDownloadAlbum(album)} />
-            ))}
-          </div>
-        </section>
-      )}

      {/* Appears On */}
      {artistAppearsOn.length > 0 && (
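The progressive loading in artist.tsx above drains IDs from `album_group`, then `single_group`, then `appears_on_group`, keeping one offset per group. That bookkeeping is easiest to verify when isolated as a pure function; a sketch under the same assumptions (the name `nextAlbumIdBatch` is hypothetical, not part of this patch):

type GroupOffsets = { album: number; single: number; appears: number };

// Take the next `batch` IDs in order: albums first, then singles, then
// appears-on, advancing each group's offset as it is consumed.
export function nextAlbumIdBatch(
  groups: { album: string[]; single: string[]; appears: string[] },
  offsets: GroupOffsets,
  batch: number,
): { ids: string[]; offsets: GroupOffsets; hasMore: boolean } {
  const ids: string[] = [];
  const next: GroupOffsets = { ...offsets };
  for (const key of ["album", "single", "appears"] as const) {
    if (ids.length >= batch) break;
    const take = groups[key].slice(next[key], next[key] + (batch - ids.length));
    ids.push(...take);
    next[key] += take.length;
  }
  const hasMore =
    next.album < groups.album.length ||
    next.single < groups.single.length ||
    next.appears < groups.appears.length;
  return { ids, offsets: next, hasMore };
}

Keeping this pure means `hasMore` and the per-group offsets can be unit-tested without React state in the loop.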
diff --git a/spotizerr-ui/src/routes/playlist.tsx b/spotizerr-ui/src/routes/playlist.tsx index 7fdfe7b..a8e850b 100644 --- a/spotizerr-ui/src/routes/playlist.tsx +++ b/spotizerr-ui/src/routes/playlist.tsx @@ -3,16 +3,14 @@ import { useEffect, useState, useContext, useRef, useCallback } from "react"; import apiClient from "../lib/api-client"; import { useSettings } from "../contexts/settings-context"; import { toast } from "sonner"; -import type { TrackType, PlaylistMetadataType, PlaylistTracksResponseType, PlaylistItemType } from "../types/spotify"; +import type { LibrespotTrackType, LibrespotPlaylistType, LibrespotPlaylistItemType, LibrespotPlaylistTrackStubType } from "@/types/librespot"; import { QueueContext, getStatus } from "../contexts/queue-context"; import { FaArrowLeft } from "react-icons/fa"; - - export const Playlist = () => { const { playlistId } = useParams({ from: "/playlist/$playlistId" }); - const [playlistMetadata, setPlaylistMetadata] = useState(null); - const [tracks, setTracks] = useState([]); + const [playlistMetadata, setPlaylistMetadata] = useState(null); + const [items, setItems] = useState([]); const [isWatched, setIsWatched] = useState(false); const [error, setError] = useState(null); const [loadingTracks, setLoadingTracks] = useState(false); @@ -28,11 +26,11 @@ export const Playlist = () => { if (!context) { throw new Error("useQueue must be used within a QueueProvider"); } - const { addItem, items } = context; + const { addItem, items: queueItems } = context; // Playlist queue status const playlistQueueItem = playlistMetadata - ? items.find(item => item.downloadType === "playlist" && item.spotifyId === playlistMetadata.id) + ? queueItems.find(item => item.downloadType === "playlist" && item.spotifyId === (playlistId ?? "")) : undefined; const playlistStatus = playlistQueueItem ? 
getStatus(playlistQueueItem) : null; @@ -44,14 +42,15 @@ export const Playlist = () => { } }, [playlistStatus]); - // Load playlist metadata first + // Load playlist metadata first (no expanded items) useEffect(() => { - const fetchPlaylistMetadata = async () => { + const fetchPlaylist = async () => { if (!playlistId) return; try { - const response = await apiClient.get(`/playlist/metadata?id=${playlistId}`); - setPlaylistMetadata(response.data); - setTotalTracks(response.data.tracks.total); + const response = await apiClient.get(`/playlist/info?id=${playlistId}`); + const data = response.data; + setPlaylistMetadata(data); + setTotalTracks(data.tracks.total); } catch (err) { setError("Failed to load playlist metadata"); console.error(err); @@ -70,27 +69,49 @@ export const Playlist = () => { } }; - fetchPlaylistMetadata(); + setItems([]); + setTracksOffset(0); + setHasMoreTracks(true); + setTotalTracks(0); + setError(null); + fetchPlaylist(); checkWatchStatus(); }, [playlistId]); - // Load tracks progressively + const BATCH_SIZE = 6; + + // Load items progressively by expanding track stubs when needed const loadMoreTracks = useCallback(async () => { - if (!playlistId || loadingTracks || !hasMoreTracks) return; + if (!playlistId || loadingTracks || !hasMoreTracks || !playlistMetadata) return; setLoadingTracks(true); try { - const limit = 50; // Load 50 tracks at a time - const response = await apiClient.get( - `/playlist/tracks?id=${playlistId}&limit=${limit}&offset=${tracksOffset}` - ); + // Fetch full playlist snapshot (stub items) + const response = await apiClient.get(`/playlist/info?id=${playlistId}`); + const allItems = response.data.tracks.items; + const slice = allItems.slice(tracksOffset, tracksOffset + BATCH_SIZE); - const newTracks = response.data.items; - setTracks(prev => [...prev, ...newTracks]); - setTracksOffset(prev => prev + newTracks.length); - - // Check if we've loaded all tracks - if (tracksOffset + newTracks.length >= totalTracks) { + // Expand any stubbed track entries by fetching full track info + const expandedSlice: LibrespotPlaylistItemType[] = await Promise.all( + slice.map(async (it) => { + const t = it.track as LibrespotPlaylistTrackStubType | LibrespotTrackType; + // If track has only stub fields (no duration_ms), fetch full + if (t && (t as any).id && !("duration_ms" in (t as any))) { + try { + const full = await apiClient.get(`/track/info?id=${(t as LibrespotPlaylistTrackStubType).id}`).then(r => r.data); + return { ...it, track: full } as LibrespotPlaylistItemType; + } catch { + return it; // fallback to stub if fetch fails + } + } + return it; + }) + ); + + setItems((prev) => [...prev, ...expandedSlice]); + const loaded = tracksOffset + expandedSlice.length; + setTracksOffset(loaded); + if (loaded >= totalTracks) { setHasMoreTracks(false); } } catch (err) { @@ -99,7 +120,7 @@ export const Playlist = () => { } finally { setLoadingTracks(false); } - }, [playlistId, loadingTracks, hasMoreTracks, tracksOffset, totalTracks]); + }, [playlistId, loadingTracks, hasMoreTracks, tracksOffset, totalTracks, playlistMetadata]); // Intersection Observer for infinite scroll useEffect(() => { @@ -125,22 +146,14 @@ export const Playlist = () => { }; }, [loadMoreTracks, hasMoreTracks, loadingTracks]); - // Load initial tracks when metadata is loaded + // Kick off initial batch useEffect(() => { - if (playlistMetadata && tracks.length === 0 && totalTracks > 0) { + if (playlistMetadata && items.length === 0 && totalTracks > 0) { loadMoreTracks(); } - }, [playlistMetadata, 
tracks.length, totalTracks, loadMoreTracks]); + }, [playlistMetadata, items.length, totalTracks, loadMoreTracks]); - // Reset state when playlist ID changes - useEffect(() => { - setTracks([]); - setTracksOffset(0); - setHasMoreTracks(true); - setTotalTracks(0); - }, [playlistId]); - - const handleDownloadTrack = (track: TrackType) => { + const handleDownloadTrack = (track: LibrespotTrackType) => { if (!track?.id) return; addItem({ spotifyId: track.id, type: "track", name: track.name }); toast.info(`Adding ${track.name} to queue...`); @@ -149,7 +162,7 @@ export const Playlist = () => { const handleDownloadPlaylist = () => { if (!playlistMetadata) return; addItem({ - spotifyId: playlistMetadata.id, + spotifyId: playlistId!, type: "playlist", name: playlistMetadata.name, }); @@ -182,16 +195,19 @@ export const Playlist = () => { } // Map track download statuses - const trackStatuses = tracks.reduce((acc, { track }) => { - if (!track) return acc; - const qi = items.find(item => item.downloadType === "track" && item.spotifyId === track.id); - acc[track.id] = qi ? getStatus(qi) : null; + const trackStatuses = items.reduce((acc, { track }) => { + if (!track || (track as any).id === undefined) return acc; + const t = track as LibrespotTrackType; + const qi = queueItems.find(item => item.downloadType === "track" && item.spotifyId === t.id); + acc[t.id] = qi ? getStatus(qi) : null; return acc; }, {} as Record); - const filteredTracks = tracks.filter(({ track }) => { - if (!track) return false; - if (settings?.explicitFilter && track.explicit) return false; + const filteredItems = items.filter(({ track }) => { + const t = track as LibrespotTrackType | LibrespotPlaylistTrackStubType | null; + if (!t || (t as any).id === undefined) return false; + const full = t as LibrespotTrackType; + if (settings?.explicitFilter && full.explicit) return false; return true; }); @@ -222,7 +238,7 @@ export const Playlist = () => {

{playlistMetadata.description}

)}

- By {playlistMetadata.owner.display_name} • {playlistMetadata.followers.total.toLocaleString()} followers • {totalTracks} songs + By {playlistMetadata.owner.display_name} • {totalTracks} songs

@@ -266,37 +282,38 @@ export const Playlist = () => {

Tracks

- {tracks.length > 0 && ( + {items.length > 0 && ( - Showing {tracks.length} of {totalTracks} tracks + Showing {items.length} of {totalTracks} tracks )}
- {filteredTracks.map(({ track }, index) => { - if (!track) return null; + {filteredItems.map(({ track }, index) => { + const t = track as LibrespotTrackType; + if (!t || !t.id) return null; return (
{index + 1} - + {track.album.name}
- - {track.name} + + {t.name}

- {track.artists.map((artist, index) => ( + {t.artists.map((artist, index) => ( { > {artist.name} - {index < track.artists.length - 1 && ", "} + {index < t.artists.length - 1 && ", "} ))}

@@ -313,27 +330,27 @@ export const Playlist = () => {
- {Math.floor(track.duration_ms / 60000)}: - {((track.duration_ms % 60000) / 1000).toFixed(0).padStart(2, "0")} + {Math.floor(t.duration_ms / 60000)}: + {((t.duration_ms % 60000) / 1000).toFixed(0).padStart(2, "0")}
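The playlist page above detects stubs by the absence of `duration_ms` and upgrades them via `/track/info`, which is also what the `isPlaylistItemWithExpandedTrack` guard added below encodes. A hedged sketch of that single-item expansion step, assuming the same `apiClient` and the types from this patch (`expandItem` is a hypothetical name):

import type {
  LibrespotPlaylistItemType,
  LibrespotPlaylistTrackStubType,
  LibrespotTrackType,
} from "@/types/librespot";
import apiClient from "../lib/api-client";

// Upgrade a stubbed playlist item (track without duration_ms) to a full
// track object; keep the stub if the lookup fails.
async function expandItem(
  item: LibrespotPlaylistItemType,
): Promise<LibrespotPlaylistItemType> {
  const t = item.track as LibrespotPlaylistTrackStubType | LibrespotTrackType;
  if (!t?.id || "duration_ms" in t) return item; // already full, or no track
  try {
    const full: LibrespotTrackType = (
      await apiClient.get(`/track/info?id=${t.id}`)
    ).data;
    return { ...item, track: full };
  } catch {
    return item;
  }
}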
diff --git a/spotizerr-ui/src/types/librespot.ts b/spotizerr-ui/src/types/librespot.ts new file mode 100644 index 0000000..66f4c61 --- /dev/null +++ b/spotizerr-ui/src/types/librespot.ts @@ -0,0 +1,155 @@ +// Librespot wrapper response types for frontend consumption + +export interface LibrespotExternalUrls { + spotify: string; +} + +export interface LibrespotImage { + url: string; + width?: number; + height?: number; +} + +export interface LibrespotArtistStub { + id: string; + name: string; + type?: "artist"; + uri?: string; + external_urls?: LibrespotExternalUrls; +} + +// Full artist object (get_artist) +export interface LibrespotArtistType { + id: string; + name: string; + images?: LibrespotImage[]; + external_urls?: LibrespotExternalUrls; + followers?: { total: number }; + genres?: string[]; + popularity?: number; + type?: "artist"; + uri?: string; +} + +export interface LibrespotCopyright { + text: string; + type: string; +} + +export type LibrespotReleaseDatePrecision = "day" | "month" | "year"; + +// Minimal embedded album object returned inside track objects (does not include tracks array) +export interface LibrespotAlbumRef { + id: string; + name: string; + images?: LibrespotImage[]; + release_date?: string; + release_date_precision?: LibrespotReleaseDatePrecision; + type?: "album"; + uri?: string; + album_type?: "album" | "single" | "compilation"; + external_urls?: LibrespotExternalUrls; + artists?: LibrespotArtistStub[]; +} + +export interface LibrespotTrackType { + album: LibrespotAlbumRef; + artists: LibrespotArtistStub[]; + available_markets?: string[]; + disc_number: number; + duration_ms: number; + explicit: boolean; + external_ids?: { isrc?: string }; + external_urls: LibrespotExternalUrls; + id: string; + name: string; + popularity?: number; + track_number: number; + type: "track"; + uri: string; + preview_url?: string; + has_lyrics?: boolean; + earliest_live_timestamp?: number; + licensor_uuid?: string; // when available +} + +export interface LibrespotAlbumType { + album_type: "album" | "single" | "compilation"; + total_tracks: number; + available_markets?: string[]; + external_urls: LibrespotExternalUrls; + id: string; + images: LibrespotImage[]; + name: string; + release_date: string; + release_date_precision: LibrespotReleaseDatePrecision; + type: "album"; + uri: string; + artists: LibrespotArtistStub[]; + // When include_tracks=False -> string[] of base62 IDs + // When include_tracks=True -> LibrespotTrackType[] + tracks: string[] | LibrespotTrackType[]; + copyrights?: LibrespotCopyright[]; + external_ids?: { upc?: string }; + label?: string; + popularity?: number; +} + +// Playlist types +export interface LibrespotPlaylistOwnerType { + id: string; + type: "user"; + uri: string; + external_urls: LibrespotExternalUrls; + display_name: string; +} + +export interface LibrespotPlaylistTrackStubType { + id: string; + uri: string; // spotify:track:{id} + type: "track"; + external_urls: LibrespotExternalUrls; +} + +export interface LibrespotPlaylistItemType { + added_at: string; + added_by: LibrespotPlaylistOwnerType; + is_local: boolean; + // If expand_items=False -> LibrespotPlaylistTrackStubType + // If expand_items=True -> LibrespotTrackType + track: LibrespotPlaylistTrackStubType | LibrespotTrackType; + // Additional reference, not a Web API field + item_id?: string; +} + +export interface LibrespotPlaylistTracksPageType { + offset: number; + total: number; + items: LibrespotPlaylistItemType[]; +} + +export interface LibrespotPlaylistType { + name: string; + 
description?: string | null; + collaborative?: boolean; + images?: Array<Pick<LibrespotImage, "url"> & Partial<LibrespotImage>>; + owner: LibrespotPlaylistOwnerType; + snapshot_id: string; + tracks: LibrespotPlaylistTracksPageType; + type: "playlist"; +} + +// Type guards +export function isAlbumWithExpandedTracks( + album: LibrespotAlbumType +): album is LibrespotAlbumType & { tracks: LibrespotTrackType[] } { + const { tracks } = album as LibrespotAlbumType; + return Array.isArray(tracks) && (tracks.length === 0 || typeof tracks[0] === "object"); +} + +export function isPlaylistItemWithExpandedTrack( + item: LibrespotPlaylistItemType +): item is LibrespotPlaylistItemType & { track: LibrespotTrackType } { + const t = item.track as unknown; + return !!t && typeof t === "object" && (t as any).type === "track" && "duration_ms" in (t as any); +} \ No newline at end of file From 6c6a215e7cd180536a92254006830b852a5da737 Mon Sep 17 00:00:00 2001 From: che-pj Date: Wed, 27 Aug 2025 21:19:23 +0200 Subject: [PATCH 26/32] set default level to info --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index e523637..74e6f97 100755 --- a/app.py +++ b/app.py @@ -16,7 +16,7 @@ from dotenv import load_dotenv load_dotenv() # Parse log level from environment as early as possible, default to INFO for visibility -log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper() +log_level_str = os.getenv("LOG_LEVEL", "INFO").upper() log_level = getattr(logging, log_level_str, logging.INFO) # Set up a very basic logging config immediately, so early logs (including import/migration errors) are visible From 957928bfa050b870027697e21dc60b3dd1827034 Mon Sep 17 00:00:00 2001 From: Phlogi Date: Wed, 27 Aug 2025 09:43:01 +0200 Subject: [PATCH 27/32] refactor(api): replace direct celery tasks with queue manager in bulk add --- routes/content/bulk_add.py | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py index 52ecd12..daa0cd0 100644 --- a/routes/content/bulk_add.py +++ b/routes/content/bulk_add.py @@ -1,6 +1,6 @@ import re -from typing import List, Dict, Any -from fastapi import APIRouter, Request, Depends +from typing import List, Dict, Any, Dict, Any +from fastapi import APIRouter, Request, Depends, Request, Depends from pydantic import BaseModel import logging @@ -11,7 +11,10 @@ from routes.auth.middleware import require_auth_from_state, User from routes.utils.get_info import get_spotify_info from routes.utils.celery_queue_manager import download_queue_manager -# Assuming these imports are available for queue management and Spotify info +# Import authentication dependencies +from routes.auth.middleware import require_auth_from_state, User + +# Import queue management and Spotify info from routes.utils.get_info import ( get_client, get_track, @@ -19,6 +22,7 @@ from routes.utils.get_info import ( get_playlist, get_artist, ) +from routes.utils.celery_queue_manager import download_queue_manager router = APIRouter() logger = logging.getLogger(__name__) @@ -29,6 +33,7 @@ class BulkAddLinksRequest(BaseModel): @router.post("/bulk-add-spotify-links") +async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)): async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)): added_count = 0 failed_links = [] @@ -53,6 +58,7 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req:
Request, cur spotify_type = match.group(1) spotify_id = match.group(2) logger.debug(f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}") + logger.debug(f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}") try: # Get basic info to confirm existence and get name/artist @@ -102,13 +108,29 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur # Add to download queue using the queue manager task_id = download_queue_manager.add_task(task_data) + if task_id: + added_count += 1 + logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.") + # Prepare task data for the queue manager + task_data = { + "download_type": spotify_type, + "url": spotify_url, + "name": item_name, + "artist": artist_name, + "spotify_id": spotify_id, + "type": spotify_type, + "username": current_user.username, + "orig_request": dict(req.query_params), + } + + # Add to download queue using the queue manager + task_id = download_queue_manager.add_task(task_data) + if task_id: added_count += 1 logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.") else: - logger.warning( - f"Unsupported Spotify type for download: {spotify_type} for link: {link}" - ) + logger.warning(f"Failed to add {spotify_type} '{item_name}' ({spotify_id}) to queue.") failed_links.append(link) continue From 7b7e32c92378eb404dbc77c376c66f4dc15d4fb6 Mon Sep 17 00:00:00 2001 From: che-pj Date: Wed, 27 Aug 2025 21:39:08 +0200 Subject: [PATCH 28/32] fixup after merge/rebase to dev --- routes/content/bulk_add.py | 28 ++-------------------------- 1 file changed, 2 insertions(+), 26 deletions(-) diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py index daa0cd0..61ea837 100644 --- a/routes/content/bulk_add.py +++ b/routes/content/bulk_add.py @@ -1,5 +1,5 @@ import re -from typing import List, Dict, Any, Dict, Any +from typing import List from fastapi import APIRouter, Request, Depends, Request, Depends from pydantic import BaseModel import logging @@ -33,7 +33,6 @@ class BulkAddLinksRequest(BaseModel): @router.post("/bulk-add-spotify-links") -async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)): async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)): added_count = 0 failed_links = [] @@ -45,7 +44,7 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur # but still handle potential errors during info retrieval or unsupported types # Extract type and ID from the link directly using regex match = re.match( - r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?", + r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?", link, ) if not match: @@ -108,24 +107,6 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur # Add to download queue using the queue manager task_id = download_queue_manager.add_task(task_data) - if task_id: - added_count += 1 - logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.") - # Prepare task data for the queue manager - task_data = { - "download_type": spotify_type, - "url": spotify_url, - "name": item_name, - "artist": artist_name, - 
"spotify_id": spotify_id, - "type": spotify_type, - "username": current_user.username, - "orig_request": dict(req.query_params), - } - - # Add to download queue using the queue manager - task_id = download_queue_manager.add_task(task_data) - if task_id: added_count += 1 logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.") @@ -134,11 +115,6 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur failed_links.append(link) continue - added_count += 1 - logger.debug( - f"Added {added_count + 1}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue." - ) - except Exception as e: logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True) failed_links.append(link) From 4476d39d39329b8a8c543ba3777f4d293a4e7503 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Thu, 28 Aug 2025 07:16:05 -0600 Subject: [PATCH 29/32] fix: artist frontend rendering --- log.txt | 0 requirements.txt | 2 +- spotizerr-ui/src/routes/artist.tsx | 126 ++++++++++++++++++++-------- spotizerr-ui/src/types/librespot.ts | 5 ++ 4 files changed, 95 insertions(+), 38 deletions(-) create mode 100644 log.txt diff --git a/log.txt b/log.txt new file mode 100644 index 0000000..e69de29 diff --git a/requirements.txt b/requirements.txt index 44feb72..40d5860 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==3.1.0 +deezspot-spotizerr==3.1.2 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/spotizerr-ui/src/routes/artist.tsx b/spotizerr-ui/src/routes/artist.tsx index 5fa2ecb..072ab12 100644 --- a/spotizerr-ui/src/routes/artist.tsx +++ b/spotizerr-ui/src/routes/artist.tsx @@ -15,6 +15,7 @@ type ArtistInfoResponse = LibrespotArtistType & { top_track?: Array<{ country: string; track: string[] }>; album_group?: string[]; single_group?: string[]; + compilation_group?: string[]; appears_on_group?: string[]; }; @@ -23,6 +24,7 @@ export const Artist = () => { const [artist, setArtist] = useState(null); const [artistAlbums, setArtistAlbums] = useState([]); const [artistSingles, setArtistSingles] = useState([]); + const [artistCompilations, setArtistCompilations] = useState([]); const [artistAppearsOn, setArtistAppearsOn] = useState([]); const [topTracks, setTopTracks] = useState([]); const [bannerUrl, setBannerUrl] = useState(null); @@ -38,6 +40,7 @@ export const Artist = () => { const ALBUM_BATCH = 12; const [albumOffset, setAlbumOffset] = useState(0); const [singleOffset, setSingleOffset] = useState(0); + const [compOffset, setCompOffset] = useState(0); const [appearsOffset, setAppearsOffset] = useState(0); const [loading, setLoading] = useState(false); const [loadingMore, setLoadingMore] = useState(false); @@ -81,9 +84,11 @@ export const Artist = () => { setError(null); setArtistAlbums([]); setArtistSingles([]); + setArtistCompilations([]); setArtistAppearsOn([]); setAlbumOffset(0); setSingleOffset(0); + setCompOffset(0); setAppearsOffset(0); setHasMore(true); setBannerUrl(null); // reset hero; will lazy-load below @@ -123,46 +128,61 @@ export const Artist = () => { if (!cancelled) setTopTracks([]); } - // Progressive album loading: album -> single -> appears_on + // Progressive album loading: album -> single -> compilation -> appears_on const albumIds = data.album_group ?? []; const singleIds = data.single_group ?? []; + const compIds = data.compilation_group ?? []; const appearsIds = data.appears_on_group ?? 
[]; // Determine initial number based on screen size: 4 on small screens const isSmallScreen = typeof window !== "undefined" && !window.matchMedia("(min-width: 640px)").matches; const initialTarget = isSmallScreen ? 4 : ALBUM_BATCH; - // Load initial batch from albumIds, then if needed from singles, then appears - const initialBatch: LibrespotAlbumType[] = []; - let aOff = 0, sOff = 0, apOff = 0; - if (albumIds.length > 0) { - const take = albumIds.slice(0, initialTarget); - initialBatch.push(...await fetchAlbumsByIds(take)); + // Load initial sets from each group in order until initialTarget reached + let aOff = 0, sOff = 0, cOff = 0, apOff = 0; + let loaded = 0; + let aList: LibrespotAlbumType[] = []; + let sList: LibrespotAlbumType[] = []; + let cList: LibrespotAlbumType[] = []; + let apList: LibrespotAlbumType[] = []; + + if (albumIds.length > 0 && loaded < initialTarget) { + const take = albumIds.slice(0, initialTarget - loaded); + aList = await fetchAlbumsByIds(take); aOff = take.length; + loaded += aList.length; } - if (initialBatch.length < initialTarget && singleIds.length > 0) { - const remaining = initialTarget - initialBatch.length; - const take = singleIds.slice(0, remaining); - initialBatch.push(...await fetchAlbumsByIds(take)); + if (singleIds.length > 0 && loaded < initialTarget) { + const take = singleIds.slice(0, initialTarget - loaded); + sList = await fetchAlbumsByIds(take); sOff = take.length; + loaded += sList.length; } - if (initialBatch.length < initialTarget && appearsIds.length > 0) { - const remaining = initialTarget - initialBatch.length; - const take = appearsIds.slice(0, remaining); - initialBatch.push(...await fetchAlbumsByIds(take)); + if (compIds.length > 0 && loaded < initialTarget) { + const take = compIds.slice(0, initialTarget - loaded); + cList = await fetchAlbumsByIds(take); + cOff = take.length; + loaded += cList.length; + } + if (appearsIds.length > 0 && loaded < initialTarget) { + const take = appearsIds.slice(0, initialTarget - loaded); + apList = await fetchAlbumsByIds(take); apOff = take.length; + loaded += apList.length; } if (!cancelled) { - setArtistAlbums(initialBatch.filter(a => a.album_type === "album")); - setArtistSingles(initialBatch.filter(a => a.album_type === "single")); - setArtistAppearsOn([]); // placeholder; appears_on grouping not explicitly typed + setArtistAlbums(aList); + setArtistSingles(sList); + setArtistCompilations(cList); + setArtistAppearsOn(apList); // Store offsets for next loads setAlbumOffset(aOff); setSingleOffset(sOff); + setCompOffset(cOff); setAppearsOffset(apOff); // Determine if more remain - setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (appearsIds.length > apOff)); + setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (compIds.length > cOff) || (appearsIds.length > apOff)); } } else { setError("Could not load artist data."); @@ -201,34 +221,54 @@ export const Artist = () => { try { const albumIds = artist.album_group ?? []; const singleIds = artist.single_group ?? []; + const compIds = artist.compilation_group ?? []; const appearsIds = artist.appears_on_group ?? 
[]; - const nextBatch: LibrespotAlbumType[] = []; - let aOff = albumOffset, sOff = singleOffset, apOff = appearsOffset; - if (aOff < albumIds.length) { - const take = albumIds.slice(aOff, aOff + ALBUM_BATCH - nextBatch.length); - nextBatch.push(...await fetchAlbumsByIds(take)); + const nextA: LibrespotAlbumType[] = []; + const nextS: LibrespotAlbumType[] = []; + const nextC: LibrespotAlbumType[] = []; + const nextAp: LibrespotAlbumType[] = []; + + let aOff = albumOffset, sOff = singleOffset, cOff = compOffset, apOff = appearsOffset; + + const totalLoaded = () => nextA.length + nextS.length + nextC.length + nextAp.length; + + if (aOff < albumIds.length && totalLoaded() < ALBUM_BATCH) { + const remaining = ALBUM_BATCH - totalLoaded(); + const take = albumIds.slice(aOff, aOff + remaining); + nextA.push(...await fetchAlbumsByIds(take)); aOff += take.length; } - if (nextBatch.length < ALBUM_BATCH && sOff < singleIds.length) { - const remaining = ALBUM_BATCH - nextBatch.length; + if (sOff < singleIds.length && totalLoaded() < ALBUM_BATCH) { + const remaining = ALBUM_BATCH - totalLoaded(); const take = singleIds.slice(sOff, sOff + remaining); - nextBatch.push(...await fetchAlbumsByIds(take)); + nextS.push(...await fetchAlbumsByIds(take)); sOff += take.length; } - if (nextBatch.length < ALBUM_BATCH && apOff < appearsIds.length) { - const remaining = ALBUM_BATCH - nextBatch.length; + if (cOff < compIds.length && totalLoaded() < ALBUM_BATCH) { + const remaining = ALBUM_BATCH - totalLoaded(); + const take = compIds.slice(cOff, cOff + remaining); + nextC.push(...await fetchAlbumsByIds(take)); + cOff += take.length; + } + if (apOff < appearsIds.length && totalLoaded() < ALBUM_BATCH) { + const remaining = ALBUM_BATCH - totalLoaded(); const take = appearsIds.slice(apOff, apOff + remaining); - nextBatch.push(...await fetchAlbumsByIds(take)); + nextAp.push(...await fetchAlbumsByIds(take)); apOff += take.length; } - setArtistAlbums((cur) => cur.concat(nextBatch.filter(a => a.album_type === "album"))); - setArtistSingles((cur) => cur.concat(nextBatch.filter(a => a.album_type === "single"))); - setAppearsOffset(apOff); + setArtistAlbums((cur) => cur.concat(nextA)); + setArtistSingles((cur) => cur.concat(nextS)); + setArtistCompilations((cur) => cur.concat(nextC)); + setArtistAppearsOn((cur) => cur.concat(nextAp)); + setAlbumOffset(aOff); setSingleOffset(sOff); - setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (appearsIds.length > apOff)); + setCompOffset(cOff); + setAppearsOffset(apOff); + + setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (compIds.length > cOff) || (appearsIds.length > apOff)); } catch (err) { console.error("Failed to load more albums", err); toast.error("Failed to load more albums"); @@ -236,7 +276,7 @@ export const Artist = () => { } finally { setLoadingMore(false); } - }, [artistId, loadingMore, loading, hasMore, artist, albumOffset, singleOffset, appearsOffset, fetchAlbumsByIds]); + }, [artistId, loadingMore, loading, hasMore, artist, albumOffset, singleOffset, compOffset, appearsOffset, fetchAlbumsByIds]); // IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible useEffect(() => { @@ -474,6 +514,18 @@ export const Artist = () => {
)} + {/* Compilations */} + {artistCompilations.length > 0 && ( +
+

Compilations

+
+ {artistCompilations.map((album) => ( + handleDownloadAlbum(album)} /> + ))} +
+
+ )} + {/* Appears On */} {artistAppearsOn.length > 0 && (
@@ -494,9 +546,9 @@ export const Artist = () => { {hasMore && !loadingMore && ( )}
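The batching logic above drains the four release groups in a fixed priority order (albums, then singles, compilations, and appears-on), taking from each group only until the batch is full and keeping a per-group offset so the next load resumes where this one stopped. The same bookkeeping is easier to read in isolation; a minimal Python sketch under the same assumptions (function and variable names are illustrative, and insertion-ordered dicts stand in for the ordered group arrays):

    from typing import Dict, List, Tuple

    def fill_batch(
        groups: Dict[str, List[str]],   # group name -> album IDs, in priority order
        offsets: Dict[str, int],        # per-group cursor carried over from earlier loads
        batch_size: int,
    ) -> Tuple[Dict[str, List[str]], Dict[str, int], bool]:
        """Take up to batch_size IDs, draining groups in priority order."""
        taken: Dict[str, List[str]] = {}
        new_offsets = dict(offsets)
        remaining = batch_size
        for name, ids in groups.items():
            if remaining <= 0:
                break
            start = new_offsets.get(name, 0)
            chunk = ids[start : start + remaining]
            taken[name] = chunk
            new_offsets[name] = start + len(chunk)
            remaining -= len(chunk)
        # More pages exist while any group still has unconsumed IDs
        has_more = any(new_offsets.get(n, 0) < len(ids) for n, ids in groups.items())
        return taken, new_offsets, has_more

    groups = {"album": ["a1", "a2", "a3"], "single": ["s1"], "compilation": [], "appears_on": ["x1"]}
    taken, offsets, more = fill_batch(groups, {}, batch_size=4)
    assert taken == {"album": ["a1", "a2", "a3"], "single": ["s1"]}
    assert more    # "x1" has not been fetched yet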
diff --git a/spotizerr-ui/src/types/librespot.ts b/spotizerr-ui/src/types/librespot.ts index 66f4c61..a38864f 100644 --- a/spotizerr-ui/src/types/librespot.ts +++ b/spotizerr-ui/src/types/librespot.ts @@ -29,6 +29,11 @@ export interface LibrespotArtistType { popularity?: number; type?: "artist"; uri?: string; + // Album groups: arrays of album IDs + album_group?: string[]; + single_group?: string[]; + compilation_group?: string[]; + appears_on_group?: string[]; } export interface LibrespotCopyright { From 0b7c9d0da8be8b9ebba46f660f3fffd5eca0e5a2 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Thu, 28 Aug 2025 07:51:10 -0600 Subject: [PATCH 30/32] feat: Reimplement download artist discography per groups in artist page --- requirements.txt | 2 +- routes/content/bulk_add.py | 27 ++++-- routes/utils/album.py | 7 ++ routes/utils/artist.py | 140 ++++++++++++++++------------- routes/utils/get_info.py | 51 ----------- routes/utils/playlist.py | 8 ++ routes/utils/track.py | 7 ++ spotizerr-ui/src/routes/artist.tsx | 67 +++++++++++++- 8 files changed, 184 insertions(+), 125 deletions(-) diff --git a/requirements.txt b/requirements.txt index 40d5860..db9629c 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==3.1.2 +deezspot-spotizerr==3.1.4 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/routes/content/bulk_add.py b/routes/content/bulk_add.py index 61ea837..ac81959 100644 --- a/routes/content/bulk_add.py +++ b/routes/content/bulk_add.py @@ -1,6 +1,6 @@ import re from typing import List -from fastapi import APIRouter, Request, Depends, Request, Depends +from fastapi import APIRouter, Request, Depends from pydantic import BaseModel import logging @@ -8,11 +8,9 @@ import logging from routes.auth.middleware import require_auth_from_state, User # Import queue management and Spotify info -from routes.utils.get_info import get_spotify_info from routes.utils.celery_queue_manager import download_queue_manager # Import authentication dependencies -from routes.auth.middleware import require_auth_from_state, User # Import queue management and Spotify info from routes.utils.get_info import ( @@ -22,7 +20,6 @@ from routes.utils.get_info import ( get_playlist, get_artist, ) -from routes.utils.celery_queue_manager import download_queue_manager router = APIRouter() logger = logging.getLogger(__name__) @@ -33,7 +30,11 @@ class BulkAddLinksRequest(BaseModel): @router.post("/bulk-add-spotify-links") -async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)): +async def bulk_add_spotify_links( + request: BulkAddLinksRequest, + req: Request, + current_user: User = Depends(require_auth_from_state), +): added_count = 0 failed_links = [] total_links = len(request.links) @@ -56,8 +57,12 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur spotify_type = match.group(1) spotify_id = match.group(2) - logger.debug(f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}") - logger.debug(f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}") + logger.debug( + f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}" + ) + logger.debug( + f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}" + ) try: # Get basic info to confirm existence and get name/artist @@ -109,9 +114,13 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: 
Request, cur if task_id: added_count += 1 - logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.") + logger.debug( + f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}." + ) else: - logger.warning(f"Failed to add {spotify_type} '{item_name}' ({spotify_id}) to queue.") + logger.warning( + f"Failed to add {spotify_type} '{item_name}' ({spotify_id}) to queue." + ) failed_links.append(link) continue diff --git a/routes/utils/album.py b/routes/utils/album.py index be67078..1e51966 100755 --- a/routes/utils/album.py +++ b/routes/utils/album.py @@ -8,6 +8,7 @@ from routes.utils.credentials import ( ) from routes.utils.celery_queue_manager import get_existing_task_id from routes.utils.errors import DuplicateDownloadError +from routes.utils.celery_config import get_config_params def download_album( @@ -98,6 +99,7 @@ def download_album( spotify_client_id=global_spotify_client_id, spotify_client_secret=global_spotify_client_secret, progress_callback=progress_callback, + spotify_credentials_path=str(get_spotify_blob_path(main)), ) dl.download_albumspo( link_album=url, # Spotify URL @@ -257,6 +259,11 @@ def download_album( spotify_client_id=global_spotify_client_id, # Global Spotify keys spotify_client_secret=global_spotify_client_secret, # Global Spotify keys progress_callback=progress_callback, + spotify_credentials_path=( + str(get_spotify_blob_path(get_config_params().get("spotify"))) + if get_config_params().get("spotify") + else None + ), ) dl.download_albumdee( # Deezer URL, download via Deezer link_album=url, diff --git a/routes/utils/artist.py b/routes/utils/artist.py index 6cc2973..c5e107b 100644 --- a/routes/utils/artist.py +++ b/routes/utils/artist.py @@ -4,7 +4,7 @@ import logging from routes.utils.celery_queue_manager import download_queue_manager from routes.utils.credentials import get_credential, _get_global_spotify_api_creds from routes.utils.errors import DuplicateDownloadError -from routes.utils.get_info import get_spotify_info +from routes.utils.get_info import get_client, get_artist from deezspot.libutils.utils import get_ids, link_is_valid @@ -77,10 +77,26 @@ def get_artist_discography( log_json({"status": "error", "message": msg}) raise ValueError(msg) + # Fetch artist once and return grouped arrays without pagination try: - # Use the optimized get_spotify_info function - discography = get_spotify_info(artist_id, "artist_discography") - return discography + client = get_client() + artist_obj = get_artist(client, artist_id) + + # Normalize groups as arrays of IDs; tolerate dict shape from some sources + def normalize_group(val): + if isinstance(val, list): + return val + if isinstance(val, dict): + items = val.get("items") or val.get("releases") or [] + return items if isinstance(items, list) else [] + return [] + + return { + "album_group": normalize_group(artist_obj.get("album_group")), + "single_group": normalize_group(artist_obj.get("single_group")), + "compilation_group": normalize_group(artist_obj.get("compilation_group")), + "appears_on_group": normalize_group(artist_obj.get("appears_on_group")), + } except Exception as fetch_error: msg = f"An error occurred while fetching the discography: {fetch_error}" log_json({"status": "error", "message": msg}) @@ -120,61 +136,55 @@ def download_artist_albums(url, album_type=None, request_args=None, username=Non raise ValueError(error_msg) # Get watch config to determine which album groups to download 
- watch_config = get_watch_config() - allowed_groups = [ - g.lower() - for g in watch_config.get("watchedArtistAlbumGroup", ["album", "single"]) - ] + valid_groups = {"album", "single", "compilation", "appears_on"} + if album_type and isinstance(album_type, str): + requested = [g.strip().lower() for g in album_type.split(",") if g.strip()] + allowed_groups = [g for g in requested if g in valid_groups] + if not allowed_groups: + logger.warning( + f"album_type query provided but no valid groups found in {requested}; falling back to watch config." + ) + if not album_type or not isinstance(album_type, str) or not allowed_groups: + watch_config = get_watch_config() + allowed_groups = [ + g.lower() + for g in watch_config.get("watchedArtistAlbumGroup", ["album", "single"]) + if g.lower() in valid_groups + ] logger.info( - f"Filtering albums by watchedArtistAlbumGroup setting (exact album_group match): {allowed_groups}" + f"Filtering albums by album_type/watch setting (exact album_group match): {allowed_groups}" ) - # Fetch all artist albums with pagination + # Fetch artist and aggregate group arrays without pagination + client = get_client() + artist_obj = get_artist(client, artist_id) + + def normalize_group(val): + if isinstance(val, list): + return val + if isinstance(val, dict): + items = val.get("items") or val.get("releases") or [] + return items if isinstance(items, list) else [] + return [] + + group_key_to_type = [ + ("album_group", "album"), + ("single_group", "single"), + ("compilation_group", "compilation"), + ("appears_on_group", "appears_on"), + ] + all_artist_albums = [] - offset = 0 - limit = 50 # Spotify API limit for artist albums - - logger.info(f"Fetching all albums for artist ID: {artist_id} with pagination") - - while True: - logger.debug( - f"Fetching albums for {artist_id}. Limit: {limit}, Offset: {offset}" - ) - artist_data_page = get_spotify_info( - artist_id, "artist_discography", limit=limit, offset=offset - ) - - if not artist_data_page or not isinstance(artist_data_page.get("items"), list): - logger.warning( - f"No album items found or invalid format for artist {artist_id} at offset {offset}. Response: {artist_data_page}" + for key, group_type in group_key_to_type: + ids = normalize_group(artist_obj.get(key)) + # transform to minimal album objects with album_group tagging for filtering parity + for album_id in ids: + all_artist_albums.append( + { + "id": album_id, + "album_group": group_type, + } ) - break - - current_page_albums = artist_data_page.get("items", []) - if not current_page_albums: - logger.info( - f"No more albums on page for artist {artist_id} at offset {offset}. Total fetched so far: {len(all_artist_albums)}." - ) - break - - logger.debug( - f"Fetched {len(current_page_albums)} albums on current page for artist {artist_id}." - ) - all_artist_albums.extend(current_page_albums) - - # Check if Spotify indicates a next page URL - if artist_data_page.get("next"): - offset += limit # Increment offset by the limit used for the request - else: - logger.info( - f"No next page URL for artist {artist_id}. Pagination complete. Total albums fetched: {len(all_artist_albums)}." 
- ) - break - - if not all_artist_albums: - raise ValueError( - f"Failed to retrieve artist data or no albums found for artist ID {artist_id}" - ) # Filter albums based on the allowed types using album_group field (like in manager.py) filtered_albums = [] @@ -201,13 +211,23 @@ def download_artist_albums(url, album_type=None, request_args=None, username=Non duplicate_albums = [] for album in filtered_albums: - album_url = album.get("external_urls", {}).get("spotify", "") - album_name = album.get("name", "Unknown Album") - album_artists = album.get("artists", []) + album_id = album.get("id") + if not album_id: + logger.warning("Skipping album without ID in filtered list.") + continue + # fetch album details to construct URL and names + try: + album_obj = download_queue_manager.client.get_album( + album_id, include_tracks=False + ) # type: ignore[attr-defined] + except AttributeError: + # If download_queue_manager lacks a client, fallback to shared client + album_obj = get_client().get_album(album_id, include_tracks=False) + album_url = album_obj.get("external_urls", {}).get("spotify", "") + album_name = album_obj.get("name", "Unknown Album") + artists = album_obj.get("artists", []) or [] album_artist = ( - album_artists[0].get("name", "Unknown Artist") - if album_artists - else "Unknown Artist" + artists[0].get("name", "Unknown Artist") if artists else "Unknown Artist" ) if not album_url: diff --git a/routes/utils/get_info.py b/routes/utils/get_info.py index 778e65f..99d04e0 100644 --- a/routes/utils/get_info.py +++ b/routes/utils/get_info.py @@ -93,57 +93,6 @@ def get_playlist( return client.get_playlist(playlist_in, expand_items=expand_items) -def get_spotify_info( - spotify_id: str, - info_type: str, - limit: int = 50, - offset: int = 0, -) -> Dict[str, Any]: - """ - Thin, typed wrapper around common Spotify info lookups using the shared client. - - Currently supports: - - "artist_discography": returns a paginated view over the artist's releases - combined across album_group/single_group/compilation_group/appears_on_group. - - Returns a mapping with at least: items, total, limit, offset. - Also includes a truthy "next" key when more pages are available. - """ - client = get_client() - - if info_type == "artist_discography": - artist = client.get_artist(spotify_id) - all_items = [] - for key in ( - "album_group", - "single_group", - "compilation_group", - "appears_on_group", - ): - grp = artist.get(key) - if isinstance(grp, list): - all_items.extend(grp) - elif isinstance(grp, dict): - items = grp.get("items") or grp.get("releases") or [] - if isinstance(items, list): - all_items.extend(items) - total = len(all_items) - start = max(0, offset or 0) - page_limit = max(1, limit or 50) - end = min(total, start + page_limit) - page_items = all_items[start:end] - has_more = end < total - return { - "items": page_items, - "total": total, - "limit": page_limit, - "offset": start, - "next": bool(has_more), - } - - raise ValueError(f"Unsupported info_type: {info_type}") - - def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]: """ Fetch playlist metadata using the shared client without expanding items. 
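The normalize_group helper that replaces the paginated lookup above is what lets the discography code accept whichever shape the wrapper returns for a release group: either a bare list of album IDs, or a dict that nests them under "items" or "releases". Its contract, restated as a standalone runnable sketch (the IDs are placeholders):

    def normalize_group(val) -> list:
        """Coerce a release-group value into a flat list of album IDs."""
        if isinstance(val, list):
            return val
        if isinstance(val, dict):
            items = val.get("items") or val.get("releases") or []
            return items if isinstance(items, list) else []
        return []

    assert normalize_group(["id1", "id2"]) == ["id1", "id2"]   # bare list of IDs
    assert normalize_group({"items": ["id1"]}) == ["id1"]      # dict keyed by "items"
    assert normalize_group({"releases": ["id2"]}) == ["id2"]   # dict keyed by "releases"
    assert normalize_group(None) == []                         # anything else -> empty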
diff --git a/routes/utils/playlist.py b/routes/utils/playlist.py index efdec27..4410b22 100755 --- a/routes/utils/playlist.py +++ b/routes/utils/playlist.py @@ -3,6 +3,8 @@ from deezspot.spotloader import SpoLogin from deezspot.deezloader import DeeLogin from pathlib import Path from routes.utils.credentials import get_credential, _get_global_spotify_api_creds +from routes.utils.credentials import get_spotify_blob_path +from routes.utils.celery_config import get_config_params from routes.utils.celery_queue_manager import get_existing_task_id from routes.utils.errors import DuplicateDownloadError @@ -95,6 +97,7 @@ def download_playlist( spotify_client_id=global_spotify_client_id, spotify_client_secret=global_spotify_client_secret, progress_callback=progress_callback, + spotify_credentials_path=str(get_spotify_blob_path(main)), ) dl.download_playlistspo( link_playlist=url, # Spotify URL @@ -265,6 +268,11 @@ def download_playlist( spotify_client_id=global_spotify_client_id, # Global Spotify keys spotify_client_secret=global_spotify_client_secret, # Global Spotify keys progress_callback=progress_callback, + spotify_credentials_path=( + str(get_spotify_blob_path(get_config_params().get("spotify"))) + if get_config_params().get("spotify") + else None + ), ) dl.download_playlistdee( # Deezer URL, download via Deezer link_playlist=url, diff --git a/routes/utils/track.py b/routes/utils/track.py index 6259482..76156fe 100755 --- a/routes/utils/track.py +++ b/routes/utils/track.py @@ -6,6 +6,7 @@ from routes.utils.credentials import ( _get_global_spotify_api_creds, get_spotify_blob_path, ) +from routes.utils.celery_config import get_config_params def download_track( @@ -90,6 +91,7 @@ def download_track( spotify_client_id=global_spotify_client_id, # Global creds spotify_client_secret=global_spotify_client_secret, # Global creds progress_callback=progress_callback, + spotify_credentials_path=str(get_spotify_blob_path(main)), ) # download_trackspo means: Spotify URL, download via Deezer dl.download_trackspo( @@ -251,6 +253,11 @@ def download_track( spotify_client_id=global_spotify_client_id, # Global Spotify keys for internal Spo use by DeeLogin spotify_client_secret=global_spotify_client_secret, # Global Spotify keys progress_callback=progress_callback, + spotify_credentials_path=( + str(get_spotify_blob_path(get_config_params().get("spotify"))) + if get_config_params().get("spotify") + else None + ), ) dl.download_trackdee( # Deezer URL, download via Deezer link_track=url, diff --git a/spotizerr-ui/src/routes/artist.tsx b/spotizerr-ui/src/routes/artist.tsx index 072ab12..0a4845e 100644 --- a/spotizerr-ui/src/routes/artist.tsx +++ b/spotizerr-ui/src/routes/artist.tsx @@ -343,6 +343,25 @@ export const Artist = () => { } }; + const handleDownloadGroup = async (group: "album" | "single" | "compilation" | "appears_on") => { + if (!artistId || !artist) return; + try { + toast.info(`Queueing ${group} downloads for ${artist.name}...`); + const response = await apiClient.get(`/artist/download/${artistId}?album_type=${group}`); + const count = response.data?.queued_albums?.length ?? 0; + if (count > 0) { + toast.success(`Queued ${count} ${group}${count > 1 ? 
"s" : ""}.`); + } else { + toast.info(`No new ${group} releases to download.`); + } + } catch (error: any) { + console.error(`Failed to queue ${group} downloads:`, error); + toast.error(`Failed to queue ${group} downloads`, { + description: error.response?.data?.error || "An unexpected error occurred.", + }); + } + }; + const handleToggleWatch = async () => { if (!artistId || !artist) return; try { @@ -493,7 +512,17 @@ export const Artist = () => { {/* Albums */} {artistAlbums.length > 0 && (
-

Albums

+
+

Albums

+ +
{artistAlbums.map((album) => ( handleDownloadAlbum(album)} /> @@ -505,7 +534,17 @@ export const Artist = () => { {/* Singles */} {artistSingles.length > 0 && (
-

Singles

+
+

Singles

+ +
{artistSingles.map((album) => ( handleDownloadAlbum(album)} /> @@ -517,7 +556,17 @@ export const Artist = () => { {/* Compilations */} {artistCompilations.length > 0 && (
-

Compilations

+
+

Compilations

+ +
{artistCompilations.map((album) => ( handleDownloadAlbum(album)} /> @@ -529,7 +578,17 @@ export const Artist = () => { {/* Appears On */} {artistAppearsOn.length > 0 && (
-

Appears On

+
+

Appears On

+ +
{artistAppearsOn.map((album) => ( handleDownloadAlbum(album)} /> From f800251de143a95dadee4db396c4cac14d8d2c55 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Thu, 28 Aug 2025 08:40:39 -0600 Subject: [PATCH 31/32] fix: load playlist image on frontend --- requirements.txt | 2 +- spotizerr-ui/src/routes/album.tsx | 10 ++++++ spotizerr-ui/src/routes/artist.tsx | 10 ++++++ spotizerr-ui/src/routes/playlist.tsx | 49 +++++++++++++++++++++++++--- spotizerr-ui/src/types/librespot.ts | 1 + 5 files changed, 66 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index db9629c..d3c468d 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==3.1.4 +deezspot-spotizerr==3.1.5 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/spotizerr-ui/src/routes/album.tsx b/spotizerr-ui/src/routes/album.tsx index c7de14f..a72d7dc 100644 --- a/spotizerr-ui/src/routes/album.tsx +++ b/spotizerr-ui/src/routes/album.tsx @@ -135,6 +135,16 @@ export const Album = () => { }; }, [loadMore]); + // Auto progressive loading regardless of scroll + useEffect(() => { + if (!album) return; + if (!hasMore || isLoadingMore) return; + const t = setTimeout(() => { + loadMore(); + }, 300); + return () => clearTimeout(t); + }, [album, hasMore, isLoadingMore, loadMore]); + const handleDownloadTrack = (track: LibrespotTrackType) => { if (!track.id) return; toast.info(`Adding ${track.name} to queue...`); diff --git a/spotizerr-ui/src/routes/artist.tsx b/spotizerr-ui/src/routes/artist.tsx index 0a4845e..0c5d496 100644 --- a/spotizerr-ui/src/routes/artist.tsx +++ b/spotizerr-ui/src/routes/artist.tsx @@ -303,6 +303,16 @@ export const Artist = () => { return () => observer.disconnect(); }, [fetchMoreAlbums, hasMore]); + // Auto progressive loading regardless of scroll + useEffect(() => { + if (!artist) return; + if (!hasMore || loading || loadingMore) return; + const t = setTimeout(() => { + fetchMoreAlbums(); + }, 350); + return () => clearTimeout(t); + }, [artist, hasMore, loading, loadingMore, fetchMoreAlbums]); + // --- existing handlers (unchanged) --- const handleDownloadTrack = (track: LibrespotTrackType) => { if (!track.id) return; diff --git a/spotizerr-ui/src/routes/playlist.tsx b/spotizerr-ui/src/routes/playlist.tsx index a8e850b..38f5f85 100644 --- a/spotizerr-ui/src/routes/playlist.tsx +++ b/spotizerr-ui/src/routes/playlist.tsx @@ -153,6 +153,16 @@ export const Playlist = () => { } }, [playlistMetadata, items.length, totalTracks, loadMoreTracks]); + // Auto progressive loading regardless of scroll + useEffect(() => { + if (!playlistMetadata) return; + if (!hasMoreTracks || loadingTracks) return; + const t = setTimeout(() => { + loadMoreTracks(); + }, 300); + return () => clearTimeout(t); + }, [playlistMetadata, hasMoreTracks, loadingTracks, loadMoreTracks]); + const handleDownloadTrack = (track: LibrespotTrackType) => { if (!track?.id) return; addItem({ spotifyId: track.id, type: "track", name: track.name }); @@ -227,11 +237,40 @@ export const Playlist = () => { {/* Playlist Header - Mobile Optimized */}
- {playlistMetadata.name} + {playlistMetadata.picture ? ( + {playlistMetadata.name} + ) : ( +
+ {(Array.from( + new Map( + filteredItems + .map(({ track }) => (track as any)?.album?.images?.at(-1)?.url) + .filter((u) => !!u) + .map((u) => [u, u] as const) + ).values() + ) as string[]).slice(0, 4).map((url, i) => ( + {`Cover + ))} + {filteredItems.length === 0 && ( + {playlistMetadata.name} + )} +
+ )}

{playlistMetadata.name}

{playlistMetadata.description && ( diff --git a/spotizerr-ui/src/types/librespot.ts b/spotizerr-ui/src/types/librespot.ts index a38864f..57704b9 100644 --- a/spotizerr-ui/src/types/librespot.ts +++ b/spotizerr-ui/src/types/librespot.ts @@ -142,6 +142,7 @@ export interface LibrespotPlaylistType { snapshot_id: string; tracks: LibrespotPlaylistTracksPageType; type: "playlist"; + picture?: string; } // Type guards From f9cf953de142dc4de52fad48ffb49b2ca9ca1688 Mon Sep 17 00:00:00 2001 From: che-pj Date: Sat, 30 Aug 2025 09:32:44 +0200 Subject: [PATCH 32/32] feat(api): add per-task sse throttling and batching for robust updates --- routes/system/progress.py | 99 +++++++++++++++++++++++++++++------ routes/utils/celery_config.py | 3 +- 2 files changed, 85 insertions(+), 17 deletions(-) diff --git a/routes/system/progress.py b/routes/system/progress.py index f5d5d78..e2a6cc0 100755 --- a/routes/system/progress.py +++ b/routes/system/progress.py @@ -8,7 +8,7 @@ from typing import Set, Optional import redis import threading -from routes.utils.celery_config import REDIS_URL +from routes.utils.celery_config import REDIS_URL, get_config_params from routes.utils.celery_tasks import ( get_task_info, @@ -37,6 +37,11 @@ router = APIRouter() class SSEBroadcaster: def __init__(self): self.clients: Set[asyncio.Queue] = set() + # Per-task throttling/batching/deduplication state + self._task_state = {} # task_id -> dict with last_sent, last_event, last_send_time, scheduled_handle + # Load configurable interval + config = get_config_params() + self.sse_update_interval = float(config.get("sseUpdateIntervalSeconds", 1)) async def add_client(self, queue: asyncio.Queue): """Add a new SSE client""" @@ -49,43 +54,105 @@ class SSEBroadcaster: logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})") async def broadcast_event(self, event_data: dict): - """Broadcast an event to all connected clients""" - logger.debug( - f"SSE Broadcaster: Attempting to broadcast to {len(self.clients)} clients" - ) - + """ + Throttle, batch, and deduplicate SSE events per task. + Only emit at most 1 update/sec per task, aggregate within window, suppress redundant updates. 
+ """ if not self.clients: logger.debug("SSE Broadcaster: No clients connected, skipping broadcast") return - # Add global task counts right before broadcasting - this is the single source of truth + # Defensive: always work with a list of tasks + tasks = event_data.get("tasks", []) + if not isinstance(tasks, list): + tasks = [tasks] + + # For each task, throttle/batch/dedupe + for task in tasks: + task_id = task.get("task_id") + if not task_id: + continue + + now = time.time() + state = self._task_state.setdefault(task_id, { + "last_sent": None, + "last_event": None, + "last_send_time": 0, + "scheduled_handle": None, + }) + + # Deduplication: if event is identical to last sent, skip + if state["last_sent"] is not None and self._events_equal(state["last_sent"], task): + logger.debug(f"SSE: Deduped event for task {task_id}") + continue + + # Throttling: if within interval, batch (store as last_event, schedule send) + elapsed = now - state["last_send_time"] + if elapsed < self.sse_update_interval: + state["last_event"] = task + if state["scheduled_handle"] is None: + delay = self.sse_update_interval - elapsed + loop = asyncio.get_event_loop() + state["scheduled_handle"] = loop.call_later( + delay, lambda: asyncio.create_task(self._send_batched_event(task_id)) + ) + continue + + # Otherwise, send immediately + await self._send_event(task_id, task) + state["last_send_time"] = now + state["last_sent"] = task + state["last_event"] = None + if state["scheduled_handle"]: + state["scheduled_handle"].cancel() + state["scheduled_handle"] = None + + async def _send_batched_event(self, task_id): + state = self._task_state.get(task_id) + if not state or not state["last_event"]: + return + await self._send_event(task_id, state["last_event"]) + state["last_send_time"] = time.time() + state["last_sent"] = state["last_event"] + state["last_event"] = None + state["scheduled_handle"] = None + + async def _send_event(self, task_id, task): + # Compose event_data for this task + event_data = { + "tasks": [task], + "current_timestamp": time.time(), + "change_type": "update", + } enhanced_event_data = add_global_task_counts_to_event(event_data.copy()) event_json = json.dumps(enhanced_event_data) sse_data = f"data: {event_json}\n\n" - logger.debug( - f"SSE Broadcaster: Broadcasting event: {enhanced_event_data.get('change_type', 'unknown')} with {enhanced_event_data.get('active_tasks', 0)} active tasks" - ) - - # Send to all clients, remove disconnected ones disconnected = set() sent_count = 0 for client_queue in self.clients.copy(): try: await client_queue.put(sse_data) sent_count += 1 - logger.debug("SSE: Successfully sent to client queue") except Exception as e: logger.error(f"SSE: Failed to send to client: {e}") disconnected.add(client_queue) - - # Clean up disconnected clients for client in disconnected: self.clients.discard(client) logger.debug( - f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients" + f"SSE Broadcaster: Sent throttled/batched event for task {task_id} to {sent_count} clients" ) + def _events_equal(self, a, b): + # Compare two task dicts for deduplication (ignore timestamps) + if not isinstance(a, dict) or not isinstance(b, dict): + return False + a_copy = dict(a) + b_copy = dict(b) + a_copy.pop("timestamp", None) + b_copy.pop("timestamp", None) + return a_copy == b_copy + # Global broadcaster instance sse_broadcaster = SSEBroadcaster() diff --git a/routes/utils/celery_config.py b/routes/utils/celery_config.py index 83814fd..e97c24f 
100644 --- a/routes/utils/celery_config.py +++ b/routes/utils/celery_config.py @@ -52,6 +52,7 @@ DEFAULT_MAIN_CONFIG = { "watch": {}, "realTimeMultiplier": 0, "padNumberWidth": 3, + "sseUpdateIntervalSeconds": 1, # Configurable SSE update interval (default: 1s) } @@ -188,7 +189,7 @@ task_annotations = { "rate_limit": f"{MAX_CONCURRENT_DL}/m", }, "routes.utils.celery_tasks.trigger_sse_update_task": { - "rate_limit": "500/m", # Allow high rate for real-time SSE updates + "rate_limit": "60/m", # Throttle to 1 update/sec per task (matches SSE throttle) "default_retry_delay": 1, # Quick retry for SSE updates "max_retries": 1, # Limited retries for best-effort delivery "ignore_result": True, # Don't store results for SSE tasks
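One caveat in the throttling patch above: the trailing-edge send is scheduled with loop.call_later(delay, lambda: asyncio.create_task(self._send_batched_event(task_id))). The lambda closes over the loop variable task_id, which is late-bound, so if several tasks schedule a trailing send within one broadcast_event call, every deferred callback fires with the last task id processed in that loop. Binding the value at definition time via a default argument is the standard fix; a self-contained sketch of the pattern (the task ids are illustrative, this is not the project's code):

    import asyncio

    async def main() -> None:
        loop = asyncio.get_running_loop()
        fired: list[str] = []

        for task_id in ("task-a", "task-b"):
            # Early binding: tid is evaluated now, so each callback keeps its own id.
            # With a plain `lambda: fired.append(task_id)`, both would append "task-b".
            loop.call_later(0.01, lambda tid=task_id: fired.append(tid))

        await asyncio.sleep(0.05)
        assert fired == ["task-a", "task-b"]

    asyncio.run(main())

The deduplication side is unaffected: _events_equal strips only the volatile "timestamp" key before comparing, so a payload whose timestamp alone changed is suppressed, and the "60/m" Celery rate limit lines up with the 1-second default of sseUpdateIntervalSeconds.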