Merge branch 'performance-improvements' into gh-wf

This commit is contained in:
Phlogi
2025-08-30 13:08:45 +02:00
committed by GitHub
63 changed files with 3203 additions and 3083 deletions

View File

@@ -11,6 +11,7 @@
HOST=0.0.0.0 HOST=0.0.0.0
# Redis connection (external or internal). # Redis connection (external or internal).
# Host name 'redis' works with docker-compose.yml setup
REDIS_HOST=redis REDIS_HOST=redis
REDIS_PORT=6379 REDIS_PORT=6379
REDIS_DB=0 REDIS_DB=0
@@ -57,3 +58,8 @@ GOOGLE_CLIENT_SECRET=
# GitHub SSO (get from GitHub Developer Settings) # GitHub SSO (get from GitHub Developer Settings)
GITHUB_CLIENT_ID= GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET= GITHUB_CLIENT_SECRET=
# Log level for application logging.
# Possible values: debug, info, warning, error, critical
# Set to 'info' or 'warning' for general use. Use 'debug' for troubleshooting.
LOG_LEVEL=info

View File

@@ -64,7 +64,7 @@ Access logs via Docker:
docker logs spotizerr docker logs spotizerr
``` ```
**Log Locations:** **Log and File Locations:**
- Application Logs: `docker logs spotizerr` (main app and Celery workers) - Application Logs: `docker logs spotizerr` (main app and Celery workers)
- Individual Task Logs: `./logs/tasks/` (inside container, maps to your volume) - Individual Task Logs: `./logs/tasks/` (inside container, maps to your volume)
- Credentials: `./data/creds/` - Credentials: `./data/creds/`
@@ -74,6 +74,12 @@ docker logs spotizerr
- Download History Database: `./data/history/` - Download History Database: `./data/history/`
- Spotify Token Cache: `./.cache/` (if `SPOTIPY_CACHE_PATH` is mapped) - Spotify Token Cache: `./.cache/` (if `SPOTIPY_CACHE_PATH` is mapped)
**Global Logging Level:**
The application's global logging level can be controlled via the `LOG_LEVEL` environment variable.
Supported values (case-insensitive): `CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`, `NOTSET`.
If not set, the default logging level is `INFO`.
Example in `.env` file: `LOG_LEVEL=DEBUG`
## 🤝 Contributing ## 🤝 Contributing
1. Fork the repository 1. Fork the repository
@@ -81,6 +87,21 @@ docker logs spotizerr
3. Make your changes 3. Make your changes
4. Submit a pull request 4. Submit a pull request
## 💻 Development Setup
To run Spotizerr in development mode:
1. **Backend (API):**
* Ensure Python dependencies are installed (e.g., using `uv pip install -r requirements.txt`).
* Start a Redis server.
* Run the app inside your activated virtual env: `python3 app.py`
2. **Frontend (UI):**
* Navigate to `spotizerr-ui/`.
* Install dependencies: `pnpm install`.
* Start the development server: `pnpm dev`.
## 📄 License ## 📄 License
This project is licensed under the GPL yada yada, see [LICENSE](LICENSE) file for details. This project is licensed under the GPL yada yada, see [LICENSE](LICENSE) file for details.

77
app.py
View File

@@ -12,6 +12,34 @@ import sys
import redis import redis
import socket import socket
from urllib.parse import urlparse from urllib.parse import urlparse
from dotenv import load_dotenv
load_dotenv()
# Parse log level from environment as early as possible, default to INFO for visibility
log_level_str = os.getenv("LOG_LEVEL", "INFO").upper()
log_level = getattr(logging, log_level_str, logging.INFO)
# Set up a very basic logging config immediately, so early logs (including import/migration errors) are visible
logging.basicConfig(
level=log_level,
format="%(asctime)s [%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
stream=sys.stderr,
)
# Run DB migrations as early as possible, before importing any routers that may touch DBs
try:
from routes.migrations import run_migrations_if_needed
run_migrations_if_needed()
logging.getLogger(__name__).info(
"Database migrations executed (if needed) early in startup."
)
except Exception as e:
logging.getLogger(__name__).error(
f"Database migration step failed early in startup: {e}", exc_info=True
)
sys.exit(1)
# Apply process umask from environment as early as possible # Apply process umask from environment as early as possible
_umask_value = os.getenv("UMASK") _umask_value = os.getenv("UMASK")
@@ -22,7 +50,32 @@ if _umask_value:
# Defer logging setup; avoid failing on invalid UMASK # Defer logging setup; avoid failing on invalid UMASK
pass pass
# Import and initialize routes (this will start the watch manager) # Import and initialize routes (this will start the watch manager)
from routes.auth.credentials import router as credentials_router
from routes.auth.auth import router as auth_router
from routes.content.album import router as album_router
from routes.content.artist import router as artist_router
from routes.content.track import router as track_router
from routes.content.playlist import router as playlist_router
from routes.content.bulk_add import router as bulk_add_router
from routes.core.search import router as search_router
from routes.core.history import router as history_router
from routes.system.progress import router as prgs_router
from routes.system.config import router as config_router
# Import Celery configuration and manager
from routes.utils.celery_manager import celery_manager
from routes.utils.celery_config import REDIS_URL
# Import authentication system
from routes.auth import AUTH_ENABLED
from routes.auth.middleware import AuthMiddleware
# Import watch manager controls (start/stop) without triggering side effects
from routes.utils.watch.manager import start_watch_manager, stop_watch_manager
# Configure application-wide logging # Configure application-wide logging
@@ -48,7 +101,7 @@ def setup_logging():
# Configure root logger # Configure root logger
root_logger = logging.getLogger() root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG) root_logger.setLevel(log_level)
# Clear any existing handlers from the root logger # Clear any existing handlers from the root logger
if root_logger.hasHandlers(): if root_logger.hasHandlers():
@@ -65,12 +118,12 @@ def setup_logging():
main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8" main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
) )
file_handler.setFormatter(log_format) file_handler.setFormatter(log_format)
file_handler.setLevel(logging.INFO) file_handler.setLevel(log_level)
# Console handler for stderr # Console handler for stderr
console_handler = logging.StreamHandler(sys.stderr) console_handler = logging.StreamHandler(sys.stderr)
console_handler.setFormatter(log_format) console_handler.setFormatter(log_format)
console_handler.setLevel(logging.INFO) console_handler.setLevel(log_level)
# Add handlers to root logger # Add handlers to root logger
root_logger.addHandler(file_handler) root_logger.addHandler(file_handler)
@@ -83,12 +136,18 @@ def setup_logging():
"routes.utils.celery_manager", "routes.utils.celery_manager",
"routes.utils.celery_tasks", "routes.utils.celery_tasks",
"routes.utils.watch", "routes.utils.watch",
"uvicorn", # General Uvicorn logger
"uvicorn.access", # Uvicorn access logs
"uvicorn.error", # Uvicorn error logs
"spotizerr",
]: ]:
logger = logging.getLogger(logger_name) logger = logging.getLogger(logger_name)
logger.setLevel(logging.INFO) logger.setLevel(log_level)
logger.propagate = True # Propagate to root logger # For uvicorn.access, we explicitly set propagate to False to prevent duplicate logging
# if access_log=False is used in uvicorn.run, and to ensure our middleware handles it.
logger.propagate = False if logger_name == "uvicorn.access" else True
logging.info("Logging system initialized") logger.info("Logging system initialized")
def check_redis_connection(): def check_redis_connection():
@@ -139,6 +198,8 @@ async def lifespan(app: FastAPI):
"""Handle application startup and shutdown""" """Handle application startup and shutdown"""
# Startup # Startup
setup_logging() setup_logging()
effective_level = logging.getLevelName(log_level)
logging.getLogger(__name__).info(f"Logging system fully initialized (lifespan startup). Effective log level: {effective_level}")
# Run migrations before initializing services # Run migrations before initializing services
try: try:
@@ -241,6 +302,7 @@ def create_app():
from routes.content.album import router as album_router from routes.content.album import router as album_router
from routes.content.track import router as track_router from routes.content.track import router as track_router
from routes.content.playlist import router as playlist_router from routes.content.playlist import router as playlist_router
from routes.content.bulk_add import router as bulk_add_router
from routes.content.artist import router as artist_router from routes.content.artist import router as artist_router
from routes.system.progress import router as prgs_router from routes.system.progress import router as prgs_router
from routes.core.history import router as history_router from routes.core.history import router as history_router
@@ -263,6 +325,7 @@ def create_app():
app.include_router(album_router, prefix="/api/album", tags=["album"]) app.include_router(album_router, prefix="/api/album", tags=["album"])
app.include_router(track_router, prefix="/api/track", tags=["track"]) app.include_router(track_router, prefix="/api/track", tags=["track"])
app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"]) app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
app.include_router(bulk_add_router, prefix="/api/bulk", tags=["bulk"])
app.include_router(artist_router, prefix="/api/artist", tags=["artist"]) app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"]) app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
app.include_router(history_router, prefix="/api/history", tags=["history"]) app.include_router(history_router, prefix="/api/history", tags=["history"])
@@ -386,4 +449,4 @@ if __name__ == "__main__":
except ValueError: except ValueError:
port = 7171 port = 7171
uvicorn.run(app, host=host, port=port, log_level="info", access_log=True) uvicorn.run(app, host=host, port=port, log_level=log_level_str.lower(), access_log=False)

View File

@@ -4,41 +4,63 @@ See also: [Environment variables](environment.md)
Open Configuration in the web UI. Tabs: Open Configuration in the web UI. Tabs:
- General (admin) # General
- App version, basic info - **Default service:** Right now, the only one available is Spotify. Deezer-only mode coming soon!
- Downloads (admin) - **Active accounts:** Accounts to use for API-related things with the respective service.
- Concurrent downloads, retry behavior
- Quality/format defaults and conversion # Downloads
- Real-time mode: aligns download time with track length - **Max Concurrent Downloads:** Sets the maximum number of download tasks that can run simultaneously.
- Formatting (admin) - **Real-Time Downloading:** Matches the download duration to the actual track length, helping to avoid rate limits.
- File/folder naming patterns (examples) - **Real-Time Multiplier:** When real-time downloading is enabled, this multiplier adjusts how much faster (or slower) the download occurs compared to the track length.
- `%artist%/%album%/%tracknum%. %title%` - **Download Fallback:** Download from Deezer with a fallback to Spotify.
- `%ar_album%/%album% (%year%)/%title%` - **Recursive Quality:** When download fallback is enabled, try with lower qualities if the specified Deezer quality is not available.
- Accounts (admin) - **Separate Tracks by User:** When multi-user mode is enabled, separate every download in individual users' folders.
- Spotify: use `spotizerr-auth` to add credentials - **Spotify/Deezer Quality:** Quality to request to the service being used to download (account tier limitations apply).
- Deezer ARL (optional): - **Convert to Format:** Format to convert every file downloading.
- Chrome/Edge: DevTools → Application → Cookies → https://www.deezer.com → copy `arl` - **Bitrate:** When conversion is enabled and a lossy format is selected, this sets the bitrate with which to perform the transcoding.
- Firefox: DevTools → Storage → Cookies → https://www.deezer.com → copy `arl` - **Max Retry Attempts:** Maximum number of automatic retries to perform
- Paste ARL in Accounts - **Initial Retry Delay:** Seconds between the first failure and the first retry.
- Select main account when multiple exist - **Retry Delay Increase:** Seconds to add to the delay between retries after each failure.
- Watch (admin)
- Enable/disable watch system
- Set check intervals # Formatting
- Manually trigger checks (artists/playlists) - **Custom Directory Format:** Choose which metadata fields determine how directories are named.
- Server (admin) - **Custom Track Format:** Choose which metadata fields determine how individual track files are named.
- **Track Number Padding:** Enable or disable leading zeros for number-based metadata (e.g., `%tracknum%`, `%playlistnum%`).
- **Track Number Padding Width:** Sets how many digits to use for padded numbers. For example:
* `01. Track` (width: 2)
* `001. Track` (width: 3)
- **Artist Separator:** When a track has multiple artists (or album artists), this string will be used to separate them in both metadata and file/directory naming.
- **Save Album Cover:** Whether to save the cover as a separate `cover.jpg` file or not.
- **Use Spotify Metadata in Deezer Fallback:** Whether to use Spotify metadata when downloading from Deezer or not. It generally is better to leave this enabled, since it has no added API cost and Spotify's metadata tends to be better.
# Accounts
- **Spotify:** use `spotizerr-auth` to add credentials.
- **Deezer ARL (optional but recommended):**
- Chrome/Edge: DevTools → Application → Cookies → https://www.deezer.com → copy `arl`.
- Firefox: DevTools → Storage → Cookies → https://www.deezer.com → copy `arl`.
- Paste ARL in Accounts.
- Select main account when multiple exist.
# Watch
- Enable/disable watch system.
- Set check intervals.
- Set check chunk size.
- Set album groups to consider for watched artists.
# Server
- System info and advanced settings - System info and advanced settings
- Profile (all users when auth is enabled)
# Profile
- Change password, view role and email - Change password, view role and email
Quality formats (reference): # Quality formats (reference)
- Spotify: OGG 96k/160k/320k (320k requires Premium) - Spotify: OGG 96k/160k/320k (320k requires Premium)
- Deezer: MP3 128k/320k (320k may require Premium), FLAC (Premium) - Deezer: MP3 128k/320k (320k may require Premium), FLAC (Premium)
- Conversion: MP3/FLAC/AAC/OGG/OPUS/WAV/ALAC with custom bitrate - Conversion: MP3/FLAC/AAC/OGG/OPUS/WAV/ALAC with custom bitrate
Fallback system: # Notes
- Configure primary and fallback services
- Automatically switches if primary fails (useful for geo/account limits)
Notes:
- Explicit content filter applies in pages (e.g., hides explicit tracks on album/playlist views) - Explicit content filter applies in pages (e.g., hides explicit tracks on album/playlist views)
- Watch system must be enabled before adding items - Watch system must be enabled before adding items

View File

@@ -30,6 +30,24 @@ Location: project `.env`. Minimal reference for server admins.
- FRONTEND_URL: Public UI base (e.g. `http://127.0.0.1:7171`) - FRONTEND_URL: Public UI base (e.g. `http://127.0.0.1:7171`)
- GOOGLE_CLIENT_ID / GOOGLE_CLIENT_SECRET - GOOGLE_CLIENT_ID / GOOGLE_CLIENT_SECRET
- GITHUB_CLIENT_ID / GITHUB_CLIENT_SECRET - GITHUB_CLIENT_ID / GITHUB_CLIENT_SECRET
- Custom/Generic OAuth (set all to enable a custom provider):
- CUSTOM_SSO_CLIENT_ID / CUSTOM_SSO_CLIENT_SECRET
- CUSTOM_SSO_AUTHORIZATION_ENDPOINT
- CUSTOM_SSO_TOKEN_ENDPOINT
- CUSTOM_SSO_USERINFO_ENDPOINT
- CUSTOM_SSO_SCOPE: Comma-separated scopes (optional)
- CUSTOM_SSO_NAME: Internal provider name (optional, default `custom`)
- CUSTOM_SSO_DISPLAY_NAME: UI name (optional, default `Custom`)
- Multiple Custom/Generic OAuth providers (up to 10):
- For provider index `i` (1..10), set:
- CUSTOM_SSO_CLIENT_ID_i / CUSTOM_SSO_CLIENT_SECRET_i
- CUSTOM_SSO_AUTHORIZATION_ENDPOINT_i
- CUSTOM_SSO_TOKEN_ENDPOINT_i
- CUSTOM_SSO_USERINFO_ENDPOINT_i
- CUSTOM_SSO_SCOPE_i (optional)
- CUSTOM_SSO_NAME_i (optional, default `custom{i}`)
- CUSTOM_SSO_DISPLAY_NAME_i (optional, default `Custom {i}`)
- Login URLs will be `/api/auth/sso/login/custom/i` and callback `/api/auth/sso/callback/custom/i`.
### Tips ### Tips
- If running behind a reverse proxy, set `FRONTEND_URL` and `SSO_BASE_REDIRECT_URI` to public URLs. - If running behind a reverse proxy, set `FRONTEND_URL` and `SSO_BASE_REDIRECT_URI` to public URLs.

0
log.txt Normal file
View File

View File

@@ -1,7 +1,7 @@
fastapi==0.116.1 fastapi==0.116.1
uvicorn[standard]==0.35.0 uvicorn[standard]==0.35.0
celery==5.5.3 celery==5.5.3
deezspot-spotizerr==2.7.6 deezspot-spotizerr==3.1.5
httpx==0.28.1 httpx==0.28.1
bcrypt==4.2.1 bcrypt==4.2.1
PyJWT==2.10.1 PyJWT==2.10.1

View File

@@ -1,7 +1,3 @@
import logging import logging
# Configure basic logging for the application if not already configured
# This remains safe to execute on import
logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -1,4 +1,4 @@
from fastapi import APIRouter, HTTPException, Depends, Request from fastapi import APIRouter, HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from pydantic import BaseModel from pydantic import BaseModel
from typing import Optional, List from typing import Optional, List
@@ -14,6 +14,7 @@ security = HTTPBearer(auto_error=False)
# Include SSO sub-router # Include SSO sub-router
try: try:
from .sso import router as sso_router from .sso import router as sso_router
router.include_router(sso_router, tags=["sso"]) router.include_router(sso_router, tags=["sso"])
logging.info("SSO sub-router included in auth router") logging.info("SSO sub-router included in auth router")
except ImportError as e: except ImportError as e:
@@ -34,6 +35,7 @@ class RegisterRequest(BaseModel):
class CreateUserRequest(BaseModel): class CreateUserRequest(BaseModel):
"""Admin-only request to create users when registration is disabled""" """Admin-only request to create users when registration is disabled"""
username: str username: str
password: str password: str
email: Optional[str] = None email: Optional[str] = None
@@ -42,17 +44,20 @@ class CreateUserRequest(BaseModel):
class RoleUpdateRequest(BaseModel): class RoleUpdateRequest(BaseModel):
"""Request to update user role""" """Request to update user role"""
role: str role: str
class PasswordChangeRequest(BaseModel): class PasswordChangeRequest(BaseModel):
"""Request to change user password""" """Request to change user password"""
current_password: str current_password: str
new_password: str new_password: str
class AdminPasswordResetRequest(BaseModel): class AdminPasswordResetRequest(BaseModel):
"""Request for admin to reset user password""" """Request for admin to reset user password"""
new_password: str new_password: str
@@ -87,7 +92,7 @@ class AuthStatusResponse(BaseModel):
# Dependency to get current user # Dependency to get current user
async def get_current_user( async def get_current_user(
credentials: HTTPAuthorizationCredentials = Depends(security) credentials: HTTPAuthorizationCredentials = Depends(security),
) -> Optional[User]: ) -> Optional[User]:
"""Get current user from JWT token""" """Get current user from JWT token"""
if not AUTH_ENABLED: if not AUTH_ENABLED:
@@ -123,10 +128,7 @@ async def require_auth(current_user: User = Depends(get_current_user)) -> User:
async def require_admin(current_user: User = Depends(require_auth)) -> User: async def require_admin(current_user: User = Depends(require_auth)) -> User:
"""Require admin role - raises HTTPException if not admin""" """Require admin role - raises HTTPException if not admin"""
if current_user.role != "admin": if current_user.role != "admin":
raise HTTPException( raise HTTPException(status_code=403, detail="Admin access required")
status_code=403,
detail="Admin access required"
)
return current_user return current_user
@@ -141,11 +143,20 @@ async def auth_status(current_user: Optional[User] = Depends(get_current_user)):
try: try:
from . import sso from . import sso
sso_enabled = sso.SSO_ENABLED and AUTH_ENABLED sso_enabled = sso.SSO_ENABLED and AUTH_ENABLED
if sso.google_sso: if sso.google_sso:
sso_providers.append("google") sso_providers.append("google")
if sso.github_sso: if sso.github_sso:
sso_providers.append("github") sso_providers.append("github")
if getattr(sso, "custom_sso", None):
sso_providers.append("custom")
if getattr(sso, "custom_sso_providers", None):
if (
len(getattr(sso, "custom_sso_providers", {})) > 0
and "custom" not in sso_providers
):
sso_providers.append("custom")
except ImportError: except ImportError:
pass # SSO module not available pass # SSO module not available
@@ -155,7 +166,7 @@ async def auth_status(current_user: Optional[User] = Depends(get_current_user)):
user=UserResponse(**current_user.to_public_dict()) if current_user else None, user=UserResponse(**current_user.to_public_dict()) if current_user else None,
registration_enabled=AUTH_ENABLED and not DISABLE_REGISTRATION, registration_enabled=AUTH_ENABLED and not DISABLE_REGISTRATION,
sso_enabled=sso_enabled, sso_enabled=sso_enabled,
sso_providers=sso_providers sso_providers=sso_providers,
) )
@@ -163,23 +174,16 @@ async def auth_status(current_user: Optional[User] = Depends(get_current_user)):
async def login(request: LoginRequest): async def login(request: LoginRequest):
"""Authenticate user and return access token""" """Authenticate user and return access token"""
if not AUTH_ENABLED: if not AUTH_ENABLED:
raise HTTPException( raise HTTPException(status_code=400, detail="Authentication is disabled")
status_code=400,
detail="Authentication is disabled"
)
user = user_manager.authenticate_user(request.username, request.password) user = user_manager.authenticate_user(request.username, request.password)
if not user: if not user:
raise HTTPException( raise HTTPException(status_code=401, detail="Invalid username or password")
status_code=401,
detail="Invalid username or password"
)
access_token = token_manager.create_token(user) access_token = token_manager.create_token(user)
return LoginResponse( return LoginResponse(
access_token=access_token, access_token=access_token, user=UserResponse(**user.to_public_dict())
user=UserResponse(**user.to_public_dict())
) )
@@ -187,15 +191,12 @@ async def login(request: LoginRequest):
async def register(request: RegisterRequest): async def register(request: RegisterRequest):
"""Register a new user""" """Register a new user"""
if not AUTH_ENABLED: if not AUTH_ENABLED:
raise HTTPException( raise HTTPException(status_code=400, detail="Authentication is disabled")
status_code=400,
detail="Authentication is disabled"
)
if DISABLE_REGISTRATION: if DISABLE_REGISTRATION:
raise HTTPException( raise HTTPException(
status_code=403, status_code=403,
detail="Public registration is disabled. Contact an administrator to create an account." detail="Public registration is disabled. Contact an administrator to create an account.",
) )
# Check if this is the first user (should be admin) # Check if this is the first user (should be admin)
@@ -206,7 +207,7 @@ async def register(request: RegisterRequest):
username=request.username, username=request.username,
password=request.password, password=request.password,
email=request.email, email=request.email,
role=role role=role,
) )
if not success: if not success:
@@ -233,10 +234,7 @@ async def list_users(current_user: User = Depends(require_admin)):
async def delete_user(username: str, current_user: User = Depends(require_admin)): async def delete_user(username: str, current_user: User = Depends(require_admin)):
"""Delete a user (admin only)""" """Delete a user (admin only)"""
if username == current_user.username: if username == current_user.username:
raise HTTPException( raise HTTPException(status_code=400, detail="Cannot delete your own account")
status_code=400,
detail="Cannot delete your own account"
)
success, message = user_manager.delete_user(username) success, message = user_manager.delete_user(username)
if not success: if not success:
@@ -249,20 +247,14 @@ async def delete_user(username: str, current_user: User = Depends(require_admin)
async def update_user_role( async def update_user_role(
username: str, username: str,
request: RoleUpdateRequest, request: RoleUpdateRequest,
current_user: User = Depends(require_admin) current_user: User = Depends(require_admin),
): ):
"""Update user role (admin only)""" """Update user role (admin only)"""
if request.role not in ["user", "admin"]: if request.role not in ["user", "admin"]:
raise HTTPException( raise HTTPException(status_code=400, detail="Role must be 'user' or 'admin'")
status_code=400,
detail="Role must be 'user' or 'admin'"
)
if username == current_user.username: if username == current_user.username:
raise HTTPException( raise HTTPException(status_code=400, detail="Cannot change your own role")
status_code=400,
detail="Cannot change your own role"
)
success, message = user_manager.update_user_role(username, request.role) success, message = user_manager.update_user_role(username, request.role)
if not success: if not success:
@@ -272,26 +264,22 @@ async def update_user_role(
@router.post("/users/create", response_model=MessageResponse) @router.post("/users/create", response_model=MessageResponse)
async def create_user_admin(request: CreateUserRequest, current_user: User = Depends(require_admin)): async def create_user_admin(
request: CreateUserRequest, current_user: User = Depends(require_admin)
):
"""Create a new user (admin only) - for use when registration is disabled""" """Create a new user (admin only) - for use when registration is disabled"""
if not AUTH_ENABLED: if not AUTH_ENABLED:
raise HTTPException( raise HTTPException(status_code=400, detail="Authentication is disabled")
status_code=400,
detail="Authentication is disabled"
)
# Validate role # Validate role
if request.role not in ["user", "admin"]: if request.role not in ["user", "admin"]:
raise HTTPException( raise HTTPException(status_code=400, detail="Role must be 'user' or 'admin'")
status_code=400,
detail="Role must be 'user' or 'admin'"
)
success, message = user_manager.create_user( success, message = user_manager.create_user(
username=request.username, username=request.username,
password=request.password, password=request.password,
email=request.email, email=request.email,
role=request.role role=request.role,
) )
if not success: if not success:
@@ -309,20 +297,16 @@ async def get_profile(current_user: User = Depends(require_auth)):
@router.put("/profile/password", response_model=MessageResponse) @router.put("/profile/password", response_model=MessageResponse)
async def change_password( async def change_password(
request: PasswordChangeRequest, request: PasswordChangeRequest, current_user: User = Depends(require_auth)
current_user: User = Depends(require_auth)
): ):
"""Change current user's password""" """Change current user's password"""
if not AUTH_ENABLED: if not AUTH_ENABLED:
raise HTTPException( raise HTTPException(status_code=400, detail="Authentication is disabled")
status_code=400,
detail="Authentication is disabled"
)
success, message = user_manager.change_password( success, message = user_manager.change_password(
username=current_user.username, username=current_user.username,
current_password=request.current_password, current_password=request.current_password,
new_password=request.new_password new_password=request.new_password,
) )
if not success: if not success:
@@ -343,18 +327,14 @@ async def change_password(
async def admin_reset_password( async def admin_reset_password(
username: str, username: str,
request: AdminPasswordResetRequest, request: AdminPasswordResetRequest,
current_user: User = Depends(require_admin) current_user: User = Depends(require_admin),
): ):
"""Admin reset user password (admin only)""" """Admin reset user password (admin only)"""
if not AUTH_ENABLED: if not AUTH_ENABLED:
raise HTTPException( raise HTTPException(status_code=400, detail="Authentication is disabled")
status_code=400,
detail="Authentication is disabled"
)
success, message = user_manager.admin_reset_password( success, message = user_manager.admin_reset_password(
username=username, username=username, new_password=request.new_password
new_password=request.new_password
) )
if not success: if not success:

View File

@@ -1,17 +1,19 @@
""" """
SSO (Single Sign-On) implementation for Google and GitHub authentication SSO (Single Sign-On) implementation for Google and GitHub authentication
""" """
import os import os
import logging import logging
from typing import Optional, Dict, Any from typing import Optional, Dict, Any
from datetime import datetime, timedelta from datetime import datetime, timedelta
from fastapi import APIRouter, Request, HTTPException, Depends from fastapi import APIRouter, Request, HTTPException
from fastapi.responses import RedirectResponse from fastapi.responses import RedirectResponse
from fastapi_sso.sso.google import GoogleSSO from fastapi_sso.sso.google import GoogleSSO
from fastapi_sso.sso.github import GithubSSO from fastapi_sso.sso.github import GithubSSO
from fastapi_sso.sso.base import OpenID from fastapi_sso.sso.base import OpenID
from pydantic import BaseModel from pydantic import BaseModel
from fastapi_sso.sso.generic import create_provider
from . import user_manager, token_manager, User, AUTH_ENABLED, DISABLE_REGISTRATION from . import user_manager, token_manager, User, AUTH_ENABLED, DISABLE_REGISTRATION
@@ -25,11 +27,14 @@ GOOGLE_CLIENT_ID = os.getenv("GOOGLE_CLIENT_ID")
GOOGLE_CLIENT_SECRET = os.getenv("GOOGLE_CLIENT_SECRET") GOOGLE_CLIENT_SECRET = os.getenv("GOOGLE_CLIENT_SECRET")
GITHUB_CLIENT_ID = os.getenv("GITHUB_CLIENT_ID") GITHUB_CLIENT_ID = os.getenv("GITHUB_CLIENT_ID")
GITHUB_CLIENT_SECRET = os.getenv("GITHUB_CLIENT_SECRET") GITHUB_CLIENT_SECRET = os.getenv("GITHUB_CLIENT_SECRET")
SSO_BASE_REDIRECT_URI = os.getenv("SSO_BASE_REDIRECT_URI", "http://localhost:7171/api/auth/sso/callback") SSO_BASE_REDIRECT_URI = os.getenv(
"SSO_BASE_REDIRECT_URI", "http://localhost:7171/api/auth/sso/callback"
)
# Initialize SSO providers # Initialize SSO providers
google_sso = None google_sso = None
github_sso = None github_sso = None
custom_sso = None
if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET: if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET:
google_sso = GoogleSSO( google_sso = GoogleSSO(
@@ -47,6 +52,154 @@ if GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET:
allow_insecure_http=True, # Set to False in production with HTTPS allow_insecure_http=True, # Set to False in production with HTTPS
) )
# Custom/Generic OAuth provider configuration
CUSTOM_SSO_CLIENT_ID = os.getenv("CUSTOM_SSO_CLIENT_ID")
CUSTOM_SSO_CLIENT_SECRET = os.getenv("CUSTOM_SSO_CLIENT_SECRET")
CUSTOM_SSO_AUTHORIZATION_ENDPOINT = os.getenv("CUSTOM_SSO_AUTHORIZATION_ENDPOINT")
CUSTOM_SSO_TOKEN_ENDPOINT = os.getenv("CUSTOM_SSO_TOKEN_ENDPOINT")
CUSTOM_SSO_USERINFO_ENDPOINT = os.getenv("CUSTOM_SSO_USERINFO_ENDPOINT")
CUSTOM_SSO_SCOPE = os.getenv("CUSTOM_SSO_SCOPE") # comma-separated list
CUSTOM_SSO_NAME = os.getenv("CUSTOM_SSO_NAME", "custom")
CUSTOM_SSO_DISPLAY_NAME = os.getenv("CUSTOM_SSO_DISPLAY_NAME", "Custom")
def _default_custom_response_convertor(
userinfo: Dict[str, Any], _client=None
) -> OpenID:
"""Best-effort convertor from generic userinfo to OpenID."""
user_id = (
userinfo.get("sub")
or userinfo.get("id")
or userinfo.get("user_id")
or userinfo.get("uid")
or userinfo.get("uuid")
)
email = userinfo.get("email")
display_name = (
userinfo.get("name")
or userinfo.get("preferred_username")
or userinfo.get("login")
or email
or (str(user_id) if user_id is not None else None)
)
picture = userinfo.get("picture") or userinfo.get("avatar_url")
if not user_id and email:
user_id = email
return OpenID(
id=str(user_id) if user_id is not None else "",
email=email,
display_name=display_name,
picture=picture,
provider=CUSTOM_SSO_NAME,
)
if all(
[
CUSTOM_SSO_CLIENT_ID,
CUSTOM_SSO_CLIENT_SECRET,
CUSTOM_SSO_AUTHORIZATION_ENDPOINT,
CUSTOM_SSO_TOKEN_ENDPOINT,
CUSTOM_SSO_USERINFO_ENDPOINT,
]
):
discovery = {
"authorization_endpoint": CUSTOM_SSO_AUTHORIZATION_ENDPOINT,
"token_endpoint": CUSTOM_SSO_TOKEN_ENDPOINT,
"userinfo_endpoint": CUSTOM_SSO_USERINFO_ENDPOINT,
}
default_scope = (
[s.strip() for s in CUSTOM_SSO_SCOPE.split(",") if s.strip()]
if CUSTOM_SSO_SCOPE
else None
)
CustomProvider = create_provider(
name=CUSTOM_SSO_NAME,
discovery_document=discovery,
response_convertor=_default_custom_response_convertor,
default_scope=default_scope,
)
custom_sso = CustomProvider(
client_id=CUSTOM_SSO_CLIENT_ID,
client_secret=CUSTOM_SSO_CLIENT_SECRET,
redirect_uri=f"{SSO_BASE_REDIRECT_URI}/custom",
allow_insecure_http=True, # Set to False in production with HTTPS
)
# Support multiple indexed custom providers (CUSTOM_*_i), up to 10
custom_sso_providers: Dict[int, Dict[str, Any]] = {}
def _make_response_convertor(provider_name: str):
def _convert(userinfo: Dict[str, Any], _client=None) -> OpenID:
user_id = (
userinfo.get("sub")
or userinfo.get("id")
or userinfo.get("user_id")
or userinfo.get("uid")
or userinfo.get("uuid")
)
email = userinfo.get("email")
display_name = (
userinfo.get("name")
or userinfo.get("preferred_username")
or userinfo.get("login")
or email
or (str(user_id) if user_id is not None else None)
)
picture = userinfo.get("picture") or userinfo.get("avatar_url")
if not user_id and email:
user_id = email
return OpenID(
id=str(user_id) if user_id is not None else "",
email=email,
display_name=display_name,
picture=picture,
provider=provider_name,
)
return _convert
for i in range(1, 11):
cid = os.getenv(f"CUSTOM_SSO_CLIENT_ID_{i}")
csecret = os.getenv(f"CUSTOM_SSO_CLIENT_SECRET_{i}")
auth_ep = os.getenv(f"CUSTOM_SSO_AUTHORIZATION_ENDPOINT_{i}")
token_ep = os.getenv(f"CUSTOM_SSO_TOKEN_ENDPOINT_{i}")
userinfo_ep = os.getenv(f"CUSTOM_SSO_USERINFO_ENDPOINT_{i}")
scope_raw = os.getenv(f"CUSTOM_SSO_SCOPE_{i}")
name_i = os.getenv(f"CUSTOM_SSO_NAME_{i}", f"custom{i}")
display_name_i = os.getenv(f"CUSTOM_SSO_DISPLAY_NAME_{i}", f"Custom {i}")
if all([cid, csecret, auth_ep, token_ep, userinfo_ep]):
discovery_i = {
"authorization_endpoint": auth_ep,
"token_endpoint": token_ep,
"userinfo_endpoint": userinfo_ep,
}
default_scope_i = (
[s.strip() for s in scope_raw.split(",") if s.strip()]
if scope_raw
else None
)
ProviderClass = create_provider(
name=name_i,
discovery_document=discovery_i,
response_convertor=_make_response_convertor(name_i),
default_scope=default_scope_i,
)
provider_instance = ProviderClass(
client_id=cid,
client_secret=csecret,
redirect_uri=f"{SSO_BASE_REDIRECT_URI}/custom/{i}",
allow_insecure_http=True, # Set to False in production with HTTPS
)
custom_sso_providers[i] = {
"sso": provider_instance,
"name": name_i,
"display_name": display_name_i,
}
class MessageResponse(BaseModel): class MessageResponse(BaseModel):
message: str message: str
@@ -70,7 +223,9 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User:
# Generate username from email or use provider ID # Generate username from email or use provider ID
email = openid.email email = openid.email
if not email: if not email:
raise HTTPException(status_code=400, detail="Email is required for SSO authentication") raise HTTPException(
status_code=400, detail="Email is required for SSO authentication"
)
# Use email prefix as username, fallback to provider + id # Use email prefix as username, fallback to provider + id
username = email.split("@")[0] username = email.split("@")[0]
@@ -82,7 +237,9 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User:
users = user_manager.load_users() users = user_manager.load_users()
for user_data in users.values(): for user_data in users.values():
if user_data.get("email") == email: if user_data.get("email") == email:
existing_user = User(**{k: v for k, v in user_data.items() if k != "password_hash"}) existing_user = User(
**{k: v for k, v in user_data.items() if k != "password_hash"}
)
break break
if existing_user: if existing_user:
@@ -97,7 +254,7 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User:
if DISABLE_REGISTRATION: if DISABLE_REGISTRATION:
raise HTTPException( raise HTTPException(
status_code=403, status_code=403,
detail="Registration is disabled. Contact an administrator to create an account." detail="Registration is disabled. Contact an administrator to create an account.",
) )
# Create new user # Create new user
@@ -111,14 +268,14 @@ def create_or_update_sso_user(openid: OpenID, provider: str) -> User:
user = User( user = User(
username=username, username=username,
email=email, email=email,
role="user" # Default role for SSO users role="user", # Default role for SSO users
) )
users[username] = { users[username] = {
**user.to_dict(), **user.to_dict(),
"sso_provider": provider, "sso_provider": provider,
"sso_id": openid.id, "sso_id": openid.id,
"password_hash": None # SSO users don't have passwords "password_hash": None, # SSO users don't have passwords
} }
user_manager.save_users(users) user_manager.save_users(users)
@@ -132,25 +289,49 @@ async def sso_status():
providers = [] providers = []
if google_sso: if google_sso:
providers.append(SSOProvider( providers.append(
SSOProvider(
name="google", name="google",
display_name="Google", display_name="Google",
enabled=True, enabled=True,
login_url="/api/auth/sso/login/google" login_url="/api/auth/sso/login/google",
)) )
)
if github_sso: if github_sso:
providers.append(SSOProvider( providers.append(
SSOProvider(
name="github", name="github",
display_name="GitHub", display_name="GitHub",
enabled=True, enabled=True,
login_url="/api/auth/sso/login/github" login_url="/api/auth/sso/login/github",
)) )
)
if custom_sso:
providers.append(
SSOProvider(
name="custom",
display_name=CUSTOM_SSO_DISPLAY_NAME,
enabled=True,
login_url="/api/auth/sso/login/custom",
)
)
for idx, cfg in custom_sso_providers.items():
providers.append(
SSOProvider(
name=cfg["name"],
display_name=cfg.get("display_name", cfg["name"]),
enabled=True,
login_url=f"/api/auth/sso/login/custom/{idx}",
)
)
return SSOStatusResponse( return SSOStatusResponse(
sso_enabled=SSO_ENABLED and AUTH_ENABLED, sso_enabled=SSO_ENABLED and AUTH_ENABLED,
providers=providers, providers=providers,
registration_enabled=not DISABLE_REGISTRATION registration_enabled=not DISABLE_REGISTRATION,
) )
@@ -164,7 +345,9 @@ async def google_login():
raise HTTPException(status_code=400, detail="Google SSO is not configured") raise HTTPException(status_code=400, detail="Google SSO is not configured")
async with google_sso: async with google_sso:
return await google_sso.get_login_redirect(params={"prompt": "consent", "access_type": "offline"}) return await google_sso.get_login_redirect(
params={"prompt": "consent", "access_type": "offline"}
)
@router.get("/sso/login/github") @router.get("/sso/login/github")
@@ -180,6 +363,35 @@ async def github_login():
return await github_sso.get_login_redirect() return await github_sso.get_login_redirect()
@router.get("/sso/login/custom")
async def custom_login():
"""Initiate Custom SSO login"""
if not SSO_ENABLED or not AUTH_ENABLED:
raise HTTPException(status_code=400, detail="SSO is disabled")
if not custom_sso:
raise HTTPException(status_code=400, detail="Custom SSO is not configured")
async with custom_sso:
return await custom_sso.get_login_redirect()
@router.get("/sso/login/custom/{index}")
async def custom_login_indexed(index: int):
"""Initiate indexed Custom SSO login"""
if not SSO_ENABLED or not AUTH_ENABLED:
raise HTTPException(status_code=400, detail="SSO is disabled")
cfg = custom_sso_providers.get(index)
if not cfg:
raise HTTPException(
status_code=400, detail="Custom SSO provider not configured"
)
async with cfg["sso"]:
return await cfg["sso"].get_login_redirect()
@router.get("/sso/callback/google") @router.get("/sso/callback/google")
async def google_callback(request: Request): async def google_callback(request: Request):
"""Handle Google SSO callback""" """Handle Google SSO callback"""
@@ -210,7 +422,7 @@ async def google_callback(request: Request):
httponly=True, httponly=True,
secure=False, # Set to True in production with HTTPS secure=False, # Set to True in production with HTTPS
samesite="lax", samesite="lax",
max_age=timedelta(hours=24).total_seconds() max_age=timedelta(hours=24).total_seconds(),
) )
return response return response
@@ -218,7 +430,7 @@ async def google_callback(request: Request):
except HTTPException as e: except HTTPException as e:
# Handle specific HTTP exceptions (like registration disabled) # Handle specific HTTP exceptions (like registration disabled)
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
error_msg = e.detail if hasattr(e, 'detail') else "Authentication failed" error_msg = e.detail if hasattr(e, "detail") else "Authentication failed"
logger.warning(f"Google SSO callback error: {error_msg}") logger.warning(f"Google SSO callback error: {error_msg}")
return RedirectResponse(url=f"{frontend_url}?error={error_msg}") return RedirectResponse(url=f"{frontend_url}?error={error_msg}")
@@ -258,7 +470,7 @@ async def github_callback(request: Request):
httponly=True, httponly=True,
secure=False, # Set to True in production with HTTPS secure=False, # Set to True in production with HTTPS
samesite="lax", samesite="lax",
max_age=timedelta(hours=24).total_seconds() max_age=timedelta(hours=24).total_seconds(),
) )
return response return response
@@ -266,7 +478,7 @@ async def github_callback(request: Request):
except HTTPException as e: except HTTPException as e:
# Handle specific HTTP exceptions (like registration disabled) # Handle specific HTTP exceptions (like registration disabled)
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000") frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
error_msg = e.detail if hasattr(e, 'detail') else "Authentication failed" error_msg = e.detail if hasattr(e, "detail") else "Authentication failed"
logger.warning(f"GitHub SSO callback error: {error_msg}") logger.warning(f"GitHub SSO callback error: {error_msg}")
return RedirectResponse(url=f"{frontend_url}?error={error_msg}") return RedirectResponse(url=f"{frontend_url}?error={error_msg}")
@@ -276,6 +488,105 @@ async def github_callback(request: Request):
return RedirectResponse(url=f"{frontend_url}?error=Authentication failed") return RedirectResponse(url=f"{frontend_url}?error=Authentication failed")
@router.get("/sso/callback/custom")
async def custom_callback(request: Request):
"""Handle Custom SSO callback"""
if not SSO_ENABLED or not AUTH_ENABLED:
raise HTTPException(status_code=400, detail="SSO is disabled")
if not custom_sso:
raise HTTPException(status_code=400, detail="Custom SSO is not configured")
try:
async with custom_sso:
openid = await custom_sso.verify_and_process(request)
# Create or update user
user = create_or_update_sso_user(openid, "custom")
# Create JWT token
access_token = token_manager.create_token(user)
# Redirect to frontend with token (you might want to customize this)
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
response = RedirectResponse(url=f"{frontend_url}?token={access_token}")
# Also set as HTTP-only cookie
response.set_cookie(
key="access_token",
value=access_token,
httponly=True,
secure=False, # Set to True in production with HTTPS
samesite="lax",
max_age=timedelta(hours=24).total_seconds(),
)
return response
except HTTPException as e:
# Handle specific HTTP exceptions (like registration disabled)
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
error_msg = e.detail if hasattr(e, "detail") else "Authentication failed"
logger.warning(f"Custom SSO callback error: {error_msg}")
return RedirectResponse(url=f"{frontend_url}?error={error_msg}")
except Exception as e:
logger.error(f"Custom SSO callback error: {e}")
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
return RedirectResponse(url=f"{frontend_url}?error=Authentication failed")
@router.get("/sso/callback/custom/{index}")
async def custom_callback_indexed(request: Request, index: int):
"""Handle indexed Custom SSO callback"""
if not SSO_ENABLED or not AUTH_ENABLED:
raise HTTPException(status_code=400, detail="SSO is disabled")
cfg = custom_sso_providers.get(index)
if not cfg:
raise HTTPException(
status_code=400, detail="Custom SSO provider not configured"
)
try:
async with cfg["sso"]:
openid = await cfg["sso"].verify_and_process(request)
# Create or update user
user = create_or_update_sso_user(openid, cfg["name"])
# Create JWT token
access_token = token_manager.create_token(user)
# Redirect to frontend with token (you might want to customize this)
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
response = RedirectResponse(url=f"{frontend_url}?token={access_token}")
# Also set as HTTP-only cookie
response.set_cookie(
key="access_token",
value=access_token,
httponly=True,
secure=False, # Set to True in production with HTTPS
samesite="lax",
max_age=timedelta(hours=24).total_seconds(),
)
return response
except HTTPException as e:
# Handle specific HTTP exceptions (like registration disabled)
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
error_msg = e.detail if hasattr(e, "detail") else "Authentication failed"
logger.warning(f"Custom[{index}] SSO callback error: {error_msg}")
return RedirectResponse(url=f"{frontend_url}?error={error_msg}")
except Exception as e:
logger.error(f"Custom[{index}] SSO callback error: {e}")
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
return RedirectResponse(url=f"{frontend_url}?error=Authentication failed")
@router.post("/sso/unlink/{provider}", response_model=MessageResponse) @router.post("/sso/unlink/{provider}", response_model=MessageResponse)
async def unlink_sso_provider( async def unlink_sso_provider(
provider: str, provider: str,
@@ -285,7 +596,18 @@ async def unlink_sso_provider(
if not SSO_ENABLED or not AUTH_ENABLED: if not SSO_ENABLED or not AUTH_ENABLED:
raise HTTPException(status_code=400, detail="SSO is disabled") raise HTTPException(status_code=400, detail="SSO is disabled")
if provider not in ["google", "github"]: available = []
if google_sso:
available.append("google")
if github_sso:
available.append("github")
if custom_sso:
available.append("custom")
for cfg in custom_sso_providers.values():
available.append(cfg["name"])
if provider not in available:
raise HTTPException(status_code=400, detail="Invalid SSO provider") raise HTTPException(status_code=400, detail="Invalid SSO provider")
# Get current user from request (avoiding circular imports) # Get current user from request (avoiding circular imports)
@@ -294,7 +616,9 @@ async def unlink_sso_provider(
current_user = await require_auth_from_state(request) current_user = await require_auth_from_state(request)
if not current_user.sso_provider: if not current_user.sso_provider:
raise HTTPException(status_code=400, detail="User is not linked to any SSO provider") raise HTTPException(
status_code=400, detail="User is not linked to any SSO provider"
)
if current_user.sso_provider != provider: if current_user.sso_provider != provider:
raise HTTPException(status_code=400, detail=f"User is not linked to {provider}") raise HTTPException(status_code=400, detail=f"User is not linked to {provider}")
@@ -305,6 +629,8 @@ async def unlink_sso_provider(
users[current_user.username]["sso_provider"] = None users[current_user.username]["sso_provider"] = None
users[current_user.username]["sso_id"] = None users[current_user.username]["sso_id"] = None
user_manager.save_users(users) user_manager.save_users(users)
logger.info(f"Unlinked SSO provider {provider} from user {current_user.username}") logger.info(
f"Unlinked SSO provider {provider} from user {current_user.username}"
)
return MessageResponse(message=f"SSO provider {provider} unlinked successfully") return MessageResponse(message=f"SSO provider {provider} unlinked successfully")

View File

@@ -5,11 +5,12 @@ import uuid
import time import time
from routes.utils.celery_queue_manager import download_queue_manager from routes.utils.celery_queue_manager import download_queue_manager
from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState
from routes.utils.get_info import get_spotify_info from routes.utils.get_info import get_client, get_album
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
# Import authentication dependencies # Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, User from routes.auth.middleware import require_auth_from_state, User
# Config and credentials helpers
router = APIRouter() router = APIRouter()
@@ -34,7 +35,8 @@ async def handle_download(
# Fetch metadata from Spotify # Fetch metadata from Spotify
try: try:
album_info = get_spotify_info(album_id, "album") client = get_client()
album_info = get_album(client, album_id)
if ( if (
not album_info not album_info
or not album_info.get("name") or not album_info.get("name")
@@ -155,6 +157,7 @@ async def get_album_info(
""" """
Retrieve Spotify album metadata given a Spotify album ID. Retrieve Spotify album metadata given a Spotify album ID.
Expects a query parameter 'id' that contains the Spotify album ID. Expects a query parameter 'id' that contains the Spotify album ID.
Returns the raw JSON from get_album in routes.utils.get_info.
""" """
spotify_id = request.query_params.get("id") spotify_id = request.query_params.get("id")
@@ -162,27 +165,9 @@ async def get_album_info(
return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
try: try:
# Optional pagination params for tracks client = get_client()
limit_param = request.query_params.get("limit") album_info = get_album(client, spotify_id)
offset_param = request.query_params.get("offset")
limit = int(limit_param) if limit_param is not None else None
offset = int(offset_param) if offset_param is not None else None
# Fetch album metadata
album_info = get_spotify_info(spotify_id, "album")
# Fetch album tracks with pagination
album_tracks = get_spotify_info(
spotify_id, "album_tracks", limit=limit, offset=offset
)
# Merge tracks into album payload in the same shape Spotify returns on album
album_info["tracks"] = album_tracks
return JSONResponse(content=album_info, status_code=200) return JSONResponse(content=album_info, status_code=200)
except ValueError as ve:
return JSONResponse(
content={"error": f"Invalid limit/offset: {str(ve)}"}, status_code=400
)
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return JSONResponse(content=error_data, status_code=500) return JSONResponse(content=error_data, status_code=500)

View File

@@ -18,10 +18,9 @@ from routes.utils.watch.db import (
get_watched_artists, get_watched_artists,
add_specific_albums_to_artist_table, add_specific_albums_to_artist_table,
remove_specific_albums_from_artist_table, remove_specific_albums_from_artist_table,
is_album_in_artist_db,
) )
from routes.utils.watch.manager import check_watched_artists, get_watch_config from routes.utils.watch.manager import check_watched_artists, get_watch_config
from routes.utils.get_info import get_spotify_info from routes.utils.get_info import get_client, get_artist, get_album
# Import authentication dependencies # Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, User from routes.auth.middleware import require_auth_from_state, User
@@ -66,9 +65,6 @@ async def handle_artist_download(
) )
try: try:
# Import and call the updated download_artist_albums() function.
# from routes.utils.artist import download_artist_albums # Already imported at top
# Delegate to the download_artist_albums function which will handle album filtering # Delegate to the download_artist_albums function which will handle album filtering
successfully_queued_albums, duplicate_albums = download_artist_albums( successfully_queued_albums, duplicate_albums = download_artist_albums(
url=url, url=url,
@@ -118,13 +114,15 @@ async def cancel_artist_download():
@router.get("/info") @router.get("/info")
async def get_artist_info( async def get_artist_info(
request: Request, current_user: User = Depends(require_auth_from_state), request: Request,
current_user: User = Depends(require_auth_from_state),
limit: int = Query(10, ge=1), # default=10, must be >=1 limit: int = Query(10, ge=1), # default=10, must be >=1
offset: int = Query(0, ge=0) # default=0, must be >=0 offset: int = Query(0, ge=0), # default=0, must be >=0
): ):
""" """
Retrieves Spotify artist metadata given a Spotify artist ID. Retrieves Spotify artist metadata given a Spotify artist ID.
Expects a query parameter 'id' with the Spotify artist ID. Expects a query parameter 'id' with the Spotify artist ID.
Returns the raw JSON from get_artist in routes.utils.get_info.
""" """
spotify_id = request.query_params.get("id") spotify_id = request.query_params.get("id")
@@ -132,37 +130,8 @@ async def get_artist_info(
return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
try: try:
# Get artist metadata first client = get_client()
artist_metadata = get_spotify_info(spotify_id, "artist") artist_info = get_artist(client, spotify_id)
# Get artist discography for albums
artist_discography = get_spotify_info(spotify_id, "artist_discography", limit=limit, offset=offset)
# Combine metadata with discography
artist_info = {**artist_metadata, "albums": artist_discography}
# If artist_info is successfully fetched and has albums,
# check if the artist is watched and augment album items with is_locally_known status
if (
artist_info
and artist_info.get("albums")
and artist_info["albums"].get("items")
):
watched_artist_details = get_watched_artist(
spotify_id
) # spotify_id is the artist ID
if watched_artist_details: # Artist is being watched
for album_item in artist_info["albums"]["items"]:
if album_item and album_item.get("id"):
album_id = album_item["id"]
album_item["is_locally_known"] = is_album_in_artist_db(
spotify_id, album_id
)
elif album_item: # Album object exists but no ID
album_item["is_locally_known"] = False
# If not watched, or no albums, is_locally_known will not be added.
# Frontend should handle absence of this key as false.
return JSONResponse(content=artist_info, status_code=200) return JSONResponse(content=artist_info, status_code=200)
except Exception as e: except Exception as e:
return JSONResponse( return JSONResponse(
@@ -191,15 +160,9 @@ async def add_artist_to_watchlist(
if get_watched_artist(artist_spotify_id): if get_watched_artist(artist_spotify_id):
return {"message": f"Artist {artist_spotify_id} is already being watched."} return {"message": f"Artist {artist_spotify_id} is already being watched."}
# Get artist metadata directly for name and basic info client = get_client()
artist_metadata = get_spotify_info(artist_spotify_id, "artist") artist_metadata = get_artist(client, artist_spotify_id)
# Get artist discography for album count
artist_album_list_data = get_spotify_info(
artist_spotify_id, "artist_discography"
)
# Check if we got artist metadata
if not artist_metadata or not artist_metadata.get("name"): if not artist_metadata or not artist_metadata.get("name"):
logger.error( logger.error(
f"Could not fetch artist metadata for {artist_spotify_id} from Spotify." f"Could not fetch artist metadata for {artist_spotify_id} from Spotify."
@@ -211,24 +174,22 @@ async def add_artist_to_watchlist(
}, },
) )
# Check if we got album data # Derive a rough total album count from groups if present
if not artist_album_list_data or not isinstance( total_albums = 0
artist_album_list_data.get("items"), list for key in (
"album_group",
"single_group",
"compilation_group",
"appears_on_group",
): ):
logger.warning( grp = artist_metadata.get(key)
f"Could not fetch album list details for artist {artist_spotify_id} from Spotify. Proceeding with metadata only." if isinstance(grp, list):
) total_albums += len(grp)
# Construct the artist_data object expected by add_artist_db
artist_data_for_db = { artist_data_for_db = {
"id": artist_spotify_id, "id": artist_spotify_id,
"name": artist_metadata.get("name", "Unknown Artist"), "name": artist_metadata.get("name", "Unknown Artist"),
"albums": { # Mimic structure if add_artist_db expects it for total_albums "albums": {"total": total_albums},
"total": artist_album_list_data.get("total", 0)
if artist_album_list_data
else 0
},
# Add any other fields add_artist_db might expect from a true artist object if necessary
} }
add_artist_db(artist_data_for_db) add_artist_db(artist_data_for_db)
@@ -446,11 +407,12 @@ async def mark_albums_as_known_for_artist(
detail={"error": f"Artist {artist_spotify_id} is not being watched."}, detail={"error": f"Artist {artist_spotify_id} is not being watched."},
) )
client = get_client()
fetched_albums_details = [] fetched_albums_details = []
try:
for album_id in album_ids: for album_id in album_ids:
try: try:
# We need full album details. get_spotify_info with type "album" should provide this. album_detail = get_album(client, album_id)
album_detail = get_spotify_info(album_id, "album")
if album_detail and album_detail.get("id"): if album_detail and album_detail.get("id"):
fetched_albums_details.append(album_detail) fetched_albums_details.append(album_detail)
else: else:
@@ -461,6 +423,9 @@ async def mark_albums_as_known_for_artist(
logger.error( logger.error(
f"Failed to fetch Spotify details for album {album_id}: {e}" f"Failed to fetch Spotify details for album {album_id}: {e}"
) )
finally:
# No need to close_client here, as get_client is shared
pass
if not fetched_albums_details: if not fetched_albums_details:
return { return {

142
routes/content/bulk_add.py Normal file
View File

@@ -0,0 +1,142 @@
import re
from typing import List
from fastapi import APIRouter, Request, Depends
from pydantic import BaseModel
import logging
# Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, User
# Import queue management and Spotify info
from routes.utils.celery_queue_manager import download_queue_manager
# Import authentication dependencies
# Import queue management and Spotify info
from routes.utils.get_info import (
get_client,
get_track,
get_album,
get_playlist,
get_artist,
)
router = APIRouter()
logger = logging.getLogger(__name__)
class BulkAddLinksRequest(BaseModel):
links: List[str]
@router.post("/bulk-add-spotify-links")
async def bulk_add_spotify_links(
request: BulkAddLinksRequest,
req: Request,
current_user: User = Depends(require_auth_from_state),
):
added_count = 0
failed_links = []
total_links = len(request.links)
client = get_client()
for link in request.links:
# Assuming links are pre-filtered by the frontend,
# but still handle potential errors during info retrieval or unsupported types
# Extract type and ID from the link directly using regex
match = re.match(
r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?",
link,
)
if not match:
logger.warning(
f"Could not parse Spotify link (unexpected format after frontend filter): {link}"
)
failed_links.append(link)
continue
spotify_type = match.group(1)
spotify_id = match.group(2)
logger.debug(
f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}"
)
logger.debug(
f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}"
)
try:
# Get basic info to confirm existence and get name/artist
if spotify_type == "playlist":
item_info = get_playlist(client, spotify_id, expand_items=False)
elif spotify_type == "track":
item_info = get_track(client, spotify_id)
elif spotify_type == "album":
item_info = get_album(client, spotify_id)
elif spotify_type == "artist":
# Not queued below, but fetch to validate link and name if needed
item_info = get_artist(client, spotify_id)
else:
logger.warning(
f"Unsupported Spotify type: {spotify_type} for link: {link}"
)
failed_links.append(link)
continue
item_name = item_info.get("name", "Unknown Name")
artist_name = ""
if spotify_type in ["track", "album"]:
artists = item_info.get("artists", [])
if artists:
artist_name = ", ".join(
[a.get("name", "Unknown Artist") for a in artists]
)
elif spotify_type == "playlist":
owner = item_info.get("owner", {})
artist_name = owner.get("display_name", "Unknown Owner")
# Construct URL for the download task
spotify_url = f"https://open.spotify.com/{spotify_type}/{spotify_id}"
# Prepare task data for the queue manager
task_data = {
"download_type": spotify_type,
"url": spotify_url,
"name": item_name,
"artist": artist_name,
"spotify_id": spotify_id,
"type": spotify_type,
"username": current_user.username,
"orig_request": dict(req.query_params),
}
# Add to download queue using the queue manager
task_id = download_queue_manager.add_task(task_data)
if task_id:
added_count += 1
logger.debug(
f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}."
)
else:
logger.warning(
f"Failed to add {spotify_type} '{item_name}' ({spotify_id}) to queue."
)
failed_links.append(link)
continue
except Exception as e:
logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True)
failed_links.append(link)
message = f"Successfully added {added_count}/{total_links} links to queue."
if failed_links:
message += f" Failed to add {len(failed_links)} links."
logger.warning(f"Bulk add completed with {len(failed_links)} failures.")
else:
logger.info(f"Bulk add completed successfully. Added {added_count} links.")
return {
"message": message,
"count": added_count,
"failed_links": failed_links,
}

View File

@@ -1,6 +1,5 @@
from fastapi import APIRouter, HTTPException, Request, Depends from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
import json
import traceback import traceback
import logging # Added logging import import logging # Added logging import
import uuid # For generating error task IDs import uuid # For generating error task IDs
@@ -20,10 +19,9 @@ from routes.utils.watch.db import (
get_watched_playlist, get_watched_playlist,
get_watched_playlists, get_watched_playlists,
add_specific_tracks_to_playlist_table, add_specific_tracks_to_playlist_table,
remove_specific_tracks_from_playlist_table, remove_specific_tracks_from_playlist_table, # Added import
is_track_in_playlist_db, # Added import
) )
from routes.utils.get_info import get_spotify_info # Already used, but ensure it's here from routes.utils.get_info import get_client, get_playlist, get_track
from routes.utils.watch.manager import ( from routes.utils.watch.manager import (
check_watched_playlists, check_watched_playlists,
get_watch_config, get_watch_config,
@@ -31,7 +29,9 @@ from routes.utils.watch.manager import (
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
# Import authentication dependencies # Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, require_admin_from_state, User from routes.auth.middleware import require_auth_from_state, User
from routes.utils.celery_config import get_config_params
from routes.utils.credentials import get_spotify_blob_path
logger = logging.getLogger(__name__) # Added logger initialization logger = logging.getLogger(__name__) # Added logger initialization
router = APIRouter() router = APIRouter()
@@ -43,7 +43,11 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
@router.get("/download/{playlist_id}") @router.get("/download/{playlist_id}")
async def handle_download(playlist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): async def handle_download(
playlist_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
# Retrieve essential parameters from the request. # Retrieve essential parameters from the request.
# name = request.args.get('name') # Removed # name = request.args.get('name') # Removed
# artist = request.args.get('artist') # Removed # artist = request.args.get('artist') # Removed
@@ -51,11 +55,14 @@ async def handle_download(playlist_id: str, request: Request, current_user: User
# Construct the URL from playlist_id # Construct the URL from playlist_id
url = construct_spotify_url(playlist_id, "playlist") url = construct_spotify_url(playlist_id, "playlist")
orig_params["original_url"] = str(request.url) # Update original_url to the constructed one orig_params["original_url"] = str(
request.url
) # Update original_url to the constructed one
# Fetch metadata from Spotify using optimized function # Fetch metadata from Spotify using optimized function
try: try:
from routes.utils.get_info import get_playlist_metadata from routes.utils.get_info import get_playlist_metadata
playlist_info = get_playlist_metadata(playlist_id) playlist_info = get_playlist_metadata(playlist_id)
if ( if (
not playlist_info not playlist_info
@@ -66,7 +73,7 @@ async def handle_download(playlist_id: str, request: Request, current_user: User
content={ content={
"error": f"Could not retrieve metadata for playlist ID: {playlist_id}" "error": f"Could not retrieve metadata for playlist ID: {playlist_id}"
}, },
status_code=404 status_code=404,
) )
name_from_spotify = playlist_info.get("name") name_from_spotify = playlist_info.get("name")
@@ -79,14 +86,13 @@ async def handle_download(playlist_id: str, request: Request, current_user: User
content={ content={
"error": f"Failed to fetch metadata for playlist {playlist_id}: {str(e)}" "error": f"Failed to fetch metadata for playlist {playlist_id}: {str(e)}"
}, },
status_code=500 status_code=500,
) )
# Validate required parameters # Validate required parameters
if not url: # This check might be redundant now but kept for safety if not url: # This check might be redundant now but kept for safety
return JSONResponse( return JSONResponse(
content={"error": "Missing required parameter: url"}, content={"error": "Missing required parameter: url"}, status_code=400
status_code=400
) )
try: try:
@@ -106,7 +112,7 @@ async def handle_download(playlist_id: str, request: Request, current_user: User
"error": "Duplicate download detected.", "error": "Duplicate download detected.",
"existing_task": e.existing_task, "existing_task": e.existing_task,
}, },
status_code=409 status_code=409,
) )
except Exception as e: except Exception as e:
# Generic error handling for other issues during task submission # Generic error handling for other issues during task submission
@@ -136,25 +142,23 @@ async def handle_download(playlist_id: str, request: Request, current_user: User
"error": f"Failed to queue playlist download: {str(e)}", "error": f"Failed to queue playlist download: {str(e)}",
"task_id": error_task_id, "task_id": error_task_id,
}, },
status_code=500 status_code=500,
) )
return JSONResponse( return JSONResponse(content={"task_id": task_id}, status_code=202)
content={"task_id": task_id},
status_code=202
)
@router.get("/download/cancel") @router.get("/download/cancel")
async def cancel_download(request: Request, current_user: User = Depends(require_auth_from_state)): async def cancel_download(
request: Request, current_user: User = Depends(require_auth_from_state)
):
""" """
Cancel a running playlist download process by its task id. Cancel a running playlist download process by its task id.
""" """
task_id = request.query_params.get("task_id") task_id = request.query_params.get("task_id")
if not task_id: if not task_id:
return JSONResponse( return JSONResponse(
content={"error": "Missing task id (task_id) parameter"}, content={"error": "Missing task id (task_id) parameter"}, status_code=400
status_code=400
) )
# Use the queue manager's cancellation method. # Use the queue manager's cancellation method.
@@ -165,124 +169,94 @@ async def cancel_download(request: Request, current_user: User = Depends(require
@router.get("/info") @router.get("/info")
async def get_playlist_info(request: Request, current_user: User = Depends(require_auth_from_state)): async def get_playlist_info(
request: Request, current_user: User = Depends(require_auth_from_state)
):
""" """
Retrieve Spotify playlist metadata given a Spotify playlist ID. Retrieve Spotify playlist metadata given a Spotify playlist ID.
Expects a query parameter 'id' that contains the Spotify playlist ID. Expects a query parameter 'id' that contains the Spotify playlist ID.
""" Always returns the raw JSON from get_playlist with expand_items=False.
spotify_id = request.query_params.get("id")
include_tracks = request.query_params.get("include_tracks", "false").lower() == "true"
if not spotify_id:
return JSONResponse(
content={"error": "Missing parameter: id"},
status_code=400
)
try:
# Use the optimized playlist info function
from routes.utils.get_info import get_playlist_info_optimized
playlist_info = get_playlist_info_optimized(spotify_id, include_tracks=include_tracks)
# If playlist_info is successfully fetched, check if it's watched
# and augment track items with is_locally_known status
if playlist_info and playlist_info.get("id"):
watched_playlist_details = get_watched_playlist(playlist_info["id"])
if watched_playlist_details: # Playlist is being watched
if playlist_info.get("tracks") and playlist_info["tracks"].get("items"):
for item in playlist_info["tracks"]["items"]:
if item and item.get("track") and item["track"].get("id"):
track_id = item["track"]["id"]
item["track"]["is_locally_known"] = is_track_in_playlist_db(
playlist_info["id"], track_id
)
elif item and item.get(
"track"
): # Track object exists but no ID
item["track"]["is_locally_known"] = False
# If not watched, or no tracks, is_locally_known will not be added, or tracks won't exist to add it to.
# Frontend should handle absence of this key as false.
return JSONResponse(
content=playlist_info, status_code=200
)
except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()}
return JSONResponse(content=error_data, status_code=500)
@router.get("/metadata")
async def get_playlist_metadata(request: Request, current_user: User = Depends(require_auth_from_state)):
"""
Retrieve only Spotify playlist metadata (no tracks) to avoid rate limiting.
Expects a query parameter 'id' that contains the Spotify playlist ID.
""" """
spotify_id = request.query_params.get("id") spotify_id = request.query_params.get("id")
if not spotify_id: if not spotify_id:
return JSONResponse( return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
content={"error": "Missing parameter: id"},
status_code=400
)
try: try:
# Use the optimized playlist metadata function # Resolve active account's credentials blob
from routes.utils.get_info import get_playlist_metadata cfg = get_config_params() or {}
playlist_metadata = get_playlist_metadata(spotify_id) active_account = cfg.get("spotify")
if not active_account:
return JSONResponse( return JSONResponse(
content=playlist_metadata, status_code=200 content={"error": "Active Spotify account not set in configuration."},
status_code=500,
) )
except Exception as e: blob_path = get_spotify_blob_path(active_account)
error_data = {"error": str(e), "traceback": traceback.format_exc()} if not blob_path.exists():
return JSONResponse(content=error_data, status_code=500)
@router.get("/tracks")
async def get_playlist_tracks(request: Request, current_user: User = Depends(require_auth_from_state)):
"""
Retrieve playlist tracks with pagination support for progressive loading.
Expects query parameters: 'id' (playlist ID), 'limit' (optional), 'offset' (optional).
"""
spotify_id = request.query_params.get("id")
limit = int(request.query_params.get("limit", 50))
offset = int(request.query_params.get("offset", 0))
if not spotify_id:
return JSONResponse( return JSONResponse(
content={"error": "Missing parameter: id"}, content={
status_code=400 "error": f"Spotify credentials blob not found for account '{active_account}'"
},
status_code=500,
) )
client = get_client()
try: try:
# Use the optimized playlist tracks function playlist_info = get_playlist(client, spotify_id, expand_items=False)
from routes.utils.get_info import get_playlist_tracks finally:
tracks_data = get_playlist_tracks(spotify_id, limit=limit, offset=offset) pass
return JSONResponse( return JSONResponse(content=playlist_info, status_code=200)
content=tracks_data, status_code=200
)
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}
return JSONResponse(content=error_data, status_code=500) return JSONResponse(content=error_data, status_code=500)
@router.put("/watch/{playlist_spotify_id}") @router.put("/watch/{playlist_spotify_id}")
async def add_to_watchlist(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): async def add_to_watchlist(
playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Adds a playlist to the watchlist.""" """Adds a playlist to the watchlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."}) raise HTTPException(
status_code=403,
detail={"error": "Watch feature is currently disabled globally."},
)
logger.info(f"Attempting to add playlist {playlist_spotify_id} to watchlist.") logger.info(f"Attempting to add playlist {playlist_spotify_id} to watchlist.")
try: try:
# Check if already watched # Check if already watched
if get_watched_playlist(playlist_spotify_id): if get_watched_playlist(playlist_spotify_id):
return {"message": f"Playlist {playlist_spotify_id} is already being watched."} return {
"message": f"Playlist {playlist_spotify_id} is already being watched."
}
# Fetch playlist details from Spotify to populate our DB (metadata only)
cfg = get_config_params() or {}
active_account = cfg.get("spotify")
if not active_account:
raise HTTPException(
status_code=500,
detail={"error": "Active Spotify account not set in configuration."},
)
blob_path = get_spotify_blob_path(active_account)
if not blob_path.exists():
raise HTTPException(
status_code=500,
detail={
"error": f"Spotify credentials blob not found for account '{active_account}'"
},
)
client = get_client()
try:
playlist_data = get_playlist(
client, playlist_spotify_id, expand_items=False
)
finally:
pass
# Fetch playlist details from Spotify to populate our DB
from routes.utils.get_info import get_playlist_metadata
playlist_data = get_playlist_metadata(playlist_spotify_id)
if not playlist_data or "id" not in playlist_data: if not playlist_data or "id" not in playlist_data:
logger.error( logger.error(
f"Could not fetch details for playlist {playlist_spotify_id} from Spotify." f"Could not fetch details for playlist {playlist_spotify_id} from Spotify."
@@ -291,19 +265,11 @@ async def add_to_watchlist(playlist_spotify_id: str, current_user: User = Depend
status_code=404, status_code=404,
detail={ detail={
"error": f"Could not fetch details for playlist {playlist_spotify_id} from Spotify." "error": f"Could not fetch details for playlist {playlist_spotify_id} from Spotify."
} },
) )
add_playlist_db(playlist_data) # This also creates the tracks table add_playlist_db(playlist_data) # This also creates the tracks table
# REMOVED: Do not add initial tracks directly to DB.
# The playlist watch manager will pick them up as new and queue downloads.
# Tracks will be added to DB only after successful download via Celery task callback.
# initial_track_items = playlist_data.get('tracks', {}).get('items', [])
# if initial_track_items:
# from routes.utils.watch.db import add_tracks_to_playlist_db # Keep local import for clarity
# add_tracks_to_playlist_db(playlist_spotify_id, initial_track_items)
logger.info( logger.info(
f"Playlist {playlist_spotify_id} added to watchlist. Its tracks will be processed by the watch manager." f"Playlist {playlist_spotify_id} added to watchlist. Its tracks will be processed by the watch manager."
) )
@@ -317,11 +283,16 @@ async def add_to_watchlist(playlist_spotify_id: str, current_user: User = Depend
f"Error adding playlist {playlist_spotify_id} to watchlist: {e}", f"Error adding playlist {playlist_spotify_id} to watchlist: {e}",
exc_info=True, exc_info=True,
) )
raise HTTPException(status_code=500, detail={"error": f"Could not add playlist to watchlist: {str(e)}"}) raise HTTPException(
status_code=500,
detail={"error": f"Could not add playlist to watchlist: {str(e)}"},
)
@router.get("/watch/{playlist_spotify_id}/status") @router.get("/watch/{playlist_spotify_id}/status")
async def get_playlist_watch_status(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): async def get_playlist_watch_status(
playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Checks if a specific playlist is being watched.""" """Checks if a specific playlist is being watched."""
logger.info(f"Checking watch status for playlist {playlist_spotify_id}.") logger.info(f"Checking watch status for playlist {playlist_spotify_id}.")
try: try:
@@ -337,22 +308,31 @@ async def get_playlist_watch_status(playlist_spotify_id: str, current_user: User
f"Error checking watch status for playlist {playlist_spotify_id}: {e}", f"Error checking watch status for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
raise HTTPException(status_code=500, detail={"error": f"Could not check watch status: {str(e)}"}) raise HTTPException(
status_code=500, detail={"error": f"Could not check watch status: {str(e)}"}
)
@router.delete("/watch/{playlist_spotify_id}") @router.delete("/watch/{playlist_spotify_id}")
async def remove_from_watchlist(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): async def remove_from_watchlist(
playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Removes a playlist from the watchlist.""" """Removes a playlist from the watchlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."}) raise HTTPException(
status_code=403,
detail={"error": "Watch feature is currently disabled globally."},
)
logger.info(f"Attempting to remove playlist {playlist_spotify_id} from watchlist.") logger.info(f"Attempting to remove playlist {playlist_spotify_id} from watchlist.")
try: try:
if not get_watched_playlist(playlist_spotify_id): if not get_watched_playlist(playlist_spotify_id):
raise HTTPException( raise HTTPException(
status_code=404, status_code=404,
detail={"error": f"Playlist {playlist_spotify_id} not found in watchlist."} detail={
"error": f"Playlist {playlist_spotify_id} not found in watchlist."
},
) )
remove_playlist_db(playlist_spotify_id) remove_playlist_db(playlist_spotify_id)
@@ -369,12 +349,16 @@ async def remove_from_watchlist(playlist_spotify_id: str, current_user: User = D
) )
raise HTTPException( raise HTTPException(
status_code=500, status_code=500,
detail={"error": f"Could not remove playlist from watchlist: {str(e)}"} detail={"error": f"Could not remove playlist from watchlist: {str(e)}"},
) )
@router.post("/watch/{playlist_spotify_id}/tracks") @router.post("/watch/{playlist_spotify_id}/tracks")
async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): async def mark_tracks_as_known(
playlist_spotify_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
"""Fetches details for given track IDs and adds/updates them in the playlist's local DB table.""" """Fetches details for given track IDs and adds/updates them in the playlist's local DB table."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
@@ -382,7 +366,7 @@ async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, curre
status_code=403, status_code=403,
detail={ detail={
"error": "Watch feature is currently disabled globally. Cannot mark tracks." "error": "Watch feature is currently disabled globally. Cannot mark tracks."
} },
) )
logger.info( logger.info(
@@ -397,19 +381,22 @@ async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, curre
status_code=400, status_code=400,
detail={ detail={
"error": "Invalid request body. Expecting a JSON array of track Spotify IDs." "error": "Invalid request body. Expecting a JSON array of track Spotify IDs."
} },
) )
if not get_watched_playlist(playlist_spotify_id): if not get_watched_playlist(playlist_spotify_id):
raise HTTPException( raise HTTPException(
status_code=404, status_code=404,
detail={"error": f"Playlist {playlist_spotify_id} is not being watched."} detail={
"error": f"Playlist {playlist_spotify_id} is not being watched."
},
) )
fetched_tracks_details = [] fetched_tracks_details = []
client = get_client()
for track_id in track_ids: for track_id in track_ids:
try: try:
track_detail = get_spotify_info(track_id, "track") track_detail = get_track(client, track_id)
if track_detail and track_detail.get("id"): if track_detail and track_detail.get("id"):
fetched_tracks_details.append(track_detail) fetched_tracks_details.append(track_detail)
else: else:
@@ -443,11 +430,18 @@ async def mark_tracks_as_known(playlist_spotify_id: str, request: Request, curre
f"Error marking tracks as known for playlist {playlist_spotify_id}: {e}", f"Error marking tracks as known for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
raise HTTPException(status_code=500, detail={"error": f"Could not mark tracks as known: {str(e)}"}) raise HTTPException(
status_code=500,
detail={"error": f"Could not mark tracks as known: {str(e)}"},
)
@router.delete("/watch/{playlist_spotify_id}/tracks") @router.delete("/watch/{playlist_spotify_id}/tracks")
async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): async def mark_tracks_as_missing_locally(
playlist_spotify_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
"""Removes specified tracks from the playlist's local DB table.""" """Removes specified tracks from the playlist's local DB table."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
@@ -455,7 +449,7 @@ async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Requ
status_code=403, status_code=403,
detail={ detail={
"error": "Watch feature is currently disabled globally. Cannot mark tracks." "error": "Watch feature is currently disabled globally. Cannot mark tracks."
} },
) )
logger.info( logger.info(
@@ -470,13 +464,15 @@ async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Requ
status_code=400, status_code=400,
detail={ detail={
"error": "Invalid request body. Expecting a JSON array of track Spotify IDs." "error": "Invalid request body. Expecting a JSON array of track Spotify IDs."
} },
) )
if not get_watched_playlist(playlist_spotify_id): if not get_watched_playlist(playlist_spotify_id):
raise HTTPException( raise HTTPException(
status_code=404, status_code=404,
detail={"error": f"Playlist {playlist_spotify_id} is not being watched."} detail={
"error": f"Playlist {playlist_spotify_id} is not being watched."
},
) )
deleted_count = remove_specific_tracks_from_playlist_table( deleted_count = remove_specific_tracks_from_playlist_table(
@@ -495,22 +491,32 @@ async def mark_tracks_as_missing_locally(playlist_spotify_id: str, request: Requ
f"Error marking tracks as missing (deleting locally) for playlist {playlist_spotify_id}: {e}", f"Error marking tracks as missing (deleting locally) for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
raise HTTPException(status_code=500, detail={"error": f"Could not mark tracks as missing: {str(e)}"}) raise HTTPException(
status_code=500,
detail={"error": f"Could not mark tracks as missing: {str(e)}"},
)
@router.get("/watch/list") @router.get("/watch/list")
async def list_watched_playlists_endpoint(current_user: User = Depends(require_auth_from_state)): async def list_watched_playlists_endpoint(
current_user: User = Depends(require_auth_from_state),
):
"""Lists all playlists currently in the watchlist.""" """Lists all playlists currently in the watchlist."""
try: try:
playlists = get_watched_playlists() playlists = get_watched_playlists()
return playlists return playlists
except Exception as e: except Exception as e:
logger.error(f"Error listing watched playlists: {e}", exc_info=True) logger.error(f"Error listing watched playlists: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": f"Could not list watched playlists: {str(e)}"}) raise HTTPException(
status_code=500,
detail={"error": f"Could not list watched playlists: {str(e)}"},
)
@router.post("/watch/trigger_check") @router.post("/watch/trigger_check")
async def trigger_playlist_check_endpoint(current_user: User = Depends(require_auth_from_state)): async def trigger_playlist_check_endpoint(
current_user: User = Depends(require_auth_from_state),
):
"""Manually triggers the playlist checking mechanism for all watched playlists.""" """Manually triggers the playlist checking mechanism for all watched playlists."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
@@ -518,7 +524,7 @@ async def trigger_playlist_check_endpoint(current_user: User = Depends(require_a
status_code=403, status_code=403,
detail={ detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check." "error": "Watch feature is currently disabled globally. Cannot trigger check."
} },
) )
logger.info("Manual trigger for playlist check received for all playlists.") logger.info("Manual trigger for playlist check received for all playlists.")
@@ -535,12 +541,14 @@ async def trigger_playlist_check_endpoint(current_user: User = Depends(require_a
) )
raise HTTPException( raise HTTPException(
status_code=500, status_code=500,
detail={"error": f"Could not trigger playlist check for all: {str(e)}"} detail={"error": f"Could not trigger playlist check for all: {str(e)}"},
) )
@router.post("/watch/trigger_check/{playlist_spotify_id}") @router.post("/watch/trigger_check/{playlist_spotify_id}")
async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)): async def trigger_specific_playlist_check_endpoint(
playlist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
):
"""Manually triggers the playlist checking mechanism for a specific playlist.""" """Manually triggers the playlist checking mechanism for a specific playlist."""
watch_config = get_watch_config() watch_config = get_watch_config()
if not watch_config.get("enabled", False): if not watch_config.get("enabled", False):
@@ -548,7 +556,7 @@ async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, cur
status_code=403, status_code=403,
detail={ detail={
"error": "Watch feature is currently disabled globally. Cannot trigger check." "error": "Watch feature is currently disabled globally. Cannot trigger check."
} },
) )
logger.info( logger.info(
@@ -565,7 +573,7 @@ async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, cur
status_code=404, status_code=404,
detail={ detail={
"error": f"Playlist {playlist_spotify_id} is not in the watchlist. Add it first." "error": f"Playlist {playlist_spotify_id} is not in the watchlist. Add it first."
} },
) )
# Run check_watched_playlists with the specific ID # Run check_watched_playlists with the specific ID
@@ -590,5 +598,5 @@ async def trigger_specific_playlist_check_endpoint(playlist_spotify_id: str, cur
status_code=500, status_code=500,
detail={ detail={
"error": f"Could not trigger playlist check for {playlist_spotify_id}: {str(e)}" "error": f"Could not trigger playlist check for {playlist_spotify_id}: {str(e)}"
} },
) )

View File

@@ -1,12 +1,11 @@
from fastapi import APIRouter, HTTPException, Request, Depends from fastapi import APIRouter, Request, Depends
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
import json
import traceback import traceback
import uuid import uuid
import time import time
from routes.utils.celery_queue_manager import download_queue_manager from routes.utils.celery_queue_manager import download_queue_manager
from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState
from routes.utils.get_info import get_spotify_info from routes.utils.get_info import get_client, get_track
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
# Import authentication dependencies # Import authentication dependencies
@@ -21,7 +20,11 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str:
@router.get("/download/{track_id}") @router.get("/download/{track_id}")
async def handle_download(track_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): async def handle_download(
track_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
# Retrieve essential parameters from the request. # Retrieve essential parameters from the request.
# name = request.args.get('name') # Removed # name = request.args.get('name') # Removed
# artist = request.args.get('artist') # Removed # artist = request.args.get('artist') # Removed
@@ -31,15 +34,18 @@ async def handle_download(track_id: str, request: Request, current_user: User =
# Fetch metadata from Spotify # Fetch metadata from Spotify
try: try:
track_info = get_spotify_info(track_id, "track") client = get_client()
track_info = get_track(client, track_id)
if ( if (
not track_info not track_info
or not track_info.get("name") or not track_info.get("name")
or not track_info.get("artists") or not track_info.get("artists")
): ):
return JSONResponse( return JSONResponse(
content={"error": f"Could not retrieve metadata for track ID: {track_id}"}, content={
status_code=404 "error": f"Could not retrieve metadata for track ID: {track_id}"
},
status_code=404,
) )
name_from_spotify = track_info.get("name") name_from_spotify = track_info.get("name")
@@ -51,15 +57,16 @@ async def handle_download(track_id: str, request: Request, current_user: User =
except Exception as e: except Exception as e:
return JSONResponse( return JSONResponse(
content={"error": f"Failed to fetch metadata for track {track_id}: {str(e)}"}, content={
status_code=500 "error": f"Failed to fetch metadata for track {track_id}: {str(e)}"
},
status_code=500,
) )
# Validate required parameters # Validate required parameters
if not url: if not url:
return JSONResponse( return JSONResponse(
content={"error": "Missing required parameter: url"}, content={"error": "Missing required parameter: url"}, status_code=400
status_code=400
) )
# Add the task to the queue with only essential parameters # Add the task to the queue with only essential parameters
@@ -84,7 +91,7 @@ async def handle_download(track_id: str, request: Request, current_user: User =
"error": "Duplicate download detected.", "error": "Duplicate download detected.",
"existing_task": e.existing_task, "existing_task": e.existing_task,
}, },
status_code=409 status_code=409,
) )
except Exception as e: except Exception as e:
# Generic error handling for other issues during task submission # Generic error handling for other issues during task submission
@@ -116,25 +123,23 @@ async def handle_download(track_id: str, request: Request, current_user: User =
"error": f"Failed to queue track download: {str(e)}", "error": f"Failed to queue track download: {str(e)}",
"task_id": error_task_id, "task_id": error_task_id,
}, },
status_code=500 status_code=500,
) )
return JSONResponse( return JSONResponse(content={"task_id": task_id}, status_code=202)
content={"task_id": task_id},
status_code=202
)
@router.get("/download/cancel") @router.get("/download/cancel")
async def cancel_download(request: Request, current_user: User = Depends(require_auth_from_state)): async def cancel_download(
request: Request, current_user: User = Depends(require_auth_from_state)
):
""" """
Cancel a running download process by its task id. Cancel a running download process by its task id.
""" """
task_id = request.query_params.get("task_id") task_id = request.query_params.get("task_id")
if not task_id: if not task_id:
return JSONResponse( return JSONResponse(
content={"error": "Missing process id (task_id) parameter"}, content={"error": "Missing process id (task_id) parameter"}, status_code=400
status_code=400
) )
# Use the queue manager's cancellation method. # Use the queue manager's cancellation method.
@@ -145,7 +150,9 @@ async def cancel_download(request: Request, current_user: User = Depends(require
@router.get("/info") @router.get("/info")
async def get_track_info(request: Request, current_user: User = Depends(require_auth_from_state)): async def get_track_info(
request: Request, current_user: User = Depends(require_auth_from_state)
):
""" """
Retrieve Spotify track metadata given a Spotify track ID. Retrieve Spotify track metadata given a Spotify track ID.
Expects a query parameter 'id' that contains the Spotify track ID. Expects a query parameter 'id' that contains the Spotify track ID.
@@ -153,14 +160,11 @@ async def get_track_info(request: Request, current_user: User = Depends(require_
spotify_id = request.query_params.get("id") spotify_id = request.query_params.get("id")
if not spotify_id: if not spotify_id:
return JSONResponse( return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
content={"error": "Missing parameter: id"},
status_code=400
)
try: try:
# Use the get_spotify_info function (already imported at top) client = get_client()
track_info = get_spotify_info(spotify_id, "track") track_info = get_track(client, spotify_id)
return JSONResponse(content=track_info, status_code=200) return JSONResponse(content=track_info, status_code=200)
except Exception as e: except Exception as e:
error_data = {"error": str(e), "traceback": traceback.format_exc()} error_data = {"error": str(e), "traceback": traceback.format_exc()}

View File

@@ -3,16 +3,11 @@ import sqlite3
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
from .v3_2_0 import MigrationV3_2_0 from .v3_3_0 import MigrationV3_3_0
from .v3_2_1 import log_noop_migration_detected
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
DATA_DIR = Path("./data") DATA_DIR = Path("./data")
HISTORY_DB = DATA_DIR / "history" / "download_history.db"
WATCH_DIR = DATA_DIR / "watch"
PLAYLISTS_DB = WATCH_DIR / "playlists.db"
ARTISTS_DB = WATCH_DIR / "artists.db"
# Credentials # Credentials
CREDS_DIR = DATA_DIR / "creds" CREDS_DIR = DATA_DIR / "creds"
@@ -20,89 +15,6 @@ ACCOUNTS_DB = CREDS_DIR / "accounts.db"
BLOBS_DIR = CREDS_DIR / "blobs" BLOBS_DIR = CREDS_DIR / "blobs"
SEARCH_JSON = CREDS_DIR / "search.json" SEARCH_JSON = CREDS_DIR / "search.json"
# Expected children table columns for history (album_/playlist_)
CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
"id": "INTEGER PRIMARY KEY AUTOINCREMENT",
"title": "TEXT NOT NULL",
"artists": "TEXT",
"album_title": "TEXT",
"duration_ms": "INTEGER",
"track_number": "INTEGER",
"disc_number": "INTEGER",
"explicit": "BOOLEAN",
"status": "TEXT NOT NULL",
"external_ids": "TEXT",
"genres": "TEXT",
"isrc": "TEXT",
"timestamp": "REAL NOT NULL",
"position": "INTEGER",
"metadata": "TEXT",
}
# 3.2.0 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = {
"spotify_id": "TEXT PRIMARY KEY",
"name": "TEXT",
"owner_id": "TEXT",
"owner_name": "TEXT",
"total_tracks": "INTEGER",
"link": "TEXT",
"snapshot_id": "TEXT",
"last_checked": "INTEGER",
"added_at": "INTEGER",
"is_active": "INTEGER DEFAULT 1",
}
EXPECTED_PLAYLIST_TRACKS_COLUMNS: dict[str, str] = {
"spotify_track_id": "TEXT PRIMARY KEY",
"title": "TEXT",
"artist_names": "TEXT",
"album_name": "TEXT",
"album_artist_names": "TEXT",
"track_number": "INTEGER",
"album_spotify_id": "TEXT",
"duration_ms": "INTEGER",
"added_at_playlist": "TEXT",
"added_to_db": "INTEGER",
"is_present_in_spotify": "INTEGER DEFAULT 1",
"last_seen_in_spotify": "INTEGER",
"snapshot_id": "TEXT",
"final_path": "TEXT",
}
EXPECTED_WATCHED_ARTISTS_COLUMNS: dict[str, str] = {
"spotify_id": "TEXT PRIMARY KEY",
"name": "TEXT",
"link": "TEXT",
"total_albums_on_spotify": "INTEGER",
"last_checked": "INTEGER",
"added_at": "INTEGER",
"is_active": "INTEGER DEFAULT 1",
"genres": "TEXT",
"popularity": "INTEGER",
"image_url": "TEXT",
}
EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
"album_spotify_id": "TEXT PRIMARY KEY",
"artist_spotify_id": "TEXT",
"name": "TEXT",
"album_group": "TEXT",
"album_type": "TEXT",
"release_date": "TEXT",
"release_date_precision": "TEXT",
"total_tracks": "INTEGER",
"link": "TEXT",
"image_url": "TEXT",
"added_to_db": "INTEGER",
"last_seen_on_spotify": "INTEGER",
"download_task_id": "TEXT",
"download_status": "INTEGER DEFAULT 0",
"is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
}
m320 = MigrationV3_2_0()
def _safe_connect(path: Path) -> Optional[sqlite3.Connection]: def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
try: try:
@@ -115,245 +27,6 @@ def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
return None return None
def _ensure_table_schema(
conn: sqlite3.Connection,
table_name: str,
expected_columns: dict[str, str],
table_description: str,
) -> None:
try:
cur = conn.execute(f"PRAGMA table_info({table_name})")
existing_info = cur.fetchall()
existing_names = {row[1] for row in existing_info}
for col_name, col_type in expected_columns.items():
if col_name in existing_names:
continue
col_type_for_add = (
col_type.replace("PRIMARY KEY", "")
.replace("AUTOINCREMENT", "")
.replace("NOT NULL", "")
.strip()
)
try:
conn.execute(
f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
)
logger.info(
f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
)
except sqlite3.OperationalError as e:
logger.warning(
f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
)
except Exception as e:
logger.error(
f"Error ensuring schema for {table_description} table '{table_name}': {e}",
exc_info=True,
)
def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
    """Create a children history table if absent, then back-fill missing columns."""
    create_sql = f"""
        CREATE TABLE IF NOT EXISTS {table_name} (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            title TEXT NOT NULL,
            artists TEXT,
            album_title TEXT,
            duration_ms INTEGER,
            track_number INTEGER,
            disc_number INTEGER,
            explicit BOOLEAN,
            status TEXT NOT NULL,
            external_ids TEXT,
            genres TEXT,
            isrc TEXT,
            timestamp REAL NOT NULL,
            position INTEGER,
            metadata TEXT
        )
    """
    conn.execute(create_sql)
    # Pre-existing tables may lack newer columns; add whatever is missing.
    _ensure_table_schema(conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history")
# --- Helper to validate instance is at least 3.2.0 on history DB ---
def _history_children_tables(conn: sqlite3.Connection) -> list[str]:
tables: set[str] = set()
try:
cur = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
)
for row in cur.fetchall():
if row and row[0]:
tables.add(row[0])
except sqlite3.Error as e:
logger.warning(f"Failed to scan sqlite_master for children tables: {e}")
try:
cur = conn.execute(
"SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
)
for row in cur.fetchall():
t = row[0]
if t:
tables.add(t)
except sqlite3.Error as e:
logger.warning(f"Failed to scan download_history for children tables: {e}")
return sorted(tables)
def _is_history_at_least_3_2_0(conn: sqlite3.Connection) -> bool:
required_cols = {"service", "quality_format", "quality_bitrate"}
tables = _history_children_tables(conn)
if not tables:
# Nothing to migrate implies OK
return True
for t in tables:
try:
cur = conn.execute(f"PRAGMA table_info({t})")
cols = {row[1] for row in cur.fetchall()}
if not required_cols.issubset(cols):
return False
except sqlite3.OperationalError:
return False
return True
# --- 3.2.0 verification helpers for Watch DBs ---
def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
    """Bring the watch playlists DB up to the 3.2.0 base schema.

    Creates the core ``watched_playlists`` table if absent and back-fills any
    missing columns, then repeats the same for every dynamic ``playlist_%``
    per-playlist tracks table found in the database. Failures are logged and
    swallowed so the migration proceeds best-effort.
    """
    try:
        # Ensure core watched_playlists table exists and has expected schema
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_playlists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                owner_id TEXT,
                owner_name TEXT,
                total_tracks INTEGER,
                link TEXT,
                snapshot_id TEXT,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1
            )
        """
        )
        # Back-fill columns that predate the expected 3.2.0 layout.
        _ensure_table_schema(
            conn,
            "watched_playlists",
            EXPECTED_WATCHED_PLAYLISTS_COLUMNS,
            "watched playlists",
        )
        # Upgrade all dynamic playlist_ tables
        cur = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'"
        )
        for row in cur.fetchall():
            table_name = row[0]
            # Create the per-playlist tracks table if it does not exist yet.
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    spotify_track_id TEXT PRIMARY KEY,
                    title TEXT,
                    artist_names TEXT,
                    album_name TEXT,
                    album_artist_names TEXT,
                    track_number INTEGER,
                    album_spotify_id TEXT,
                    duration_ms INTEGER,
                    added_at_playlist TEXT,
                    added_to_db INTEGER,
                    is_present_in_spotify INTEGER DEFAULT 1,
                    last_seen_in_spotify INTEGER,
                    snapshot_id TEXT,
                    final_path TEXT
                )
            """
            )
            # Then back-fill any columns missing from older table versions.
            _ensure_table_schema(
                conn,
                table_name,
                EXPECTED_PLAYLIST_TRACKS_COLUMNS,
                f"playlist tracks ({table_name})",
            )
    except Exception:
        logger.error(
            "Failed to upgrade watch playlists DB to 3.2.0 base schema", exc_info=True
        )
def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
    """Bring the watch artists DB up to the 3.2.0 base schema.

    Creates the core ``watched_artists`` table if absent and back-fills any
    missing columns, then repeats the same for every dynamic ``artist_%``
    per-artist albums table found in the database. Failures are logged and
    swallowed so the migration proceeds best-effort.
    """
    try:
        # Ensure core watched_artists table exists and has expected schema
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                link TEXT,
                total_albums_on_spotify INTEGER,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1,
                genres TEXT,
                popularity INTEGER,
                image_url TEXT
            )
        """
        )
        # Back-fill columns that predate the expected 3.2.0 layout.
        _ensure_table_schema(
            conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists"
        )
        # Upgrade all dynamic artist_ tables
        cur = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'"
        )
        for row in cur.fetchall():
            table_name = row[0]
            # Create the per-artist albums table if it does not exist yet.
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    album_spotify_id TEXT PRIMARY KEY,
                    artist_spotify_id TEXT,
                    name TEXT,
                    album_group TEXT,
                    album_type TEXT,
                    release_date TEXT,
                    release_date_precision TEXT,
                    total_tracks INTEGER,
                    link TEXT,
                    image_url TEXT,
                    added_to_db INTEGER,
                    last_seen_on_spotify INTEGER,
                    download_task_id TEXT,
                    download_status INTEGER DEFAULT 0,
                    is_fully_downloaded_managed_by_app INTEGER DEFAULT 0
                )
            """
            )
            # Then back-fill any columns missing from older table versions.
            _ensure_table_schema(
                conn,
                table_name,
                EXPECTED_ARTIST_ALBUMS_COLUMNS,
                f"artist albums ({table_name})",
            )
    except Exception:
        logger.error(
            "Failed to upgrade watch artists DB to 3.2.0 base schema", exc_info=True
        )
def _ensure_creds_filesystem() -> None: def _ensure_creds_filesystem() -> None:
try: try:
BLOBS_DIR.mkdir(parents=True, exist_ok=True) BLOBS_DIR.mkdir(parents=True, exist_ok=True)
@@ -374,35 +47,10 @@ def run_migrations_if_needed():
return return
try: try:
# Require instance to be at least 3.2.0 on history DB; otherwise abort # Validate configuration version strictly at 3.3.0
with _safe_connect(HISTORY_DB) as history_conn: MigrationV3_3_0.assert_config_version_is_3_3_0()
if history_conn and not _is_history_at_least_3_2_0(history_conn):
logger.error(
"Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0."
)
raise RuntimeError(
"Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0."
)
# Watch playlists DB # No schema changes in 3.3.0 path; just ensure Accounts DB can be opened
with _safe_connect(PLAYLISTS_DB) as conn:
if conn:
_update_watch_playlists_db(conn)
# Apply 3.2.0 additions (batch progress columns)
if not m320.check_watch_playlists(conn):
m320.update_watch_playlists(conn)
conn.commit()
# Watch artists DB (if exists)
if ARTISTS_DB.exists():
with _safe_connect(ARTISTS_DB) as conn:
if conn:
_update_watch_artists_db(conn)
if not m320.check_watch_artists(conn):
m320.update_watch_artists(conn)
conn.commit()
# Accounts DB (no changes for this migration path)
with _safe_connect(ACCOUNTS_DB) as conn: with _safe_connect(ACCOUNTS_DB) as conn:
if conn: if conn:
conn.commit() conn.commit()
@@ -412,5 +60,4 @@ def run_migrations_if_needed():
raise raise
else: else:
_ensure_creds_filesystem() _ensure_creds_filesystem()
log_noop_migration_detected() logger.info("Migration validation completed (3.3.0 gate)")
logger.info("Database migrations check completed (3.2.0 -> 3.3.0 path)")

View File

@@ -1,100 +0,0 @@
import sqlite3
import logging

logger = logging.getLogger(__name__)


class MigrationV3_2_0:
    """
    Migration for version 3.2.0 (upgrade path 3.2.0 -> 3.3.0).

    - Adds per-item batch progress columns to Watch DBs to support
      page-by-interval processing.
    - Enforces prerequisite: previous instance version must be 3.1.2
      (validated by the migration runner).

    The playlists and artists paths previously duplicated the same
    check/ALTER logic; both now delegate to shared private helpers.
    """

    # New columns to add to watched_playlists (per-item batch progress).
    PLAYLISTS_ADDED_COLUMNS: dict[str, str] = {
        "batch_next_offset": "INTEGER DEFAULT 0",
        "batch_processing_snapshot_id": "TEXT",
    }

    # New columns to add to watched_artists (per-item batch progress).
    ARTISTS_ADDED_COLUMNS: dict[str, str] = {
        "batch_next_offset": "INTEGER DEFAULT 0",
    }

    # --- Shared helpers -------------------------------------------------

    @staticmethod
    def _table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
        """Return the set of column names currently present on *table*."""
        cur = conn.execute(f"PRAGMA table_info({table})")
        return {row[1] for row in cur.fetchall()}

    @classmethod
    def _check_columns(
        cls, conn: sqlite3.Connection, table: str, required: dict[str, str]
    ) -> bool:
        """True when *table* already carries every column in *required*."""
        try:
            return set(required.keys()).issubset(cls._table_columns(conn, table))
        except sqlite3.OperationalError:
            # Table missing means not ready
            return False

    @classmethod
    def _add_columns(
        cls, conn: sqlite3.Connection, table: str, columns: dict[str, str]
    ) -> None:
        """Best-effort ALTER TABLE to add each missing column in *columns*."""
        try:
            existing = cls._table_columns(conn, table)
            for col_name, col_type in columns.items():
                if col_name in existing:
                    continue
                try:
                    conn.execute(
                        f"ALTER TABLE {table} ADD COLUMN {col_name} {col_type}"
                    )
                    logger.info(
                        f"Added column '{col_name} {col_type}' to {table} for 3.3.0 batch progress."
                    )
                except sqlite3.OperationalError as e:
                    logger.warning(
                        f"Could not add column '{col_name}' to {table}: {e}"
                    )
        except Exception:
            logger.error(f"Failed to update {table} for 3.3.0", exc_info=True)

    # --- No-op for history/accounts in 3.3.0 ---

    def check_history(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_history(self, conn: sqlite3.Connection) -> None:
        pass

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        pass

    # --- Watch: playlists ---

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        return self._check_columns(
            conn, "watched_playlists", self.PLAYLISTS_ADDED_COLUMNS
        )

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        self._add_columns(conn, "watched_playlists", self.PLAYLISTS_ADDED_COLUMNS)

    # --- Watch: artists ---

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        return self._check_columns(conn, "watched_artists", self.ARTISTS_ADDED_COLUMNS)

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        self._add_columns(conn, "watched_artists", self.ARTISTS_ADDED_COLUMNS)

View File

@@ -1,41 +0,0 @@
import logging
import sqlite3

logger = logging.getLogger(__name__)


class MigrationV3_2_1:
    """
    No-op migration for version 3.2.1 (upgrade path 3.2.1 -> 3.3.0).

    No database schema changes are required: every check reports the schema
    as already up to date, and every update deliberately does nothing.
    """

    # --- History DB ---

    def check_history(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_history(self, conn: sqlite3.Connection) -> None:
        return None

    # --- Accounts DB ---

    def check_accounts(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_accounts(self, conn: sqlite3.Connection) -> None:
        return None

    # --- Watch DBs ---

    def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
        return None

    def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
        return True

    def update_watch_artists(self, conn: sqlite3.Connection) -> None:
        return None


def log_noop_migration_detected() -> None:
    """Record that a 3.2.1 schema needs no changes on the way to 3.3.0."""
    logger.info(
        "No migration performed: detected schema for 3.2.1; no changes needed for 3.2.1 -> 3.3.0."
    )

View File

@@ -0,0 +1,69 @@
import json
import logging
from pathlib import Path
from typing import Optional
logger = logging.getLogger(__name__)
CONFIG_PATH = Path("./data/config/main.json")
REQUIRED_VERSION = "3.3.0"
TARGET_VERSION = "3.3.1"
def _load_config(config_path: Path) -> Optional[dict]:
try:
if not config_path.exists():
logger.error(f"Configuration file not found at {config_path}")
return None
content = config_path.read_text(encoding="utf-8")
return json.loads(content)
except Exception:
logger.error("Failed to read configuration file for migration", exc_info=True)
return None
def _save_config(config_path: Path, cfg: dict) -> None:
config_path.parent.mkdir(parents=True, exist_ok=True)
config_path.write_text(json.dumps(cfg, indent=4) + "\n", encoding="utf-8")
class MigrationV3_3_0:
"""
3.3.0 migration gate. This migration verifies the configuration indicates
version 3.3.0, then bumps it to 3.3.1.
If the `version` key is missing or not equal to 3.3.0, execution aborts and
prompts the user to update their instance to 3.3.0.
"""
@staticmethod
def assert_config_version_is_3_3_0() -> None:
cfg = _load_config(CONFIG_PATH)
if not cfg or "version" not in cfg:
raise RuntimeError(
"Missing 'version' in data/config/main.json. Please update your configuration to 3.3.0."
)
version = str(cfg.get("version", "")).strip()
# Case 1: exactly 3.3.0 -> bump to 3.3.1
if version == REQUIRED_VERSION:
cfg["version"] = TARGET_VERSION
try:
_save_config(CONFIG_PATH, cfg)
logger.info(
f"Configuration version bumped from {REQUIRED_VERSION} to {TARGET_VERSION}."
)
except Exception:
logger.error(
"Failed to bump configuration version to 3.3.1", exc_info=True
)
raise
return
# Case 2: already 3.3.1 -> OK
if version == TARGET_VERSION:
logger.info("Configuration version 3.3.1 detected. Proceeding.")
return
# Case 3: anything else -> abort and instruct to update to 3.3.0 first
raise RuntimeError(
f"Unsupported configuration version '{version}'. Please update to {REQUIRED_VERSION}."
)

View File

@@ -4,11 +4,11 @@ import logging
import time import time
import json import json
import asyncio import asyncio
from typing import Set from typing import Set, Optional
import redis import redis
import threading import threading
from routes.utils.celery_config import REDIS_URL from routes.utils.celery_config import REDIS_URL, get_config_params
from routes.utils.celery_tasks import ( from routes.utils.celery_tasks import (
get_task_info, get_task_info,
@@ -37,56 +37,122 @@ router = APIRouter()
class SSEBroadcaster: class SSEBroadcaster:
def __init__(self): def __init__(self):
self.clients: Set[asyncio.Queue] = set() self.clients: Set[asyncio.Queue] = set()
# Per-task throttling/batching/deduplication state
self._task_state = {} # task_id -> dict with last_sent, last_event, last_send_time, scheduled_handle
# Load configurable interval
config = get_config_params()
self.sse_update_interval = float(config.get("sseUpdateIntervalSeconds", 1))
async def add_client(self, queue: asyncio.Queue): async def add_client(self, queue: asyncio.Queue):
"""Add a new SSE client""" """Add a new SSE client"""
self.clients.add(queue) self.clients.add(queue)
logger.info(f"SSE: Client connected (total: {len(self.clients)})") logger.debug(f"SSE: Client connected (total: {len(self.clients)})")
async def remove_client(self, queue: asyncio.Queue): async def remove_client(self, queue: asyncio.Queue):
"""Remove an SSE client""" """Remove an SSE client"""
self.clients.discard(queue) self.clients.discard(queue)
logger.info(f"SSE: Client disconnected (total: {len(self.clients)})") logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})")
async def broadcast_event(self, event_data: dict): async def broadcast_event(self, event_data: dict):
"""Broadcast an event to all connected clients""" """
logger.debug( Throttle, batch, and deduplicate SSE events per task.
f"SSE Broadcaster: Attempting to broadcast to {len(self.clients)} clients" Only emit at most 1 update/sec per task, aggregate within window, suppress redundant updates.
) """
if not self.clients: if not self.clients:
logger.debug("SSE Broadcaster: No clients connected, skipping broadcast") logger.debug("SSE Broadcaster: No clients connected, skipping broadcast")
return return
# Add global task counts right before broadcasting - this is the single source of truth # Defensive: always work with a list of tasks
tasks = event_data.get("tasks", [])
if not isinstance(tasks, list):
tasks = [tasks]
# For each task, throttle/batch/dedupe
for task in tasks:
task_id = task.get("task_id")
if not task_id:
continue
now = time.time()
state = self._task_state.setdefault(task_id, {
"last_sent": None,
"last_event": None,
"last_send_time": 0,
"scheduled_handle": None,
})
# Deduplication: if event is identical to last sent, skip
if state["last_sent"] is not None and self._events_equal(state["last_sent"], task):
logger.debug(f"SSE: Deduped event for task {task_id}")
continue
# Throttling: if within interval, batch (store as last_event, schedule send)
elapsed = now - state["last_send_time"]
if elapsed < self.sse_update_interval:
state["last_event"] = task
if state["scheduled_handle"] is None:
delay = self.sse_update_interval - elapsed
loop = asyncio.get_event_loop()
state["scheduled_handle"] = loop.call_later(
delay, lambda: asyncio.create_task(self._send_batched_event(task_id))
)
continue
# Otherwise, send immediately
await self._send_event(task_id, task)
state["last_send_time"] = now
state["last_sent"] = task
state["last_event"] = None
if state["scheduled_handle"]:
state["scheduled_handle"].cancel()
state["scheduled_handle"] = None
async def _send_batched_event(self, task_id):
state = self._task_state.get(task_id)
if not state or not state["last_event"]:
return
await self._send_event(task_id, state["last_event"])
state["last_send_time"] = time.time()
state["last_sent"] = state["last_event"]
state["last_event"] = None
state["scheduled_handle"] = None
async def _send_event(self, task_id, task):
# Compose event_data for this task
event_data = {
"tasks": [task],
"current_timestamp": time.time(),
"change_type": "update",
}
enhanced_event_data = add_global_task_counts_to_event(event_data.copy()) enhanced_event_data = add_global_task_counts_to_event(event_data.copy())
event_json = json.dumps(enhanced_event_data) event_json = json.dumps(enhanced_event_data)
sse_data = f"data: {event_json}\n\n" sse_data = f"data: {event_json}\n\n"
logger.debug(
f"SSE Broadcaster: Broadcasting event: {enhanced_event_data.get('change_type', 'unknown')} with {enhanced_event_data.get('active_tasks', 0)} active tasks"
)
# Send to all clients, remove disconnected ones
disconnected = set() disconnected = set()
sent_count = 0 sent_count = 0
for client_queue in self.clients.copy(): for client_queue in self.clients.copy():
try: try:
await client_queue.put(sse_data) await client_queue.put(sse_data)
sent_count += 1 sent_count += 1
logger.debug("SSE: Successfully sent to client queue")
except Exception as e: except Exception as e:
logger.error(f"SSE: Failed to send to client: {e}") logger.error(f"SSE: Failed to send to client: {e}")
disconnected.add(client_queue) disconnected.add(client_queue)
# Clean up disconnected clients
for client in disconnected: for client in disconnected:
self.clients.discard(client) self.clients.discard(client)
logger.debug(
logger.info( f"SSE Broadcaster: Sent throttled/batched event for task {task_id} to {sent_count} clients"
f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients"
) )
def _events_equal(self, a, b):
# Compare two task dicts for deduplication (ignore timestamps)
if not isinstance(a, dict) or not isinstance(b, dict):
return False
a_copy = dict(a)
b_copy = dict(b)
a_copy.pop("timestamp", None)
b_copy.pop("timestamp", None)
return a_copy == b_copy
# Global broadcaster instance # Global broadcaster instance
sse_broadcaster = SSEBroadcaster() sse_broadcaster = SSEBroadcaster()
@@ -119,26 +185,22 @@ def start_sse_redis_subscriber():
# Handle different event types # Handle different event types
if event_type == "progress_update": if event_type == "progress_update":
# Transform callback data into task format expected by frontend # Transform callback data into standardized update format expected by frontend
standardized = standardize_incoming_event(event_data)
if standardized:
loop = asyncio.new_event_loop() loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
try: try:
broadcast_data = loop.run_until_complete(
transform_callback_to_task_format(
task_id, event_data
)
)
if broadcast_data:
loop.run_until_complete( loop.run_until_complete(
sse_broadcaster.broadcast_event(broadcast_data) sse_broadcaster.broadcast_event(standardized)
) )
logger.debug( logger.debug(
f"SSE Redis Subscriber: Broadcasted callback to {len(sse_broadcaster.clients)} clients" f"SSE Redis Subscriber: Broadcasted standardized progress update to {len(sse_broadcaster.clients)} clients"
) )
finally: finally:
loop.close() loop.close()
elif event_type == "summary_update": elif event_type == "summary_update":
# Task summary update - use existing trigger_sse_update logic # Task summary update - use standardized trigger
loop = asyncio.new_event_loop() loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
try: try:
@@ -153,15 +215,17 @@ def start_sse_redis_subscriber():
finally: finally:
loop.close() loop.close()
else: else:
# Unknown event type - broadcast as-is # Unknown event type - attempt to standardize and broadcast
standardized = standardize_incoming_event(event_data)
if standardized:
loop = asyncio.new_event_loop() loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
try: try:
loop.run_until_complete( loop.run_until_complete(
sse_broadcaster.broadcast_event(event_data) sse_broadcaster.broadcast_event(standardized)
) )
logger.debug( logger.debug(
f"SSE Redis Subscriber: Broadcasted {event_type} to {len(sse_broadcaster.clients)} clients" f"SSE Redis Subscriber: Broadcasted standardized {event_type} to {len(sse_broadcaster.clients)} clients"
) )
finally: finally:
loop.close() loop.close()
@@ -178,7 +242,86 @@ def start_sse_redis_subscriber():
# Start Redis subscriber in background thread # Start Redis subscriber in background thread
thread = threading.Thread(target=redis_subscriber_thread, daemon=True) thread = threading.Thread(target=redis_subscriber_thread, daemon=True)
thread.start() thread.start()
logger.info("SSE Redis Subscriber: Background thread started") logger.debug("SSE Redis Subscriber: Background thread started")
def build_task_object_from_callback(
    task_id: str, callback_data: dict
) -> Optional[dict]:
    """Build a standardized task object from callback payload and task info.

    Returns None when the task is unknown or any lookup fails (logged).
    """
    try:
        task_info = get_task_info(task_id)
        if not task_info:
            return None
        download_type = task_info.get("download_type", "track")
        item_id = (
            task_info.get("url", "").split("/")[-1] if task_info.get("url") else ""
        )
        return {
            "task_id": task_id,
            "original_url": f"http://localhost:7171/api/{download_type}/download/{item_id}",
            "last_line": callback_data,
            "timestamp": time.time(),
            "download_type": download_type,
            "type": task_info.get("type", download_type),
            "name": task_info.get("name", "Unknown"),
            "artist": task_info.get("artist", ""),
            "created_at": task_info.get("created_at"),
        }
    except Exception as e:
        logger.error(
            f"Error building task object from callback for {task_id}: {e}",
            exc_info=True,
        )
        return None
def standardize_incoming_event(event_data: dict) -> Optional[dict]:
    """
    Normalize any incoming event shape into the standardized SSE payload:
    {
        'change_type': 'update' | 'heartbeat',
        'tasks': [...],
        'current_timestamp': float,
        'trigger_reason': str (optional)
    }

    Returns None if normalization itself fails (logged).
    """
    try:
        now = time.time()

        # Heartbeats pass through, but always carry an (empty) tasks array.
        if event_data.get("change_type") == "heartbeat":
            return {"change_type": "heartbeat", "tasks": [], "current_timestamp": now}

        # Already task-shaped: just coerce change_type and refresh timestamp.
        tasks = event_data.get("tasks")
        if isinstance(tasks, list):
            return {
                "change_type": event_data.get("change_type", "update"),
                "tasks": tasks,
                "current_timestamp": now,
                "trigger_reason": event_data.get("trigger_reason"),
            }

        # Callback-shaped: rebuild a full task object from stored task info.
        callback_data = event_data.get("callback_data")
        task_id = event_data.get("task_id")
        if callback_data and task_id:
            task_obj = build_task_object_from_callback(task_id, callback_data)
            if task_obj:
                return {
                    "change_type": "update",
                    "tasks": [task_obj],
                    "current_timestamp": now,
                    "trigger_reason": event_data.get("event_type", "callback_update"),
                }

        # Unknown shape: emit an empty update rather than dropping the event.
        return {"change_type": "update", "tasks": [], "current_timestamp": now}
    except Exception as e:
        logger.error(f"Failed to standardize incoming event: {e}", exc_info=True)
        return None
async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict: async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict:
@@ -211,7 +354,7 @@ async def transform_callback_to_task_format(task_id: str, event_data: dict) -> d
# Build minimal event data - global counts will be added at broadcast time # Build minimal event data - global counts will be added at broadcast time
return { return {
"change_type": "update", # Use "update" so it gets processed by existing frontend logic "change_type": "update",
"tasks": [task_object], # Frontend expects tasks array "tasks": [task_object], # Frontend expects tasks array
"current_timestamp": time.time(), "current_timestamp": time.time(),
"updated_count": 1, "updated_count": 1,
@@ -254,12 +397,12 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"):
task_info, last_status, task_id, current_time, dummy_request task_info, last_status, task_id, current_time, dummy_request
) )
# Create minimal event data - global counts will be added at broadcast time # Create standardized event data - global counts will be added at broadcast time
event_data = { event_data = {
"tasks": [task_response], "tasks": [task_response],
"current_timestamp": current_time, "current_timestamp": current_time,
"since_timestamp": current_time, "since_timestamp": current_time,
"change_type": "realtime", "change_type": "update",
"trigger_reason": reason, "trigger_reason": reason,
} }
@@ -420,6 +563,14 @@ def add_global_task_counts_to_event(event_data):
event_data["active_tasks"] = global_task_counts["active"] event_data["active_tasks"] = global_task_counts["active"]
event_data["all_tasks_count"] = sum(global_task_counts.values()) event_data["all_tasks_count"] = sum(global_task_counts.values())
# Ensure tasks array is present for schema consistency
if "tasks" not in event_data:
event_data["tasks"] = []
# Ensure change_type is present
if "change_type" not in event_data:
event_data["change_type"] = "update"
return event_data return event_data
except Exception as e: except Exception as e:
@@ -496,7 +647,11 @@ def _build_task_response(
try: try:
item_id = item_url.split("/")[-1] item_id = item_url.split("/")[-1]
if item_id: if item_id:
base_url = str(request.base_url).rstrip("/") base_url = (
str(request.base_url).rstrip("/")
if request
else "http://localhost:7171"
)
dynamic_original_url = ( dynamic_original_url = (
f"{base_url}/api/{download_type}/download/{item_id}" f"{base_url}/api/{download_type}/download/{item_id}"
) )
@@ -575,7 +730,7 @@ def _build_task_response(
async def get_paginated_tasks( async def get_paginated_tasks(
page=1, limit=20, active_only=False, request: Request = None page=1, limit=20, active_only=False, request: Optional[Request] = None
): ):
""" """
Get paginated list of tasks. Get paginated list of tasks.
@@ -1069,51 +1224,18 @@ async def stream_task_updates(
try: try:
# Register this client with the broadcaster # Register this client with the broadcaster
logger.info("SSE Stream: New client connecting...") logger.debug("SSE Stream: New client connecting...")
await sse_broadcaster.add_client(client_queue) await sse_broadcaster.add_client(client_queue)
logger.info( logger.debug(
f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}" f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}"
) )
# Send initial data immediately upon connection # Send initial data immediately upon connection (standardized 'update')
initial_data = await generate_task_update_event( initial_data = await generate_task_update_event(
time.time(), active_only, request time.time(), active_only, request
) )
yield initial_data yield initial_data
# Also send any active tasks as callback-style events to newly connected clients
all_tasks = get_all_tasks()
for task_summary in all_tasks:
task_id = task_summary.get("task_id")
if not task_id:
continue
task_info = get_task_info(task_id)
if not task_info:
continue
last_status = get_last_task_status(task_id)
task_status = get_task_status_from_last_status(last_status)
# Send recent callback data for active or recently completed tasks
if is_task_active(task_status) or (
last_status and last_status.get("timestamp", 0) > time.time() - 30
):
if last_status and "raw_callback" in last_status:
callback_event = {
"task_id": task_id,
"callback_data": last_status["raw_callback"],
"timestamp": last_status.get("timestamp", time.time()),
"change_type": "callback",
"event_type": "progress_update",
"replay": True, # Mark as replay for client
}
event_json = json.dumps(callback_event)
yield f"data: {event_json}\n\n"
logger.info(
f"SSE Stream: Sent replay callback for task {task_id}"
)
# Send periodic heartbeats and listen for real-time events # Send periodic heartbeats and listen for real-time events
last_heartbeat = time.time() last_heartbeat = time.time()
heartbeat_interval = 30.0 heartbeat_interval = 30.0
@@ -1180,6 +1302,7 @@ async def stream_task_updates(
+ task_counts["retrying"], + task_counts["retrying"],
"task_counts": task_counts, "task_counts": task_counts,
"change_type": "heartbeat", "change_type": "heartbeat",
"tasks": [],
} }
event_json = json.dumps(heartbeat_data) event_json = json.dumps(heartbeat_data)
@@ -1194,13 +1317,14 @@ async def stream_task_updates(
"error": "Internal server error", "error": "Internal server error",
"timestamp": time.time(), "timestamp": time.time(),
"change_type": "error", "change_type": "error",
"tasks": [],
} }
) )
yield f"data: {error_data}\n\n" yield f"data: {error_data}\n\n"
await asyncio.sleep(1) await asyncio.sleep(1)
except asyncio.CancelledError: except asyncio.CancelledError:
logger.info("SSE client disconnected") logger.debug("SSE client disconnected")
return return
except Exception as e: except Exception as e:
logger.error(f"SSE connection error: {e}", exc_info=True) logger.error(f"SSE connection error: {e}", exc_info=True)
@@ -1296,6 +1420,7 @@ async def generate_task_update_event(
"current_timestamp": current_time, "current_timestamp": current_time,
"updated_count": len(updated_tasks), "updated_count": len(updated_tasks),
"since_timestamp": since_timestamp, "since_timestamp": since_timestamp,
"change_type": "update",
"initial": True, # Mark as initial load "initial": True, # Mark as initial load
} }
@@ -1308,7 +1433,12 @@ async def generate_task_update_event(
except Exception as e: except Exception as e:
logger.error(f"Error generating initial SSE event: {e}", exc_info=True) logger.error(f"Error generating initial SSE event: {e}", exc_info=True)
error_data = json.dumps( error_data = json.dumps(
{"error": "Failed to load initial data", "timestamp": time.time()} {
"error": "Failed to load initial data",
"timestamp": time.time(),
"tasks": [],
"change_type": "error",
}
) )
return f"data: {error_data}\n\n" return f"data: {error_data}\n\n"

View File

@@ -8,6 +8,7 @@ from routes.utils.credentials import (
) )
from routes.utils.celery_queue_manager import get_existing_task_id from routes.utils.celery_queue_manager import get_existing_task_id
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
from routes.utils.celery_config import get_config_params
def download_album( def download_album(
@@ -98,10 +99,11 @@ def download_album(
spotify_client_id=global_spotify_client_id, spotify_client_id=global_spotify_client_id,
spotify_client_secret=global_spotify_client_secret, spotify_client_secret=global_spotify_client_secret,
progress_callback=progress_callback, progress_callback=progress_callback,
spotify_credentials_path=str(get_spotify_blob_path(main)),
) )
dl.download_albumspo( dl.download_albumspo(
link_album=url, # Spotify URL link_album=url, # Spotify URL
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, # Deezer quality quality_download=quality, # Deezer quality
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -159,7 +161,7 @@ def download_album(
) )
spo.download_album( spo.download_album(
link_album=url, # Spotify URL link_album=url, # Spotify URL
output_dir="/app/downloads", output_dir="./downloads",
quality_download=fall_quality, # Spotify quality quality_download=fall_quality, # Spotify quality
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -216,7 +218,7 @@ def download_album(
) )
spo.download_album( spo.download_album(
link_album=url, link_album=url,
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, quality_download=quality,
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -257,10 +259,15 @@ def download_album(
spotify_client_id=global_spotify_client_id, # Global Spotify keys spotify_client_id=global_spotify_client_id, # Global Spotify keys
spotify_client_secret=global_spotify_client_secret, # Global Spotify keys spotify_client_secret=global_spotify_client_secret, # Global Spotify keys
progress_callback=progress_callback, progress_callback=progress_callback,
spotify_credentials_path=(
str(get_spotify_blob_path(get_config_params().get("spotify")))
if get_config_params().get("spotify")
else None
),
) )
dl.download_albumdee( # Deezer URL, download via Deezer dl.download_albumdee( # Deezer URL, download via Deezer
link_album=url, link_album=url,
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, quality_download=quality,
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,

View File

@@ -2,9 +2,9 @@ import json
from routes.utils.watch.manager import get_watch_config from routes.utils.watch.manager import get_watch_config
import logging import logging
from routes.utils.celery_queue_manager import download_queue_manager from routes.utils.celery_queue_manager import download_queue_manager
from routes.utils.get_info import get_spotify_info
from routes.utils.credentials import get_credential, _get_global_spotify_api_creds from routes.utils.credentials import get_credential, _get_global_spotify_api_creds
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
from routes.utils.get_info import get_client, get_artist
from deezspot.libutils.utils import get_ids, link_is_valid from deezspot.libutils.utils import get_ids, link_is_valid
@@ -77,10 +77,26 @@ def get_artist_discography(
log_json({"status": "error", "message": msg}) log_json({"status": "error", "message": msg})
raise ValueError(msg) raise ValueError(msg)
# Fetch artist once and return grouped arrays without pagination
try: try:
# Use the optimized get_spotify_info function client = get_client()
discography = get_spotify_info(artist_id, "artist_discography") artist_obj = get_artist(client, artist_id)
return discography
# Normalize groups as arrays of IDs; tolerate dict shape from some sources
def normalize_group(val):
if isinstance(val, list):
return val
if isinstance(val, dict):
items = val.get("items") or val.get("releases") or []
return items if isinstance(items, list) else []
return []
return {
"album_group": normalize_group(artist_obj.get("album_group")),
"single_group": normalize_group(artist_obj.get("single_group")),
"compilation_group": normalize_group(artist_obj.get("compilation_group")),
"appears_on_group": normalize_group(artist_obj.get("appears_on_group")),
}
except Exception as fetch_error: except Exception as fetch_error:
msg = f"An error occurred while fetching the discography: {fetch_error}" msg = f"An error occurred while fetching the discography: {fetch_error}"
log_json({"status": "error", "message": msg}) log_json({"status": "error", "message": msg})
@@ -120,60 +136,54 @@ def download_artist_albums(url, album_type=None, request_args=None, username=Non
raise ValueError(error_msg) raise ValueError(error_msg)
# Get watch config to determine which album groups to download # Get watch config to determine which album groups to download
valid_groups = {"album", "single", "compilation", "appears_on"}
if album_type and isinstance(album_type, str):
requested = [g.strip().lower() for g in album_type.split(",") if g.strip()]
allowed_groups = [g for g in requested if g in valid_groups]
if not allowed_groups:
logger.warning(
f"album_type query provided but no valid groups found in {requested}; falling back to watch config."
)
if not album_type or not isinstance(album_type, str) or not allowed_groups:
watch_config = get_watch_config() watch_config = get_watch_config()
allowed_groups = [ allowed_groups = [
g.lower() g.lower()
for g in watch_config.get("watchedArtistAlbumGroup", ["album", "single"]) for g in watch_config.get("watchedArtistAlbumGroup", ["album", "single"])
if g.lower() in valid_groups
] ]
logger.info( logger.info(
f"Filtering albums by watchedArtistAlbumGroup setting (exact album_group match): {allowed_groups}" f"Filtering albums by album_type/watch setting (exact album_group match): {allowed_groups}"
) )
# Fetch all artist albums with pagination # Fetch artist and aggregate group arrays without pagination
client = get_client()
artist_obj = get_artist(client, artist_id)
def normalize_group(val):
if isinstance(val, list):
return val
if isinstance(val, dict):
items = val.get("items") or val.get("releases") or []
return items if isinstance(items, list) else []
return []
group_key_to_type = [
("album_group", "album"),
("single_group", "single"),
("compilation_group", "compilation"),
("appears_on_group", "appears_on"),
]
all_artist_albums = [] all_artist_albums = []
offset = 0 for key, group_type in group_key_to_type:
limit = 50 # Spotify API limit for artist albums ids = normalize_group(artist_obj.get(key))
# transform to minimal album objects with album_group tagging for filtering parity
logger.info(f"Fetching all albums for artist ID: {artist_id} with pagination") for album_id in ids:
all_artist_albums.append(
while True: {
logger.debug( "id": album_id,
f"Fetching albums for {artist_id}. Limit: {limit}, Offset: {offset}" "album_group": group_type,
) }
artist_data_page = get_spotify_info(
artist_id, "artist_discography", limit=limit, offset=offset
)
if not artist_data_page or not isinstance(artist_data_page.get("items"), list):
logger.warning(
f"No album items found or invalid format for artist {artist_id} at offset {offset}. Response: {artist_data_page}"
)
break
current_page_albums = artist_data_page.get("items", [])
if not current_page_albums:
logger.info(
f"No more albums on page for artist {artist_id} at offset {offset}. Total fetched so far: {len(all_artist_albums)}."
)
break
logger.debug(
f"Fetched {len(current_page_albums)} albums on current page for artist {artist_id}."
)
all_artist_albums.extend(current_page_albums)
# Check if Spotify indicates a next page URL
if artist_data_page.get("next"):
offset += limit # Increment offset by the limit used for the request
else:
logger.info(
f"No next page URL for artist {artist_id}. Pagination complete. Total albums fetched: {len(all_artist_albums)}."
)
break
if not all_artist_albums:
raise ValueError(
f"Failed to retrieve artist data or no albums found for artist ID {artist_id}"
) )
# Filter albums based on the allowed types using album_group field (like in manager.py) # Filter albums based on the allowed types using album_group field (like in manager.py)
@@ -201,13 +211,23 @@ def download_artist_albums(url, album_type=None, request_args=None, username=Non
duplicate_albums = [] duplicate_albums = []
for album in filtered_albums: for album in filtered_albums:
album_url = album.get("external_urls", {}).get("spotify", "") album_id = album.get("id")
album_name = album.get("name", "Unknown Album") if not album_id:
album_artists = album.get("artists", []) logger.warning("Skipping album without ID in filtered list.")
continue
# fetch album details to construct URL and names
try:
album_obj = download_queue_manager.client.get_album(
album_id, include_tracks=False
) # type: ignore[attr-defined]
except AttributeError:
# If download_queue_manager lacks a client, fallback to shared client
album_obj = get_client().get_album(album_id, include_tracks=False)
album_url = album_obj.get("external_urls", {}).get("spotify", "")
album_name = album_obj.get("name", "Unknown Album")
artists = album_obj.get("artists", []) or []
album_artist = ( album_artist = (
album_artists[0].get("name", "Unknown Artist") artists[0].get("name", "Unknown Artist") if artists else "Unknown Artist"
if album_artists
else "Unknown Artist"
) )
if not album_url: if not album_url:

View File

@@ -28,7 +28,7 @@ CONFIG_FILE_PATH = Path("./data/config/main.json")
DEFAULT_MAIN_CONFIG = { DEFAULT_MAIN_CONFIG = {
"service": "spotify", "service": "spotify",
"version": "3.3.0", "version": "3.3.1",
"spotify": "", "spotify": "",
"deezer": "", "deezer": "",
"fallback": False, "fallback": False,
@@ -52,6 +52,7 @@ DEFAULT_MAIN_CONFIG = {
"watch": {}, "watch": {},
"realTimeMultiplier": 0, "realTimeMultiplier": 0,
"padNumberWidth": 3, "padNumberWidth": 3,
"sseUpdateIntervalSeconds": 1, # Configurable SSE update interval (default: 1s)
} }
@@ -188,7 +189,7 @@ task_annotations = {
"rate_limit": f"{MAX_CONCURRENT_DL}/m", "rate_limit": f"{MAX_CONCURRENT_DL}/m",
}, },
"routes.utils.celery_tasks.trigger_sse_update_task": { "routes.utils.celery_tasks.trigger_sse_update_task": {
"rate_limit": "500/m", # Allow high rate for real-time SSE updates "rate_limit": "60/m", # Throttle to 1 update/sec per task (matches SSE throttle)
"default_retry_delay": 1, # Quick retry for SSE updates "default_retry_delay": 1, # Quick retry for SSE updates
"max_retries": 1, # Limited retries for best-effort delivery "max_retries": 1, # Limited retries for best-effort delivery
"ignore_result": True, # Don't store results for SSE tasks "ignore_result": True, # Don't store results for SSE tasks

View File

@@ -2,8 +2,12 @@ import subprocess
import logging import logging
import time import time
import threading import threading
import os
import sys import sys
from dotenv import load_dotenv
load_dotenv()
# Import Celery task utilities # Import Celery task utilities
from .celery_config import get_config_params, MAX_CONCURRENT_DL from .celery_config import get_config_params, MAX_CONCURRENT_DL
@@ -41,8 +45,10 @@ class CeleryManager:
) )
def _get_worker_command( def _get_worker_command(
self, queues, concurrency, worker_name_suffix, log_level="INFO" self, queues, concurrency, worker_name_suffix, log_level_env=None
): ):
# Use LOG_LEVEL from environment if provided, otherwise default to INFO
log_level = log_level_env if log_level_env else os.getenv("LOG_LEVEL", "WARNING").upper()
# Use a unique worker name to avoid conflicts. # Use a unique worker name to avoid conflicts.
# %h is replaced by celery with the actual hostname. # %h is replaced by celery with the actual hostname.
hostname = f"worker_{worker_name_suffix}@%h" hostname = f"worker_{worker_name_suffix}@%h"
@@ -67,6 +73,12 @@ class CeleryManager:
logger.debug(f"Generated Celery command: {' '.join(command)}") logger.debug(f"Generated Celery command: {' '.join(command)}")
return command return command
def _get_worker_env(self):
# Inherit current environment, but set NO_CONSOLE_LOG=1 for subprocess
env = os.environ.copy()
env["NO_CONSOLE_LOG"] = "1"
return env
def _process_output_reader(self, stream, log_prefix, error=False): def _process_output_reader(self, stream, log_prefix, error=False):
logger.debug(f"Log reader thread started for {log_prefix}") logger.debug(f"Log reader thread started for {log_prefix}")
try: try:
@@ -123,6 +135,7 @@ class CeleryManager:
queues="downloads", queues="downloads",
concurrency=self.concurrency, concurrency=self.concurrency,
worker_name_suffix="dlw", # Download Worker worker_name_suffix="dlw", # Download Worker
log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
) )
logger.info( logger.info(
f"Starting Celery Download Worker with command: {' '.join(download_cmd)}" f"Starting Celery Download Worker with command: {' '.join(download_cmd)}"
@@ -134,6 +147,7 @@ class CeleryManager:
text=True, text=True,
bufsize=1, bufsize=1,
universal_newlines=True, universal_newlines=True,
env=self._get_worker_env(),
) )
self.download_log_thread_stdout = threading.Thread( self.download_log_thread_stdout = threading.Thread(
target=self._process_output_reader, target=self._process_output_reader,
@@ -157,7 +171,8 @@ class CeleryManager:
queues="utility_tasks,default", # Listen to utility and default queues="utility_tasks,default", # Listen to utility and default
concurrency=5, # Increased concurrency for SSE updates and utility tasks concurrency=5, # Increased concurrency for SSE updates and utility tasks
worker_name_suffix="utw", # Utility Worker worker_name_suffix="utw", # Utility Worker
log_level="ERROR", # Reduce log verbosity for utility worker (only errors) log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
) )
logger.info( logger.info(
f"Starting Celery Utility Worker with command: {' '.join(utility_cmd)}" f"Starting Celery Utility Worker with command: {' '.join(utility_cmd)}"
@@ -169,6 +184,7 @@ class CeleryManager:
text=True, text=True,
bufsize=1, bufsize=1,
universal_newlines=True, universal_newlines=True,
env=self._get_worker_env(),
) )
self.utility_log_thread_stdout = threading.Thread( self.utility_log_thread_stdout = threading.Thread(
target=self._process_output_reader, target=self._process_output_reader,
@@ -256,7 +272,7 @@ class CeleryManager:
# Restart only the download worker # Restart only the download worker
download_cmd = self._get_worker_command( download_cmd = self._get_worker_command(
"downloads", self.concurrency, "dlw" "downloads", self.concurrency, "dlw", log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper()
) )
logger.info( logger.info(
f"Restarting Celery Download Worker with command: {' '.join(download_cmd)}" f"Restarting Celery Download Worker with command: {' '.join(download_cmd)}"
@@ -372,10 +388,7 @@ celery_manager = CeleryManager()
# Example of how to use the manager (typically called from your main app script) # Example of how to use the manager (typically called from your main app script)
if __name__ == "__main__": if __name__ == "__main__":
logging.basicConfig( # Removed logging.basicConfig as it's handled by the main app's setup_logging
level=logging.INFO,
format="%(message)s",
)
logger.info("Starting Celery Manager example...") logger.info("Starting Celery Manager example...")
celery_manager.start() celery_manager.start()
try: try:

View File

@@ -246,7 +246,7 @@ class CeleryDownloadQueueManager:
"""Initialize the Celery-based download queue manager""" """Initialize the Celery-based download queue manager"""
self.max_concurrent = MAX_CONCURRENT_DL self.max_concurrent = MAX_CONCURRENT_DL
self.paused = False self.paused = False
print( logger.info(
f"Celery Download Queue Manager initialized with max_concurrent={self.max_concurrent}" f"Celery Download Queue Manager initialized with max_concurrent={self.max_concurrent}"
) )

View File

@@ -285,9 +285,16 @@ def setup_celery_logging(**kwargs):
""" """
This handler ensures Celery uses our application logging settings This handler ensures Celery uses our application logging settings
instead of its own. Prevents duplicate log configurations. instead of its own. Prevents duplicate log configurations.
Also disables console logging if NO_CONSOLE_LOG=1 is set in the environment.
""" """
# Using the root logger's handlers and level preserves our config root_logger = logging.getLogger()
return logging.getLogger() import os
if os.environ.get("NO_CONSOLE_LOG") == "1":
# Remove all StreamHandlers (console handlers) from the root logger
handlers_to_remove = [h for h in root_logger.handlers if isinstance(h, logging.StreamHandler)]
for h in handlers_to_remove:
root_logger.removeHandler(h)
return root_logger
# The initialization of a worker will log the worker configuration # The initialization of a worker will log the worker configuration

View File

@@ -1,422 +1,101 @@
import spotipy import os
from spotipy.oauth2 import SpotifyClientCredentials from typing import Any, Dict, Optional
from routes.utils.credentials import _get_global_spotify_api_creds import threading
import logging
import time
from typing import Dict, Optional, Any
# Import Deezer API and logging from deezspot.libutils import LibrespotClient
from deezspot.deezloader.dee_api import API as DeezerAPI
# Initialize logger # Config helpers to resolve active credentials
logger = logging.getLogger(__name__) from routes.utils.celery_config import get_config_params
from routes.utils.credentials import get_spotify_blob_path
# Global Spotify client instance for reuse
_spotify_client = None
_last_client_init = 0
_client_init_interval = 3600 # Reinitialize client every hour
def _get_spotify_client(): # -------- Shared Librespot client (process-wide) --------
"""
Get or create a Spotify client with global credentials.
Implements client reuse and periodic reinitialization.
"""
global _spotify_client, _last_client_init
current_time = time.time() _shared_client: Optional[LibrespotClient] = None
_shared_blob_path: Optional[str] = None
_client_lock = threading.RLock()
# Reinitialize client if it's been more than an hour or if client doesn't exist
if (
_spotify_client is None
or current_time - _last_client_init > _client_init_interval
):
client_id, client_secret = _get_global_spotify_api_creds()
if not client_id or not client_secret: def _resolve_blob_path() -> str:
raise ValueError( cfg = get_config_params() or {}
"Global Spotify API client_id or client_secret not configured in ./data/creds/search.json." active_account = cfg.get("spotify")
if not active_account:
raise RuntimeError("Active Spotify account not set in configuration.")
blob_path = get_spotify_blob_path(active_account)
abs_path = os.path.abspath(str(blob_path))
if not os.path.isfile(abs_path):
raise FileNotFoundError(
f"Spotify credentials blob not found for account '{active_account}' at {abs_path}"
) )
return abs_path
# Create new client
_spotify_client = spotipy.Spotify(
client_credentials_manager=SpotifyClientCredentials(
client_id=client_id, client_secret=client_secret
)
)
_last_client_init = current_time
logger.info("Spotify client initialized/reinitialized")
return _spotify_client
def _rate_limit_handler(func): def get_client() -> LibrespotClient:
""" """
Decorator to handle rate limiting with exponential backoff. Return a shared LibrespotClient instance initialized from the active account blob.
Re-initializes if the active account changes.
""" """
global _shared_client, _shared_blob_path
def wrapper(*args, **kwargs): with _client_lock:
max_retries = 3 desired_blob = _resolve_blob_path()
base_delay = 1 if _shared_client is None or _shared_blob_path != desired_blob:
for attempt in range(max_retries):
try: try:
return func(*args, **kwargs) if _shared_client is not None:
except Exception as e: _shared_client.close()
if "429" in str(e) or "rate limit" in str(e).lower(): except Exception:
if attempt < max_retries - 1: pass
delay = base_delay * (2**attempt) _shared_client = LibrespotClient(stored_credentials_path=desired_blob)
logger.warning(f"Rate limited, retrying in {delay} seconds...") _shared_blob_path = desired_blob
time.sleep(delay) return _shared_client
continue
raise e
return func(*args, **kwargs) # -------- Thin wrapper API (programmatic use) --------
return wrapper
def create_client(credentials_path: str) -> LibrespotClient:
"""
Create a LibrespotClient from a librespot-generated credentials.json file.
"""
abs_path = os.path.abspath(credentials_path)
if not os.path.isfile(abs_path):
raise FileNotFoundError(f"Credentials file not found: {abs_path}")
return LibrespotClient(stored_credentials_path=abs_path)
def close_client(client: LibrespotClient) -> None:
"""
Dispose a LibrespotClient instance.
"""
client.close()
def get_track(client: LibrespotClient, track_in: str) -> Dict[str, Any]:
"""Fetch a track object."""
return client.get_track(track_in)
def get_album(
client: LibrespotClient, album_in: str, include_tracks: bool = False
) -> Dict[str, Any]:
"""Fetch an album object; optionally include expanded tracks."""
return client.get_album(album_in, include_tracks=include_tracks)
def get_artist(client: LibrespotClient, artist_in: str) -> Dict[str, Any]:
"""Fetch an artist object."""
return client.get_artist(artist_in)
def get_playlist(
client: LibrespotClient, playlist_in: str, expand_items: bool = False
) -> Dict[str, Any]:
"""Fetch a playlist object; optionally expand track items to full track objects."""
return client.get_playlist(playlist_in, expand_items=expand_items)
@_rate_limit_handler
def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]: def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]:
""" """
Get playlist metadata only (no tracks) to avoid rate limiting. Fetch playlist metadata using the shared client without expanding items.
Args:
playlist_id: The Spotify playlist ID
Returns:
Dictionary with playlist metadata (name, description, owner, etc.)
""" """
client = _get_spotify_client() client = get_client()
return get_playlist(client, playlist_id, expand_items=False)
try:
# Get basic playlist info without tracks
playlist = client.playlist(
playlist_id,
fields="id,name,description,owner,images,snapshot_id,public,followers,tracks.total",
)
# Add a flag to indicate this is metadata only
playlist["_metadata_only"] = True
playlist["_tracks_loaded"] = False
logger.debug(
f"Retrieved playlist metadata for {playlist_id}: {playlist.get('name', 'Unknown')}"
)
return playlist
except Exception as e:
logger.error(f"Error fetching playlist metadata for {playlist_id}: {e}")
raise
@_rate_limit_handler
def get_playlist_tracks(
playlist_id: str, limit: int = 100, offset: int = 0
) -> Dict[str, Any]:
"""
Get playlist tracks with pagination support to handle large playlists efficiently.
Args:
playlist_id: The Spotify playlist ID
limit: Number of tracks to fetch per request (max 100)
offset: Starting position for pagination
Returns:
Dictionary with tracks data
"""
client = _get_spotify_client()
try:
# Get tracks with specified limit and offset
tracks_data = client.playlist_tracks(
playlist_id,
limit=min(limit, 100), # Spotify API max is 100
offset=offset,
fields="items(track(id,name,artists,album,external_urls,preview_url,duration_ms,explicit,popularity)),total,limit,offset",
)
logger.debug(
f"Retrieved {len(tracks_data.get('items', []))} tracks for playlist {playlist_id} (offset: {offset})"
)
return tracks_data
except Exception as e:
logger.error(f"Error fetching playlist tracks for {playlist_id}: {e}")
raise
@_rate_limit_handler
def get_playlist_full(playlist_id: str, batch_size: int = 100) -> Dict[str, Any]:
"""
Get complete playlist data with all tracks, using batched requests to avoid rate limiting.
Args:
playlist_id: The Spotify playlist ID
batch_size: Number of tracks to fetch per batch (max 100)
Returns:
Complete playlist data with all tracks
"""
try:
# First get metadata
playlist = get_playlist_metadata(playlist_id)
# Get total track count
total_tracks = playlist.get("tracks", {}).get("total", 0)
if total_tracks == 0:
playlist["tracks"] = {"items": [], "total": 0}
return playlist
# Fetch all tracks in batches
all_tracks = []
offset = 0
while offset < total_tracks:
batch = get_playlist_tracks(playlist_id, limit=batch_size, offset=offset)
batch_items = batch.get("items", [])
all_tracks.extend(batch_items)
offset += len(batch_items)
# Add small delay between batches to be respectful to API
if offset < total_tracks:
time.sleep(0.1)
# Update playlist with complete tracks data
playlist["tracks"] = {
"items": all_tracks,
"total": total_tracks,
"limit": batch_size,
"offset": 0,
}
playlist["_metadata_only"] = False
playlist["_tracks_loaded"] = True
logger.info(
f"Retrieved complete playlist {playlist_id} with {total_tracks} tracks"
)
return playlist
except Exception as e:
logger.error(f"Error fetching complete playlist {playlist_id}: {e}")
raise
def check_playlist_updated(playlist_id: str, last_snapshot_id: str) -> bool:
"""
Check if playlist has been updated by comparing snapshot_id.
This is much more efficient than fetching all tracks.
Args:
playlist_id: The Spotify playlist ID
last_snapshot_id: The last known snapshot_id
Returns:
True if playlist has been updated, False otherwise
"""
try:
metadata = get_playlist_metadata(playlist_id)
current_snapshot_id = metadata.get("snapshot_id")
return current_snapshot_id != last_snapshot_id
except Exception as e:
logger.error(f"Error checking playlist update status for {playlist_id}: {e}")
raise
@_rate_limit_handler
def get_spotify_info(
spotify_id: str,
spotify_type: str,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Dict[str, Any]:
"""
Get info from Spotify API using Spotipy directly.
Optimized to prevent rate limiting by using appropriate endpoints.
Args:
spotify_id: The Spotify ID of the entity
spotify_type: The type of entity (track, album, playlist, artist, artist_discography, episode, album_tracks)
limit (int, optional): The maximum number of items to return. Used for pagination.
offset (int, optional): The index of the first item to return. Used for pagination.
Returns:
Dictionary with the entity information
"""
client = _get_spotify_client()
try:
if spotify_type == "track":
return client.track(spotify_id)
elif spotify_type == "album":
return client.album(spotify_id)
elif spotify_type == "album_tracks":
# Fetch album's tracks with pagination support
return client.album_tracks(
spotify_id, limit=limit or 20, offset=offset or 0
)
elif spotify_type == "playlist":
# Use optimized playlist fetching
return get_playlist_full(spotify_id)
elif spotify_type == "playlist_metadata":
# Get only metadata for playlists
return get_playlist_metadata(spotify_id)
elif spotify_type == "artist":
return client.artist(spotify_id)
elif spotify_type == "artist_discography":
# Get artist's albums with pagination
albums = client.artist_albums(
spotify_id,
limit=limit or 20,
offset=offset or 0,
include_groups="single,album,appears_on",
)
return albums
elif spotify_type == "episode":
return client.episode(spotify_id)
else:
raise ValueError(f"Unsupported Spotify type: {spotify_type}")
except Exception as e:
logger.error(f"Error fetching {spotify_type} {spotify_id}: {e}")
raise
# Cache for playlist metadata to reduce API calls
_playlist_metadata_cache: Dict[str, tuple[Dict[str, Any], float]] = {}
_cache_ttl = 300 # 5 minutes cache
def get_cached_playlist_metadata(playlist_id: str) -> Optional[Dict[str, Any]]:
"""
Get playlist metadata from cache if available and not expired.
Args:
playlist_id: The Spotify playlist ID
Returns:
Cached metadata or None if not available/expired
"""
if playlist_id in _playlist_metadata_cache:
cached_data, timestamp = _playlist_metadata_cache[playlist_id]
if time.time() - timestamp < _cache_ttl:
return cached_data
return None
def cache_playlist_metadata(playlist_id: str, metadata: Dict[str, Any]):
"""
Cache playlist metadata with timestamp.
Args:
playlist_id: The Spotify playlist ID
metadata: The metadata to cache
"""
_playlist_metadata_cache[playlist_id] = (metadata, time.time())
def get_playlist_info_optimized(
playlist_id: str, include_tracks: bool = False
) -> Dict[str, Any]:
"""
Optimized playlist info function that uses caching and selective loading.
Args:
playlist_id: The Spotify playlist ID
include_tracks: Whether to include track data (default: False to save API calls)
Returns:
Playlist data with or without tracks
"""
# Check cache first
cached_metadata = get_cached_playlist_metadata(playlist_id)
if cached_metadata and not include_tracks:
logger.debug(f"Returning cached metadata for playlist {playlist_id}")
return cached_metadata
if include_tracks:
# Get complete playlist data
playlist_data = get_playlist_full(playlist_id)
# Cache the metadata portion
metadata_only = {k: v for k, v in playlist_data.items() if k != "tracks"}
metadata_only["_metadata_only"] = True
metadata_only["_tracks_loaded"] = False
cache_playlist_metadata(playlist_id, metadata_only)
return playlist_data
else:
# Get metadata only
metadata = get_playlist_metadata(playlist_id)
cache_playlist_metadata(playlist_id, metadata)
return metadata
# Keep the existing Deezer functions unchanged
def get_deezer_info(deezer_id, deezer_type, limit=None):
    """
    Get info from Deezer API.

    Args:
        deezer_id: The Deezer ID of the entity.
        deezer_type: The type of entity (track, album, playlist, artist, episode,
                     artist_top_tracks, artist_albums, artist_related,
                     artist_radio, artist_playlists).
        limit (int, optional): The maximum number of items to return. Used for
                               artist_top_tracks, artist_albums, artist_playlists.
                               Deezer API methods usually have their own defaults (e.g., 25)
                               if limit is not provided or None is passed to them.

    Returns:
        Dictionary with the entity information.

    Raises:
        ValueError: If deezer_type is unsupported.
        Various exceptions from DeezerAPI (NoDataApi, QuotaExceeded, requests.exceptions.RequestException, etc.)
    """
    logger.debug(
        f"Fetching Deezer info for ID {deezer_id}, type {deezer_type}, limit {limit}"
    )
    # DeezerAPI uses class methods; its @classmethod __init__ handles setup.
    # No specific ARL or account handling here as DeezerAPI seems to use general endpoints.

    # Entity types whose fetchers take only the ID.
    plain_fetchers = {
        "track": DeezerAPI.get_track,
        "album": DeezerAPI.get_album,
        "playlist": DeezerAPI.get_playlist,
        "artist": DeezerAPI.get_artist,
        "episode": DeezerAPI.get_episode,
        "artist_related": DeezerAPI.get_artist_related,
        "artist_radio": DeezerAPI.get_artist_radio,
    }
    # Entity types whose fetchers accept an optional limit keyword.
    # Note: "artist_albums" maps to get_artist_top_albums.
    limited_fetchers = {
        "artist_top_tracks": DeezerAPI.get_artist_top_tracks,
        "artist_albums": DeezerAPI.get_artist_top_albums,
        "artist_playlists": DeezerAPI.get_artist_top_playlists,
    }

    fetch = plain_fetchers.get(deezer_type)
    if fetch is not None:
        return fetch(deezer_id)

    fetch = limited_fetchers.get(deezer_type)
    if fetch is not None:
        if limit is not None:
            return fetch(deezer_id, limit=limit)
        return fetch(deezer_id)  # Use API default limit

    logger.error(f"Unsupported Deezer type: {deezer_type}")
    raise ValueError(f"Unsupported Deezer type: {deezer_type}")

View File

@@ -3,6 +3,8 @@ from deezspot.spotloader import SpoLogin
from deezspot.deezloader import DeeLogin from deezspot.deezloader import DeeLogin
from pathlib import Path from pathlib import Path
from routes.utils.credentials import get_credential, _get_global_spotify_api_creds from routes.utils.credentials import get_credential, _get_global_spotify_api_creds
from routes.utils.credentials import get_spotify_blob_path
from routes.utils.celery_config import get_config_params
from routes.utils.celery_queue_manager import get_existing_task_id from routes.utils.celery_queue_manager import get_existing_task_id
from routes.utils.errors import DuplicateDownloadError from routes.utils.errors import DuplicateDownloadError
@@ -95,10 +97,11 @@ def download_playlist(
spotify_client_id=global_spotify_client_id, spotify_client_id=global_spotify_client_id,
spotify_client_secret=global_spotify_client_secret, spotify_client_secret=global_spotify_client_secret,
progress_callback=progress_callback, progress_callback=progress_callback,
spotify_credentials_path=str(get_spotify_blob_path(main)),
) )
dl.download_playlistspo( dl.download_playlistspo(
link_playlist=url, # Spotify URL link_playlist=url, # Spotify URL
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, # Deezer quality quality_download=quality, # Deezer quality
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -161,7 +164,7 @@ def download_playlist(
) )
spo.download_playlist( spo.download_playlist(
link_playlist=url, # Spotify URL link_playlist=url, # Spotify URL
output_dir="/app/downloads", output_dir="./downloads",
quality_download=fall_quality, # Spotify quality quality_download=fall_quality, # Spotify quality
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -224,7 +227,7 @@ def download_playlist(
) )
spo.download_playlist( spo.download_playlist(
link_playlist=url, link_playlist=url,
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, quality_download=quality,
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -265,10 +268,15 @@ def download_playlist(
spotify_client_id=global_spotify_client_id, # Global Spotify keys spotify_client_id=global_spotify_client_id, # Global Spotify keys
spotify_client_secret=global_spotify_client_secret, # Global Spotify keys spotify_client_secret=global_spotify_client_secret, # Global Spotify keys
progress_callback=progress_callback, progress_callback=progress_callback,
spotify_credentials_path=(
str(get_spotify_blob_path(get_config_params().get("spotify")))
if get_config_params().get("spotify")
else None
),
) )
dl.download_playlistdee( # Deezer URL, download via Deezer dl.download_playlistdee( # Deezer URL, download via Deezer
link_playlist=url, link_playlist=url,
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, quality_download=quality,
recursive_quality=recursive_quality, # Usually False for playlists to get individual track qualities recursive_quality=recursive_quality, # Usually False for playlists to get individual track qualities
recursive_download=False, recursive_download=False,

View File

@@ -6,6 +6,7 @@ from routes.utils.credentials import (
_get_global_spotify_api_creds, _get_global_spotify_api_creds,
get_spotify_blob_path, get_spotify_blob_path,
) )
from routes.utils.celery_config import get_config_params
def download_track( def download_track(
@@ -90,11 +91,12 @@ def download_track(
spotify_client_id=global_spotify_client_id, # Global creds spotify_client_id=global_spotify_client_id, # Global creds
spotify_client_secret=global_spotify_client_secret, # Global creds spotify_client_secret=global_spotify_client_secret, # Global creds
progress_callback=progress_callback, progress_callback=progress_callback,
spotify_credentials_path=str(get_spotify_blob_path(main)),
) )
# download_trackspo means: Spotify URL, download via Deezer # download_trackspo means: Spotify URL, download via Deezer
dl.download_trackspo( dl.download_trackspo(
link_track=url, # Spotify URL link_track=url, # Spotify URL
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, # Deezer quality quality_download=quality, # Deezer quality
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -153,7 +155,7 @@ def download_track(
) )
spo.download_track( spo.download_track(
link_track=url, # Spotify URL link_track=url, # Spotify URL
output_dir="/app/downloads", output_dir="./downloads",
quality_download=fall_quality, # Spotify quality quality_download=fall_quality, # Spotify quality
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -211,7 +213,7 @@ def download_track(
) )
spo.download_track( spo.download_track(
link_track=url, link_track=url,
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, quality_download=quality,
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,
@@ -251,10 +253,15 @@ def download_track(
spotify_client_id=global_spotify_client_id, # Global Spotify keys for internal Spo use by DeeLogin spotify_client_id=global_spotify_client_id, # Global Spotify keys for internal Spo use by DeeLogin
spotify_client_secret=global_spotify_client_secret, # Global Spotify keys spotify_client_secret=global_spotify_client_secret, # Global Spotify keys
progress_callback=progress_callback, progress_callback=progress_callback,
spotify_credentials_path=(
str(get_spotify_blob_path(get_config_params().get("spotify")))
if get_config_params().get("spotify")
else None
),
) )
dl.download_trackdee( # Deezer URL, download via Deezer dl.download_trackdee( # Deezer URL, download via Deezer
link_track=url, link_track=url,
output_dir="/app/downloads", output_dir="./downloads",
quality_download=quality, quality_download=quality,
recursive_quality=recursive_quality, recursive_quality=recursive_quality,
recursive_download=False, recursive_download=False,

View File

@@ -27,15 +27,9 @@ from routes.utils.watch.db import (
get_artist_batch_next_offset, get_artist_batch_next_offset,
set_artist_batch_next_offset, set_artist_batch_next_offset,
) )
from routes.utils.get_info import (
get_spotify_info,
get_playlist_metadata,
get_playlist_tracks,
) # To fetch playlist, track, artist, and album details
from routes.utils.celery_queue_manager import download_queue_manager
# Added import to fetch base formatting config from routes.utils.celery_queue_manager import download_queue_manager, get_config_params
from routes.utils.celery_queue_manager import get_config_params from routes.utils.get_info import get_client
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
MAIN_CONFIG_FILE_PATH = Path("./data/config/main.json") MAIN_CONFIG_FILE_PATH = Path("./data/config/main.json")
@@ -358,7 +352,7 @@ def find_tracks_in_playlist(
while not_found_tracks and offset < 10000: # Safety limit while not_found_tracks and offset < 10000: # Safety limit
try: try:
tracks_batch = get_playlist_tracks( tracks_batch = _fetch_playlist_tracks_page(
playlist_spotify_id, limit=limit, offset=offset playlist_spotify_id, limit=limit, offset=offset
) )
@@ -459,7 +453,9 @@ def check_watched_playlists(specific_playlist_id: str = None):
ensure_playlist_table_schema(playlist_spotify_id) ensure_playlist_table_schema(playlist_spotify_id)
# First, get playlist metadata to check if it has changed # First, get playlist metadata to check if it has changed
current_playlist_metadata = get_playlist_metadata(playlist_spotify_id) current_playlist_metadata = _fetch_playlist_metadata(
playlist_spotify_id
)
if not current_playlist_metadata: if not current_playlist_metadata:
logger.error( logger.error(
f"Playlist Watch Manager: Failed to fetch metadata from Spotify for playlist {playlist_spotify_id}." f"Playlist Watch Manager: Failed to fetch metadata from Spotify for playlist {playlist_spotify_id}."
@@ -507,7 +503,7 @@ def check_watched_playlists(specific_playlist_id: str = None):
progress_offset, _ = get_playlist_batch_progress( progress_offset, _ = get_playlist_batch_progress(
playlist_spotify_id playlist_spotify_id
) )
tracks_batch = get_playlist_tracks( tracks_batch = _fetch_playlist_tracks_page(
playlist_spotify_id, playlist_spotify_id,
limit=batch_limit, limit=batch_limit,
offset=progress_offset, offset=progress_offset,
@@ -573,7 +569,7 @@ def check_watched_playlists(specific_playlist_id: str = None):
logger.info( logger.info(
f"Playlist Watch Manager: Fetching one batch (limit={batch_limit}, offset={progress_offset}) for playlist '{playlist_name}'." f"Playlist Watch Manager: Fetching one batch (limit={batch_limit}, offset={progress_offset}) for playlist '{playlist_name}'."
) )
tracks_batch = get_playlist_tracks( tracks_batch = _fetch_playlist_tracks_page(
playlist_spotify_id, limit=batch_limit, offset=progress_offset playlist_spotify_id, limit=batch_limit, offset=progress_offset
) )
batch_items = tracks_batch.get("items", []) if tracks_batch else [] batch_items = tracks_batch.get("items", []) if tracks_batch else []
@@ -734,8 +730,8 @@ def check_watched_artists(specific_artist_id: str = None):
logger.debug( logger.debug(
f"Artist Watch Manager: Fetching albums for {artist_spotify_id}. Limit: {limit}, Offset: {offset}" f"Artist Watch Manager: Fetching albums for {artist_spotify_id}. Limit: {limit}, Offset: {offset}"
) )
artist_albums_page = get_spotify_info( artist_albums_page = _fetch_artist_discography_page(
artist_spotify_id, "artist_discography", limit=limit, offset=offset artist_spotify_id, limit=limit, offset=offset
) )
current_page_albums = ( current_page_albums = (
@@ -911,7 +907,8 @@ def run_playlist_check_over_intervals(playlist_spotify_id: str) -> None:
# Determine if we are done: no active processing snapshot and no pending sync # Determine if we are done: no active processing snapshot and no pending sync
cfg = get_watch_config() cfg = get_watch_config()
interval = cfg.get("watchPollIntervalSeconds", 3600) interval = cfg.get("watchPollIntervalSeconds", 3600)
metadata = get_playlist_metadata(playlist_spotify_id) # Use local helper that leverages Librespot client
metadata = _fetch_playlist_metadata(playlist_spotify_id)
if not metadata: if not metadata:
logger.warning( logger.warning(
f"Manual Playlist Runner: Could not load metadata for {playlist_spotify_id}. Stopping." f"Manual Playlist Runner: Could not load metadata for {playlist_spotify_id}. Stopping."
@@ -1098,7 +1095,7 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
# Get configuration settings # Get configuration settings
output_dir = ( output_dir = (
"/app/downloads" # This matches the output_dir used in download functions "./downloads" # This matches the output_dir used in download functions
) )
# Get all tracks for the playlist # Get all tracks for the playlist
@@ -1125,14 +1122,14 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
skipped_missing_final_path = 0 skipped_missing_final_path = 0
for track in tracks: for track in tracks:
# Use final_path from deezspot summary and convert from /app/downloads to ../ relative path # Use final_path from deezspot summary and convert from ./downloads to ../ relative path
final_path = track.get("final_path") final_path = track.get("final_path")
if not final_path: if not final_path:
skipped_missing_final_path += 1 skipped_missing_final_path += 1
continue continue
normalized = str(final_path).replace("\\", "/") normalized = str(final_path).replace("\\", "/")
if normalized.startswith("/app/downloads/"): if normalized.startswith("./downloads/"):
relative_path = normalized.replace("/app/downloads/", "../", 1) relative_path = normalized.replace("./downloads/", "../", 1)
elif "/downloads/" in normalized.lower(): elif "/downloads/" in normalized.lower():
idx = normalized.lower().rfind("/downloads/") idx = normalized.lower().rfind("/downloads/")
relative_path = "../" + normalized[idx + len("/downloads/") :] relative_path = "../" + normalized[idx + len("/downloads/") :]
@@ -1167,3 +1164,84 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
f"Error updating m3u file for playlist {playlist_spotify_id}: {e}", f"Error updating m3u file for playlist {playlist_spotify_id}: {e}",
exc_info=True, exc_info=True,
) )
# Helper to build a Librespot client from active account
def _build_librespot_client():
    """Return the shared Librespot client managed by routes.utils.get_info.

    Raises:
        RuntimeError: If the client could not be initialized. The original
            exception is chained as the cause so tracebacks keep the root error.
    """
    try:
        # Reuse shared client managed in routes.utils.get_info
        return get_client()
    except Exception as e:
        # Chain with "from e" so the underlying failure is not masked.
        raise RuntimeError(f"Failed to initialize Librespot client: {e}") from e
def _fetch_playlist_metadata(playlist_id: str) -> dict:
    """Fetch playlist metadata (without expanding its items) via Librespot."""
    librespot = _build_librespot_client()
    return librespot.get_playlist(playlist_id, expand_items=False)
def _fetch_playlist_tracks_page(playlist_id: str, limit: int, offset: int) -> dict:
    """Return one page of a playlist's tracks, expanding only that page.

    The playlist is fetched without expanding all items; only the requested
    window is expanded via the client (which caches track lookups). Tracks
    that cannot be expanded fall back to a minimal stub.

    Args:
        playlist_id: The Spotify playlist ID.
        limit: Requested page size (falsy values fall back to 50).
        offset: Zero-based index of the first item of the page.

    Returns:
        Mapping with "items" (expanded page), "total", "limit" (requested
        page size, not the number of items actually returned) and "offset".
    """
    client = _build_librespot_client()
    # Fetch playlist with minimal items to avoid expanding all tracks unnecessarily
    pl = client.get_playlist(playlist_id, expand_items=False)
    tracks_obj = pl.get("tracks", {}) or {}
    items = tracks_obj.get("items", [])
    total = tracks_obj.get("total", len(items))
    start = max(0, offset or 0)
    end = start + max(1, limit or 50)
    page_items_minimal = items[start:end]
    # Expand only the tracks in this page using client cache for efficiency
    page_items_expanded = []
    for item in page_items_minimal:
        # Normalize once: playlist payloads may contain null entries, and
        # dict(None) below would raise TypeError.
        item = item or {}
        track_stub = item.get("track") or {}
        track_id = track_stub.get("id")
        expanded_track = None
        if track_id:
            try:
                expanded_track = client.get_track(track_id)
            except Exception:
                expanded_track = None
        if expanded_track is None:
            # Keep stub as fallback; ensure structure
            expanded_track = {
                k: v
                for k, v in track_stub.items()
                if k in ("id", "uri", "type", "external_urls")
            }
        # Propagate local flag onto track for downstream checks
        if isinstance(item, dict) and item.get("is_local"):
            expanded_track["is_local"] = True
        # Rebuild item with expanded track
        new_item = dict(item)
        new_item["track"] = expanded_track
        page_items_expanded.append(new_item)
    return {
        "items": page_items_expanded,
        "total": total,
        "limit": end - start,
        "offset": start,
    }
def _fetch_artist_discography_page(artist_id: str, limit: int, offset: int) -> dict:
    """Return one page of an artist's releases, flattened across album groups."""
    # LibrespotClient.get_artist returns a pruned mapping; flatten common discography groups
    client = _build_librespot_client()
    artist = client.get_artist(artist_id)

    group_keys = (
        "album_group",
        "single_group",
        "compilation_group",
        "appears_on_group",
    )
    all_items = []
    for key in group_keys:
        group = artist.get(key)
        if isinstance(group, list):
            all_items.extend(group)
        elif isinstance(group, dict):
            # Nested structures may keep their releases under "items" or "releases".
            nested = group.get("items") or group.get("releases") or []
            if isinstance(nested, list):
                all_items.extend(nested)

    start = max(0, offset or 0)
    end = start + max(1, limit or 50)
    return {
        "items": all_items[start:end],
        "total": len(all_items),
        "limit": limit,
        "offset": start,
    }

View File

@@ -1,7 +1,7 @@
{ {
"name": "spotizerr-ui", "name": "spotizerr-ui",
"private": true, "private": true,
"version": "3.3.0", "version": "4.0.0",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "vite", "dev": "vite",

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M18.1716 1C18.702 1 19.2107 1.21071 19.5858 1.58579L22.4142 4.41421C22.7893 4.78929 23 5.29799 23 5.82843V20C23 21.6569 21.6569 23 20 23H4C2.34315 23 1 21.6569 1 20V4C1 2.34315 2.34315 1 4 1H18.1716ZM4 3C3.44772 3 3 3.44772 3 4V20C3 20.5523 3.44772 21 4 21L5 21L5 15C5 13.3431 6.34315 12 8 12L16 12C17.6569 12 19 13.3431 19 15V21H20C20.5523 21 21 20.5523 21 20V6.82843C21 6.29799 20.7893 5.78929 20.4142 5.41421L18.5858 3.58579C18.2107 3.21071 17.702 3 17.1716 3H17V5C17 6.65685 15.6569 8 14 8H10C8.34315 8 7 6.65685 7 5V3H4ZM17 21V15C17 14.4477 16.5523 14 16 14L8 14C7.44772 14 7 14.4477 7 15L7 21L17 21ZM9 3H15V5C15 5.55228 14.5523 6 14 6H10C9.44772 6 9 5.55228 9 5V3Z" fill="#0F0F0F"/>
</svg>

After

Width:  |  Height:  |  Size: 968 B

View File

@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M13 2a1 1 0 0 0-2 0v4.167a1 1 0 1 0 2 0V2ZM13 17.833a1 1 0 0 0-2 0V22a1 1 0 1 0 2 0v-4.167ZM16.834 12a1 1 0 0 1 1-1H22a1 1 0 0 1 0 2h-4.166a1 1 0 0 1-1-1ZM2 11a1 1 0 0 0 0 2h4.167a1 1 0 1 0 0-2H2ZM19.916 4.085a1 1 0 0 1 0 1.414l-2.917 2.917A1 1 0 1 1 15.585 7l2.917-2.916a1 1 0 0 1 1.414 0ZM8.415 16.999a1 1 0 0 0-1.414-1.414L4.084 18.5A1 1 0 1 0 5.5 19.916l2.916-2.917ZM15.585 15.585a1 1 0 0 1 1.414 0l2.917 2.916a1 1 0 1 1-1.414 1.415l-2.917-2.917a1 1 0 0 1 0-1.414ZM5.499 4.085a1 1 0 0 0-1.415 1.414l2.917 2.917A1 1 0 0 0 8.415 7L5.5 4.085Z" fill="#000000"/></svg>

After

Width:  |  Height:  |  Size: 796 B

View File

@@ -2,10 +2,10 @@ import { Link } from "@tanstack/react-router";
import { useContext, useEffect } from "react"; import { useContext, useEffect } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import { QueueContext, getStatus } from "../contexts/queue-context"; import { QueueContext, getStatus } from "../contexts/queue-context";
import type { AlbumType } from "../types/spotify"; import type { LibrespotAlbumType } from "@/types/librespot";
interface AlbumCardProps { interface AlbumCardProps {
album: AlbumType; album: LibrespotAlbumType;
onDownload?: () => void; onDownload?: () => void;
} }
@@ -38,7 +38,7 @@ export const AlbumCard = ({ album, onDownload }: AlbumCardProps) => {
onDownload(); onDownload();
}} }}
disabled={!!status && status !== "error"} disabled={!!status && status !== "error"}
className="absolute bottom-2 right-2 p-2 bg-button-success hover:bg-button-success-hover text-button-success-text rounded-full transition-opacity shadow-lg opacity-0 group-hover:opacity-100 duration-300 disabled:opacity-50 disabled:cursor-not-allowed" className="absolute bottom-2 right-2 p-2 bg-button-success hover:bg-button-success-hover text-button-success-text rounded-full transition-opacity shadow-lg opacity-100 sm:opacity-0 sm:group-hover:opacity-100 duration-300 z-10 disabled:opacity-50 disabled:cursor-not-allowed"
title={ title={
status status
? status === "queued" ? status === "queued"
@@ -53,9 +53,9 @@ export const AlbumCard = ({ album, onDownload }: AlbumCardProps) => {
? status === "queued" ? status === "queued"
? "Queued." ? "Queued."
: status === "error" : status === "error"
? <img src="/download.svg" alt="Download" className="w-5 h-5 icon-inverse" /> ? <img src="/download.svg" alt="Download" className="w-5 h-5 logo" />
: "Downloading..." : <img src="/spinner.svg" alt="Loading" className="w-5 h-5 animate-spin" />
: <img src="/download.svg" alt="Download" className="w-5 h-5 icon-inverse" /> : <img src="/download.svg" alt="Download" className="w-5 h-5 logo" />
} }
</button> </button>
)} )}

View File

@@ -772,7 +772,7 @@ export const Queue = () => {
const priorities = { const priorities = {
"real-time": 1, downloading: 2, processing: 3, initializing: 4, "real-time": 1, downloading: 2, processing: 3, initializing: 4,
retrying: 5, queued: 6, done: 7, completed: 7, error: 8, cancelled: 9, skipped: 10 retrying: 5, queued: 6, done: 7, completed: 7, error: 8, cancelled: 9, skipped: 10
}; } as Record<string, number>;
return priorities[status as keyof typeof priorities] || 10; return priorities[status as keyof typeof priorities] || 10;
}; };

View File

@@ -65,7 +65,7 @@ export const SearchResultCard = ({ id, name, subtitle, imageUrl, type, onDownloa
? "Queued." ? "Queued."
: status === "error" : status === "error"
? <img src="/download.svg" alt="Download" className="w-5 h-5 logo" /> ? <img src="/download.svg" alt="Download" className="w-5 h-5 logo" />
: "Downloading..." : <img src="/spinner.svg" alt="Loading" className="w-5 h-5 animate-spin" />
: <img src="/download.svg" alt="Download" className="w-5 h-5 logo" /> : <img src="/download.svg" alt="Download" className="w-5 h-5 logo" />
} }
</button> </button>

View File

@@ -53,6 +53,19 @@ function extractApiErrorMessage(error: unknown): string {
if (typeof data?.detail === "string") return data.detail; if (typeof data?.detail === "string") return data.detail;
if (typeof data?.message === "string") return data.message; if (typeof data?.message === "string") return data.message;
if (typeof data?.error === "string") return data.error; if (typeof data?.error === "string") return data.error;
// If data.error is an object, try to extract a message from it
if (typeof data?.error === "object" && data.error !== null && typeof data.error.message === "string") {
return data.error.message;
}
// If data is an object but none of the above matched, try JSON stringifying it
if (typeof data === "object" && data !== null) {
try {
return JSON.stringify(data);
} catch (e) {
// Fallback if stringify fails
return fallback;
}
}
} }
if (typeof anyErr?.message === "string") return anyErr.message; if (typeof anyErr?.message === "string") return anyErr.message;
return fallback; return fallback;
@@ -66,7 +79,6 @@ export function AccountsTab() {
const queryClient = useQueryClient(); const queryClient = useQueryClient();
const [activeService, setActiveService] = useState<Service>("spotify"); const [activeService, setActiveService] = useState<Service>("spotify");
const [isAdding, setIsAdding] = useState(false); const [isAdding, setIsAdding] = useState(false);
const [submitError, setSubmitError] = useState<string | null>(null);
const { data: credentials, isLoading } = useQuery({ const { data: credentials, isLoading } = useQuery({
queryKey: ["credentials", activeService], queryKey: ["credentials", activeService],
@@ -87,12 +99,10 @@ export function AccountsTab() {
queryClient.invalidateQueries({ queryKey: ["credentials", activeService] }); queryClient.invalidateQueries({ queryKey: ["credentials", activeService] });
queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate config to update active Spotify/Deezer account in UI queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate config to update active Spotify/Deezer account in UI
setIsAdding(false); setIsAdding(false);
setSubmitError(null);
reset(); reset();
}, },
onError: (error) => { onError: (error) => {
const msg = extractApiErrorMessage(error); const msg = extractApiErrorMessage(error);
setSubmitError(msg);
toast.error(msg); toast.error(msg);
}, },
}); });
@@ -110,7 +120,6 @@ export function AccountsTab() {
}); });
const onSubmit: SubmitHandler<AccountFormData> = (data) => { const onSubmit: SubmitHandler<AccountFormData> = (data) => {
setSubmitError(null);
addMutation.mutate({ service: activeService, data }); addMutation.mutate({ service: activeService, data });
}; };
@@ -118,11 +127,6 @@ export function AccountsTab() {
<form onSubmit={handleSubmit(onSubmit)} className="p-4 border border-line dark:border-border-dark rounded-lg mt-4 space-y-4"> <form onSubmit={handleSubmit(onSubmit)} className="p-4 border border-line dark:border-border-dark rounded-lg mt-4 space-y-4">
<h4 className="font-semibold text-content-primary dark:text-content-primary-dark">Add New {activeService === "spotify" ? "Spotify" : "Deezer"} Account</h4> <h4 className="font-semibold text-content-primary dark:text-content-primary-dark">Add New {activeService === "spotify" ? "Spotify" : "Deezer"} Account</h4>
{submitError && (
<div className="text-error-text bg-error-muted border border-error rounded p-2 text-sm">
{submitError}
</div>
)}
<div className="flex flex-col gap-2"> <div className="flex flex-col gap-2">
<label htmlFor="accountName" className="text-content-primary dark:text-content-primary-dark">Account Name</label> <label htmlFor="accountName" className="text-content-primary dark:text-content-primary-dark">Account Name</label>
@@ -170,8 +174,13 @@ export function AccountsTab() {
type="submit" type="submit"
disabled={addMutation.isPending} disabled={addMutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save Account"
> >
{addMutation.isPending ? "Saving..." : "Save Account"} {addMutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
<button <button
type="button" type="button"

View File

@@ -100,8 +100,8 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
queryClient.invalidateQueries({ queryKey: ["config"] }); queryClient.invalidateQueries({ queryKey: ["config"] });
}, },
onError: (error) => { onError: (error) => {
console.error("Failed to save settings", error.message); console.error("Failed to save settings", (error as any).message);
toast.error(`Failed to save settings: ${error.message}`); toast.error(`Failed to save settings: ${(error as any).message}`);
}, },
}); });
@@ -180,8 +180,13 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
type="submit" type="submit"
disabled={mutation.isPending || !!validationError} disabled={mutation.isPending || !!validationError}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save Download Settings"
> >
{mutation.isPending ? "Saving..." : "Save Download Settings"} {mutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
</div> </div>
</div> </div>
@@ -359,7 +364,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
type="number" type="number"
min="1" min="1"
{...register("retryDelaySeconds")} {...register("retryDelaySeconds")}
className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" className="block w_full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
/> />
</div> </div>
<div className="flex flex-col gap-2"> <div className="flex flex-col gap-2">
@@ -369,7 +374,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
type="number" type="number"
min="0" min="0"
{...register("retryDelayIncrease")} {...register("retryDelayIncrease")}
className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline_none focus:ring-2 focus:ring-input-focus"
/> />
</div> </div>
</div> </div>

View File

@@ -88,8 +88,8 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
queryClient.invalidateQueries({ queryKey: ["config"] }); queryClient.invalidateQueries({ queryKey: ["config"] });
}, },
onError: (error) => { onError: (error) => {
console.error("Failed to save formatting settings:", error.message); console.error("Failed to save formatting settings:", (error as any).message);
toast.error(`Failed to save settings: ${error.message}`); toast.error(`Failed to save settings: ${(error as any).message}`);
}, },
}); });
@@ -131,8 +131,13 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
type="submit" type="submit"
disabled={mutation.isPending} disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save Formatting Settings"
> >
{mutation.isPending ? "Saving..." : "Save Formatting Settings"} {mutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
</div> </div>
</div> </div>

View File

@@ -83,8 +83,13 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro
type="submit" type="submit"
disabled={mutation.isPending} disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save General Settings"
> >
{mutation.isPending ? "Saving..." : "Save General Settings"} {mutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
</div> </div>
</div> </div>

View File

@@ -126,7 +126,7 @@ export function ProfileTab() {
</p> </p>
</div> </div>
<div> <div>
<label className="block text-sm font-medium text-content-secondary dark:text-content-secondary-dark mb-1"> <label className="block text_sm font-medium text-content-secondary dark:text-content-secondary-dark mb-1">
Role Role
</label> </label>
<p className="text-content-primary dark:text-content-primary-dark"> <p className="text-content-primary dark:text-content-primary-dark">
@@ -177,7 +177,7 @@ export function ProfileTab() {
</div> </div>
<div> <div>
<label className="block text-sm font-medium text-content-secondary dark:text-content-secondary-dark mb-2"> <label className="block text-sm font_medium text-content-secondary dark:text-content-secondary-dark mb-2">
New Password New Password
</label> </label>
<input <input
@@ -226,8 +226,13 @@ export function ProfileTab() {
type="submit" type="submit"
disabled={isChangingPassword} disabled={isChangingPassword}
className="px-4 py-2 bg-primary hover:bg-primary-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed" className="px-4 py-2 bg-primary hover:bg-primary-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
title="Save Password"
> >
{isChangingPassword ? "Changing Password..." : "Change Password"} {isChangingPassword ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin inline-block logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 inline-block logo" />
)}
</button> </button>
<button <button
type="button" type="button"
@@ -252,7 +257,7 @@ export function ProfileTab() {
{/* SSO User Notice */} {/* SSO User Notice */}
{user?.is_sso_user && ( {user?.is_sso_user && (
<div className="bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-6"> <div className="bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-6">
<h3 className="text-lg font-medium text-blue-900 dark:text-blue-100 mb-2"> <h3 className="text-lg font-semibold text-blue-900 dark:text-blue-100 mb-2">
SSO Account SSO Account
</h3> </h3>
<p className="text-blue-800 dark:text-blue-200"> <p className="text-blue-800 dark:text-blue-200">

View File

@@ -54,8 +54,8 @@ function SpotifyApiForm() {
queryClient.invalidateQueries({ queryKey: ["spotifyApiConfig"] }); queryClient.invalidateQueries({ queryKey: ["spotifyApiConfig"] });
}, },
onError: (e) => { onError: (e) => {
console.error("Failed to save Spotify API settings:", e.message); console.error("Failed to save Spotify API settings:", (e as any).message);
toast.error(`Failed to save: ${e.message}`); toast.error(`Failed to save: ${(e as any).message}`);
}, },
}); });
@@ -75,8 +75,13 @@ function SpotifyApiForm() {
type="submit" type="submit"
disabled={mutation.isPending} disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save Spotify API"
> >
{mutation.isPending ? "Saving..." : "Save Spotify API"} {mutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
</div> </div>
</div> </div>
@@ -119,7 +124,7 @@ function WebhookForm() {
queryClient.invalidateQueries({ queryKey: ["webhookConfig"] }); queryClient.invalidateQueries({ queryKey: ["webhookConfig"] });
}, },
onError: (e) => { onError: (e) => {
toast.error(`Failed to save: ${e.message}`); toast.error(`Failed to save: ${(e as any).message}`);
}, },
}); });
@@ -128,7 +133,7 @@ function WebhookForm() {
onSuccess: () => { onSuccess: () => {
// No toast needed // No toast needed
}, },
onError: (e) => toast.error(`Webhook test failed: ${e.message}`), onError: (e) => toast.error(`Webhook test failed: ${(e as any).message}`),
}); });
useEffect(() => { useEffect(() => {
@@ -147,8 +152,13 @@ function WebhookForm() {
type="submit" type="submit"
disabled={mutation.isPending} disabled={mutation.isPending}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save Webhook"
> >
{mutation.isPending ? "Saving..." : "Save Webhook"} {mutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
</div> </div>
</div> </div>

View File

@@ -252,7 +252,7 @@ export function UserManagementTab() {
errors.email errors.email
? "border-error focus:border-error" ? "border-error focus:border-error"
: "border-input-border dark:border-input-border-dark focus:border-primary" : "border-input-border dark:border-input-border-dark focus:border-primary"
} bg-input-background dark:bg-input-background-dark text-content-primary dark:text-content-primary-dark focus:outline-none focus:ring-2 focus:ring-primary/20`} } bg-input-background dark:bg-input-background-dark text-content-primary dark:text-content-primary-dark focus:outline_none focus:ring-2 focus:ring-primary/20`}
placeholder="Enter email (optional)" placeholder="Enter email (optional)"
disabled={isCreating} disabled={isCreating}
/> />
@@ -302,15 +302,13 @@ export function UserManagementTab() {
<button <button
type="submit" type="submit"
disabled={isCreating} disabled={isCreating}
className="px-4 py-2 bg-primary hover:bg-primary-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2" className="px-4 py-2 bg-primary hover:bg-primary-hover text_white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
title="Save User"
> >
{isCreating ? ( {isCreating ? (
<> <img src="/spinner.svg" alt="Saving" className="w-4 h-4 animate-spin logo" />
<div className="w-4 h-4 border-2 border-white border-t-transparent rounded-full animate-spin" />
Creating...
</>
) : ( ) : (
"Create User" <img src="/save.svg" alt="Save" className="w-4 h-4 logo" />
)} )}
</button> </button>
</div> </div>
@@ -474,14 +472,12 @@ export function UserManagementTab() {
type="submit" type="submit"
disabled={isResettingPassword} disabled={isResettingPassword}
className="px-4 py-2 bg-primary hover:bg-primary-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2" className="px-4 py-2 bg-primary hover:bg-primary-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
title="Save Password"
> >
{isResettingPassword ? ( {isResettingPassword ? (
<> <img src="/spinner.svg" alt="Saving" className="w-4 h-4 animate-spin logo" />
<div className="w-4 h-4 border-2 border-white border-t-transparent rounded-full animate-spin" />
Resetting...
</>
) : ( ) : (
"Reset Password" <img src="/save.svg" alt="Save" className="w-4 h-4 logo" />
)} )}
</button> </button>
</div> </div>

View File

@@ -89,6 +89,7 @@ export function WatchTab() {
onSuccess: () => { onSuccess: () => {
toast.success("Watch settings saved successfully!"); toast.success("Watch settings saved successfully!");
queryClient.invalidateQueries({ queryKey: ["watchConfig"] }); queryClient.invalidateQueries({ queryKey: ["watchConfig"] });
queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate main config to refresh watch.enabled in SettingsProvider
}, },
onError: (error: any) => { onError: (error: any) => {
const message = error?.response?.data?.error || error?.message || "Unknown error"; const message = error?.response?.data?.error || error?.message || "Unknown error";
@@ -180,8 +181,13 @@ export function WatchTab() {
type="submit" type="submit"
disabled={mutation.isPending || !!validationError} disabled={mutation.isPending || !!validationError}
className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50" className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
title="Save Watch Settings"
> >
{mutation.isPending ? "Saving..." : "Save Watch Settings"} {mutation.isPending ? (
<img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
) : (
<img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
)}
</button> </button>
</div> </div>
</div> </div>

View File

@@ -10,7 +10,7 @@ import {
} from "./queue-context"; } from "./queue-context";
import { toast } from "sonner"; import { toast } from "sonner";
import { v4 as uuidv4 } from "uuid"; import { v4 as uuidv4 } from "uuid";
import type { CallbackObject } from "@/types/callbacks"; import type { CallbackObject, SummaryObject, IDs } from "@/types/callbacks";
import { useAuth } from "@/contexts/auth-context"; import { useAuth } from "@/contexts/auth-context";
export function QueueProvider({ children }: { children: ReactNode }) { export function QueueProvider({ children }: { children: ReactNode }) {
@@ -43,51 +43,86 @@ export function QueueProvider({ children }: { children: ReactNode }) {
return items.filter(item => isActiveStatus(getStatus(item))).length; return items.filter(item => isActiveStatus(getStatus(item))).length;
}, [items]); }, [items]);
// Improved deduplication - check both id and taskId fields const extractIDs = useCallback((cb?: CallbackObject): IDs | undefined => {
const itemExists = useCallback((taskId: string, items: QueueItem[]): boolean => { if (!cb) return undefined;
return items.some(item => if ((cb as any).track) return (cb as any).track.ids as IDs;
item.id === taskId || if ((cb as any).album) return (cb as any).album.ids as IDs;
item.taskId === taskId || if ((cb as any).playlist) return (cb as any).playlist.ids as IDs;
// Also check spotify ID to prevent same track being added multiple times return undefined;
(item.spotifyId && item.spotifyId === taskId)
);
}, []); }, []);
// Convert SSE task data to QueueItem // Convert SSE task data to QueueItem
const createQueueItemFromTask = useCallback((task: any): QueueItem => { const createQueueItemFromTask = useCallback((task: any): QueueItem => {
const spotifyId = task.original_url?.split("/").pop() || ""; const lastCallback = task.last_line as CallbackObject | undefined;
const ids = extractIDs(lastCallback);
// Determine container type up-front
const downloadType = (task.download_type || task.type || "track") as DownloadType;
// Compute spotifyId fallback chain
const fallbackFromUrl = task.original_url?.split("/").pop() || "";
const spotifyId = ids?.spotify || fallbackFromUrl || "";
// Extract display info from callback // Extract display info from callback
let name = task.name || "Unknown"; let name: string = task.name || "Unknown";
let artist = task.artist || ""; let artist: string = task.artist || "";
// Handle different callback structures
if (task.last_line) {
try { try {
if ("track" in task.last_line) { if (lastCallback) {
name = task.last_line.track.title || name; if ((lastCallback as any).track) {
artist = task.last_line.track.artists?.[0]?.name || artist; // Prefer parent container title if this is an album/playlist operation
} else if ("album" in task.last_line) { const parent = (lastCallback as any).parent;
name = task.last_line.album.title || name; if (downloadType === "playlist" && parent && (parent as any).title) {
artist = task.last_line.album.artists?.map((a: any) => a.name).join(", ") || artist; name = (parent as any).title || name;
} else if ("playlist" in task.last_line) { artist = (parent as any).owner?.name || artist;
name = task.last_line.playlist.title || name; } else if (downloadType === "album" && parent && (parent as any).title) {
artist = task.last_line.playlist.owner?.name || artist; name = (parent as any).title || name;
const arts = (parent as any).artists || [];
artist = Array.isArray(arts) && arts.length > 0 ? (arts.map((a: any) => a.name).filter(Boolean).join(", ")) : artist;
} else {
// Fallback to the current track's info for standalone track downloads
name = (lastCallback as any).track.title || name;
const arts = (lastCallback as any).track.artists || [];
artist = Array.isArray(arts) && arts.length > 0 ? (arts.map((a: any) => a.name).filter(Boolean).join(", ")) : artist;
}
} else if ((lastCallback as any).album) {
name = (lastCallback as any).album.title || name;
const arts = (lastCallback as any).album.artists || [];
artist = Array.isArray(arts) && arts.length > 0 ? (arts.map((a: any) => a.name).filter(Boolean).join(", ")) : artist;
} else if ((lastCallback as any).playlist) {
name = (lastCallback as any).playlist.title || name;
artist = (lastCallback as any).playlist.owner?.name || artist;
} else if ((lastCallback as any).status === "processing") {
name = (lastCallback as any).name || name;
artist = (lastCallback as any).artist || artist;
}
} }
} catch (error) { } catch (error) {
console.warn(`createQueueItemFromTask: Error parsing callback for task ${task.task_id}:`, error); console.warn(`createQueueItemFromTask: Error parsing callback for task ${task.task_id}:`, error);
} }
// Prefer summary from callback status_info if present; fallback to task.summary
let summary: SummaryObject | undefined = undefined;
try {
const statusInfo = (lastCallback as any)?.status_info;
if (statusInfo && typeof statusInfo === "object" && "summary" in statusInfo) {
summary = (statusInfo as any).summary || undefined;
}
} catch {}
if (!summary && task.summary) {
summary = task.summary as SummaryObject;
} }
const queueItem: QueueItem = { const queueItem: QueueItem = {
id: task.task_id, id: task.task_id,
taskId: task.task_id, taskId: task.task_id,
downloadType: task.download_type || task.type || "track", downloadType,
spotifyId, spotifyId,
lastCallback: task.last_line as CallbackObject, ids,
lastCallback: lastCallback as CallbackObject,
name, name,
artist, artist,
summary: task.summary, summary,
error: task.error, error: task.error,
}; };
@@ -98,7 +133,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
} }
return queueItem; return queueItem;
}, []); }, [extractIDs]);
// Schedule auto-removal for completed tasks // Schedule auto-removal for completed tasks
const scheduleRemoval = useCallback((taskId: string, delay: number = 10000) => { const scheduleRemoval = useCallback((taskId: string, delay: number = 10000) => {
@@ -209,7 +244,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
return; return;
} }
// Handle different message types from optimized backend // Handle message types from backend
const changeType = data.change_type || "update"; const changeType = data.change_type || "update";
const triggerReason = data.trigger_reason || ""; const triggerReason = data.trigger_reason || "";
@@ -221,7 +256,6 @@ export function QueueProvider({ children }: { children: ReactNode }) {
(total_tasks || 0); (total_tasks || 0);
setTotalTasks(calculatedTotal); setTotalTasks(calculatedTotal);
lastHeartbeat.current = Date.now(); lastHeartbeat.current = Date.now();
// Reduce heartbeat logging noise - only log every 10th heartbeat
if (Math.random() < 0.1) { if (Math.random() < 0.1) {
console.log("SSE: Connection active (heartbeat)"); console.log("SSE: Connection active (heartbeat)");
} }
@@ -249,9 +283,10 @@ export function QueueProvider({ children }: { children: ReactNode }) {
setItems(prev => { setItems(prev => {
// Create improved deduplication maps // Create improved deduplication maps
const existingTaskIds = new Set(); const existingTaskIds = new Set<string>();
const existingSpotifyIds = new Set(); const existingSpotifyIds = new Set<string>();
const existingItemsMap = new Map(); const existingDeezerIds = new Set<string>();
const existingItemsMap = new Map<string, QueueItem>();
prev.forEach(item => { prev.forEach(item => {
if (item.id) { if (item.id) {
@@ -263,6 +298,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
existingItemsMap.set(item.taskId, item); existingItemsMap.set(item.taskId, item);
} }
if (item.spotifyId) existingSpotifyIds.add(item.spotifyId); if (item.spotifyId) existingSpotifyIds.add(item.spotifyId);
if (item.ids?.deezer) existingDeezerIds.add(item.ids.deezer);
}); });
// Process each updated task // Process each updated task
@@ -271,33 +307,37 @@ export function QueueProvider({ children }: { children: ReactNode }) {
const newTasksToAdd: QueueItem[] = []; const newTasksToAdd: QueueItem[] = [];
for (const task of updatedTasks) { for (const task of updatedTasks) {
const taskId = task.task_id; const taskId = task.task_id as string;
const spotifyId = task.original_url?.split("/").pop();
// Skip if already processed (shouldn't happen but safety check) // Skip if already processed (shouldn't happen but safety check)
if (processedTaskIds.has(taskId)) continue; if (processedTaskIds.has(taskId)) continue;
processedTaskIds.add(taskId); processedTaskIds.add(taskId);
// Check if this task exists in current queue // Check if this task exists in current queue
const existingItem = existingItemsMap.get(taskId) || const existingItem = existingItemsMap.get(taskId);
Array.from(existingItemsMap.values()).find(item => const newItemCandidate = createQueueItemFromTask(task);
item.spotifyId === spotifyId const candidateSpotify = newItemCandidate.spotifyId;
const candidateDeezer = newItemCandidate.ids?.deezer;
// If not found by id, try to match by identifiers
const existingById = existingItem || Array.from(existingItemsMap.values()).find(item =>
(candidateSpotify && item.spotifyId === candidateSpotify) ||
(candidateDeezer && item.ids?.deezer === candidateDeezer)
); );
if (existingItem) { if (existingById) {
// Skip SSE updates for items that are already cancelled by user action // Skip SSE updates for items that are already cancelled by user action
const existingStatus = getStatus(existingItem); const existingStatus = getStatus(existingById);
if (existingStatus === "cancelled" && existingItem.error === "Cancelled by user") { if (existingStatus === "cancelled" && existingById.error === "Cancelled by user") {
console.log(`SSE: Skipping update for user-cancelled task ${taskId}`); console.log(`SSE: Skipping update for user-cancelled task ${taskId}`);
continue; continue;
} }
// Update existing item // Update existing item
const updatedItem = createQueueItemFromTask(task); const updatedItem = newItemCandidate;
const status = getStatus(updatedItem); const status = getStatus(updatedItem);
const previousStatus = getStatus(existingItem); const previousStatus = getStatus(existingById);
// Only log significant status changes
if (previousStatus !== status) { if (previousStatus !== status) {
console.log(`SSE: Status change ${taskId}: ${previousStatus}${status}`); console.log(`SSE: Status change ${taskId}: ${previousStatus}${status}`);
} }
@@ -305,33 +345,32 @@ export function QueueProvider({ children }: { children: ReactNode }) {
// Schedule removal for terminal states // Schedule removal for terminal states
if (isTerminalStatus(status)) { if (isTerminalStatus(status)) {
const delay = status === "cancelled" ? 5000 : 10000; const delay = status === "cancelled" ? 5000 : 10000;
scheduleRemoval(existingItem.id, delay); scheduleRemoval(existingById.id, delay);
console.log(`SSE: Scheduling removal for terminal task ${taskId} (${status}) in ${delay}ms`); console.log(`SSE: Scheduling removal for terminal task ${taskId} (${status}) in ${delay}ms`);
} }
updatedItems.push(updatedItem); updatedItems.push(updatedItem);
} else { } else {
// This is a new task from SSE // This is a new task from SSE
const newItem = createQueueItemFromTask(task); const newItem = newItemCandidate;
const status = getStatus(newItem); const status = getStatus(newItem);
// Check for duplicates by spotify ID // Check for duplicates by identifiers
if (spotifyId && existingSpotifyIds.has(spotifyId)) { if ((candidateSpotify && existingSpotifyIds.has(candidateSpotify)) ||
console.log(`SSE: Skipping duplicate by spotify ID: ${spotifyId}`); (candidateDeezer && existingDeezerIds.has(candidateDeezer))) {
console.log(`SSE: Skipping duplicate by identifier: ${candidateSpotify || candidateDeezer}`);
continue; continue;
} }
// Check if this is a pending download // Check if this is a pending download (by spotify id for now)
if (pendingDownloads.current.has(spotifyId || taskId)) { if (pendingDownloads.current.has(candidateSpotify || newItem.id)) {
console.log(`SSE: Skipping pending download: ${taskId}`); console.log(`SSE: Skipping pending download: ${taskId}`);
continue; continue;
} }
// For terminal tasks from SSE, these should be tasks that just transitioned // For terminal tasks from SSE
// (backend now filters out already-terminal tasks)
if (isTerminalStatus(status)) { if (isTerminalStatus(status)) {
console.log(`SSE: Adding recently completed task: ${taskId} (${status})`); console.log(`SSE: Adding recently completed task: ${taskId} (${status})`);
// Schedule immediate removal for terminal tasks
const delay = status === "cancelled" ? 5000 : 10000; const delay = status === "cancelled" ? 5000 : 10000;
scheduleRemoval(newItem.id, delay); scheduleRemoval(newItem.id, delay);
} else if (isActiveStatus(status)) { } else if (isActiveStatus(status)) {
@@ -349,7 +388,9 @@ export function QueueProvider({ children }: { children: ReactNode }) {
const finalItems = prev.map(item => { const finalItems = prev.map(item => {
const updated = updatedItems.find(u => const updated = updatedItems.find(u =>
u.id === item.id || u.taskId === item.id || u.id === item.id || u.taskId === item.id ||
u.id === item.taskId || u.taskId === item.taskId u.id === item.taskId || u.taskId === item.taskId ||
(u.spotifyId && u.spotifyId === item.spotifyId) ||
(u.ids?.deezer && u.ids.deezer === item.ids?.deezer)
); );
return updated || item; return updated || item;
}); });
@@ -422,7 +463,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
toast.error("Failed to establish connection"); toast.error("Failed to establish connection");
} }
} }
}, [createQueueItemFromTask, scheduleRemoval, startHealthCheck, authEnabled]); }, [createQueueItemFromTask, startHealthCheck, authEnabled, stopHealthCheck]);
const disconnectSSE = useCallback(() => { const disconnectSSE = useCallback(() => {
if (sseConnection.current) { if (sseConnection.current) {
@@ -449,17 +490,19 @@ export function QueueProvider({ children }: { children: ReactNode }) {
if (newTasks.length > 0) { if (newTasks.length > 0) {
setItems(prev => { setItems(prev => {
const uniqueNewTasks = newTasks const extended = newTasks
.filter((task: any) => !itemExists(task.task_id, prev)) .map((task: any) => createQueueItemFromTask(task))
.filter((task: any) => { .filter((qi: QueueItem) => {
const tempItem = createQueueItemFromTask(task); const status = getStatus(qi);
const status = getStatus(tempItem);
// Consistent filtering - exclude all terminal state tasks in pagination too // Consistent filtering - exclude all terminal state tasks in pagination too
return !isTerminalStatus(status); if (isTerminalStatus(status)) return false;
}) // Dedupe by task id or identifiers
.map((task: any) => createQueueItemFromTask(task)); if (prev.some(p => p.id === qi.id || p.taskId === qi.id)) return false;
if (qi.spotifyId && prev.some(p => p.spotifyId === qi.spotifyId)) return false;
return [...prev, ...uniqueNewTasks]; if (qi.ids?.deezer && prev.some(p => p.ids?.deezer === qi.ids?.deezer)) return false;
return true;
});
return [...prev, ...extended];
}); });
setCurrentPage(nextPage); setCurrentPage(nextPage);
} }
@@ -471,7 +514,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
} finally { } finally {
setIsLoadingMore(false); setIsLoadingMore(false);
} }
}, [hasMore, isLoadingMore, currentPage, createQueueItemFromTask, itemExists]); }, [hasMore, isLoadingMore, currentPage, createQueueItemFromTask]);
// Note: SSE connection state is managed through the initialize effect and restartSSE method // Note: SSE connection state is managed through the initialize effect and restartSSE method
// The auth context should call restartSSE() when login/logout occurs // The auth context should call restartSSE() when login/logout occurs
@@ -496,13 +539,11 @@ export function QueueProvider({ children }: { children: ReactNode }) {
const { tasks, pagination, total_tasks, task_counts } = response.data; const { tasks, pagination, total_tasks, task_counts } = response.data;
const queueItems = tasks const queueItems = tasks
.filter((task: any) => { .map((task: any) => createQueueItemFromTask(task))
const tempItem = createQueueItemFromTask(task); .filter((qi: QueueItem) => {
const status = getStatus(tempItem); const status = getStatus(qi);
// On refresh, exclude all terminal state tasks to start with a clean queue
return !isTerminalStatus(status); return !isTerminalStatus(status);
}) });
.map((task: any) => createQueueItemFromTask(task));
console.log(`Queue initialized: ${queueItems.length} items (filtered out terminal state tasks)`); console.log(`Queue initialized: ${queueItems.length} items (filtered out terminal state tasks)`);
setItems(queueItems); setItems(queueItems);
@@ -542,8 +583,8 @@ export function QueueProvider({ children }: { children: ReactNode }) {
return; return;
} }
// Check if item already exists in queue // Check if item already exists in queue (by spotify id or identifiers on items)
if (itemExists(item.spotifyId, items)) { if (items.some(i => i.spotifyId === item.spotifyId || i.ids?.spotify === item.spotifyId)) {
toast.info("Item already in queue"); toast.info("Item already in queue");
return; return;
} }
@@ -557,7 +598,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
spotifyId: item.spotifyId, spotifyId: item.spotifyId,
name: item.name, name: item.name,
artist: item.artist || "", artist: item.artist || "",
}; } as QueueItem;
setItems(prev => [newItem, ...prev]); setItems(prev => [newItem, ...prev]);
@@ -583,7 +624,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
setItems(prev => prev.filter(i => i.id !== tempId)); setItems(prev => prev.filter(i => i.id !== tempId));
pendingDownloads.current.delete(item.spotifyId); pendingDownloads.current.delete(item.spotifyId);
} }
}, [connectSSE, itemExists, items]); }, [connectSSE, items]);
const removeItem = useCallback((id: string) => { const removeItem = useCallback((id: string) => {
const item = items.find(i => i.id === id); const item = items.find(i => i.id === id);
@@ -610,26 +651,12 @@ export function QueueProvider({ children }: { children: ReactNode }) {
try { try {
await authApiClient.client.post(`/prgs/cancel/${item.taskId}`); await authApiClient.client.post(`/prgs/cancel/${item.taskId}`);
setItems(prev => // Mark as cancelled via error field to preserve type safety
prev.map(i => setItems(prev => prev.map(i => i.id === id ? { ...i, error: "Cancelled by user" } : i));
i.id === id ? {
...i,
error: "Cancelled by user",
lastCallback: {
status: "cancelled",
timestamp: Date.now() / 1000,
type: item.downloadType,
name: item.name,
artist: item.artist
} as unknown as CallbackObject
} : i
)
);
// Remove immediately after showing cancelled state briefly // Remove shortly after showing cancelled state
setTimeout(() => { setTimeout(() => {
setItems(prev => prev.filter(i => i.id !== id)); setItems(prev => prev.filter(i => i.id !== id));
// Clean up any existing removal timer
if (removalTimers.current[id]) { if (removalTimers.current[id]) {
clearTimeout(removalTimers.current[id]); clearTimeout(removalTimers.current[id]);
delete removalTimers.current[id]; delete removalTimers.current[id];
@@ -641,7 +668,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
console.error("Failed to cancel task:", error); console.error("Failed to cancel task:", error);
toast.error(`Failed to cancel: ${item.name}`); toast.error(`Failed to cancel: ${item.name}`);
} }
}, [items, scheduleRemoval]); }, [items]);
const cancelAll = useCallback(async () => { const cancelAll = useCallback(async () => {
const activeItems = items.filter(item => { const activeItems = items.filter(item => {
@@ -657,26 +684,11 @@ export function QueueProvider({ children }: { children: ReactNode }) {
try { try {
await authApiClient.client.post("/prgs/cancel/all"); await authApiClient.client.post("/prgs/cancel/all");
// Mark each active item as cancelled via error field
activeItems.forEach(item => { activeItems.forEach(item => {
setItems(prev => setItems(prev => prev.map(i => i.id === item.id ? { ...i, error: "Cancelled by user" } : i));
prev.map(i =>
i.id === item.id ? {
...i,
error: "Cancelled by user",
lastCallback: {
status: "cancelled",
timestamp: Date.now() / 1000,
type: item.downloadType,
name: item.name,
artist: item.artist
} as unknown as CallbackObject
} : i
)
);
// Remove immediately after showing cancelled state briefly
setTimeout(() => { setTimeout(() => {
setItems(prev => prev.filter(i => i.id !== item.id)); setItems(prev => prev.filter(i => i.id !== item.id));
// Clean up any existing removal timer
if (removalTimers.current[item.id]) { if (removalTimers.current[item.id]) {
clearTimeout(removalTimers.current[item.id]); clearTimeout(removalTimers.current[item.id]);
delete removalTimers.current[item.id]; delete removalTimers.current[item.id];
@@ -689,7 +701,7 @@ export function QueueProvider({ children }: { children: ReactNode }) {
console.error("Failed to cancel all:", error); console.error("Failed to cancel all:", error);
toast.error("Failed to cancel downloads"); toast.error("Failed to cancel downloads");
} }
}, [items, scheduleRemoval]); }, [items]);
const clearCompleted = useCallback(() => { const clearCompleted = useCallback(() => {
setItems(prev => prev.filter(item => { setItems(prev => prev.filter(item => {

View File

@@ -1,23 +1,23 @@
import { createContext, useContext } from "react"; import { createContext, useContext } from "react";
import type { SummaryObject, CallbackObject, TrackCallbackObject, AlbumCallbackObject, PlaylistCallbackObject, ProcessingCallbackObject } from "@/types/callbacks"; import type { SummaryObject, CallbackObject, TrackCallbackObject, AlbumCallbackObject, PlaylistCallbackObject, ProcessingCallbackObject, IDs } from "@/types/callbacks";
export type DownloadType = "track" | "album" | "playlist"; export type DownloadType = "track" | "album" | "playlist";
// Type guards for callback objects // Type guards for callback objects
const isProcessingCallback = (obj: CallbackObject): obj is ProcessingCallbackObject => { const isProcessingCallback = (obj: CallbackObject): obj is ProcessingCallbackObject => {
return "status" in obj && typeof obj.status === "string"; return "status" in obj && typeof (obj as ProcessingCallbackObject).status === "string" && (obj as any).name !== undefined;
}; };
const isTrackCallback = (obj: CallbackObject): obj is TrackCallbackObject => { const isTrackCallback = (obj: CallbackObject): obj is TrackCallbackObject => {
return "track" in obj && "status_info" in obj; return (obj as any).track !== undefined && (obj as any).status_info !== undefined;
}; };
const isAlbumCallback = (obj: CallbackObject): obj is AlbumCallbackObject => { const isAlbumCallback = (obj: CallbackObject): obj is AlbumCallbackObject => {
return "album" in obj && "status_info" in obj; return (obj as any).album !== undefined && (obj as any).status_info !== undefined;
}; };
const isPlaylistCallback = (obj: CallbackObject): obj is PlaylistCallbackObject => { const isPlaylistCallback = (obj: CallbackObject): obj is PlaylistCallbackObject => {
return "playlist" in obj && "status_info" in obj; return (obj as any).playlist !== undefined && (obj as any).status_info !== undefined;
}; };
// Simplified queue item that works directly with callback objects // Simplified queue item that works directly with callback objects
@@ -27,6 +27,9 @@ export interface QueueItem {
downloadType: DownloadType; downloadType: DownloadType;
spotifyId: string; spotifyId: string;
// Primary identifiers from callback (spotify/deezer/isrc/upc)
ids?: IDs;
// Current callback data - this is the source of truth // Current callback data - this is the source of truth
lastCallback?: CallbackObject; lastCallback?: CallbackObject;
@@ -43,6 +46,11 @@ export interface QueueItem {
// Status extraction utilities // Status extraction utilities
export const getStatus = (item: QueueItem): string => { export const getStatus = (item: QueueItem): string => {
// If user locally cancelled the task, reflect it without fabricating a callback
if (item.error === "Cancelled by user") {
return "cancelled";
}
if (!item.lastCallback) { if (!item.lastCallback) {
// Only log if this seems problematic (task has been around for a while) // Only log if this seems problematic (task has been around for a while)
return "initializing"; return "initializing";
@@ -57,32 +65,30 @@ export const getStatus = (item: QueueItem): string => {
if (item.downloadType === "album" || item.downloadType === "playlist") { if (item.downloadType === "album" || item.downloadType === "playlist") {
const currentTrack = item.lastCallback.current_track || 1; const currentTrack = item.lastCallback.current_track || 1;
const totalTracks = item.lastCallback.total_tracks || 1; const totalTracks = item.lastCallback.total_tracks || 1;
const trackStatus = item.lastCallback.status_info.status; const trackStatus = item.lastCallback.status_info.status as string;
// If this is the last track and it's in a terminal state, the parent is done // If this is the last track and it's in a terminal state, the parent is done
if (currentTrack >= totalTracks && ["done", "skipped", "error"].includes(trackStatus)) { if (currentTrack >= totalTracks && ["done", "skipped", "error"].includes(trackStatus)) {
console.log(`🎵 Playlist/Album completed: ${item.name} (track ${currentTrack}/${totalTracks}, status: ${trackStatus})`);
return "completed"; return "completed";
} }
// If track is in terminal state but not the last track, parent is still downloading // If track is in terminal state but not the last track, parent is still downloading
if (["done", "skipped", "error"].includes(trackStatus)) { if (["done", "skipped", "error"].includes(trackStatus)) {
console.log(`🎵 Playlist/Album progress: ${item.name} (track ${currentTrack}/${totalTracks}, status: ${trackStatus}) - continuing...`);
return "downloading"; return "downloading";
} }
// Track is actively being processed // Track is actively being processed
return "downloading"; return "downloading";
} }
return item.lastCallback.status_info.status; return item.lastCallback.status_info.status as string;
} }
if (isAlbumCallback(item.lastCallback)) { if (isAlbumCallback(item.lastCallback)) {
return item.lastCallback.status_info.status; return item.lastCallback.status_info.status as string;
} }
if (isPlaylistCallback(item.lastCallback)) { if (isPlaylistCallback(item.lastCallback)) {
return item.lastCallback.status_info.status; return item.lastCallback.status_info.status as string;
} }
console.warn(`getStatus: Unknown callback type for item ${item.id}:`, item.lastCallback); console.warn(`getStatus: Unknown callback type for item ${item.id}:`, item.lastCallback);
@@ -104,8 +110,8 @@ export const getProgress = (item: QueueItem): number | undefined => {
// For individual tracks // For individual tracks
if (item.downloadType === "track" && isTrackCallback(item.lastCallback)) { if (item.downloadType === "track" && isTrackCallback(item.lastCallback)) {
if (item.lastCallback.status_info.status === "real-time" && "progress" in item.lastCallback.status_info) { if ((item.lastCallback.status_info as any).status === "real-time" && "progress" in (item.lastCallback.status_info as any)) {
return item.lastCallback.status_info.progress; return (item.lastCallback.status_info as any).progress as number;
} }
return undefined; return undefined;
} }
@@ -115,8 +121,9 @@ export const getProgress = (item: QueueItem): number | undefined => {
const callback = item.lastCallback; const callback = item.lastCallback;
const currentTrack = callback.current_track || 1; const currentTrack = callback.current_track || 1;
const totalTracks = callback.total_tracks || 1; const totalTracks = callback.total_tracks || 1;
const trackProgress = (callback.status_info.status === "real-time" && "progress" in callback.status_info) const statusInfo: any = callback.status_info;
? callback.status_info.progress : 0; const trackProgress = (statusInfo.status === "real-time" && "progress" in statusInfo)
? statusInfo.progress : 0;
// Formula: ((completed tracks) + (current track progress / 100)) / total tracks * 100 // Formula: ((completed tracks) + (current track progress / 100)) / total tracks * 100
const completedTracks = currentTrack - 1; const completedTracks = currentTrack - 1;

View File

@@ -235,3 +235,46 @@
} }
} }
@layer components {
/* Artist hero banner (Spotify-like) */
.artist-hero {
position: relative;
height: clamp(220px, 40vh, 460px);
border-radius: 0.75rem;
overflow: hidden;
background-size: cover;
background-position: center center;
background-repeat: no-repeat;
box-shadow: 0 10px 30px rgba(0,0,0,0.35);
}
.artist-hero::after {
content: "";
position: absolute;
inset: 0;
/* top vignette and bottom darkening for readable text */
background: linear-gradient(180deg, rgba(0,0,0,0.25) 0%, rgba(0,0,0,0.45) 55%, rgba(0,0,0,0.70) 100%);
}
.dark .artist-hero::after {
background: linear-gradient(180deg, rgba(0,0,0,0.35) 0%, rgba(0,0,0,0.55) 55%, rgba(0,0,0,0.85) 100%);
}
.artist-hero-content {
position: absolute;
left: 0;
right: 0;
bottom: 0;
padding: 1rem 1.25rem 1.5rem 1.25rem;
color: var(--color-content-inverse);
display: flex;
flex-direction: column;
gap: 0.75rem;
z-index: 1;
}
.artist-hero-title {
font-size: clamp(2rem, 7vw, 5rem);
line-height: 1;
font-weight: 800;
letter-spacing: -0.02em;
text-shadow: 0 2px 24px rgba(0,0,0,0.45);
}
}

View File

@@ -8,7 +8,7 @@ import type {
AuthStatusResponse, AuthStatusResponse,
User, User,
CreateUserRequest, CreateUserRequest,
SSOStatusResponse SSOStatusResponse,
} from "@/types/auth"; } from "@/types/auth";
class AuthApiClient { class AuthApiClient {
@@ -38,7 +38,7 @@ class AuthApiClient {
} }
return config; return config;
}, },
(error) => Promise.reject(error) (error) => Promise.reject(error),
); );
// Response interceptor for error handling // Response interceptor for error handling
@@ -96,7 +96,8 @@ class AuthApiClient {
description: "The server did not respond in time. Please try again later.", description: "The server did not respond in time. Please try again later.",
}); });
} else { } else {
const errorMessage = error.response?.data?.detail || const errorMessage =
error.response?.data?.detail ||
error.response?.data?.error || error.response?.data?.error ||
error.message || error.message ||
"An unknown error occurred."; "An unknown error occurred.";
@@ -109,7 +110,7 @@ class AuthApiClient {
} }
} }
return Promise.reject(error); return Promise.reject(error);
} },
); );
} }
@@ -158,7 +159,7 @@ class AuthApiClient {
if (token) { if (token) {
this.token = token; this.token = token;
console.log(`Loaded ${isRemembered ? 'persistent' : 'session'} token from storage`); console.log(`Loaded ${isRemembered ? "persistent" : "session"} token from storage`);
} }
} }
@@ -229,7 +230,7 @@ class AuthApiClient {
this.setToken(loginData.access_token, rememberMe); this.setToken(loginData.access_token, rememberMe);
toast.success("Login Successful", { toast.success("Login Successful", {
description: `Test , ${loginData.user.username}!`, description: `Welcome, ${loginData.user.username}!`,
}); });
return loginData; return loginData;

View File

@@ -0,0 +1,15 @@
export interface ParsedSpotifyUrl {
type: "track" | "album" | "playlist" | "artist" | "unknown";
id: string;
}
export const parseSpotifyUrl = (url: string): ParsedSpotifyUrl => {
const match = url.match(/https:\/\/open\.spotify\.com(?:\/intl-[a-z]{2})?\/(track|album|playlist|artist)\/([a-zA-Z0-9]+)(?:\?.*)?/);
if (match) {
return {
type: match[1] as ParsedSpotifyUrl["type"],
id: match[2],
};
}
return { type: "unknown", id: "" };
};

View File

@@ -34,7 +34,7 @@ export const indexRoute = createRoute({
loader: async ({ deps: { q, type } }) => { loader: async ({ deps: { q, type } }) => {
if (!q || q.length < 3) return { items: [] }; if (!q || q.length < 3) return { items: [] };
const spotifyUrlRegex = /https:\/\/open\.spotify\.com\/(playlist|album|artist|track)\/([a-zA-Z0-9]+)/; const spotifyUrlRegex = /https:\/\/open\.spotify\.com\/(?:[a-zA-Z0-9-]+\/)*(playlist|album|artist|track)\/([a-zA-Z0-9]+)/;
const match = q.match(spotifyUrlRegex); const match = q.match(spotifyUrlRegex);
if (match) { if (match) {

View File

@@ -3,14 +3,14 @@ import { useEffect, useState, useContext, useRef, useCallback } from "react";
import apiClient from "../lib/api-client"; import apiClient from "../lib/api-client";
import { QueueContext, getStatus } from "../contexts/queue-context"; import { QueueContext, getStatus } from "../contexts/queue-context";
import { useSettings } from "../contexts/settings-context"; import { useSettings } from "../contexts/settings-context";
import type { AlbumType, TrackType } from "../types/spotify"; import type { LibrespotAlbumType, LibrespotTrackType } from "@/types/librespot";
import { toast } from "sonner"; import { toast } from "sonner";
import { FaArrowLeft } from "react-icons/fa"; import { FaArrowLeft } from "react-icons/fa";
export const Album = () => { export const Album = () => {
const { albumId } = useParams({ from: "/album/$albumId" }); const { albumId } = useParams({ from: "/album/$albumId" });
const [album, setAlbum] = useState<AlbumType | null>(null); const [album, setAlbum] = useState<LibrespotAlbumType | null>(null);
const [tracks, setTracks] = useState<TrackType[]>([]); const [tracks, setTracks] = useState<LibrespotTrackType[]>([]);
const [offset, setOffset] = useState<number>(0); const [offset, setOffset] = useState<number>(0);
const [isLoading, setIsLoading] = useState<boolean>(false); const [isLoading, setIsLoading] = useState<boolean>(false);
const [isLoadingMore, setIsLoadingMore] = useState<boolean>(false); const [isLoadingMore, setIsLoadingMore] = useState<boolean>(false);
@@ -19,7 +19,7 @@ export const Album = () => {
const { settings } = useSettings(); const { settings } = useSettings();
const loadMoreRef = useRef<HTMLDivElement | null>(null); const loadMoreRef = useRef<HTMLDivElement | null>(null);
const PAGE_SIZE = 50; const PAGE_SIZE = 6;
if (!context) { if (!context) {
throw new Error("useQueue must be used within a QueueProvider"); throw new Error("useQueue must be used within a QueueProvider");
@@ -48,11 +48,28 @@ export const Album = () => {
setIsLoading(true); setIsLoading(true);
setError(null); setError(null);
try { try {
const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=0`); const response = await apiClient.get(`/album/info?id=${albumId}`);
const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data; const data: LibrespotAlbumType = response.data;
setAlbum(data); setAlbum(data);
setTracks(data.tracks.items || []); // Tracks may be string[] (ids) or expanded track objects depending on backend
setOffset((data.tracks.items || []).length); const rawTracks = data.tracks;
if (Array.isArray(rawTracks) && rawTracks.length > 0) {
if (typeof rawTracks[0] === "string") {
// fetch first page of tracks by id
const ids = (rawTracks as string[]).slice(0, PAGE_SIZE);
const trackResponses = await Promise.all(
ids.map((id) => apiClient.get<LibrespotTrackType>(`/track/info?id=${id}`).then(r => r.data).catch(() => null))
);
setTracks(trackResponses.filter(Boolean) as LibrespotTrackType[]);
setOffset(ids.length);
} else {
setTracks((rawTracks as LibrespotTrackType[]).slice(0, PAGE_SIZE));
setOffset(Math.min(PAGE_SIZE, (rawTracks as LibrespotTrackType[]).length));
}
} else {
setTracks([]);
setOffset(0);
}
} catch (err) { } catch (err) {
setError("Failed to load album"); setError("Failed to load album");
console.error("Error fetching album:", err); console.error("Error fetching album:", err);
@@ -71,20 +88,31 @@ export const Album = () => {
}, [albumId]); }, [albumId]);
const loadMore = useCallback(async () => { const loadMore = useCallback(async () => {
if (!albumId || isLoadingMore || !hasMore) return; if (!albumId || isLoadingMore || !hasMore || !album) return;
setIsLoadingMore(true); setIsLoadingMore(true);
try { try {
const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=${offset}`); // If album.tracks is a list of ids, continue fetching by ids
const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data; if (Array.isArray(album.tracks) && (album.tracks.length === 0 || typeof album.tracks[0] === "string")) {
const newItems = data.tracks.items || []; const ids = (album.tracks as string[]).slice(offset, offset + PAGE_SIZE);
const trackResponses = await Promise.all(
ids.map((id) => apiClient.get<LibrespotTrackType>(`/track/info?id=${id}`).then(r => r.data).catch(() => null))
);
const newItems = trackResponses.filter(Boolean) as LibrespotTrackType[];
setTracks((prev) => [...prev, ...newItems]); setTracks((prev) => [...prev, ...newItems]);
setOffset((prev) => prev + newItems.length); setOffset((prev) => prev + newItems.length);
} else {
// Already expanded; append next page from in-memory array
const raw = album.tracks as LibrespotTrackType[];
const slice = raw.slice(offset, offset + PAGE_SIZE);
setTracks((prev) => [...prev, ...slice]);
setOffset((prev) => prev + slice.length);
}
} catch (err) { } catch (err) {
console.error("Error fetching more tracks:", err); console.error("Error fetching more tracks:", err);
} finally { } finally {
setIsLoadingMore(false); setIsLoadingMore(false);
} }
}, [albumId, offset, isLoadingMore, hasMore]); }, [albumId, offset, isLoadingMore, hasMore, album]);
// IntersectionObserver to trigger loadMore // IntersectionObserver to trigger loadMore
useEffect(() => { useEffect(() => {
@@ -107,7 +135,17 @@ export const Album = () => {
}; };
}, [loadMore]); }, [loadMore]);
const handleDownloadTrack = (track: TrackType) => { // Auto progressive loading regardless of scroll
useEffect(() => {
if (!album) return;
if (!hasMore || isLoadingMore) return;
const t = setTimeout(() => {
loadMore();
}, 300);
return () => clearTimeout(t);
}, [album, hasMore, isLoadingMore, loadMore]);
const handleDownloadTrack = (track: LibrespotTrackType) => {
if (!track.id) return; if (!track.id) return;
toast.info(`Adding ${track.name} to queue...`); toast.info(`Adding ${track.name} to queue...`);
addItem({ spotifyId: track.id, type: "track", name: track.name }); addItem({ spotifyId: track.id, type: "track", name: track.name });
@@ -129,16 +167,7 @@ export const Album = () => {
const isExplicitFilterEnabled = settings?.explicitFilter ?? false; const isExplicitFilterEnabled = settings?.explicitFilter ?? false;
// Show placeholder for an entirely explicit album // Not provided by librespot directly; keep feature gated by settings
if (isExplicitFilterEnabled && album.explicit) {
return (
<div className="p-8 text-center border rounded-lg">
<h2 className="text-2xl font-bold">Explicit Content Filtered</h2>
<p className="mt-2 text-gray-500">This album has been filtered based on your settings.</p>
</div>
);
}
const hasExplicitTrack = tracks.some((track) => track.explicit); const hasExplicitTrack = tracks.some((track) => track.explicit);
return ( return (
@@ -178,7 +207,7 @@ export const Album = () => {
<p className="text-sm text-content-muted dark:text-content-muted-dark"> <p className="text-sm text-content-muted dark:text-content-muted-dark">
{new Date(album.release_date).getFullYear()} {album.total_tracks} songs {new Date(album.release_date).getFullYear()} {album.total_tracks} songs
</p> </p>
<p className="text-xs text-content-muted dark:text-content-muted-dark">{album.label}</p> {album.label && <p className="text-xs text-content-muted dark:text-content-muted-dark">{album.label}</p>}
</div> </div>
</div> </div>
@@ -205,7 +234,7 @@ export const Album = () => {
? "Queued." ? "Queued."
: albumStatus === "error" : albumStatus === "error"
? "Download Album" ? "Download Album"
: "Downloading..." : <img src="/spinner.svg" alt="Loading" className="w-5 h-5 animate-spin inline-block" />
: "Download Album"} : "Download Album"}
</button> </button>
</div> </div>

View File

@@ -2,17 +2,32 @@ import { Link, useParams } from "@tanstack/react-router";
import { useEffect, useState, useContext, useRef, useCallback } from "react"; import { useEffect, useState, useContext, useRef, useCallback } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import apiClient from "../lib/api-client"; import apiClient from "../lib/api-client";
import type { AlbumType, ArtistType, TrackType } from "../types/spotify"; import type { LibrespotAlbumType, LibrespotArtistType, LibrespotTrackType, LibrespotImage } from "@/types/librespot";
import { QueueContext, getStatus } from "../contexts/queue-context"; import { QueueContext, getStatus } from "../contexts/queue-context";
import { useSettings } from "../contexts/settings-context"; import { useSettings } from "../contexts/settings-context";
import { FaArrowLeft, FaBookmark, FaRegBookmark, FaDownload } from "react-icons/fa"; import { FaArrowLeft, FaBookmark, FaRegBookmark, FaDownload } from "react-icons/fa";
import { AlbumCard } from "../components/AlbumCard"; import { AlbumCard } from "../components/AlbumCard";
// Narrow type for the artist info response additions
type ArtistInfoResponse = LibrespotArtistType & {
biography?: Array<{ text?: string; portrait_group?: { image?: LibrespotImage[] } }>;
portrait_group?: { image?: LibrespotImage[] };
top_track?: Array<{ country: string; track: string[] }>;
album_group?: string[];
single_group?: string[];
compilation_group?: string[];
appears_on_group?: string[];
};
export const Artist = () => { export const Artist = () => {
const { artistId } = useParams({ from: "/artist/$artistId" }); const { artistId } = useParams({ from: "/artist/$artistId" });
const [artist, setArtist] = useState<ArtistType | null>(null); const [artist, setArtist] = useState<ArtistInfoResponse | null>(null);
const [albums, setAlbums] = useState<AlbumType[]>([]); const [artistAlbums, setArtistAlbums] = useState<LibrespotAlbumType[]>([]);
const [topTracks, setTopTracks] = useState<TrackType[]>([]); const [artistSingles, setArtistSingles] = useState<LibrespotAlbumType[]>([]);
const [artistCompilations, setArtistCompilations] = useState<LibrespotAlbumType[]>([]);
const [artistAppearsOn, setArtistAppearsOn] = useState<LibrespotAlbumType[]>([]);
const [topTracks, setTopTracks] = useState<LibrespotTrackType[]>([]);
const [bannerUrl, setBannerUrl] = useState<string | null>(null);
const [isWatched, setIsWatched] = useState(false); const [isWatched, setIsWatched] = useState(false);
const [artistStatus, setArtistStatus] = useState<string | null>(null); const [artistStatus, setArtistStatus] = useState<string | null>(null);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
@@ -22,8 +37,11 @@ export const Artist = () => {
const sentinelRef = useRef<HTMLDivElement | null>(null); const sentinelRef = useRef<HTMLDivElement | null>(null);
// Pagination state // Pagination state
const LIMIT = 20; // tune as you like const ALBUM_BATCH = 12;
const [offset, setOffset] = useState<number>(0); const [albumOffset, setAlbumOffset] = useState<number>(0);
const [singleOffset, setSingleOffset] = useState<number>(0);
const [compOffset, setCompOffset] = useState<number>(0);
const [appearsOffset, setAppearsOffset] = useState<number>(0);
const [loading, setLoading] = useState<boolean>(false); const [loading, setLoading] = useState<boolean>(false);
const [loadingMore, setLoadingMore] = useState<boolean>(false); const [loadingMore, setLoadingMore] = useState<boolean>(false);
const [hasMore, setHasMore] = useState<boolean>(true); // assume more until we learn otherwise const [hasMore, setHasMore] = useState<boolean>(true); // assume more until we learn otherwise
@@ -33,6 +51,13 @@ export const Artist = () => {
} }
const { addItem, items } = context; const { addItem, items } = context;
// Preload commonly used icons ASAP (before first buttons need them)
useEffect(() => {
const i = new Image();
i.src = "/download.svg";
return () => { /* no-op */ };
}, []);
// Track queue status mapping // Track queue status mapping
const trackStatuses = topTracks.reduce((acc, t) => { const trackStatuses = topTracks.reduce((acc, t) => {
const qi = items.find(item => item.downloadType === "track" && item.spotifyId === t.id); const qi = items.find(item => item.downloadType === "track" && item.spotifyId === t.id);
@@ -40,19 +65,13 @@ export const Artist = () => {
return acc; return acc;
}, {} as Record<string, string | null>); }, {} as Record<string, string | null>);
const applyFilters = useCallback( // Helper: fetch a batch of albums by ids
(items: AlbumType[]) => { const fetchAlbumsByIds = useCallback(async (ids: string[]): Promise<LibrespotAlbumType[]> => {
return items.filter((item) => (settings?.explicitFilter ? !item.explicit : true)); const results = await Promise.all(
}, ids.map((id) => apiClient.get<LibrespotAlbumType>(`/album/info?id=${id}`).then(r => r.data).catch(() => null))
[settings?.explicitFilter]
); );
return results.filter(Boolean) as LibrespotAlbumType[];
// Helper to dedupe albums by id }, []);
const dedupeAppendAlbums = (current: AlbumType[], incoming: AlbumType[]) => {
const seen = new Set(current.map((a) => a.id));
const filtered = incoming.filter((a) => !seen.has(a.id));
return current.concat(filtered);
};
// Fetch artist info & first page of albums // Fetch artist info & first page of albums
useEffect(() => { useEffect(() => {
@@ -63,48 +82,107 @@ export const Artist = () => {
const fetchInitial = async () => { const fetchInitial = async () => {
setLoading(true); setLoading(true);
setError(null); setError(null);
setAlbums([]); setArtistAlbums([]);
setOffset(0); setArtistSingles([]);
setArtistCompilations([]);
setArtistAppearsOn([]);
setAlbumOffset(0);
setSingleOffset(0);
setCompOffset(0);
setAppearsOffset(0);
setHasMore(true); setHasMore(true);
setBannerUrl(null); // reset hero; will lazy-load below
try { try {
const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=0`); const resp = await apiClient.get<ArtistInfoResponse>(`/artist/info?id=${artistId}`);
const data = resp.data; const data: ArtistInfoResponse = resp.data;
if (cancelled) return; if (cancelled) return;
if (data?.id && data?.name) { if (data?.id && data?.name) {
// set artist meta // set artist meta
setArtist({ setArtist(data);
id: data.id,
name: data.name, // Lazy-load banner image after render
images: data.images || [], const bioEntry = Array.isArray(data.biography) && data.biography.length > 0 ? data.biography[0] : undefined;
external_urls: data.external_urls || { spotify: "" }, const portraitImages = data.portrait_group?.image ?? bioEntry?.portrait_group?.image ?? [];
followers: data.followers || { total: 0 }, const allImages = [...(portraitImages ?? []), ...((data.images as LibrespotImage[] | undefined) ?? [])];
genres: data.genres || [], const candidateBanner = allImages.sort((a, b) => (b?.width ?? 0) - (a?.width ?? 0))[0]?.url || "/placeholder.jpg";
popularity: data.popularity || 0, // Use async preload to avoid blocking initial paint
type: data.type || "artist", setTimeout(() => {
uri: data.uri || "", const img = new Image();
}); img.src = candidateBanner;
img.onload = () => { if (!cancelled) setBannerUrl(candidateBanner); };
}, 0);
// top tracks (if provided) // top tracks (if provided)
if (Array.isArray(data.top_tracks)) { const topTrackIds = Array.isArray(data.top_track) && data.top_track.length > 0
setTopTracks(data.top_tracks); ? data.top_track[0].track.slice(0, 10)
: [];
if (topTrackIds.length) {
const tracksFull = await Promise.all(
topTrackIds.map((id) => apiClient.get<LibrespotTrackType>(`/track/info?id=${id}`).then(r => r.data).catch(() => null))
);
if (!cancelled) setTopTracks(tracksFull.filter(Boolean) as LibrespotTrackType[]);
} else { } else {
setTopTracks([]); if (!cancelled) setTopTracks([]);
} }
// albums pagination info // Progressive album loading: album -> single -> compilation -> appears_on
const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || []; const albumIds = data.album_group ?? [];
const total: number | undefined = data?.albums?.total; const singleIds = data.single_group ?? [];
const compIds = data.compilation_group ?? [];
const appearsIds = data.appears_on_group ?? [];
setAlbums(items); // Determine initial number based on screen size: 4 on small screens
setOffset(items.length); const isSmallScreen = typeof window !== "undefined" && !window.matchMedia("(min-width: 640px)").matches;
if (typeof total === "number") { const initialTarget = isSmallScreen ? 4 : ALBUM_BATCH;
setHasMore(items.length < total);
} else { // Load initial sets from each group in order until initialTarget reached
// If server didn't return total, default behavior: stop when an empty page arrives. let aOff = 0, sOff = 0, cOff = 0, apOff = 0;
setHasMore(items.length > 0); let loaded = 0;
let aList: LibrespotAlbumType[] = [];
let sList: LibrespotAlbumType[] = [];
let cList: LibrespotAlbumType[] = [];
let apList: LibrespotAlbumType[] = [];
if (albumIds.length > 0 && loaded < initialTarget) {
const take = albumIds.slice(0, initialTarget - loaded);
aList = await fetchAlbumsByIds(take);
aOff = take.length;
loaded += aList.length;
}
if (singleIds.length > 0 && loaded < initialTarget) {
const take = singleIds.slice(0, initialTarget - loaded);
sList = await fetchAlbumsByIds(take);
sOff = take.length;
loaded += sList.length;
}
if (compIds.length > 0 && loaded < initialTarget) {
const take = compIds.slice(0, initialTarget - loaded);
cList = await fetchAlbumsByIds(take);
cOff = take.length;
loaded += cList.length;
}
if (appearsIds.length > 0 && loaded < initialTarget) {
const take = appearsIds.slice(0, initialTarget - loaded);
apList = await fetchAlbumsByIds(take);
apOff = take.length;
loaded += apList.length;
}
if (!cancelled) {
setArtistAlbums(aList);
setArtistSingles(sList);
setArtistCompilations(cList);
setArtistAppearsOn(apList);
// Store offsets for next loads
setAlbumOffset(aOff);
setSingleOffset(sOff);
setCompOffset(cOff);
setAppearsOffset(apOff);
// Determine if more remain
setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (compIds.length > cOff) || (appearsIds.length > apOff));
} }
} else { } else {
setError("Could not load artist data."); setError("Could not load artist data.");
@@ -133,28 +211,64 @@ export const Artist = () => {
return () => { return () => {
cancelled = true; cancelled = true;
}; };
}, [artistId, LIMIT]); }, [artistId, fetchAlbumsByIds]);
// Fetch more albums (next page) // Fetch more albums (next page)
const fetchMoreAlbums = useCallback(async () => { const fetchMoreAlbums = useCallback(async () => {
if (!artistId || loadingMore || loading || !hasMore) return; if (!artistId || loadingMore || loading || !hasMore || !artist) return;
setLoadingMore(true); setLoadingMore(true);
try { try {
const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=${offset}`); const albumIds = artist.album_group ?? [];
const data = resp.data; const singleIds = artist.single_group ?? [];
const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || []; const compIds = artist.compilation_group ?? [];
const total: number | undefined = data?.albums?.total; const appearsIds = artist.appears_on_group ?? [];
setAlbums((cur) => dedupeAppendAlbums(cur, items)); const nextA: LibrespotAlbumType[] = [];
setOffset((cur) => cur + items.length); const nextS: LibrespotAlbumType[] = [];
const nextC: LibrespotAlbumType[] = [];
const nextAp: LibrespotAlbumType[] = [];
if (typeof total === "number") { let aOff = albumOffset, sOff = singleOffset, cOff = compOffset, apOff = appearsOffset;
setHasMore((prev) => prev && offset + items.length < total);
} else { const totalLoaded = () => nextA.length + nextS.length + nextC.length + nextAp.length;
// if server doesn't expose total, stop when we get fewer than LIMIT items
setHasMore(items.length === LIMIT); if (aOff < albumIds.length && totalLoaded() < ALBUM_BATCH) {
const remaining = ALBUM_BATCH - totalLoaded();
const take = albumIds.slice(aOff, aOff + remaining);
nextA.push(...await fetchAlbumsByIds(take));
aOff += take.length;
} }
if (sOff < singleIds.length && totalLoaded() < ALBUM_BATCH) {
const remaining = ALBUM_BATCH - totalLoaded();
const take = singleIds.slice(sOff, sOff + remaining);
nextS.push(...await fetchAlbumsByIds(take));
sOff += take.length;
}
if (cOff < compIds.length && totalLoaded() < ALBUM_BATCH) {
const remaining = ALBUM_BATCH - totalLoaded();
const take = compIds.slice(cOff, cOff + remaining);
nextC.push(...await fetchAlbumsByIds(take));
cOff += take.length;
}
if (apOff < appearsIds.length && totalLoaded() < ALBUM_BATCH) {
const remaining = ALBUM_BATCH - totalLoaded();
const take = appearsIds.slice(apOff, apOff + remaining);
nextAp.push(...await fetchAlbumsByIds(take));
apOff += take.length;
}
setArtistAlbums((cur) => cur.concat(nextA));
setArtistSingles((cur) => cur.concat(nextS));
setArtistCompilations((cur) => cur.concat(nextC));
setArtistAppearsOn((cur) => cur.concat(nextAp));
setAlbumOffset(aOff);
setSingleOffset(sOff);
setCompOffset(cOff);
setAppearsOffset(apOff);
setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (compIds.length > cOff) || (appearsIds.length > apOff));
} catch (err) { } catch (err) {
console.error("Failed to load more albums", err); console.error("Failed to load more albums", err);
toast.error("Failed to load more albums"); toast.error("Failed to load more albums");
@@ -162,7 +276,7 @@ export const Artist = () => {
} finally { } finally {
setLoadingMore(false); setLoadingMore(false);
} }
}, [artistId, offset, LIMIT, loadingMore, loading, hasMore]); }, [artistId, loadingMore, loading, hasMore, artist, albumOffset, singleOffset, compOffset, appearsOffset, fetchAlbumsByIds]);
// IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible // IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible
useEffect(() => { useEffect(() => {
@@ -189,14 +303,24 @@ export const Artist = () => {
return () => observer.disconnect(); return () => observer.disconnect();
}, [fetchMoreAlbums, hasMore]); }, [fetchMoreAlbums, hasMore]);
// Auto progressive loading regardless of scroll
useEffect(() => {
if (!artist) return;
if (!hasMore || loading || loadingMore) return;
const t = setTimeout(() => {
fetchMoreAlbums();
}, 350);
return () => clearTimeout(t);
}, [artist, hasMore, loading, loadingMore, fetchMoreAlbums]);
// --- existing handlers (unchanged) --- // --- existing handlers (unchanged) ---
const handleDownloadTrack = (track: TrackType) => { const handleDownloadTrack = (track: LibrespotTrackType) => {
if (!track.id) return; if (!track.id) return;
toast.info(`Adding ${track.name} to queue...`); toast.info(`Adding ${track.name} to queue...`);
addItem({ spotifyId: track.id, type: "track", name: track.name }); addItem({ spotifyId: track.id, type: "track", name: track.name });
}; };
const handleDownloadAlbum = (album: AlbumType) => { const handleDownloadAlbum = (album: LibrespotAlbumType) => {
toast.info(`Adding ${album.name} to queue...`); toast.info(`Adding ${album.name} to queue...`);
addItem({ spotifyId: album.id, type: "album", name: album.name }); addItem({ spotifyId: album.id, type: "album", name: album.name });
}; };
@@ -229,6 +353,25 @@ export const Artist = () => {
} }
}; };
const handleDownloadGroup = async (group: "album" | "single" | "compilation" | "appears_on") => {
if (!artistId || !artist) return;
try {
toast.info(`Queueing ${group} downloads for ${artist.name}...`);
const response = await apiClient.get(`/artist/download/${artistId}?album_type=${group}`);
const count = response.data?.queued_albums?.length ?? 0;
if (count > 0) {
toast.success(`Queued ${count} ${group}${count > 1 ? "s" : ""}.`);
} else {
toast.info(`No new ${group} releases to download.`);
}
} catch (error: any) {
console.error(`Failed to queue ${group} downloads:`, error);
toast.error(`Failed to queue ${group} downloads`, {
description: error.response?.data?.error || "An unexpected error occurred.",
});
}
};
const handleToggleWatch = async () => { const handleToggleWatch = async () => {
if (!artistId || !artist) return; if (!artistId || !artist) return;
try { try {
@@ -258,11 +401,6 @@ export const Artist = () => {
return <div>Artist data could not be fully loaded. Please try again later.</div>; return <div>Artist data could not be fully loaded. Please try again later.</div>;
} }
const artistAlbums = applyFilters(albums.filter((album) => (album.album_group ?? album.album_type) === "album"));
const artistSingles = applyFilters(albums.filter((album) => (album.album_group ?? album.album_type) === "single"));
const artistCompilations = applyFilters(albums.filter((album) => (album.album_group ?? album.album_type) === "compilation"));
const artistAppearsOn = applyFilters(albums.filter((album) => (album.album_group ?? "") === "appears_on"));
return ( return (
<div className="artist-page"> <div className="artist-page">
<div className="mb-4 md:mb-6"> <div className="mb-4 md:mb-6">
@@ -274,16 +412,16 @@ export const Artist = () => {
<span>Back to results</span> <span>Back to results</span>
</button> </button>
</div> </div>
<div className="artist-header mb-8 text-center">
{artist.images && artist.images.length > 0 && ( {/* Hero banner using highest resolution image (lazy-loaded) */}
<img <div
src={artist.images[0]?.url} className="relative mb-8 rounded-xl overflow-hidden h-56 sm:h-64 md:h-80 lg:h-[420px] bg-surface-accent dark:bg-surface-accent-dark"
alt={artist.name} style={bannerUrl ? { backgroundImage: `url(${bannerUrl})`, backgroundSize: "cover", backgroundPosition: "center" } : undefined}
className="artist-image w-48 h-48 rounded-full mx-auto mb-4 shadow-lg" >
/> <div className="absolute inset-0 bg-gradient-to-b from-black/30 via-black/50 to-black/80" />
)} <div className="absolute inset-x-0 bottom-0 p-4 md:p-6 flex flex-col gap-3 text-white">
<h1 className="text-5xl font-bold text-content-primary dark:text-content-primary-dark">{artist.name}</h1> <h1 className="text-4xl md:text-6xl font-extrabold tracking-tight leading-none">{artist.name}</h1>
<div className="flex gap-4 justify-center mt-4"> <div className="flex flex-wrap items-center gap-3">
<button <button
onClick={handleDownloadArtist} onClick={handleDownloadArtist}
disabled={artistStatus === "downloading" || artistStatus === "queued"} disabled={artistStatus === "downloading" || artistStatus === "queued"}
@@ -300,7 +438,7 @@ export const Artist = () => {
? artistStatus === "queued" ? artistStatus === "queued"
? "Queued." ? "Queued."
: artistStatus === "downloading" : artistStatus === "downloading"
? "Downloading..." ? <img src="/spinner.svg" alt="Loading" className="w-5 h-5 animate-spin" />
: <> : <>
<FaDownload className="icon-inverse" /> <FaDownload className="icon-inverse" />
<span>Download All</span> <span>Download All</span>
@@ -334,15 +472,16 @@ export const Artist = () => {
)} )}
</div> </div>
</div> </div>
</div>
{topTracks.length > 0 && ( {topTracks.length > 0 && (
<div className="mb-12"> <div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Top Tracks</h2> <h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Top Tracks</h2>
<div className="track-list space-y-2"> <div className="track-list space-y-2">
{topTracks.map((track) => ( {topTracks.map((track, index) => (
<div <div
key={track.id} key={track.id}
className="track-item flex items-center justify-between p-2 rounded-md hover:bg-surface-muted dark:hover:bg-surface-muted-dark transition-colors" className={`track-item flex items-center justify-between p-2 rounded-md hover:bg-surface-muted dark:hover:bg-surface-muted-dark transition-colors ${index >= 5 ? "hidden sm:flex" : ""}`}
> >
<Link <Link
to="/track/$trackId" to="/track/$trackId"
@@ -354,15 +493,25 @@ export const Artist = () => {
<button <button
onClick={() => handleDownloadTrack(track)} onClick={() => handleDownloadTrack(track)}
disabled={!!trackStatuses[track.id] && trackStatuses[track.id] !== "error"} disabled={!!trackStatuses[track.id] && trackStatuses[track.id] !== "error"}
className="px-3 py-1 bg-button-secondary hover:bg-button-secondary-hover text-button-secondary-text hover:text-button-secondary-text-hover rounded disabled:opacity-50 disabled:cursor-not-allowed" className="w-9 h-9 md:w-10 md:h-10 flex items-center justify-center bg-surface-muted dark:bg-surface-muted-dark hover:bg-surface-accent dark:hover:bg-surface-accent-dark border border-border-muted dark:border-border-muted-dark hover:border-border-accent dark:hover:border-border-accent-dark rounded-full transition-all disabled:opacity-50 disabled:cursor-not-allowed"
> title={
{trackStatuses[track.id] trackStatuses[track.id]
? trackStatuses[track.id] === "queued" ? trackStatuses[track.id] === "queued"
? "Queued." ? "Queued."
: trackStatuses[track.id] === "error" : trackStatuses[track.id] === "error"
? "Download" ? "Download"
: "Downloading..." : "Downloading..."
: "Download"} : "Download"
}
>
{trackStatuses[track.id]
? trackStatuses[track.id] === "queued"
? "Queued."
: trackStatuses[track.id] === "error"
? <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
: <img src="/spinner.svg" alt="Loading" className="w-4 h-4 animate-spin" />
: <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
}
</button> </button>
</div> </div>
))} ))}
@@ -373,7 +522,17 @@ export const Artist = () => {
{/* Albums */} {/* Albums */}
{artistAlbums.length > 0 && ( {artistAlbums.length > 0 && (
<div className="mb-12"> <div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Albums</h2> <div className="flex items-center justify-between mb-6">
<h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Albums</h2>
<button
onClick={() => handleDownloadGroup("album")}
className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
title="Download all albums"
>
<img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
<span>Download</span>
</button>
</div>
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6"> <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
{artistAlbums.map((album) => ( {artistAlbums.map((album) => (
<AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} /> <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -385,7 +544,17 @@ export const Artist = () => {
{/* Singles */} {/* Singles */}
{artistSingles.length > 0 && ( {artistSingles.length > 0 && (
<div className="mb-12"> <div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Singles</h2> <div className="flex items-center justify-between mb-6">
<h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Singles</h2>
<button
onClick={() => handleDownloadGroup("single")}
className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
title="Download all singles"
>
<img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
<span>Download</span>
</button>
</div>
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6"> <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
{artistSingles.map((album) => ( {artistSingles.map((album) => (
<AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} /> <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -397,7 +566,17 @@ export const Artist = () => {
{/* Compilations */} {/* Compilations */}
{artistCompilations.length > 0 && ( {artistCompilations.length > 0 && (
<div className="mb-12"> <div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Compilations</h2> <div className="flex items-center justify-between mb-6">
<h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Compilations</h2>
<button
onClick={() => handleDownloadGroup("compilation")}
className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
title="Download all compilations"
>
<img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
<span>Download</span>
</button>
</div>
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6"> <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
{artistCompilations.map((album) => ( {artistCompilations.map((album) => (
<AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} /> <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -409,7 +588,17 @@ export const Artist = () => {
{/* Appears On */} {/* Appears On */}
{artistAppearsOn.length > 0 && ( {artistAppearsOn.length > 0 && (
<div className="mb-12"> <div className="mb-12">
<h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Appears On</h2> <div className="flex items-center justify-between mb-6">
<h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Appears On</h2>
<button
onClick={() => handleDownloadGroup("appears_on")}
className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
title="Download all appears on"
>
<img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
<span>Download</span>
</button>
</div>
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6"> <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
{artistAppearsOn.map((album) => ( {artistAppearsOn.map((album) => (
<AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} /> <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -426,9 +615,9 @@ export const Artist = () => {
{hasMore && !loadingMore && ( {hasMore && !loadingMore && (
<button <button
onClick={() => fetchMoreAlbums()} onClick={() => fetchMoreAlbums()}
className="px-4 py-2 mb-6 rounded bg-surface-muted hover:bg-surface-muted-dark" className="px-4 py-2 mb-6 rounded"
> >
Load more Loading...
</button> </button>
)} )}
<div ref={sentinelRef} style={{ height: 1, width: "100%" }} /> <div ref={sentinelRef} style={{ height: 1, width: "100%" }} />

View File

@@ -3,21 +3,26 @@ import { useNavigate, useSearch, useRouterState } from "@tanstack/react-router";
import { useDebounce } from "use-debounce"; import { useDebounce } from "use-debounce";
import { toast } from "sonner"; import { toast } from "sonner";
import type { TrackType, AlbumType, SearchResult } from "@/types/spotify"; import type { TrackType, AlbumType, SearchResult } from "@/types/spotify";
import { parseSpotifyUrl } from "@/lib/spotify-utils";
import { QueueContext } from "@/contexts/queue-context"; import { QueueContext } from "@/contexts/queue-context";
import { SearchResultCard } from "@/components/SearchResultCard"; import { SearchResultCard } from "@/components/SearchResultCard";
import { indexRoute } from "@/router"; import { indexRoute } from "@/router";
import { Music, Disc, User, ListMusic } from "lucide-react";
import { authApiClient } from "@/lib/api-client";
import { useSettings } from "@/contexts/settings-context";
import { FaEye, FaDownload } from "react-icons/fa";
// Utility function to safely get properties from search results // Utility function to safely get properties from search results
const safelyGetProperty = <T,>(obj: any, path: string[], fallback: T): T => { const safelyGetProperty = <T,>(obj: any, path: string[], fallback: T): T => {
try { try {
let current = obj; let current = obj;
for (const key of path) { for (const key of path) {
if (current == null || typeof current !== 'object') { if (current == null || typeof current !== "object") {
return fallback; return fallback;
} }
current = current[key]; current = current[key];
} }
return current ?? fallback; return (current ?? fallback) as T;
} catch { } catch {
return fallback; return fallback;
} }
@@ -30,18 +35,23 @@ export const Home = () => {
const { q, type } = useSearch({ from: "/" }); const { q, type } = useSearch({ from: "/" });
const { items: allResults } = indexRoute.useLoaderData(); const { items: allResults } = indexRoute.useLoaderData();
const isLoading = useRouterState({ select: (s) => s.status === "pending" }); const isLoading = useRouterState({ select: (s) => s.status === "pending" });
const { settings } = useSettings();
const [query, setQuery] = useState(q || ""); const [query, setQuery] = useState(q || "");
const [searchType, setSearchType] = useState<"track" | "album" | "artist" | "playlist">(type || "track"); const [searchType, setSearchType] = useState<
"track" | "album" | "artist" | "playlist"
>(type || "track");
const [debouncedQuery] = useDebounce(query, 500); const [debouncedQuery] = useDebounce(query, 500);
const [activeTab, setActiveTab] = useState<"search" | "bulkAdd">("search");
const [linksInput, setLinksInput] = useState("");
const [isBulkAdding, setIsBulkAdding] = useState(false);
const [isBulkWatching, setIsBulkWatching] = useState(false);
const [displayedResults, setDisplayedResults] = useState<SearchResult[]>([]); const [displayedResults, setDisplayedResults] = useState<SearchResult[]>([]);
const [isLoadingMore, setIsLoadingMore] = useState(false); const [isLoadingMore, setIsLoadingMore] = useState(false);
const context = useContext(QueueContext); const context = useContext(QueueContext);
const loaderRef = useRef<HTMLDivElement | null>(null); const loaderRef = useRef<HTMLDivElement | null>(null);
// Removed scroll locking on mobile empty state to avoid blocking scroll globally
useEffect(() => { useEffect(() => {
navigate({ search: (prev) => ({ ...prev, q: debouncedQuery, type: searchType }) }); navigate({ search: (prev) => ({ ...prev, q: debouncedQuery, type: searchType }) });
}, [debouncedQuery, searchType, navigate]); }, [debouncedQuery, searchType, navigate]);
@@ -55,6 +65,131 @@ export const Home = () => {
} }
const { addItem } = context; const { addItem } = context;
const handleAddBulkLinks = useCallback(async () => {
const allLinks = linksInput
.split("\n")
.map((link) => link.trim())
.filter(Boolean);
if (allLinks.length === 0) {
toast.info("No links provided to add.");
return;
}
const supportedLinks: string[] = [];
const unsupportedLinks: string[] = [];
allLinks.forEach((link) => {
const parsed = parseSpotifyUrl(link);
if (parsed.type !== "unknown") {
supportedLinks.push(link);
} else {
unsupportedLinks.push(link);
}
});
if (unsupportedLinks.length > 0) {
toast.warning("Some links are not supported and will be skipped.", {
description: `Unsupported: ${unsupportedLinks.join(", ")}`,
});
}
if (supportedLinks.length === 0) {
toast.info("No supported links to add.");
return;
}
setIsBulkAdding(true);
try {
const response = await authApiClient.client.post("/bulk/bulk-add-spotify-links", {
links: supportedLinks,
});
const { count, failed_links } = response.data;
if (failed_links && failed_links.length > 0) {
toast.warning("Bulk Add Completed with Warnings", {
description: `${count} links added. Failed to add ${failed_links.length} links: ${failed_links.join(
", "
)}`,
});
} else {
toast.success("Bulk Add Successful", {
description: `${count} links added to queue.`,
});
}
setLinksInput(""); // Clear input after successful add
} catch (error: any) {
const errorMessage = error.response?.data?.detail?.message || error.message;
const failedLinks = error.response?.data?.detail?.failed_links || [];
let description = errorMessage;
if (failedLinks.length > 0) {
description += ` Failed links: ${failedLinks.join(", ")}`;
}
toast.error("Bulk Add Failed", {
description: description,
});
if (failedLinks.length > 0) {
console.error("Failed links:", failedLinks);
}
} finally {
setIsBulkAdding(false);
}
}, [linksInput]);
const handleWatchBulkLinks = useCallback(async () => {
const links = linksInput
.split("\n")
.map((link) => link.trim())
.filter(Boolean);
if (links.length === 0) {
toast.info("No links provided to watch.");
return;
}
const supportedLinks: { type: "artist" | "playlist"; id: string }[] = [];
const unsupportedLinks: string[] = [];
links.forEach((link) => {
const parsed = parseSpotifyUrl(link);
if (parsed.type === "artist" || parsed.type === "playlist") {
supportedLinks.push({ type: parsed.type, id: parsed.id });
} else {
unsupportedLinks.push(link);
}
});
if (unsupportedLinks.length > 0) {
toast.warning("Some links are not supported for watching.", {
description: `Unsupported: ${unsupportedLinks.join(", ")}`,
});
}
if (supportedLinks.length === 0) {
toast.info("No supported links to watch.");
return;
}
setIsBulkWatching(true);
try {
const watchPromises = supportedLinks.map((item) =>
authApiClient.client.put(`/${item.type}/watch/${item.id}`)
);
await Promise.all(watchPromises);
toast.success("Bulk Watch Successful", {
description: `${supportedLinks.length} supported links added to watchlist.`,
});
setLinksInput(""); // Clear input after successful add
} catch (error: any) {
const errorMessage = error.response?.data?.detail?.message || error.message;
toast.error("Bulk Watch Failed", {
description: errorMessage,
});
} finally {
setIsBulkWatching(false);
}
}, [linksInput]);
const loadMore = useCallback(() => { const loadMore = useCallback(() => {
setIsLoadingMore(true); setIsLoadingMore(true);
setTimeout(() => { setTimeout(() => {
@@ -73,7 +208,7 @@ export const Home = () => {
loadMore(); loadMore();
} }
}, },
{ threshold: 1.0 }, { threshold: 1.0 }
); );
const currentLoader = loaderRef.current; const currentLoader = loaderRef.current;
@@ -94,7 +229,7 @@ export const Home = () => {
addItem({ spotifyId: track.id, type: "track", name: track.name, artist: artistName }); addItem({ spotifyId: track.id, type: "track", name: track.name, artist: artistName });
toast.info(`Adding ${track.name} to queue...`); toast.info(`Adding ${track.name} to queue...`);
}, },
[addItem], [addItem]
); );
const handleDownloadAlbum = useCallback( const handleDownloadAlbum = useCallback(
@@ -103,38 +238,47 @@ export const Home = () => {
addItem({ spotifyId: album.id, type: "album", name: album.name, artist: artistName }); addItem({ spotifyId: album.id, type: "album", name: album.name, artist: artistName });
toast.info(`Adding ${album.name} to queue...`); toast.info(`Adding ${album.name} to queue...`);
}, },
[addItem], [addItem]
); );
const resultComponent = useMemo(() => { const resultComponent = useMemo(() => {
return ( return (
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4"> <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
{displayedResults.map((item) => { {displayedResults
.map((item) => {
// Add safety checks for essential properties // Add safety checks for essential properties
if (!item || !item.id || !item.name || !item.model) { if (!item || !item.id || !item.name || !item.model) {
return null; return null;
} }
let imageUrl; let imageUrl;
let onDownload; let onDownload: (() => void) | undefined;
let subtitle; let subtitle: string | undefined;
if (item.model === "track") { if (item.model === "track") {
imageUrl = safelyGetProperty(item, ['album', 'images', '0', 'url'], undefined); imageUrl = safelyGetProperty(item, ["album", "images", "0", "url"], undefined);
onDownload = () => handleDownloadTrack(item as TrackType); onDownload = () => handleDownloadTrack(item as TrackType);
const artists = safelyGetProperty(item, ['artists'], []); const artists = safelyGetProperty(item, ["artists"], []);
subtitle = Array.isArray(artists) ? artists.map((a: any) => safelyGetProperty(a, ['name'], 'Unknown')).join(", ") : "Unknown Artist"; subtitle = Array.isArray(artists)
? artists
.map((a: any) => safelyGetProperty(a, ["name"], "Unknown"))
.join(", ")
: "Unknown Artist";
} else if (item.model === "album") { } else if (item.model === "album") {
imageUrl = safelyGetProperty(item, ['images', '0', 'url'], undefined); imageUrl = safelyGetProperty(item, ["images", "0", "url"], undefined);
onDownload = () => handleDownloadAlbum(item as AlbumType); onDownload = () => handleDownloadAlbum(item as AlbumType);
const artists = safelyGetProperty(item, ['artists'], []); const artists = safelyGetProperty(item, ["artists"], []);
subtitle = Array.isArray(artists) ? artists.map((a: any) => safelyGetProperty(a, ['name'], 'Unknown')).join(", ") : "Unknown Artist"; subtitle = Array.isArray(artists)
? artists
.map((a: any) => safelyGetProperty(a, ["name"], "Unknown"))
.join(", ")
: "Unknown Artist";
} else if (item.model === "artist") { } else if (item.model === "artist") {
imageUrl = safelyGetProperty(item, ['images', '0', 'url'], undefined); imageUrl = safelyGetProperty(item, ["images", "0", "url"], undefined);
subtitle = "Artist"; subtitle = "Artist";
} else if (item.model === "playlist") { } else if (item.model === "playlist") {
imageUrl = safelyGetProperty(item, ['images', '0', 'url'], undefined); imageUrl = safelyGetProperty(item, ["images", "0", "url"], undefined);
const ownerName = safelyGetProperty(item, ['owner', 'display_name'], 'Unknown'); const ownerName = safelyGetProperty(item, ["owner", "display_name"], "Unknown");
subtitle = `By ${ownerName}`; subtitle = `By ${ownerName}`;
} }
@@ -149,7 +293,8 @@ export const Home = () => {
onDownload={onDownload} onDownload={onDownload}
/> />
); );
}).filter(Boolean)} {/* Filter out null components */} })
.filter(Boolean)}
</div> </div>
); );
}, [displayedResults, handleDownloadTrack, handleDownloadAlbum]); }, [displayedResults, handleDownloadTrack, handleDownloadAlbum]);
@@ -159,7 +304,35 @@ export const Home = () => {
<div className="text-center mb-4 md:mb-8 px-4 md:px-0"> <div className="text-center mb-4 md:mb-8 px-4 md:px-0">
<h1 className="text-2xl font-bold text-content-primary dark:text-content-primary-dark">Spotizerr</h1> <h1 className="text-2xl font-bold text-content-primary dark:text-content-primary-dark">Spotizerr</h1>
</div> </div>
<div className="flex flex-col sm:flex-row gap-3 mb-4 md:mb-6 px-4 md:px-0 flex-shrink-0">
{/* Tabs */}
<div className="flex justify-center mb-4 md:mb-6 px-4 md:px-0 border-b border-gray-300 dark:border-gray-700">
<button
className={`flex-1 py-2 text-center transition-colors duration-200 ${
activeTab === "search"
? "border-b-2 border-green-500 text-green-500"
: "border-b-2 border-transparent text-gray-800 dark:text-gray-200 hover:text-green-500"
}`}
onClick={() => setActiveTab("search")}
>
Search
</button>
<button
className={`flex-1 py-2 text-center transition-colors duration-200 ${
activeTab === "bulkAdd"
? "border-b-2 border-green-500 text-green-500"
: "border-b-2 border-transparent text-gray-800 dark:text-gray-200 hover:text-green-500"
}`}
onClick={() => setActiveTab("bulkAdd")}
>
Bulk Add
</button>
</div>
{activeTab === "search" && (
<>
<div className="flex flex-col gap-3 mb-4 md:mb-6 px-4 md:px-0 flex-shrink-0">
<div className="flex flex-col sm:flex-row gap-3">
<input <input
type="text" type="text"
value={query} value={query}
@@ -167,10 +340,42 @@ export const Home = () => {
placeholder="Search for a track, album, or artist" placeholder="Search for a track, album, or artist"
className="flex-1 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" className="flex-1 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
/> />
{/* Icon buttons for search type (larger screens) */}
<div className="hidden sm:flex gap-2 items-center">
{(["track", "album", "artist", "playlist"] as const).map((typeOption) => (
<button
key={typeOption}
onClick={() => setSearchType(typeOption)}
aria-label={`Search ${typeOption}`}
className={`flex items-center gap-1 p-2 rounded-md text-sm font-medium transition-colors border ${
searchType === typeOption
? "bg-green-600 text-white border-green-600"
: "bg-gray-100 dark:bg-gray-700 text-gray-800 dark:text-gray-200 border-gray-300 dark:border-gray-600 hover:bg-gray-200 dark:hover:bg-gray-600"
}`}
>
{
{
track: <Music size={16} />,
album: <Disc size={16} />,
artist: <User size={16} />,
playlist: <ListMusic size={16} />,
}[typeOption]
}
<span className="hidden md:inline">
{typeOption.charAt(0).toUpperCase() + typeOption.slice(1)}
</span>
</button>
))}
</div>
{/* Select for smaller screens */}
<select <select
value={searchType} value={searchType}
onChange={(e) => setSearchType(e.target.value as "track" | "album" | "artist" | "playlist")} onChange={(e) =>
className="p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus" setSearchType(e.target.value as "track" | "album" | "artist" | "playlist")
}
className="p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus sm:hidden"
> >
<option value="track">Track</option> <option value="track">Track</option>
<option value="album">Album</option> <option value="album">Album</option>
@@ -178,20 +383,72 @@ export const Home = () => {
<option value="playlist">Playlist</option> <option value="playlist">Playlist</option>
</select> </select>
</div> </div>
<div className={`flex-1 px-4 md:px-0 pb-4 ${ </div>
<div
className={`flex-1 px-4 md:px-0 pb-4 ${
// Only restrict overflow on mobile when there are results, otherwise allow normal behavior // Only restrict overflow on mobile when there are results, otherwise allow normal behavior
displayedResults.length > 0 ? 'overflow-y-auto md:overflow-visible' : '' displayedResults.length > 0 ? "overflow-y-auto md:overflow-visible" : ""
}`}> }`}
>
{isLoading ? ( {isLoading ? (
<p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading results...</p> <p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading results...</p>
) : ( ) : (
<> <>
{resultComponent} {resultComponent}
<div ref={loaderRef} /> <div ref={loaderRef} />
{isLoadingMore && <p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading more results...</p>} {isLoadingMore && (
<p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading more results...</p>
)}
</> </>
)} )}
</div> </div>
</>
)}
{activeTab === "bulkAdd" && (
<div className="flex flex-col gap-3 mb-4 md:mb-6 px-4 md:px-0 flex-shrink-0">
<textarea
className="w-full h-60 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md mb-4 focus:outline-none focus:ring-2 focus:ring-green-500"
placeholder="Paste Spotify links here, one per line..."
value={linksInput}
onChange={(e) => setLinksInput(e.target.value)}
></textarea>
<div className="flex justify-end gap-3">
<button
onClick={() => setLinksInput("")} // Clear input
className="px-4 py-2 bg-gray-300 dark:bg-gray-700 text-content-primary dark:text-content-primary-dark rounded-md hover:bg-gray-400 dark:hover:bg-gray-600 focus:outline-none focus:ring-2 focus:ring-gray-500"
>
Clear
</button>
<button
onClick={handleAddBulkLinks}
disabled={isBulkAdding}
className="px-4 py-2 bg-green-500 text-white rounded-md hover:bg-green-600 focus:outline-none focus:ring-2 focus:ring-green-400 disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
>
{isBulkAdding ? "Adding..." : (
<>
<FaDownload className="icon-inverse" /> Download
</>
)}
</button>
{settings?.watch?.enabled && (
<button
onClick={handleWatchBulkLinks}
disabled={isBulkWatching}
className="px-4 py-2 bg-error hover:bg-error-hover text-button-primary-text rounded-md flex items-center gap-2 disabled:opacity-50 disabled:cursor-not-allowed"
title="Only Spotify Artist and Playlist links are supported for watching."
>
{isBulkWatching ? "Watching..." : (
<>
<FaEye className="icon-inverse" /> Watch
</>
)}
</button>
)}
</div>
</div>
)}
</div> </div>
); );
}; };

View File

@@ -3,16 +3,14 @@ import { useEffect, useState, useContext, useRef, useCallback } from "react";
import apiClient from "../lib/api-client"; import apiClient from "../lib/api-client";
import { useSettings } from "../contexts/settings-context"; import { useSettings } from "../contexts/settings-context";
import { toast } from "sonner"; import { toast } from "sonner";
import type { TrackType, PlaylistMetadataType, PlaylistTracksResponseType, PlaylistItemType } from "../types/spotify"; import type { LibrespotTrackType, LibrespotPlaylistType, LibrespotPlaylistItemType, LibrespotPlaylistTrackStubType } from "@/types/librespot";
import { QueueContext, getStatus } from "../contexts/queue-context"; import { QueueContext, getStatus } from "../contexts/queue-context";
import { FaArrowLeft } from "react-icons/fa"; import { FaArrowLeft } from "react-icons/fa";
export const Playlist = () => { export const Playlist = () => {
const { playlistId } = useParams({ from: "/playlist/$playlistId" }); const { playlistId } = useParams({ from: "/playlist/$playlistId" });
const [playlistMetadata, setPlaylistMetadata] = useState<PlaylistMetadataType | null>(null); const [playlistMetadata, setPlaylistMetadata] = useState<LibrespotPlaylistType | null>(null);
const [tracks, setTracks] = useState<PlaylistItemType[]>([]); const [items, setItems] = useState<LibrespotPlaylistItemType[]>([]);
const [isWatched, setIsWatched] = useState(false); const [isWatched, setIsWatched] = useState(false);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const [loadingTracks, setLoadingTracks] = useState(false); const [loadingTracks, setLoadingTracks] = useState(false);
@@ -28,11 +26,11 @@ export const Playlist = () => {
if (!context) { if (!context) {
throw new Error("useQueue must be used within a QueueProvider"); throw new Error("useQueue must be used within a QueueProvider");
} }
const { addItem, items } = context; const { addItem, items: queueItems } = context;
// Playlist queue status // Playlist queue status
const playlistQueueItem = playlistMetadata const playlistQueueItem = playlistMetadata
? items.find(item => item.downloadType === "playlist" && item.spotifyId === playlistMetadata.id) ? queueItems.find(item => item.downloadType === "playlist" && item.spotifyId === (playlistId ?? ""))
: undefined; : undefined;
const playlistStatus = playlistQueueItem ? getStatus(playlistQueueItem) : null; const playlistStatus = playlistQueueItem ? getStatus(playlistQueueItem) : null;
@@ -44,14 +42,15 @@ export const Playlist = () => {
} }
}, [playlistStatus]); }, [playlistStatus]);
// Load playlist metadata first // Load playlist metadata first (no expanded items)
useEffect(() => { useEffect(() => {
const fetchPlaylistMetadata = async () => { const fetchPlaylist = async () => {
if (!playlistId) return; if (!playlistId) return;
try { try {
const response = await apiClient.get<PlaylistMetadataType>(`/playlist/metadata?id=${playlistId}`); const response = await apiClient.get<LibrespotPlaylistType>(`/playlist/info?id=${playlistId}`);
setPlaylistMetadata(response.data); const data = response.data;
setTotalTracks(response.data.tracks.total); setPlaylistMetadata(data);
setTotalTracks(data.tracks.total);
} catch (err) { } catch (err) {
setError("Failed to load playlist metadata"); setError("Failed to load playlist metadata");
console.error(err); console.error(err);
@@ -70,27 +69,49 @@ export const Playlist = () => {
} }
}; };
fetchPlaylistMetadata(); setItems([]);
setTracksOffset(0);
setHasMoreTracks(true);
setTotalTracks(0);
setError(null);
fetchPlaylist();
checkWatchStatus(); checkWatchStatus();
}, [playlistId]); }, [playlistId]);
// Load tracks progressively const BATCH_SIZE = 6;
// Load items progressively by expanding track stubs when needed
const loadMoreTracks = useCallback(async () => { const loadMoreTracks = useCallback(async () => {
if (!playlistId || loadingTracks || !hasMoreTracks) return; if (!playlistId || loadingTracks || !hasMoreTracks || !playlistMetadata) return;
setLoadingTracks(true); setLoadingTracks(true);
try { try {
const limit = 50; // Load 50 tracks at a time // Fetch full playlist snapshot (stub items)
const response = await apiClient.get<PlaylistTracksResponseType>( const response = await apiClient.get<LibrespotPlaylistType>(`/playlist/info?id=${playlistId}`);
`/playlist/tracks?id=${playlistId}&limit=${limit}&offset=${tracksOffset}` const allItems = response.data.tracks.items;
const slice = allItems.slice(tracksOffset, tracksOffset + BATCH_SIZE);
// Expand any stubbed track entries by fetching full track info
const expandedSlice: LibrespotPlaylistItemType[] = await Promise.all(
slice.map(async (it) => {
const t = it.track as LibrespotPlaylistTrackStubType | LibrespotTrackType;
// If track has only stub fields (no duration_ms), fetch full
if (t && (t as any).id && !("duration_ms" in (t as any))) {
try {
const full = await apiClient.get<LibrespotTrackType>(`/track/info?id=${(t as LibrespotPlaylistTrackStubType).id}`).then(r => r.data);
return { ...it, track: full } as LibrespotPlaylistItemType;
} catch {
return it; // fallback to stub if fetch fails
}
}
return it;
})
); );
const newTracks = response.data.items; setItems((prev) => [...prev, ...expandedSlice]);
setTracks(prev => [...prev, ...newTracks]); const loaded = tracksOffset + expandedSlice.length;
setTracksOffset(prev => prev + newTracks.length); setTracksOffset(loaded);
if (loaded >= totalTracks) {
// Check if we've loaded all tracks
if (tracksOffset + newTracks.length >= totalTracks) {
setHasMoreTracks(false); setHasMoreTracks(false);
} }
} catch (err) { } catch (err) {
@@ -99,7 +120,7 @@ export const Playlist = () => {
} finally { } finally {
setLoadingTracks(false); setLoadingTracks(false);
} }
}, [playlistId, loadingTracks, hasMoreTracks, tracksOffset, totalTracks]); }, [playlistId, loadingTracks, hasMoreTracks, tracksOffset, totalTracks, playlistMetadata]);
// Intersection Observer for infinite scroll // Intersection Observer for infinite scroll
useEffect(() => { useEffect(() => {
@@ -125,22 +146,24 @@ export const Playlist = () => {
}; };
}, [loadMoreTracks, hasMoreTracks, loadingTracks]); }, [loadMoreTracks, hasMoreTracks, loadingTracks]);
// Load initial tracks when metadata is loaded // Kick off initial batch
useEffect(() => { useEffect(() => {
if (playlistMetadata && tracks.length === 0 && totalTracks > 0) { if (playlistMetadata && items.length === 0 && totalTracks > 0) {
loadMoreTracks(); loadMoreTracks();
} }
}, [playlistMetadata, tracks.length, totalTracks, loadMoreTracks]); }, [playlistMetadata, items.length, totalTracks, loadMoreTracks]);
// Reset state when playlist ID changes // Auto progressive loading regardless of scroll
useEffect(() => { useEffect(() => {
setTracks([]); if (!playlistMetadata) return;
setTracksOffset(0); if (!hasMoreTracks || loadingTracks) return;
setHasMoreTracks(true); const t = setTimeout(() => {
setTotalTracks(0); loadMoreTracks();
}, [playlistId]); }, 300);
return () => clearTimeout(t);
}, [playlistMetadata, hasMoreTracks, loadingTracks, loadMoreTracks]);
const handleDownloadTrack = (track: TrackType) => { const handleDownloadTrack = (track: LibrespotTrackType) => {
if (!track?.id) return; if (!track?.id) return;
addItem({ spotifyId: track.id, type: "track", name: track.name }); addItem({ spotifyId: track.id, type: "track", name: track.name });
toast.info(`Adding ${track.name} to queue...`); toast.info(`Adding ${track.name} to queue...`);
@@ -149,7 +172,7 @@ export const Playlist = () => {
const handleDownloadPlaylist = () => { const handleDownloadPlaylist = () => {
if (!playlistMetadata) return; if (!playlistMetadata) return;
addItem({ addItem({
spotifyId: playlistMetadata.id, spotifyId: playlistId!,
type: "playlist", type: "playlist",
name: playlistMetadata.name, name: playlistMetadata.name,
}); });
@@ -182,16 +205,19 @@ export const Playlist = () => {
} }
// Map track download statuses // Map track download statuses
const trackStatuses = tracks.reduce((acc, { track }) => { const trackStatuses = items.reduce((acc, { track }) => {
if (!track) return acc; if (!track || (track as any).id === undefined) return acc;
const qi = items.find(item => item.downloadType === "track" && item.spotifyId === track.id); const t = track as LibrespotTrackType;
acc[track.id] = qi ? getStatus(qi) : null; const qi = queueItems.find(item => item.downloadType === "track" && item.spotifyId === t.id);
acc[t.id] = qi ? getStatus(qi) : null;
return acc; return acc;
}, {} as Record<string, string | null>); }, {} as Record<string, string | null>);
const filteredTracks = tracks.filter(({ track }) => { const filteredItems = items.filter(({ track }) => {
if (!track) return false; const t = track as LibrespotTrackType | LibrespotPlaylistTrackStubType | null;
if (settings?.explicitFilter && track.explicit) return false; if (!t || (t as any).id === undefined) return false;
const full = t as LibrespotTrackType;
if (settings?.explicitFilter && full.explicit) return false;
return true; return true;
}); });
@@ -211,18 +237,47 @@ export const Playlist = () => {
{/* Playlist Header - Mobile Optimized */} {/* Playlist Header - Mobile Optimized */}
<div className="bg-surface dark:bg-surface-dark border border-border dark:border-border-dark rounded-xl p-4 md:p-6 shadow-sm"> <div className="bg-surface dark:bg-surface-dark border border-border dark:border-border-dark rounded-xl p-4 md:p-6 shadow-sm">
<div className="flex flex-col items-center gap-4 md:gap-6"> <div className="flex flex-col items-center gap-4 md:gap-6">
{playlistMetadata.picture ? (
<img <img
src={playlistMetadata.images?.at(0)?.url || "/placeholder.jpg"} src={playlistMetadata.picture}
alt={playlistMetadata.name} alt={playlistMetadata.name}
className="w-32 h-32 sm:w-40 sm:h-40 md:w-48 md:h-48 object-cover rounded-lg shadow-lg mx-auto" className="w-32 h-32 sm:w-40 sm:h-40 md:w-48 md:h-48 object-cover rounded-lg shadow-lg mx-auto"
/> />
) : (
<div
className="w-32 h-32 sm:w-40 sm:h-40 md:w-48 md:h-48 rounded-lg shadow-lg mx-auto overflow-hidden bg-surface-muted dark:bg-surface-muted-dark grid grid-cols-2 grid-rows-2"
>
{(Array.from(
new Map(
filteredItems
.map(({ track }) => (track as any)?.album?.images?.at(-1)?.url)
.filter((u) => !!u)
.map((u) => [u, u] as const)
).values()
) as string[]).slice(0, 4).map((url, i) => (
<img
key={`${url}-${i}`}
src={url}
alt={`Cover ${i + 1}`}
className="w-full h-full object-cover"
/>
))}
{filteredItems.length === 0 && (
<img
src="/placeholder.jpg"
alt={playlistMetadata.name}
className="col-span-2 row-span-2 w-full h-full object-cover"
/>
)}
</div>
)}
<div className="flex-grow space-y-2 text-center"> <div className="flex-grow space-y-2 text-center">
<h1 className="text-2xl md:text-3xl font-bold text-content-primary dark:text-content-primary-dark leading-tight">{playlistMetadata.name}</h1> <h1 className="text-2xl md:text-3xl font-bold text-content-primary dark:text-content-primary-dark leading-tight">{playlistMetadata.name}</h1>
{playlistMetadata.description && ( {playlistMetadata.description && (
<p className="text-base md:text-lg text-content-secondary dark:text-content-secondary-dark">{playlistMetadata.description}</p> <p className="text-base md:text-lg text-content-secondary dark:text-content-secondary-dark">{playlistMetadata.description}</p>
)} )}
<p className="text-sm text-content-muted dark:text-content-muted-dark"> <p className="text-sm text-content-muted dark:text-content-muted-dark">
By {playlistMetadata.owner.display_name} {playlistMetadata.followers.total.toLocaleString()} followers {totalTracks} songs By {playlistMetadata.owner.display_name} {totalTracks} songs
</p> </p>
</div> </div>
</div> </div>
@@ -239,7 +294,7 @@ export const Playlist = () => {
? "Queued." ? "Queued."
: playlistStatus === "error" : playlistStatus === "error"
? "Download All" ? "Download All"
: "Downloading..." : <img src="/spinner.svg" alt="Loading" className="w-5 h-5 animate-spin inline-block" />
: "Download All"} : "Download All"}
</button> </button>
{settings?.watch?.enabled && ( {settings?.watch?.enabled && (
@@ -264,39 +319,40 @@ export const Playlist = () => {
{/* Tracks Section */} {/* Tracks Section */}
<div className="space-y-3 md:space-y-4"> <div className="space-y-3 md:space-y-4">
<div className="flex items-center justify-between px-1"> <div className="flex items-center justify_between px-1">
<h2 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Tracks</h2> <h2 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Tracks</h2>
{tracks.length > 0 && ( {items.length > 0 && (
<span className="text-sm text-content-muted dark:text-content-muted-dark"> <span className="text-sm text-content-muted dark:text-content-muted-dark">
Showing {tracks.length} of {totalTracks} tracks Showing {items.length} of {totalTracks} tracks
</span> </span>
)} )}
</div> </div>
<div className="bg-surface-muted dark:bg-surface-muted-dark rounded-xl p-2 md:p-4 shadow-sm"> <div className="bg-surface-muted dark:bg-surface-muted-dark rounded-xl p-2 md:p-4 shadow-sm">
<div className="space-y-1 md:space-y-2"> <div className="space-y-1 md:space-y-2">
{filteredTracks.map(({ track }, index) => { {filteredItems.map(({ track }, index) => {
if (!track) return null; const t = track as LibrespotTrackType;
if (!t || !t.id) return null;
return ( return (
<div <div
key={track.id} key={`${t.id}-${index}`}
className="flex items-center justify-between p-3 md:p-4 hover:bg-surface-secondary dark:hover:bg-surface-secondary-dark rounded-lg transition-colors duration-200 group" className="flex items-center justify-between p-3 md:p-4 hover:bg-surface-secondary dark:hover:bg-surface-secondary-dark rounded-lg transition-colors duration-200 group"
> >
<div className="flex items-center gap-3 md:gap-4 min-w-0 flex-1"> <div className="flex items-center gap-3 md:gap-4 min-w-0 flex-1">
<span className="text-content-muted dark:text-content-muted-dark w-6 md:w-8 text-right text-sm font-medium">{index + 1}</span> <span className="text-content-muted dark:text-content-muted-dark w-6 md:w-8 text-right text-sm font-medium">{index + 1}</span>
<Link to="/album/$albumId" params={{ albumId: track.album.id }}> <Link to="/album/$albumId" params={{ albumId: t.album.id }}>
<img <img
src={track.album.images?.at(-1)?.url || "/placeholder.jpg "} src={t.album.images?.at(-1)?.url || "/placeholder.jpg "}
alt={track.album.name} alt={t.album.name}
className="w-10 h-10 md:w-12 md:h-12 object-cover rounded hover:scale-105 transition-transform duration-300" className="w-10 h-10 md:w-12 md:h-12 object-cover rounded hover:scale-105 transition-transform duration-300"
/> />
</Link> </Link>
<div className="min-w-0 flex-1"> <div className="min-w-0 flex-1">
<Link to="/track/$trackId" params={{ trackId: track.id }} className="font-medium text-content-primary dark:text-content-primary-dark text-sm md:text-base hover:underline block truncate"> <Link to="/track/$trackId" params={{ trackId: t.id }} className="font-medium text-content-primary dark:text-content-primary-dark text-sm md:text-base hover:underline block truncate">
{track.name} {t.name}
</Link> </Link>
<p className="text-xs md:text-sm text-content-secondary dark:text-content-secondary-dark truncate"> <p className="text-xs md:text-sm text-content-secondary dark:text-content-secondary-dark truncate">
{track.artists.map((artist, index) => ( {t.artists.map((artist, index) => (
<span key={artist.id}> <span key={artist.id}>
<Link <Link
to="/artist/$artistId" to="/artist/$artistId"
@@ -305,7 +361,7 @@ export const Playlist = () => {
> >
{artist.name} {artist.name}
</Link> </Link>
{index < track.artists.length - 1 && ", "} {index < t.artists.length - 1 && ", "}
</span> </span>
))} ))}
</p> </p>
@@ -313,29 +369,29 @@ export const Playlist = () => {
</div> </div>
<div className="flex items-center gap-2 md:gap-4 shrink-0"> <div className="flex items-center gap-2 md:gap-4 shrink-0">
<span className="text-content-muted dark:text-content-muted-dark text-xs md:text-sm hidden sm:block"> <span className="text-content-muted dark:text-content-muted-dark text-xs md:text-sm hidden sm:block">
{Math.floor(track.duration_ms / 60000)}: {Math.floor(t.duration_ms / 60000)}:
{((track.duration_ms % 60000) / 1000).toFixed(0).padStart(2, "0")} {((t.duration_ms % 60000) / 1000).toFixed(0).padStart(2, "0")}
</span> </span>
<button <button
onClick={() => handleDownloadTrack(track)} onClick={() => handleDownloadTrack(t)}
disabled={!!trackStatuses[track.id] && trackStatuses[track.id] !== "error"} disabled={!!trackStatuses[t.id] && trackStatuses[t.id] !== "error"}
className="w-9 h-9 md:w-10 md:h-10 flex items-center justify-center bg-surface-muted dark:bg-surface-muted-dark hover:bg-surface-accent dark:hover:bg-surface-accent-dark border border-border-muted dark:border-border-muted-dark hover:border-border-accent dark:hover:border-border-accent-dark rounded-full transition-all disabled:opacity-50 disabled:cursor-not-allowed" className="w-9 h-9 md:w-10 md:h-10 flex items-center justify-center bg-surface-muted dark:bg-surface-muted-dark hover:bg-surface-accent dark:hover:bg-surface-accent-dark border border-border-muted dark:border-border-muted-dark hover:border-border-accent dark:hover:border-border-accent-dark rounded-full transition-all disabled:opacity-50 disabled:cursor-not-allowed"
title={ title={
trackStatuses[track.id] trackStatuses[t.id]
? trackStatuses[track.id] === "queued" ? trackStatuses[t.id] === "queued"
? "Queued." ? "Queued."
: trackStatuses[track.id] === "error" : trackStatuses[t.id] === "error"
? "Download" ? "Download"
: "Downloading..." : "Downloading..."
: "Download" : "Download"
} }
> >
{trackStatuses[track.id] {trackStatuses[t.id]
? trackStatuses[track.id] === "queued" ? trackStatuses[t.id] === "queued"
? "Queued." ? "Queued."
: trackStatuses[track.id] === "error" : trackStatuses[t.id] === "error"
? <img src="/download.svg" alt="Download" className="w-4 h-4 logo" /> ? <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
: "Downloading..." : <img src="/spinner.svg" alt="Loading" className="w-4 h-4 animate-spin" />
: <img src="/download.svg" alt="Download" className="w-4 h-4 logo" /> : <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
} }
</button> </button>
@@ -357,7 +413,7 @@ export const Playlist = () => {
)} )}
{/* End of tracks indicator */} {/* End of tracks indicator */}
{!hasMoreTracks && tracks.length > 0 && ( {!hasMoreTracks && items.length > 0 && (
<div className="text-center py-4 text-content-muted dark:text-content-muted-dark"> <div className="text-center py-4 text-content-muted dark:text-content-muted-dark">
All tracks loaded All tracks loaded
</div> </div>

View File

@@ -7,6 +7,7 @@ import { ProtectedRoute } from "@/components/auth/ProtectedRoute";
import { UserMenu } from "@/components/auth/UserMenu"; import { UserMenu } from "@/components/auth/UserMenu";
import { useContext, useState, useEffect } from "react"; import { useContext, useState, useEffect } from "react";
import { getTheme, toggleTheme } from "@/lib/theme"; import { getTheme, toggleTheme } from "@/lib/theme";
import { useSettings } from "@/contexts/settings-context";
function ThemeToggle() { function ThemeToggle() {
const [currentTheme, setCurrentTheme] = useState<'light' | 'dark' | 'system'>('system'); const [currentTheme, setCurrentTheme] = useState<'light' | 'dark' | 'system'>('system');
@@ -80,6 +81,8 @@ function ThemeToggle() {
function AppLayout() { function AppLayout() {
const { toggleVisibility, totalTasks } = useContext(QueueContext) || {}; const { toggleVisibility, totalTasks } = useContext(QueueContext) || {};
const { settings } = useSettings();
const watchEnabled = !!settings?.watch?.enabled;
return ( return (
<div className="min-h-screen bg-gradient-to-br from-surface-secondary via-surface-muted to-surface-accent dark:from-surface-dark dark:via-surface-muted-dark dark:to-surface-secondary-dark text-content-primary dark:text-content-primary-dark flex flex-col overflow-hidden"> <div className="min-h-screen bg-gradient-to-br from-surface-secondary via-surface-muted to-surface-accent dark:from-surface-dark dark:via-surface-muted-dark dark:to-surface-secondary-dark text-content-primary dark:text-content-primary-dark flex flex-col overflow-hidden">
@@ -92,9 +95,11 @@ function AppLayout() {
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
<ThemeToggle /> <ThemeToggle />
<UserMenu /> <UserMenu />
{watchEnabled && (
<Link to="/watchlist" className="p-2 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark"> <Link to="/watchlist" className="p-2 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark">
<img src="/binoculars.svg" alt="Watchlist" className="w-6 h-6 logo" /> <img src="/binoculars.svg" alt="Watchlist" className="w-6 h-6 logo" />
</Link> </Link>
)}
<Link to="/history" className="p-2 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark"> <Link to="/history" className="p-2 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark">
<img src="/history.svg" alt="History" className="w-6 h-6 logo" /> <img src="/history.svg" alt="History" className="w-6 h-6 logo" />
</Link> </Link>
@@ -144,9 +149,11 @@ function AppLayout() {
<Link to="/" className="p-3 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark"> <Link to="/" className="p-3 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark">
<img src="/home.svg" alt="Home" className="w-6 h-6 logo" /> <img src="/home.svg" alt="Home" className="w-6 h-6 logo" />
</Link> </Link>
{watchEnabled && (
<Link to="/watchlist" className="p-3 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark"> <Link to="/watchlist" className="p-3 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark">
<img src="/binoculars.svg" alt="Watchlist" className="w-6 h-6 logo" /> <img src="/binoculars.svg" alt="Watchlist" className="w-6 h-6 logo" />
</Link> </Link>
)}
<Link to="/history" className="p-3 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark"> <Link to="/history" className="p-3 rounded-full hover:bg-icon-button-hover dark:hover:bg-icon-button-hover-dark">
<img src="/history.svg" alt="History" className="w-6 h-6 logo" /> <img src="/history.svg" alt="History" className="w-6 h-6 logo" />
</Link> </Link>

View File

@@ -1,7 +1,7 @@
import { Link, useParams } from "@tanstack/react-router"; import { Link, useParams } from "@tanstack/react-router";
import { useEffect, useState, useContext } from "react"; import { useEffect, useState, useContext } from "react";
import apiClient from "../lib/api-client"; import apiClient from "../lib/api-client";
import type { TrackType } from "../types/spotify"; import type { LibrespotTrackType } from "@/types/librespot";
import { toast } from "sonner"; import { toast } from "sonner";
import { QueueContext, getStatus } from "../contexts/queue-context"; import { QueueContext, getStatus } from "../contexts/queue-context";
import { FaSpotify, FaArrowLeft } from "react-icons/fa"; import { FaSpotify, FaArrowLeft } from "react-icons/fa";
@@ -15,7 +15,7 @@ const formatDuration = (ms: number) => {
export const Track = () => { export const Track = () => {
const { trackId } = useParams({ from: "/track/$trackId" }); const { trackId } = useParams({ from: "/track/$trackId" });
const [track, setTrack] = useState<TrackType | null>(null); const [track, setTrack] = useState<LibrespotTrackType | null>(null);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const context = useContext(QueueContext); const context = useContext(QueueContext);
@@ -40,7 +40,7 @@ export const Track = () => {
const fetchTrack = async () => { const fetchTrack = async () => {
if (!trackId) return; if (!trackId) return;
try { try {
const response = await apiClient.get<TrackType>(`/track/info?id=${trackId}`); const response = await apiClient.get<LibrespotTrackType>(`/track/info?id=${trackId}`);
setTrack(response.data); setTrack(response.data);
} catch (err) { } catch (err) {
setError("Failed to load track"); setError("Failed to load track");
@@ -171,11 +171,11 @@ export const Track = () => {
<div className="flex-1 bg-surface-muted dark:bg-surface-muted-dark rounded-full h-3"> <div className="flex-1 bg-surface-muted dark:bg-surface-muted-dark rounded-full h-3">
<div <div
className="bg-primary h-3 rounded-full transition-all duration-500" className="bg-primary h-3 rounded-full transition-all duration-500"
style={{ width: `${track.popularity}%` }} style={{ width: `${track.popularity ?? 0}%` }}
></div> ></div>
</div> </div>
<span className="text-sm font-medium text-content-secondary dark:text-content-secondary-dark"> <span className="text-sm font-medium text-content_secondary dark:text-content-secondary-dark">
{track.popularity}% {track.popularity ?? 0}%
</span> </span>
</div> </div>
</div> </div>
@@ -193,14 +193,14 @@ export const Track = () => {
? "Queued." ? "Queued."
: trackStatus === "error" : trackStatus === "error"
? "Download" ? "Download"
: "Downloading..." : <img src="/spinner.svg" alt="Loading" className="w-5 h-5 animate-spin inline-block" />
: "Download"} : "Download"}
</button> </button>
<a <a
href={track.external_urls.spotify} href={track.external_urls.spotify}
target="_blank" target="_blank"
rel="noopener noreferrer" rel="noopener noreferrer"
className="w-full sm:w-auto flex items-center justify-center gap-3 text-content-secondary dark:text-content-secondary-dark hover:text-content-primary dark:hover:text-content-primary-dark transition duration-300 py-3 px-8 border border-border dark:border-border-dark rounded-full hover:border-border-accent dark:hover:border-border-accent-dark" className="w-full sm:w-auto flex items_center justify-center gap-3 text-content-secondary dark:text-content-secondary-dark hover:text-content-primary dark:hover:text-content-primary-dark transition duration-300 py-3 px-8 border border-border dark:border-border-dark rounded-full hover:border-border-accent dark:hover:border-border-accent-dark"
aria-label="Listen on Spotify" aria-label="Listen on Spotify"
> >
<FaSpotify size={20} className="icon-secondary hover:icon-primary" /> <FaSpotify size={20} className="icon-secondary hover:icon-primary" />

View File

@@ -20,39 +20,77 @@ export const Watchlist = () => {
const { settings, isLoading: settingsLoading } = useSettings(); const { settings, isLoading: settingsLoading } = useSettings();
const [items, setItems] = useState<WatchedItem[]>([]); const [items, setItems] = useState<WatchedItem[]>([]);
const [isLoading, setIsLoading] = useState(true); const [isLoading, setIsLoading] = useState(true);
const [expectedCount, setExpectedCount] = useState<number | null>(null);
// Utility to batch fetch details
async function batchFetch<T>(
ids: string[],
fetchFn: (id: string) => Promise<T>,
batchSize: number,
onBatch: (results: T[]) => void
) {
for (let i = 0; i < ids.length; i += batchSize) {
const batchIds = ids.slice(i, i + batchSize);
const batchResults = await Promise.all(
batchIds.map((id) => fetchFn(id).catch(() => null))
);
onBatch(batchResults.filter(Boolean) as T[]);
}
}
const fetchWatchlist = useCallback(async () => { const fetchWatchlist = useCallback(async () => {
setIsLoading(true); setIsLoading(true);
setItems([]); // Clear previous items
setExpectedCount(null);
try { try {
const [artistsRes, playlistsRes] = await Promise.all([ const [artistsRes, playlistsRes] = await Promise.all([
apiClient.get<BaseWatched[]>("/artist/watch/list"), apiClient.get<BaseWatched[]>("/artist/watch/list"),
apiClient.get<BaseWatched[]>("/playlist/watch/list"), apiClient.get<BaseWatched[]>("/playlist/watch/list"),
]); ]);
const artistDetailsPromises = artistsRes.data.map((artist) => // Prepare lists of IDs
apiClient.get<ArtistType>(`/artist/info?id=${artist.spotify_id}`), const artistIds = artistsRes.data.map((artist) => artist.spotify_id);
); const playlistIds = playlistsRes.data.map((playlist) => playlist.spotify_id);
const playlistDetailsPromises = playlistsRes.data.map((playlist) => setExpectedCount(artistIds.length + playlistIds.length);
apiClient.get<PlaylistType>(`/playlist/info?id=${playlist.spotify_id}`),
);
const [artistDetailsRes, playlistDetailsRes] = await Promise.all([ // Allow UI to render grid and skeletons immediately
Promise.all(artistDetailsPromises), setIsLoading(false);
Promise.all(playlistDetailsPromises),
]);
const artists: WatchedItem[] = artistDetailsRes.map((res) => ({ ...res.data, itemType: "artist" })); // Helper to update state incrementally
const playlists: WatchedItem[] = playlistDetailsRes.map((res) => ({ const appendItems = (newItems: WatchedItem[]) => {
...res.data, setItems((prev) => [...prev, ...newItems]);
itemType: "playlist", };
spotify_id: res.data.id,
// Fetch artist details in batches
await batchFetch<ArtistType>(
artistIds,
(id) => apiClient.get<ArtistType>(`/artist/info?id=${id}`).then(res => res.data),
5, // batch size
(results) => {
const items: WatchedArtist[] = results.map((data) => ({
...data,
itemType: "artist",
})); }));
appendItems(items);
}
);
setItems([...artists, ...playlists]); // Fetch playlist details in batches
await batchFetch<PlaylistType>(
playlistIds,
(id) => apiClient.get<PlaylistType>(`/playlist/info?id=${id}`).then(res => res.data),
5, // batch size
(results) => {
const items: WatchedPlaylist[] = results.map((data) => ({
...data,
itemType: "playlist",
spotify_id: data.id,
}));
appendItems(items);
}
);
} catch { } catch {
toast.error("Failed to load watchlist."); toast.error("Failed to load watchlist.");
} finally {
setIsLoading(false);
} }
}, []); }, []);
@@ -110,7 +148,8 @@ export const Watchlist = () => {
); );
} }
if (items.length === 0) { // Show "empty" only if not loading and nothing expected
if (!isLoading && items.length === 0 && (!expectedCount || expectedCount === 0)) {
return ( return (
<div className="text-center p-8"> <div className="text-center p-8">
<h2 className="text-2xl font-bold mb-2 text-content-primary dark:text-content-primary-dark">Watchlist is Empty</h2> <h2 className="text-2xl font-bold mb-2 text-content-primary dark:text-content-primary-dark">Watchlist is Empty</h2>
@@ -158,6 +197,25 @@ export const Watchlist = () => {
</div> </div>
</div> </div>
))} ))}
{/* Skeletons for loading items */}
{isLoading && expectedCount && items.length < expectedCount &&
Array.from({ length: expectedCount - items.length }).map((_, idx) => (
<div
key={`skeleton-${idx}`}
className="bg-surface dark:bg-surface-secondary-dark p-4 rounded-lg shadow space-y-2 flex flex-col animate-pulse"
>
<div className="flex-grow">
<div className="w-full aspect-square bg-gray-200 dark:bg-gray-700 rounded-md mb-2" />
<div className="h-5 bg-gray-200 dark:bg-gray-700 rounded w-3/4 mb-1" />
<div className="h-4 bg-gray-100 dark:bg-gray-800 rounded w-1/2" />
</div>
<div className="flex gap-2 pt-2">
<div className="w-full h-8 bg-gray-200 dark:bg-gray-700 rounded" />
<div className="w-full h-8 bg-gray-100 dark:bg-gray-800 rounded" />
</div>
</div>
))
}
</div> </div>
</div> </div>
); );

View File

@@ -222,11 +222,22 @@ export interface SummaryObject {
total_successful: number; total_successful: number;
total_skipped: number; total_skipped: number;
total_failed: number; total_failed: number;
// Optional metadata present in deezspot summaries (album/playlist and sometimes single-track)
service: "spotify" | "deezer";
quality: string; // e.g., "ogg", "flac"
bitrate: string; // e.g., "320k"
m3u_path?: string; // playlist convenience output
// Convenience fields that may appear for single-track flows
final_path?: string;
download_quality?: string; // e.g., "OGG_320"
} }
export interface DoneObject extends BaseStatusObject { export interface DoneObject extends BaseStatusObject {
status: "done"; status: "done";
summary?: SummaryObject; summary?: SummaryObject;
// Convenience fields often present on done for tracks
final_path?: string;
download_quality?: string;
} }
export type StatusInfo = export type StatusInfo =

View File

@@ -0,0 +1,161 @@
// Librespot wrapper response types for frontend consumption
/** External URL map as returned by the Spotify Web API (`external_urls`). */
export interface LibrespotExternalUrls {
  spotify: string;
}
/** Cover/profile image; width and height may be absent for some sources. */
export interface LibrespotImage {
  url: string;
  width?: number;
  height?: number;
}
/** Minimal artist reference embedded inside track/album objects (id + name). */
export interface LibrespotArtistStub {
  id: string;
  name: string;
  type?: "artist";
  uri?: string;
  external_urls?: LibrespotExternalUrls;
}
/**
 * Full artist object (returned by get_artist).
 * The `*_group` fields hold album IDs grouped by release kind.
 */
export interface LibrespotArtistType {
  id: string;
  name: string;
  images?: LibrespotImage[];
  external_urls?: LibrespotExternalUrls;
  followers?: { total: number };
  genres?: string[];
  popularity?: number;
  type?: "artist";
  uri?: string;
  // Album groups: arrays of album IDs
  album_group?: string[];
  single_group?: string[];
  compilation_group?: string[];
  appears_on_group?: string[];
}
/** Copyright notice attached to an album. */
export interface LibrespotCopyright {
  text: string;
  type: string;
}
export type LibrespotReleaseDatePrecision = "day" | "month" | "year";
/**
 * Minimal embedded album object returned inside track objects
 * (does not include a tracks array).
 */
export interface LibrespotAlbumRef {
  id: string;
  name: string;
  images?: LibrespotImage[];
  release_date?: string;
  release_date_precision?: LibrespotReleaseDatePrecision;
  type?: "album";
  uri?: string;
  album_type?: "album" | "single" | "compilation";
  external_urls?: LibrespotExternalUrls;
  artists?: LibrespotArtistStub[];
}
/**
 * Full track object. The embedded `album` is a minimal reference (no tracks
 * array) and `artists` are stubs; fetch the full objects separately if needed.
 */
export interface LibrespotTrackType {
  album: LibrespotAlbumRef;
  artists: LibrespotArtistStub[];
  available_markets?: string[];
  disc_number: number;
  duration_ms: number;
  explicit: boolean;
  external_ids?: { isrc?: string };
  external_urls: LibrespotExternalUrls;
  id: string;
  name: string;
  popularity?: number;
  track_number: number;
  type: "track";
  uri: string;
  preview_url?: string;
  has_lyrics?: boolean;
  earliest_live_timestamp?: number;
  licensor_uuid?: string; // when available
}
/**
 * Full album object. The element type of `tracks` depends on how the album
 * was fetched — see the inline comment below; use a type guard such as
 * `isAlbumWithExpandedTracks` before reading track fields.
 */
export interface LibrespotAlbumType {
  album_type: "album" | "single" | "compilation";
  total_tracks: number;
  available_markets?: string[];
  external_urls: LibrespotExternalUrls;
  id: string;
  images: LibrespotImage[];
  name: string;
  release_date: string;
  release_date_precision: LibrespotReleaseDatePrecision;
  type: "album";
  uri: string;
  artists: LibrespotArtistStub[];
  // When include_tracks=False -> string[] of base62 IDs
  // When include_tracks=True -> LibrespotTrackType[]
  tracks: string[] | LibrespotTrackType[];
  copyrights?: LibrespotCopyright[];
  external_ids?: { upc?: string };
  label?: string;
  popularity?: number;
}
// Playlist types
/** The user who owns a playlist (also used for the "added_by" user of a playlist item). */
export interface LibrespotPlaylistOwnerType {
  id: string;
  type: "user";
  uri: string;
  external_urls: LibrespotExternalUrls;
  display_name: string;
}
/** A non-expanded track reference inside a playlist item (IDs/URLs only). */
export interface LibrespotPlaylistTrackStubType {
  id: string;
  uri: string; // spotify:track:{id}
  type: "track";
  external_urls: LibrespotExternalUrls;
}
/**
 * One entry of a playlist. `track` is either a stub or a full track object,
 * depending on the `expand_items` flag — see `isPlaylistItemWithExpandedTrack`.
 */
export interface LibrespotPlaylistItemType {
  added_at: string;
  added_by: LibrespotPlaylistOwnerType;
  is_local: boolean;
  // If expand_items=False -> LibrespotPlaylistTrackStubType
  // If expand_items=True -> LibrespotTrackType
  track: LibrespotPlaylistTrackStubType | LibrespotTrackType;
  // Additional reference, not a Web API field
  item_id?: string;
}
/** One page of playlist items, with paging offset and overall total. */
export interface LibrespotPlaylistTracksPageType {
  offset: number;
  total: number;
  items: LibrespotPlaylistItemType[];
}
/** A playlist object with its owner, snapshot id and (paged) tracks. */
export interface LibrespotPlaylistType {
  name: string;
  description?: string | null;
  collaborative?: boolean;
  /** Images where only `url` is guaranteed; width/height are optional. */
  images?: Array<Pick<LibrespotImage, "url"> & Partial<LibrespotImage>>;
  owner: LibrespotPlaylistOwnerType;
  snapshot_id: string;
  tracks: LibrespotPlaylistTracksPageType;
  type: "playlist";
  picture?: string;
}
// Type guards
/**
 * Type guard: narrows an album to one whose `tracks` array holds full track
 * objects (fetched with include_tracks=True) rather than base62 ID strings.
 *
 * NOTE: an empty `tracks` array is treated as "expanded" — with no elements
 * the two representations are indistinguishable, so callers get the richer type.
 */
export function isAlbumWithExpandedTracks(
  album: LibrespotAlbumType
): album is LibrespotAlbumType & { tracks: LibrespotTrackType[] } {
  // `album` is already typed as LibrespotAlbumType; the original cast was redundant.
  const { tracks } = album;
  return Array.isArray(tracks) && (tracks.length === 0 || typeof tracks[0] === "object");
}
/**
 * Type guard: narrows a playlist item to one whose `track` is a fully-expanded
 * track object (expand_items=True) rather than a stub reference.
 */
export function isPlaylistItemWithExpandedTrack(
  item: LibrespotPlaylistItemType
): item is LibrespotPlaylistItemType & { track: LibrespotTrackType } {
  const candidate = item.track as unknown;
  if (!candidate || typeof candidate !== "object") {
    return false;
  }
  // Stubs carry type === "track" too, so the discriminator is `duration_ms`,
  // which only the expanded track object has.
  const track = candidate as any;
  return track.type === "track" && "duration_ms" in track;
}

View File

@@ -1 +0,0 @@

View File

@@ -1,633 +0,0 @@
import sqlite3
from pathlib import Path
import pytest
import json
# Override the autouse credentials fixture from conftest for this module
@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests():
    """Session-scoped no-op that shadows conftest.py's autouse credentials fixture."""
    # No-op to avoid external API calls; this shadows the session autouse fixture in conftest.py
    yield
def _create_306_history_db(db_path: Path) -> None:
    """Build a download-history DB matching the 3.0.6 schema, seeded with fixtures.

    The seeded data is designed to exercise every migration path:
    - two parent rows whose ``children_table`` does not exist yet
      (migration must create the tables);
    - a legacy children table with too few columns (migration must upgrade it);
    - one fully-specified children table with two rows (migration must preserve it).
    """
    db_path.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(db_path)) as conn:
        # Core 3.0.6 schema: main history table plus its indexes.
        conn.executescript(
            """
            CREATE TABLE IF NOT EXISTS download_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                download_type TEXT NOT NULL,
                title TEXT NOT NULL,
                artists TEXT,
                timestamp REAL NOT NULL,
                status TEXT NOT NULL,
                service TEXT,
                quality_format TEXT,
                quality_bitrate TEXT,
                total_tracks INTEGER,
                successful_tracks INTEGER,
                failed_tracks INTEGER,
                skipped_tracks INTEGER,
                children_table TEXT,
                task_id TEXT,
                external_ids TEXT,
                metadata TEXT,
                release_date TEXT,
                genres TEXT,
                images TEXT,
                owner TEXT,
                album_type TEXT,
                duration_total_ms INTEGER,
                explicit BOOLEAN
            );
            CREATE INDEX IF NOT EXISTS idx_download_history_timestamp ON download_history(timestamp);
            CREATE INDEX IF NOT EXISTS idx_download_history_type_status ON download_history(download_type, status);
            CREATE INDEX IF NOT EXISTS idx_download_history_task_id ON download_history(task_id);
            CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids ON download_history(task_id, download_type, external_ids);
            """
        )
        # Insert rows that reference non-existent children tables
        conn.execute(
            """
            INSERT INTO download_history (
                download_type, title, artists, timestamp, status, service,
                quality_format, quality_bitrate, total_tracks, successful_tracks,
                failed_tracks, skipped_tracks, children_table, task_id,
                external_ids, metadata, release_date, genres, images, owner,
                album_type, duration_total_ms, explicit
            ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                "album",
                "Test Album",
                "[]",
                "completed",
                "spotify",
                "FLAC",
                "1411kbps",
                10,
                8,
                1,
                1,
                "album_test1",  # children table that does not exist yet
                "task-album-1",
                "{}",
                "{}",
                "{}",
                "[]",
                "[]",
                "{}",
                "album",
                123456,
                0,
            ),
        )
        conn.execute(
            """
            INSERT INTO download_history (
                download_type, title, artists, timestamp, status, service,
                quality_format, quality_bitrate, total_tracks, successful_tracks,
                failed_tracks, skipped_tracks, children_table, task_id,
                external_ids, metadata, release_date, genres, images, owner,
                album_type, duration_total_ms, explicit
            ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                "playlist",
                "Test Playlist",
                "[]",
                "partial",
                "spotify",
                "MP3",
                "320kbps",
                20,
                15,
                3,
                2,
                "playlist_test2",  # children table that does not exist yet
                "task-playlist-1",
                "{}",
                "{}",
                "{}",
                "[]",
                "[]",
                "{}",
                "",
                654321,
                0,
            ),
        )
        # Create a legacy children table with too-few columns to test schema upgrade
        conn.execute(
            "CREATE TABLE IF NOT EXISTS album_legacy (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)"
        )
        # Create a fully-specified children table from docs and add rows
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS album_f9e8d7c6b5 (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                title TEXT NOT NULL,
                artists TEXT,
                album_title TEXT,
                duration_ms INTEGER,
                track_number INTEGER,
                disc_number INTEGER,
                explicit BOOLEAN,
                status TEXT NOT NULL,
                external_ids TEXT,
                genres TEXT,
                isrc TEXT,
                timestamp REAL NOT NULL,
                position INTEGER,
                metadata TEXT
            )
            """
        )
        conn.execute(
            """
            INSERT INTO download_history (
                download_type, title, artists, timestamp, status, service,
                quality_format, quality_bitrate, total_tracks, successful_tracks,
                failed_tracks, skipped_tracks, children_table, task_id,
                external_ids, metadata, release_date, genres, images, owner,
                album_type, duration_total_ms, explicit
            ) VALUES (?, ?, ?, strftime('%s','now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                "album",
                "Random Access Memories",
                "[\"Daft Punk\"]",
                "partial",
                "spotify",
                "FLAC",
                "1411",
                13,
                12,
                1,
                0,
                "album_f9e8d7c6b5",  # pre-existing children table (created above)
                "celery-task-id-789",
                "{\"spotify\": \"4m2880jivSbbyEGAKfITCa\"}",
                "{\"callback_type\": \"album\"}",
                "{\"year\": 2013, \"month\": 5, \"day\": 17}",
                "[\"disco\", \"funk\"]",
                "[{\"url\": \"https://i.scdn.co/image/...\"}]",
                None,
                "album",
                4478293,
                0
            ),
        )
        # Two track rows (one completed, one failed) that must survive migration.
        conn.executemany(
            """
            INSERT INTO album_f9e8d7c6b5 (
                title, artists, album_title, duration_ms, track_number, disc_number, explicit, status,
                external_ids, genres, isrc, timestamp, position, metadata
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, strftime('%s','now'), ?, ?)
            """,
            [
                (
                    "Get Lucky (feat. Pharrell Williams & Nile Rodgers)",
                    "[\"Daft Punk\", \"Pharrell Williams\", \"Nile Rodgers\"]",
                    "Random Access Memories",
                    369626,
                    8,
                    1,
                    0,
                    "completed",
                    "{\"spotify\": \"69kOkLUCdZlE8ApD28j1JG\", \"isrc\": \"GBUJH1300019\"}",
                    "[]",
                    "GBUJH1300019",
                    0,
                    "{\"album\": {...}, \"type\": \"track\"}",
                ),
                (
                    "Lose Yourself to Dance (feat. Pharrell Williams)",
                    "[\"Daft Punk\", \"Pharrell Williams\"]",
                    "Random Access Memories",
                    353893,
                    6,
                    1,
                    0,
                    "failed",
                    "{\"spotify\": \"5L95vS64r8PAj5M8H1oYkm\", \"isrc\": \"GBUJH1300017\"}",
                    "[]",
                    "GBUJH1300017",
                    0,
                    "{\"album\": {...}, \"failure_reason\": \"Could not find matching track on Deezer.\"}",
                ),
            ]
        )
def _create_306_watch_dbs(playlists_db: Path, artists_db: Path) -> None:
    """Build watch playlists/artists DBs matching the 3.0.6 schema, with fixtures.

    Each DB receives: its core table with one watched row, a legacy/minimal
    dynamic table (to exercise the migration's schema-upgrade path), and one
    fully-specified dynamic table with two rows (to verify data is preserved).
    """
    playlists_db.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(playlists_db)) as pconn:
        pconn.executescript(
            """
            CREATE TABLE IF NOT EXISTS watched_playlists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                owner_id TEXT,
                owner_name TEXT,
                total_tracks INTEGER,
                link TEXT,
                snapshot_id TEXT,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1
            );
            """
        )
        # Insert a sample watched playlist row (docs example)
        pconn.execute(
            """
            INSERT OR REPLACE INTO watched_playlists (
                spotify_id, name, owner_id, owner_name, total_tracks, link, snapshot_id, last_checked, added_at, is_active
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                "37i9dQZF1DXcBWIGoYBM5M",
                "Today's Top Hits",
                "spotify",
                "Spotify",
                50,
                "https://open.spotify.com/playlist/37i9dQZF1DXcBWIGoYBM5M",
                "MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy",
                1677187000,
                1677186950,
                1,
            ),
        )
        # Create a legacy/minimal playlist dynamic table to test schema upgrade
        pconn.execute(
            "CREATE TABLE IF NOT EXISTS playlist_legacy (spotify_track_id TEXT PRIMARY KEY, title TEXT)"
        )
        # Create a fully-specified playlist dynamic table (docs example) and add rows
        pconn.execute(
            """
            CREATE TABLE IF NOT EXISTS playlist_37i9dQZF1DXcBWIGoYBM5M (
                spotify_track_id TEXT PRIMARY KEY,
                title TEXT,
                artist_names TEXT,
                album_name TEXT,
                album_artist_names TEXT,
                track_number INTEGER,
                album_spotify_id TEXT,
                duration_ms INTEGER,
                added_at_playlist TEXT,
                added_to_db INTEGER,
                is_present_in_spotify INTEGER,
                last_seen_in_spotify INTEGER,
                snapshot_id TEXT,
                final_path TEXT
            )
            """
        )
        # Two tracks: one already downloaded (has final_path), one not (final_path NULL).
        pconn.executemany(
            """
            INSERT OR REPLACE INTO playlist_37i9dQZF1DXcBWIGoYBM5M (
                spotify_track_id, title, artist_names, album_name, album_artist_names, track_number, album_spotify_id,
                duration_ms, added_at_playlist, added_to_db, is_present_in_spotify, last_seen_in_spotify, snapshot_id, final_path
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            [
                (
                    "4k6Uh1HXdhtusDW5y80vNN",
                    "As It Was",
                    "Harry Styles",
                    "Harry's House",
                    "Harry Styles",
                    4,
                    "5r36AJ6VOJtp00oxSkNaAO",
                    167303,
                    "2023-02-20T10:00:00Z",
                    1677186980,
                    1,
                    1677187000,
                    "MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy",
                    "/downloads/music/Harry Styles/Harry's House/04 - As It Was.flac",
                ),
                (
                    "5ww2BF9slyYgAno5EAsoOJ",
                    "Flowers",
                    "Miley Cyrus",
                    "Endless Summer Vacation",
                    "Miley Cyrus",
                    1,
                    "1lw0K2sIKi84gav3e4pG3c",
                    194952,
                    "2023-02-23T12:00:00Z",
                    1677186995,
                    1,
                    1677187000,
                    "MTY3NzE4NjgwMCwwMDAwMDAwMDk1ODVmYjI5ZDY5MGUzN2Q4Y2U4OWY2YmY1ZDE4ZTAy",
                    None,
                ),
            ]
        )
    with sqlite3.connect(str(artists_db)) as aconn:
        aconn.executescript(
            """
            CREATE TABLE IF NOT EXISTS watched_artists (
                spotify_id TEXT PRIMARY KEY,
                name TEXT,
                link TEXT,
                total_albums_on_spotify INTEGER,
                last_checked INTEGER,
                added_at INTEGER,
                is_active INTEGER DEFAULT 1,
                genres TEXT,
                popularity INTEGER,
                image_url TEXT
            );
            """
        )
        # Insert a sample watched artist row (docs example)
        aconn.execute(
            """
            INSERT OR REPLACE INTO watched_artists (
                spotify_id, name, link, total_albums_on_spotify, last_checked, added_at, is_active, genres, popularity, image_url
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                "4oLeXFyACqeem2VImYeBFe",
                "Madeon",
                "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe",
                45,
                1677188000,
                1677187900,
                1,
                "electro house, filter house, french house",
                65,
                "https://i.scdn.co/image/ab6761610000e5eb...",
            ),
        )
        # Create a legacy/minimal artist dynamic table to test schema upgrade
        aconn.execute(
            "CREATE TABLE IF NOT EXISTS artist_legacy (album_spotify_id TEXT PRIMARY KEY, name TEXT)"
        )
        # Create a fully-specified artist dynamic table (docs example) and add rows
        aconn.execute(
            """
            CREATE TABLE IF NOT EXISTS artist_4oLeXFyACqeem2VImYeBFe (
                album_spotify_id TEXT PRIMARY KEY,
                artist_spotify_id TEXT,
                name TEXT,
                album_group TEXT,
                album_type TEXT,
                release_date TEXT,
                release_date_precision TEXT,
                total_tracks INTEGER,
                link TEXT,
                image_url TEXT,
                added_to_db INTEGER,
                last_seen_on_spotify INTEGER,
                download_task_id TEXT,
                download_status INTEGER,
                is_fully_downloaded_managed_by_app INTEGER
            )
            """
        )
        # One album and one single for the watched artist.
        aconn.executemany(
            """
            INSERT OR REPLACE INTO artist_4oLeXFyACqeem2VImYeBFe (
                album_spotify_id, artist_spotify_id, name, album_group, album_type, release_date, release_date_precision,
                total_tracks, link, image_url, added_to_db, last_seen_on_spotify, download_task_id, download_status, is_fully_downloaded_managed_by_app
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            [
                (
                    "2GWMnf2ltOQd2v2T62a2m8",
                    "4oLeXFyACqeem2VImYeBFe",
                    "Good Faith",
                    "album",
                    "album",
                    "2019-11-15",
                    "day",
                    10,
                    "https://open.spotify.com/album/2GWMnf2ltOQd2v2T62a2m8",
                    "https://i.scdn.co/image/ab67616d0000b273...",
                    1677187950,
                    1677188000,
                    "celery-task-id-123",
                    2,
                    1,
                ),
                (
                    "2smfe2S0AVaxH2I1a5p55n",
                    "4oLeXFyACqeem2VImYeBFe",
                    "Gonna Be Good",
                    "single",
                    "single",
                    "2023-01-19",
                    "day",
                    1,
                    "https://open.spotify.com/album/2smfe2S0AVaxH2I1a5p55n",
                    "https://i.scdn.co/image/ab67616d0000b273...",
                    1677187960,
                    1677188000,
                    "celery-task-id-456",
                    1,
                    0,
                ),
            ]
        )
def _create_306_accounts(creds_dir: Path, accounts_db: Path) -> None:
    """Build the 3.0.6 accounts DB and credentials filesystem layout.

    Creates the ``spotify``/``deezer`` account tables with one row each, a
    global ``search.json``, and one Spotify credentials blob — all of which the
    migration must preserve.
    """
    creds_dir.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(accounts_db)) as conn:
        conn.executescript(
            """
            CREATE TABLE IF NOT EXISTS spotify (
                name TEXT PRIMARY KEY,
                region TEXT,
                created_at REAL,
                updated_at REAL
            );
            CREATE TABLE IF NOT EXISTS deezer (
                name TEXT PRIMARY KEY,
                arl TEXT,
                region TEXT,
                created_at REAL,
                updated_at REAL
            );
            """
        )
        conn.execute(
            "INSERT OR REPLACE INTO spotify (name, region, created_at, updated_at) VALUES (?, ?, ?, ?)",
            ("my_main_spotify", "US", 1677190000.0, 1677190000.0),
        )
        conn.execute(
            "INSERT OR REPLACE INTO deezer (name, arl, region, created_at, updated_at) VALUES (?, ?, ?, ?, ?)",
            ("my_hifi_deezer", "a1b2c3d4e5f6a1b2c3d4e5f6...", "FR", 1677190100.0, 1677190100.0),
        )
    # Pre-create creds filesystem
    search_json = creds_dir / "search.json"
    if not search_json.exists():
        search_json.write_text('{"client_id":"your_global_spotify_client_id","client_secret":"your_global_spotify_client_secret"}\n', encoding="utf-8")
    blobs_dir = creds_dir / "blobs" / "my_main_spotify"
    blobs_dir.mkdir(parents=True, exist_ok=True)
    creds_blob = blobs_dir / "credentials.json"
    if not creds_blob.exists():
        creds_blob.write_text(
            '{"version":"v1","access_token":"...","expires_at":1677193600,"refresh_token":"...","scope":"user-read-private user-read-email playlist-read-private"}\n',
            encoding="utf-8",
        )
def _get_columns(db_path: Path, table: str) -> set[str]:
with sqlite3.connect(str(db_path)) as conn:
cur = conn.execute(f"PRAGMA table_info({table})")
return {row[1] for row in cur.fetchall()}
def _get_count(db_path: Path, table: str) -> int:
with sqlite3.connect(str(db_path)) as conn:
cur = conn.execute(f"SELECT COUNT(*) FROM {table}")
return cur.fetchone()[0]
def test_migration_children_tables_created_and_upgraded(tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
    """End-to-end migration test: simulate a full 3.0.6 data directory, run the
    migration runner twice (idempotency), and assert every DB/table reaches the
    3.1.2 schema with pre-existing data intact."""
    # Arrange temp paths
    data_dir = tmp_path / "data"
    history_db = data_dir / "history" / "download_history.db"
    playlists_db = data_dir / "watch" / "playlists.db"
    artists_db = data_dir / "watch" / "artists.db"
    creds_dir = data_dir / "creds"
    accounts_db = creds_dir / "accounts.db"
    blobs_dir = creds_dir / "blobs"
    search_json = creds_dir / "search.json"
    # Create 3.0.6 base schemas and sample data (full simulation)
    _create_306_history_db(history_db)
    _create_306_watch_dbs(playlists_db, artists_db)
    _create_306_accounts(creds_dir, accounts_db)
    # Point the migration runner to our temp DBs
    from routes.migrations import runner
    monkeypatch.setattr(runner, "DATA_DIR", data_dir)
    monkeypatch.setattr(runner, "HISTORY_DB", history_db)
    monkeypatch.setattr(runner, "WATCH_DIR", data_dir / "watch")
    monkeypatch.setattr(runner, "PLAYLISTS_DB", playlists_db)
    monkeypatch.setattr(runner, "ARTISTS_DB", artists_db)
    monkeypatch.setattr(runner, "CREDS_DIR", creds_dir)
    monkeypatch.setattr(runner, "ACCOUNTS_DB", accounts_db)
    monkeypatch.setattr(runner, "BLOBS_DIR", blobs_dir)
    monkeypatch.setattr(runner, "SEARCH_JSON", search_json)
    # Act: run migrations
    runner.run_migrations_if_needed()
    # Run twice to ensure idempotency
    runner.run_migrations_if_needed()
    # Assert: referenced children tables exist with expected columns
    expected_children_cols = {
        "id",
        "title",
        "artists",
        "album_title",
        "duration_ms",
        "track_number",
        "disc_number",
        "explicit",
        "status",
        "external_ids",
        "genres",
        "isrc",
        "timestamp",
        "position",
        "metadata",
    }
    assert _get_columns(history_db, "album_test1").issuperset(expected_children_cols)
    assert _get_columns(history_db, "playlist_test2").issuperset(expected_children_cols)
    # Legacy table upgraded
    assert _get_columns(history_db, "album_legacy").issuperset(expected_children_cols)
    # Pre-existing children table preserved and correct
    assert _get_columns(history_db, "album_f9e8d7c6b5").issuperset(expected_children_cols)
    assert _get_count(history_db, "album_f9e8d7c6b5") == 2
    # Assert: accounts DB created/preserved with expected tables and columns
    assert accounts_db.exists()
    spotify_cols = _get_columns(accounts_db, "spotify")
    deezer_cols = _get_columns(accounts_db, "deezer")
    assert {"name", "region", "created_at", "updated_at"}.issubset(spotify_cols)
    assert {"name", "arl", "region", "created_at", "updated_at"}.issubset(deezer_cols)
    # Assert: creds filesystem and pre-existing blob preserved
    assert blobs_dir.exists() and blobs_dir.is_dir()
    assert search_json.exists()
    data = json.loads(search_json.read_text())
    assert set(data.keys()) == {"client_id", "client_secret"}
    assert (blobs_dir / "my_main_spotify" / "credentials.json").exists()
    # Assert: watch playlists core and dynamic tables upgraded to/at 3.1.2 schema
    watched_playlists_cols = _get_columns(playlists_db, "watched_playlists")
    assert {
        "spotify_id",
        "name",
        "owner_id",
        "owner_name",
        "total_tracks",
        "link",
        "snapshot_id",
        "last_checked",
        "added_at",
        "is_active",
    }.issubset(watched_playlists_cols)
    playlist_dynamic_expected = {
        "spotify_track_id",
        "title",
        "artist_names",
        "album_name",
        "album_artist_names",
        "track_number",
        "album_spotify_id",
        "duration_ms",
        "added_at_playlist",
        "added_to_db",
        "is_present_in_spotify",
        "last_seen_in_spotify",
        "snapshot_id",
        "final_path",
    }
    assert _get_columns(playlists_db, "playlist_legacy").issuperset(playlist_dynamic_expected)
    assert _get_columns(playlists_db, "playlist_37i9dQZF1DXcBWIGoYBM5M").issuperset(playlist_dynamic_expected)
    assert _get_count(playlists_db, "playlist_37i9dQZF1DXcBWIGoYBM5M") == 2
    # Assert: watch artists core and dynamic tables upgraded to/at 3.1.2 schema
    watched_artists_cols = _get_columns(artists_db, "watched_artists")
    assert {
        "spotify_id",
        "name",
        "link",
        "total_albums_on_spotify",
        "last_checked",
        "added_at",
        "is_active",
        "genres",
        "popularity",
        "image_url",
    }.issubset(watched_artists_cols)
    artist_dynamic_expected = {
        "album_spotify_id",
        "artist_spotify_id",
        "name",
        "album_group",
        "album_type",
        "release_date",
        "release_date_precision",
        "total_tracks",
        "link",
        "image_url",
        "added_to_db",
        "last_seen_on_spotify",
        "download_task_id",
        "download_status",
        "is_fully_downloaded_managed_by_app",
    }
    assert _get_columns(artists_db, "artist_legacy").issuperset(artist_dynamic_expected)
    assert _get_columns(artists_db, "artist_4oLeXFyACqeem2VImYeBFe").issuperset(artist_dynamic_expected)
    assert _get_count(artists_db, "artist_4oLeXFyACqeem2VImYeBFe") == 2

View File

@@ -1,65 +0,0 @@
# Deduplicated imports: sqlite3, Path and pytest were each imported twice.
# Grouped per PEP 8: stdlib, third-party, local.
import sqlite3
from pathlib import Path

import pytest

from routes.migrations.v3_1_0 import MigrationV3_1_0
# Override the autouse credentials fixture from conftest for this module
@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests():
    """Session-scoped no-op that shadows conftest.py's autouse credentials fixture."""
    # No-op to avoid external API calls
    yield
def _create_310_watch_artists_db(db_path: Path) -> None:
db_path.parent.mkdir(parents=True, exist_ok=True)
with sqlite3.connect(str(db_path)) as conn:
conn.executescript(
"""
CREATE TABLE watched_artists (
spotify_id TEXT PRIMARY KEY,
name TEXT
);
CREATE TABLE "artist_a1b2c3" (
album_spotify_id TEXT PRIMARY KEY,
artist_spotify_id TEXT,
name TEXT,
album_type TEXT,
release_date TEXT,
total_tracks INTEGER,
link TEXT,
image_url TEXT,
added_to_db INTEGER,
last_seen_on_spotify INTEGER
);
"""
)
conn.execute("INSERT INTO watched_artists (spotify_id) VALUES (?)", ('a1b2c3',))
def test_watch_artists_migration(tmp_path):
    """Verify MigrationV3_1_0 adds the expected columns to dynamic artist tables."""
    # 1. Setup mock v3.1.0 database
    db_path = tmp_path / "artists.db"
    _create_310_watch_artists_db(db_path)
    # 2. Run the migration
    migration = MigrationV3_1_0()
    with sqlite3.connect(db_path) as conn:
        # Sanity check before migration
        cur = conn.execute('PRAGMA table_info("artist_a1b2c3")')
        columns_before = {row[1] for row in cur.fetchall()}
        assert 'download_status' not in columns_before
        # Apply migration
        migration.update_watch_artists(conn)
        # 3. Assert migration was successful
        cur = conn.execute('PRAGMA table_info("artist_a1b2c3")')
        columns_after = {row[1] for row in cur.fetchall()}
        expected_columns = migration.ARTIST_ALBUMS_EXPECTED_COLUMNS.keys()
        assert set(expected_columns).issubset(columns_after)

View File

@@ -1,135 +0,0 @@
import sqlite3
import unittest
from pathlib import Path
from tempfile import mkdtemp
from shutil import rmtree
import pytest
from routes.migrations.v3_1_1 import MigrationV3_1_1
# Override the autouse credentials fixture from conftest for this module
@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests():
    """Session-scoped no-op that shadows conftest.py's autouse credentials fixture."""
    # No-op to avoid external API calls; this shadows the session autouse fixture in conftest.py
    yield
class TestMigrationV3_1_1(unittest.TestCase):
    """
    Tests the dummy migration from 3.1.1 to 3.1.2, ensuring no changes are made.
    """

    def setUp(self):
        """Create a throwaway directory and populate it with mock 3.1.1 databases."""
        self.temp_dir = Path(mkdtemp())
        self.history_db_path = self.temp_dir / "history" / "download_history.db"
        self.artists_db_path = self.temp_dir / "watch" / "artists.db"
        self.playlists_db_path = self.temp_dir / "watch" / "playlists.db"
        self.accounts_db_path = self.temp_dir / "creds" / "accounts.db"
        self._create_mock_databases()

    def tearDown(self):
        """Remove the temporary directory created in setUp."""
        rmtree(self.temp_dir)

    def _get_db_schema(self, db_path: Path) -> dict:
        """Helper to get the schema of a database.

        Returns a mapping of table name -> set of column names, skipping
        SQLite-internal tables.
        """
        schema = {}
        with sqlite3.connect(db_path) as conn:
            cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table';")
            tables = [row[0] for row in cursor.fetchall() if not row[0].startswith("sqlite_")]
            for table_name in tables:
                info_cursor = conn.execute(f'PRAGMA table_info("{table_name}")')
                # table_info rows: (cid, name, type, notnull, dflt_value, pk)
                schema[table_name] = {row[1] for row in info_cursor.fetchall()}
        return schema

    def _create_mock_databases(self):
        """Creates a set of mock databases with the 3.1.1 schema."""
        # History DB
        self.history_db_path.parent.mkdir(parents=True, exist_ok=True)
        with sqlite3.connect(self.history_db_path) as conn:
            conn.executescript(
                """
                CREATE TABLE download_history (
                    id INTEGER PRIMARY KEY, download_type TEXT, title TEXT, artists TEXT,
                    timestamp REAL, status TEXT, service TEXT, quality_format TEXT,
                    quality_bitrate TEXT, total_tracks INTEGER, successful_tracks INTEGER,
                    failed_tracks INTEGER, skipped_tracks INTEGER, children_table TEXT,
                    task_id TEXT, external_ids TEXT, metadata TEXT, release_date TEXT,
                    genres TEXT, images TEXT, owner TEXT, album_type TEXT,
                    duration_total_ms INTEGER, explicit BOOLEAN
                );
                CREATE TABLE playlist_p1l2a3 (
                    id INTEGER PRIMARY KEY, title TEXT, artists TEXT, album_title TEXT,
                    duration_ms INTEGER, track_number INTEGER, disc_number INTEGER,
                    explicit BOOLEAN, status TEXT, external_ids TEXT, genres TEXT,
                    isrc TEXT, timestamp REAL, position INTEGER, metadata TEXT
                );
                """
            )
        # Watch Artists DB
        self.artists_db_path.parent.mkdir(parents=True, exist_ok=True)
        with sqlite3.connect(self.artists_db_path) as conn:
            conn.executescript(
                """
                CREATE TABLE watched_artists (id TEXT PRIMARY KEY, children_table TEXT);
                INSERT INTO watched_artists (id, children_table) VALUES ('a1b2c3d4', 'artist_a1b2c3d4');
                CREATE TABLE artist_a1b2c3d4 (
                    id TEXT PRIMARY KEY, title TEXT, artists TEXT, album_type TEXT,
                    release_date TEXT, total_tracks INTEGER, external_ids TEXT,
                    images TEXT, album_group TEXT, release_date_precision TEXT,
                    download_task_id TEXT, download_status TEXT,
                    is_fully_downloaded_managed_by_app BOOLEAN
                );
                """
            )
        # Watch Playlists DB
        self.playlists_db_path.parent.mkdir(parents=True, exist_ok=True)
        with sqlite3.connect(self.playlists_db_path) as conn:
            conn.executescript(
                """
                CREATE TABLE watched_playlists (id TEXT PRIMARY KEY, children_table TEXT);
                CREATE TABLE playlist_p1l2a3 (id TEXT PRIMARY KEY, title TEXT);
                """
            )
        # Accounts DB
        self.accounts_db_path.parent.mkdir(parents=True, exist_ok=True)
        with sqlite3.connect(self.accounts_db_path) as conn:
            conn.execute("CREATE TABLE accounts (id TEXT PRIMARY KEY, service TEXT, details TEXT);")

    def test_migration_leaves_schema_unchanged(self):
        """Asserts that the dummy migration makes no changes to any database."""
        # Get initial schemas
        initial_schemas = {
            "history": self._get_db_schema(self.history_db_path),
            "artists": self._get_db_schema(self.artists_db_path),
            "playlists": self._get_db_schema(self.playlists_db_path),
            "accounts": self._get_db_schema(self.accounts_db_path),
        }
        # Run the dummy migration
        migration = MigrationV3_1_1()
        with sqlite3.connect(self.history_db_path) as conn:
            migration.update_history(conn)
        with sqlite3.connect(self.artists_db_path) as conn:
            migration.update_watch_artists(conn)
        with sqlite3.connect(self.playlists_db_path) as conn:
            migration.update_watch_playlists(conn)
        with sqlite3.connect(self.accounts_db_path) as conn:
            migration.update_accounts(conn)
        # Get final schemas
        final_schemas = {
            "history": self._get_db_schema(self.history_db_path),
            "artists": self._get_db_schema(self.artists_db_path),
            "playlists": self._get_db_schema(self.playlists_db_path),
            "accounts": self._get_db_schema(self.accounts_db_path),
        }
        # Assert schemas are identical
        self.assertEqual(initial_schemas, final_schemas)
# Allow running this test module directly with the stdlib unittest runner.
if __name__ == '__main__':
    unittest.main()