Compare commits
34 Commits
| SHA1 |
|---|
| eb6e7bd4b2 |
| 9c1c195353 |
| 773e5a55e1 |
| 46af6b518d |
| 3ff6134712 |
| 5942e6ea36 |
| 9e4b2fcd01 |
| 63afc969c0 |
| bf2f9eda29 |
| 91fead1f51 |
| 6922b4a5da |
| 1016d333cc |
| f9cf953de1 |
| e777dbeba2 |
| 41db454414 |
| fe5e7964fa |
| f800251de1 |
| 0b7c9d0da8 |
| 4476d39d39 |
| 84b93f900e |
| c5e9d0cabc |
| c81df38571 |
| 7b7e32c923 |
| 957928bfa0 |
| 6c6a215e7c |
| 8806e2da34 |
| 1e9271eac4 |
| af1e74294c |
| d83e320a82 |
| 8b90c7b75b |
| 09a623f98b |
| e5aa4f0aef |
| 499a2472e5 |
| 7848c8f218 |
@@ -4,13 +4,14 @@
 ### can leave the defaults as they are.
 ###
 ### If you plan on using for a server,
-### see [insert docs url]
+### see https://spotizerr.rtfd.io
 ###

 # Interface to bind to. Unless you know what you're doing, don't change this
 HOST=0.0.0.0

 # Redis connection (external or internal).
 # Host name 'redis' works with docker-compose.yml setup
 REDIS_HOST=redis
 REDIS_PORT=6379
 REDIS_DB=0
@@ -57,3 +58,8 @@ GOOGLE_CLIENT_SECRET=
 # GitHub SSO (get from GitHub Developer Settings)
 GITHUB_CLIENT_ID=
 GITHUB_CLIENT_SECRET=
+
+# Log level for application logging.
+# Possible values: debug, info, warning, error, critical
+# Set to 'info' or 'warning' for general use. Use 'debug' for troubleshooting.
+LOG_LEVEL=info
.github/workflows/pr-build.yml (vendored, new file, 60 lines)
@@ -0,0 +1,60 @@
name: PR Dev/Test Container

on:
  pull_request:
    types: [opened, synchronize, reopened]
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'Pull request number (optional, for manual runs)'
        required: false
      branch:
        description: 'Branch to build (optional, defaults to PR head or main)'
        required: false

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.branch || github.head_ref || github.ref }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GHCR
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Extract Docker metadata
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=raw,value=dev-pr-${{ github.event.inputs.pr_number || github.event.pull_request.number }}

      # Build and push multi-arch dev image
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
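A minimal sketch (not part of the diff) of how the `workflow_dispatch` trigger above can be invoked through the GitHub REST API; `OWNER/REPO`, the inputs, and the token variable are placeholders, not values taken from this repository:

```python
import os
import httpx

# Dispatch the pr-build.yml workflow manually. The workflow file name acts as
# the workflow_id in the REST API; "ref" is the branch the workflow file lives on.
resp = httpx.post(
    "https://api.github.com/repos/OWNER/REPO/actions/workflows/pr-build.yml/dispatches",
    headers={
        "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
        "Accept": "application/vnd.github+json",
    },
    json={"ref": "main", "inputs": {"pr_number": "123", "branch": "my-feature"}},
)
resp.raise_for_status()  # GitHub returns 204 No Content on success
```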
@@ -8,11 +8,14 @@ COPY spotizerr-ui/. .
 RUN pnpm build

 # Stage 2: Python dependencies builder (create relocatable deps dir)
-FROM python:3.11-slim AS py-deps
+FROM python:3.11-alpine AS py-deps
 WORKDIR /app
 COPY requirements.txt .
 COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/
-RUN uv pip install --target /python -r requirements.txt
+RUN apk add --no-cache git; \
+    uv pip install --target /python -r requirements.txt; \
+    uv pip install --target /python "git+https://git.jdm17.ru/JDM170/librespot-spotizerr-dev.git@main"; \
+    uv pip install --target /python "git+https://git.jdm17.ru/JDM170/deezspot-spotizerr-dev.git@main"

 # Stage 3: Fetch static ffmpeg/ffprobe binaries
 FROM debian:stable-slim AS ffmpeg
@@ -27,6 +27,10 @@ If you self-host a music server with other users than yourself, you almost certa
 <img width="1588" height="994" alt="image" src="https://github.com/user-attachments/assets/e34d7dbb-29e3-4d75-bcbd-0cee03fa57dc" />
 </details>

 ## How do I start?

 Docs are available at: https://spotizerr.rtfd.io
+
+### Common Issues
+
+**Downloads not starting?**
app.py (81 changed lines)
@@ -13,11 +13,12 @@ import redis
 import socket
 from urllib.parse import urlparse
 from dotenv import load_dotenv

 load_dotenv()

 # Parse log level from environment as early as possible, default to INFO for visibility
 log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper()
-log_level = getattr(logging, log_level_str, logging.INFO)
+log_level = getattr(logging, log_level_str, logging.WARNING)

 # Set up a very basic logging config immediately, so early logs (including import/migration errors) are visible
 logging.basicConfig(
@@ -50,32 +51,20 @@ if _umask_value:
         # Defer logging setup; avoid failing on invalid UMASK
         pass


-# Import and initialize routes (this will start the watch manager)
-from routes.auth.credentials import router as credentials_router
-from routes.auth.auth import router as auth_router
-from routes.content.album import router as album_router
-from routes.content.artist import router as artist_router
-from routes.content.track import router as track_router
-from routes.content.playlist import router as playlist_router
-from routes.content.bulk_add import router as bulk_add_router
-from routes.core.search import router as search_router
-from routes.core.history import router as history_router
-from routes.system.progress import router as prgs_router
-from routes.system.config import router as config_router
-
-
-# Import Celery configuration and manager
-from routes.utils.celery_manager import celery_manager
-from routes.utils.celery_config import REDIS_URL
-
-# Import authentication system
-from routes.auth import AUTH_ENABLED
-from routes.auth.middleware import AuthMiddleware
-
-# Import watch manager controls (start/stop) without triggering side effects
-from routes.utils.watch.manager import start_watch_manager, stop_watch_manager
+from routes.auth.credentials import router as credentials_router  # noqa: E402
+from routes.auth.auth import router as auth_router  # noqa: E402
+from routes.content.album import router as album_router  # noqa: E402
+from routes.content.artist import router as artist_router  # noqa: E402
+from routes.content.track import router as track_router  # noqa: E402
+from routes.content.playlist import router as playlist_router  # noqa: E402
+from routes.content.bulk_add import router as bulk_add_router  # noqa: E402
+from routes.core.search import router as search_router  # noqa: E402
+from routes.core.history import router as history_router  # noqa: E402
+from routes.system.progress import router as prgs_router  # noqa: E402
+from routes.system.config import router as config_router  # noqa: E402
+
+from routes.utils.celery_config import REDIS_URL  # noqa: E402


 # Configure application-wide logging
@@ -152,7 +141,6 @@ def setup_logging():


 def check_redis_connection():
     """Check if Redis is available and accessible"""
-    from routes.utils.celery_config import REDIS_URL

     if not REDIS_URL:
         logging.error("REDIS_URL is not configured. Please check your environment.")
@@ -199,7 +187,9 @@ async def lifespan(app: FastAPI):
     # Startup
     setup_logging()
     effective_level = logging.getLevelName(log_level)
-    logging.getLogger(__name__).info(f"Logging system fully initialized (lifespan startup). Effective log level: {effective_level}")
+    logging.getLogger(__name__).info(
+        f"Logging system fully initialized (lifespan startup). Effective log level: {effective_level}"
+    )

     # Run migrations before initializing services
     try:
@@ -226,8 +216,19 @@ async def lifespan(app: FastAPI):
     try:
         from routes.utils.celery_manager import celery_manager

-        celery_manager.start()
-        logging.info("Celery workers started successfully")
+        start_workers = os.getenv("START_EMBEDDED_WORKERS", "true").lower() in (
+            "1",
+            "true",
+            "yes",
+            "on",
+        )
+        if start_workers:
+            celery_manager.start()
+            logging.info("Celery workers started successfully")
+        else:
+            logging.info(
+                "START_EMBEDDED_WORKERS is false; skipping embedded Celery workers startup."
+            )
     except Exception as e:
         logging.error(f"Failed to start Celery workers: {e}")
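A minimal sketch (illustration only, not part of the diff) of the truthy-set parsing introduced above: the default is `"true"`, and any value outside `("1", "true", "yes", "on")` disables the embedded workers.

```python
# Matches the lifespan logic: the env value is lowercased and compared
# against an explicit allow-list of truthy strings.
for value in ("true", "ON", "1", "0", "off", "false"):
    enabled = value.lower() in ("1", "true", "yes", "on")
    print(f"START_EMBEDDED_WORKERS={value!r} -> start embedded workers: {enabled}")
```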
@@ -257,8 +258,19 @@ async def lifespan(app: FastAPI):
     try:
         from routes.utils.celery_manager import celery_manager

-        celery_manager.stop()
-        logging.info("Celery workers stopped")
+        start_workers = os.getenv("START_EMBEDDED_WORKERS", "true").lower() in (
+            "1",
+            "true",
+            "yes",
+            "on",
+        )
+        if start_workers:
+            celery_manager.stop()
+            logging.info("Celery workers stopped")
+        else:
+            logging.info(
+                "START_EMBEDDED_WORKERS is false; no embedded Celery workers to stop."
+            )
     except Exception as e:
         logging.error(f"Error stopping Celery workers: {e}")
@@ -295,17 +307,6 @@ def create_app():
         logging.warning(f"Auth system initialization failed or unavailable: {e}")

     # Register routers with URL prefixes
-    from routes.auth.auth import router as auth_router
-    from routes.system.config import router as config_router
-    from routes.core.search import router as search_router
-    from routes.auth.credentials import router as credentials_router
-    from routes.content.album import router as album_router
-    from routes.content.track import router as track_router
-    from routes.content.playlist import router as playlist_router
-    from routes.content.bulk_add import router as bulk_add_router
-    from routes.content.artist import router as artist_router
-    from routes.system.progress import router as prgs_router
-    from routes.core.history import router as history_router

     app.include_router(auth_router, prefix="/api/auth", tags=["auth"])
@@ -449,4 +450,6 @@ if __name__ == "__main__":
|
||||
except ValueError:
|
||||
port = 7171
|
||||
|
||||
uvicorn.run(app, host=host, port=port, log_level=log_level_str.lower(), access_log=False)
|
||||
uvicorn.run(
|
||||
app, host=host, port=port, log_level=log_level_str.lower(), access_log=False
|
||||
)
|
||||
|
||||
@@ -1,7 +1,6 @@
 fastapi==0.116.1
 uvicorn[standard]==0.35.0
 celery==5.5.3
-deezspot-spotizerr==3.1.0
 httpx==0.28.1
 bcrypt==4.2.1
 PyJWT==2.10.1
@@ -1,10 +1,18 @@
 import re
 from typing import List
-from fastapi import APIRouter
+from fastapi import APIRouter, Request, Depends
 from pydantic import BaseModel
 import logging

-# Assuming these imports are available for queue management and Spotify info
+# Import authentication dependencies
+from routes.auth.middleware import require_auth_from_state, User
+
+# Import queue management and Spotify info
+from routes.utils.celery_queue_manager import download_queue_manager
+
+# Import authentication dependencies
+
+# Import queue management and Spotify info
 from routes.utils.get_info import (
     get_client,
     get_track,
@@ -12,7 +20,6 @@ from routes.utils.get_info import (
     get_playlist,
     get_artist,
 )
-from routes.utils.celery_tasks import download_track, download_album, download_playlist

 router = APIRouter()
 logger = logging.getLogger(__name__)
@@ -23,7 +30,11 @@ class BulkAddLinksRequest(BaseModel):


 @router.post("/bulk-add-spotify-links")
-async def bulk_add_spotify_links(request: BulkAddLinksRequest):
+async def bulk_add_spotify_links(
+    request: BulkAddLinksRequest,
+    req: Request,
+    current_user: User = Depends(require_auth_from_state),
+):
     added_count = 0
     failed_links = []
     total_links = len(request.links)
@@ -34,7 +45,7 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest):
         # but still handle potential errors during info retrieval or unsupported types
         # Extract type and ID from the link directly using regex
         match = re.match(
-            r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?",
+            r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?",
             link,
         )
         if not match:
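A quick sketch (illustration only) of the updated pattern against typical link shapes; the example links are hypothetical:

```python
import re

# The optional (?:/[a-z]{2})? segment tolerates a two-letter locale path component.
pattern = r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?"

for link in (
    "https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC",
    "https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC?si=abc123",
    "https://open.spotify.com/pt/album/6JWc4iAiJ9FjyK0B59ABb4",
)
    :=  # (see loop below)
```

Corrected, runnable form of the same check:

```python
import re

pattern = r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?"

for link in (
    "https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC",
    "https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC?si=abc123",
    "https://open.spotify.com/pt/album/6JWc4iAiJ9FjyK0B59ABb4",
):
    m = re.match(pattern, link)
    if m:
        print(m.group(1), m.group(2))  # e.g. "track 4uLU6hMCjMI75M1A2tKUQC"
    else:
        print("no match:", link)
```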
@@ -46,6 +57,12 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest):

         spotify_type = match.group(1)
         spotify_id = match.group(2)
+        logger.debug(
+            f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}"
+        )
+        logger.debug(
+            f"Extracted from link: spotify_type={spotify_type}, spotify_id={spotify_id}"
+        )

         try:
             # Get basic info to confirm existence and get name/artist
@@ -80,46 +97,33 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest):
             # Construct URL for the download task
             spotify_url = f"https://open.spotify.com/{spotify_type}/{spotify_id}"

-            # Add to Celery queue based on type
-            if spotify_type == "track":
-                download_track.delay(
-                    url=spotify_url,
-                    spotify_id=spotify_id,
-                    type=spotify_type,
-                    name=item_name,
-                    artist=artist_name,
-                    download_type="track",
-                )
-            elif spotify_type == "album":
-                download_album.delay(
-                    url=spotify_url,
-                    spotify_id=spotify_id,
-                    type=spotify_type,
-                    name=item_name,
-                    artist=artist_name,
-                    download_type="album",
-                )
-            elif spotify_type == "playlist":
-                download_playlist.delay(
-                    url=spotify_url,
-                    spotify_id=spotify_id,
-                    type=spotify_type,
-                    name=item_name,
-                    artist=artist_name,
-                    download_type="playlist",
-                )
-            else:
-                logger.warning(
-                    f"Unsupported Spotify type for download: {spotify_type} for link: {link}"
-                )
-                failed_links.append(link)
-                continue
-
-            added_count += 1
-            logger.debug(
-                f"Added {added_count + 1}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue."
-            )
+            # Prepare task data for the queue manager
+            task_data = {
+                "download_type": spotify_type,
+                "url": spotify_url,
+                "name": item_name,
+                "artist": artist_name,
+                "spotify_id": spotify_id,
+                "type": spotify_type,
+                "username": current_user.username,
+                "orig_request": dict(req.query_params),
+            }
+
+            # Add to download queue using the queue manager
+            task_id = download_queue_manager.add_task(task_data)
+
+            if task_id:
+                added_count += 1
+                logger.debug(
+                    f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}."
+                )
+            else:
+                logger.warning(
+                    f"Failed to add {spotify_type} '{item_name}' ({spotify_id}) to queue."
+                )
+                failed_links.append(link)
+                continue

         except Exception as e:
             logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True)
             failed_links.append(link)
@@ -205,6 +205,9 @@ async def get_playlist_info(
             playlist_info = get_playlist(client, spotify_id, expand_items=False)
         finally:
             pass
+        # Ensure id field is present (librespot sometimes omits it)
+        if playlist_info and "id" not in playlist_info:
+            playlist_info["id"] = spotify_id

         return JSONResponse(content=playlist_info, status_code=200)
     except Exception as e:
@@ -233,41 +236,70 @@ async def add_to_watchlist(
     }

     # Fetch playlist details from Spotify to populate our DB (metadata only)
-    cfg = get_config_params() or {}
-    active_account = cfg.get("spotify")
-    if not active_account:
-        raise HTTPException(
-            status_code=500,
-            detail={"error": "Active Spotify account not set in configuration."},
-        )
-    blob_path = get_spotify_blob_path(active_account)
-    if not blob_path.exists():
-        raise HTTPException(
-            status_code=500,
-            detail={
-                "error": f"Spotify credentials blob not found for account '{active_account}'"
-            },
-        )
-
-    client = get_client()
-    try:
-        playlist_data = get_playlist(
-            client, playlist_spotify_id, expand_items=False
-        )
-    finally:
-        pass
-
-    if not playlist_data or "id" not in playlist_data:
+    # Use shared helper and add a safe fallback for missing 'id'
+    try:
+        from routes.utils.get_info import get_playlist_metadata
+
+        playlist_data = get_playlist_metadata(playlist_spotify_id) or {}
+    except Exception as e:
+        logger.error(
+            f"Failed to fetch playlist metadata for {playlist_spotify_id}: {e}",
+            exc_info=True,
+        )
+        raise HTTPException(
+            status_code=500,
+            detail={
+                "error": f"Failed to fetch metadata for playlist {playlist_spotify_id}: {str(e)}"
+            },
+        )
+
+    # Some Librespot responses may omit 'id' even when the payload is valid.
+    # Fall back to the path parameter to avoid false negatives.
+    if playlist_data and "id" not in playlist_data:
+        logger.warning(
+            f"Playlist metadata for {playlist_spotify_id} missing 'id'. Injecting from path param. Keys: {list(playlist_data.keys())}"
+        )
+        try:
+            playlist_data["id"] = playlist_spotify_id
+        except Exception:
+            pass
+
+    # Validate minimal fields needed downstream and normalize shape to be resilient to client changes
+    if not playlist_data or not playlist_data.get("name"):
         logger.error(
-            f"Could not fetch details for playlist {playlist_spotify_id} from Spotify."
+            f"Insufficient playlist metadata for {playlist_spotify_id}. Keys present: {list(playlist_data.keys()) if isinstance(playlist_data, dict) else type(playlist_data)}"
         )
         raise HTTPException(
             status_code=404,
             detail={
-                "error": f"Could not fetch details for playlist {playlist_spotify_id} from Spotify."
+                "error": f"Could not fetch sufficient details for playlist {playlist_spotify_id} from Spotify."
             },
         )

+    # Ensure 'owner' is a dict with at least id/display_name to satisfy DB layer
+    owner = playlist_data.get("owner")
+    if not isinstance(owner, dict):
+        owner = {}
+    if "id" not in owner or not owner.get("id"):
+        owner["id"] = "unknown_owner"
+    if "display_name" not in owner or not owner.get("display_name"):
+        owner["display_name"] = owner.get("id", "Unknown Owner")
+    playlist_data["owner"] = owner
+
+    # Ensure 'tracks' is a dict with a numeric 'total'
+    tracks = playlist_data.get("tracks")
+    if not isinstance(tracks, dict):
+        tracks = {}
+    total = tracks.get("total")
+    if not isinstance(total, int):
+        items = tracks.get("items")
+        if isinstance(items, list):
+            total = len(items)
+        else:
+            total = 0
+    tracks["total"] = total
+    playlist_data["tracks"] = tracks
+
     add_playlist_db(playlist_data)  # This also creates the tracks table

     logger.info(
@@ -40,6 +40,90 @@ NOTIFY_PARAMETERS = [
 ]


+# Helper functions to get final merged configs (simulate save without actually saving)
+def get_final_main_config(new_config_data: dict) -> dict:
+    """Returns the final main config that will be saved after merging with new_config_data."""
+    try:
+        # Load current or default config
+        existing_config = {}
+        if MAIN_CONFIG_FILE_PATH.exists():
+            with open(MAIN_CONFIG_FILE_PATH, "r") as f_read:
+                existing_config = json.load(f_read)
+        else:
+            existing_config = DEFAULT_MAIN_CONFIG.copy()
+
+        # Update with new data
+        for key, value in new_config_data.items():
+            existing_config[key] = value
+
+        # Migration: unify legacy keys to camelCase
+        _migrate_legacy_keys_inplace(existing_config)
+
+        # Ensure all default keys are still there
+        for default_key, default_value in DEFAULT_MAIN_CONFIG.items():
+            if default_key not in existing_config:
+                existing_config[default_key] = default_value
+
+        return existing_config
+    except Exception as e:
+        logger.error(f"Error creating final main config: {e}", exc_info=True)
+        return DEFAULT_MAIN_CONFIG.copy()
+
+
+def get_final_watch_config(new_watch_config_data: dict) -> dict:
+    """Returns the final watch config that will be saved after merging with new_watch_config_data."""
+    try:
+        # Load current main config
+        main_cfg: dict = {}
+        if WATCH_MAIN_CONFIG_FILE_PATH.exists():
+            with open(WATCH_MAIN_CONFIG_FILE_PATH, "r") as f:
+                main_cfg = json.load(f) or {}
+        else:
+            main_cfg = DEFAULT_MAIN_CONFIG.copy()
+
+        # Get and update watch config
+        watch_value = main_cfg.get("watch")
+        current_watch = (
+            watch_value.copy() if isinstance(watch_value, dict) else {}
+        ).copy()
+        current_watch.update(new_watch_config_data or {})
+
+        # Ensure defaults
+        for k, v in DEFAULT_WATCH_CONFIG.items():
+            if k not in current_watch:
+                current_watch[k] = v
+
+        return current_watch
+    except Exception as e:
+        logger.error(f"Error creating final watch config: {e}", exc_info=True)
+        return DEFAULT_WATCH_CONFIG.copy()
+
+
+def get_final_main_config_for_watch(new_watch_config_data: dict) -> dict:
+    """Returns the final main config when updating watch config."""
+    try:
+        # Load current main config
+        main_cfg: dict = {}
+        if WATCH_MAIN_CONFIG_FILE_PATH.exists():
+            with open(WATCH_MAIN_CONFIG_FILE_PATH, "r") as f:
+                main_cfg = json.load(f) or {}
+        else:
+            main_cfg = DEFAULT_MAIN_CONFIG.copy()
+
+        # Migrate legacy keys
+        _migrate_legacy_keys_inplace(main_cfg)
+
+        # Ensure all default keys are still there
+        for default_key, default_value in DEFAULT_MAIN_CONFIG.items():
+            if default_key not in main_cfg:
+                main_cfg[default_key] = default_value
+
+        return main_cfg
+    except Exception as e:
+        logger.error(f"Error creating final main config for watch: {e}", exc_info=True)
+        return DEFAULT_MAIN_CONFIG.copy()
+
+
 # Helper function to check if credentials exist for a service
 def has_credentials(service: str) -> bool:
     """Check if credentials exist for the specified service (spotify or deezer)."""
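A hypothetical usage sketch of the merge helpers added above: validation runs against the config as it would look after saving (defaults filled, legacy keys migrated), not against the raw partial payload.

```python
# Illustration only; the incoming payload and values are made up.
incoming = {"maxConcurrentDownloads": 5, "watch": {"enabled": True}}

merged_main = get_final_main_config(incoming)             # merged + defaults + key migration
merged_watch = get_final_watch_config(incoming["watch"])  # watch defaults filled in

is_valid, error = validate_config(incoming)               # internally validates the merged view
if not is_valid:
    print(f"Config rejected: {error}")
```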
@@ -68,8 +152,11 @@ def validate_config(config_data: dict, watch_config: dict = None) -> tuple[bool,
|
||||
Returns (is_valid, error_message).
|
||||
"""
|
||||
try:
|
||||
# Get current watch config if not provided
|
||||
# Get final merged watch config for validation
|
||||
if watch_config is None:
|
||||
if "watch" in config_data:
|
||||
watch_config = get_final_watch_config(config_data["watch"])
|
||||
else:
|
||||
watch_config = get_watch_config_http()
|
||||
|
||||
# Ensure realTimeMultiplier is a valid integer in range 0..10 if provided
|
||||
@@ -137,9 +224,9 @@ def validate_watch_config(
     Returns (is_valid, error_message).
     """
     try:
-        # Get current main config if not provided
+        # Get final merged main config for validation
         if main_config is None:
-            main_config = get_config()
+            main_config = get_final_main_config_for_watch(watch_data)

         # Check if trying to enable watch without download methods
         if watch_data.get("enabled", False):
@@ -8,7 +8,7 @@ from typing import Set, Optional

 import redis
 import threading
-from routes.utils.celery_config import REDIS_URL
+from routes.utils.celery_config import REDIS_URL, get_config_params

 from routes.utils.celery_tasks import (
     get_task_info,
@@ -37,6 +37,11 @@ router = APIRouter()
 class SSEBroadcaster:
     def __init__(self):
         self.clients: Set[asyncio.Queue] = set()
+        # Per-task throttling/batching/deduplication state
+        self._task_state = {}  # task_id -> dict with last_sent, last_event, last_send_time, scheduled_handle
+        # Load configurable interval
+        config = get_config_params()
+        self.sse_update_interval = float(config.get("sseUpdateIntervalSeconds", 1))

     async def add_client(self, queue: asyncio.Queue):
         """Add a new SSE client"""
@@ -49,43 +54,105 @@ class SSEBroadcaster:
         logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})")

     async def broadcast_event(self, event_data: dict):
-        """Broadcast an event to all connected clients"""
-        logger.debug(
-            f"SSE Broadcaster: Attempting to broadcast to {len(self.clients)} clients"
-        )
-
+        """
+        Throttle, batch, and deduplicate SSE events per task.
+        Only emit at most 1 update/sec per task, aggregate within window, suppress redundant updates.
+        """
         if not self.clients:
             logger.debug("SSE Broadcaster: No clients connected, skipping broadcast")
             return
+        # Defensive: always work with a list of tasks
+        tasks = event_data.get("tasks", [])
+        if not isinstance(tasks, list):
+            tasks = [tasks]

-        # Add global task counts right before broadcasting - this is the single source of truth
+        # For each task, throttle/batch/dedupe
+        for task in tasks:
+            task_id = task.get("task_id")
+            if not task_id:
+                continue
+
+            now = time.time()
+            state = self._task_state.setdefault(task_id, {
+                "last_sent": None,
+                "last_event": None,
+                "last_send_time": 0,
+                "scheduled_handle": None,
+            })
+
+            # Deduplication: if event is identical to last sent, skip
+            if state["last_sent"] is not None and self._events_equal(state["last_sent"], task):
+                logger.debug(f"SSE: Deduped event for task {task_id}")
+                continue
+
+            # Throttling: if within interval, batch (store as last_event, schedule send)
+            elapsed = now - state["last_send_time"]
+            if elapsed < self.sse_update_interval:
+                state["last_event"] = task
+                if state["scheduled_handle"] is None:
+                    delay = self.sse_update_interval - elapsed
+                    loop = asyncio.get_event_loop()
+                    state["scheduled_handle"] = loop.call_later(
+                        delay, lambda: asyncio.create_task(self._send_batched_event(task_id))
+                    )
+                continue
+
+            # Otherwise, send immediately
+            await self._send_event(task_id, task)
+            state["last_send_time"] = now
+            state["last_sent"] = task
+            state["last_event"] = None
+            if state["scheduled_handle"]:
+                state["scheduled_handle"].cancel()
+                state["scheduled_handle"] = None
+
+    async def _send_batched_event(self, task_id):
+        state = self._task_state.get(task_id)
+        if not state or not state["last_event"]:
+            return
+        await self._send_event(task_id, state["last_event"])
+        state["last_send_time"] = time.time()
+        state["last_sent"] = state["last_event"]
+        state["last_event"] = None
+        state["scheduled_handle"] = None
+
+    async def _send_event(self, task_id, task):
+        # Compose event_data for this task
+        event_data = {
+            "tasks": [task],
+            "current_timestamp": time.time(),
+            "change_type": "update",
+        }
         enhanced_event_data = add_global_task_counts_to_event(event_data.copy())

         event_json = json.dumps(enhanced_event_data)
         sse_data = f"data: {event_json}\n\n"

         logger.debug(
             f"SSE Broadcaster: Broadcasting event: {enhanced_event_data.get('change_type', 'unknown')} with {enhanced_event_data.get('active_tasks', 0)} active tasks"
         )

         # Send to all clients, remove disconnected ones
         disconnected = set()
         sent_count = 0
         for client_queue in self.clients.copy():
             try:
                 await client_queue.put(sse_data)
                 sent_count += 1
                 logger.debug("SSE: Successfully sent to client queue")
             except Exception as e:
                 logger.error(f"SSE: Failed to send to client: {e}")
                 disconnected.add(client_queue)

         # Clean up disconnected clients
         for client in disconnected:
             self.clients.discard(client)
         logger.debug(
-            f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients"
+            f"SSE Broadcaster: Sent throttled/batched event for task {task_id} to {sent_count} clients"
         )

+    def _events_equal(self, a, b):
+        # Compare two task dicts for deduplication (ignore timestamps)
+        if not isinstance(a, dict) or not isinstance(b, dict):
+            return False
+        a_copy = dict(a)
+        b_copy = dict(b)
+        a_copy.pop("timestamp", None)
+        b_copy.pop("timestamp", None)
+        return a_copy == b_copy


 # Global broadcaster instance
 sse_broadcaster = SSEBroadcaster()
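A minimal sketch of a client consuming the throttled stream. The endpoint path is an assumption for illustration (the actual route prefix for the progress router is not shown in this diff); httpx is already in requirements.txt.

```python
import json
import httpx

# Stream SSE frames and print per-task updates. Each frame is a
# "data: {...}\n\n" payload produced by _send_event above.
with httpx.Client(timeout=None) as client:
    with client.stream("GET", "http://localhost:7171/api/prgs/stream") as resp:
        for line in resp.iter_lines():
            if line.startswith("data: "):
                event = json.loads(line[len("data: "):])
                for task in event.get("tasks", []):
                    print(task.get("task_id"), event.get("change_type"))
```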
@@ -105,6 +172,10 @@ def start_sse_redis_subscriber():
         pubsub.subscribe("sse_events")
         logger.info("SSE Redis Subscriber: Started listening for events")

+        # Create a single event loop for this thread and reuse it
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+
         for message in pubsub.listen():
             if message["type"] == "message":
                 try:
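A sketch of the single-loop pattern introduced above: one event loop per subscriber thread, reused for every message, instead of creating and closing a fresh loop per event. `iter_messages()` and `handle()` are hypothetical stand-ins for the pubsub listener and the per-event coroutine.

```python
import asyncio

def subscriber_thread(iter_messages, handle):
    # Create the loop once, bind it to this thread, and reuse it.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        for msg in iter_messages():
            loop.run_until_complete(handle(msg))
    finally:
        loop.close()
```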
@@ -121,22 +192,21 @@ def start_sse_redis_subscriber():
                         # Transform callback data into standardized update format expected by frontend
                         standardized = standardize_incoming_event(event_data)
                         if standardized:
-                            loop = asyncio.new_event_loop()
-                            asyncio.set_event_loop(loop)
-                            try:
-                                loop.run_until_complete(
-                                    sse_broadcaster.broadcast_event(standardized)
-                                )
-                                logger.debug(
-                                    f"SSE Redis Subscriber: Broadcasted standardized progress update to {len(sse_broadcaster.clients)} clients"
-                                )
-                            finally:
-                                loop.close()
+                            loop.run_until_complete(
+                                sse_broadcaster.broadcast_event(standardized)
+                            )
+                            logger.debug(
+                                f"SSE Redis Subscriber: Broadcasted standardized progress update to {len(sse_broadcaster.clients)} clients"
+                            )
                     elif event_type == "summary_update":
                         # Task summary update - use standardized trigger
-                        loop = asyncio.new_event_loop()
-                        asyncio.set_event_loop(loop)
+                        # Short-circuit if task no longer exists to avoid expensive processing
                         try:
+                            if not get_task_info(task_id):
+                                logger.debug(
+                                    f"SSE Redis Subscriber: summary_update for missing task {task_id}, skipping"
+                                )
+                            else:
                                 loop.run_until_complete(
                                     trigger_sse_update(
                                         task_id, event_data.get("reason", "update")
@@ -145,23 +215,21 @@ def start_sse_redis_subscriber():
                                 logger.debug(
                                     f"SSE Redis Subscriber: Processed summary update for {task_id}"
                                 )
-                        finally:
-                            loop.close()
+                        except Exception as _e:
+                            logger.error(
+                                f"SSE Redis Subscriber: Error handling summary_update for {task_id}: {_e}",
+                                exc_info=True,
+                            )
                     else:
                         # Unknown event type - attempt to standardize and broadcast
                         standardized = standardize_incoming_event(event_data)
                         if standardized:
-                            loop = asyncio.new_event_loop()
-                            asyncio.set_event_loop(loop)
-                            try:
-                                loop.run_until_complete(
-                                    sse_broadcaster.broadcast_event(standardized)
-                                )
-                                logger.debug(
-                                    f"SSE Redis Subscriber: Broadcasted standardized {event_type} to {len(sse_broadcaster.clients)} clients"
-                                )
-                            finally:
-                                loop.close()
+                            loop.run_until_complete(
+                                sse_broadcaster.broadcast_event(standardized)
+                            )
+                            logger.debug(
+                                f"SSE Redis Subscriber: Broadcasted standardized {event_type} to {len(sse_broadcaster.clients)} clients"
+                            )

             except Exception as e:
                 logger.error(
@@ -315,7 +383,7 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"):
|
||||
# Find the specific task that changed
|
||||
task_info = get_task_info(task_id)
|
||||
if not task_info:
|
||||
logger.warning(f"SSE: Task {task_id} not found for update")
|
||||
logger.debug(f"SSE: Task {task_id} not found for update")
|
||||
return
|
||||
|
||||
last_status = get_last_task_status(task_id)
|
||||
|
||||
@@ -8,6 +8,7 @@ from routes.utils.credentials import (
 )
 from routes.utils.celery_queue_manager import get_existing_task_id
 from routes.utils.errors import DuplicateDownloadError
+from routes.utils.celery_config import get_config_params


 def download_album(
@@ -98,6 +99,7 @@ def download_album(
             spotify_client_id=global_spotify_client_id,
             spotify_client_secret=global_spotify_client_secret,
             progress_callback=progress_callback,
+            spotify_credentials_path=str(get_spotify_blob_path(main)),
         )
         dl.download_albumspo(
             link_album=url,  # Spotify URL
@@ -257,6 +259,11 @@ def download_album(
             spotify_client_id=global_spotify_client_id,  # Global Spotify keys
             spotify_client_secret=global_spotify_client_secret,  # Global Spotify keys
             progress_callback=progress_callback,
+            spotify_credentials_path=(
+                str(get_spotify_blob_path(get_config_params().get("spotify")))
+                if get_config_params().get("spotify")
+                else None
+            ),
         )
         dl.download_albumdee(  # Deezer URL, download via Deezer
             link_album=url,
@@ -4,7 +4,7 @@ import logging
 from routes.utils.celery_queue_manager import download_queue_manager
 from routes.utils.credentials import get_credential, _get_global_spotify_api_creds
 from routes.utils.errors import DuplicateDownloadError
-from routes.utils.get_info import get_spotify_info
+from routes.utils.get_info import get_client, get_artist

 from deezspot.libutils.utils import get_ids, link_is_valid
@@ -77,10 +77,26 @@ def get_artist_discography(
         log_json({"status": "error", "message": msg})
         raise ValueError(msg)

+    # Fetch artist once and return grouped arrays without pagination
     try:
-        # Use the optimized get_spotify_info function
-        discography = get_spotify_info(artist_id, "artist_discography")
-        return discography
+        client = get_client()
+        artist_obj = get_artist(client, artist_id)
+
+        # Normalize groups as arrays of IDs; tolerate dict shape from some sources
+        def normalize_group(val):
+            if isinstance(val, list):
+                return val
+            if isinstance(val, dict):
+                items = val.get("items") or val.get("releases") or []
+                return items if isinstance(items, list) else []
+            return []
+
+        return {
+            "album_group": normalize_group(artist_obj.get("album_group")),
+            "single_group": normalize_group(artist_obj.get("single_group")),
+            "compilation_group": normalize_group(artist_obj.get("compilation_group")),
+            "appears_on_group": normalize_group(artist_obj.get("appears_on_group")),
+        }
     except Exception as fetch_error:
         msg = f"An error occurred while fetching the discography: {fetch_error}"
         log_json({"status": "error", "message": msg})
@@ -120,60 +136,54 @@ def download_artist_albums(url, album_type=None, request_args=None, username=Non
         raise ValueError(error_msg)

     # Get watch config to determine which album groups to download
     valid_groups = {"album", "single", "compilation", "appears_on"}
     if album_type and isinstance(album_type, str):
         requested = [g.strip().lower() for g in album_type.split(",") if g.strip()]
         allowed_groups = [g for g in requested if g in valid_groups]
         if not allowed_groups:
             logger.warning(
                 f"album_type query provided but no valid groups found in {requested}; falling back to watch config."
             )
     if not album_type or not isinstance(album_type, str) or not allowed_groups:
         watch_config = get_watch_config()
         allowed_groups = [
             g.lower()
             for g in watch_config.get("watchedArtistAlbumGroup", ["album", "single"])
             if g.lower() in valid_groups
         ]
     logger.info(
-        f"Filtering albums by watchedArtistAlbumGroup setting (exact album_group match): {allowed_groups}"
+        f"Filtering albums by album_type/watch setting (exact album_group match): {allowed_groups}"
     )

-    # Fetch all artist albums with pagination
+    # Fetch artist and aggregate group arrays without pagination
+    client = get_client()
+    artist_obj = get_artist(client, artist_id)
+
+    def normalize_group(val):
+        if isinstance(val, list):
+            return val
+        if isinstance(val, dict):
+            items = val.get("items") or val.get("releases") or []
+            return items if isinstance(items, list) else []
+        return []
+
+    group_key_to_type = [
+        ("album_group", "album"),
+        ("single_group", "single"),
+        ("compilation_group", "compilation"),
+        ("appears_on_group", "appears_on"),
+    ]
+
     all_artist_albums = []
-    offset = 0
-    limit = 50  # Spotify API limit for artist albums
-
-    logger.info(f"Fetching all albums for artist ID: {artist_id} with pagination")
-
-    while True:
-        logger.debug(
-            f"Fetching albums for {artist_id}. Limit: {limit}, Offset: {offset}"
-        )
-        artist_data_page = get_spotify_info(
-            artist_id, "artist_discography", limit=limit, offset=offset
-        )
-
-        if not artist_data_page or not isinstance(artist_data_page.get("items"), list):
-            logger.warning(
-                f"No album items found or invalid format for artist {artist_id} at offset {offset}. Response: {artist_data_page}"
-            )
-            break
-
-        current_page_albums = artist_data_page.get("items", [])
-        if not current_page_albums:
-            logger.info(
-                f"No more albums on page for artist {artist_id} at offset {offset}. Total fetched so far: {len(all_artist_albums)}."
-            )
-            break
-
-        logger.debug(
-            f"Fetched {len(current_page_albums)} albums on current page for artist {artist_id}."
-        )
-        all_artist_albums.extend(current_page_albums)
-
-        # Check if Spotify indicates a next page URL
-        if artist_data_page.get("next"):
-            offset += limit  # Increment offset by the limit used for the request
-        else:
-            logger.info(
-                f"No next page URL for artist {artist_id}. Pagination complete. Total albums fetched: {len(all_artist_albums)}."
-            )
-            break
-
-    if not all_artist_albums:
-        raise ValueError(
-            f"Failed to retrieve artist data or no albums found for artist ID {artist_id}"
-        )
+    for key, group_type in group_key_to_type:
+        ids = normalize_group(artist_obj.get(key))
+        # transform to minimal album objects with album_group tagging for filtering parity
+        for album_id in ids:
+            all_artist_albums.append(
+                {
+                    "id": album_id,
+                    "album_group": group_type,
+                }
+            )

     # Filter albums based on the allowed types using album_group field (like in manager.py)
@@ -201,13 +211,23 @@ def download_artist_albums(url, album_type=None, request_args=None, username=Non
     duplicate_albums = []

     for album in filtered_albums:
-        album_url = album.get("external_urls", {}).get("spotify", "")
-        album_name = album.get("name", "Unknown Album")
-        album_artists = album.get("artists", [])
+        album_id = album.get("id")
+        if not album_id:
+            logger.warning("Skipping album without ID in filtered list.")
+            continue
+        # fetch album details to construct URL and names
+        try:
+            album_obj = download_queue_manager.client.get_album(
+                album_id, include_tracks=False
+            )  # type: ignore[attr-defined]
+        except AttributeError:
+            # If download_queue_manager lacks a client, fallback to shared client
+            album_obj = get_client().get_album(album_id, include_tracks=False)
+        album_url = album_obj.get("external_urls", {}).get("spotify", "")
+        album_name = album_obj.get("name", "Unknown Album")
+        artists = album_obj.get("artists", []) or []
         album_artist = (
-            album_artists[0].get("name", "Unknown Artist")
-            if album_artists
-            else "Unknown Artist"
+            artists[0].get("name", "Unknown Artist") if artists else "Unknown Artist"
         )

         if not album_url:
@@ -40,6 +40,8 @@ DEFAULT_MAIN_CONFIG = {
     "tracknumPadding": True,
     "saveCover": True,
     "maxConcurrentDownloads": 3,
+    "utilityConcurrency": 1,
+    "librespotConcurrency": 2,
     "maxRetries": 3,
     "retryDelaySeconds": 5,
     "retryDelayIncrease": 5,
@@ -52,6 +54,7 @@ DEFAULT_MAIN_CONFIG = {
     "watch": {},
     "realTimeMultiplier": 0,
     "padNumberWidth": 3,
+    "sseUpdateIntervalSeconds": 1,  # Configurable SSE update interval (default: 1s)
 }


@@ -188,7 +191,7 @@ task_annotations = {
         "rate_limit": f"{MAX_CONCURRENT_DL}/m",
     },
     "routes.utils.celery_tasks.trigger_sse_update_task": {
-        "rate_limit": "500/m",  # Allow high rate for real-time SSE updates
+        "rate_limit": "60/m",  # Throttle to 1 update/sec per task (matches SSE throttle)
         "default_retry_delay": 1,  # Quick retry for SSE updates
         "max_retries": 1,  # Limited retries for best-effort delivery
         "ignore_result": True,  # Don't store results for SSE tasks
@@ -6,10 +6,11 @@ import os
 import sys

 from dotenv import load_dotenv

 load_dotenv()

 # Import Celery task utilities
-from .celery_config import get_config_params, MAX_CONCURRENT_DL
+from .celery_config import get_config_params, MAX_CONCURRENT_DL  # noqa: E402

 # Configure logging
 logger = logging.getLogger(__name__)
@@ -40,15 +41,22 @@ class CeleryManager:
         self.concurrency = get_config_params().get(
             "maxConcurrentDownloads", MAX_CONCURRENT_DL
         )
+        self.utility_concurrency = max(
+            1, int(get_config_params().get("utilityConcurrency", 1))
+        )
         logger.info(
-            f"CeleryManager initialized. Download concurrency set to: {self.concurrency}"
+            f"CeleryManager initialized. Download concurrency set to: {self.concurrency} | Utility concurrency: {self.utility_concurrency}"
         )

     def _get_worker_command(
         self, queues, concurrency, worker_name_suffix, log_level_env=None
     ):
         # Use LOG_LEVEL from environment if provided, otherwise default to INFO
-        log_level = log_level_env if log_level_env else os.getenv("LOG_LEVEL", "WARNING").upper()
+        log_level = (
+            log_level_env
+            if log_level_env
+            else os.getenv("LOG_LEVEL", "WARNING").upper()
+        )
         # Use a unique worker name to avoid conflicts.
         # %h is replaced by celery with the actual hostname.
         hostname = f"worker_{worker_name_suffix}@%h"
@@ -167,12 +175,19 @@ class CeleryManager:
         if self.utility_worker_process and self.utility_worker_process.poll() is None:
             logger.info("Celery Utility Worker is already running.")
         else:
+            self.utility_concurrency = max(
+                1,
+                int(
+                    get_config_params().get(
+                        "utilityConcurrency", self.utility_concurrency
+                    )
+                ),
+            )
             utility_cmd = self._get_worker_command(
                 queues="utility_tasks,default",  # Listen to utility and default
-                concurrency=5,  # Increased concurrency for SSE updates and utility tasks
+                concurrency=self.utility_concurrency,
                 worker_name_suffix="utw",  # Utility Worker
                 log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
             )
             logger.info(
                 f"Starting Celery Utility Worker with command: {' '.join(utility_cmd)}"
@@ -197,7 +212,7 @@ class CeleryManager:
             self.utility_log_thread_stdout.start()
             self.utility_log_thread_stderr.start()
             logger.info(
-                f"Celery Utility Worker (PID: {self.utility_worker_process.pid}) started with concurrency 5."
+                f"Celery Utility Worker (PID: {self.utility_worker_process.pid}) started with concurrency {self.utility_concurrency}."
             )

         if (
@@ -221,7 +236,9 @@ class CeleryManager:
         )
         while not self.stop_event.is_set():
            try:
-                time.sleep(10)  # Check every 10 seconds
+                # Wait using stop_event to be responsive to shutdown and respect interval
+                if self.stop_event.wait(CONFIG_CHECK_INTERVAL):
+                    break
                 if self.stop_event.is_set():
                     break
@@ -229,6 +246,14 @@ class CeleryManager:
                 new_max_concurrent_downloads = current_config.get(
                     "maxConcurrentDownloads", self.concurrency
                 )
+                new_utility_concurrency = max(
+                    1,
+                    int(
+                        current_config.get(
+                            "utilityConcurrency", self.utility_concurrency
+                        )
+                    ),
+                )

                 if new_max_concurrent_downloads != self.concurrency:
                     logger.info(
@@ -272,7 +297,10 @@ class CeleryManager:

                     # Restart only the download worker
                     download_cmd = self._get_worker_command(
-                        "downloads", self.concurrency, "dlw", log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper()
+                        "downloads",
+                        self.concurrency,
+                        "dlw",
+                        log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
                     )
                     logger.info(
                         f"Restarting Celery Download Worker with command: {' '.join(download_cmd)}"
@@ -303,6 +331,82 @@ class CeleryManager:
                         f"Celery Download Worker (PID: {self.download_worker_process.pid}) restarted with new concurrency {self.concurrency}."
                     )

+                    # Handle utility worker concurrency changes
+                    if new_utility_concurrency != self.utility_concurrency:
+                        logger.info(
+                            f"CeleryManager: Detected change in utilityConcurrency from {self.utility_concurrency} to {new_utility_concurrency}. Restarting utility worker only."
+                        )
+
+                        if (
+                            self.utility_worker_process
+                            and self.utility_worker_process.poll() is None
+                        ):
+                            logger.info(
+                                f"Stopping Celery Utility Worker (PID: {self.utility_worker_process.pid}) for config update..."
+                            )
+                            self.utility_worker_process.terminate()
+                            try:
+                                self.utility_worker_process.wait(timeout=10)
+                                logger.info(
+                                    f"Celery Utility Worker (PID: {self.utility_worker_process.pid}) terminated."
+                                )
+                            except subprocess.TimeoutExpired:
+                                logger.warning(
+                                    f"Celery Utility Worker (PID: {self.utility_worker_process.pid}) did not terminate gracefully, killing."
+                                )
+                                self.utility_worker_process.kill()
+                            self.utility_worker_process = None
+
+                        # Wait for log threads of utility worker to finish
+                        if (
+                            self.utility_log_thread_stdout
+                            and self.utility_log_thread_stdout.is_alive()
+                        ):
+                            self.utility_log_thread_stdout.join(timeout=5)
+                        if (
+                            self.utility_log_thread_stderr
+                            and self.utility_log_thread_stderr.is_alive()
+                        ):
+                            self.utility_log_thread_stderr.join(timeout=5)
+
+                        self.utility_concurrency = new_utility_concurrency
+
+                        # Restart only the utility worker
+                        utility_cmd = self._get_worker_command(
+                            "utility_tasks,default",
+                            self.utility_concurrency,
+                            "utw",
+                            log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
+                        )
+                        logger.info(
+                            f"Restarting Celery Utility Worker with command: {' '.join(utility_cmd)}"
+                        )
+                        self.utility_worker_process = subprocess.Popen(
+                            utility_cmd,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            text=True,
+                            bufsize=1,
+                            universal_newlines=True,
+                        )
+                        self.utility_log_thread_stdout = threading.Thread(
+                            target=self._process_output_reader,
+                            args=(self.utility_worker_process.stdout, "Celery[UW-STDOUT]"),
+                        )
+                        self.utility_log_thread_stderr = threading.Thread(
+                            target=self._process_output_reader,
+                            args=(
+                                self.utility_worker_process.stderr,
+                                "Celery[UW-STDERR]",
+                                True,
+                            ),
+                        )
+                        self.utility_log_thread_stdout.start()
+                        self.utility_log_thread_stderr.start()
+                        logger.info(
+                            f"Celery Utility Worker (PID: {self.utility_worker_process.pid}) restarted with new concurrency {self.utility_concurrency}."
+                        )
+
             except Exception as e:
                 logger.error(
                     f"CeleryManager: Error in config monitor thread: {e}", exc_info=True
@@ -44,7 +44,11 @@ def get_client() -> LibrespotClient:
|
||||
_shared_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
_shared_client = LibrespotClient(stored_credentials_path=desired_blob)
|
||||
cfg = get_config_params() or {}
|
||||
max_workers = int(cfg.get("librespotConcurrency", 2) or 2)
|
||||
_shared_client = LibrespotClient(
|
||||
stored_credentials_path=desired_blob, max_workers=max_workers
|
||||
)
|
||||
_shared_blob_path = desired_blob
|
||||
return _shared_client
|
||||
|
||||
@@ -59,7 +63,9 @@ def create_client(credentials_path: str) -> LibrespotClient:
|
||||
abs_path = os.path.abspath(credentials_path)
|
||||
if not os.path.isfile(abs_path):
|
||||
raise FileNotFoundError(f"Credentials file not found: {abs_path}")
|
||||
return LibrespotClient(stored_credentials_path=abs_path)
|
||||
cfg = get_config_params() or {}
|
||||
max_workers = int(cfg.get("librespotConcurrency", 2) or 2)
|
||||
return LibrespotClient(stored_credentials_path=abs_path, max_workers=max_workers)
|
||||
|
||||
|
||||
def close_client(client: LibrespotClient) -> None:
|
||||
@@ -93,57 +99,6 @@ def get_playlist(
     return client.get_playlist(playlist_in, expand_items=expand_items)


-def get_spotify_info(
-    spotify_id: str,
-    info_type: str,
-    limit: int = 50,
-    offset: int = 0,
-) -> Dict[str, Any]:
-    """
-    Thin, typed wrapper around common Spotify info lookups using the shared client.
-
-    Currently supports:
-    - "artist_discography": returns a paginated view over the artist's releases
-      combined across album_group/single_group/compilation_group/appears_on_group.
-
-    Returns a mapping with at least: items, total, limit, offset.
-    Also includes a truthy "next" key when more pages are available.
-    """
-    client = get_client()
-
-    if info_type == "artist_discography":
-        artist = client.get_artist(spotify_id)
-        all_items = []
-        for key in (
-            "album_group",
-            "single_group",
-            "compilation_group",
-            "appears_on_group",
-        ):
-            grp = artist.get(key)
-            if isinstance(grp, list):
-                all_items.extend(grp)
-            elif isinstance(grp, dict):
-                items = grp.get("items") or grp.get("releases") or []
-                if isinstance(items, list):
-                    all_items.extend(items)
-        total = len(all_items)
-        start = max(0, offset or 0)
-        page_limit = max(1, limit or 50)
-        end = min(total, start + page_limit)
-        page_items = all_items[start:end]
-        has_more = end < total
-        return {
-            "items": page_items,
-            "total": total,
-            "limit": page_limit,
-            "offset": start,
-            "next": bool(has_more),
-        }
-
-    raise ValueError(f"Unsupported info_type: {info_type}")
-
-
 def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]:
     """
     Fetch playlist metadata using the shared client without expanding items.
@@ -3,6 +3,8 @@ from deezspot.spotloader import SpoLogin
 from deezspot.deezloader import DeeLogin
 from pathlib import Path
 from routes.utils.credentials import get_credential, _get_global_spotify_api_creds
+from routes.utils.credentials import get_spotify_blob_path
+from routes.utils.celery_config import get_config_params
 from routes.utils.celery_queue_manager import get_existing_task_id
 from routes.utils.errors import DuplicateDownloadError

@@ -95,6 +97,7 @@ def download_playlist(
             spotify_client_id=global_spotify_client_id,
             spotify_client_secret=global_spotify_client_secret,
             progress_callback=progress_callback,
+            spotify_credentials_path=str(get_spotify_blob_path(main)),
         )
         dl.download_playlistspo(
             link_playlist=url,  # Spotify URL
@@ -265,6 +268,11 @@ def download_playlist(
             spotify_client_id=global_spotify_client_id,  # Global Spotify keys
             spotify_client_secret=global_spotify_client_secret,  # Global Spotify keys
             progress_callback=progress_callback,
+            spotify_credentials_path=(
+                str(get_spotify_blob_path(get_config_params().get("spotify")))
+                if get_config_params().get("spotify")
+                else None
+            ),
         )
         dl.download_playlistdee(  # Deezer URL, download via Deezer
             link_playlist=url,
@@ -6,6 +6,7 @@ from routes.utils.credentials import (
     _get_global_spotify_api_creds,
     get_spotify_blob_path,
 )
+from routes.utils.celery_config import get_config_params


 def download_track(
@@ -90,6 +91,7 @@ def download_track(
             spotify_client_id=global_spotify_client_id,  # Global creds
             spotify_client_secret=global_spotify_client_secret,  # Global creds
             progress_callback=progress_callback,
+            spotify_credentials_path=str(get_spotify_blob_path(main)),
         )
         # download_trackspo means: Spotify URL, download via Deezer
         dl.download_trackspo(
@@ -169,7 +171,6 @@ def download_track(
             convert_to=convert_to,
             bitrate=bitrate,
             artist_separator=artist_separator,
-            spotify_metadata=spotify_metadata,
             pad_number_width=pad_number_width,
         )
         print(
@@ -251,6 +252,11 @@ def download_track(
             spotify_client_id=global_spotify_client_id,  # Global Spotify keys for internal Spo use by DeeLogin
             spotify_client_secret=global_spotify_client_secret,  # Global Spotify keys
             progress_callback=progress_callback,
+            spotify_credentials_path=(
+                str(get_spotify_blob_path(get_config_params().get("spotify")))
+                if get_config_params().get("spotify")
+                else None
+            ),
         )
         dl.download_trackdee(  # Deezer URL, download via Deezer
             link_track=url,
@@ -167,6 +167,46 @@ def get_watch_config():
             watch_cfg["maxItemsPerRun"] = clamped_value
             migrated = True

+        # Enforce sane ranges and types for poll/delay intervals to prevent tight loops
+        def _safe_int(value, default):
+            try:
+                return int(value)
+            except Exception:
+                return default
+
+        # Clamp poll interval to at least 1 second
+        poll_val = _safe_int(
+            watch_cfg.get(
+                "watchPollIntervalSeconds",
+                DEFAULT_WATCH_CONFIG["watchPollIntervalSeconds"],
+            ),
+            DEFAULT_WATCH_CONFIG["watchPollIntervalSeconds"],
+        )
+        if poll_val < 1:
+            watch_cfg["watchPollIntervalSeconds"] = 1
+            migrated = True
+        # Clamp per-item delays to at least 1 second
+        delay_pl = _safe_int(
+            watch_cfg.get(
+                "delayBetweenPlaylistsSeconds",
+                DEFAULT_WATCH_CONFIG["delayBetweenPlaylistsSeconds"],
+            ),
+            DEFAULT_WATCH_CONFIG["delayBetweenPlaylistsSeconds"],
+        )
+        if delay_pl < 1:
+            watch_cfg["delayBetweenPlaylistsSeconds"] = 1
+            migrated = True
+        delay_ar = _safe_int(
+            watch_cfg.get(
+                "delayBetweenArtistsSeconds",
+                DEFAULT_WATCH_CONFIG["delayBetweenArtistsSeconds"],
+            ),
+            DEFAULT_WATCH_CONFIG["delayBetweenArtistsSeconds"],
+        )
+        if delay_ar < 1:
+            watch_cfg["delayBetweenArtistsSeconds"] = 1
+            migrated = True
+
     if migrated or legacy_file_found:
         # Persist migration back to main.json
         main_cfg["watch"] = watch_cfg
@@ -670,7 +710,9 @@ def check_watched_playlists(specific_playlist_id: str = None):

         # Only sleep between items when running a batch (no specific ID)
         if not specific_playlist_id:
-            time.sleep(max(1, config.get("delayBetweenPlaylistsSeconds", 2)))
+            time.sleep(
+                max(1, _safe_to_int(config.get("delayBetweenPlaylistsSeconds"), 2))
+            )

     logger.info("Playlist Watch Manager: Finished checking all watched playlists.")
@@ -817,7 +859,9 @@ def check_watched_artists(specific_artist_id: str = None):
|
||||
|
||||
# Only sleep between items when running a batch (no specific ID)
|
||||
if not specific_artist_id:
|
||||
time.sleep(max(1, config.get("delayBetweenArtistsSeconds", 5)))
|
||||
time.sleep(
|
||||
max(1, _safe_to_int(config.get("delayBetweenArtistsSeconds"), 5))
|
||||
)
|
||||
|
||||
logger.info("Artist Watch Manager: Finished checking all watched artists.")
|
||||
|
||||
@@ -832,6 +876,14 @@ def playlist_watch_scheduler():
|
||||
interval = current_config.get("watchPollIntervalSeconds", 3600)
|
||||
watch_enabled = current_config.get("enabled", False) # Get enabled status
|
||||
|
||||
# Ensure interval is a positive integer to avoid tight loops
|
||||
try:
|
||||
interval = int(interval)
|
||||
except Exception:
|
||||
interval = 3600
|
||||
if interval < 1:
|
||||
interval = 1
|
||||
|
||||
if not watch_enabled:
|
||||
logger.info(
|
||||
"Watch Scheduler: Watch feature is disabled in config. Skipping checks."
|
||||
@@ -907,6 +959,13 @@ def run_playlist_check_over_intervals(playlist_spotify_id: str) -> None:
|
||||
# Determine if we are done: no active processing snapshot and no pending sync
|
||||
cfg = get_watch_config()
|
||||
interval = cfg.get("watchPollIntervalSeconds", 3600)
|
||||
# Ensure interval is a positive integer
|
||||
try:
|
||||
interval = int(interval)
|
||||
except Exception:
|
||||
interval = 3600
|
||||
if interval < 1:
|
||||
interval = 1
|
||||
# Use local helper that leverages Librespot client
|
||||
metadata = _fetch_playlist_metadata(playlist_spotify_id)
|
||||
if not metadata:
@@ -1169,6 +1228,17 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
# Helper to build a Librespot client from active account


# Add a small internal helper for safe int conversion
_def_safe_int_added = True


def _safe_to_int(value, default):
    try:
        return int(value)
    except Exception:
        return default


def _build_librespot_client():
    try:
        # Reuse shared client managed in routes.utils.get_info
@@ -1235,11 +1305,35 @@ def _fetch_artist_discography_page(artist_id: str, limit: int, offset: int) -> d
    for key in ("album_group", "single_group", "compilation_group", "appears_on_group"):
        grp = artist.get(key)
        if isinstance(grp, list):
            all_items.extend(grp)
            # Check if items are strings (IDs) or dictionaries (metadata)
            if grp and isinstance(grp[0], str):
                # Items are album IDs as strings, fetch metadata for each
                for album_id in grp:
                    try:
                        album_data = client.get_album(album_id, include_tracks=False)
                        if album_data:
                            # Add the album_group type for filtering
                            album_data["album_group"] = key.replace("_group", "")
                            all_items.append(album_data)
                    except Exception as e:
                        logger.warning(f"Failed to fetch album {album_id}: {e}")
            else:
                # Items are already dictionaries (album metadata)
                for item in grp:
                    if isinstance(item, dict):
                        # Ensure album_group is set for filtering
                        if "album_group" not in item:
                            item["album_group"] = key.replace("_group", "")
                        all_items.append(item)
        elif isinstance(grp, dict):
            items = grp.get("items") or grp.get("releases") or []
            if isinstance(items, list):
                all_items.extend(items)
                for item in items:
                    if isinstance(item, dict):
                        # Ensure album_group is set for filtering
                        if "album_group" not in item:
                            item["album_group"] = key.replace("_group", "")
                        all_items.append(item)
    total = len(all_items)
    start = max(0, offset or 0)
    end = start + max(1, limit or 50)

@@ -1,4 +1,4 @@
import { useEffect } from "react";
import { useEffect, useState } from "react";
import { useForm, Controller } from "react-hook-form";
import { authApiClient } from "../../lib/api-client";
import { toast } from "sonner";
@@ -16,12 +16,32 @@ interface WebhookSettings {
  available_events: string[]; // Provided by API, not saved
}

// --- API Functions ---
const fetchSpotifyApiConfig = async (): Promise<SpotifyApiSettings> => {
  const { data } = await authApiClient.client.get("/credentials/spotify_api_config");
  return data;
interface ServerConfig {
  client_id?: string;
  client_secret?: string;
  utilityConcurrency?: number;
  librespotConcurrency?: number;
  url?: string;
  events?: string[];
}

const fetchServerConfig = async (): Promise<ServerConfig> => {
  const [spotifyConfig, generalConfig] = await Promise.all([
    authApiClient.client.get("/credentials/spotify_api_config").catch(() => ({ data: {} })),
    authApiClient.getConfig<any>(),
  ]);

  return {
    ...spotifyConfig.data,
    ...generalConfig,
  };
};

const saveServerConfig = async (data: Partial<ServerConfig>) => {
  const payload = { ...data };
  const { data: response } = await authApiClient.client.post("/config", payload);
  return response;
};
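A note on the merge in fetchServerConfig above: with object spreads, later spreads win on key collisions, so any field present in both responses ends up with the value from generalConfig. A minimal TypeScript sketch of that precedence (the values are illustrative):

// Later spreads override earlier ones on duplicate keys.
const spotify = { data: { client_id: "abc", utilityConcurrency: 1 } };
const general = { utilityConcurrency: 4 };
const merged = { ...spotify.data, ...general };
// merged => { client_id: "abc", utilityConcurrency: 4 } (general wins)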
const saveSpotifyApiConfig = (data: SpotifyApiSettings) => authApiClient.client.put("/credentials/spotify_api_config", data);

const fetchWebhookConfig = async (): Promise<WebhookSettings> => {
  // Mock a response since backend endpoint doesn't exist
@@ -32,40 +52,34 @@ const fetchWebhookConfig = async (): Promise<WebhookSettings> => {
    available_events: ["download_start", "download_complete", "download_failed", "watch_added"],
  });
};
const saveWebhookConfig = (data: Partial<WebhookSettings>) => {
  toast.info("Webhook configuration is not available.");
  return Promise.resolve(data);

const saveWebhookConfig = async (data: Partial<WebhookSettings>) => {
  const payload = { ...data };
  const { data: response } = await authApiClient.client.post("/config", payload);
  return response;
};

const testWebhook = (url: string) => {
  toast.info("Webhook testing is not available.");
  return Promise.resolve(url);
};

// --- Components ---
function SpotifyApiForm() {
  const queryClient = useQueryClient();
  const { data, isLoading } = useQuery({ queryKey: ["spotifyApiConfig"], queryFn: fetchSpotifyApiConfig });
function SpotifyApiForm({ config, onConfigChange }: { config: ServerConfig; onConfigChange: (updates: Partial<ServerConfig>) => void }) {
  const { register, handleSubmit, reset } = useForm<SpotifyApiSettings>();

  const mutation = useMutation({
    mutationFn: saveSpotifyApiConfig,
    onSuccess: () => {
      toast.success("Spotify API settings saved!");
      queryClient.invalidateQueries({ queryKey: ["spotifyApiConfig"] });
    },
    onError: (e) => {
      console.error("Failed to save Spotify API settings:", (e as any).message);
      toast.error(`Failed to save: ${(e as any).message}`);
    },
  });

  useEffect(() => {
    if (data) reset(data);
  }, [data, reset]);
    if (config) {
      reset({
        client_id: config.client_id || "",
        client_secret: config.client_secret || "",
      });
    }
  }, [config, reset]);

  const onSubmit = (formData: SpotifyApiSettings) => mutation.mutate(formData);

  if (isLoading) return <p className="text-content-muted dark:text-content-muted-dark">Loading Spotify API settings...</p>;
  const onSubmit = (formData: SpotifyApiSettings) => {
    onConfigChange(formData);
  };

  return (
    <form onSubmit={handleSubmit(onSubmit)} className="space-y-4">
@@ -73,15 +87,10 @@ function SpotifyApiForm() {
        <div className="flex items-center gap-3">
          <button
            type="submit"
            disabled={mutation.isPending}
            className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
            title="Save Spotify API"
            className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md"
            title="Save Spotify API Settings"
          >
            {mutation.isPending ? (
              <img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
            ) : (
            <img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
            )}
          </button>
        </div>
      </div>
@@ -110,6 +119,101 @@ function SpotifyApiForm() {
  );
}

function UtilityConcurrencyForm({ config, onConfigChange }: { config: ServerConfig; onConfigChange: (updates: Partial<ServerConfig>) => void }) {
  const { register, handleSubmit, reset, formState: { isDirty } } = useForm<{ utilityConcurrency: number }>();

  useEffect(() => {
    if (config) {
      reset({ utilityConcurrency: Number(config.utilityConcurrency ?? 1) });
    }
  }, [config, reset]);

  const onSubmit = (values: { utilityConcurrency: number }) => {
    const value = Math.max(1, Number(values.utilityConcurrency || 1));
    onConfigChange({ utilityConcurrency: value });
  };

  return (
    <form onSubmit={handleSubmit(onSubmit)} className="space-y-4">
      <div className="flex items-center justify-end mb-2">
        <div className="flex items-center gap-3">
          <button
            type="submit"
            disabled={!isDirty}
            className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
            title="Save Utility Concurrency"
          >
            <img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
          </button>
        </div>
      </div>

      <div className="flex flex-col gap-2">
        <label htmlFor="utilityConcurrency" className="text-content-primary dark:text-content-primary-dark">Utility Worker Concurrency</label>
        <input
          id="utilityConcurrency"
          type="number"
          min={1}
          step={1}
          {...register("utilityConcurrency", { valueAsNumber: true })}
          className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
          placeholder="1"
        />
        <p className="text-xs text-content-secondary dark:text-content-secondary-dark">Controls concurrency of the utility Celery worker. Minimum 1.</p>
      </div>
    </form>
  );
}

function LibrespotConcurrencyForm({ config, onConfigChange }: { config: ServerConfig; onConfigChange: (updates: Partial<ServerConfig>) => void }) {
  const { register, handleSubmit, reset, formState: { isDirty } } = useForm<{ librespotConcurrency: number }>();

  useEffect(() => {
    if (config) {
      reset({ librespotConcurrency: Number(config.librespotConcurrency ?? 2) });
    }
  }, [config, reset]);

  const onSubmit = (values: { librespotConcurrency: number }) => {
    const raw = Number(values.librespotConcurrency || 2);
    const safe = Math.max(1, Math.min(16, raw));
    onConfigChange({ librespotConcurrency: safe });
  };

  return (
    <form onSubmit={handleSubmit(onSubmit)} className="space-y-4">
      <div className="flex items-center justify-end mb-2">
        <div className="flex items-center gap-3">
          <button
            type="submit"
            disabled={!isDirty}
            className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
            title="Save Librespot Concurrency"
          >
            <img src="/save.svg" alt="Save" className="w-5 h-5 logo" />
          </button>
        </div>
      </div>

      <div className="flex flex-col gap-2">
        <label htmlFor="librespotConcurrency" className="text-content-primary dark:text-content-primary-dark">Librespot Concurrency</label>
        <input
          id="librespotConcurrency"
          type="number"
          min={1}
          max={16}
          step={1}
          {...register("librespotConcurrency", { valueAsNumber: true })}
          className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
          placeholder="2"
        />
        <p className="text-xs text-content-secondary dark:text-content-secondary-dark">Controls worker threads used by the Librespot client. 1–16 is recommended.</p>
      </div>
    </form>
  );
}

// --- Components ---
function WebhookForm() {
  const queryClient = useQueryClient();
  const { data, isLoading } = useQuery({ queryKey: ["webhookConfig"], queryFn: fetchWebhookConfig });
@@ -152,7 +256,7 @@ function WebhookForm() {
            type="submit"
            disabled={mutation.isPending}
            className="px-4 py-2 bg-button-primary hover:bg-button-primary-hover text-button-primary-text rounded-md disabled:opacity-50"
            title="Save Webhook"
            title="Save Webhook Settings"
          >
            {mutation.isPending ? (
              <img src="/spinner.svg" alt="Saving" className="w-5 h-5 animate-spin logo" />
@@ -215,12 +319,61 @@ function WebhookForm() {
}

export function ServerTab() {
  const queryClient = useQueryClient();
  const [localConfig, setLocalConfig] = useState<ServerConfig>({});

  const { data: serverConfig, isLoading } = useQuery({
    queryKey: ["serverConfig"],
    queryFn: fetchServerConfig,
  });

  const mutation = useMutation({
    mutationFn: saveServerConfig,
    onSuccess: () => {
      toast.success("Server settings saved successfully!");
      queryClient.invalidateQueries({ queryKey: ["serverConfig"] });
      queryClient.invalidateQueries({ queryKey: ["config"] });
    },
    onError: (error) => {
      console.error("Failed to save server settings", (error as any).message);
      toast.error(`Failed to save server settings: ${(error as any).message}`);
    },
  });

  useEffect(() => {
    if (serverConfig) {
      setLocalConfig(serverConfig);
    }
  }, [serverConfig]);

  const handleConfigChange = (updates: Partial<ServerConfig>) => {
    const newConfig = { ...localConfig, ...updates };
    setLocalConfig(newConfig);
    mutation.mutate(newConfig);
  };

  if (isLoading) {
    return <div>Loading server settings...</div>;
  }

  return (
    <div className="space-y-8">
      <div>
        <h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Spotify API</h3>
        <p className="text-sm text-content-muted dark:text-content-muted-dark mt-1">Provide your own API credentials to avoid rate-limiting issues.</p>
        <SpotifyApiForm />
        <SpotifyApiForm config={localConfig} onConfigChange={handleConfigChange} />
      </div>
      <hr className="border-border dark:border-border-dark" />
      <div>
        <h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Utility Worker</h3>
        <p className="text-sm text-content-muted dark:text-content-muted-dark mt-1">Tune background utility worker concurrency for low-powered systems.</p>
        <UtilityConcurrencyForm config={localConfig} onConfigChange={handleConfigChange} />
      </div>
      <hr className="border-border dark:border-border-dark" />
      <div>
        <h3 className="text-xl font-semibold text-content-primary dark:text-content-primary-dark">Librespot</h3>
        <p className="text-sm text-content-muted dark:text-content-muted-dark mt-1">Adjust Librespot client worker threads.</p>
        <LibrespotConcurrencyForm config={localConfig} onConfigChange={handleConfigChange} />
      </div>
      <hr className="border-border dark:border-border-dark" />
      <div>

@@ -32,6 +32,8 @@ export type FlatAppSettings = {
  deezer: string;
  deezerQuality: "MP3_128" | "MP3_320" | "FLAC";
  maxConcurrentDownloads: number;
  utilityConcurrency: number;
  librespotConcurrency: number;
  realTime: boolean;
  fallback: boolean;
  convertTo: "MP3" | "AAC" | "OGG" | "OPUS" | "FLAC" | "WAV" | "ALAC" | "";
@@ -72,6 +74,8 @@ const defaultSettings: FlatAppSettings = {
  deezer: "",
  deezerQuality: "MP3_128",
  maxConcurrentDownloads: 3,
  utilityConcurrency: 1,
  librespotConcurrency: 2,
  realTime: false,
  fallback: false,
  convertTo: "",
@@ -135,6 +139,8 @@ const fetchSettings = async (): Promise<FlatAppSettings> => {
    // Ensure required frontend-only fields exist
    recursiveQuality: Boolean((camelData as any).recursiveQuality ?? false),
    realTimeMultiplier: Number((camelData as any).realTimeMultiplier ?? 0),
    utilityConcurrency: Number((camelData as any).utilityConcurrency ?? 1),
    librespotConcurrency: Number((camelData as any).librespotConcurrency ?? 2),
    // Ensure watch subkeys default if missing
    watch: {
      ...(camelData.watch as any),

@@ -8,6 +8,8 @@ export interface AppSettings {
  deezer: string;
  deezerQuality: "MP3_128" | "MP3_320" | "FLAC";
  maxConcurrentDownloads: number;
  utilityConcurrency: number;
  librespotConcurrency: number;
  realTime: boolean;
  fallback: boolean;
  convertTo: "MP3" | "AAC" | "OGG" | "OPUS" | "FLAC" | "WAV" | "ALAC" | "";

@@ -369,6 +369,17 @@ class AuthApiClient {
  get client() {
    return this.apiClient;
  }

  // General config helpers
  async getConfig<T = any>(): Promise<T> {
    const response = await this.apiClient.get<T>("/config");
    return response.data;
  }

  async updateConfig<T = any>(partial: Record<string, unknown>): Promise<T> {
    const response = await this.apiClient.put<T>("/config", partial);
    return response.data;
  }
}
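A quick usage sketch for the two helpers added above; the config key shown is illustrative, not a documented schema:

// Inside an async function: read the full config, then push a partial update back.
const config = await authApiClient.getConfig<Record<string, unknown>>();
await authApiClient.updateConfig({ utilityConcurrency: 2 });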

// Create and export a singleton instance

@@ -135,6 +135,16 @@ export const Album = () => {
    };
  }, [loadMore]);

  // Auto progressive loading regardless of scroll
  useEffect(() => {
    if (!album) return;
    if (!hasMore || isLoadingMore) return;
    const t = setTimeout(() => {
      loadMore();
    }, 300);
    return () => clearTimeout(t);
  }, [album, hasMore, isLoadingMore, loadMore]);
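The same timer-driven effect is repeated below in Artist.tsx (350 ms) and Playlist.tsx (300 ms). If the duplication ever becomes a nuisance, it could be lifted into a shared hook; a hypothetical sketch (useAutoLoad is not part of this change):

// Hypothetical hook: keep requesting pages on a short delay while more remain.
function useAutoLoad(ready: boolean, hasMore: boolean, busy: boolean, load: () => void, delayMs = 300) {
  useEffect(() => {
    if (!ready || !hasMore || busy) return;
    const t = setTimeout(load, delayMs);
    return () => clearTimeout(t);
  }, [ready, hasMore, busy, load, delayMs]);
}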

  const handleDownloadTrack = (track: LibrespotTrackType) => {
    if (!track.id) return;
    toast.info(`Adding ${track.name} to queue...`);

@@ -2,27 +2,18 @@ import { Link, useParams } from "@tanstack/react-router";
import { useEffect, useState, useContext, useRef, useCallback } from "react";
import { toast } from "sonner";
import apiClient from "../lib/api-client";
import type { LibrespotAlbumType, LibrespotArtistType, LibrespotTrackType, LibrespotImage } from "@/types/librespot";
import type { LibrespotAlbumType, LibrespotArtistType, LibrespotTrackType } from "@/types/librespot";
import { QueueContext, getStatus } from "../contexts/queue-context";
import { useSettings } from "../contexts/settings-context";
import { FaArrowLeft, FaBookmark, FaRegBookmark, FaDownload } from "react-icons/fa";
import { AlbumCard } from "../components/AlbumCard";

// Narrow type for the artist info response additions
type ArtistInfoResponse = LibrespotArtistType & {
  biography?: Array<{ text?: string; portrait_group?: { image?: LibrespotImage[] } }>;
  portrait_group?: { image?: LibrespotImage[] };
  top_track?: Array<{ country: string; track: string[] }>;
  album_group?: string[];
  single_group?: string[];
  appears_on_group?: string[];
};

export const Artist = () => {
  const { artistId } = useParams({ from: "/artist/$artistId" });
  const [artist, setArtist] = useState<ArtistInfoResponse | null>(null);
  const [artist, setArtist] = useState<LibrespotArtistType | null>(null);
  const [artistAlbums, setArtistAlbums] = useState<LibrespotAlbumType[]>([]);
  const [artistSingles, setArtistSingles] = useState<LibrespotAlbumType[]>([]);
  const [artistCompilations, setArtistCompilations] = useState<LibrespotAlbumType[]>([]);
  const [artistAppearsOn, setArtistAppearsOn] = useState<LibrespotAlbumType[]>([]);
  const [topTracks, setTopTracks] = useState<LibrespotTrackType[]>([]);
  const [bannerUrl, setBannerUrl] = useState<string | null>(null);
@@ -38,6 +29,7 @@ export const Artist = () => {
  const ALBUM_BATCH = 12;
  const [albumOffset, setAlbumOffset] = useState<number>(0);
  const [singleOffset, setSingleOffset] = useState<number>(0);
  const [compOffset, setCompOffset] = useState<number>(0);
  const [appearsOffset, setAppearsOffset] = useState<number>(0);
  const [loading, setLoading] = useState<boolean>(false);
  const [loadingMore, setLoadingMore] = useState<boolean>(false);
@@ -81,16 +73,18 @@ export const Artist = () => {
    setError(null);
    setArtistAlbums([]);
    setArtistSingles([]);
    setArtistCompilations([]);
    setArtistAppearsOn([]);
    setAlbumOffset(0);
    setSingleOffset(0);
    setCompOffset(0);
    setAppearsOffset(0);
    setHasMore(true);
    setBannerUrl(null); // reset hero; will lazy-load below

    try {
      const resp = await apiClient.get<ArtistInfoResponse>(`/artist/info?id=${artistId}`);
      const data: ArtistInfoResponse = resp.data;
      const resp = await apiClient.get<LibrespotArtistType>(`/artist/info?id=${artistId}`);
      const data: LibrespotArtistType = resp.data;

      if (cancelled) return;

@@ -99,10 +93,10 @@ export const Artist = () => {
      setArtist(data);

      // Lazy-load banner image after render
      const bioEntry = Array.isArray(data.biography) && data.biography.length > 0 ? data.biography[0] : undefined;
      const portraitImages = data.portrait_group?.image ?? bioEntry?.portrait_group?.image ?? [];
      const allImages = [...(portraitImages ?? []), ...((data.images as LibrespotImage[] | undefined) ?? [])];
      const candidateBanner = allImages.sort((a, b) => (b?.width ?? 0) - (a?.width ?? 0))[0]?.url || "/placeholder.jpg";
      const allImages = [...(data.portrait_group.image ?? []), ...(data.biography?.[0].portrait_group.image ?? [])];
      const candidateBanner = allImages
        .filter(img => img && typeof img === 'object' && 'url' in img)
        .sort((a, b) => (b.width ?? 0) - (a.width ?? 0))[0]?.url || "/placeholder.jpg";
      // Use async preload to avoid blocking initial paint
      setTimeout(() => {
        const img = new Image();
@@ -123,46 +117,61 @@ export const Artist = () => {
        if (!cancelled) setTopTracks([]);
      }

      // Progressive album loading: album -> single -> appears_on
      // Progressive album loading: album -> single -> compilation -> appears_on
      const albumIds = data.album_group ?? [];
      const singleIds = data.single_group ?? [];
      const compIds = data.compilation_group ?? [];
      const appearsIds = data.appears_on_group ?? [];

      // Determine initial number based on screen size: 4 on small screens
      const isSmallScreen = typeof window !== "undefined" && !window.matchMedia("(min-width: 640px)").matches;
      const initialTarget = isSmallScreen ? 4 : ALBUM_BATCH;

      // Load initial batch from albumIds, then if needed from singles, then appears
      const initialBatch: LibrespotAlbumType[] = [];
      let aOff = 0, sOff = 0, apOff = 0;
      if (albumIds.length > 0) {
        const take = albumIds.slice(0, initialTarget);
        initialBatch.push(...await fetchAlbumsByIds(take));
      // Load initial sets from each group in order until initialTarget reached
      let aOff = 0, sOff = 0, cOff = 0, apOff = 0;
      let loaded = 0;
      let aList: LibrespotAlbumType[] = [];
      let sList: LibrespotAlbumType[] = [];
      let cList: LibrespotAlbumType[] = [];
      let apList: LibrespotAlbumType[] = [];

      if (albumIds.length > 0 && loaded < initialTarget) {
        const take = albumIds.slice(0, initialTarget - loaded);
        aList = await fetchAlbumsByIds(take);
        aOff = take.length;
        loaded += aList.length;
      }
      if (initialBatch.length < initialTarget && singleIds.length > 0) {
        const remaining = initialTarget - initialBatch.length;
        const take = singleIds.slice(0, remaining);
        initialBatch.push(...await fetchAlbumsByIds(take));
      if (singleIds.length > 0 && loaded < initialTarget) {
        const take = singleIds.slice(0, initialTarget - loaded);
        sList = await fetchAlbumsByIds(take);
        sOff = take.length;
        loaded += sList.length;
      }
      if (initialBatch.length < initialTarget && appearsIds.length > 0) {
        const remaining = initialTarget - initialBatch.length;
        const take = appearsIds.slice(0, remaining);
        initialBatch.push(...await fetchAlbumsByIds(take));
      if (compIds.length > 0 && loaded < initialTarget) {
        const take = compIds.slice(0, initialTarget - loaded);
        cList = await fetchAlbumsByIds(take);
        cOff = take.length;
        loaded += cList.length;
      }
      if (appearsIds.length > 0 && loaded < initialTarget) {
        const take = appearsIds.slice(0, initialTarget - loaded);
        apList = await fetchAlbumsByIds(take);
        apOff = take.length;
        loaded += apList.length;
      }

      if (!cancelled) {
        setArtistAlbums(initialBatch.filter(a => a.album_type === "album"));
        setArtistSingles(initialBatch.filter(a => a.album_type === "single"));
        setArtistAppearsOn([]); // placeholder; appears_on grouping not explicitly typed
        setArtistAlbums(aList);
        setArtistSingles(sList);
        setArtistCompilations(cList);
        setArtistAppearsOn(apList);
        // Store offsets for next loads
        setAlbumOffset(aOff);
        setSingleOffset(sOff);
        setCompOffset(cOff);
        setAppearsOffset(apOff);
        // Determine if more remain
        setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (appearsIds.length > apOff));
        setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (compIds.length > cOff) || (appearsIds.length > apOff));
      }
    } else {
      setError("Could not load artist data.");
@@ -201,34 +210,54 @@ export const Artist = () => {
    try {
      const albumIds = artist.album_group ?? [];
      const singleIds = artist.single_group ?? [];
      const compIds = artist.compilation_group ?? [];
      const appearsIds = artist.appears_on_group ?? [];

      const nextBatch: LibrespotAlbumType[] = [];
      let aOff = albumOffset, sOff = singleOffset, apOff = appearsOffset;
      if (aOff < albumIds.length) {
        const take = albumIds.slice(aOff, aOff + ALBUM_BATCH - nextBatch.length);
        nextBatch.push(...await fetchAlbumsByIds(take));
      const nextA: LibrespotAlbumType[] = [];
      const nextS: LibrespotAlbumType[] = [];
      const nextC: LibrespotAlbumType[] = [];
      const nextAp: LibrespotAlbumType[] = [];

      let aOff = albumOffset, sOff = singleOffset, cOff = compOffset, apOff = appearsOffset;

      const totalLoaded = () => nextA.length + nextS.length + nextC.length + nextAp.length;

      if (aOff < albumIds.length && totalLoaded() < ALBUM_BATCH) {
        const remaining = ALBUM_BATCH - totalLoaded();
        const take = albumIds.slice(aOff, aOff + remaining);
        nextA.push(...await fetchAlbumsByIds(take));
        aOff += take.length;
      }
      if (nextBatch.length < ALBUM_BATCH && sOff < singleIds.length) {
        const remaining = ALBUM_BATCH - nextBatch.length;
      if (sOff < singleIds.length && totalLoaded() < ALBUM_BATCH) {
        const remaining = ALBUM_BATCH - totalLoaded();
        const take = singleIds.slice(sOff, sOff + remaining);
        nextBatch.push(...await fetchAlbumsByIds(take));
        nextS.push(...await fetchAlbumsByIds(take));
        sOff += take.length;
      }
      if (nextBatch.length < ALBUM_BATCH && apOff < appearsIds.length) {
        const remaining = ALBUM_BATCH - nextBatch.length;
      if (cOff < compIds.length && totalLoaded() < ALBUM_BATCH) {
        const remaining = ALBUM_BATCH - totalLoaded();
        const take = compIds.slice(cOff, cOff + remaining);
        nextC.push(...await fetchAlbumsByIds(take));
        cOff += take.length;
      }
      if (apOff < appearsIds.length && totalLoaded() < ALBUM_BATCH) {
        const remaining = ALBUM_BATCH - totalLoaded();
        const take = appearsIds.slice(apOff, apOff + remaining);
        nextBatch.push(...await fetchAlbumsByIds(take));
        nextAp.push(...await fetchAlbumsByIds(take));
        apOff += take.length;
      }

      setArtistAlbums((cur) => cur.concat(nextBatch.filter(a => a.album_type === "album")));
      setArtistSingles((cur) => cur.concat(nextBatch.filter(a => a.album_type === "single")));
      setAppearsOffset(apOff);
      setArtistAlbums((cur) => cur.concat(nextA));
      setArtistSingles((cur) => cur.concat(nextS));
      setArtistCompilations((cur) => cur.concat(nextC));
      setArtistAppearsOn((cur) => cur.concat(nextAp));

      setAlbumOffset(aOff);
      setSingleOffset(sOff);
      setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (appearsIds.length > apOff));
      setCompOffset(cOff);
      setAppearsOffset(apOff);

      setHasMore((albumIds.length > aOff) || (singleIds.length > sOff) || (compIds.length > cOff) || (appearsIds.length > apOff));
    } catch (err) {
      console.error("Failed to load more albums", err);
      toast.error("Failed to load more albums");
@@ -236,7 +265,7 @@ export const Artist = () => {
    } finally {
      setLoadingMore(false);
    }
  }, [artistId, loadingMore, loading, hasMore, artist, albumOffset, singleOffset, appearsOffset, fetchAlbumsByIds]);
  }, [artistId, loadingMore, loading, hasMore, artist, albumOffset, singleOffset, compOffset, appearsOffset, fetchAlbumsByIds]);
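The four nearly identical if-blocks above implement a simple fill-in-order policy: drain the albums list first, then singles, compilations, and appears-on, stopping once ALBUM_BATCH items have been collected. A hypothetical generic helper expressing the same policy (takeFromGroups is an illustration, not part of this change):

// Take up to `limit` ids, walking the groups in order and tracking per-group offsets.
function takeFromGroups(groups: string[][], offsets: number[], limit: number): { picked: string[][]; offsets: number[] } {
  const picked = groups.map(() => [] as string[]);
  const next = [...offsets];
  let budget = limit;
  groups.forEach((ids, i) => {
    if (budget <= 0) return;
    const take = ids.slice(next[i], next[i] + budget);
    picked[i] = take;
    next[i] += take.length;
    budget -= take.length;
  });
  return { picked, offsets: next };
}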

  // IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible
  useEffect(() => {
@@ -263,6 +292,16 @@ export const Artist = () => {
    return () => observer.disconnect();
  }, [fetchMoreAlbums, hasMore]);

  // Auto progressive loading regardless of scroll
  useEffect(() => {
    if (!artist) return;
    if (!hasMore || loading || loadingMore) return;
    const t = setTimeout(() => {
      fetchMoreAlbums();
    }, 350);
    return () => clearTimeout(t);
  }, [artist, hasMore, loading, loadingMore, fetchMoreAlbums]);

  // --- existing handlers (unchanged) ---
  const handleDownloadTrack = (track: LibrespotTrackType) => {
    if (!track.id) return;
@@ -303,6 +342,25 @@ export const Artist = () => {
    }
  };

  const handleDownloadGroup = async (group: "album" | "single" | "compilation" | "appears_on") => {
    if (!artistId || !artist) return;
    try {
      toast.info(`Queueing ${group} downloads for ${artist.name}...`);
      const response = await apiClient.get(`/artist/download/${artistId}?album_type=${group}`);
      const count = response.data?.queued_albums?.length ?? 0;
      if (count > 0) {
        toast.success(`Queued ${count} ${group}${count > 1 ? "s" : ""}.`);
      } else {
        toast.info(`No new ${group} releases to download.`);
      }
    } catch (error: any) {
      console.error(`Failed to queue ${group} downloads:`, error);
      toast.error(`Failed to queue ${group} downloads`, {
        description: error.response?.data?.error || "An unexpected error occurred.",
      });
    }
  };

  const handleToggleWatch = async () => {
    if (!artistId || !artist) return;
    try {
@@ -453,7 +511,17 @@ export const Artist = () => {
        {/* Albums */}
        {artistAlbums.length > 0 && (
          <div className="mb-12">
            <h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Albums</h2>
            <div className="flex items-center justify-between mb-6">
              <h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Albums</h2>
              <button
                onClick={() => handleDownloadGroup("album")}
                className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
                title="Download all albums"
              >
                <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
                <span>Download</span>
              </button>
            </div>
            <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
              {artistAlbums.map((album) => (
                <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -465,7 +533,17 @@ export const Artist = () => {
        {/* Singles */}
        {artistSingles.length > 0 && (
          <div className="mb-12">
            <h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Singles</h2>
            <div className="flex items-center justify-between mb-6">
              <h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Singles</h2>
              <button
                onClick={() => handleDownloadGroup("single")}
                className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
                title="Download all singles"
              >
                <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
                <span>Download</span>
              </button>
            </div>
            <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
              {artistSingles.map((album) => (
                <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -474,10 +552,42 @@ export const Artist = () => {
          </div>
        )}

        {/* Compilations */}
        {artistCompilations.length > 0 && (
          <div className="mb-12">
            <div className="flex items-center justify-between mb-6">
              <h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Compilations</h2>
              <button
                onClick={() => handleDownloadGroup("compilation")}
                className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
                title="Download all compilations"
              >
                <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
                <span>Download</span>
              </button>
            </div>
            <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
              {artistCompilations.map((album) => (
                <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
              ))}
            </div>
          </div>
        )}

        {/* Appears On */}
        {artistAppearsOn.length > 0 && (
          <div className="mb-12">
            <h2 className="text-3xl font-bold mb-6 text-content-primary dark:text-content-primary-dark">Appears On</h2>
            <div className="flex items-center justify-between mb-6">
              <h2 className="text-3xl font-bold text-content-primary dark:text-content-primary-dark">Appears On</h2>
              <button
                onClick={() => handleDownloadGroup("appears_on")}
                className="flex items-center gap-2 px-3 py-1.5 text-sm bg-button-success hover:bg-button-success-hover text-button-success-text rounded-md transition-colors"
                title="Download all appears on"
              >
                <img src="/download.svg" alt="Download" className="w-4 h-4 logo" />
                <span>Download</span>
              </button>
            </div>
            <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
              {artistAppearsOn.map((album) => (
                <AlbumCard key={album.id} album={album} onDownload={() => handleDownloadAlbum(album)} />
@@ -494,9 +604,9 @@ export const Artist = () => {
        {hasMore && !loadingMore && (
          <button
            onClick={() => fetchMoreAlbums()}
            className="px-4 py-2 mb-6 rounded bg-surface-muted hover:bg-surface-muted-dark"
            className="px-4 py-2 mb-6 rounded"
          >
            Load more
            Loading...
          </button>
        )}
        <div ref={sentinelRef} style={{ height: 1, width: "100%" }} />

@@ -153,6 +153,16 @@ export const Playlist = () => {
    }
  }, [playlistMetadata, items.length, totalTracks, loadMoreTracks]);

  // Auto progressive loading regardless of scroll
  useEffect(() => {
    if (!playlistMetadata) return;
    if (!hasMoreTracks || loadingTracks) return;
    const t = setTimeout(() => {
      loadMoreTracks();
    }, 300);
    return () => clearTimeout(t);
  }, [playlistMetadata, hasMoreTracks, loadingTracks, loadMoreTracks]);

  const handleDownloadTrack = (track: LibrespotTrackType) => {
    if (!track?.id) return;
    addItem({ spotifyId: track.id, type: "track", name: track.name });
@@ -227,11 +237,40 @@ export const Playlist = () => {
      {/* Playlist Header - Mobile Optimized */}
      <div className="bg-surface dark:bg-surface-dark border border-border dark:border-border-dark rounded-xl p-4 md:p-6 shadow-sm">
        <div className="flex flex-col items-center gap-4 md:gap-6">
          {playlistMetadata.picture ? (
            <img
              src={playlistMetadata.images?.at(0)?.url || "/placeholder.jpg"}
              src={playlistMetadata.picture}
              alt={playlistMetadata.name}
              className="w-32 h-32 sm:w-40 sm:h-40 md:w-48 md:h-48 object-cover rounded-lg shadow-lg mx-auto"
            />
          ) : (
            <div
              className="w-32 h-32 sm:w-40 sm:h-40 md:w-48 md:h-48 rounded-lg shadow-lg mx-auto overflow-hidden bg-surface-muted dark:bg-surface-muted-dark grid grid-cols-2 grid-rows-2"
            >
              {(Array.from(
                new Map(
                  filteredItems
                    .map(({ track }) => (track as any)?.album?.images?.at(-1)?.url)
                    .filter((u) => !!u)
                    .map((u) => [u, u] as const)
                ).values()
              ) as string[]).slice(0, 4).map((url, i) => (
                <img
                  key={`${url}-${i}`}
                  src={url}
                  alt={`Cover ${i + 1}`}
                  className="w-full h-full object-cover"
                />
              ))}
              {filteredItems.length === 0 && (
                <img
                  src="/placeholder.jpg"
                  alt={playlistMetadata.name}
                  className="col-span-2 row-span-2 w-full h-full object-cover"
                />
              )}
            </div>
          )}
          <div className="flex-grow space-y-2 text-center">
            <h1 className="text-2xl md:text-3xl font-bold text-content-primary dark:text-content-primary-dark leading-tight">{playlistMetadata.name}</h1>
            {playlistMetadata.description && (

@@ -3,7 +3,7 @@ import apiClient from "../lib/api-client";
import { toast } from "sonner";
import { useSettings } from "../contexts/settings-context";
import { Link } from "@tanstack/react-router";
import type { ArtistType, PlaylistType } from "../types/spotify";
import type { LibrespotArtistType, LibrespotPlaylistType } from "../types/librespot";
import { FaRegTrashAlt, FaSearch } from "react-icons/fa";

// --- Type Definitions ---
@@ -11,8 +11,8 @@ interface BaseWatched {
  itemType: "artist" | "playlist";
  spotify_id: string;
}
type WatchedArtist = ArtistType & { itemType: "artist" };
type WatchedPlaylist = PlaylistType & { itemType: "playlist" };
type WatchedArtist = LibrespotArtistType & { itemType: "artist" };
type WatchedPlaylist = LibrespotPlaylistType & { itemType: "playlist" };

type WatchedItem = WatchedArtist | WatchedPlaylist;
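Since both members carry a literal itemType, WatchedItem is a discriminated union, so the `as` casts used later in the render code could in principle be replaced by plain narrowing. A minimal sketch:

// Checking the discriminant narrows the type; no cast needed.
function coverUrl(item: WatchedItem): string | undefined {
  return item.itemType === "artist"
    ? item.portrait_group.image[0]?.url // item is WatchedArtist here
    : item.picture;                     // item is WatchedPlaylist here
}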

@@ -20,39 +20,77 @@ export const Watchlist = () => {
  const { settings, isLoading: settingsLoading } = useSettings();
  const [items, setItems] = useState<WatchedItem[]>([]);
  const [isLoading, setIsLoading] = useState(true);
  const [expectedCount, setExpectedCount] = useState<number | null>(null);

  // Utility to batch fetch details
  async function batchFetch<T>(
    ids: string[],
    fetchFn: (id: string) => Promise<T>,
    batchSize: number,
    onBatch: (results: T[]) => void
  ) {
    for (let i = 0; i < ids.length; i += batchSize) {
      const batchIds = ids.slice(i, i + batchSize);
      const batchResults = await Promise.all(
        batchIds.map((id) => fetchFn(id).catch(() => null))
      );
      onBatch(batchResults.filter(Boolean) as T[]);
    }
  }
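batchFetch caps concurrency at batchSize in-flight requests and surfaces results as each batch settles, which is what lets the grid below fill in incrementally; failed lookups resolve to null and are filtered out instead of rejecting the whole batch. A standalone usage sketch (the ids and response shape are illustrative):

// Inside an async function: fetch names five at a time, appending each settled batch.
const names: string[] = [];
await batchFetch<string>(
  ["id1", "id2", "id3"], // hypothetical ids
  (id) => apiClient.get<{ name: string }>(`/artist/info?id=${id}`).then((r) => r.data.name),
  5,
  (batch) => names.push(...batch),
);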

  const fetchWatchlist = useCallback(async () => {
    setIsLoading(true);
    setItems([]); // Clear previous items
    setExpectedCount(null);
    try {
      const [artistsRes, playlistsRes] = await Promise.all([
        apiClient.get<BaseWatched[]>("/artist/watch/list"),
        apiClient.get<BaseWatched[]>("/playlist/watch/list"),
      ]);

      const artistDetailsPromises = artistsRes.data.map((artist) =>
        apiClient.get<ArtistType>(`/artist/info?id=${artist.spotify_id}`),
      );
      const playlistDetailsPromises = playlistsRes.data.map((playlist) =>
        apiClient.get<PlaylistType>(`/playlist/info?id=${playlist.spotify_id}`),
      );
      // Prepare lists of IDs
      const artistIds = artistsRes.data.map((artist) => artist.spotify_id);
      const playlistIds = playlistsRes.data.map((playlist) => playlist.spotify_id);
      setExpectedCount(artistIds.length + playlistIds.length);

      const [artistDetailsRes, playlistDetailsRes] = await Promise.all([
        Promise.all(artistDetailsPromises),
        Promise.all(playlistDetailsPromises),
      ]);
      // Allow UI to render grid and skeletons immediately
      setIsLoading(false);

      const artists: WatchedItem[] = artistDetailsRes.map((res) => ({ ...res.data, itemType: "artist" }));
      const playlists: WatchedItem[] = playlistDetailsRes.map((res) => ({
        ...res.data,
        itemType: "playlist",
        spotify_id: res.data.id,
      // Helper to update state incrementally
      const appendItems = (newItems: WatchedItem[]) => {
        setItems((prev) => [...prev, ...newItems]);
      };

      // Fetch artist details in batches
      await batchFetch<LibrespotArtistType>(
        artistIds,
        (id) => apiClient.get<LibrespotArtistType>(`/artist/info?id=${id}`).then(res => res.data),
        5, // batch size
        (results) => {
          const items: WatchedArtist[] = results.map((data) => ({
            ...data,
            itemType: "artist",
          }));
          appendItems(items);
        }
      );

      setItems([...artists, ...playlists]);
      // Fetch playlist details in batches
      await batchFetch<LibrespotPlaylistType>(
        playlistIds,
        (id) => apiClient.get<LibrespotPlaylistType>(`/playlist/info?id=${id}`).then(res => res.data),
        5, // batch size
        (results) => {
          const items: WatchedPlaylist[] = results.map((data) => ({
            ...data,
            itemType: "playlist",
            spotify_id: data.id,
          }));
          appendItems(items);
        }
      );
    } catch {
      toast.error("Failed to load watchlist.");
    } finally {
      setIsLoading(false);
    }
  }, []);

@@ -110,7 +148,8 @@ export const Watchlist = () => {
    );
  }

  if (items.length === 0) {
  // Show "empty" only if not loading and nothing expected
  if (!isLoading && items.length === 0 && (!expectedCount || expectedCount === 0)) {
    return (
      <div className="text-center p-8">
        <h2 className="text-2xl font-bold mb-2 text-content-primary dark:text-content-primary-dark">Watchlist is Empty</h2>
@@ -135,7 +174,11 @@ export const Watchlist = () => {
        <div key={item.id} className="bg-surface dark:bg-surface-secondary-dark p-4 rounded-lg shadow space-y-2 flex flex-col">
          <a href={`/${item.itemType}/${item.id}`} className="flex-grow">
            <img
              src={item.images?.[0]?.url || "/images/placeholder.jpg"}
              src={
                item.itemType === "artist"
                  ? (item as WatchedArtist).portrait_group.image[0].url || "/images/placeholder.jpg"
                  : (item as WatchedPlaylist).picture || "/images/placeholder.jpg"
              }
              alt={item.name}
              className="w-full h-auto object-cover rounded-md aspect-square"
            />
@@ -158,6 +201,25 @@ export const Watchlist = () => {
            </div>
          </div>
        ))}
        {/* Skeletons for loading items */}
        {isLoading && expectedCount && items.length < expectedCount &&
          Array.from({ length: expectedCount - items.length }).map((_, idx) => (
            <div
              key={`skeleton-${idx}`}
              className="bg-surface dark:bg-surface-secondary-dark p-4 rounded-lg shadow space-y-2 flex flex-col animate-pulse"
            >
              <div className="flex-grow">
                <div className="w-full aspect-square bg-gray-200 dark:bg-gray-700 rounded-md mb-2" />
                <div className="h-5 bg-gray-200 dark:bg-gray-700 rounded w-3/4 mb-1" />
                <div className="h-4 bg-gray-100 dark:bg-gray-800 rounded w-1/2" />
              </div>
              <div className="flex gap-2 pt-2">
                <div className="w-full h-8 bg-gray-200 dark:bg-gray-700 rounded" />
                <div className="w-full h-8 bg-gray-100 dark:bg-gray-800 rounded" />
              </div>
            </div>
          ))
        }
      </div>
    </div>
  );

@@ -6,8 +6,8 @@ export interface LibrespotExternalUrls {

export interface LibrespotImage {
  url: string;
  width?: number;
  height?: number;
  width: number;
  height: number;
}

export interface LibrespotArtistStub {
@@ -18,17 +18,32 @@ export interface LibrespotArtistStub {
  external_urls?: LibrespotExternalUrls;
}

export interface LibrespotBiographyType {
  text: string;
  portrait_group: LibrespotArtistImageType;
}

export interface LibrespotTopTrackType {
  country: string;
  track: string[];
}

export interface LibrespotArtistImageType {
  image: LibrespotImage[];
}

// Full artist object (get_artist)
export interface LibrespotArtistType {
  id: string;
  name: string;
  images?: LibrespotImage[];
  external_urls?: LibrespotExternalUrls;
  followers?: { total: number };
  genres?: string[];
  popularity?: number;
  type?: "artist";
  uri?: string;
  top_track: LibrespotTopTrackType[];
  portrait_group: LibrespotArtistImageType;
  popularity: number;
  biography?: LibrespotBiographyType[];
  album_group?: string[];
  single_group?: string[];
  compilation_group?: string[];
  appears_on_group?: string[];
}

export interface LibrespotCopyright {
@@ -59,24 +74,23 @@ export interface LibrespotTrackType {
  disc_number: number;
  duration_ms: number;
  explicit: boolean;
  external_ids?: { isrc?: string };
  external_ids: { isrc?: string };
  external_urls: LibrespotExternalUrls;
  id: string;
  name: string;
  popularity?: number;
  popularity: number;
  track_number: number;
  type: "track";
  uri: string;
  preview_url?: string;
  has_lyrics?: boolean;
  earliest_live_timestamp?: number;
  licensor_uuid?: string; // when available
  preview_url: string;
  has_lyrics: boolean;
  earliest_live_timestamp: number;
  licensor_uuid: string; // when available
}

export interface LibrespotAlbumType {
  album_type: "album" | "single" | "compilation";
  total_tracks: number;
  available_markets?: string[];
  external_urls: LibrespotExternalUrls;
  id: string;
  images: LibrespotImage[];
@@ -91,8 +105,8 @@ export interface LibrespotAlbumType {
  tracks: string[] | LibrespotTrackType[];
  copyrights?: LibrespotCopyright[];
  external_ids?: { upc?: string };
  label?: string;
  popularity?: number;
  label: string;
  popularity: number;
}

// Playlist types
@@ -130,13 +144,14 @@ export interface LibrespotPlaylistTracksPageType {

export interface LibrespotPlaylistType {
  name: string;
  description?: string | null;
  collaborative?: boolean;
  images?: Array<Pick<LibrespotImage, "url"> & Partial<LibrespotImage>>;
  id: string;
  description: string | null;
  collaborative: boolean;
  owner: LibrespotPlaylistOwnerType;
  snapshot_id: string;
  tracks: LibrespotPlaylistTracksPageType;
  type: "playlist";
  picture: string;
}

// Type guards