Merge branch 'dev' into buttons-no-dropdown
.dockerignore
@@ -1,62 +1,36 @@
# Git
.git
.gitignore
.gitattributes
# Allowlist minimal build context
*

# Docker
docker-compose.yaml
docker-compose.yml
Dockerfile
.dockerignore
# Backend
!requirements.txt
!app.py
!routes/**
# Re-ignore caches and compiled files inside routes
routes/**/__pycache__/
routes/**/.pytest_cache/
routes/**/*.pyc
routes/**/*.pyo

# Node
node_modules
spotizerr-ui/node_modules
npm-debug.log
pnpm-lock.yaml
# Frontend: only what's needed to build
!spotizerr-ui/package.json
!spotizerr-ui/pnpm-lock.yaml
!spotizerr-ui/pnpm-workspace.yaml
!spotizerr-ui/index.html
!spotizerr-ui/vite.config.ts
!spotizerr-ui/postcss.config.mjs
!spotizerr-ui/tsconfig.json
!spotizerr-ui/tsconfig.app.json
!spotizerr-ui/tsconfig.node.json
!spotizerr-ui/src/**
!spotizerr-ui/public/**
!spotizerr-ui/scripts/**
# Exclude heavy/unnecessary frontend folders
spotizerr-ui/node_modules/**
spotizerr-ui/dist/**
spotizerr-ui/dev-dist/**

# Python
__pycache__
*.pyc
*.pyo
*.pyd
.Python
.env
.venv
venv/
env/
.env.example

# Editor/OS
.vscode
.idea
.DS_Store
*.swp

# Application data
credentials.json
test.py
downloads/
creds/
Test.py
prgs/
flask_server.log
test.sh
routes/__pycache__/*
routes/utils/__pycache__/*
search_test.py
config/main.json
.cache
config/state/queue_state.json
output.log
queue_state.json
search_demo.py
celery_worker.log
static/js/*
# Always exclude local data/logs/tests/etc.
.data/
logs/
data
Downloads/
tests/

# Non-essential files
docs/
README.md
.env.example
@@ -19,12 +19,6 @@ REDIS_PASSWORD=CHANGE_ME
# Set to true to filter out explicit content.
EXPLICIT_FILTER=false

# User and group ID for the container. Sets the owner of the downloaded files.
PUID=1000
PGID=1000

# Optional: Sets the default file permissions for newly created files within the container.
UMASK=0022
11
.readthedocs.yaml
Normal file
@@ -0,0 +1,11 @@
version: 2
mkdocs:
  configuration: mkdocs.yml
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"
  jobs:
    post_install:
      - pip install --upgrade pip
      - pip install mkdocs mkdocs-material
81
Dockerfile
@@ -7,40 +7,71 @@ RUN pnpm install --frozen-lockfile
COPY spotizerr-ui/. .
RUN pnpm build

# Stage 2: Final application image
FROM python:3.12-slim
# Stage 2: Python dependencies builder (create relocatable deps dir)
FROM python:3.11-slim AS py-deps
WORKDIR /app
COPY requirements.txt .
COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/
RUN uv pip install --target /python -r requirements.txt

# Set an environment variable for non-interactive frontend installation
ENV DEBIAN_FRONTEND=noninteractive
# Stage 3: Fetch static ffmpeg/ffprobe binaries
FROM debian:stable-slim AS ffmpeg
ARG TARGETARCH
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates curl xz-utils jq \
    && rm -rf /var/lib/apt/lists/*
RUN set -euo pipefail; \
    case "$TARGETARCH" in \
        amd64) ARCH_SUFFIX=linux64 ;; \
        arm64) ARCH_SUFFIX=linuxarm64 ;; \
        *) echo "Unsupported arch: $TARGETARCH" && exit 1 ;; \
    esac; \
    ASSET_URL=$(curl -fsSL https://api.github.com/repos/BtbN/FFmpeg-Builds/releases/latest \
        | jq -r ".assets[] | select(.name | endswith(\"${ARCH_SUFFIX}-gpl.tar.xz\")) | .browser_download_url" \
        | head -n1); \
    if [ -z "$ASSET_URL" ]; then \
        echo "Failed to resolve FFmpeg asset for arch ${ARCH_SUFFIX}" && exit 1; \
    fi; \
    echo "Fetching FFmpeg from: $ASSET_URL"; \
    curl -fsSL -o /tmp/ffmpeg.tar.xz "$ASSET_URL"; \
    tar -xJf /tmp/ffmpeg.tar.xz -C /tmp; \
    mv /tmp/ffmpeg-* /ffmpeg

# Stage 4: Prepare world-writable runtime directories
FROM busybox:1.36.1-musl AS runtime-dirs
RUN mkdir -p /artifact/downloads /artifact/data/config /artifact/data/creds /artifact/data/watch /artifact/data/history /artifact/logs/tasks \
    && touch /artifact/.cache \
    && chmod -R 0777 /artifact

# Stage 5: Final application image (distroless)
FROM gcr.io/distroless/python3-debian12

LABEL org.opencontainers.image.source="https://github.com/Xoconoch/spotizerr"

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    ffmpeg gosu \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
# Ensure Python finds vendored site-packages and unbuffered output
ENV PYTHONPATH=/python
ENV PYTHONUNBUFFERED=1
ENV PYTHONUTF8=1
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8

# Install Python dependencies
COPY requirements.txt .
# Copy application code
COPY --chown=65532:65532 . .

COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/
RUN uv pip install --system -r requirements.txt
# Copy compiled assets from the frontend build
COPY --from=frontend-builder --chown=65532:65532 /app/spotizerr-ui/dist ./spotizerr-ui/dist

# Copy application code (excluding UI source and TS source)
COPY . .
# Copy vendored Python dependencies
COPY --from=py-deps --chown=65532:65532 /python /python

# Copy compiled assets from previous stages
COPY --from=frontend-builder /app/spotizerr-ui/dist ./spotizerr-ui/dist
# Copy static ffmpeg binaries
COPY --from=ffmpeg --chown=65532:65532 /ffmpeg/bin/ffmpeg /usr/local/bin/ffmpeg
COPY --from=ffmpeg --chown=65532:65532 /ffmpeg/bin/ffprobe /usr/local/bin/ffprobe

# Create necessary directories with proper permissions
RUN mkdir -p downloads data/config data/creds data/watch data/history logs/tasks && \
    chmod -R 777 downloads data logs
# Copy pre-created world-writable runtime directories
COPY --from=runtime-dirs --chown=65532:65532 /artifact/ ./

# Make entrypoint script executable
RUN chmod +x entrypoint.sh

# Set entrypoint to our script
ENTRYPOINT ["/app/entrypoint.sh"]
# No shell or package manager available in distroless
ENTRYPOINT ["python3", "app.py"]
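The vendored-dependency pattern above works because `uv pip install --target /python` drops packages into a plain directory, and `ENV PYTHONPATH=/python` puts that directory on the interpreter's import path inside the distroless image. A minimal sanity-check sketch, assuming you can exec a Python process in the final image:

```python
import os
import sys

# With ENV PYTHONPATH=/python set in the image, /python lands on sys.path,
# so packages vendored by `uv pip install --target /python` resolve normally.
print("/python" in os.environ.get("PYTHONPATH", ""))

import fastapi  # expected to resolve from /python in the final image
print(fastapi.__file__)
```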
174
README.md
@@ -27,157 +27,6 @@ If you self-host a music server with other users than yourself, you almost certa
<img width="1588" height="994" alt="image" src="https://github.com/user-attachments/assets/e34d7dbb-29e3-4d75-bcbd-0cee03fa57dc" />
</details>

## ✨ Key Features

### 🎵 **Granular download support**
- **Individual Tracks** - Download any single track
- **Complete Albums** - Download entire albums with proper metadata
- **Full Playlists** - Download complete playlists (even massive ones with 1000+ tracks)
- **Artist Discographies** - Download an artist's complete catalog with filtering options
- **Spotify URL Support** - Paste any Spotify URL directly to queue downloads

### 📱 **Modern Web Interface**
- **Progressive Web App (PWA)** - Install as a native client on mobile/desktop (installation process may vary depending on the browser/device)
- **Multiple Themes** - Light, dark, and system themes
- **Touch-friendly** - Swipe gestures and mobile-optimized controls

### 🤖 **Intelligent Monitoring**
- **Playlist Watching** - Automatically download new tracks added to Spotify playlists
- **Artist Watching** - Monitor artists for new releases and download them automatically
- **Configurable Intervals** - Set how often to check for updates
- **Manual Triggers** - Force immediate checks when needed

### ⚡ **Advanced Queue Management**
- **Concurrent Downloads** - Configure multiple simultaneous downloads
- **Real-time Updates** - Live progress updates via Server-Sent Events
- **Duplicate Prevention** - Automatically prevents duplicate downloads
- **Queue Persistence** - Downloads continue even after browser restart
- **Cancellation Support** - Cancel individual downloads or clear entire queue

### 🔧 **Extensive Configuration**
- **Quality Control** - Configure audio quality per service (limitations per account tier apply)
- **Format Options** - Convert to MP3, FLAC, AAC, OGG, OPUS, WAV, ALAC in various bitrates
- **Custom Naming** - Flexible file and folder naming patterns
- **Content Filtering** - Hide explicit content if desired

### 📊 **Comprehensive History**
- **Download Tracking** - Complete history of all downloads with metadata
- **Success Analytics** - Track success rates, failures, and skipped items
- **Search & Filter** - Find past downloads by title, artist, or status
- **Detailed Logs** - View individual track status for album/playlist downloads
- **Export Data** - Access complete metadata and external service IDs

### 👥 **Multi-User Support**
- **User Authentication** - Secure login system with JWT tokens
- **SSO Integration** - Single Sign-On with Google and GitHub
- **Admin Panel** - User management and system configuration

## 🚀 Quick Start

### Prerequisites
- Docker and Docker Compose
- Spotify account(s)
- Deezer account(s) (optional, but recommended)
- Spotify API credentials (Client ID & Secret from [Spotify Developer Dashboard](https://developer.spotify.com/dashboard))

### Installation

1. **Create project directory**
```bash
mkdir spotizerr && cd spotizerr
```

2. **Setup environment file**
```bash
# Download .env.example from the repository and create .env
# Update all variables (e.g. Redis credentials, PUID/PGID, UMASK)
```

3. **Copy docker-compose.yaml**
```bash
# Download docker-compose.yaml from the repository
```

4. **Start the application**
```bash
docker compose up -d
```

5. **Next steps**
- Before doing anything, it is recommended to go straight to [Configuration](#-configuration)

## 🔧 Configuration

### Service Accounts Setup

1. **Spotify setup**
   - Spotify is very restrictive, so use the [spotizerr-auth](https://github.com/Xoconoch/spotizerr-auth) tool on a computer with the Spotify client installed to simplify this part of the setup.

2. **Deezer setup (Optional but recommended for better stability, even if it's a free account)**
   - Get your Deezer ARL token:
     - **Chrome/Edge**: Open [Deezer](https://www.deezer.com/), press F12 → Application → Cookies → "https://www.deezer.com" → Copy "arl" value
     - **Firefox**: Open [Deezer](https://www.deezer.com/), press F12 → Storage → Cookies → "https://www.deezer.com" → Copy "arl" value
   - Add the ARL token in Settings → Accounts

3. **Configure Download Settings**
   - Set audio quality preferences
   - Configure output format and naming
   - Adjust concurrent download limits

### Watch System Setup

1. **Enable Monitoring**
   - Go to Settings → Watch
   - Enable the watch system
   - Set check intervals

2. **Add Items to Watch**
   - Search for playlists or artists
   - Click the "Watch" button
   - New content will be automatically downloaded

## 📋 Usage Examples

### Download a Playlist
1. Search for the playlist or paste its Spotify URL
2. Click the download button
3. Monitor progress in the real-time queue

### Monitor an Artist
1. Search for the artist
2. Click "Add to Watchlist"
3. Configure which release types to monitor (albums, singles, etc.)
4. New releases will be automatically downloaded

### Bulk Download an Artist's Discography
1. Go to the artist page
2. Select release types (albums, singles, compilations)
3. Click "Download Discography"
4. All albums will be queued automatically

## 🔍 Advanced Features

### Custom File Naming
Configure how files and folders are named (a sketch of how a pattern expands follows the list):
- `%artist%/%album%/%tracknum%. %title%`
- `%ar_album%/%album% (%year%)/%title%`
- Support for track numbers, artists, albums, years, and more
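A minimal sketch of how such a pattern might expand. The placeholder names mirror the examples above; the metadata values are hypothetical, and the real substitution happens inside Spotizerr's downloader:

```python
# Hypothetical metadata; actual values come from the track being downloaded.
track = {
    "%artist%": "Daft Punk",
    "%album%": "Discovery",
    "%tracknum%": "03",
    "%title%": "Digital Love",
}

path = "%artist%/%album%/%tracknum%. %title%"
for placeholder, value in track.items():
    path = path.replace(placeholder, value)

print(path)  # Daft Punk/Discovery/03. Digital Love
```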

### Quality Settings
- **Spotify**: OGG 96k, 160k, and 320k (320k requires Premium)
- **Deezer**: MP3 128k, MP3 320k (sometimes requires Premium), and FLAC (Premium only)
- **Conversion**: Convert to any supported format with custom bitrate

### Fallback System
- Configure primary and fallback services
- Automatically switches if primary service fails
- Useful for geographic restrictions or account limits

### Real-time Mode
- **Spotify only**: Matches track length with download time for optimal timing

## 🆘 Support & Troubleshooting

### Common Issues

**Downloads not starting?**
@@ -211,7 +60,7 @@ Access logs via Docker:
docker logs spotizerr
```

**Log Locations:**
**Log and File Locations:**
- Application Logs: `docker logs spotizerr` (main app and Celery workers)
- Individual Task Logs: `./logs/tasks/` (inside container, maps to your volume)
- Credentials: `./data/creds/`
@@ -221,6 +70,12 @@ docker logs spotizerr
- Download History Database: `./data/history/`
- Spotify Token Cache: `./.cache/` (if `SPOTIPY_CACHE_PATH` is mapped)

**Global Logging Level:**
The application's global logging level can be controlled via the `LOG_LEVEL` environment variable.
Supported values (case-insensitive): `CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`, `NOTSET`.
If not set, the default logging level is `WARNING`.
Example in `.env` file: `LOG_LEVEL=DEBUG`
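A condensed sketch of how the app resolves this variable (the full version lives in `app.py`); this sketch also falls back to `WARNING` for unknown values:

```python
import logging
import os

# Mirrors the LOG_LEVELS mapping defined in app.py.
LOG_LEVELS = {
    "CRITICAL": logging.CRITICAL,
    "ERROR": logging.ERROR,
    "WARNING": logging.WARNING,
    "INFO": logging.INFO,
    "DEBUG": logging.DEBUG,
    "NOTSET": logging.NOTSET,
}

level = LOG_LEVELS.get(os.getenv("LOG_LEVEL", "WARNING").upper(), logging.WARNING)
logging.getLogger().setLevel(level)
```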
## 🤝 Contributing

1. Fork the repository
@@ -228,6 +83,21 @@ docker logs spotizerr
3. Make your changes
4. Submit a pull request

## 💻 Development Setup

To run Spotizerr in development mode:

1. **Backend (API):**
   * Ensure Python dependencies are installed (e.g., using `uv pip install -r requirements.txt`).
   * Start a Redis server.
   * Run the app inside your activated virtual env: `python3 app.py`
2. **Frontend (UI):**
   * Navigate to `spotizerr-ui/`.
   * Install dependencies: `pnpm install`.
   * Start the development server: `pnpm dev`.

## 📄 License

This project is licensed under the GPL yada yada, see [LICENSE](LICENSE) file for details.
101
app.py
@@ -13,6 +13,16 @@ import redis
import socket
from urllib.parse import urlparse

# Define a mapping from string log levels to logging constants
LOG_LEVELS = {
    "CRITICAL": logging.CRITICAL,
    "ERROR": logging.ERROR,
    "WARNING": logging.WARNING,
    "INFO": logging.INFO,
    "DEBUG": logging.DEBUG,
    "NOTSET": logging.NOTSET,
}

# Run DB migrations as early as possible, before importing any routers that may touch DBs
try:
    from routes.migrations import run_migrations_if_needed
@@ -27,13 +37,28 @@ except Exception as e:
    )
    sys.exit(1)

# Import route routers (to be created)
# Get log level from environment variable, default to WARNING
log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper()
log_level = LOG_LEVELS.get(log_level_str, logging.INFO)

# Apply process umask from environment as early as possible
_umask_value = os.getenv("UMASK")
if _umask_value:
    try:
        os.umask(int(_umask_value, 8))
    except Exception:
        # Defer logging setup; avoid failing on invalid UMASK
        pass


# Import and initialize routes (this will start the watch manager)
from routes.auth.credentials import router as credentials_router
from routes.auth.auth import router as auth_router
from routes.content.artist import router as artist_router
from routes.content.album import router as album_router
from routes.content.track import router as track_router
from routes.content.playlist import router as playlist_router
from routes.content.bulk_add import router as bulk_add_router
from routes.core.search import router as search_router
from routes.core.history import router as history_router
from routes.system.progress import router as prgs_router
@@ -51,7 +76,6 @@ from routes.auth.middleware import AuthMiddleware
# Import watch manager controls (start/stop) without triggering side effects
from routes.utils.watch.manager import start_watch_manager, stop_watch_manager

# Import and initialize routes (this will start the watch manager)


# Configure application-wide logging
@@ -61,12 +85,23 @@ def setup_logging():
    logs_dir = Path("logs")
    logs_dir.mkdir(exist_ok=True)

    # Ensure required runtime directories exist
    for p in [
        Path("downloads"),
        Path("data/config"),
        Path("data/creds"),
        Path("data/watch"),
        Path("data/history"),
        Path("logs/tasks"),
    ]:
        p.mkdir(parents=True, exist_ok=True)

    # Set up log file paths
    main_log = logs_dir / "spotizerr.log"

    # Configure root logger
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    root_logger.setLevel(log_level)

    # Clear any existing handlers from the root logger
    if root_logger.hasHandlers():
@@ -83,12 +118,12 @@ def setup_logging():
        main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
    )
    file_handler.setFormatter(log_format)
    file_handler.setLevel(logging.INFO)
    file_handler.setLevel(log_level)

    # Console handler for stderr
    console_handler = logging.StreamHandler(sys.stderr)
    console_handler.setFormatter(log_format)
    console_handler.setLevel(logging.INFO)
    console_handler.setLevel(log_level)

    # Add handlers to root logger
    root_logger.addHandler(file_handler)
@@ -101,16 +136,23 @@ def setup_logging():
        "routes.utils.celery_manager",
        "routes.utils.celery_tasks",
        "routes.utils.watch",
        "uvicorn",  # General Uvicorn logger
        "uvicorn.access",  # Uvicorn access logs
        "uvicorn.error",  # Uvicorn error logs
    ]:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.INFO)
        logger.propagate = True  # Propagate to root logger
        logger.setLevel(log_level)
        # For uvicorn.access, we explicitly set propagate to False to prevent duplicate logging
        # if access_log=False is used in uvicorn.run, and to ensure our middleware handles it.
        logger.propagate = False if logger_name == "uvicorn.access" else True

    logging.info("Logging system initialized")


def check_redis_connection():
    """Check if Redis is available and accessible"""
    from routes.utils.celery_config import REDIS_URL

    if not REDIS_URL:
        logging.error("REDIS_URL is not configured. Please check your environment.")
        return False
@@ -156,6 +198,20 @@ async def lifespan(app: FastAPI):
    # Startup
    setup_logging()

    # Run migrations before initializing services
    try:
        from routes.migrations import run_migrations_if_needed

        run_migrations_if_needed()
        logging.getLogger(__name__).info(
            "Database migrations executed (if needed) early in startup."
        )
    except Exception as e:
        logging.getLogger(__name__).error(
            f"Database migration step failed early in startup: {e}", exc_info=True
        )
        sys.exit(1)

    # Check Redis connection
    if not check_redis_connection():
        logging.error(
@@ -165,6 +221,8 @@ async def lifespan(app: FastAPI):

    # Start Celery workers
    try:
        from routes.utils.celery_manager import celery_manager

        celery_manager.start()
        logging.info("Celery workers started successfully")
    except Exception as e:
@@ -172,6 +230,8 @@ async def lifespan(app: FastAPI):

    # Start Watch Manager after Celery is up
    try:
        from routes.utils.watch.manager import start_watch_manager, stop_watch_manager

        start_watch_manager()
        logging.info("Watch Manager initialized and registered for shutdown.")
    except Exception as e:
@@ -184,12 +244,16 @@ async def lifespan(app: FastAPI):

    # Shutdown
    try:
        from routes.utils.watch.manager import start_watch_manager, stop_watch_manager

        stop_watch_manager()
        logging.info("Watch Manager stopped")
    except Exception as e:
        logging.error(f"Error stopping Watch Manager: {e}")

    try:
        from routes.utils.celery_manager import celery_manager

        celery_manager.stop()
        logging.info("Celery workers stopped")
    except Exception as e:
@@ -215,13 +279,31 @@ def create_app():
    )

    # Add authentication middleware (only if auth is enabled)
    try:
        from routes.auth import AUTH_ENABLED
        from routes.auth.middleware import AuthMiddleware

        if AUTH_ENABLED:
            app.add_middleware(AuthMiddleware)
            logging.info("Authentication system enabled")
        else:
            logging.info("Authentication system disabled")
    except Exception as e:
        logging.warning(f"Auth system initialization failed or unavailable: {e}")

    # Register routers with URL prefixes
    from routes.auth.auth import router as auth_router
    from routes.system.config import router as config_router
    from routes.core.search import router as search_router
    from routes.auth.credentials import router as credentials_router
    from routes.content.album import router as album_router
    from routes.content.track import router as track_router
    from routes.content.playlist import router as playlist_router
    from routes.content.bulk_add import router as bulk_add_router
    from routes.content.artist import router as artist_router
    from routes.system.progress import router as prgs_router
    from routes.core.history import router as history_router

    app.include_router(auth_router, prefix="/api/auth", tags=["auth"])

    # Include SSO router if available
@@ -240,6 +322,7 @@ def create_app():
    app.include_router(album_router, prefix="/api/album", tags=["album"])
    app.include_router(track_router, prefix="/api/track", tags=["track"])
    app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
    app.include_router(bulk_add_router, prefix="/api/bulk", tags=["bulk"])
    app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
    app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
    app.include_router(history_router, prefix="/api/history", tags=["history"])
@@ -363,4 +446,4 @@ if __name__ == "__main__":
    except ValueError:
        port = 7171

    uvicorn.run(app, host=host, port=port, log_level="info", access_log=True)
    uvicorn.run(app, host=host, port=port, log_level=log_level_str.lower(), access_log=False)
docker-compose.yaml
@@ -1,16 +1,24 @@
# HEY, YOU! READ THE DOCS BEFORE YOU DO ANYTHING!
# https://spotizerr.rtfd.io

name: spotizerr
services:
  spotizerr:
    image: cooldockerizer93/spotizerr
    user: "1000:1000" # Spotizerr user:group ids
    volumes:
      - ./data:/app/data
      - ./downloads:/app/downloads
      - ./logs:/app/logs
      # Ensure these directories and the .cache file exist and are writable by the container user
      - ./data:/app/data # data directory, contains config, creds, watch, history
      - ./downloads:/app/downloads # downloads directory, contains downloaded files
      - ./logs:/app/logs # logs directory, contains logs
      - ./.cache:/app/.cache # cache file
    ports:
      # Port to expose the app on
      - 7171:7171
    container_name: spotizerr-app
    restart: unless-stopped
    env_file:
      # Ensure you have a .env file in the root of the project, with the correct values
      - .env
    depends_on:
      - redis
docs/api.md
@@ -196,6 +196,8 @@ Download an entire album.
Get album metadata.
**Query Parameters:**
- `id`: Spotify album ID
- `limit`: Tracks page size (optional)
- `offset`: Tracks page offset (optional)

### Playlist Downloads

@@ -216,6 +218,7 @@ Download an entire playlist.
Get playlist metadata.
**Query Parameters:**
- `id`: Spotify playlist ID
- `include_tracks`: true to include tracks (default: false)

#### `GET /playlist/metadata`
Get detailed playlist metadata including tracks.
@@ -244,14 +247,12 @@ Download artist's discography.
}
```

#### `GET /artist/download/cancel`
**Query Parameters:**
- `task_id`: Task ID to cancel

#### `GET /artist/info`
Get artist metadata.
**Query Parameters:**
- `id`: Spotify artist ID
- `limit`: Albums page size (default: 10, min: 1)
- `offset`: Albums page offset (default: 0, min: 0)

## 📺 Watch Functionality

@@ -371,11 +372,11 @@ Search Spotify content.
### Task Monitoring

#### `GET /prgs/list`
List all tasks with optional filtering.
List tasks with pagination.
**Query Parameters:**
- `status`: Filter by status (`pending`, `running`, `completed`, `failed`)
- `download_type`: Filter by type (`track`, `album`, `playlist`)
- `limit`: Results limit
- `page`: Page number (default: 1)
- `limit`: Items per page (default: 50, max: 100)
- `active_only`: If true, only return active tasks

#### `GET /prgs/{task_id}`
Get specific task details and progress.
@@ -383,7 +384,10 @@ Get specific task details and progress.
#### `GET /prgs/updates`
Get task updates since last check.
**Query Parameters:**
- `since`: Timestamp to get updates since
- `since`: Unix timestamp (required for delta updates). If omitted, returns a paginated snapshot.
- `page`: Page number for non-active tasks (default: 1)
- `limit`: Items per page for non-active tasks (default: 20, max: 100)
- `active_only`: If true, only return active tasks

#### `GET /prgs/stream`
**Server-Sent Events (SSE)** endpoint for real-time progress updates.
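A hedged sketch of consuming this stream with `httpx` (already in `requirements.txt`); the host, port, and `/api/prgs` prefix are assumptions taken from the default compose setup and `app.py`:

```python
import httpx

# Read the SSE stream line by line; each "data:" line carries a JSON payload.
with httpx.stream("GET", "http://localhost:7171/api/prgs/stream", timeout=None) as resp:
    for line in resp.iter_lines():
        if line.startswith("data:"):
            print(line[len("data:"):].strip())
```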
@@ -448,13 +452,13 @@ Get download statistics.
#### `GET /history/search`
Search download history.
**Query Parameters:**
- `q`: Search query
- `field`: Field to search (`name`, `artist`, `url`)
- `q`: Search query (required)
- `limit`: Max results (default: 50, max: 200)

#### `GET /history/recent`
Get recent downloads.
**Query Parameters:**
- `hours`: Hours to look back (default: 24)
- `limit`: Max results (default: 20, max: 100)

#### `GET /history/failed`
Get failed downloads.
@@ -464,8 +468,7 @@ Clean up old history entries.
**Request:**
```json
{
  "older_than_days": 30,
  "keep_failed": true
  "days_old": 30
}
```
16
docs/index.md
Normal file
@@ -0,0 +1,16 @@
## Spotizerr Documentation

Start with Getting started, then explore the sections below.

- [Getting started](user/getting-started.md)
- [Configuration](user/configuration.md)
- [Environment](user/environment.md)
- [Tracks](user/tracks.md)
- [Albums](user/albums.md)
- [Playlists](user/playlists.md)
- [Artists](user/artists.md)
- [Watchlist](user/watchlist.md)
- [History](user/history.md)
- [Multi-user](user/multi-user.md)

For API details, see [API](api.md).
13
docs/user/albums.md
Normal file
@@ -0,0 +1,13 @@
## Albums

- Open from Search or an Artist page.
- Actions:
  - Download full album or any track
  - Browse tracklist (order, artists, duration)
  - Large albums: tracks load in pages as you scroll
  - Explicit filter hides explicit tracks when enabled in Config

Endpoints:
- GET `/api/album/info?id=...&limit=50&offset=...` — album metadata + paged tracks
- GET `/api/album/download/{album_id}` — queue album download
- GET `/api/prgs/stream` — live progress via SSE
26
docs/user/artists.md
Normal file
@@ -0,0 +1,26 @@
## Artists

- Open from Search.
- Discography sections: Albums, Singles, Compilations, Appears On (infinite scroll)
- Download:
  - Download all (queues albums by selected types)
  - Download any album individually
- Watch:
  - Add/remove artist to Watchlist
  - Configure release types and intervals in Configuration → Watch

How to monitor an artist:
1. Search the artist and open their page
2. Click Watch
3. Configure in Configuration → Watch

How to download discography:
1. Open the artist page
2. Select release types (Albums, Singles, Compilations)
3. Click Download All; track in Queue and History

Endpoints:
- GET `/api/artist/info?id=...&limit=10&offset=...` — metadata + paged albums
- GET `/api/artist/download/{artist_id}?album_type=album,single,compilation` — queue discography
- PUT `/api/artist/watch/{artist_id}` / DELETE `/api/artist/watch/{artist_id}`
- GET `/api/artist/watch/{artist_id}/status`
44
docs/user/configuration.md
Normal file
@@ -0,0 +1,44 @@
## Configuration

See also: [Environment variables](environment.md)

Open Configuration in the web UI. Tabs:

- General (admin)
  - App version, basic info
- Downloads (admin)
  - Concurrent downloads, retry behavior
  - Quality/format defaults and conversion
  - Real-time mode: aligns download time with track length
- Formatting (admin)
  - File/folder naming patterns (examples)
    - `%artist%/%album%/%tracknum%. %title%`
    - `%ar_album%/%album% (%year%)/%title%`
- Accounts (admin)
  - Spotify: use `spotizerr-auth` to add credentials
  - Deezer ARL (optional):
    - Chrome/Edge: DevTools → Application → Cookies → https://www.deezer.com → copy `arl`
    - Firefox: DevTools → Storage → Cookies → https://www.deezer.com → copy `arl`
    - Paste ARL in Accounts
  - Select main account when multiple exist
- Watch (admin)
  - Enable/disable watch system
  - Set check intervals
  - Manually trigger checks (artists/playlists)
- Server (admin)
  - System info and advanced settings
- Profile (all users when auth is enabled)
  - Change password, view role and email

Quality formats (reference):
- Spotify: OGG 96k/160k/320k (320k requires Premium)
- Deezer: MP3 128k/320k (320k may require Premium), FLAC (Premium)
- Conversion: MP3/FLAC/AAC/OGG/OPUS/WAV/ALAC with custom bitrate

Fallback system:
- Configure primary and fallback services
- Automatically switches if primary fails (useful for geo/account limits)

Notes:
- Explicit content filter applies in pages (e.g., hides explicit tracks on album/playlist views)
- Watch system must be enabled before adding items
36
docs/user/environment.md
Normal file
@@ -0,0 +1,36 @@
## Environment variables

Location: project `.env`. Minimal reference for server admins.

### Core
- HOST: Interface to bind (default `0.0.0.0`)
- EXPLICIT_FILTER: Filter explicit content (`true|false`, default `false`)

### Redis
- REDIS_HOST: Hostname (default `redis`)
- REDIS_PORT: Port (default `6379`)
- REDIS_DB: Database index (default `0`)
- REDIS_PASSWORD: Password

### File ownership & permissions
- UMASK: Default permissions for new files (default `0022`)
- SKIP_SET_PERMISSIONS: Skip permission fix on startup (`true|false`, default `false`)
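As a quick illustration of what `UMASK=0022` means in practice (plain octal arithmetic, not Spotizerr code): new files start from mode 0666 and directories from 0777, and the umask bits are cleared.

```python
# Illustrative umask arithmetic; not taken from the Spotizerr codebase.
umask = 0o022
print(oct(0o666 & ~umask))  # 0o644 -> rw-r--r-- for new files
print(oct(0o777 & ~umask))  # 0o755 -> rwxr-xr-x for new directories
```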
### Multi-user & auth
- ENABLE_AUTH: Enable authentication (`true|false`, default `false`)
- JWT_SECRET: Long random string for tokens (required if auth enabled)
- JWT_EXPIRATION_HOURS: Session duration in hours (default `720`)
- DEFAULT_ADMIN_USERNAME: Seed admin username (default `admin`)
- DEFAULT_ADMIN_PASSWORD: Seed admin password (change it!)
- DISABLE_REGISTRATION: Disable public signups (`true|false`, default `false`)

### SSO
- SSO_ENABLED: Enable SSO (`true|false`)
- SSO_BASE_REDIRECT_URI: Base backend callback (e.g. `http://127.0.0.1:7171/api/auth/sso/callback`)
- FRONTEND_URL: Public UI base (e.g. `http://127.0.0.1:7171`)
- GOOGLE_CLIENT_ID / GOOGLE_CLIENT_SECRET
- GITHUB_CLIENT_ID / GITHUB_CLIENT_SECRET

### Tips
- If running behind a reverse proxy, set `FRONTEND_URL` and `SSO_BASE_REDIRECT_URI` to public URLs.
- Change `DEFAULT_ADMIN_*` on first login or disable registration and create users from the admin panel.
90
docs/user/getting-started.md
Normal file
@@ -0,0 +1,90 @@
## Getting started

### Prerequisites
- Docker and Docker Compose
- Spotify account(s)
- Deezer account (optional, recommended for FLAC)
- Spotify API `client_id` and `client_secret` (from Spotify Developer Dashboard)

Quick start (Docker Compose):

```bash
mkdir spotizerr && cd spotizerr
mkdir -p data logs downloads && touch .cache
wget https://github.com/spotizerr-dev/spotizerr/blob/main/docker-compose.yaml

# Before running this last command, check your docker compose file first, it is well-documented.
docker compose up -d
```

### Initial setup
- Open the web UI (default: `http://localhost:7171`)
- Go to Configuration → Accounts
- Use `spotizerr-auth` to register Spotify credentials quickly

Spotify account setup with spotizerr-auth:

```bash
docker run --network=host --rm -it cooldockerizer93/spotizerr-auth
```
or, if Docker doesn't work:

#### Alternative installers

<details>
<summary>Linux / macOS</summary>

```bash
python3 -m venv .venv && source .venv/bin/activate && pip install spotizerr-auth
```

</details>

<details>
<summary>Windows (PowerShell)</summary>

```powershell
python -m venv .venv; .venv\Scripts\Activate.ps1; pip install spotizerr-auth
```

</details>

<details>
<summary>Windows (cmd.exe)</summary>

```cmd
python -m venv .venv && .venv\Scripts\activate && pip install spotizerr-auth
```

</details>

Then run `spotizerr-auth`.

_Note: You will have to activate the virtual environment every time you want to register a new account._

### Registering an account
- Ensure the Spotify client is open before starting
- Enter the Spotizerr URL (e.g., http://localhost:7171)
- Enter Spotify API `client_id` and `client_secret` if prompted (one-time)
- Name the account + region code (e.g., US)
- Transfer playback to the temporary device when asked
- Credentials are posted to Spotizerr automatically

### Next steps

- Add Deezer ARL in Configuration → Accounts (optional, allows for FLAC availability if premium)
- Adjust Download and Formatting options
- Enable Watch system if you want automatic downloads

### Troubleshooting (quick)

- Downloads not starting: verify service credentials and API keys
- Watch not working: enable in Configuration → Watch and set intervals
- Auth issues: ensure JWT secret and SSO creds (if used); try clearing browser cache
- Queue stalling: force-refresh the page (ctrl+F5)

### Logs
```bash
docker logs spotizerr
```
17
docs/user/history.md
Normal file
@@ -0,0 +1,17 @@
## History

See all downloads and their outcomes.

- Filters
  - By type (track/album/playlist) and status (completed/failed/skipped/in_progress)
  - Pagination for large histories
- Drill-down
  - Open an entry to view child tracks for albums/playlists
  - Re-queue failures from the UI

Backend endpoints used:

- GET `/api/history?download_type=&status=&limit=&offset=`
- GET `/api/history/{task_id}` (entry)
- GET `/api/history/{task_id}/children` (child tracks)
- GET `/api/history/stats`, `/api/history/recent`, `/api/history/failed` (summaries)
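For example, a filtered query against the history endpoint could look like the following sketch (`httpx` is in `requirements.txt`; host and port are assumptions from the default setup):

```python
import httpx

# Fetch the 20 most recent failed album downloads (parameters from the list above).
params = {"download_type": "album", "status": "failed", "limit": 20, "offset": 0}
resp = httpx.get("http://localhost:7171/api/history", params=params)
print(resp.json())
```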
28
docs/user/multi-user.md
Normal file
@@ -0,0 +1,28 @@
## Multi-user

Authentication is optional. When enabled:

Login/Register

- Local accounts with username/password
- First registered user becomes admin
- Public registration can be disabled

SSO (optional)

- Google and GitHub when configured

Roles

- User: can search/download, manage their profile
- Admin: access to all Configuration tabs and user management

Admin actions

- Create/delete users, change roles
- Reset user passwords

Where to find it in the UI:

- User menu (top-right) → Profile settings
- Configuration → User Management (admin)
28
docs/user/playlists.md
Normal file
@@ -0,0 +1,28 @@
## Playlists

Open a playlist from search.

- Download
  - Download entire playlist
  - Download individual tracks
- Metadata and tracks
  - Loads metadata first (fast, avoids rate limits)
  - Tracks load in pages as you scroll
- Watch
  - Add/remove playlist to Watchlist (auto-download new additions when enabled)

How-to: download a playlist

1. Search for the playlist or paste its Spotify URL
2. Click Download
3. Monitor progress in the Queue; results appear in History

Backend endpoints used:

- GET `/api/playlist/metadata?id=...` (metadata only)
- GET `/api/playlist/tracks?id=...&limit=50&offset=...` (paged tracks)
- GET `/api/playlist/info?id=...&include_tracks=true` (full info when needed)
- GET `/api/playlist/download/{playlist_id}` (queue download)
- PUT `/api/playlist/watch/{playlist_id}` (watch)
- DELETE `/api/playlist/watch/{playlist_id}` (unwatch)
- GET `/api/playlist/watch/{playlist_id}/status` (status)
17
docs/user/tracks.md
Normal file
@@ -0,0 +1,17 @@
## Tracks

Find a track via search or open a track page.

- Download
  - Click Download on result card or track page
  - Progress visible in the Queue drawer
- Open on Spotify
  - From track page, open the Spotify link
- Details shown
  - Artists, album, duration, popularity

Backend endpoints used:

- GET `/api/track/info?id=...` (metadata)
- GET `/api/track/download/{track_id}` (queue download)
- GET `/api/prgs/stream` (live queue updates)
18
docs/user/watchlist.md
Normal file
@@ -0,0 +1,18 @@
## Watchlist

Enable the watch system in Configuration → Watch first.

- Add items
  - From Artist or Playlist pages, click Watch
- What it does
  - Periodically checks watched items
  - Queues new releases (artists) and/or newly added tracks (playlists)
- Setup
  - Enable watch system and set intervals in Configuration → Watch
  - Trigger a manual check if you want immediate processing

Backend endpoints used:

- Artists: PUT/DELETE/GET status under `/api/artist/watch/*`
- Playlists: PUT/DELETE/GET status under `/api/playlist/watch/*`
- Manual triggers: POST `/api/artist/watch/trigger_check` and `/api/playlist/watch/trigger_check`
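A minimal sketch of firing the manual triggers listed above (host and port assumed from the default compose setup):

```python
import httpx

# Force an immediate watch pass for artists and playlists.
for path in ("/api/artist/watch/trigger_check", "/api/playlist/watch/trigger_check"):
    resp = httpx.post(f"http://localhost:7171{path}")
    print(path, resp.status_code)
```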
entrypoint.sh
@@ -1,91 +0,0 @@
#!/bin/bash
set -e

# Set umask if UMASK variable is provided
if [ -n "${UMASK}" ]; then
    umask "${UMASK}"
fi

# Compose Redis URLs from base variables if not explicitly provided
if [ -z "${REDIS_URL}" ]; then
    REDIS_HOST=${REDIS_HOST:-redis}
    REDIS_PORT=${REDIS_PORT:-6379}
    REDIS_DB=${REDIS_DB:-0}

    if [ -n "${REDIS_PASSWORD}" ]; then
        if [ -n "${REDIS_USERNAME}" ]; then
            AUTH_PART="${REDIS_USERNAME}:${REDIS_PASSWORD}@"
        else
            AUTH_PART=":${REDIS_PASSWORD}@"
        fi
    else
        AUTH_PART=""
    fi
    export REDIS_URL="redis://${AUTH_PART}${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}"
fi

if [ -z "${REDIS_BACKEND}" ]; then
    export REDIS_BACKEND="${REDIS_URL}"
fi

# Redis is now in a separate container so we don't need to start it locally
echo "Using Redis at ${REDIS_URL}"

# Check if both PUID and PGID are not set
if [ -z "${PUID}" ] && [ -z "${PGID}" ]; then
    # Run as root directly
    echo "Running as root user (no PUID/PGID specified)"
    exec python app.py
else
    # Verify both PUID and PGID are set
    if [ -z "${PUID}" ] || [ -z "${PGID}" ]; then
        echo "ERROR: Must supply both PUID and PGID or neither"
        exit 1
    fi

    # Check for root user request
    if [ "${PUID}" -eq 0 ] && [ "${PGID}" -eq 0 ]; then
        echo "Running as root user (PUID/PGID=0)"
        exec python app.py
    else
        # Check if the group with the specified GID already exists
        if getent group "${PGID}" >/dev/null; then
            # If the group exists, use its name instead of creating a new one
            GROUP_NAME=$(getent group "${PGID}" | cut -d: -f1)
            echo "Using existing group: ${GROUP_NAME} (GID: ${PGID})"
        else
            # If the group doesn't exist, create it
            GROUP_NAME="appgroup"
            groupadd -g "${PGID}" "${GROUP_NAME}"
            echo "Created group: ${GROUP_NAME} (GID: ${PGID})"
        fi

        # Check if the user with the specified UID already exists
        if getent passwd "${PUID}" >/dev/null; then
            # If the user exists, use its name instead of creating a new one
            USER_NAME=$(getent passwd "${PUID}" | cut -d: -f1)
            echo "Using existing user: ${USER_NAME} (UID: ${PUID})"
        else
            # If the user doesn't exist, create it
            USER_NAME="appuser"
            useradd -u "${PUID}" -g "${GROUP_NAME}" -d /app "${USER_NAME}"
            echo "Created user: ${USER_NAME} (UID: ${PUID})"
        fi

        # Ensure proper permissions for all app directories unless skipped via env var
        if [ "${SKIP_SET_PERMISSIONS}" = "true" ] || [ "${SKIP_SET_PERMISSIONS}" = "1" ]; then
            echo "SKIP_SET_PERMISSIONS is set; skipping permissions for /app/downloads /app/data /app/logs"
        else
            echo "Setting permissions for /app directories..."
            chown -R "${USER_NAME}:${GROUP_NAME}" /app/downloads /app/data /app/logs || true
        fi

        # Ensure Spotipy cache file exists and is writable (fast, local to container)
        touch /app/.cache || true
        chown "${USER_NAME}:${GROUP_NAME}" /app/.cache || true

        # Run as specified user
        echo "Starting application as ${USER_NAME}..."
        exec gosu "${USER_NAME}" python app.py
    fi
fi
30
mkdocs.yml
Normal file
@@ -0,0 +1,30 @@
site_name: Spotizerr Documentation
site_description: Straight-to-the-point docs for Spotizerr
site_dir: site
use_directory_urls: true
theme:
  name: material
  features:
    - navigation.instant
    - navigation.tracking
    - content.action.edit
    - search.suggest
    - search.highlight
nav:
  - Getting started: user/getting-started.md
  - Configuration: user/configuration.md
  - Environment: user/environment.md
  - Tracks: user/tracks.md
  - Albums: user/albums.md
  - Playlists: user/playlists.md
  - Artists: user/artists.md
  - Watchlist: user/watchlist.md
  - History: user/history.md
  - Multi-user: user/multi-user.md
  - API: api.md
markdown_extensions:
  - toc:
      permalink: true
  - admonition
  - tables
  - fenced_code
requirements.txt
@@ -1,9 +1,11 @@
fastapi==0.116.1
uvicorn[standard]==0.35.0
celery==5.5.3
deezspot-spotizerr==2.7.3
deezspot-spotizerr==2.7.6
httpx==0.28.1
bcrypt==4.2.1
PyJWT==2.10.1
python-multipart==0.0.17
fastapi-sso==0.18.0
redis==5.0.7
async-timeout==4.0.3
routes/auth/credentials.py
@@ -1,7 +1,3 @@
import logging

# Configure basic logging for the application if not already configured
# This remains safe to execute on import
logging.basicConfig(level=logging.INFO, format="%(message)s")

logger = logging.getLogger(__name__)
@@ -23,6 +23,22 @@ router = APIRouter()
init_credentials_db()


def _set_active_account_if_empty(service: str, name: str):
    """
    Sets the newly created account as the active account in the main config
    if no active account is currently set for the given service.
    """
    try:
        from routes.utils.celery_config import get_config_params as get_main_config_params
        from routes.system.config import save_config

        config = get_main_config_params()
        if not config.get(service):
            config[service] = name
            save_config(config)
    except Exception as e:
        logger.warning(f"Could not set new {service.capitalize()} account '{name}' as active: {e}")


@router.get("/spotify_api_config")
@router.put("/spotify_api_config")
async def handle_spotify_api_config(request: Request, current_user: User = Depends(require_admin_from_state)):
@@ -130,18 +146,7 @@ async def handle_create_credential(service: str, name: str, request: Request, cu
    # Validation is handled within create_credential utility function
    result = create_credential(service, name, data)

    # set as active Spotify account if none is set
    if service == "spotify":
        try:
            from routes.utils.celery_config import get_config_params as get_main_config_params
            from routes.system.config import save_config

            config = get_main_config_params()
            # The field is likely "spotify" (as used in frontend)
            if not config.get("spotify"):
                config["spotify"] = name
                save_config(config)
        except Exception as e:
            logger.warning(f"Could not set new Spotify account '{name}' as active: {e}")
    _set_active_account_if_empty(service, name)

    return {
        "message": f"Credential for '{name}' ({service}) created successfully.",
108
routes/content/bulk_add.py
Normal file
@@ -0,0 +1,108 @@
import re
from typing import List, Dict, Any
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
import logging

# Assuming these imports are available for queue management and Spotify info
from routes.utils.get_info import get_spotify_info
from routes.utils.celery_tasks import download_track, download_album, download_playlist

router = APIRouter()
logger = logging.getLogger(__name__)

class BulkAddLinksRequest(BaseModel):
    links: List[str]

@router.post("/bulk-add-spotify-links")
async def bulk_add_spotify_links(request: BulkAddLinksRequest):
    added_count = 0
    failed_links = []
    total_links = len(request.links)

    for link in request.links:
        # Assuming links are pre-filtered by the frontend,
        # but still handle potential errors during info retrieval or unsupported types
        # Extract type and ID from the link directly using regex
        match = re.match(r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?", link)
        if not match:
            logger.warning(f"Could not parse Spotify link (unexpected format after frontend filter): {link}")
            failed_links.append(link)
            continue

        spotify_type = match.group(1)
        spotify_id = match.group(2)

        try:
            # Get basic info to confirm existence and get name/artist
            # For playlists, we might want to get full info later when adding to queue
            if spotify_type == "playlist":
                item_info = get_spotify_info(spotify_id, "playlist_metadata")
            else:
                item_info = get_spotify_info(spotify_id, spotify_type)

            item_name = item_info.get("name", "Unknown Name")
            artist_name = ""
            if spotify_type in ["track", "album"]:
                artists = item_info.get("artists", [])
                if artists:
                    artist_name = ", ".join([a.get("name", "Unknown Artist") for a in artists])
            elif spotify_type == "playlist":
                owner = item_info.get("owner", {})
                artist_name = owner.get("display_name", "Unknown Owner")

            # Construct URL for the download task
            spotify_url = f"https://open.spotify.com/{spotify_type}/{spotify_id}"

            # Add to Celery queue based on type
            if spotify_type == "track":
                download_track.delay(
                    url=spotify_url,
                    spotify_id=spotify_id,
                    type=spotify_type,
                    name=item_name,
                    artist=artist_name,
                    download_type="track",
                )
            elif spotify_type == "album":
                download_album.delay(
                    url=spotify_url,
                    spotify_id=spotify_id,
                    type=spotify_type,
                    name=item_name,
                    artist=artist_name,
                    download_type="album",
                )
            elif spotify_type == "playlist":
                download_playlist.delay(
                    url=spotify_url,
                    spotify_id=spotify_id,
                    type=spotify_type,
                    name=item_name,
                    artist=artist_name,
                    download_type="playlist",
                )
            else:
                logger.warning(f"Unsupported Spotify type for download: {spotify_type} for link: {link}")
                failed_links.append(link)
                continue

            added_count += 1
            logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue.")

        except Exception as e:
            logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True)
            failed_links.append(link)

    message = f"Successfully added {added_count}/{total_links} links to queue."
    if failed_links:
        message += f" Failed to add {len(failed_links)} links."
        logger.warning(f"Bulk add completed with {len(failed_links)} failures.")
    else:
        logger.info(f"Bulk add completed successfully. Added {added_count} links.")

    return {
        "message": message,
        "count": added_count,
        "failed_links": failed_links,
    }
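Since `app.py` mounts this router under `/api/bulk`, exercising the endpoint might look like the sketch below (`httpx` is already a dependency; the IDs are placeholders):

```python
import httpx

# Placeholder Spotify IDs; substitute real track/album/playlist links.
payload = {
    "links": [
        "https://open.spotify.com/track/xxxxxxxxxxxxxxxxxxxxxx",
        "https://open.spotify.com/album/yyyyyyyyyyyyyyyyyyyyyy",
    ]
}
resp = httpx.post("http://localhost:7171/api/bulk/bulk-add-spotify-links", json=payload)
print(resp.json())  # {"message": ..., "count": ..., "failed_links": [...]}
```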
@@ -4,6 +4,7 @@ from pathlib import Path
from typing import Optional

from .v3_2_0 import MigrationV3_2_0
from .v3_2_1 import log_noop_migration_detected

logger = logging.getLogger(__name__)
@@ -285,7 +286,6 @@ def _update_watch_playlists_db(conn: sqlite3.Connection) -> None:
            EXPECTED_PLAYLIST_TRACKS_COLUMNS,
            f"playlist tracks ({table_name})",
        )
        logger.info("Upgraded watch playlists DB to 3.2.0 base schema")
    except Exception:
        logger.error(
            "Failed to upgrade watch playlists DB to 3.2.0 base schema", exc_info=True
@@ -348,7 +348,6 @@ def _update_watch_artists_db(conn: sqlite3.Connection) -> None:
            EXPECTED_ARTIST_ALBUMS_COLUMNS,
            f"artist albums ({table_name})",
        )
        logger.info("Upgraded watch artists DB to 3.2.0 base schema")
    except Exception:
        logger.error(
            "Failed to upgrade watch artists DB to 3.2.0 base schema", exc_info=True
@@ -379,10 +378,10 @@ def run_migrations_if_needed():
    with _safe_connect(HISTORY_DB) as history_conn:
        if history_conn and not _is_history_at_least_3_2_0(history_conn):
            logger.error(
                "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.2.1."
                "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0."
            )
            raise RuntimeError(
                "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.2.1."
                "Instance is not at schema version 3.2.0. Please upgrade to 3.2.0 before applying 3.3.0."
            )

    # Watch playlists DB
@@ -413,4 +412,5 @@ def run_migrations_if_needed():
        raise
    else:
        _ensure_creds_filesystem()
        logger.info("Database migrations check completed (3.2.0 -> 3.2.1 path)")
        log_noop_migration_detected()
        logger.info("Database migrations check completed (3.2.0 -> 3.3.0 path)")
@@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)

class MigrationV3_2_0:
    """
    Migration for version 3.2.0 (upgrade path 3.2.0 -> 3.2.1).
    Migration for version 3.2.0 (upgrade path 3.2.0 -> 3.3.0).
    - Adds per-item batch progress columns to Watch DBs to support page-by-interval processing.
    - Enforces prerequisite: previous instance version must be 3.1.2 (validated by runner).
    """
@@ -21,7 +21,7 @@ class MigrationV3_2_0:
        "batch_next_offset": "INTEGER DEFAULT 0",
    }

    # --- No-op for history/accounts in 3.2.1 ---
    # --- No-op for history/accounts in 3.3.0 ---

    def check_history(self, conn: sqlite3.Connection) -> bool:
        return True
@@ -59,14 +59,14 @@ class MigrationV3_2_0:
|
||||
f"ALTER TABLE watched_playlists ADD COLUMN {col_name} {col_type}"
|
||||
)
|
||||
logger.info(
|
||||
f"Added column '{col_name} {col_type}' to watched_playlists for 3.2.1 batch progress."
|
||||
f"Added column '{col_name} {col_type}' to watched_playlists for 3.3.0 batch progress."
|
||||
)
|
||||
except sqlite3.OperationalError as e:
|
||||
logger.warning(
|
||||
f"Could not add column '{col_name}' to watched_playlists: {e}"
|
||||
)
|
||||
except Exception:
|
||||
logger.error("Failed to update watched_playlists for 3.2.1", exc_info=True)
|
||||
logger.error("Failed to update watched_playlists for 3.3.0", exc_info=True)
|
||||
|
||||
# --- Watch: artists ---
|
||||
|
||||
@@ -90,11 +90,11 @@ class MigrationV3_2_0:
|
||||
f"ALTER TABLE watched_artists ADD COLUMN {col_name} {col_type}"
|
||||
)
|
||||
logger.info(
|
||||
f"Added column '{col_name} {col_type}' to watched_artists for 3.2.1 batch progress."
|
||||
f"Added column '{col_name} {col_type}' to watched_artists for 3.3.0 batch progress."
|
||||
)
|
||||
except sqlite3.OperationalError as e:
|
||||
logger.warning(
|
||||
f"Could not add column '{col_name}' to watched_artists: {e}"
|
||||
)
|
||||
except Exception:
|
||||
logger.error("Failed to update watched_artists for 3.2.1", exc_info=True)
|
||||
logger.error("Failed to update watched_artists for 3.3.0", exc_info=True)
|
||||
|
||||
41
routes/migrations/v3_2_1.py
Normal file
41
routes/migrations/v3_2_1.py
Normal file
@@ -0,0 +1,41 @@
import logging
import sqlite3

logger = logging.getLogger(__name__)


class MigrationV3_2_1:
"""
No-op migration for version 3.2.1 (upgrade path 3.2.1 -> 3.3.0).
No database schema changes are required.
"""

def check_history(self, conn: sqlite3.Connection) -> bool:
return True

def update_history(self, conn: sqlite3.Connection) -> None:
pass

def check_accounts(self, conn: sqlite3.Connection) -> bool:
return True

def update_accounts(self, conn: sqlite3.Connection) -> None:
pass

def check_watch_playlists(self, conn: sqlite3.Connection) -> bool:
return True

def update_watch_playlists(self, conn: sqlite3.Connection) -> None:
pass

def check_watch_artists(self, conn: sqlite3.Connection) -> bool:
return True

def update_watch_artists(self, conn: sqlite3.Connection) -> None:
pass


def log_noop_migration_detected() -> None:
logger.info(
"No migration performed: detected schema for 3.2.1; no changes needed for 3.2.1 -> 3.3.0."
)
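As a quick illustration of how this no-op class plugs into the runner shown earlier (the real wiring lives in the migrations package init; this standalone sketch just exercises one check/update pair):

import sqlite3
from routes.migrations.v3_2_1 import MigrationV3_2_1, log_noop_migration_detected

migration = MigrationV3_2_1()
with sqlite3.connect(":memory:") as conn:
    if migration.check_history(conn):  # always True: schema is already current
        migration.update_history(conn)  # no-op by design
log_noop_migration_detected()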
@@ -1,10 +1,14 @@
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi.responses import JSONResponse, StreamingResponse
from fastapi.responses import StreamingResponse
import logging
import time
import json
import asyncio
from typing import Dict, Set
from typing import Optional, Set

import redis
import threading
from routes.utils.celery_config import REDIS_URL

from routes.utils.celery_tasks import (
get_task_info,
@@ -12,17 +16,23 @@ from routes.utils.celery_tasks import (
get_last_task_status,
get_all_tasks,
cancel_task,
delete_task_data_and_log,
ProgressState,
)

# Import authentication dependencies
from routes.auth.middleware import require_auth_from_state, get_current_user_from_state, User
from routes.auth.middleware import (
require_auth_from_state,
get_current_user_from_state,
User,
)

# Configure logging
logger = logging.getLogger(__name__)

router = APIRouter()

# Global SSE Event Broadcaster
class SSEBroadcaster:
def __init__(self):
@@ -31,16 +41,18 @@ class SSEBroadcaster:
async def add_client(self, queue: asyncio.Queue):
"""Add a new SSE client"""
self.clients.add(queue)
logger.info(f"SSE: Client connected (total: {len(self.clients)})")
logger.debug(f"SSE: Client connected (total: {len(self.clients)})")

async def remove_client(self, queue: asyncio.Queue):
"""Remove an SSE client"""
self.clients.discard(queue)
logger.info(f"SSE: Client disconnected (total: {len(self.clients)})")
logger.debug(f"SSE: Client disconnected (total: {len(self.clients)})")

async def broadcast_event(self, event_data: dict):
"""Broadcast an event to all connected clients"""
logger.debug(f"SSE Broadcaster: Attempting to broadcast to {len(self.clients)} clients")
logger.debug(
f"SSE Broadcaster: Attempting to broadcast to {len(self.clients)} clients"
)

if not self.clients:
logger.debug("SSE Broadcaster: No clients connected, skipping broadcast")
@@ -51,7 +63,9 @@ class SSEBroadcaster:
event_json = json.dumps(enhanced_event_data)
sse_data = f"data: {event_json}\n\n"

logger.debug(f"SSE Broadcaster: Broadcasting event: {enhanced_event_data.get('change_type', 'unknown')} with {enhanced_event_data.get('active_tasks', 0)} active tasks")
logger.debug(
f"SSE Broadcaster: Broadcasting event: {enhanced_event_data.get('change_type', 'unknown')} with {enhanced_event_data.get('active_tasks', 0)} active tasks"
)

# Send to all clients, remove disconnected ones
disconnected = set()
@@ -60,7 +74,7 @@ class SSEBroadcaster:
try:
await client_queue.put(sse_data)
sent_count += 1
logger.debug(f"SSE: Successfully sent to client queue")
logger.debug("SSE: Successfully sent to client queue")
except Exception as e:
logger.error(f"SSE: Failed to send to client: {e}")
disconnected.add(client_queue)
@@ -68,22 +82,23 @@ class SSEBroadcaster:
# Clean up disconnected clients
for client in disconnected:
self.clients.discard(client)
logger.info(
f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients"
)

logger.info(f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients")

# Global broadcaster instance
sse_broadcaster = SSEBroadcaster()
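A minimal consumer sketch for this broadcaster's output, assuming the router is mounted at /api/prgs and the localhost:7171 default seen elsewhere in this file (both are assumptions here, not confirmed by this hunk):

import json
import httpx

def consume_task_stream(base_url: str = "http://localhost:7171") -> None:
    # Read the text/event-stream response line by line and decode "data:" frames
    with httpx.stream("GET", f"{base_url}/api/prgs/stream") as response:
        for line in response.iter_lines():
            if line.startswith("data: "):
                event = json.loads(line[len("data: "):])
                print(event.get("change_type", "unknown"), event.get("task_counts"))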

# Redis subscriber for cross-process SSE events
import redis
import threading
from routes.utils.celery_config import REDIS_URL

# Redis client for SSE pub/sub
sse_redis_client = redis.Redis.from_url(REDIS_URL)


def start_sse_redis_subscriber():
"""Start Redis subscriber to listen for SSE events from Celery workers"""

def redis_subscriber_thread():
try:
pubsub = sse_redis_client.pubsub()
@@ -91,33 +106,49 @@ def start_sse_redis_subscriber():
logger.info("SSE Redis Subscriber: Started listening for events")

for message in pubsub.listen():
if message['type'] == 'message':
if message["type"] == "message":
try:
event_data = json.loads(message['data'].decode('utf-8'))
event_type = event_data.get('event_type', 'unknown')
task_id = event_data.get('task_id', 'unknown')
event_data = json.loads(message["data"].decode("utf-8"))
event_type = event_data.get("event_type", "unknown")
task_id = event_data.get("task_id", "unknown")

logger.debug(f"SSE Redis Subscriber: Received {event_type} for task {task_id}")
logger.debug(
f"SSE Redis Subscriber: Received {event_type} for task {task_id}"
)

# Handle different event types
if event_type == 'progress_update':
if event_type == "progress_update":
# Transform callback data into task format expected by frontend
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
broadcast_data = loop.run_until_complete(transform_callback_to_task_format(task_id, event_data))
broadcast_data = loop.run_until_complete(
transform_callback_to_task_format(
task_id, event_data
)
)
if broadcast_data:
loop.run_until_complete(sse_broadcaster.broadcast_event(broadcast_data))
logger.debug(f"SSE Redis Subscriber: Broadcasted callback to {len(sse_broadcaster.clients)} clients")
loop.run_until_complete(
sse_broadcaster.broadcast_event(broadcast_data)
)
logger.debug(
f"SSE Redis Subscriber: Broadcasted callback to {len(sse_broadcaster.clients)} clients"
)
finally:
loop.close()
elif event_type == 'summary_update':
elif event_type == "summary_update":
# Task summary update - use existing trigger_sse_update logic
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(trigger_sse_update(task_id, event_data.get('reason', 'update')))
logger.debug(f"SSE Redis Subscriber: Processed summary update for {task_id}")
loop.run_until_complete(
trigger_sse_update(
task_id, event_data.get("reason", "update")
)
)
logger.debug(
f"SSE Redis Subscriber: Processed summary update for {task_id}"
)
finally:
loop.close()
else:
@@ -125,13 +156,20 @@ def start_sse_redis_subscriber():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(sse_broadcaster.broadcast_event(event_data))
logger.debug(f"SSE Redis Subscriber: Broadcasted {event_type} to {len(sse_broadcaster.clients)} clients")
loop.run_until_complete(
sse_broadcaster.broadcast_event(event_data)
)
logger.debug(
f"SSE Redis Subscriber: Broadcasted {event_type} to {len(sse_broadcaster.clients)} clients"
)
finally:
loop.close()

except Exception as e:
logger.error(f"SSE Redis Subscriber: Error processing message: {e}", exc_info=True)
logger.error(
f"SSE Redis Subscriber: Error processing message: {e}",
exc_info=True,
)

except Exception as e:
logger.error(f"SSE Redis Subscriber: Fatal error: {e}", exc_info=True)
@@ -139,13 +177,14 @@ def start_sse_redis_subscriber():
# Start Redis subscriber in background thread
thread = threading.Thread(target=redis_subscriber_thread, daemon=True)
thread.start()
logger.info("SSE Redis Subscriber: Background thread started")
logger.debug("SSE Redis Subscriber: Background thread started")
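The worker-side counterpart of this subscriber is a plain Redis publish; a sketch follows. The channel name "sse_events" is an assumption, since the actual pubsub.subscribe(...) call falls outside the visible hunk and the real channel name must match it:

import json
import time
import redis
from routes.utils.celery_config import REDIS_URL

def publish_sse_event(task_id: str, event_type: str = "summary_update") -> None:
    client = redis.Redis.from_url(REDIS_URL)
    payload = {"event_type": event_type, "task_id": task_id, "timestamp": time.time()}
    # Channel name is illustrative; it must match the subscriber's subscription
    client.publish("sse_events", json.dumps(payload))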


async def transform_callback_to_task_format(task_id: str, event_data: dict) -> dict:
"""Transform callback event data into the task format expected by frontend"""
try:
# Import here to avoid circular imports
from routes.utils.celery_tasks import get_task_info, get_all_tasks
from routes.utils.celery_tasks import get_task_info

# Get task info to build complete task object
task_info = get_task_info(task_id)
@@ -154,19 +193,19 @@ async def transform_callback_to_task_format(task_id: str, event_data: dict) -> d
return None

# Extract callback data
callback_data = event_data.get('callback_data', {})
callback_data = event_data.get("callback_data", {})

# Build task object in the format expected by frontend
task_object = {
"task_id": task_id,
"original_url": f"http://localhost:7171/api/{task_info.get('download_type', 'track')}/download/{task_info.get('url', '').split('/')[-1] if task_info.get('url') else ''}",
"last_line": callback_data, # This is what frontend expects for callback data
"timestamp": event_data.get('timestamp', time.time()),
"download_type": task_info.get('download_type', 'track'),
"type": task_info.get('type', task_info.get('download_type', 'track')),
"name": task_info.get('name', 'Unknown'),
"artist": task_info.get('artist', ''),
"created_at": task_info.get('created_at'),
"timestamp": event_data.get("timestamp", time.time()),
"download_type": task_info.get("download_type", "track"),
"type": task_info.get("type", task_info.get("download_type", "track")),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"created_at": task_info.get("created_at"),
}

# Build minimal event data - global counts will be added at broadcast time
@@ -176,16 +215,21 @@ async def transform_callback_to_task_format(task_id: str, event_data: dict) -> d
"current_timestamp": time.time(),
"updated_count": 1,
"since_timestamp": time.time(),
"trigger_reason": "callback_update"
"trigger_reason": "callback_update",
}

except Exception as e:
logger.error(f"SSE Transform: Error transforming callback for task {task_id}: {e}", exc_info=True)
logger.error(
f"SSE Transform: Error transforming callback for task {task_id}: {e}",
exc_info=True,
)
return None


# Start the Redis subscriber when module loads
start_sse_redis_subscriber()


async def trigger_sse_update(task_id: str, reason: str = "task_update"):
"""Trigger an immediate SSE update for a specific task"""
try:
@@ -200,13 +244,14 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"):
last_status = get_last_task_status(task_id)

# Create a dummy request for the _build_task_response function
from fastapi import Request
class DummyRequest:
def __init__(self):
self.base_url = "http://localhost:7171"

dummy_request = DummyRequest()
task_response = _build_task_response(task_info, last_status, task_id, current_time, dummy_request)
task_response = _build_task_response(
task_info, last_status, task_id, current_time, dummy_request
)

# Create minimal event data - global counts will be added at broadcast time
event_data = {
@@ -214,7 +259,7 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"):
"current_timestamp": current_time,
"since_timestamp": current_time,
"change_type": "realtime",
"trigger_reason": reason
"trigger_reason": reason,
}

await sse_broadcaster.broadcast_event(event_data)
@@ -223,6 +268,7 @@ async def trigger_sse_update(task_id: str, reason: str = "task_update"):
except Exception as e:
logger.error(f"SSE: Failed to trigger update for task {task_id}: {e}")

# Define active task states using ProgressState constants
ACTIVE_TASK_STATES = {
ProgressState.INITIALIZING, # "initializing" - task is starting up
@@ -246,6 +292,7 @@ TERMINAL_TASK_STATES = {
ProgressState.SKIPPED, # "skipped" - task was skipped
}


def get_task_status_from_last_status(last_status):
"""
Extract the task status from last_status, checking both possible locations.
@@ -306,7 +353,7 @@ def get_global_task_counts():
"error": 0,
"cancelled": 0,
"retrying": 0,
"skipped": 0
"skipped": 0,
}

try:
@@ -342,7 +389,9 @@ def get_global_task_counts():
elif is_active_task:
task_counts["active"] += 1

logger.debug(f"Global task counts: {task_counts} (total: {len(all_tasks)} tasks)")
logger.debug(
f"Global task counts: {task_counts} (total: {len(all_tasks)} tasks)"
)

except Exception as e:
logger.error(f"Error getting global task counts: {e}", exc_info=True)
@@ -373,7 +422,9 @@ def add_global_task_counts_to_event(event_data):
return event_data

except Exception as e:
logger.error(f"Error adding global task counts to SSE event: {e}", exc_info=True)
logger.error(
f"Error adding global task counts to SSE event: {e}", exc_info=True
)
return event_data

@@ -397,10 +448,9 @@ def _build_error_callback_object(last_status):
callback_object["album"] = {
"type": "album",
"title": name,
"artists": [{
"type": "artistAlbum",
"name": artist_or_owner
}] if artist_or_owner else [],
"artists": [{"type": "artistAlbum", "name": artist_or_owner}]
if artist_or_owner
else [],
}
elif download_type == "playlist":
playlist_payload = {"type": "playlist", "title": name}
@@ -411,10 +461,9 @@ def _build_error_callback_object(last_status):
callback_object["track"] = {
"type": "track",
"title": name,
"artists": [{
"type": "artistTrack",
"name": artist_or_owner
}] if artist_or_owner else [],
"artists": [{"type": "artistTrack", "name": artist_or_owner}]
if artist_or_owner
else [],
}
else:
# Fallback for unknown types to avoid breaking the client, returning a basic error structure.
@@ -431,7 +480,9 @@ def _build_error_callback_object(last_status):
return callback_object

def _build_task_response(task_info, last_status, task_id, current_time, request: Request):
def _build_task_response(
task_info, last_status, task_id, current_time, request: Request
):
"""
Helper function to build a standardized task response object.
"""
@@ -444,7 +495,7 @@ def _build_task_response(task_info, last_status, task_id, current_time, request:
try:
item_id = item_url.split("/")[-1]
if item_id:
base_url = str(request.base_url).rstrip("/")
base_url = str(request.base_url).rstrip("/") if request else "http://localhost:7171"
dynamic_original_url = (
f"{base_url}/api/{download_type}/download/{item_id}"
)
@@ -465,6 +516,27 @@ def _build_task_response(task_info, last_status, task_id, current_time, request:
logger.warning(
f"Missing download_type ('{download_type}') or item_url ('{item_url}') in task_info for task {task_id}. Falling back for original_url."
)
# Auto-delete faulty task data to keep the queue clean
try:
delete_task_data_and_log(
task_id,
reason="Auto-cleaned: Missing download_type or url in task_info.",
)
# Trigger SSE so clients refresh their task lists
try:
# Avoid circular import at top-level
import asyncio as _asyncio

# Fire-and-forget; if no event loop available, ignore
loop = _asyncio.get_event_loop()
if loop.is_running():
_asyncio.create_task(
trigger_sse_update(task_id, "auto_deleted_faulty")
)
except Exception:
pass
except Exception as _e:
logger.error(f"Auto-delete failed for faulty task {task_id}: {_e}")
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")

@@ -478,13 +550,18 @@ def _build_task_response(task_info, last_status, task_id, current_time, request:
else:
last_line_content = last_status

# Normalize created_at to a numeric timestamp
created_at_value = task_info.get("created_at")
if not isinstance(created_at_value, (int, float)):
created_at_value = current_time

task_response = {
"original_url": dynamic_original_url,
"last_line": last_line_content,
"timestamp": last_status.get("timestamp") if last_status else current_time,
"task_id": task_id,
"status_count": status_count,
"created_at": task_info.get("created_at"),
"created_at": created_at_value,
"name": task_info.get("name"),
"artist": task_info.get("artist"),
"type": task_info.get("type"),
@@ -496,7 +573,7 @@ def _build_task_response(task_info, last_status, task_id, current_time, request:
return task_response


async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Request = None):
async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Optional[Request] = None):
"""
Get paginated list of tasks.
"""
@@ -523,7 +600,9 @@ async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Requ
task_status = get_task_status_from_last_status(last_status)
is_active_task = is_task_active(task_status)

task_response = _build_task_response(task_info, last_status, task_id, time.time(), request)
task_response = _build_task_response(
task_info, last_status, task_id, time.time(), request
)

if is_active_task:
active_tasks.append(task_response)
@@ -531,7 +610,7 @@ async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Requ
other_tasks.append(task_response)

# Sort other tasks by creation time (newest first)
other_tasks.sort(key=lambda x: x.get("created_at", 0), reverse=True)
other_tasks.sort(key=lambda x: (x.get("created_at") or 0.0), reverse=True)

if active_only:
paginated_tasks = active_tasks
@@ -540,7 +619,7 @@ async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Requ
"limit": limit,
"total_non_active": 0,
"has_more": False,
"returned_non_active": 0
"returned_non_active": 0,
}
else:
# Apply pagination to non-active tasks
@@ -553,32 +632,38 @@ async def get_paginated_tasks(page=1, limit=20, active_only=False, request: Requ
"limit": limit,
"total_non_active": len(other_tasks),
"has_more": len(other_tasks) > offset + limit,
"returned_non_active": len(paginated_other_tasks)
"returned_non_active": len(paginated_other_tasks),
}

response = {
"tasks": paginated_tasks,
"current_timestamp": time.time(),
"total_tasks": task_counts["active"] + task_counts["retrying"], # Only active/retrying tasks for counter
"total_tasks": task_counts["active"]
+ task_counts["retrying"], # Only active/retrying tasks for counter
"all_tasks_count": len(all_tasks), # Total count of all tasks
"task_counts": task_counts, # Categorized counts
"active_tasks": len(active_tasks),
"updated_count": len(paginated_tasks),
"pagination": pagination_info
"pagination": pagination_info,
}

return response

except Exception as e:
logger.error(f"Error in get_paginated_tasks: {e}", exc_info=True)
raise HTTPException(status_code=500, detail={"error": "Failed to retrieve paginated tasks"})
raise HTTPException(
status_code=500, detail={"error": "Failed to retrieve paginated tasks"}
)


# IMPORTANT: Specific routes MUST come before parameterized routes in FastAPI
# Otherwise "updates" gets matched as a {task_id} parameter!

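The ordering rule in the comment above, as a self-contained sketch (illustrative router and handler names, not part of this file):

from fastapi import APIRouter

demo_router = APIRouter()

@demo_router.get("/updates")  # specific path registered first
async def demo_updates():
    return {"ok": True}

@demo_router.get("/{task_id}")  # parameterized path last, or it would capture "updates"
async def demo_details(task_id: str):
    return {"task_id": task_id}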
@router.get("/list")
|
||||
async def list_tasks(request: Request, current_user: User = Depends(require_auth_from_state)):
|
||||
async def list_tasks(
|
||||
request: Request, current_user: User = Depends(require_auth_from_state)
|
||||
):
|
||||
"""
|
||||
Retrieve a paginated list of all tasks in the system.
|
||||
Returns a detailed list of task objects including status and metadata.
|
||||
@@ -590,9 +675,9 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
"""
|
||||
try:
|
||||
# Get query parameters
|
||||
page = int(request.query_params.get('page', 1))
|
||||
limit = min(int(request.query_params.get('limit', 50)), 100) # Cap at 100
|
||||
active_only = request.query_params.get('active_only', '').lower() == 'true'
|
||||
page = int(request.query_params.get("page", 1))
|
||||
limit = min(int(request.query_params.get("limit", 50)), 100) # Cap at 100
|
||||
active_only = request.query_params.get("active_only", "").lower() == "true"
|
||||
|
||||
tasks = get_all_tasks()
|
||||
active_tasks = []
|
||||
@@ -606,7 +691,7 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
"error": 0,
|
||||
"cancelled": 0,
|
||||
"retrying": 0,
|
||||
"skipped": 0
|
||||
"skipped": 0,
|
||||
}
|
||||
|
||||
for task_summary in tasks:
|
||||
@@ -625,7 +710,10 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
# Categorize tasks by status using ProgressState constants
|
||||
if task_status == ProgressState.RETRYING:
|
||||
task_counts["retrying"] += 1
|
||||
elif task_status in {ProgressState.QUEUED, "pending"}: # Keep "pending" for backward compatibility
|
||||
elif task_status in {
|
||||
ProgressState.QUEUED,
|
||||
"pending",
|
||||
}: # Keep "pending" for backward compatibility
|
||||
task_counts["queued"] += 1
|
||||
elif task_status in {ProgressState.COMPLETE, ProgressState.DONE}:
|
||||
task_counts["completed"] += 1
|
||||
@@ -638,7 +726,9 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
elif is_active_task:
|
||||
task_counts["active"] += 1
|
||||
|
||||
task_response = _build_task_response(task_info, last_status, task_id, time.time(), request)
|
||||
task_response = _build_task_response(
|
||||
task_info, last_status, task_id, time.time(), request
|
||||
)
|
||||
|
||||
if is_active_task:
|
||||
active_tasks.append(task_response)
|
||||
@@ -646,7 +736,7 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
other_tasks.append(task_response)
|
||||
|
||||
# Sort other tasks by creation time (newest first)
|
||||
other_tasks.sort(key=lambda x: x.get("created_at", 0), reverse=True)
|
||||
other_tasks.sort(key=lambda x: (x.get("created_at") or 0.0), reverse=True)
|
||||
|
||||
if active_only:
|
||||
# Return only active tasks without pagination
|
||||
@@ -656,7 +746,7 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
"limit": limit,
|
||||
"total_items": len(active_tasks),
|
||||
"total_pages": 1,
|
||||
"has_more": False
|
||||
"has_more": False,
|
||||
}
|
||||
else:
|
||||
# Apply pagination to non-active tasks and combine with active tasks
|
||||
@@ -674,7 +764,9 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
adjusted_offset = offset - len(active_tasks)
|
||||
if adjusted_offset < 0:
|
||||
adjusted_offset = 0
|
||||
paginated_other_tasks = other_tasks[adjusted_offset:adjusted_offset + limit]
|
||||
paginated_other_tasks = other_tasks[
|
||||
adjusted_offset : adjusted_offset + limit
|
||||
]
|
||||
response_tasks = paginated_other_tasks
|
||||
|
||||
total_items = len(active_tasks) + len(other_tasks)
|
||||
@@ -687,27 +779,32 @@ async def list_tasks(request: Request, current_user: User = Depends(require_auth
|
||||
"total_pages": total_pages,
|
||||
"has_more": page < total_pages,
|
||||
"active_tasks": len(active_tasks),
|
||||
"total_other_tasks": len(other_tasks)
|
||||
"total_other_tasks": len(other_tasks),
|
||||
}
|
||||
|
||||
response = {
|
||||
"tasks": response_tasks,
|
||||
"pagination": pagination_info,
|
||||
"total_tasks": task_counts["active"] + task_counts["retrying"], # Only active/retrying tasks for counter
|
||||
"total_tasks": task_counts["active"]
|
||||
+ task_counts["retrying"], # Only active/retrying tasks for counter
|
||||
"all_tasks_count": len(tasks), # Total count of all tasks
|
||||
"task_counts": task_counts, # Categorized counts
|
||||
"active_tasks": len(active_tasks),
|
||||
"timestamp": time.time()
|
||||
"timestamp": time.time(),
|
||||
}
|
||||
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/prgs/list: {e}", exc_info=True)
|
||||
raise HTTPException(status_code=500, detail={"error": "Failed to retrieve task list"})
|
||||
raise HTTPException(
|
||||
status_code=500, detail={"error": "Failed to retrieve task list"}
|
||||
)
|
||||
|
||||
|
||||
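An illustrative client call against this endpoint (the /api/prgs prefix matches the error log above; authentication is enforced by require_auth_from_state, so a real request also needs valid credentials, which are omitted here):

import httpx

resp = httpx.get(
    "http://localhost:7171/api/prgs/list",
    params={"page": 1, "limit": 50, "active_only": "false"},
)
print(resp.json().get("task_counts"))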
@router.get("/updates")
|
||||
async def get_task_updates(request: Request, current_user: User = Depends(require_auth_from_state)):
|
||||
async def get_task_updates(
|
||||
request: Request, current_user: User = Depends(require_auth_from_state)
|
||||
):
|
||||
"""
|
||||
Retrieve only tasks that have been updated since the specified timestamp.
|
||||
This endpoint is optimized for polling to reduce unnecessary data transfer.
|
||||
@@ -728,10 +825,10 @@ async def get_task_updates(request: Request, current_user: User = Depends(requir
|
||||
"""
|
||||
try:
|
||||
# Get query parameters
|
||||
since_param = request.query_params.get('since')
|
||||
page = int(request.query_params.get('page', 1))
|
||||
limit = min(int(request.query_params.get('limit', 20)), 100) # Cap at 100
|
||||
active_only = request.query_params.get('active_only', '').lower() == 'true'
|
||||
since_param = request.query_params.get("since")
|
||||
page = int(request.query_params.get("page", 1))
|
||||
limit = min(int(request.query_params.get("limit", 20)), 100) # Cap at 100
|
||||
active_only = request.query_params.get("active_only", "").lower() == "true"
|
||||
|
||||
if not since_param:
|
||||
# If no 'since' parameter, return paginated tasks (fallback behavior)
|
||||
@@ -741,7 +838,9 @@ async def get_task_updates(request: Request, current_user: User = Depends(requir
|
||||
try:
|
||||
since_timestamp = float(since_param)
|
||||
except (ValueError, TypeError):
|
||||
raise HTTPException(status_code=400, detail={"error": "Invalid 'since' timestamp format"})
|
||||
raise HTTPException(
|
||||
status_code=400, detail={"error": "Invalid 'since' timestamp format"}
|
||||
)
|
||||
|
||||
# Get all tasks
|
||||
all_tasks = get_all_tasks()
|
||||
@@ -768,16 +867,28 @@ async def get_task_updates(request: Request, current_user: User = Depends(requir
|
||||
is_active_task = is_task_active(task_status)
|
||||
|
||||
# Check if task has been updated since the given timestamp
|
||||
task_timestamp = last_status.get("timestamp") if last_status else task_info.get("created_at", 0)
|
||||
task_timestamp = (
|
||||
last_status.get("timestamp")
|
||||
if last_status
|
||||
else task_info.get("created_at", 0)
|
||||
)
|
||||
|
||||
# Always include active tasks in updates, apply filtering to others
|
||||
# Also include recently completed/terminal tasks to ensure "done" status gets sent
|
||||
is_recently_terminal = task_status in TERMINAL_TASK_STATES and task_timestamp > since_timestamp
|
||||
should_include = is_active_task or (task_timestamp > since_timestamp and not active_only) or is_recently_terminal
|
||||
is_recently_terminal = (
|
||||
task_status in TERMINAL_TASK_STATES and task_timestamp > since_timestamp
|
||||
)
|
||||
should_include = (
|
||||
is_active_task
|
||||
or (task_timestamp > since_timestamp and not active_only)
|
||||
or is_recently_terminal
|
||||
)
|
||||
|
||||
if should_include:
|
||||
# Construct the same detailed task object as in list_tasks()
|
||||
task_response = _build_task_response(task_info, last_status, task_id, current_time, request)
|
||||
task_response = _build_task_response(
|
||||
task_info, last_status, task_id, current_time, request
|
||||
)
|
||||
|
||||
if is_active_task:
|
||||
active_tasks.append(task_response)
|
||||
@@ -786,21 +897,26 @@ async def get_task_updates(request: Request, current_user: User = Depends(requir
|
||||
|
||||
# Apply pagination to non-active tasks
|
||||
offset = (page - 1) * limit
|
||||
paginated_updated_tasks = updated_tasks[offset:offset + limit] if not active_only else []
|
||||
paginated_updated_tasks = (
|
||||
updated_tasks[offset : offset + limit] if not active_only else []
|
||||
)
|
||||
|
||||
# Combine active tasks (always shown) with paginated updated tasks
|
||||
all_returned_tasks = active_tasks + paginated_updated_tasks
|
||||
|
||||
# Sort by priority (active first, then by creation time)
|
||||
all_returned_tasks.sort(key=lambda x: (
|
||||
all_returned_tasks.sort(
|
||||
key=lambda x: (
|
||||
0 if x.get("task_id") in [t["task_id"] for t in active_tasks] else 1,
|
||||
-(x.get("created_at") or 0)
|
||||
))
|
||||
-(x.get("created_at") or 0),
|
||||
)
|
||||
)
|
||||
|
||||
response = {
|
||||
"tasks": all_returned_tasks,
|
||||
"current_timestamp": current_time,
|
||||
"total_tasks": task_counts["active"] + task_counts["retrying"], # Only active/retrying tasks for counter
|
||||
"total_tasks": task_counts["active"]
|
||||
+ task_counts["retrying"], # Only active/retrying tasks for counter
|
||||
"all_tasks_count": len(all_tasks), # Total count of all tasks
|
||||
"task_counts": task_counts, # Categorized counts
|
||||
"active_tasks": len(active_tasks),
|
||||
@@ -811,18 +927,22 @@ async def get_task_updates(request: Request, current_user: User = Depends(requir
|
||||
"limit": limit,
|
||||
"total_non_active": len(updated_tasks),
|
||||
"has_more": len(updated_tasks) > offset + limit,
|
||||
"returned_non_active": len(paginated_updated_tasks)
|
||||
}
|
||||
"returned_non_active": len(paginated_updated_tasks),
|
||||
},
|
||||
}
|
||||
|
||||
logger.debug(f"Returning {len(active_tasks)} active + {len(paginated_updated_tasks)} paginated tasks out of {len(all_tasks)} total")
|
||||
logger.debug(
|
||||
f"Returning {len(active_tasks)} active + {len(paginated_updated_tasks)} paginated tasks out of {len(all_tasks)} total"
|
||||
)
|
||||
return response
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/prgs/updates: {e}", exc_info=True)
|
||||
raise HTTPException(status_code=500, detail={"error": "Failed to retrieve task updates"})
|
||||
raise HTTPException(
|
||||
status_code=500, detail={"error": "Failed to retrieve task updates"}
|
||||
)
|
||||
|
||||
|
||||
@router.post("/cancel/all")
|
||||
@@ -855,11 +975,15 @@ async def cancel_all_tasks(current_user: User = Depends(require_auth_from_state)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/prgs/cancel/all: {e}", exc_info=True)
|
||||
raise HTTPException(status_code=500, detail={"error": "Failed to cancel all tasks"})
|
||||
raise HTTPException(
|
||||
status_code=500, detail={"error": "Failed to cancel all tasks"}
|
||||
)
|
||||
|
||||
|
||||
@router.post("/cancel/{task_id}")
|
||||
async def cancel_task_endpoint(task_id: str, current_user: User = Depends(require_auth_from_state)):
|
||||
async def cancel_task_endpoint(
|
||||
task_id: str, current_user: User = Depends(require_auth_from_state)
|
||||
):
|
||||
"""
|
||||
Cancel a running or queued task.
|
||||
|
||||
@@ -888,7 +1012,7 @@ async def cancel_task_endpoint(task_id: str, current_user: User = Depends(requir
|
||||
detail={
|
||||
"status": "error",
|
||||
"message": "Cancellation for old system is not supported in the new API. Please use the new task ID format.",
|
||||
}
|
||||
},
|
||||
)
|
||||
except HTTPException:
|
||||
raise
|
||||
@@ -897,7 +1021,9 @@ async def cancel_task_endpoint(task_id: str, current_user: User = Depends(requir
|
||||
|
||||
|
||||
@router.delete("/delete/{task_id}")
|
||||
async def delete_task(task_id: str, current_user: User = Depends(require_auth_from_state)):
|
||||
async def delete_task(
|
||||
task_id: str, current_user: User = Depends(require_auth_from_state)
|
||||
):
|
||||
"""
|
||||
Delete a task's information and history.
|
||||
|
||||
@@ -916,7 +1042,9 @@ async def delete_task(task_id: str, current_user: User = Depends(require_auth_fr
|
||||
|
||||
|
||||
@router.get("/stream")
|
||||
async def stream_task_updates(request: Request, current_user: User = Depends(get_current_user_from_state)):
|
||||
async def stream_task_updates(
|
||||
request: Request, current_user: User = Depends(get_current_user_from_state)
|
||||
):
|
||||
"""
|
||||
Stream real-time task updates via Server-Sent Events (SSE).
|
||||
Now uses event-driven architecture for true real-time updates.
|
||||
@@ -930,7 +1058,7 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
"""
|
||||
|
||||
# Get query parameters
|
||||
active_only = request.query_params.get('active_only', '').lower() == 'true'
|
||||
active_only = request.query_params.get("active_only", "").lower() == "true"
|
||||
|
||||
async def event_generator():
|
||||
# Create a queue for this client
|
||||
@@ -938,12 +1066,14 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
|
||||
try:
|
||||
# Register this client with the broadcaster
|
||||
logger.info(f"SSE Stream: New client connecting...")
|
||||
logger.debug(f"SSE Stream: New client connecting...")
|
||||
await sse_broadcaster.add_client(client_queue)
|
||||
logger.info(f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}")
|
||||
logger.debug(f"SSE Stream: Client registered successfully, total clients: {len(sse_broadcaster.clients)}")
|
||||
|
||||
# Send initial data immediately upon connection
|
||||
initial_data = await generate_task_update_event(time.time(), active_only, request)
|
||||
initial_data = await generate_task_update_event(
|
||||
time.time(), active_only, request
|
||||
)
|
||||
yield initial_data
|
||||
|
||||
# Also send any active tasks as callback-style events to newly connected clients
|
||||
@@ -961,7 +1091,9 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
task_status = get_task_status_from_last_status(last_status)
|
||||
|
||||
# Send recent callback data for active or recently completed tasks
|
||||
if is_task_active(task_status) or (last_status and last_status.get("timestamp", 0) > time.time() - 30):
|
||||
if is_task_active(task_status) or (
|
||||
last_status and last_status.get("timestamp", 0) > time.time() - 30
|
||||
):
|
||||
if last_status and "raw_callback" in last_status:
|
||||
callback_event = {
|
||||
"task_id": task_id,
|
||||
@@ -969,11 +1101,11 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
"timestamp": last_status.get("timestamp", time.time()),
|
||||
"change_type": "callback",
|
||||
"event_type": "progress_update",
|
||||
"replay": True # Mark as replay for client
|
||||
"replay": True, # Mark as replay for client
|
||||
}
|
||||
event_json = json.dumps(callback_event)
|
||||
yield f"data: {event_json}\n\n"
|
||||
logger.info(f"SSE Stream: Sent replay callback for task {task_id}")
|
||||
logger.debug(f"SSE Stream: Sent replay callback for task {task_id}")
|
||||
|
||||
# Send periodic heartbeats and listen for real-time events
|
||||
last_heartbeat = time.time()
|
||||
@@ -983,7 +1115,9 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
try:
|
||||
# Wait for either an event or timeout for heartbeat
|
||||
try:
|
||||
event_data = await asyncio.wait_for(client_queue.get(), timeout=heartbeat_interval)
|
||||
event_data = await asyncio.wait_for(
|
||||
client_queue.get(), timeout=heartbeat_interval
|
||||
)
|
||||
# Send the real-time event
|
||||
yield event_data
|
||||
last_heartbeat = time.time()
|
||||
@@ -993,7 +1127,15 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
if current_time - last_heartbeat >= heartbeat_interval:
|
||||
# Generate current task counts for heartbeat
|
||||
all_tasks = get_all_tasks()
|
||||
task_counts = {"active": 0, "queued": 0, "completed": 0, "error": 0, "cancelled": 0, "retrying": 0, "skipped": 0}
|
||||
task_counts = {
|
||||
"active": 0,
|
||||
"queued": 0,
|
||||
"completed": 0,
|
||||
"error": 0,
|
||||
"cancelled": 0,
|
||||
"retrying": 0,
|
||||
"skipped": 0,
|
||||
}
|
||||
|
||||
for task_summary in all_tasks:
|
||||
task_id = task_summary.get("task_id")
|
||||
@@ -1003,13 +1145,18 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
if not task_info:
|
||||
continue
|
||||
last_status = get_last_task_status(task_id)
|
||||
task_status = get_task_status_from_last_status(last_status)
|
||||
task_status = get_task_status_from_last_status(
|
||||
last_status
|
||||
)
|
||||
|
||||
if task_status == ProgressState.RETRYING:
|
||||
task_counts["retrying"] += 1
|
||||
elif task_status in {ProgressState.QUEUED, "pending"}:
|
||||
task_counts["queued"] += 1
|
||||
elif task_status in {ProgressState.COMPLETE, ProgressState.DONE}:
|
||||
elif task_status in {
|
||||
ProgressState.COMPLETE,
|
||||
ProgressState.DONE,
|
||||
}:
|
||||
task_counts["completed"] += 1
|
||||
elif task_status == ProgressState.ERROR:
|
||||
task_counts["error"] += 1
|
||||
@@ -1022,9 +1169,10 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
|
||||
heartbeat_data = {
|
||||
"current_timestamp": current_time,
|
||||
"total_tasks": task_counts["active"] + task_counts["retrying"],
|
||||
"total_tasks": task_counts["active"]
|
||||
+ task_counts["retrying"],
|
||||
"task_counts": task_counts,
|
||||
"change_type": "heartbeat"
|
||||
"change_type": "heartbeat",
|
||||
}
|
||||
|
||||
event_json = json.dumps(heartbeat_data)
|
||||
@@ -1034,12 +1182,18 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
except Exception as e:
|
||||
logger.error(f"Error in SSE event streaming: {e}", exc_info=True)
|
||||
# Send error event and continue
|
||||
error_data = json.dumps({"error": "Internal server error", "timestamp": time.time(), "change_type": "error"})
|
||||
error_data = json.dumps(
|
||||
{
|
||||
"error": "Internal server error",
|
||||
"timestamp": time.time(),
|
||||
"change_type": "error",
|
||||
}
|
||||
)
|
||||
yield f"data: {error_data}\n\n"
|
||||
await asyncio.sleep(1)
|
||||
|
||||
except asyncio.CancelledError:
|
||||
logger.info("SSE client disconnected")
|
||||
logger.debug("SSE client disconnected")
|
||||
return
|
||||
except Exception as e:
|
||||
logger.error(f"SSE connection error: {e}", exc_info=True)
|
||||
@@ -1056,12 +1210,14 @@ async def stream_task_updates(request: Request, current_user: User = Depends(get
|
||||
"Connection": "keep-alive",
|
||||
"Content-Type": "text/event-stream",
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Headers": "Cache-Control"
|
||||
}
|
||||
"Access-Control-Allow-Headers": "Cache-Control",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
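The event-or-heartbeat loop above reduces to this pattern (a sketch; the interval value is an assumption, since heartbeat_interval is assigned outside the visible hunk):

import asyncio
import json
import time

async def pump(queue: asyncio.Queue, heartbeat_interval: float = 15.0):
    # Yield real-time events as they arrive; emit a heartbeat frame on timeout
    while True:
        try:
            yield await asyncio.wait_for(queue.get(), timeout=heartbeat_interval)
        except asyncio.TimeoutError:
            heartbeat = {"change_type": "heartbeat", "current_timestamp": time.time()}
            yield f"data: {json.dumps(heartbeat)}\n\n"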
async def generate_task_update_event(since_timestamp: float, active_only: bool, request: Request) -> str:
async def generate_task_update_event(
since_timestamp: float, active_only: bool, request: Request
) -> str:
"""
Generate initial task update event for SSE connection.
This replicates the logic from get_task_updates but for SSE format.
@@ -1089,16 +1245,28 @@ async def generate_task_update_event(since_timestamp: float, active_only: bool,
is_active_task = is_task_active(task_status)

# Check if task has been updated since the given timestamp
task_timestamp = last_status.get("timestamp") if last_status else task_info.get("created_at", 0)
task_timestamp = (
last_status.get("timestamp")
if last_status
else task_info.get("created_at", 0)
)

# Always include active tasks in updates, apply filtering to others
# Also include recently completed/terminal tasks to ensure "done" status gets sent
is_recently_terminal = task_status in TERMINAL_TASK_STATES and task_timestamp > since_timestamp
should_include = is_active_task or (task_timestamp > since_timestamp and not active_only) or is_recently_terminal
is_recently_terminal = (
task_status in TERMINAL_TASK_STATES and task_timestamp > since_timestamp
)
should_include = (
is_active_task
or (task_timestamp > since_timestamp and not active_only)
or is_recently_terminal
)

if should_include:
# Construct the same detailed task object as in updates endpoint
task_response = _build_task_response(task_info, last_status, task_id, current_time, request)
task_response = _build_task_response(
task_info, last_status, task_id, current_time, request
)

if is_active_task:
active_tasks.append(task_response)
@@ -1109,17 +1277,19 @@ async def generate_task_update_event(since_timestamp: float, active_only: bool,
all_returned_tasks = active_tasks + updated_tasks

# Sort by priority (active first, then by creation time)
all_returned_tasks.sort(key=lambda x: (
all_returned_tasks.sort(
key=lambda x: (
0 if x.get("task_id") in [t["task_id"] for t in active_tasks] else 1,
-(x.get("created_at") or 0)
))
-(x.get("created_at") or 0),
)
)

initial_data = {
"tasks": all_returned_tasks,
"current_timestamp": current_time,
"updated_count": len(updated_tasks),
"since_timestamp": since_timestamp,
"initial": True # Mark as initial load
"initial": True, # Mark as initial load
}

# Add global task counts since this bypasses the broadcaster
@@ -1130,14 +1300,20 @@ async def generate_task_update_event(since_timestamp: float, active_only: bool,

except Exception as e:
logger.error(f"Error generating initial SSE event: {e}", exc_info=True)
error_data = json.dumps({"error": "Failed to load initial data", "timestamp": time.time()})
error_data = json.dumps(
{"error": "Failed to load initial data", "timestamp": time.time()}
)
return f"data: {error_data}\n\n"


# IMPORTANT: This parameterized route MUST come AFTER all specific routes
# Otherwise FastAPI will match specific routes like "/updates" as task_id parameters
@router.get("/{task_id}")
async def get_task_details(task_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
async def get_task_details(
task_id: str,
request: Request,
current_user: User = Depends(require_auth_from_state),
):
"""
Return a JSON object with the resource type, its name (title),
the last progress update, and, if available, the original request parameters.

@@ -101,7 +101,7 @@ def download_album(
)
dl.download_albumspo(
link_album=url, # Spotify URL
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality, # Deezer quality
recursive_quality=recursive_quality,
recursive_download=False,
@@ -159,7 +159,7 @@ def download_album(
)
spo.download_album(
link_album=url, # Spotify URL
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=fall_quality, # Spotify quality
recursive_quality=recursive_quality,
recursive_download=False,
@@ -216,7 +216,7 @@ def download_album(
)
spo.download_album(
link_album=url,
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality,
recursive_quality=recursive_quality,
recursive_download=False,
@@ -260,7 +260,7 @@ def download_album(
)
dl.download_albumdee( # Deezer URL, download via Deezer
link_album=url,
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality,
recursive_quality=recursive_quality,
recursive_download=False,

@@ -28,6 +28,7 @@ CONFIG_FILE_PATH = Path("./data/config/main.json")

DEFAULT_MAIN_CONFIG = {
"service": "spotify",
"version": "3.3.0",
"spotify": "",
"deezer": "",
"fallback": False,

@@ -2,6 +2,8 @@ import subprocess
import logging
import time
import threading
import os
import sys

# Import Celery task utilities
from .celery_config import get_config_params, MAX_CONCURRENT_DL
@@ -40,12 +42,16 @@ class CeleryManager:
)

def _get_worker_command(
self, queues, concurrency, worker_name_suffix, log_level="INFO"
self, queues, concurrency, worker_name_suffix, log_level_env=None
):
# Use the explicit log level if provided, otherwise LOG_LEVEL from the environment (default WARNING)
log_level = log_level_env if log_level_env else os.getenv("LOG_LEVEL", "WARNING").upper()
# Use a unique worker name to avoid conflicts.
# %h is replaced by celery with the actual hostname.
hostname = f"worker_{worker_name_suffix}@%h"
command = [
sys.executable,
"-m",
"celery",
"-A",
self.app_name,
@@ -73,7 +79,10 @@ class CeleryManager:
log_method = logger.info # Default log method

if error: # This is a stderr stream
if " - ERROR - " in line_stripped or " - CRITICAL - " in line_stripped:
if (
" - ERROR - " in line_stripped
or " - CRITICAL - " in line_stripped
):
log_method = logger.error
elif " - WARNING - " in line_stripped:
log_method = logger.warning
@@ -117,6 +126,7 @@ class CeleryManager:
queues="downloads",
concurrency=self.concurrency,
worker_name_suffix="dlw", # Download Worker
log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
)
logger.info(
f"Starting Celery Download Worker with command: {' '.join(download_cmd)}"
@@ -151,7 +161,8 @@ class CeleryManager:
queues="utility_tasks,default", # Listen to utility and default
concurrency=5, # Increased concurrency for SSE updates and utility tasks
worker_name_suffix="utw", # Utility Worker
log_level="ERROR" # Reduce log verbosity for utility worker (only errors)
log_level_env=os.getenv("LOG_LEVEL", "ERROR").upper(),
)
logger.info(
f"Starting Celery Utility Worker with command: {' '.join(utility_cmd)}"
@@ -250,7 +261,7 @@ class CeleryManager:

# Restart only the download worker
download_cmd = self._get_worker_command(
"downloads", self.concurrency, "dlw"
"downloads", self.concurrency, "dlw", log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper()
)
logger.info(
f"Restarting Celery Download Worker with command: {' '.join(download_cmd)}"
@@ -366,10 +377,7 @@ celery_manager = CeleryManager()

# Example of how to use the manager (typically called from your main app script)
if __name__ == "__main__":
logging.basicConfig(
level=logging.INFO,
format="%(message)s",
)
# Removed logging.basicConfig as it's handled by the main app's setup_logging
logger.info("Starting Celery Manager example...")
celery_manager.start()
try:

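The log-level resolution introduced in _get_worker_command, isolated as a standalone sketch (this version also upper-cases an explicit argument, which the original passes through as-is):

import os

def resolve_worker_log_level(log_level_env: str | None = None) -> str:
    # Explicit argument wins; then the LOG_LEVEL env var; then WARNING
    return (log_level_env or os.getenv("LOG_LEVEL", "WARNING")).upper()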
@@ -246,7 +246,7 @@ class CeleryDownloadQueueManager:
"""Initialize the Celery-based download queue manager"""
self.max_concurrent = MAX_CONCURRENT_DL
self.paused = False
print(
logger.info(
f"Celery Download Queue Manager initialized with max_concurrent={self.max_concurrent}"
)

@@ -98,7 +98,7 @@ def download_playlist(
)
dl.download_playlistspo(
link_playlist=url, # Spotify URL
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality, # Deezer quality
recursive_quality=recursive_quality,
recursive_download=False,
@@ -161,7 +161,7 @@ def download_playlist(
)
spo.download_playlist(
link_playlist=url, # Spotify URL
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=fall_quality, # Spotify quality
recursive_quality=recursive_quality,
recursive_download=False,
@@ -224,7 +224,7 @@ def download_playlist(
)
spo.download_playlist(
link_playlist=url,
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality,
recursive_quality=recursive_quality,
recursive_download=False,
@@ -268,7 +268,7 @@ def download_playlist(
)
dl.download_playlistdee( # Deezer URL, download via Deezer
link_playlist=url,
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality,
recursive_quality=recursive_quality, # Usually False for playlists to get individual track qualities
recursive_download=False,

@@ -94,7 +94,7 @@ def download_track(
# download_trackspo means: Spotify URL, download via Deezer
dl.download_trackspo(
link_track=url, # Spotify URL
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality, # Deezer quality
recursive_quality=recursive_quality,
recursive_download=False,
@@ -153,7 +153,7 @@ def download_track(
)
spo.download_track(
link_track=url, # Spotify URL
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=fall_quality, # Spotify quality
recursive_quality=recursive_quality,
recursive_download=False,
@@ -169,7 +169,7 @@ def download_track(
convert_to=convert_to,
bitrate=bitrate,
artist_separator=artist_separator,
real_time_multiplier=real_time_multiplier,
spotify_metadata=spotify_metadata,
pad_number_width=pad_number_width,
)
print(
@@ -211,7 +211,7 @@ def download_track(
)
spo.download_track(
link_track=url,
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality,
recursive_quality=recursive_quality,
recursive_download=False,
@@ -254,7 +254,7 @@ def download_track(
)
dl.download_trackdee( # Deezer URL, download via Deezer
link_track=url,
output_dir="./downloads",
output_dir="/app/downloads",
quality_download=quality,
recursive_quality=recursive_quality,
recursive_download=False,

@@ -1098,7 +1098,7 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
# Get configuration settings

output_dir = (
"./downloads" # This matches the output_dir used in download functions
"/app/downloads" # This matches the output_dir used in download functions
)

# Get all tracks for the playlist
@@ -1125,14 +1125,14 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
skipped_missing_final_path = 0

for track in tracks:
# Use final_path from deezspot summary and convert from ./downloads to ../ relative path
# Use final_path from deezspot summary and convert from /app/downloads to ../ relative path
final_path = track.get("final_path")
if not final_path:
skipped_missing_final_path += 1
continue
normalized = str(final_path).replace("\\", "/")
if normalized.startswith("./downloads/"):
relative_path = normalized.replace("./downloads/", "../", 1)
if normalized.startswith("/app/downloads/"):
relative_path = normalized.replace("/app/downloads/", "../", 1)
elif "/downloads/" in normalized.lower():
idx = normalized.lower().rfind("/downloads/")
relative_path = "../" + normalized[idx + len("/downloads/") :]
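The path rewrite above, condensed into a standalone helper (a sketch mirroring the hunk's logic; it returns None where the original's fallback branch, not shown in this hunk, would take over):

def to_m3u_relative(final_path: str) -> str | None:
    # Container-absolute paths under /app/downloads become ../-relative m3u entries
    normalized = str(final_path).replace("\\", "/")
    if normalized.startswith("/app/downloads/"):
        return normalized.replace("/app/downloads/", "../", 1)
    if "/downloads/" in normalized.lower():
        idx = normalized.lower().rfind("/downloads/")
        return "../" + normalized[idx + len("/downloads/"):]
    return None  # fallback handling outside the hunk is not shown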
@@ -1,7 +1,7 @@
{
"name": "spotizerr-ui",
"private": true,
"version": "3.2.1",
"version": "3.3.0",
"type": "module",
"scripts": {
"dev": "vite",

@@ -49,7 +49,7 @@ export const SearchResultCard = ({ id, name, subtitle, imageUrl, type, onDownloa
       <button
         onClick={onDownload}
         disabled={!!status && status !== "error"}
-        className="absolute bottom-2 right-2 p-2 bg-button-success hover:bg-button-success-hover text-button-success-text rounded-full transition-opacity shadow-lg opacity-0 group-hover:opacity-100 duration-300 z-10 disabled:opacity-50 disabled:cursor-not-allowed"
+        className="absolute bottom-2 right-2 p-2 bg-button-success hover:bg-button-success-hover text-button-success-text rounded-full transition-opacity shadow-lg opacity-100 sm:opacity-0 sm:group-hover:opacity-100 duration-300 z-10 disabled:opacity-50 disabled:cursor-not-allowed"
         title={
           status
             ? status === "queued"
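Note: the only change here is visibility. `opacity-0 group-hover:opacity-100` becomes `opacity-100 sm:opacity-0 sm:group-hover:opacity-100`, so the download button is always visible on small (touch) screens, where hover does not exist, and stays hover-revealed from the `sm` breakpoint up. A stripped-down sketch of the pattern (component name and layout classes are illustrative):

```tsx
// Hypothetical minimal example of the responsive hover-reveal pattern:
// always visible on mobile, faded in on group hover from `sm` upward.
export const HoverRevealButton = ({ onClick }: { onClick: () => void }) => (
  <div className="group relative">
    {/* ...card content... */}
    <button
      onClick={onClick}
      className="opacity-100 sm:opacity-0 sm:group-hover:opacity-100 transition-opacity"
    >
      Download
    </button>
  </div>
);
```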
@@ -53,6 +53,19 @@ function extractApiErrorMessage(error: unknown): string {
   if (typeof data?.detail === "string") return data.detail;
   if (typeof data?.message === "string") return data.message;
   if (typeof data?.error === "string") return data.error;
+  // If data.error is an object, try to extract a message from it
+  if (typeof data?.error === "object" && data.error !== null && typeof data.error.message === "string") {
+    return data.error.message;
+  }
+  // If data is an object but none of the above matched, try JSON stringifying it
+  if (typeof data === "object" && data !== null) {
+    try {
+      return JSON.stringify(data);
+    } catch (e) {
+      // Fallback if stringify fails
+      return fallback;
+    }
+  }
   if (typeof anyErr?.message === "string") return anyErr.message;
   return fallback;
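With the two new branches, the helper resolves messages in the order `detail` → `message` → string `error` → `error.message` on an object `error` → `JSON.stringify` of the payload → the fallback. A condensed, standalone sketch of that order (assuming `data` is the parsed response body, as the surrounding axios usage suggests):

```ts
// Condensed standalone version of the fallback order; fallback text illustrative.
function messageFrom(data: any, fallback = "An unknown error occurred."): string {
  if (typeof data?.detail === "string") return data.detail;
  if (typeof data?.message === "string") return data.message;
  if (typeof data?.error === "string") return data.error;
  if (typeof data?.error === "object" && data.error !== null && typeof data.error.message === "string") {
    return data.error.message; // new branch: nested { error: { message } }
  }
  if (typeof data === "object" && data !== null) {
    try {
      return JSON.stringify(data); // new branch: last-resort dump of the payload
    } catch {
      return fallback;
    }
  }
  return fallback;
}

messageFrom({ detail: "Bad token" });                  // -> "Bad token"
messageFrom({ error: { message: "Account exists" } }); // -> "Account exists"
messageFrom({ code: 42 });                             // -> '{"code":42}'
```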
@@ -66,7 +79,6 @@ export function AccountsTab() {
   const queryClient = useQueryClient();
   const [activeService, setActiveService] = useState<Service>("spotify");
   const [isAdding, setIsAdding] = useState(false);
-  const [submitError, setSubmitError] = useState<string | null>(null);

   const { data: credentials, isLoading } = useQuery({
     queryKey: ["credentials", activeService],

@@ -85,15 +97,12 @@ export function AccountsTab() {
     onSuccess: () => {
       toast.success("Account added successfully!");
       queryClient.invalidateQueries({ queryKey: ["credentials", activeService] });
-      queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate config to update active Spotify account in UI
+      queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate config to update active Spotify/Deezer account in UI
       setIsAdding(false);
-      setSubmitError(null);
       reset();
     },
     onError: (error) => {
       const msg = extractApiErrorMessage(error);
-      setSubmitError(msg);
       toast.error(msg);
     },
   });

@@ -110,7 +119,6 @@ export function AccountsTab() {
   });

   const onSubmit: SubmitHandler<AccountFormData> = (data) => {
-    setSubmitError(null);
     addMutation.mutate({ service: activeService, data });
   };

@@ -118,11 +126,6 @@ export function AccountsTab() {
     <form onSubmit={handleSubmit(onSubmit)} className="p-4 border border-line dark:border-border-dark rounded-lg mt-4 space-y-4">
       <h4 className="font-semibold text-content-primary dark:text-content-primary-dark">Add New {activeService === "spotify" ? "Spotify" : "Deezer"} Account</h4>

-      {submitError && (
-        <div className="text-error-text bg-error-muted border border-error rounded p-2 text-sm">
-          {submitError}
-        </div>
-      )}
-
       <div className="flex flex-col gap-2">
         <label htmlFor="accountName" className="text-content-primary dark:text-content-primary-dark">Account Name</label>
@@ -89,6 +89,7 @@ export function WatchTab() {
     onSuccess: () => {
       toast.success("Watch settings saved successfully!");
       queryClient.invalidateQueries({ queryKey: ["watchConfig"] });
+      queryClient.invalidateQueries({ queryKey: ["config"] }); // Invalidate main config to refresh watch.enabled in SettingsProvider
     },
     onError: (error: any) => {
       const message = error?.response?.data?.error || error?.message || "Unknown error";
@@ -8,7 +8,7 @@ import type {
   AuthStatusResponse,
   User,
   CreateUserRequest,
-  SSOStatusResponse
+  SSOStatusResponse,
 } from "@/types/auth";

 class AuthApiClient {

@@ -38,7 +38,7 @@ class AuthApiClient {
         }
         return config;
       },
-      (error) => Promise.reject(error)
+      (error) => Promise.reject(error),
     );

     // Response interceptor for error handling

@@ -96,7 +96,8 @@ class AuthApiClient {
             description: "The server did not respond in time. Please try again later.",
           });
         } else {
-          const errorMessage = error.response?.data?.detail ||
+          const errorMessage =
+            error.response?.data?.detail ||
             error.response?.data?.error ||
             error.message ||
             "An unknown error occurred.";

@@ -109,7 +110,7 @@ class AuthApiClient {
           }
         }
         return Promise.reject(error);
-      }
+      },
     );
   }

@@ -158,7 +159,7 @@ class AuthApiClient {

     if (token) {
       this.token = token;
-      console.log(`Loaded ${isRemembered ? 'persistent' : 'session'} token from storage`);
+      console.log(`Loaded ${isRemembered ? "persistent" : "session"} token from storage`);
     }
   }
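The reformatted log line distinguishes a "persistent" token (remember me) from a "session" token. A hedged sketch of the storage split this implies; the key name and helpers are hypothetical, since the actual storage code is not part of this diff:

```ts
// Hypothetical helpers: persist across browser restarts only with "remember me".
const TOKEN_KEY = "auth_token";

function storeToken(token: string, rememberMe: boolean): void {
  const storage = rememberMe ? localStorage : sessionStorage;
  storage.setItem(TOKEN_KEY, token);
}

function loadToken(): { token: string | null; isRemembered: boolean } {
  // A localStorage hit means the user opted into persistence.
  const persistent = localStorage.getItem(TOKEN_KEY);
  if (persistent) return { token: persistent, isRemembered: true };
  return { token: sessionStorage.getItem(TOKEN_KEY), isRemembered: false };
}
```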
@@ -229,7 +230,7 @@ class AuthApiClient {
     this.setToken(loginData.access_token, rememberMe);

     toast.success("Login Successful", {
-      description: `Test , ${loginData.user.username}!`,
+      description: `Welcome, ${loginData.user.username}!`,
     });

     return loginData;
15 spotizerr-ui/src/lib/spotify-utils.ts Normal file
@@ -0,0 +1,15 @@
+export interface ParsedSpotifyUrl {
+  type: "track" | "album" | "playlist" | "artist" | "unknown";
+  id: string;
+}
+
+export const parseSpotifyUrl = (url: string): ParsedSpotifyUrl => {
+  const match = url.match(/https:\/\/open\.spotify\.com(?:\/intl-[a-z]{2})?\/(track|album|playlist|artist)\/([a-zA-Z0-9]+)(?:\?.*)?/);
+  if (match) {
+    return {
+      type: match[1] as ParsedSpotifyUrl["type"],
+      id: match[2],
+    };
+  }
+  return { type: "unknown", id: "" };
+};
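Usage of the new helper: the regex accepts an optional `intl-xx` locale segment and a trailing query string, and anything else falls through to `unknown` (the IDs below are illustrative):

```ts
import { parseSpotifyUrl } from "@/lib/spotify-utils";

parseSpotifyUrl("https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC");
// -> { type: "track", id: "4uLU6hMCjMI75M1A2tKUQC" }

parseSpotifyUrl("https://open.spotify.com/intl-de/album/1ATL5GLyefJaxhQzSPVrLX?si=abc");
// -> { type: "album", id: "1ATL5GLyefJaxhQzSPVrLX" } (locale and query ignored)

parseSpotifyUrl("https://example.com/not-spotify");
// -> { type: "unknown", id: "" }
```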
@@ -166,18 +166,32 @@ export const History = () => {
       cell: (info) => {
         const entry = info.row.original;
         const isChild = "album_title" in entry;
-        return isChild ? (
+        const historyEntry = entry as HistoryEntry;
+        const spotifyId = historyEntry.external_ids?.spotify;
+        const downloadType = historyEntry.download_type;
+
+        const titleContent = isChild ? (
           <span className="pl-4 text-muted-foreground">└─ {entry.title}</span>
         ) : (
           <div className="flex items-center gap-2">
             <span className="font-semibold">{entry.title}</span>
-            {(entry as HistoryEntry).children_table && (
+            {historyEntry.children_table && (
               <span className="text-xs bg-primary/10 text-primary px-1.5 py-0.5 rounded">
-                {(entry as HistoryEntry).total_tracks || "?"} tracks
+                {historyEntry.total_tracks || "?"} tracks
               </span>
             )}
           </div>
         );
+
+        if (!isChild && spotifyId && downloadType) {
+          return (
+            <a href={`/${downloadType}/${spotifyId}`} className="hover:underline">
+              {titleContent}
+            </a>
+          );
+        }
+
+        return titleContent;
       },
     }),
     columnHelper.accessor("artists", {
@@ -3,22 +3,26 @@ import { useNavigate, useSearch, useRouterState } from "@tanstack/react-router";
 import { useDebounce } from "use-debounce";
 import { toast } from "sonner";
 import type { TrackType, AlbumType, SearchResult } from "@/types/spotify";
+import { parseSpotifyUrl } from "@/lib/spotify-utils";
 import { QueueContext } from "@/contexts/queue-context";
 import { SearchResultCard } from "@/components/SearchResultCard";
 import { indexRoute } from "@/router";
 import { Music, Disc, User, ListMusic } from "lucide-react";
+import { authApiClient } from "@/lib/api-client";
+import { useSettings } from "@/contexts/settings-context";
+import { FaEye, FaDownload } from "react-icons/fa";

 // Utility function to safely get properties from search results
 const safelyGetProperty = <T,>(obj: any, path: string[], fallback: T): T => {
   try {
     let current = obj;
     for (const key of path) {
-      if (current == null || typeof current !== 'object') {
+      if (current == null || typeof current !== "object") {
         return fallback;
       }
       current = current[key];
     }
-    return current ?? fallback;
+    return (current ?? fallback) as T;
   } catch {
     return fallback;
   }
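The helper walks a property path defensively, so malformed search results degrade to the fallback instead of throwing. Illustrative calls, assuming `safelyGetProperty` from the hunk above is in scope (note that array indices are passed as string keys, which works because arrays are objects):

```ts
const track = { album: { images: [{ url: "https://i.scdn.co/image/abc123" }] } };

safelyGetProperty(track, ["album", "images", "0", "url"], undefined);
// -> "https://i.scdn.co/image/abc123"

safelyGetProperty(track, ["album", "images", "5", "url"], undefined);
// -> undefined (missing element short-circuits to the fallback)

safelyGetProperty<any[]>(null, ["artists"], []);
// -> [] (null root returns the fallback immediately)
```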
@@ -31,18 +35,23 @@ export const Home = () => {
   const { q, type } = useSearch({ from: "/" });
   const { items: allResults } = indexRoute.useLoaderData();
   const isLoading = useRouterState({ select: (s) => s.status === "pending" });
+  const { settings } = useSettings();

   const [query, setQuery] = useState(q || "");
-  const [searchType, setSearchType] = useState<"track" | "album" | "artist" | "playlist">(type || "track");
+  const [searchType, setSearchType] = useState<
+    "track" | "album" | "artist" | "playlist"
+  >(type || "track");
   const [debouncedQuery] = useDebounce(query, 500);
+  const [activeTab, setActiveTab] = useState<"search" | "bulkAdd">("search");
+  const [linksInput, setLinksInput] = useState("");
+  const [isBulkAdding, setIsBulkAdding] = useState(false);
+  const [isBulkWatching, setIsBulkWatching] = useState(false);

   const [displayedResults, setDisplayedResults] = useState<SearchResult[]>([]);
   const [isLoadingMore, setIsLoadingMore] = useState(false);
   const context = useContext(QueueContext);
   const loaderRef = useRef<HTMLDivElement | null>(null);

+  // Removed scroll locking on mobile empty state to avoid blocking scroll globally

   useEffect(() => {
     navigate({ search: (prev) => ({ ...prev, q: debouncedQuery, type: searchType }) });
   }, [debouncedQuery, searchType, navigate]);
@@ -56,6 +65,131 @@ export const Home = () => {
   }
   const { addItem } = context;

+  const handleAddBulkLinks = useCallback(async () => {
+    const allLinks = linksInput
+      .split("\n")
+      .map((link) => link.trim())
+      .filter(Boolean);
+    if (allLinks.length === 0) {
+      toast.info("No links provided to add.");
+      return;
+    }
+
+    const supportedLinks: string[] = [];
+    const unsupportedLinks: string[] = [];
+
+    allLinks.forEach((link) => {
+      const parsed = parseSpotifyUrl(link);
+      if (parsed.type !== "unknown") {
+        supportedLinks.push(link);
+      } else {
+        unsupportedLinks.push(link);
+      }
+    });
+
+    if (unsupportedLinks.length > 0) {
+      toast.warning("Some links are not supported and will be skipped.", {
+        description: `Unsupported: ${unsupportedLinks.join(", ")}`,
+      });
+    }
+
+    if (supportedLinks.length === 0) {
+      toast.info("No supported links to add.");
+      return;
+    }
+
+    setIsBulkAdding(true);
+    try {
+      const response = await authApiClient.client.post("/bulk/bulk-add-spotify-links", {
+        links: supportedLinks,
+      });
+      const { count, failed_links } = response.data;
+
+      if (failed_links && failed_links.length > 0) {
+        toast.warning("Bulk Add Completed with Warnings", {
+          description: `${count} links added. Failed to add ${failed_links.length} links: ${failed_links.join(
+            ", "
+          )}`,
+        });
+      } else {
+        toast.success("Bulk Add Successful", {
+          description: `${count} links added to queue.`,
+        });
+      }
+      setLinksInput(""); // Clear input after successful add
+    } catch (error: any) {
+      const errorMessage = error.response?.data?.detail?.message || error.message;
+      const failedLinks = error.response?.data?.detail?.failed_links || [];
+
+      let description = errorMessage;
+      if (failedLinks.length > 0) {
+        description += ` Failed links: ${failedLinks.join(", ")}`;
+      }
+
+      toast.error("Bulk Add Failed", {
+        description: description,
+      });
+      if (failedLinks.length > 0) {
+        console.error("Failed links:", failedLinks);
+      }
+    } finally {
+      setIsBulkAdding(false);
+    }
+  }, [linksInput]);
+
+  const handleWatchBulkLinks = useCallback(async () => {
+    const links = linksInput
+      .split("\n")
+      .map((link) => link.trim())
+      .filter(Boolean);
+    if (links.length === 0) {
+      toast.info("No links provided to watch.");
+      return;
+    }
+
+    const supportedLinks: { type: "artist" | "playlist"; id: string }[] = [];
+    const unsupportedLinks: string[] = [];
+
+    links.forEach((link) => {
+      const parsed = parseSpotifyUrl(link);
+      if (parsed.type === "artist" || parsed.type === "playlist") {
+        supportedLinks.push({ type: parsed.type, id: parsed.id });
+      } else {
+        unsupportedLinks.push(link);
+      }
+    });
+
+    if (unsupportedLinks.length > 0) {
+      toast.warning("Some links are not supported for watching.", {
+        description: `Unsupported: ${unsupportedLinks.join(", ")}`,
+      });
+    }
+
+    if (supportedLinks.length === 0) {
+      toast.info("No supported links to watch.");
+      return;
+    }
+
+    setIsBulkWatching(true);
+    try {
+      const watchPromises = supportedLinks.map((item) =>
+        authApiClient.client.put(`/${item.type}/watch/${item.id}`)
+      );
+      await Promise.all(watchPromises);
+      toast.success("Bulk Watch Successful", {
+        description: `${supportedLinks.length} supported links added to watchlist.`,
+      });
+      setLinksInput(""); // Clear input after successful add
+    } catch (error: any) {
+      const errorMessage = error.response?.data?.detail?.message || error.message;
+      toast.error("Bulk Watch Failed", {
+        description: errorMessage,
+      });
+    } finally {
+      setIsBulkWatching(false);
+    }
+  }, [linksInput]);
+
   const loadMore = useCallback(() => {
     setIsLoadingMore(true);
     setTimeout(() => {
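The handler above assumes a specific contract for the bulk endpoint: POST `{ links }` to `/bulk/bulk-add-spotify-links`, read `{ count, failed_links }` back, and on a failed request surface `detail.message` / `detail.failed_links`. Sketched as types inferred from this frontend usage only; the backend may differ:

```ts
// Shapes inferred from the frontend code in this diff, not from the backend.
interface BulkAddRequest {
  links: string[]; // pre-filtered Spotify URLs, one per line in the UI
}

interface BulkAddResponse {
  count: number;           // links accepted into the download queue
  failed_links?: string[]; // links the backend could not queue
}

interface BulkAddErrorDetail {
  message: string;         // read as error.response.data.detail.message
  failed_links?: string[]; // read as error.response.data.detail.failed_links
}
```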
@@ -74,7 +208,7 @@ export const Home = () => {
           loadMore();
         }
       },
-      { threshold: 1.0 },
+      { threshold: 1.0 }
     );

     const currentLoader = loaderRef.current;
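For context, the sentinel-based infinite scroll this hunk touches is the standard IntersectionObserver pattern: observe a trailing element and load the next page when it becomes fully visible (`threshold: 1.0`). A minimal standalone sketch, not the component's exact wiring:

```ts
// Observe a sentinel element; fire loadMore when it is fully in view.
// Returns a cleanup function, mirroring a React effect's teardown.
function watchSentinel(sentinel: HTMLElement, loadMore: () => void): () => void {
  const observer = new IntersectionObserver(
    (entries) => {
      if (entries[0].isIntersecting) {
        loadMore(); // assumed to append the next slice of results
      }
    },
    { threshold: 1.0 }
  );
  observer.observe(sentinel);
  return () => observer.disconnect();
}
```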
@@ -95,7 +229,7 @@ export const Home = () => {
       addItem({ spotifyId: track.id, type: "track", name: track.name, artist: artistName });
       toast.info(`Adding ${track.name} to queue...`);
     },
-    [addItem],
+    [addItem]
   );

   const handleDownloadAlbum = useCallback(
@@ -104,38 +238,47 @@ export const Home = () => {
       addItem({ spotifyId: album.id, type: "album", name: album.name, artist: artistName });
       toast.info(`Adding ${album.name} to queue...`);
     },
-    [addItem],
+    [addItem]
   );

   const resultComponent = useMemo(() => {
     return (
       <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
-        {displayedResults.map((item) => {
+        {displayedResults
+          .map((item) => {
             // Add safety checks for essential properties
             if (!item || !item.id || !item.name || !item.model) {
               return null;
             }

             let imageUrl;
-            let onDownload;
-            let subtitle;
+            let onDownload: (() => void) | undefined;
+            let subtitle: string | undefined;

             if (item.model === "track") {
-              imageUrl = safelyGetProperty(item, ['album', 'images', '0', 'url'], undefined);
+              imageUrl = safelyGetProperty(item, ["album", "images", "0", "url"], undefined);
               onDownload = () => handleDownloadTrack(item as TrackType);
-              const artists = safelyGetProperty(item, ['artists'], []);
-              subtitle = Array.isArray(artists) ? artists.map((a: any) => safelyGetProperty(a, ['name'], 'Unknown')).join(", ") : "Unknown Artist";
+              const artists = safelyGetProperty(item, ["artists"], []);
+              subtitle = Array.isArray(artists)
+                ? artists
+                    .map((a: any) => safelyGetProperty(a, ["name"], "Unknown"))
+                    .join(", ")
+                : "Unknown Artist";
             } else if (item.model === "album") {
-              imageUrl = safelyGetProperty(item, ['images', '0', 'url'], undefined);
+              imageUrl = safelyGetProperty(item, ["images", "0", "url"], undefined);
               onDownload = () => handleDownloadAlbum(item as AlbumType);
-              const artists = safelyGetProperty(item, ['artists'], []);
-              subtitle = Array.isArray(artists) ? artists.map((a: any) => safelyGetProperty(a, ['name'], 'Unknown')).join(", ") : "Unknown Artist";
+              const artists = safelyGetProperty(item, ["artists"], []);
+              subtitle = Array.isArray(artists)
+                ? artists
+                    .map((a: any) => safelyGetProperty(a, ["name"], "Unknown"))
+                    .join(", ")
+                : "Unknown Artist";
             } else if (item.model === "artist") {
-              imageUrl = safelyGetProperty(item, ['images', '0', 'url'], undefined);
+              imageUrl = safelyGetProperty(item, ["images", "0", "url"], undefined);
               subtitle = "Artist";
             } else if (item.model === "playlist") {
-              imageUrl = safelyGetProperty(item, ['images', '0', 'url'], undefined);
-              const ownerName = safelyGetProperty(item, ['owner', 'display_name'], 'Unknown');
+              imageUrl = safelyGetProperty(item, ["images", "0", "url"], undefined);
+              const ownerName = safelyGetProperty(item, ["owner", "display_name"], "Unknown");
               subtitle = `By ${ownerName}`;
             }
@@ -150,7 +293,8 @@ export const Home = () => {
                 onDownload={onDownload}
               />
             );
-        }).filter(Boolean)} {/* Filter out null components */}
+          })
+          .filter(Boolean)}
       </div>
     );
   }, [displayedResults, handleDownloadTrack, handleDownloadAlbum]);
@@ -160,25 +304,55 @@ export const Home = () => {
       <div className="text-center mb-4 md:mb-8 px-4 md:px-0">
         <h1 className="text-2xl font-bold text-content-primary dark:text-content-primary-dark">Spotizerr</h1>
       </div>
-      <div className="flex flex-col sm:flex-row gap-3 mb-4 md:mb-6 px-4 md:px-0 flex-shrink-0">
+
+      {/* Tabs */}
+      <div className="flex justify-center mb-4 md:mb-6 px-4 md:px-0 border-b border-gray-300 dark:border-gray-700">
+        <button
+          className={`flex-1 py-2 text-center transition-colors duration-200 ${
+            activeTab === "search"
+              ? "border-b-2 border-green-500 text-green-500"
+              : "border-b-2 border-transparent text-gray-800 dark:text-gray-200 hover:text-green-500"
+          }`}
+          onClick={() => setActiveTab("search")}
+        >
+          Search
+        </button>
+        <button
+          className={`flex-1 py-2 text-center transition-colors duration-200 ${
+            activeTab === "bulkAdd"
+              ? "border-b-2 border-green-500 text-green-500"
+              : "border-b-2 border-transparent text-gray-800 dark:text-gray-200 hover:text-green-500"
+          }`}
+          onClick={() => setActiveTab("bulkAdd")}
+        >
+          Bulk Add
+        </button>
+      </div>
+
+      {activeTab === "search" && (
+        <>
+          <div className="flex flex-col gap-3 mb-4 md:mb-6 px-4 md:px-0 flex-shrink-0">
+            <div className="flex flex-col sm:flex-row gap-3">
               <input
                 type="text"
                 value={query}
                 onChange={(e) => setQuery(e.target.value)}
-                placeholder="Search for a track, album, artist, or playlist"
+                placeholder="Search for a track, album, or artist"
                 className="flex-1 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
               />
-              <div className="flex gap-2">
-                {["track", "album", "artist", "playlist"].map((typeOption) => (
+
+              {/* Icon buttons for search type (larger screens) */}
+              <div className="hidden sm:flex gap-2 items-center">
+                {(["track", "album", "artist", "playlist"] as const).map((typeOption) => (
                   <button
                     key={typeOption}
-                    onClick={() => setSearchType(typeOption as "track" | "album" | "artist" | "playlist")}
+                    onClick={() => setSearchType(typeOption)}
+                    aria-label={`Search ${typeOption}`}
                     className={`flex items-center gap-1 p-2 rounded-md text-sm font-medium transition-colors border ${
                       searchType === typeOption
                         ? "bg-green-600 text-white border-green-600"
                         : "bg-gray-100 dark:bg-gray-700 text-gray-800 dark:text-gray-200 border-gray-300 dark:border-gray-600 hover:bg-gray-200 dark:hover:bg-gray-600"
                     }`}
                   >
                     {
                       {
@@ -188,25 +362,93 @@ export const Home = () => {
                         playlist: <ListMusic size={16} />,
                       }[typeOption]
                     }
+                    <span className="hidden md:inline">
                       {typeOption.charAt(0).toUpperCase() + typeOption.slice(1)}
+                    </span>
                   </button>
                 ))}
               </div>
+
+              {/* Select for smaller screens */}
+              <select
+                value={searchType}
+                onChange={(e) =>
+                  setSearchType(e.target.value as "track" | "album" | "artist" | "playlist")
+                }
+                className="p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus sm:hidden"
+              >
+                <option value="track">Track</option>
+                <option value="album">Album</option>
+                <option value="artist">Artist</option>
+                <option value="playlist">Playlist</option>
+              </select>
+            </div>
-          <div className={`flex-1 px-4 md:px-0 pb-4 ${
+          </div>
+
+          <div
+            className={`flex-1 px-4 md:px-0 pb-4 ${
               // Only restrict overflow on mobile when there are results, otherwise allow normal behavior
-            displayedResults.length > 0 ? 'overflow-y-auto md:overflow-visible' : ''
-          }`}>
+              displayedResults.length > 0 ? "overflow-y-auto md:overflow-visible" : ""
+            }`}
+          >
             {isLoading ? (
               <p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading results...</p>
             ) : (
               <>
                 {resultComponent}
                 <div ref={loaderRef} />
-                {isLoadingMore && <p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading more results...</p>}
+                {isLoadingMore && (
+                  <p className="text-center my-4 text-content-muted dark:text-content-muted-dark">Loading more results...</p>
+                )}
               </>
             )}
           </div>
+        </>
+      )}
+
+      {activeTab === "bulkAdd" && (
+        <div className="flex flex-col gap-3 mb-4 md:mb-6 px-4 md:px-0 flex-shrink-0">
+          <textarea
+            className="w-full h-60 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md mb-4 focus:outline-none focus:ring-2 focus:ring-green-500"
+            placeholder="Paste Spotify links here, one per line..."
+            value={linksInput}
+            onChange={(e) => setLinksInput(e.target.value)}
+          ></textarea>
+          <div className="flex justify-end gap-3">
+            <button
+              onClick={() => setLinksInput("")} // Clear input
+              className="px-4 py-2 bg-gray-300 dark:bg-gray-700 text-content-primary dark:text-content-primary-dark rounded-md hover:bg-gray-400 dark:hover:bg-gray-600 focus:outline-none focus:ring-2 focus:ring-gray-500"
+            >
+              Clear
+            </button>
+            <button
+              onClick={handleAddBulkLinks}
+              disabled={isBulkAdding}
+              className="px-4 py-2 bg-green-500 text-white rounded-md hover:bg-green-600 focus:outline-none focus:ring-2 focus:ring-green-400 disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
+            >
+              {isBulkAdding ? "Adding..." : (
+                <>
+                  <FaDownload className="icon-inverse" /> Download
+                </>
+              )}
+            </button>
+            {settings?.watch?.enabled && (
+              <button
+                onClick={handleWatchBulkLinks}
+                disabled={isBulkWatching}
+                className="px-4 py-2 bg-error hover:bg-error-hover text-button-primary-text rounded-md flex items-center gap-2 disabled:opacity-50 disabled:cursor-not-allowed"
+                title="Only Spotify Artist and Playlist links are supported for watching."
+              >
+                {isBulkWatching ? "Watching..." : (
+                  <>
+                    <FaEye className="icon-inverse" /> Watch
+                  </>
+                )}
+              </button>
+            )}
+          </div>
+        </div>
+      )}
     </div>
   );
 };