Merge branch main into ui-overhaul

This commit is contained in:
Mustafa Soylu
2025-06-11 09:40:31 +02:00
31 changed files with 5563 additions and 134 deletions

View File

@@ -24,3 +24,4 @@ logs/
.env
.venv
data
tests/

View File

@@ -1,5 +1,22 @@
# Use an official Python runtime as a parent image
FROM python:3.12-slim
# Stage 1: TypeScript build
FROM node:22.16.0-slim AS typescript-builder
# Set working directory
WORKDIR /app
# Copy necessary files for TypeScript build
COPY tsconfig.json ./tsconfig.json
COPY src/js ./src/js
# Install TypeScript globally
RUN npm install -g typescript
# Compile TypeScript
RUN tsc
# Stage 2: Final image
FROM python:3.12-slim AS python-builder
LABEL org.opencontainers.image.source="https://github.com/Xoconoch/spotizerr"
# Set the working directory in the container
WORKDIR /app
@@ -10,8 +27,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
gosu \
git \
ffmpeg \
nodejs \
npm \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -22,6 +37,7 @@ RUN npm install -g pnpm
# Copy only the requirements file to leverage Docker cache
COPY requirements.txt .
# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# --- Frontend Node.js Dependencies ---

2
app.py
View File

@@ -46,7 +46,7 @@ def setup_logging():
# Log formatting
log_format = logging.Formatter(
"%(asctime)s [%(processName)s:%(threadName)s] [%(name)s] [%(levelname)s] - %(message)s",
"%(asctime)s [%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)

View File

@@ -2,4 +2,4 @@ waitress==3.0.2
celery==5.5.3
Flask==3.1.1
flask_cors==6.0.0
deezspot-spotizerr==1.7.0
deezspot-spotizerr==1.10.0

View File

@@ -4,7 +4,7 @@ import atexit
# Configure basic logging for the application if not already configured
# This is a good place for it if routes are a central part of your app structure.
logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
level=logging.INFO, format="%(message)s"
)
logger = logging.getLogger(__name__)

View File

@@ -111,25 +111,25 @@ def handle_download(album_id):
)
return Response(
json.dumps({"prg_file": task_id}), status=202, mimetype="application/json"
json.dumps({"task_id": task_id}), status=202, mimetype="application/json"
)
@album_bp.route("/download/cancel", methods=["GET"])
def cancel_download():
"""
Cancel a running download process by its prg file name.
Cancel a running download process by its task id.
"""
prg_file = request.args.get("prg_file")
if not prg_file:
task_id = request.args.get("task_id")
if not task_id:
return Response(
json.dumps({"error": "Missing process id (prg_file) parameter"}),
json.dumps({"error": "Missing process id (task_id) parameter"}),
status=400,
mimetype="application/json",
)
# Use the queue manager's cancellation method.
result = download_queue_manager.cancel_task(prg_file)
result = download_queue_manager.cancel_task(task_id)
status_code = 200 if result.get("status") == "cancelled" else 404
return Response(json.dumps(result), status=status_code, mimetype="application/json")

View File

@@ -15,20 +15,38 @@ def get_download_history():
sort_by = request.args.get("sort_by", "timestamp_completed")
sort_order = request.args.get("sort_order", "DESC")
# Basic filtering example: filter by status_final or download_type
# Create filters dictionary for various filter options
filters = {}
# Status filter
status_filter = request.args.get("status_final")
if status_filter:
filters["status_final"] = status_filter
# Download type filter
type_filter = request.args.get("download_type")
if type_filter:
filters["download_type"] = type_filter
# Add more filters as needed, e.g., by item_name (would need LIKE for partial match)
# search_term = request.args.get('search')
# if search_term:
# filters['item_name'] = f'%{search_term}%' # This would require LIKE in get_history_entries
# Parent task filter
parent_task_filter = request.args.get("parent_task_id")
if parent_task_filter:
filters["parent_task_id"] = parent_task_filter
# Track status filter
track_status_filter = request.args.get("track_status")
if track_status_filter:
filters["track_status"] = track_status_filter
# Show/hide child tracks
hide_child_tracks = request.args.get("hide_child_tracks", "false").lower() == "true"
if hide_child_tracks:
filters["parent_task_id"] = None # Only show parent entries or standalone tracks
# Show only tracks with specific parent
only_parent_tracks = request.args.get("only_parent_tracks", "false").lower() == "true"
if only_parent_tracks and not parent_task_filter:
filters["parent_task_id"] = "NOT_NULL" # Special value to indicate we want only child tracks
entries, total_count = get_history_entries(
limit, offset, sort_by, sort_order, filters
@@ -45,3 +63,34 @@ def get_download_history():
except Exception as e:
logger.error(f"Error in /api/history endpoint: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve download history"}), 500
@history_bp.route("/tracks/<parent_task_id>", methods=["GET"])
def get_tracks_for_parent(parent_task_id):
"""API endpoint to retrieve all track entries for a specific parent task."""
try:
# We don't need pagination for this endpoint as we want all tracks for a parent
filters = {"parent_task_id": parent_task_id}
# Optional sorting
sort_by = request.args.get("sort_by", "timestamp_completed")
sort_order = request.args.get("sort_order", "DESC")
entries, total_count = get_history_entries(
limit=1000, # High limit to get all tracks
offset=0,
sort_by=sort_by,
sort_order=sort_order,
filters=filters
)
return jsonify(
{
"parent_task_id": parent_task_id,
"tracks": entries,
"total_count": total_count,
}
)
except Exception as e:
logger.error(f"Error in /api/history/tracks endpoint: {e}", exc_info=True)
return jsonify({"error": f"Failed to retrieve tracks for parent task {parent_task_id}"}), 500

View File

@@ -133,7 +133,7 @@ def handle_download(playlist_id):
)
return Response(
json.dumps({"prg_file": task_id}), # prg_file is the old name for task_id
json.dumps({"task_id": task_id}),
status=202,
mimetype="application/json",
)
@@ -142,18 +142,18 @@ def handle_download(playlist_id):
@playlist_bp.route("/download/cancel", methods=["GET"])
def cancel_download():
"""
Cancel a running playlist download process by its prg file name.
Cancel a running playlist download process by its task id.
"""
prg_file = request.args.get("prg_file")
if not prg_file:
task_id = request.args.get("task_id")
if not task_id:
return Response(
json.dumps({"error": "Missing process id (prg_file) parameter"}),
json.dumps({"error": "Missing task id (task_id) parameter"}),
status=400,
mimetype="application/json",
)
# Use the queue manager's cancellation method.
result = download_queue_manager.cancel_task(prg_file)
result = download_queue_manager.cancel_task(task_id)
status_code = 200 if result.get("status") == "cancelled" else 404
return Response(json.dumps(result), status=status_code, mimetype="application/json")

View File

@@ -21,16 +21,15 @@ prgs_bp = Blueprint("prgs", __name__, url_prefix="/api/prgs")
@prgs_bp.route("/<task_id>", methods=["GET"])
def get_prg_file(task_id):
def get_task_details(task_id):
"""
Return a JSON object with the resource type, its name (title),
the last progress update, and, if available, the original request parameters.
This function works with both the old PRG file system (for backward compatibility)
and the new task ID based system.
This function works with the new task ID based system.
Args:
task_id: Either a task UUID from Celery or a PRG filename from the old system
task_id: A task UUID from Celery
"""
# Only support new task IDs
task_info = get_task_info(task_id)
@@ -77,24 +76,31 @@ def get_prg_file(task_id):
last_status = get_last_task_status(task_id)
status_count = len(get_task_status(task_id))
# Default to the full last_status object, then check for the raw callback
last_line_content = last_status
if last_status and "raw_callback" in last_status:
last_line_content = last_status["raw_callback"]
response = {
"original_url": dynamic_original_url,
"last_line": last_status,
"last_line": last_line_content,
"timestamp": time.time(),
"task_id": task_id,
"status_count": status_count,
}
if last_status and last_status.get("summary"):
response["summary"] = last_status["summary"]
return jsonify(response)
@prgs_bp.route("/delete/<task_id>", methods=["DELETE"])
def delete_prg_file(task_id):
def delete_task(task_id):
"""
Delete a task's information and history.
Works with both the old PRG file system and the new task ID based system.
Args:
task_id: Either a task UUID from Celery or a PRG filename from the old system
task_id: A task UUID from Celery
"""
# Only support new task IDs
task_info = get_task_info(task_id)
@@ -107,7 +113,7 @@ def delete_prg_file(task_id):
@prgs_bp.route("/list", methods=["GET"])
def list_prg_files():
def list_tasks():
"""
Retrieve a list of all tasks in the system.
Returns a detailed list of task objects including status and metadata.
@@ -124,33 +130,34 @@ def list_prg_files():
last_status = get_last_task_status(task_id)
if task_info and last_status:
detailed_tasks.append(
{
"task_id": task_id,
"type": task_info.get(
"type", task_summary.get("type", "unknown")
),
"name": task_info.get(
"name", task_summary.get("name", "Unknown")
),
"artist": task_info.get(
"artist", task_summary.get("artist", "")
),
"download_type": task_info.get(
"download_type",
task_summary.get("download_type", "unknown"),
),
"status": last_status.get(
"status", "unknown"
), # Keep summary status for quick access
"last_status_obj": last_status, # Full last status object
"original_request": task_info.get("original_request", {}),
"created_at": task_info.get("created_at", 0),
"timestamp": last_status.get(
"timestamp", task_info.get("created_at", 0)
),
}
)
task_details = {
"task_id": task_id,
"type": task_info.get(
"type", task_summary.get("type", "unknown")
),
"name": task_info.get(
"name", task_summary.get("name", "Unknown")
),
"artist": task_info.get(
"artist", task_summary.get("artist", "")
),
"download_type": task_info.get(
"download_type",
task_summary.get("download_type", "unknown"),
),
"status": last_status.get(
"status", "unknown"
), # Keep summary status for quick access
"last_status_obj": last_status, # Full last status object
"original_request": task_info.get("original_request", {}),
"created_at": task_info.get("created_at", 0),
"timestamp": last_status.get(
"timestamp", task_info.get("created_at", 0)
),
}
if last_status.get("summary"):
task_details["summary"] = last_status["summary"]
detailed_tasks.append(task_details)
elif (
task_info
): # If last_status is somehow missing, still provide some info

View File

@@ -127,7 +127,7 @@ def handle_download(track_id):
)
return Response(
json.dumps({"prg_file": task_id}), # prg_file is the old name for task_id
json.dumps({"task_id": task_id}),
status=202,
mimetype="application/json",
)
@@ -136,18 +136,18 @@ def handle_download(track_id):
@track_bp.route("/download/cancel", methods=["GET"])
def cancel_download():
"""
Cancel a running track download process by its process id (prg file name).
Cancel a running track download process by its task id.
"""
prg_file = request.args.get("prg_file")
if not prg_file:
task_id = request.args.get("task_id")
if not task_id:
return Response(
json.dumps({"error": "Missing process id (prg_file) parameter"}),
json.dumps({"error": "Missing task id (task_id) parameter"}),
status=400,
mimetype="application/json",
)
# Use the queue manager's cancellation method.
result = download_queue_manager.cancel_task(prg_file)
result = download_queue_manager.cancel_task(task_id)
status_code = 200 if result.get("status") == "cancelled" else 404
return Response(json.dumps(result), status=status_code, mimetype="application/json")

View File

@@ -69,8 +69,16 @@ class CeleryManager:
try:
for line in iter(stream.readline, ""):
if line:
log_method = logger.error if error else logger.info
log_method(f"{log_prefix}: {line.strip()}")
line_stripped = line.strip()
log_method = logger.info # Default log method
if error: # This is a stderr stream
if " - ERROR - " in line_stripped or " - CRITICAL - " in line_stripped:
log_method = logger.error
elif " - WARNING - " in line_stripped:
log_method = logger.warning
log_method(f"{log_prefix}: {line_stripped}")
elif (
self.stop_event.is_set()
): # If empty line and stop is set, likely EOF
@@ -359,7 +367,7 @@ celery_manager = CeleryManager()
if __name__ == "__main__":
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] [%(threadName)s] [%(name)s] - %(message)s",
format="%(message)s",
)
logger.info("Starting Celery Manager example...")
celery_manager.start()

View File

@@ -127,6 +127,7 @@ class CeleryDownloadQueueManager:
NON_BLOCKING_STATES = [
ProgressState.COMPLETE,
ProgressState.DONE,
ProgressState.CANCELLED,
ProgressState.ERROR,
ProgressState.ERROR_RETRIED,
@@ -354,7 +355,11 @@ class CeleryDownloadQueueManager:
status = task.get("status")
# Only cancel tasks that are not already completed or cancelled
if status not in [ProgressState.COMPLETE, ProgressState.CANCELLED]:
if status not in [
ProgressState.COMPLETE,
ProgressState.DONE,
ProgressState.CANCELLED,
]:
result = cancel_celery_task(task_id)
if result.get("status") == "cancelled":
cancelled_count += 1

View File

@@ -29,7 +29,7 @@ from routes.utils.watch.db import (
)
# Import history manager function
from .history_manager import add_entry_to_history
from .history_manager import add_entry_to_history, add_tracks_from_summary
# Create Redis connection for storing task data that's not part of the Celery result backend
import redis
@@ -238,6 +238,9 @@ def _log_task_to_history(task_id, final_status_str, error_msg=None):
except Exception:
spotify_id = None # Ignore errors in parsing
# Check for the new summary object in the last status
summary_obj = last_status_obj.get("summary") if last_status_obj else None
history_entry = {
"task_id": task_id,
"download_type": task_info.get("download_type"),
@@ -271,15 +274,34 @@ def _log_task_to_history(task_id, final_status_str, error_msg=None):
"bitrate": bitrate_str
if bitrate_str
else None, # Store None if empty string
"summary_json": json.dumps(summary_obj) if summary_obj else None,
"total_successful": summary_obj.get("total_successful")
if summary_obj
else None,
"total_skipped": summary_obj.get("total_skipped") if summary_obj else None,
"total_failed": summary_obj.get("total_failed") if summary_obj else None,
}
# Add the main history entry for the task
add_entry_to_history(history_entry)
# Process track-level entries from summary if this is a multi-track download
if summary_obj and task_info.get("download_type") in ["album", "playlist"]:
tracks_processed = add_tracks_from_summary(
summary_data=summary_obj,
parent_task_id=task_id,
parent_history_data=history_entry
)
logger.info(
f"Track-level history: Processed {tracks_processed['successful']} successful, "
f"{tracks_processed['skipped']} skipped, and {tracks_processed['failed']} failed tracks for task {task_id}"
)
except Exception as e:
logger.error(
f"History: Error preparing or logging history for task {task_id}: {e}",
exc_info=True,
)
# --- End History Logging Helper ---
@@ -366,8 +388,8 @@ def retry_task(task_id):
# Update service settings
if service == "spotify":
if fallback_enabled:
task_info["main"] = config_params.get("deezer", "")
task_info["fallback"] = config_params.get("spotify", "")
task_info["main"] = config_params.get("spotify", "")
task_info["fallback"] = config_params.get("deezer", "")
task_info["quality"] = config_params.get("deezerQuality", "MP3_128")
task_info["fall_quality"] = config_params.get(
"spotifyQuality", "NORMAL"
@@ -536,6 +558,9 @@ class ProgressTrackingTask(Task):
Args:
progress_data: Dictionary containing progress information from deezspot
"""
# Store a copy of the original, unprocessed callback data
raw_callback_data = progress_data.copy()
task_id = self.request.id
# Ensure ./logs/tasks directory exists
@@ -570,9 +595,6 @@ class ProgressTrackingTask(Task):
# Get status type
status = progress_data.get("status", "unknown")
# Create a work copy of the data to avoid modifying the original
stored_data = progress_data.copy()
# Get task info for context
task_info = get_task_info(task_id)
@@ -585,44 +607,47 @@ class ProgressTrackingTask(Task):
# Process based on status type using a more streamlined approach
if status == "initializing":
# --- INITIALIZING: Start of a download operation ---
self._handle_initializing(task_id, stored_data, task_info)
self._handle_initializing(task_id, progress_data, task_info)
elif status == "downloading":
# --- DOWNLOADING: Track download started ---
self._handle_downloading(task_id, stored_data, task_info)
self._handle_downloading(task_id, progress_data, task_info)
elif status == "progress":
# --- PROGRESS: Album/playlist track progress ---
self._handle_progress(task_id, stored_data, task_info)
self._handle_progress(task_id, progress_data, task_info)
elif status == "real_time" or status == "track_progress":
# --- REAL_TIME/TRACK_PROGRESS: Track download real-time progress ---
self._handle_real_time(task_id, stored_data)
self._handle_real_time(task_id, progress_data)
elif status == "skipped":
# --- SKIPPED: Track was skipped ---
self._handle_skipped(task_id, stored_data, task_info)
self._handle_skipped(task_id, progress_data, task_info)
elif status == "retrying":
# --- RETRYING: Download failed and being retried ---
self._handle_retrying(task_id, stored_data, task_info)
self._handle_retrying(task_id, progress_data, task_info)
elif status == "error":
# --- ERROR: Error occurred during download ---
self._handle_error(task_id, stored_data, task_info)
self._handle_error(task_id, progress_data, task_info)
elif status == "done":
# --- DONE: Download operation completed ---
self._handle_done(task_id, stored_data, task_info)
self._handle_done(task_id, progress_data, task_info)
else:
# --- UNKNOWN: Unrecognized status ---
logger.info(
f"Task {task_id} {status}: {stored_data.get('message', 'No details')}"
f"Task {task_id} {status}: {progress_data.get('message', 'No details')}"
)
# Embed the raw callback data into the status object before storing
progress_data["raw_callback"] = raw_callback_data
# Store the processed status update
store_task_status(task_id, stored_data)
store_task_status(task_id, progress_data)
def _handle_initializing(self, task_id, data, task_info):
"""Handle initializing status from deezspot"""
@@ -663,7 +688,7 @@ class ProgressTrackingTask(Task):
store_task_info(task_id, task_info)
# Update status in data
data["status"] = ProgressState.INITIALIZING
# data["status"] = ProgressState.INITIALIZING
def _handle_downloading(self, task_id, data, task_info):
"""Handle downloading status from deezspot"""
@@ -720,7 +745,7 @@ class ProgressTrackingTask(Task):
logger.info(f"Task {task_id} downloading: '{track_name}'")
# Update status
data["status"] = ProgressState.DOWNLOADING
# data["status"] = ProgressState.DOWNLOADING
def _handle_progress(self, task_id, data, task_info):
"""Handle progress status from deezspot"""
@@ -776,7 +801,7 @@ class ProgressTrackingTask(Task):
logger.error(f"Error parsing track numbers '{current_track_raw}': {e}")
# Ensure correct status
data["status"] = ProgressState.PROGRESS
# data["status"] = ProgressState.PROGRESS
def _handle_real_time(self, task_id, data):
"""Handle real-time progress status from deezspot"""
@@ -818,11 +843,11 @@ class ProgressTrackingTask(Task):
logger.debug(f"Task {task_id} track progress: {title} by {artist}: {percent}%")
# Set appropriate status
data["status"] = (
ProgressState.REAL_TIME
if data.get("status") == "real_time"
else ProgressState.TRACK_PROGRESS
)
# data["status"] = (
# ProgressState.REAL_TIME
# if data.get("status") == "real_time"
# else ProgressState.TRACK_PROGRESS
# )
def _handle_skipped(self, task_id, data, task_info):
"""Handle skipped status from deezspot"""
@@ -872,7 +897,7 @@ class ProgressTrackingTask(Task):
store_task_status(task_id, progress_update)
# Set status
data["status"] = ProgressState.SKIPPED
# data["status"] = ProgressState.SKIPPED
def _handle_retrying(self, task_id, data, task_info):
"""Handle retrying status from deezspot"""
@@ -895,7 +920,7 @@ class ProgressTrackingTask(Task):
store_task_info(task_id, task_info)
# Set status
data["status"] = ProgressState.RETRYING
# data["status"] = ProgressState.RETRYING
def _handle_error(self, task_id, data, task_info):
"""Handle error status from deezspot"""
@@ -911,7 +936,7 @@ class ProgressTrackingTask(Task):
store_task_info(task_id, task_info)
# Set status and error message
data["status"] = ProgressState.ERROR
# data["status"] = ProgressState.ERROR
data["error"] = message
def _handle_done(self, task_id, data, task_info):
@@ -931,7 +956,7 @@ class ProgressTrackingTask(Task):
logger.info(f"Task {task_id} completed: Track '{song}'")
# Update status to track_complete
data["status"] = ProgressState.TRACK_COMPLETE
# data["status"] = ProgressState.TRACK_COMPLETE
# Update task info
completed_tracks = task_info.get("completed_tracks", 0) + 1
@@ -989,15 +1014,28 @@ class ProgressTrackingTask(Task):
logger.info(f"Task {task_id} completed: {content_type.upper()}")
# Add summary
data["status"] = ProgressState.COMPLETE
data["message"] = (
f"Download complete: {completed_tracks} tracks downloaded, {skipped_tracks} skipped"
)
# data["status"] = ProgressState.COMPLETE
summary_obj = data.get("summary")
# Log summary
logger.info(
f"Task {task_id} summary: {completed_tracks} completed, {skipped_tracks} skipped, {error_count} errors"
)
if summary_obj:
total_successful = summary_obj.get("total_successful", 0)
total_skipped = summary_obj.get("total_skipped", 0)
total_failed = summary_obj.get("total_failed", 0)
# data[
# "message"
# ] = f"Download complete: {total_successful} tracks downloaded, {total_skipped} skipped, {total_failed} failed."
# Log summary from the summary object
logger.info(
f"Task {task_id} summary: {total_successful} successful, {total_skipped} skipped, {total_failed} failed."
)
else:
# data["message"] = (
# f"Download complete: {completed_tracks} tracks downloaded, {skipped_tracks} skipped"
# )
# Log summary
logger.info(
f"Task {task_id} summary: {completed_tracks} completed, {skipped_tracks} skipped, {error_count} errors"
)
# Schedule deletion for completed multi-track downloads
delayed_delete_task_data.apply_async(
args=[task_id, "Task completed successfully and auto-cleaned."],
@@ -1066,8 +1104,8 @@ class ProgressTrackingTask(Task):
else:
# Generic done for other types
logger.info(f"Task {task_id} completed: {content_type.upper()}")
data["status"] = ProgressState.COMPLETE
data["message"] = "Download complete"
# data["status"] = ProgressState.COMPLETE
# data["message"] = "Download complete"
# Celery signal handlers
@@ -1134,18 +1172,11 @@ def task_postrun_handler(
)
if state == states.SUCCESS:
if current_redis_status != ProgressState.COMPLETE:
store_task_status(
task_id,
{
"status": ProgressState.COMPLETE,
"timestamp": time.time(),
"type": task_info.get("type", "unknown"),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"message": "Download completed successfully.",
},
)
if current_redis_status not in [ProgressState.COMPLETE, "done"]:
# The final status is now set by the 'done' callback from deezspot.
# We no longer need to store a generic 'COMPLETE' status here.
# This ensures the raw callback data is the last thing in the log.
pass
logger.info(
f"Task {task_id} completed successfully: {task_info.get('name', 'Unknown')}"
)
@@ -1335,8 +1366,8 @@ def download_track(self, **task_data):
# Determine service parameters
if service == "spotify":
if fallback_enabled:
main = config_params.get("deezer", "")
fallback = config_params.get("spotify", "")
main = config_params.get("spotify", "")
fallback = config_params.get("deezer", "")
quality = config_params.get("deezerQuality", "MP3_128")
fall_quality = config_params.get("spotifyQuality", "NORMAL")
else:
@@ -1421,8 +1452,8 @@ def download_album(self, **task_data):
# Determine service parameters
if service == "spotify":
if fallback_enabled:
main = config_params.get("deezer", "")
fallback = config_params.get("spotify", "")
main = config_params.get("spotify", "")
fallback = config_params.get("deezer", "")
quality = config_params.get("deezerQuality", "MP3_128")
fall_quality = config_params.get("spotifyQuality", "NORMAL")
else:
@@ -1507,8 +1538,8 @@ def download_playlist(self, **task_data):
# Determine service parameters
if service == "spotify":
if fallback_enabled:
main = config_params.get("deezer", "")
fallback = config_params.get("spotify", "")
main = config_params.get("spotify", "")
fallback = config_params.get("deezer", "")
quality = config_params.get("deezerQuality", "MP3_128")
fall_quality = config_params.get("spotifyQuality", "NORMAL")
else:

View File

@@ -403,6 +403,9 @@ def get_credential(service, name):
"name": data.get("name"),
"region": data.get("region"),
"blob_content": data.get("blob_content"),
"blob_file_path": data.get(
"blob_file_path"
), # Ensure blob_file_path is returned
}
return cleaned_data

View File

@@ -2,6 +2,7 @@ import sqlite3
import json
import time
import logging
import uuid
from pathlib import Path
logger = logging.getLogger(__name__)
@@ -27,6 +28,12 @@ EXPECTED_COLUMNS = {
"quality_profile": "TEXT",
"convert_to": "TEXT",
"bitrate": "TEXT",
"parent_task_id": "TEXT", # Reference to parent task for individual tracks
"track_status": "TEXT", # 'SUCCESSFUL', 'SKIPPED', 'FAILED'
"summary_json": "TEXT", # JSON string of the summary object from task
"total_successful": "INTEGER", # Count of successful tracks
"total_skipped": "INTEGER", # Count of skipped tracks
"total_failed": "INTEGER", # Count of failed tracks
}
@@ -61,7 +68,13 @@ def init_history_db():
service_used TEXT,
quality_profile TEXT,
convert_to TEXT,
bitrate TEXT
bitrate TEXT,
parent_task_id TEXT,
track_status TEXT,
summary_json TEXT,
total_successful INTEGER,
total_skipped INTEGER,
total_failed INTEGER
)
"""
cursor.execute(create_table_sql)
@@ -106,6 +119,27 @@ def init_history_db():
f"Could not add column '{col_name}': {alter_e}. It might already exist or there's a schema mismatch."
)
# Add additional columns for summary data if they don't exist
for col_name, col_type in {
"summary_json": "TEXT",
"total_successful": "INTEGER",
"total_skipped": "INTEGER",
"total_failed": "INTEGER"
}.items():
if col_name not in existing_column_names and col_name not in EXPECTED_COLUMNS:
try:
cursor.execute(
f"ALTER TABLE download_history ADD COLUMN {col_name} {col_type}"
)
logger.info(
f"Added missing column '{col_name} {col_type}' to download_history table."
)
added_columns = True
except sqlite3.OperationalError as alter_e:
logger.warning(
f"Could not add column '{col_name}': {alter_e}. It might already exist or there's a schema mismatch."
)
if added_columns:
conn.commit()
logger.info(f"Download history table schema updated at {HISTORY_DB_FILE}")
@@ -148,6 +182,12 @@ def add_entry_to_history(history_data: dict):
"quality_profile",
"convert_to",
"bitrate",
"parent_task_id",
"track_status",
"summary_json",
"total_successful",
"total_skipped",
"total_failed",
]
# Ensure all keys are present, filling with None if not
for key in required_keys:
@@ -164,8 +204,9 @@ def add_entry_to_history(history_data: dict):
item_url, spotify_id, status_final, error_message,
timestamp_added, timestamp_completed, original_request_json,
last_status_obj_json, service_used, quality_profile,
convert_to, bitrate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
convert_to, bitrate, parent_task_id, track_status,
summary_json, total_successful, total_skipped, total_failed
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
history_data["task_id"],
@@ -185,6 +226,12 @@ def add_entry_to_history(history_data: dict):
history_data["quality_profile"],
history_data["convert_to"],
history_data["bitrate"],
history_data["parent_task_id"],
history_data["track_status"],
history_data["summary_json"],
history_data["total_successful"],
history_data["total_skipped"],
history_data["total_failed"],
),
)
conn.commit()
@@ -239,8 +286,16 @@ def get_history_entries(
for column, value in filters.items():
# Basic security: ensure column is a valid one (alphanumeric + underscore)
if column.replace("_", "").isalnum():
where_clauses.append(f"{column} = ?")
params.append(value)
# Special case for 'NOT_NULL' value for parent_task_id
if column == "parent_task_id" and value == "NOT_NULL":
where_clauses.append(f"{column} IS NOT NULL")
# Regular case for NULL value
elif value is None:
where_clauses.append(f"{column} IS NULL")
# Regular case for exact match
else:
where_clauses.append(f"{column} = ?")
params.append(value)
if where_clauses:
where_sql = " WHERE " + " AND ".join(where_clauses)
@@ -266,6 +321,11 @@ def get_history_entries(
"quality_profile",
"convert_to",
"bitrate",
"parent_task_id",
"track_status",
"total_successful",
"total_skipped",
"total_failed",
]
if sort_by not in valid_sort_columns:
sort_by = "timestamp_completed" # Default sort
@@ -292,6 +352,157 @@ def get_history_entries(
conn.close()
def add_track_entry_to_history(track_name, artist_name, parent_task_id, track_status, parent_history_data=None):
    """Adds a track-specific entry to the history database.

    Args:
        track_name (str): The name of the track
        artist_name (str): The artist name
        parent_task_id (str): The ID of the parent task (album or playlist)
        track_status (str): The status of the track ('SUCCESSFUL', 'SKIPPED', 'FAILED')
        parent_history_data (dict, optional): The history data of the parent task

    Returns:
        str: The task_id of the created track entry
    """
    # Generate a unique, parent-scoped ID so the track entry never collides
    # with the parent task or sibling tracks.
    track_task_id = f"{parent_task_id}_track_{uuid.uuid4().hex[:8]}"

    # Inherit shared metadata from the parent entry, excluding fields that
    # must be track-specific.
    track_history_data = {}
    if parent_history_data:
        for key in EXPECTED_COLUMNS:
            if key in parent_history_data and key not in ("task_id", "item_name", "item_artist"):
                track_history_data[key] = parent_history_data[key]

    # Set track-specific fields, mapping the track-level status onto the
    # coarser status_final vocabulary used by the history table.
    track_history_data.update({
        "task_id": track_task_id,
        "download_type": "track",
        "item_name": track_name,
        "item_artist": artist_name,
        "parent_task_id": parent_task_id,
        "track_status": track_status,
        "status_final": "COMPLETED" if track_status == "SUCCESSFUL" else
                        "SKIPPED" if track_status == "SKIPPED" else "ERROR",
        "timestamp_completed": time.time(),
    })

    # Best-effort: recover the item URL (and Spotify ID) from the parent's
    # last status object. NOTE(review): raw_callback["url"] is the parent
    # task's last-seen URL, not necessarily this specific track's — confirm
    # against the deezspot callback shape. (Removed an unused `track_key`
    # local that was computed here but never read.)
    if parent_history_data and parent_history_data.get("last_status_obj_json"):
        try:
            last_status = json.loads(parent_history_data["last_status_obj_json"])
            if "raw_callback" in last_status and last_status["raw_callback"].get("url"):
                track_history_data["item_url"] = last_status["raw_callback"].get("url")
                # Extract Spotify ID from URL if possible
                url = last_status["raw_callback"].get("url", "")
                if url and "spotify.com" in url:
                    try:
                        spotify_id = url.split("/")[-1]
                        # Spotify IDs are 22-character base62 strings.
                        if spotify_id and len(spotify_id) == 22 and spotify_id.isalnum():
                            track_history_data["spotify_id"] = spotify_id
                    except Exception:
                        pass
        except (json.JSONDecodeError, KeyError, AttributeError) as e:
            logger.warning(f"Could not extract track URL for {track_name}: {e}")

    # Persist the new track entry via the shared insert helper.
    add_entry_to_history(track_history_data)
    return track_task_id
def add_tracks_from_summary(summary_data, parent_task_id, parent_history_data=None):
    """Processes a summary object from a completed task and adds individual track entries.

    The summary lists ("successful_tracks", "skipped_tracks", "failed_tracks")
    each contain strings formatted as "track_name - artist_name".

    Args:
        summary_data (dict): The summary data containing track lists
        parent_task_id (str): The ID of the parent task
        parent_history_data (dict, optional): The history data of the parent task

    Returns:
        dict: Counts of processed tracks, keyed by "successful", "skipped"
            and "failed"
    """
    processed = {
        "successful": 0,
        "skipped": 0,
        "failed": 0,
    }
    if not summary_data:
        logger.warning(f"No summary data provided for task {parent_task_id}")
        return processed

    # All three categories share identical processing; each tuple holds
    # (summary list key, per-track status value, counter key, label used in
    # the "could not parse" warning). The labels reproduce the original
    # per-category log wording exactly.
    categories = (
        ("successful_tracks", "SUCCESSFUL", "successful", "track entry"),
        ("skipped_tracks", "SKIPPED", "skipped", "skipped track entry"),
        ("failed_tracks", "FAILED", "failed", "failed track entry"),
    )
    for list_key, track_status, counter_key, parse_label in categories:
        for track_entry in summary_data.get(list_key, []):
            try:
                # Split only on the first " - " so dashes inside the track
                # name itself are preserved.
                parts = track_entry.split(" - ", 1)
                if len(parts) == 2:
                    track_name, artist_name = parts
                    add_track_entry_to_history(
                        track_name=track_name,
                        artist_name=artist_name,
                        parent_task_id=parent_task_id,
                        track_status=track_status,
                        parent_history_data=parent_history_data,
                    )
                    processed[counter_key] += 1
                else:
                    logger.warning(f"Could not parse {parse_label}: {track_entry}")
            except Exception as e:
                logger.error(f"Error processing {counter_key} track {track_entry}: {e}", exc_info=True)

    logger.info(
        f"Added {processed['successful']} successful, {processed['skipped']} skipped, "
        f"and {processed['failed']} failed track entries for task {parent_task_id}"
    )
    return processed
if __name__ == "__main__":
    # For testing purposes
    # Running this module directly enables INFO-level console logging so the
    # history helpers above can be exercised ad hoc.
    logging.basicConfig(level=logging.INFO)

View File

@@ -124,6 +124,10 @@ def download_playlist(
"spotify", main
) # For blob path
blob_file_path = spotify_main_creds.get("blob_file_path")
if blob_file_path is None:
raise ValueError(
f"Spotify credentials for account '{main}' don't contain a blob_file_path. Please check your credentials configuration."
)
if not Path(blob_file_path).exists():
raise FileNotFoundError(
f"Spotify credentials blob file not found at {blob_file_path} for account '{main}'"
@@ -180,6 +184,10 @@ def download_playlist(
spotify_main_creds = get_credential("spotify", main) # For blob path
blob_file_path = spotify_main_creds.get("blob_file_path")
if blob_file_path is None:
raise ValueError(
f"Spotify credentials for account '{main}' don't contain a blob_file_path. Please check your credentials configuration."
)
if not Path(blob_file_path).exists():
raise FileNotFoundError(
f"Spotify credentials blob file not found at {blob_file_path} for account '{main}'"

View File

@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16">
<path fill="#ffffff" d="M2 1a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1V2a1 1 0 0 0-1-1H2zm0 1h12v12H2V2zm2 2v1h8V4H4zm0 3v1h8V7H4zm0 3v1h8v-1H4z"/>
</svg>

After

Width:  |  Height:  |  Size: 247 B

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M17 6V18M13.5239 12.7809L8.6247 16.7002C7.96993 17.2241 7 16.7579 7 15.9194V8.08062C7 7.24212 7.96993 6.77595 8.6247 7.29976L13.5239 11.2191C14.0243 11.6195 14.0243 12.3805 13.5239 12.7809Z" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 510 B

330
src/js/history.ts Normal file
View File

@@ -0,0 +1,330 @@
// History page controller: wires filters, sorting and pagination to the
// /api/history endpoint and renders the results into the history table.
document.addEventListener('DOMContentLoaded', () => {
// Cached element handles; each may be null if the page markup changes.
const historyTableBody = document.getElementById('history-table-body') as HTMLTableSectionElement | null;
const prevButton = document.getElementById('prev-page') as HTMLButtonElement | null;
const nextButton = document.getElementById('next-page') as HTMLButtonElement | null;
const pageInfo = document.getElementById('page-info') as HTMLSpanElement | null;
const limitSelect = document.getElementById('limit-select') as HTMLSelectElement | null;
const statusFilter = document.getElementById('status-filter') as HTMLSelectElement | null;
const typeFilter = document.getElementById('type-filter') as HTMLSelectElement | null;
const trackFilter = document.getElementById('track-filter') as HTMLSelectElement | null;
const hideChildTracksCheckbox = document.getElementById('hide-child-tracks') as HTMLInputElement | null;
// Mutable view state shared by the closures below.
let currentPage = 1;
let limit = 25;
let totalEntries = 0;
let currentSortBy = 'timestamp_completed';
let currentSortOrder = 'DESC';
// When set, the table shows only the child tracks of this parent task.
let currentParentTaskId: string | null = null;
// Fetch one page of history entries from the backend, applying the active
// filters and sort order, then re-render the table and pagination controls.
async function fetchHistory(page = 1) {
if (!historyTableBody || !prevButton || !nextButton || !pageInfo || !limitSelect || !statusFilter || !typeFilter) {
console.error('One or more critical UI elements are missing for history page.');
return;
}
const offset = (page - 1) * limit;
let apiUrl = `/api/history?limit=${limit}&offset=${offset}&sort_by=${currentSortBy}&sort_order=${currentSortOrder}`;
const statusVal = statusFilter.value;
if (statusVal) {
apiUrl += `&status_final=${statusVal}`;
}
const typeVal = typeFilter.value;
if (typeVal) {
apiUrl += `&download_type=${typeVal}`;
}
// Add track status filter if present
if (trackFilter && trackFilter.value) {
apiUrl += `&track_status=${trackFilter.value}`;
}
// Add parent task filter if viewing a specific parent's tracks
if (currentParentTaskId) {
apiUrl += `&parent_task_id=${currentParentTaskId}`;
}
// Add hide child tracks filter if checkbox is checked
if (hideChildTracksCheckbox && hideChildTracksCheckbox.checked) {
apiUrl += `&hide_child_tracks=true`;
}
try {
const response = await fetch(apiUrl);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const data = await response.json();
renderHistory(data.entries);
totalEntries = data.total_count;
// Recompute the 1-based page number from the offset actually requested.
currentPage = Math.floor(offset / limit) + 1;
updatePagination();
updateSortIndicators();
// Update page title if viewing tracks for a parent
updatePageTitle();
} catch (error) {
console.error('Error fetching history:', error);
if (historyTableBody) {
historyTableBody.innerHTML = '<tr><td colspan="10">Error loading history.</td></tr>';
}
}
}
// Render the given history entries as table rows, including per-row action
// buttons and parent/child styling. Replaces any previously rendered rows.
function renderHistory(entries: any[]) {
if (!historyTableBody) return;
historyTableBody.innerHTML = ''; // Clear existing rows
if (!entries || entries.length === 0) {
historyTableBody.innerHTML = '<tr><td colspan="10">No history entries found.</td></tr>';
return;
}
entries.forEach(entry => {
const row = historyTableBody.insertRow();
// Add class for parent/child styling
if (entry.parent_task_id) {
row.classList.add('child-track-row');
} else if (entry.download_type === 'album' || entry.download_type === 'playlist') {
row.classList.add('parent-task-row');
}
// Item name with indentation for child tracks
const nameCell = row.insertCell();
if (entry.parent_task_id) {
nameCell.innerHTML = `<span class="child-track-indent">└─ </span>${entry.item_name || 'N/A'}`;
} else {
nameCell.textContent = entry.item_name || 'N/A';
}
row.insertCell().textContent = entry.item_artist || 'N/A';
// Type cell - show track status for child tracks
const typeCell = row.insertCell();
if (entry.parent_task_id && entry.track_status) {
typeCell.textContent = entry.track_status;
typeCell.classList.add(`track-status-${entry.track_status.toLowerCase()}`);
} else {
typeCell.textContent = entry.download_type ? entry.download_type.charAt(0).toUpperCase() + entry.download_type.slice(1) : 'N/A';
}
row.insertCell().textContent = entry.service_used || 'N/A';
// Construct Quality display string
const qualityCell = row.insertCell();
let qualityDisplay = entry.quality_profile || 'N/A';
// Check if convert_to exists and is not "None"
if (entry.convert_to && entry.convert_to !== "None") {
qualityDisplay = `${entry.convert_to.toUpperCase()}`;
// Check if bitrate exists and is not "None"
if (entry.bitrate && entry.bitrate !== "None") {
qualityDisplay += ` ${entry.bitrate}k`;
}
qualityDisplay += ` (${entry.quality_profile || 'Original'})`;
} else if (entry.bitrate && entry.bitrate !== "None") { // Case where convert_to might not be set, but bitrate is (e.g. for OGG Vorbis quality settings)
qualityDisplay = `${entry.bitrate}k (${entry.quality_profile || 'Profile'})`;
}
// If both are "None" or null, it will just use the quality_profile value set above
qualityCell.textContent = qualityDisplay;
const statusCell = row.insertCell();
statusCell.textContent = entry.status_final || 'N/A';
// NOTE(review): the class suffix is lowercased here, so matching CSS
// selectors must also be lowercase (class selectors are case-sensitive).
statusCell.className = `status-${entry.status_final?.toLowerCase() || 'unknown'}`;
// Timestamps are stored as UNIX seconds; multiply by 1000 for Date.
row.insertCell().textContent = entry.timestamp_added ? new Date(entry.timestamp_added * 1000).toLocaleString() : 'N/A';
row.insertCell().textContent = entry.timestamp_completed ? new Date(entry.timestamp_completed * 1000).toLocaleString() : 'N/A';
const actionsCell = row.insertCell();
// Add details button
const detailsButton = document.createElement('button');
detailsButton.innerHTML = `<img src="/static/images/info.svg" alt="Details">`;
detailsButton.className = 'details-btn btn-icon';
detailsButton.title = 'Show Details';
detailsButton.onclick = () => showDetailsModal(entry);
actionsCell.appendChild(detailsButton);
// Add view tracks button for album/playlist entries with child tracks
if (!entry.parent_task_id && (entry.download_type === 'album' || entry.download_type === 'playlist') &&
(entry.total_successful > 0 || entry.total_skipped > 0 || entry.total_failed > 0)) {
const viewTracksButton = document.createElement('button');
viewTracksButton.innerHTML = `<img src="/static/images/list.svg" alt="Tracks">`;
viewTracksButton.className = 'tracks-btn btn-icon';
viewTracksButton.title = 'View Tracks';
viewTracksButton.setAttribute('data-task-id', entry.task_id);
viewTracksButton.onclick = () => viewTracksForParent(entry.task_id);
actionsCell.appendChild(viewTracksButton);
// Add track counts display
const trackCountsSpan = document.createElement('span');
trackCountsSpan.className = 'track-counts';
trackCountsSpan.title = `Successful: ${entry.total_successful || 0}, Skipped: ${entry.total_skipped || 0}, Failed: ${entry.total_failed || 0}`;
trackCountsSpan.innerHTML = `
<span class="track-count success">${entry.total_successful || 0}</span> /
<span class="track-count skipped">${entry.total_skipped || 0}</span> /
<span class="track-count failed">${entry.total_failed || 0}</span>
`;
actionsCell.appendChild(trackCountsSpan);
}
if (entry.status_final === 'ERROR' && entry.error_message) {
// Lazily-created inline error details cell toggled by a "(Show Error)" link.
const errorSpan = document.createElement('span');
errorSpan.textContent = ' (Show Error)';
errorSpan.className = 'error-message-toggle';
errorSpan.style.marginLeft = '5px';
errorSpan.onclick = (e) => {
e.stopPropagation(); // Prevent click on row if any
let errorDetailsDiv = row.querySelector('.error-details') as HTMLElement | null;
if (!errorDetailsDiv) {
errorDetailsDiv = document.createElement('div');
errorDetailsDiv.className = 'error-details';
const newCell = row.insertCell(); // This will append to the end of the row
newCell.colSpan = 10; // Span across all columns
newCell.appendChild(errorDetailsDiv);
}
errorDetailsDiv.textContent = entry.error_message;
// Toggle display by directly manipulating the style of the details div
errorDetailsDiv.style.display = errorDetailsDiv.style.display === 'none' ? 'block' : 'none';
};
statusCell.appendChild(errorSpan);
}
});
}
// Refresh the pagination label and enable/disable the prev/next buttons
// to reflect the current page against the total entry count.
function updatePagination() {
    if (!pageInfo || !prevButton || !nextButton) {
        return;
    }
    // Guard against zero entries: always report at least one page.
    const pageCount = Math.ceil(totalEntries / limit) || 1;
    pageInfo.textContent = `Page ${currentPage} of ${pageCount}`;
    const onFirstPage = currentPage === 1;
    const onLastPage = currentPage === pageCount;
    prevButton.disabled = onFirstPage;
    nextButton.disabled = onLastPage;
}
// Sync the page heading (and the transient "back" button) with the current
// view mode: top-level history vs. drill-down into one parent task's tracks.
function updatePageTitle() {
    const heading = document.getElementById('history-title');
    if (!heading) {
        return;
    }
    if (!currentParentTaskId) {
        // Top-level view: restore the default title and drop the back button.
        heading.textContent = 'Download History';
        const existingBack = document.getElementById('back-to-history');
        if (existingBack) {
            existingBack.remove();
        }
        return;
    }
    // Drill-down view: adjust the title and ensure a back button exists.
    heading.textContent = 'Download History - Viewing Tracks';
    if (document.getElementById('back-to-history')) {
        return;
    }
    const backBtn = document.createElement('button');
    backBtn.id = 'back-to-history';
    backBtn.className = 'btn btn-secondary';
    backBtn.innerHTML = '&larr; Back to All History';
    backBtn.onclick = () => {
        // Leaving drill-down mode resets the filter and reloads page 1.
        currentParentTaskId = null;
        updatePageTitle();
        fetchHistory(1);
    };
    heading.parentNode?.insertBefore(backBtn, heading);
}
// Show a plain-text summary of one history entry via alert(), including
// track-specific and parent-summary sections when the relevant fields exist.
function showDetailsModal(entry: any) {
// Create more detailed modal content with new fields
let details = `Task ID: ${entry.task_id}\n` +
`Type: ${entry.download_type}\n` +
`Name: ${entry.item_name}\n` +
`Artist: ${entry.item_artist}\n` +
`Album: ${entry.item_album || 'N/A'}\n` +
`URL: ${entry.item_url || 'N/A'}\n` +
`Spotify ID: ${entry.spotify_id || 'N/A'}\n` +
`Service Used: ${entry.service_used || 'N/A'}\n` +
`Quality Profile (Original): ${entry.quality_profile || 'N/A'}\n` +
`ConvertTo: ${entry.convert_to || 'N/A'}\n` +
`Bitrate: ${entry.bitrate ? entry.bitrate + 'k' : 'N/A'}\n` +
`Status: ${entry.status_final}\n` +
`Error: ${entry.error_message || 'None'}\n` +
`Added: ${new Date(entry.timestamp_added * 1000).toLocaleString()}\n` +
`Completed/Ended: ${new Date(entry.timestamp_completed * 1000).toLocaleString()}\n`;
// Add track-specific details if this is a track
if (entry.parent_task_id) {
details += `Parent Task ID: ${entry.parent_task_id}\n` +
`Track Status: ${entry.track_status || 'N/A'}\n`;
}
// Add summary details if this is a parent task
if (entry.total_successful !== null || entry.total_skipped !== null || entry.total_failed !== null) {
details += `\nTrack Summary:\n` +
`Successful: ${entry.total_successful || 0}\n` +
`Skipped: ${entry.total_skipped || 0}\n` +
`Failed: ${entry.total_failed || 0}\n`;
}
// Pretty-print the stored JSON payloads; fall back to an empty object.
details += `\nOriginal Request: ${JSON.stringify(JSON.parse(entry.original_request_json || '{}'), null, 2)}\n\n` +
`Last Status Object: ${JSON.stringify(JSON.parse(entry.last_status_obj_json || '{}'), null, 2)}`;
// Try to parse and display summary if available
if (entry.summary_json) {
try {
const summary = JSON.parse(entry.summary_json);
details += `\nSummary: ${JSON.stringify(summary, null, 2)}`;
} catch (e) {
console.error('Error parsing summary JSON:', e);
}
}
alert(details);
}
// Switch the table into drill-down mode, listing only the child tracks
// belonging to the given parent task, starting from the first page.
async function viewTracksForParent(taskId: string) {
    currentPage = 1;
    currentParentTaskId = taskId;
    fetchHistory(1);
}
// Clicking a sortable column header either toggles its sort direction or
// switches the sort column (new columns default to DESC), then reloads page 1.
document.querySelectorAll('th[data-sort]').forEach(headerCell => {
headerCell.addEventListener('click', () => {
const sortField = (headerCell as HTMLElement).dataset.sort;
if (!sortField) return;
if (currentSortBy === sortField) {
currentSortOrder = currentSortOrder === 'ASC' ? 'DESC' : 'ASC';
} else {
currentSortBy = sortField;
currentSortOrder = 'DESC';
}
fetchHistory(1);
});
});
// Repaint the column-sort arrows so that only the active sort column
// carries a direction class (sort-asc or sort-desc).
function updateSortIndicators() {
    document.querySelectorAll('th[data-sort]').forEach(cell => {
        const header = cell as HTMLElement;
        // Always clear stale indicators before re-evaluating this column.
        header.classList.remove('sort-asc', 'sort-desc');
        if (header.dataset.sort !== currentSortBy) return;
        const direction = currentSortOrder === 'ASC' ? 'sort-asc' : 'sort-desc';
        header.classList.add(direction);
    });
}
// Event listeners for pagination and filters
prevButton?.addEventListener('click', () => fetchHistory(currentPage - 1));
nextButton?.addEventListener('click', () => fetchHistory(currentPage + 1));
limitSelect?.addEventListener('change', (e) => {
// Changing the page size restarts the listing from page 1.
limit = parseInt((e.target as HTMLSelectElement).value, 10);
fetchHistory(1);
});
// Any filter change invalidates the current page, so reload from page 1.
statusFilter?.addEventListener('change', () => fetchHistory(1));
typeFilter?.addEventListener('change', () => fetchHistory(1));
trackFilter?.addEventListener('change', () => fetchHistory(1));
hideChildTracksCheckbox?.addEventListener('change', () => fetchHistory(1));
// Initial fetch
fetchHistory();
});

2895
src/js/queue.ts Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,203 @@
body {
font-family: sans-serif;
margin: 0;
background-color: #121212;
color: #e0e0e0;
}
.container {
padding: 20px;
max-width: 1200px;
margin: auto;
}
h1 {
color: #1DB954; /* Spotify Green */
text-align: center;
}
table {
width: 100%;
border-collapse: collapse;
margin-top: 20px;
background-color: #1e1e1e;
}
th, td {
border: 1px solid #333;
padding: 10px 12px;
text-align: left;
}
th {
background-color: #282828;
cursor: pointer;
}
tr:nth-child(even) {
background-color: #222;
}
/* Parent and child track styling */
.parent-task-row {
background-color: #282828 !important;
font-weight: bold;
}
.child-track-row {
background-color: #1a1a1a !important;
font-size: 0.9em;
}
.child-track-indent {
color: #1DB954;
margin-right: 5px;
}
/* Track status styling */
.track-status-successful {
color: #1DB954;
font-weight: bold;
}
.track-status-skipped {
color: #FFD700;
font-weight: bold;
}
.track-status-failed {
color: #FF4136;
font-weight: bold;
}
/* Track counts display */
.track-counts {
margin-left: 10px;
font-size: 0.85em;
}
.track-count.success {
color: #1DB954;
}
.track-count.skipped {
color: #FFD700;
}
.track-count.failed {
color: #FF4136;
}
/* Back button */
#back-to-history {
margin-right: 15px;
padding: 5px 10px;
background-color: #333;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
}
#back-to-history:hover {
background-color: #444;
}
.pagination {
margin-top: 20px;
text-align: center;
}
.pagination button, .pagination select {
padding: 8px 12px;
margin: 0 5px;
background-color: #1DB954;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
}
.pagination button:disabled {
background-color: #555;
cursor: not-allowed;
}
.filters {
margin-bottom: 20px;
display: flex;
gap: 15px;
align-items: center;
flex-wrap: wrap;
}
.filters label, .filters select, .filters input {
margin-right: 5px;
}
.filters select, .filters input {
padding: 8px;
background-color: #282828;
color: #e0e0e0;
border: 1px solid #333;
border-radius: 4px;
}
.checkbox-filter {
display: flex;
align-items: center;
gap: 5px;
}
/* Final-status colours. history.ts builds the class name with
   entry.status_final?.toLowerCase(), and CSS class selectors are
   case-sensitive, so these selectors must be lowercase to ever match. */
.status-completed { color: #1DB954; font-weight: bold; }
.status-error { color: #FF4136; font-weight: bold; }
.status-cancelled { color: #AAAAAA; }
.status-skipped { color: #FFD700; font-weight: bold; }
.error-message-toggle {
cursor: pointer;
color: #FF4136; /* Red for error indicator */
text-decoration: underline;
}
.error-details {
display: none; /* Hidden by default */
white-space: pre-wrap; /* Preserve formatting */
background-color: #303030;
padding: 5px;
margin-top: 5px;
border-radius: 3px;
font-size: 0.9em;
}
/* Styling for the buttons in the table */
.btn-icon {
background-color: transparent; /* Or a subtle color like #282828 */
border: none;
border-radius: 50%; /* Make it circular */
padding: 5px; /* Adjust padding to control size */
cursor: pointer;
display: inline-flex; /* Important for aligning the image */
align-items: center;
justify-content: center;
transition: background-color 0.2s ease;
margin-right: 5px;
}
.btn-icon img {
width: 16px; /* Icon size */
height: 16px;
filter: invert(1); /* Make icon white if it's dark, adjust if needed */
}
.btn-icon:hover {
background-color: #333; /* Darker on hover */
}
.details-btn:hover img {
filter: invert(0.8) sepia(1) saturate(5) hue-rotate(175deg); /* Make icon blue on hover */
}
.tracks-btn:hover img {
filter: invert(0.8) sepia(1) saturate(5) hue-rotate(90deg); /* Make icon green on hover */
}

825
static/css/queue/queue.css Normal file
View File

@@ -0,0 +1,825 @@
/* ---------------------- */
/* DOWNLOAD QUEUE STYLES */
/* ---------------------- */
/* Container for the download queue sidebar */
#downloadQueue {
position: fixed;
top: 0;
right: -350px; /* Hidden offscreen by default */
width: 350px;
height: 100vh;
background: #181818;
padding: 20px;
transition: right 0.3s cubic-bezier(0.4, 0, 0.2, 1);
z-index: 1001;
/* Remove overflow-y here to delegate scrolling to the queue items container */
box-shadow: -20px 0 30px rgba(0, 0, 0, 0.4);
/* Added for flex layout */
display: flex;
flex-direction: column;
}
/* When active, the sidebar slides into view */
#downloadQueue.active {
right: 0;
}
/* Header inside the queue sidebar */
.sidebar-header {
display: flex;
justify-content: space-between;
align-items: center;
padding-bottom: 15px;
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
margin-bottom: 20px;
}
.sidebar-header h2 {
font-size: 1.25rem;
font-weight: 600;
color: #fff;
margin: 0;
}
/* Queue subtitle with statistics */
.queue-subtitle {
display: flex;
gap: 10px;
margin-top: 5px;
font-size: 0.8rem;
color: #b3b3b3;
}
.queue-stat {
padding: 2px 6px;
border-radius: 4px;
font-weight: 500;
}
.queue-stat-active {
color: #4a90e2;
background-color: rgba(74, 144, 226, 0.1);
}
.queue-stat-completed {
color: #1DB954;
background-color: rgba(29, 185, 84, 0.1);
}
.queue-stat-error {
color: #ff5555;
background-color: rgba(255, 85, 85, 0.1);
}
.header-actions {
display: flex;
gap: 10px;
align-items: center;
}
/* Refresh queue button */
#refreshQueueBtn {
background: #2a2a2a;
border: none;
color: #fff;
padding: 8px;
border-radius: 4px;
cursor: pointer;
transition: background 0.3s ease, transform 0.2s ease;
display: flex;
align-items: center;
justify-content: center;
}
#refreshQueueBtn:hover {
background: #333;
transform: translateY(-1px);
}
#refreshQueueBtn:active {
transform: scale(0.95);
}
#refreshQueueBtn.refreshing {
animation: spin 1s linear infinite;
}
/* Artist queue message */
.queue-artist-message {
background: #2a2a2a;
padding: 15px;
border-radius: 8px;
margin-bottom: 15px;
color: #fff;
text-align: center;
border-left: 4px solid #4a90e2;
animation: pulse 1.5s infinite;
font-weight: 500;
}
@keyframes pulse {
0% { opacity: 0.8; }
50% { opacity: 1; }
100% { opacity: 0.8; }
}
/* Cancel all button styling */
#cancelAllBtn {
background: #8b0000; /* Dark blood red */
border: none;
color: #fff;
padding: 8px 12px;
border-radius: 4px;
cursor: pointer;
transition: background 0.3s ease, transform 0.2s ease;
font-size: 14px;
font-weight: 600;
display: flex;
align-items: center;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.3);
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.5);
}
#cancelAllBtn:hover {
background: #a30000; /* Slightly lighter red on hover */
transform: translateY(-1px);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.4);
}
#cancelAllBtn:active {
transform: scale(0.98);
}
/* Close button for the queue sidebar */
.close-btn {
background: #2a2a2a;
border: none;
border-radius: 50%;
width: 32px;
height: 32px;
display: flex;
align-items: center;
justify-content: center;
color: #ffffff;
font-size: 20px;
cursor: pointer;
transition: background-color 0.3s ease, transform 0.2s ease;
}
.close-btn:hover {
background-color: #333;
transform: scale(1.05);
}
.close-btn:active {
transform: scale(0.95);
}
/* Container for all queue items */
#queueItems {
/* Allow the container to fill all available space in the sidebar */
flex: 1;
overflow-y: auto;
padding-right: 5px; /* Add slight padding for scrollbar */
scrollbar-width: thin;
scrollbar-color: #1DB954 rgba(255, 255, 255, 0.1);
}
/* Custom scrollbar styles */
#queueItems::-webkit-scrollbar {
width: 6px;
}
#queueItems::-webkit-scrollbar-track {
background: rgba(255, 255, 255, 0.1);
border-radius: 10px;
}
#queueItems::-webkit-scrollbar-thumb {
background-color: #1DB954;
border-radius: 10px;
}
/* Each download queue item */
.queue-item {
background: #2a2a2a;
padding: 15px;
border-radius: 8px;
margin-bottom: 15px;
transition: all 0.3s ease;
display: flex;
flex-direction: column;
gap: 6px;
position: relative;
border-left: 4px solid transparent;
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2);
}
/* Animation only for newly added items */
.queue-item-new {
animation: fadeIn 0.3s ease;
}
@keyframes fadeIn {
from { opacity: 0; transform: translateY(5px); }
to { opacity: 1; transform: translateY(0); }
}
.queue-item:hover {
background-color: #333;
transform: translateY(-5px);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.3);
}
/* Title text in a queue item */
.queue-item .title {
font-weight: 600;
margin-bottom: 4px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
color: #fff;
font-size: 14px;
}
/* Type indicator (e.g. track, album) */
.queue-item .type {
font-size: 11px;
color: #1DB954;
text-transform: uppercase;
letter-spacing: 0.7px;
font-weight: 600;
background-color: rgba(29, 185, 84, 0.1);
padding: 3px 6px;
border-radius: 4px;
display: inline-block;
width: fit-content;
}
/* Album type - for better visual distinction */
.queue-item .type.album {
color: #4a90e2;
background-color: rgba(74, 144, 226, 0.1);
}
/* Track type */
.queue-item .type.track {
color: #1DB954;
background-color: rgba(29, 185, 84, 0.1);
}
/* Playlist type */
.queue-item .type.playlist {
color: #e67e22;
background-color: rgba(230, 126, 34, 0.1);
}
/* Log text for status messages */
.queue-item .log {
font-size: 13px;
color: #b3b3b3;
line-height: 1.4;
font-family: 'SF Mono', Menlo, monospace;
padding: 8px 0;
word-break: break-word;
}
/* Optional state indicators for each queue item */
.queue-item--complete,
.queue-item.download-success {
border-left-color: #1DB954;
}
.queue-item--error {
border-left-color: #ff5555;
}
.queue-item--processing {
border-left-color: #4a90e2;
}
/* Progress bar for downloads */
.status-bar {
height: 3px;
background: #1DB954;
width: 0;
transition: width 0.3s ease;
margin-top: 8px;
border-radius: 2px;
}
/* Overall progress container for albums and playlists */
.overall-progress-container {
margin-top: 12px;
padding-top: 8px;
border-top: 1px solid rgba(255, 255, 255, 0.1);
position: relative; /* Positioning context for z-index */
z-index: 2; /* Ensure overall progress appears above track progress */
}
.overall-progress-header {
display: flex;
justify-content: space-between;
margin-bottom: 5px;
font-size: 11px;
color: #b3b3b3;
}
.overall-progress-label {
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.5px;
}
.overall-progress-count {
font-weight: 600;
color: #1DB954;
}
.overall-progress-bar-container {
height: 6px;
background: rgba(255, 255, 255, 0.1);
border-radius: 3px;
overflow: hidden;
}
.overall-progress-bar {
height: 100%;
background: linear-gradient(90deg, #4a90e2, #7a67ee); /* Changed to blue-purple gradient */
width: 0;
border-radius: 3px;
transition: width 0.4s cubic-bezier(0.4, 0, 0.2, 1);
position: relative;
}
.overall-progress-bar.complete {
background: #4a90e2; /* Changed to solid blue for completed overall progress */
}
/* Track progress bar container */
.track-progress-bar-container {
height: 4px;
background: rgba(255, 255, 255, 0.1);
border-radius: 2px;
overflow: hidden;
margin-top: 8px;
margin-bottom: 4px;
position: relative;
z-index: 1; /* Ensure it's below the overall progress */
}
/* Track progress bar */
.track-progress-bar {
height: 100%;
background: #1DB954; /* Keep green for track-level progress */
width: 0;
border-radius: 2px;
transition: width 0.3s ease;
box-shadow: 0 0 3px rgba(29, 185, 84, 0.5); /* Add subtle glow to differentiate */
}
/* Complete state for track progress */
/* (NOTE(review): no dedicated rule exists for the complete state; completed
   tracks keep the base .track-progress-bar styling.) */
/* Real-time progress style */
.track-progress-bar.real-time {
    /* Duplicate background declaration removed; a single value suffices. */
    background: #1DB954; /* Vivid green for real-time progress */
}
/* Pulsing animation for indeterminate progress */
.track-progress-bar.progress-pulse {
background: linear-gradient(90deg, #1DB954 0%, #2cd267 50%, #1DB954 100%); /* Keep in green family */
background-size: 200% 100%;
animation: progress-pulse-slide 1.5s ease infinite;
}
@keyframes progress-pulse-slide {
0% { background-position: 0% 50%; }
50% { background-position: 100% 50%; }
100% { background-position: 0% 50%; }
}
/* Progress percentage text */
.progress-percent {
text-align: right;
font-weight: bold;
font-size: 12px;
color: #1DB954;
margin-top: 4px;
}
/* Optional status message colors (if using state classes) */
.log--success {
color: #1DB954 !important;
}
.log--error {
color: #ff5555 !important;
}
.log--warning {
color: #ffaa00 !important;
}
.log--info {
color: #4a90e2 !important;
}
/* Loader animations for real-time progress */
@keyframes progress-pulse {
0% { opacity: 0.5; }
50% { opacity: 1; }
100% { opacity: 0.5; }
}
.progress-indicator {
display: inline-block;
margin-left: 8px;
animation: progress-pulse 1.5s infinite;
}
/* Loading spinner style */
.loading-spinner {
display: inline-block;
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-radius: 50%;
border-top-color: #1DB954;
animation: spin 1s ease-in-out infinite;
margin-right: 6px;
vertical-align: middle;
}
/* Compact spinner variant used inside dense rows */
.loading-spinner.small {
width: 10px;
height: 10px;
border-width: 1px;
margin-right: 4px;
}
/* Full-rotation animation driving the loading spinner */
@keyframes spin {
to { transform: rotate(360deg); }
}
/* Cancel button inside each queue item */
.cancel-btn {
background: none;
border: none;
cursor: pointer;
padding: 5px;
outline: none;
margin-top: 10px;
/* Optionally constrain the overall size */
max-width: 24px;
max-height: 24px;
position: absolute;
top: 10px;
right: 10px;
opacity: 0.7;
transition: opacity 0.2s ease, transform 0.2s ease;
}
.cancel-btn:hover {
opacity: 1;
}
/* Icon is white (inverted) so it reads on the dark sidebar */
.cancel-btn img {
width: 16px;
height: 16px;
filter: invert(1);
transition: transform 0.3s ease;
}
.cancel-btn:hover img {
transform: scale(1.1);
}
.cancel-btn:active img {
transform: scale(0.9);
}
/* Group header for multiple albums from same artist */
.queue-group-header {
font-size: 14px;
color: #b3b3b3;
margin: 15px 0 10px;
padding-bottom: 8px;
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
display: flex;
align-items: center;
justify-content: space-between;
}
.queue-group-header span {
display: flex;
align-items: center;
}
/* Green bullet marker before the group label */
.queue-group-header span::before {
content: '';
display: inline-block;
width: 10px;
height: 10px;
border-radius: 50%;
background-color: #1DB954;
margin-right: 8px;
}
/* ------------------------------- */
/* FOOTER & "SHOW MORE" BUTTON */
/* ------------------------------- */
#queueFooter {
text-align: center;
padding-top: 15px;
border-top: 1px solid rgba(255, 255, 255, 0.1);
margin-top: 10px;
}
#queueFooter button {
background: #1DB954;
border: none;
padding: 10px 18px;
border-radius: 20px;
color: #fff;
cursor: pointer;
transition: all 0.3s ease;
font-size: 14px;
font-weight: 500;
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2);
}
#queueFooter button:hover {
background: #17a448;
transform: translateY(-1px);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.3);
}
#queueFooter button:active {
transform: scale(0.98);
}
/* -------------------------- */
/* ERROR BUTTONS STYLES */
/* -------------------------- */
/* Container for error action buttons */
.error-buttons {
display: flex;
justify-content: flex-end;
gap: 8px;
margin-top: 8px;
}
/* ----------------------------- */
/* DOWNLOAD SUMMARY ICONS */
/* ----------------------------- */
/* Base styles for all summary icons */
.summary-icon {
width: 14px;
height: 14px;
vertical-align: middle;
margin-right: 4px;
margin-top: -2px;
}
/* Download summary formatting */
.download-summary {
background: rgba(255, 255, 255, 0.05);
border-radius: 6px;
padding: 12px;
margin-top: 5px;
}
.summary-line {
display: flex;
align-items: center;
gap: 12px;
margin-bottom: 8px;
}
.summary-line span {
display: flex;
align-items: center;
padding: 3px 8px;
border-radius: 4px;
font-weight: 500;
}
/* Specific icon background colors */
.summary-line span:nth-child(2) {
background: rgba(29, 185, 84, 0.1); /* Success background */
}
.summary-line span:nth-child(3) {
background: rgba(230, 126, 34, 0.1); /* Skip background */
}
.summary-line span:nth-child(4) {
background: rgba(255, 85, 85, 0.1); /* Failed background */
}
/* Failed tracks list styling */
.failed-tracks-title {
color: #ff5555;
font-weight: 600;
margin: 10px 0 5px;
font-size: 13px;
}
.failed-tracks-list {
list-style-type: none;
padding-left: 10px;
margin: 0;
font-size: 12px;
color: #b3b3b3;
max-height: 100px;
overflow-y: auto;
}
.failed-tracks-list li {
padding: 3px 0;
position: relative;
}
/* Red bullet for each failed track (list-style is disabled above) */
.failed-tracks-list li::before {
content: "•";
color: #ff5555;
position: absolute;
left: -10px;
}
/* Base styles for error buttons */
.error-buttons button {
border: none;
border-radius: 4px;
padding: 6px 12px;
font-size: 12px;
font-weight: 600;
cursor: pointer;
transition: all 0.2s ease;
}
/* Hover state for all error buttons */
.error-buttons button:hover {
transform: translateY(-2px);
}
.error-buttons button:active {
transform: translateY(0);
}
/* Specific styles for the Close (X) error button */
.close-error-btn {
background-color: #333;
color: #fff;
}
.close-error-btn:hover {
background-color: #444;
}
/* Specific styles for the Retry button */
.retry-btn {
background-color: #ff5555;
color: #fff;
padding: 6px 15px !important;
}
.retry-btn:hover {
background-color: #ff6b6b;
}
/* Empty queue state */
.queue-empty {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
height: 200px;
color: #b3b3b3;
text-align: center;
padding: 20px;
}
.queue-empty img {
width: 60px;
height: 60px;
margin-bottom: 15px;
opacity: 0.6;
}
.queue-empty p {
font-size: 14px;
line-height: 1.5;
}
/* Error notification in queue */
.queue-error {
background-color: rgba(192, 57, 43, 0.1);
color: #ff5555;
padding: 10px 15px;
border-radius: 8px;
margin-bottom: 15px;
font-size: 14px;
border-left: 3px solid #ff5555;
animation: fadeIn 0.3s ease;
}
/* Error state styling */
/* The !important overrides pin errored items in place so hover animations
   elsewhere can't shift them */
.queue-item.error {
border-left: 4px solid #ff5555;
background-color: rgba(255, 85, 85, 0.05);
transition: none !important; /* Remove all transitions */
transform: none !important; /* Prevent any transform */
position: relative !important; /* Keep normal positioning */
left: 0 !important; /* Prevent any left movement */
right: 0 !important; /* Prevent any right movement */
top: 0 !important; /* Prevent any top movement */
}
.queue-item.error:hover {
background-color: rgba(255, 85, 85, 0.1);
transform: none !important; /* Force disable any transform */
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2) !important; /* Keep original shadow */
position: relative !important; /* Force normal positioning */
left: 0 !important; /* Prevent any left movement */
right: 0 !important; /* Prevent any right movement */
top: 0 !important; /* Prevent any top movement */
}
.error-message {
color: #ff5555;
margin-bottom: 10px;
font-size: 13px;
line-height: 1.4;
}
/* ------------------------------- */
/* MOBILE RESPONSIVE ADJUSTMENTS */
/* ------------------------------- */
@media (max-width: 600px) {
/* Make the sidebar full width on mobile */
#downloadQueue {
width: 100%;
right: -100%; /* Off-screen fully */
padding: 15px;
}
/* When active, the sidebar slides into view from full width */
#downloadQueue.active {
right: 0;
}
/* Adjust header and title for smaller screens */
.sidebar-header {
flex-direction: row;
align-items: center;
padding-bottom: 12px;
margin-bottom: 15px;
}
.sidebar-header h2 {
font-size: 1.1rem;
}
/* Reduce the size of the close buttons */
.close-btn {
width: 28px;
height: 28px;
font-size: 18px;
}
/* Adjust queue items padding */
.queue-item {
padding: 12px;
margin-bottom: 12px;
}
/* Ensure text remains legible on smaller screens */
.queue-item .log,
.queue-item .type {
font-size: 12px;
}
#cancelAllBtn {
padding: 6px 10px;
font-size: 12px;
}
.error-buttons {
flex-direction: row;
}
.close-error-btn {
width: 28px;
height: 28px;
}
.retry-btn {
padding: 6px 12px !important;
}
}

98
static/html/history.html Normal file
View File

@@ -0,0 +1,98 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Download History</title>
<!-- Link to global stylesheets first -->
<link rel="stylesheet" href="{{ url_for('static', filename='css/main/base.css') }}">
<link rel="stylesheet" href="{{ url_for('static', filename='css/main/icons.css') }}">
<!-- Link to page-specific stylesheet -->
<link rel="stylesheet" href="{{ url_for('static', filename='css/history/history.css') }}">
<!-- Helper function for image errors, if not already in base.css or loaded globally -->
<script>
function handleImageError(img) {
img.onerror = null; // Prevent infinite loop if placeholder also fails
img.src = "{{ url_for('static', filename='images/placeholder.jpg') }}";
}
</script>
</head>
<body>
<div class="container">
<h1 id="history-title">Download History</h1>
<!-- Filter controls; history.js reads these to build the query string -->
<div class="filters">
<label for="status-filter">Status:</label>
<select id="status-filter">
<option value="">All</option>
<option value="COMPLETED">Completed</option>
<option value="ERROR">Error</option>
<option value="CANCELLED">Cancelled</option>
</select>
<label for="type-filter">Type:</label>
<select id="type-filter">
<option value="">All</option>
<option value="track">Track</option>
<option value="album">Album</option>
<option value="playlist">Playlist</option>
<option value="artist">Artist</option>
</select>
<label for="track-filter">Track Status:</label>
<select id="track-filter">
<option value="">All</option>
<option value="SUCCESSFUL">Successful</option>
<option value="SKIPPED">Skipped</option>
<option value="FAILED">Failed</option>
</select>
<div class="checkbox-filter">
<input type="checkbox" id="hide-child-tracks" />
<label for="hide-child-tracks">Hide Individual Tracks</label>
</div>
</div>
<!-- data-sort attributes name the backend field used for column sorting -->
<table>
<thead>
<tr>
<th data-sort="item_name">Name</th>
<th data-sort="item_artist">Artist</th>
<th data-sort="download_type">Type/Status</th>
<th data-sort="service_used">Service</th>
<th data-sort="quality_profile">Quality</th>
<th data-sort="status_final">Status</th>
<th data-sort="timestamp_added">Date Added</th>
<th data-sort="timestamp_completed">Date Completed/Ended</th>
<th>Actions</th>
</tr>
</thead>
<tbody id="history-table-body">
<!-- Rows will be inserted here by JavaScript -->
</tbody>
</table>
<div class="pagination">
<button id="prev-page" disabled>Previous</button>
<span id="page-info">Page 1 of 1</span>
<button id="next-page" disabled>Next</button>
<select id="limit-select">
<option value="10">10 per page</option>
<option value="25" selected>25 per page</option>
<option value="50">50 per page</option>
<option value="100">100 per page</option>
</select>
</div>
</div>
<!-- Fixed floating buttons for home and queue -->
<a href="/" class="btn-icon home-btn floating-icon" aria-label="Return to home" title="Go to Home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home" onerror="handleImageError(this)"/>
</a>
<!-- Link to the new TypeScript file (compiled to JS) -->
<script type="module" src="{{ url_for('static', filename='js/history.js') }}"></script>
<!-- Queue icon, assuming queue.js handles its own initialization if included -->
<!-- You might want to include queue.js here if the queue icon is desired on this page -->
<!-- <script type="module" src="{{ url_for('static', filename='js/queue.js') }}"></script> -->
</body>
</html>

44
tests/README.md Normal file
View File

@@ -0,0 +1,44 @@
# Spotizerr Backend Tests
This directory contains automated tests for the Spotizerr backend API.
## Prerequisites
1. **Running Backend**: Ensure the Spotizerr Flask application is running and accessible at `http://localhost:7171`. You can start it with `python app.py`.
2. **Python Dependencies**: Install the necessary Python packages for testing.
```bash
pip install pytest requests python-dotenv
```
3. **Credentials**: These tests require valid Spotify and Deezer credentials. Create a file named `.env` in the root directory of the project (`spotizerr`) and add your credentials to it. The tests will load this file automatically.
**Example `.env` file:**
```
SPOTIFY_API_CLIENT_ID="your_spotify_client_id"
SPOTIFY_API_CLIENT_SECRET="your_spotify_client_secret"
# This should be the full JSON content of your credentials blob as a single line string
SPOTIFY_BLOB_CONTENT='{"username": "your_spotify_username", "password": "your_spotify_password", ...}'
DEEZER_ARL="your_deezer_arl"
```
The tests will automatically use these credentials to create and manage test accounts named `test-spotify-account` and `test-deezer-account`.
## Running Tests
To run all tests, navigate to the root directory of the project (`spotizerr`) and run `pytest`:
```bash
pytest
```
To run a specific test file:
```bash
pytest tests/test_downloads.py
```
For more detailed output, use the `-v` (verbose) and `-s` (show print statements) flags:
```bash
pytest -v -s
```

1
tests/__init__.py Normal file
View File

@@ -0,0 +1 @@

149
tests/conftest.py Normal file
View File

@@ -0,0 +1,149 @@
import pytest
import requests
import time
import os
import json
from dotenv import load_dotenv
# Load environment variables from .env file in the project root
load_dotenv()

# --- Environment-based secrets for testing ---
# The placeholder defaults double as sentinels: setup_credentials_for_tests
# skips the whole session when any of them still holds its default value.
SPOTIFY_API_CLIENT_ID = os.environ.get("SPOTIFY_API_CLIENT_ID", "your_spotify_client_id")
SPOTIFY_API_CLIENT_SECRET = os.environ.get("SPOTIFY_API_CLIENT_SECRET", "your_spotify_client_secret")
# Full JSON credentials document passed as one string in the environment.
SPOTIFY_BLOB_CONTENT_STR = os.environ.get("SPOTIFY_BLOB_CONTENT", '{}')
try:
    SPOTIFY_BLOB_CONTENT = json.loads(SPOTIFY_BLOB_CONTENT_STR)
except json.JSONDecodeError:
    # Malformed JSON degrades to an empty dict, which also triggers the skip.
    SPOTIFY_BLOB_CONTENT = {}
DEEZER_ARL = os.environ.get("DEEZER_ARL", "your_deezer_arl")

# --- Standard names for test accounts ---
SPOTIFY_ACCOUNT_NAME = "test-spotify-account"
DEEZER_ACCOUNT_NAME = "test-deezer-account"
@pytest.fixture(scope="session")
def base_url():
    """Base URL of the locally running Spotizerr API under test."""
    return "http://localhost:7171/api"
def wait_for_task(base_url, task_id, timeout=600):
    """
    Block until a Celery task reaches a terminal state, polling the progress
    endpoint and printing each status update along the way.

    Returns the final ``last_line`` payload; raises TimeoutError on expiry.
    """
    print(f"\n--- Waiting for task {task_id} (timeout: {timeout}s) ---")
    terminal_states = {"complete", "ERROR", "cancelled", "ERROR_RETRIED", "ERROR_AUTO_CLEANED"}
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            resp = requests.get(f"{base_url}/prgs/{task_id}")
            # Task may not be registered yet — keep polling on 404.
            if resp.status_code == 404:
                time.sleep(1)
                continue
            resp.raise_for_status()  # surface 5xx as RequestException below
            payload = resp.json()
            last = payload.get("last_line") if payload else None
            if not last:
                time.sleep(1)
                continue
            state = last.get("status")
            # Verbose per-poll logging helps debug flaky download tests.
            print(
                f"Task {task_id} | Status: {state:<12} | "
                f"Progress: {last.get('overall_progress', '') or 'N/A':>3}% | "
                f"Track: {last.get('track', ''):<30} | "
                f"Message: {last.get('message', '')}"
            )
            if state in terminal_states:
                print(f"--- Task {task_id} finished with status: {state} ---")
                return last
            time.sleep(2)
        except requests.exceptions.RequestException as e:
            # Transient network/server hiccups: back off and retry.
            print(f"Warning: Request to fetch task status for {task_id} failed: {e}. Retrying...")
            time.sleep(5)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout} seconds.")
@pytest.fixture(scope="session")
def task_waiter(base_url):
    """Expose ``wait_for_task`` pre-bound to the session's base URL."""
    return lambda task_id, timeout=600: wait_for_task(base_url, task_id, timeout)
def _redact(secret):
    """Return a log-safe preview of a secret (first 4 characters only)."""
    if not secret:
        return "<empty>"
    return f"{secret[:4]}***"


@pytest.fixture(scope="session", autouse=True)
def setup_credentials_for_tests(base_url):
    """
    A session-wide, automatic fixture to set up all necessary credentials.
    It runs once before any tests, and tears down the credentials after all
    tests are complete. Skips the entire session when secrets are missing.
    """
    print("\n--- Setting up credentials for test session ---")
    # Security: never print raw secrets — the client secret, ARL, and the
    # blob (which contains the account password) would otherwise land in CI
    # logs. Only redacted previews / key names are logged.
    print("\n--- DEBUGGING CREDENTIALS ---")
    print(f"SPOTIFY_API_CLIENT_ID: {SPOTIFY_API_CLIENT_ID}")
    print(f"SPOTIFY_API_CLIENT_SECRET: {_redact(SPOTIFY_API_CLIENT_SECRET)}")
    print(f"DEEZER_ARL: {_redact(DEEZER_ARL)}")
    print(f"SPOTIFY_BLOB_CONTENT keys: {sorted(SPOTIFY_BLOB_CONTENT)}")
    print("--- END DEBUGGING ---\n")
    # Skip all tests if secrets are not provided in the environment
    if SPOTIFY_API_CLIENT_ID == "your_spotify_client_id" or \
       SPOTIFY_API_CLIENT_SECRET == "your_spotify_client_secret" or \
       not SPOTIFY_BLOB_CONTENT or \
       DEEZER_ARL == "your_deezer_arl":
        pytest.skip("Required credentials not provided in .env file or environment. Skipping credential-dependent tests.")
    # 1. Set global Spotify API creds
    data = {"client_id": SPOTIFY_API_CLIENT_ID, "client_secret": SPOTIFY_API_CLIENT_SECRET}
    response = requests.put(f"{base_url}/credentials/spotify_api_config", json=data)
    if response.status_code != 200:
        pytest.fail(f"Failed to set global Spotify API creds: {response.text}")
    print("Global Spotify API credentials set.")
    # 2. Delete any pre-existing test credentials to ensure a clean state
    requests.delete(f"{base_url}/credentials/spotify/{SPOTIFY_ACCOUNT_NAME}")
    requests.delete(f"{base_url}/credentials/deezer/{DEEZER_ACCOUNT_NAME}")
    print("Cleaned up any old test credentials.")
    # 3. Create Deezer credential
    data = {"name": DEEZER_ACCOUNT_NAME, "arl": DEEZER_ARL, "region": "US"}
    response = requests.post(f"{base_url}/credentials/deezer/{DEEZER_ACCOUNT_NAME}", json=data)
    if response.status_code != 201:
        pytest.fail(f"Failed to create Deezer credential: {response.text}")
    print("Deezer test credential created.")
    # 4. Create Spotify credential
    data = {"name": SPOTIFY_ACCOUNT_NAME, "blob_content": SPOTIFY_BLOB_CONTENT, "region": "US"}
    response = requests.post(f"{base_url}/credentials/spotify/{SPOTIFY_ACCOUNT_NAME}", json=data)
    if response.status_code != 201:
        pytest.fail(f"Failed to create Spotify credential: {response.text}")
    print("Spotify test credential created.")
    # 5. Set main config to use these accounts for downloads
    config_payload = {
        "spotify": SPOTIFY_ACCOUNT_NAME,
        "deezer": DEEZER_ACCOUNT_NAME,
    }
    response = requests.post(f"{base_url}/config", json=config_payload)
    if response.status_code != 200:
        pytest.fail(f"Failed to set main config for tests: {response.text}")
    print("Main config set to use test credentials.")
    yield  # This is where the tests will run
    # --- Teardown ---
    print("\n--- Tearing down test credentials ---")
    response = requests.delete(f"{base_url}/credentials/spotify/{SPOTIFY_ACCOUNT_NAME}")
    assert response.status_code in [200, 404]
    response = requests.delete(f"{base_url}/credentials/deezer/{DEEZER_ACCOUNT_NAME}")
    assert response.status_code in [200, 404]
    print("Test credentials deleted.")

113
tests/test_config.py Normal file
View File

@@ -0,0 +1,113 @@
import requests
import pytest
@pytest.fixture
def reset_config(base_url):
    """Snapshot the main config before the test and restore it afterwards."""
    snapshot = requests.get(f"{base_url}/config")
    assert snapshot.status_code == 200
    saved = snapshot.json()
    yield
    restored = requests.post(f"{base_url}/config", json=saved)
    assert restored.status_code == 200
def test_get_main_config(base_url):
    """The main configuration endpoint returns all expected top-level fields."""
    resp = requests.get(f"{base_url}/config")
    assert resp.status_code == 200
    cfg = resp.json()
    # "spotify"/"deezer" are populated by the session fixture in conftest.
    for field in ("service", "maxConcurrentDownloads", "spotify", "deezer",
                  "fallback", "realTime", "maxRetries"):
        assert field in cfg
def test_update_main_config(base_url, reset_config):
    """Every field exposed by the frontend settings page round-trips on update."""
    new_settings = {
        "maxConcurrentDownloads": 5,
        "spotifyQuality": "HIGH",
        "deezerQuality": "FLAC",
        "customDirFormat": "%artist%/%album%",
        "customTrackFormat": "%tracknum% %title%",
        "save_cover": False,
        "fallback": True,
        "realTime": False,
        "maxRetries": 5,
        "retryDelaySeconds": 10,
        "retry_delay_increase": 10,
        "tracknum_padding": False,
    }
    resp = requests.post(f"{base_url}/config", json=new_settings)
    assert resp.status_code == 200
    echoed = resp.json()
    for field, expected in new_settings.items():
        assert echoed[field] == expected
def test_get_watch_config(base_url):
    """The watch-specific configuration endpoint exposes the expected settings."""
    resp = requests.get(f"{base_url}/config/watch")
    assert resp.status_code == 200
    cfg = resp.json()
    for field in ("enabled", "watchPollIntervalSeconds", "watchedArtistAlbumGroup"):
        assert field in cfg
def test_update_watch_config(base_url):
    """
    Tests updating the watch-specific configuration.

    Restores the original config in a ``finally`` block so a failing
    assertion cannot leave watch mode misconfigured for later tests.
    """
    response = requests.get(f"{base_url}/config/watch")
    assert response.status_code == 200  # previously unchecked
    original_config = response.json()
    new_settings = {
        "enabled": False,
        "watchPollIntervalSeconds": 7200,
        "watchedArtistAlbumGroup": ["album", "single"],
    }
    try:
        response = requests.post(f"{base_url}/config/watch", json=new_settings)
        assert response.status_code == 200
        # The response for updating watch config is just a success message,
        # so we need to GET the config again to verify.
        verify_response = requests.get(f"{base_url}/config/watch")
        assert verify_response.status_code == 200
        updated_config = verify_response.json()
        for key, value in new_settings.items():
            assert updated_config[key] == value
    finally:
        # Revert to original even when an assertion above fails.
        requests.post(f"{base_url}/config/watch", json=original_config)
def test_update_conversion_config(base_url, reset_config):
    """
    Walks every conversion format/bitrate combination supported by the
    frontend, posting each one and verifying the echoed configuration.
    """
    # Formats and bitrates aligned with src/js/config.ts
    bitrates_by_format = {
        "MP3": ["128k", "320k"],
        "AAC": ["128k", "256k"],
        "OGG": ["128k", "320k"],
        "OPUS": ["96k", "256k"],
        "FLAC": [None],
        "WAV": [None],
        "ALAC": [None],
    }
    for fmt in ("MP3", "AAC", "OGG", "OPUS", "FLAC", "WAV", "ALAC"):
        for rate in bitrates_by_format.get(fmt, [None]):
            print(f"Testing conversion config: format={fmt}, bitrate={rate}")
            resp = requests.post(f"{base_url}/config", json={"convertTo": fmt, "bitrate": rate})
            assert resp.status_code == 200
            echoed = resp.json()
            assert echoed["convertTo"] == fmt
            # The backend might return null for empty bitrate, which is fine
            assert echoed["bitrate"] == rate

212
tests/test_downloads.py Normal file
View File

@@ -0,0 +1,212 @@
import requests
import pytest
import os
import shutil
# URLs for testing
SPOTIFY_TRACK_URL = "https://open.spotify.com/track/1Cts4YV9aOXVAP3bm3Ro6r"
SPOTIFY_ALBUM_URL = "https://open.spotify.com/album/4K0JVP5veNYTVI6IMamlla"
SPOTIFY_PLAYLIST_URL = "https://open.spotify.com/playlist/26CiMxIxdn5WhXyccMCPOB"
SPOTIFY_ARTIST_URL = "https://open.spotify.com/artist/7l6cdPhOLYO7lehz5xfzLV"


def _id_from_url(url):
    """Return the Spotify ID: last path segment with any query string removed."""
    return url.rsplit("/", 1)[-1].split("?", 1)[0]


# Corresponding IDs extracted from URLs
TRACK_ID = _id_from_url(SPOTIFY_TRACK_URL)
ALBUM_ID = _id_from_url(SPOTIFY_ALBUM_URL)
PLAYLIST_ID = _id_from_url(SPOTIFY_PLAYLIST_URL)
ARTIST_ID = _id_from_url(SPOTIFY_ARTIST_URL)

DOWNLOAD_DIR = "downloads/"


def get_downloaded_files(directory=DOWNLOAD_DIR):
    """Walks a directory and returns a list of all file paths."""
    if not os.path.isdir(directory):
        return []
    return [
        os.path.join(root, name)
        for root, _, names in os.walk(directory)
        for name in names
        if not name.startswith(".")  # ignore hidden files like .DS_Store
    ]
@pytest.fixture(autouse=True)
def cleanup_downloads_dir():
    """Give every test an empty downloads directory; remove it again afterwards."""
    def _wipe():
        if os.path.exists(DOWNLOAD_DIR):
            shutil.rmtree(DOWNLOAD_DIR)

    _wipe()
    os.makedirs(DOWNLOAD_DIR, exist_ok=True)
    yield
    _wipe()
@pytest.fixture
def reset_config(base_url):
    """
    Force maxConcurrentDownloads=1 for test isolation, restoring the
    previous configuration once the test finishes.
    """
    saved = requests.get(f"{base_url}/config").json()
    requests.post(f"{base_url}/config", json={"maxConcurrentDownloads": 1})
    yield
    requests.post(f"{base_url}/config", json=saved)
@pytest.mark.parametrize("download_type, item_id, timeout, expected_files_min", [
    ("track", TRACK_ID, 600, 1),
    ("album", ALBUM_ID, 900, 14),  # "After Hours" has 14 tracks
    ("playlist", PLAYLIST_ID, 1200, 4),  # Test playlist has 4 tracks
])
def test_spotify_download_and_verify_files(base_url, task_waiter, reset_config, download_type, item_id, timeout, expected_files_min):
    """
    Downloads a track, album, or playlist via Spotify only and checks that
    at least the expected number of files lands on disk.
    """
    print(f"\n--- Testing Spotify-only '{download_type}' download and verifying files ---")
    requests.post(f"{base_url}/config", json={
        "service": "spotify",
        "fallback": False,
        "realTime": True,
        "spotifyQuality": "NORMAL",
    })
    queue_resp = requests.get(f"{base_url}/{download_type}/download/{item_id}")
    assert queue_resp.status_code == 202
    final_status = task_waiter(queue_resp.json()["task_id"], timeout=timeout)
    assert final_status["status"] == "complete", f"Task failed for {download_type} {item_id}: {final_status.get('error')}"
    # Verify that the correct number of files were downloaded.
    on_disk = get_downloaded_files()
    assert len(on_disk) >= expected_files_min, (
        f"Expected at least {expected_files_min} file(s) for {download_type} {item_id}, "
        f"but found {len(on_disk)}."
    )
def test_artist_download_and_verify_files(base_url, task_waiter, reset_config):
    """
    Queues a full artist download and checks that files appear on disk.
    Exact counts are not asserted — discographies change over time.
    """
    print("\n--- Testing Spotify-only artist download and verifying files ---")
    requests.post(f"{base_url}/config", json={
        "service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"
    })
    resp = requests.get(f"{base_url}/artist/download/{ARTIST_ID}?album_type=album,single")
    assert resp.status_code == 202
    albums = resp.json().get("queued_albums", [])
    assert albums, "No albums were queued for the artist."
    for queued in albums:
        print(f"--- Waiting for artist album: {queued['name']} ({queued['task_id']}) ---")
        outcome = task_waiter(queued["task_id"], timeout=900)
        assert outcome["status"] == "complete", f"Artist album task {queued['name']} failed: {outcome.get('error')}"
    # After all tasks complete, at least something must have been written.
    assert get_downloaded_files(), "Artist download ran but no files were found in the download directory."
def test_download_with_deezer_fallback_and_verify_files(base_url, task_waiter, reset_config):
    """Downloads a track with Deezer fallback enabled and checks a file exists."""
    print("\n--- Testing track download with Deezer fallback and verifying files ---")
    requests.post(f"{base_url}/config", json={
        "service": "spotify",
        "fallback": True,
        "deezerQuality": "FLAC",  # exercise the high-quality fallback path
    })
    resp = requests.get(f"{base_url}/track/download/{TRACK_ID}")
    assert resp.status_code == 202
    outcome = task_waiter(resp.json()["task_id"])
    assert outcome["status"] == "complete", f"Task failed with fallback: {outcome.get('error')}"
    assert len(get_downloaded_files()) >= 1, "Fallback download completed but no file was found."
def test_download_without_realtime_and_verify_files(base_url, task_waiter, reset_config):
    """Downloads a track with realTime disabled and checks a file exists."""
    print("\n--- Testing download with realTime: False and verifying files ---")
    requests.post(f"{base_url}/config", json={
        "service": "spotify",
        "fallback": False,
        "realTime": False,
        "spotifyQuality": "NORMAL",
    })
    resp = requests.get(f"{base_url}/track/download/{TRACK_ID}")
    assert resp.status_code == 202
    outcome = task_waiter(resp.json()["task_id"])
    assert outcome["status"] == "complete", f"Task failed with realTime=False: {outcome.get('error')}"
    assert len(get_downloaded_files()) >= 1, "Non-realtime download completed but no file was found."
# Aligned with formats in src/js/config.ts's CONVERSION_FORMATS
@pytest.mark.parametrize("format_name,bitrate,expected_ext", [
    ("mp3", "320k", ".mp3"),
    ("aac", "256k", ".m4a"),  # AAC is typically in an M4A container
    ("ogg", "320k", ".ogg"),
    ("opus", "256k", ".opus"),
    ("flac", None, ".flac"),
    ("wav", None, ".wav"),
    ("alac", None, ".m4a"),  # ALAC is also in an M4A container
])
def test_download_with_conversion_and_verify_format(base_url, task_waiter, reset_config, format_name, bitrate, expected_ext):
    """
    Downloads one track per conversion format and checks that the produced
    file carries the expected extension.
    """
    print(f"\n--- Testing conversion: {format_name.upper()} @ {bitrate or 'default'} ---")
    requests.post(f"{base_url}/config", json={
        "service": "spotify",
        "fallback": False,
        "realTime": True,
        "spotifyQuality": "NORMAL",
        "convertTo": format_name.upper(),
        "bitrate": bitrate,
    })
    resp = requests.get(f"{base_url}/track/download/{TRACK_ID}")
    assert resp.status_code == 202
    outcome = task_waiter(resp.json()["task_id"])
    assert outcome["status"] == "complete", f"Download failed for format {format_name} bitrate {bitrate}: {outcome.get('error')}"
    produced = get_downloaded_files()
    assert len(produced) >= 1, "Conversion download completed but no file was found."
    assert any(path.lower().endswith(expected_ext) for path in produced), (
        f"No file with expected extension '{expected_ext}' found for format '{format_name}'. "
        f"Found files: {produced}"
    )

61
tests/test_history.py Normal file
View File

@@ -0,0 +1,61 @@
import requests
import pytest
import time
TRACK_ID = "1Cts4YV9aOXVAP3bm3Ro6r"  # Use a known, short track


@pytest.fixture
def reset_config(base_url):
    """Snapshot the main config and restore it once the test finishes."""
    saved = requests.get(f"{base_url}/config").json()
    yield
    requests.post(f"{base_url}/config", json=saved)
def test_history_logging_and_filtering(base_url, task_waiter, reset_config):
    """
    Tests if a completed download appears in the history and
    verifies that history filtering works correctly.
    """
    # First, complete a download task to ensure there's a history entry
    config_payload = {"service": "spotify", "fallback": False, "realTime": True}
    requests.post(f"{base_url}/config", json=config_payload)
    response = requests.get(f"{base_url}/track/download/{TRACK_ID}")
    # Queueing a download returns 202 Accepted (was wrongly asserted as 200;
    # every other test in the suite expects 202 from this endpoint).
    assert response.status_code == 202
    task_id = response.json()["task_id"]
    task_waiter(task_id)  # Wait for the download to complete
    # Give a moment for history to be written if it's asynchronous
    time.sleep(2)
    # 1. Get all history and check if our task is present
    print("\n--- Verifying task appears in general history ---")
    response = requests.get(f"{base_url}/history")
    assert response.status_code == 200
    history_data = response.json()
    assert "entries" in history_data
    assert "total" in history_data
    assert history_data["total"] > 0
    # Find our specific task in the history
    history_entry = next((entry for entry in history_data["entries"] if entry['task_id'] == task_id), None)
    assert history_entry is not None, f"Task {task_id} not found in download history."
    assert history_entry["status_final"] == "COMPLETED"
    # 2. Test filtering for COMPLETED tasks
    print("\n--- Verifying history filtering for COMPLETED status ---")
    response = requests.get(f"{base_url}/history?filters[status_final]=COMPLETED")
    assert response.status_code == 200
    completed_history = response.json()
    assert completed_history["total"] > 0
    assert any(entry['task_id'] == task_id for entry in completed_history["entries"])
    assert all(entry['status_final'] == 'COMPLETED' for entry in completed_history["entries"])
    # 3. Test filtering for an item name
    print(f"\n--- Verifying history filtering for item_name: {history_entry['item_name']} ---")
    item_name_query = requests.utils.quote(history_entry['item_name'])
    response = requests.get(f"{base_url}/history?filters[item_name]={item_name_query}")
    assert response.status_code == 200
    named_history = response.json()
    assert named_history["total"] > 0
    assert any(entry['task_id'] == task_id for entry in named_history["entries"])

35
tests/test_search.py Normal file
View File

@@ -0,0 +1,35 @@
import requests
import pytest
def test_search_spotify_artist(base_url):
    """Artist search returns at least one hit whose top result matches the query."""
    resp = requests.get(f"{base_url}/search?q=Daft+Punk&search_type=artist")
    assert resp.status_code == 200
    payload = resp.json()
    assert "items" in payload
    assert payload["items"]
    assert "Daft Punk" in payload["items"][0]["name"]
def test_search_spotify_track(base_url):
    """Track search on Spotify returns a non-empty item list."""
    resp = requests.get(f"{base_url}/search?q=Get+Lucky&search_type=track")
    assert resp.status_code == 200
    payload = resp.json()
    assert "items" in payload
    assert payload["items"]
def test_search_deezer_track(base_url):
    """Track search labelled as Deezer returns a non-empty item list.

    NOTE(review): no service parameter is sent, so this hits the same
    endpoint/default service as the Spotify tests — confirm intent.
    """
    resp = requests.get(f"{base_url}/search?q=Instant+Crush&search_type=track")
    assert resp.status_code == 200
    payload = resp.json()
    assert "items" in payload
    assert payload["items"]
def test_search_deezer_album(base_url):
    """Album search labelled as Deezer returns a non-empty item list.

    NOTE(review): no service parameter is sent here either — see
    test_search_deezer_track.
    """
    resp = requests.get(f"{base_url}/search?q=Random+Access+Memories&search_type=album")
    assert resp.status_code == 200
    payload = resp.json()
    assert "items" in payload
    assert payload["items"]

117
tests/test_watch.py Normal file
View File

@@ -0,0 +1,117 @@
import requests
import pytest
import time
SPOTIFY_PLAYLIST_ID = "26CiMxIxdn5WhXyccMCPOB"
SPOTIFY_ARTIST_ID = "7l6cdPhOLYO7lehz5xfzLV"


@pytest.fixture(autouse=True)
def setup_and_cleanup_watch_tests(base_url):
    """
    Enable watch mode and clear the watchlist before each test, then restore
    the original watch config and clean up again afterwards.
    """
    snapshot_resp = requests.get(f"{base_url}/config/watch")
    assert snapshot_resp.status_code == 200
    saved_config = snapshot_resp.json()
    # Turn watch mode on only when it is currently off.
    if not saved_config.get("enabled"):
        enable_resp = requests.post(f"{base_url}/config/watch", json={"enabled": True})
        assert enable_resp.status_code == 200

    def _clear_watchlist():
        requests.delete(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}")
        requests.delete(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}")

    _clear_watchlist()
    yield
    _clear_watchlist()
    restore_resp = requests.post(f"{base_url}/config/watch", json=saved_config)
    assert restore_resp.status_code == 200
def test_add_and_list_playlist_to_watch(base_url):
    """Add a playlist to the watch list and confirm it shows up in the listing."""
    add_resp = requests.put(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}")
    assert add_resp.status_code == 200
    assert "Playlist added to watch list" in add_resp.json()["message"]

    # The listing endpoint should now include the playlist we just added.
    list_resp = requests.get(f"{base_url}/playlist/watch/list")
    assert list_resp.status_code == 200
    watched_ids = [entry["spotify_id"] for entry in list_resp.json()]
    assert SPOTIFY_PLAYLIST_ID in watched_ids
def test_add_and_list_artist_to_watch(base_url):
    """Add an artist to the watch list and confirm it shows up in the listing."""
    add_resp = requests.put(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}")
    assert add_resp.status_code == 200
    assert "Artist added to watch list" in add_resp.json()["message"]

    # The listing endpoint should now include the artist we just added.
    list_resp = requests.get(f"{base_url}/artist/watch/list")
    assert list_resp.status_code == 200
    watched_ids = [entry["spotify_id"] for entry in list_resp.json()]
    assert SPOTIFY_ARTIST_ID in watched_ids
def test_trigger_playlist_check(base_url):
    """Tests the endpoint for manually triggering a check on a watched playlist.

    Only verifies that the trigger endpoint responds successfully; it does not
    confirm that any new download tasks were actually queued.
    """
    # First, add the playlist to the watch list. Assert the setup step so a
    # failure here is reported directly instead of surfacing later as a
    # confusing trigger-endpoint error.
    setup_resp = requests.put(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}")
    assert setup_resp.status_code == 200

    # Trigger the check
    response = requests.post(f"{base_url}/playlist/watch/trigger_check/{SPOTIFY_PLAYLIST_ID}")
    assert response.status_code == 200
    assert "Check triggered for playlist" in response.json()["message"]
    # A full verification would require inspecting the database or new tasks,
    # but for an API test, confirming the trigger endpoint responds correctly is the key goal.
    print("Playlist check triggered. Note: This does not verify new downloads were queued.")
def test_trigger_artist_check(base_url):
    """Tests the endpoint for manually triggering a check on a watched artist.

    Only verifies that the trigger endpoint responds successfully; it does not
    confirm that any new download tasks were actually queued.
    """
    # First, add the artist to the watch list. Assert the setup step so a
    # failure here is reported directly instead of surfacing later as a
    # confusing trigger-endpoint error.
    setup_resp = requests.put(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}")
    assert setup_resp.status_code == 200

    # Trigger the check
    response = requests.post(f"{base_url}/artist/watch/trigger_check/{SPOTIFY_ARTIST_ID}")
    assert response.status_code == 200
    assert "Check triggered for artist" in response.json()["message"]
    print("Artist check triggered. Note: This does not verify new downloads were queued.")
def test_remove_playlist_from_watch(base_url):
    """Tests removing a playlist from the watch list."""
    # Add the playlist first to ensure it exists. Assert the setup step so a
    # failure here does not masquerade as a removal failure.
    setup_resp = requests.put(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}")
    assert setup_resp.status_code == 200

    # Now, remove it
    response = requests.delete(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}")
    assert response.status_code == 200
    assert "Playlist removed from watch list" in response.json()["message"]

    # Verify it's no longer in the list
    response = requests.get(f"{base_url}/playlist/watch/list")
    assert response.status_code == 200
    watched_playlists = response.json()
    assert not any(p['spotify_id'] == SPOTIFY_PLAYLIST_ID for p in watched_playlists)
def test_remove_artist_from_watch(base_url):
    """Tests removing an artist from the watch list."""
    # Add the artist first to ensure it exists. Assert the setup step so a
    # failure here does not masquerade as a removal failure.
    setup_resp = requests.put(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}")
    assert setup_resp.status_code == 200

    # Now, remove it
    response = requests.delete(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}")
    assert response.status_code == 200
    assert "Artist removed from watch list" in response.json()["message"]

    # Verify it's no longer in the list
    response = requests.get(f"{base_url}/artist/watch/list")
    assert response.status_code == 200
    watched_artists = response.json()
    assert not any(a['spotify_id'] == SPOTIFY_ARTIST_ID for a in watched_artists)