Finally fix history (#187)
app.py
@@ -23,7 +23,6 @@ from urllib.parse import urlparse
# Import Celery configuration and manager
from routes.utils.celery_manager import celery_manager
from routes.utils.celery_config import REDIS_URL
from routes.utils.history_manager import init_history_db


# Configure application-wide logging
@@ -150,9 +149,6 @@ def create_app():
    # Set up CORS
    CORS(app)

    # Initialize databases
    init_history_db()

    # Register blueprints
    app.register_blueprint(config_bp, url_prefix="/api")
    app.register_blueprint(search_bp, url_prefix="/api")
@@ -1,649 +1,374 @@
from flask import Blueprint, jsonify, request
from routes.utils.history_manager import (
    get_task_history,
    get_child_tracks,
    get_status_history,
    get_track_mini_history,
    add_track_status_update,
    # Legacy compatibility
    get_history_entries
)
from flask import Blueprint, Response, request, jsonify
import json
import traceback
import logging
from routes.utils.history_manager import history_manager

logger = logging.getLogger(__name__)
history_bp = Blueprint("history", __name__, url_prefix="/api/history")

"""
Enhanced History API Endpoints:

Main History Endpoints:
- GET /api/history - Get paginated download history with filtering
- GET /api/history/task/<task_id> - Get detailed task information
- GET /api/history/summary - Get summary statistics

Track Management Endpoints:
- GET /api/history/tracks/<parent_task_id> - Get all tracks for a parent task
  ?include_mini_histories=true - Include comprehensive mini-histories for each track
- GET /api/history/tracks/<parent_task_id>/mini-histories - Get mini-histories for all tracks

Individual Track Endpoints:
- GET /api/history/track/<parent_task_id>/<track_id>/mini-history - Get comprehensive mini-history for a specific track
- GET /api/history/track/<parent_task_id>/<track_id>/timeline - Get simplified timeline view
- POST /api/history/track/<parent_task_id>/<track_id>/status - Update track status (admin/testing)

Status & Legacy:
- GET /api/history/status/<task_id> - Get complete status history for a task
- GET /api/history/legacy - Legacy endpoint for backward compatibility

Mini-History Features:
- Complete status progression timeline with timestamps
- Progress tracking and retry information
- File size, quality, and download path details
- Error information and duration statistics
- Human-readable timestamps and calculated metrics
"""
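Before the handlers themselves, a minimal client sketch for orientation; the host and port are assumptions, and the response shape ("downloads" plus a "pagination" block) follows the history_manager-based handler in this revision:

# Illustrative only -- host/port are assumptions, not part of this commit.
import requests

BASE_URL = "http://localhost:7171/api/history"

resp = requests.get(f"{BASE_URL}/", params={"limit": 25, "offset": 0, "status": "completed"})
resp.raise_for_status()
page = resp.json()
for item in page.get("downloads", []):
    print(item.get("task_id"), item.get("title"))
print(page.get("pagination"))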
history_bp = Blueprint("history", __name__)


@history_bp.route("", methods=["GET"])
def get_download_history():
    """API endpoint to retrieve download history with pagination, sorting, and filtering."""
@history_bp.route("/", methods=["GET"])
def get_history():
    """
    Retrieve download history with optional filtering and pagination.

    Query parameters:
    - limit: Maximum number of records (default: 100, max: 500)
    - offset: Number of records to skip (default: 0)
    - download_type: Filter by type ('track', 'album', 'playlist')
    - status: Filter by status ('completed', 'failed', 'skipped', 'in_progress')
    """
    try:
        limit = request.args.get("limit", 25, type=int)
        offset = request.args.get("offset", 0, type=int)
        sort_by = request.args.get("sort_by", "timestamp_updated")
        sort_order = request.args.get("sort_order", "DESC")
        include_children = request.args.get("include_children", "false").lower() == "true"

        # Create filters dictionary for various filter options
        filters = {}
        # Parse query parameters
        limit = min(int(request.args.get("limit", 100)), 500)  # Cap at 500
        offset = max(int(request.args.get("offset", 0)), 0)
        download_type = request.args.get("download_type")
        status = request.args.get("status")

        # Status filter - support both old and new field names
        status_filter = request.args.get("status_final")
        if status_filter:
            filters["status_final"] = status_filter

        # Task type filter (renamed from download_type)
        type_filter = request.args.get("task_type") or request.args.get("download_type")
        if type_filter:
            filters["task_type"] = type_filter

        # Parent task filter
        parent_task_filter = request.args.get("parent_task_id")
        if parent_task_filter:
            filters["parent_task_id"] = parent_task_filter

        # Show/hide child tracks (tasks with parent_task_id)
        hide_child_tracks = request.args.get("hide_child_tracks", "false").lower() == "true"
        if hide_child_tracks:
            filters["parent_task_id"] = None  # Only show parent entries or standalone tracks

        # Show only child tracks
        only_child_tracks = request.args.get("only_child_tracks", "false").lower() == "true"
        if only_child_tracks and not parent_task_filter:
            # This would require a NOT NULL filter, but we'll handle it differently
            # by excluding tasks that don't have a parent_task_id
            pass  # We'll implement this in the query logic

        # Additional filters
        current_status_filter = request.args.get("status_current")
        if current_status_filter:
            filters["status_current"] = current_status_filter

        tasks, total_count = get_task_history(
            limit=limit,
            offset=offset,
            sort_by=sort_by,
            sort_order=sort_order,
            filters=filters,
            include_children=include_children
        # Validate download_type if provided
        valid_types = ["track", "album", "playlist"]
        if download_type and download_type not in valid_types:
            return Response(
                json.dumps({"error": f"Invalid download_type. Must be one of: {valid_types}"}),
                status=400,
                mimetype="application/json",
            )

        # Validate status if provided
        valid_statuses = ["completed", "failed", "skipped", "in_progress"]
        if status and status not in valid_statuses:
            return Response(
                json.dumps({"error": f"Invalid status. Must be one of: {valid_statuses}"}),
                status=400,
                mimetype="application/json",
            )

        # Get history from manager
        history = history_manager.get_download_history(
            limit=limit,
            offset=offset,
            download_type=download_type,
            status=status
        )

        # Transform data for backward compatibility and add computed fields
        entries = []
        for task in tasks:
            entry = {
                # Core fields
                "task_id": task["task_id"],
                "task_type": task["task_type"],
                "title": task["title"],
                "status_current": task["status_current"],
                "status_final": task["status_final"],
                "timestamp_created": task["timestamp_created"],
                "timestamp_updated": task["timestamp_updated"],
                "timestamp_completed": task["timestamp_completed"],
                "parent_task_id": task["parent_task_id"],
                "position": task["position"],

                # Legacy compatibility fields
                "download_type": task["task_type"],
                "item_name": task["title"],
                "timestamp_added": task["timestamp_created"],

                # Rich data fields (parsed JSON)
                "artists": task.get("artists", []),
                "ids": task.get("ids", {}),
                "metadata": task.get("metadata", {}),
                "config": task.get("config", {}),
                "error_info": task.get("error_info", {}),
                "progress": task.get("progress", {}),
                "summary": task.get("summary", {}),

                # Child information
                "children_table": task["children_table"],
                "has_children": bool(task["children_table"]),
                "child_tracks": task.get("child_tracks", []) if include_children else []

        # Add pagination info
        response_data = {
            "downloads": history,
            "pagination": {
                "limit": limit,
                "offset": offset,
                "returned_count": len(history)
            }

            # Extract commonly used fields for easier access
            if entry["artists"]:
                entry["artist_names"] = [artist.get("name", "") for artist in entry["artists"]]
                entry["item_artist"] = ", ".join(entry["artist_names"])  # Legacy compatibility

            if entry["config"]:
                entry["service_used"] = entry["config"].get("service_used")
                entry["quality_profile"] = entry["config"].get("quality_profile")
                entry["convert_to"] = entry["config"].get("convert_to")
                entry["bitrate"] = entry["config"].get("bitrate")

            if entry["error_info"]:
                entry["error_message"] = entry["error_info"].get("message")  # Legacy compatibility

            # Extract album info from metadata if available
            if entry["metadata"] and "album" in entry["metadata"]:
                entry["item_album"] = entry["metadata"]["album"].get("title")

            # Child track summary
            if entry["child_tracks"]:
                entry["child_track_count"] = len(entry["child_tracks"])
                entry["child_track_summary"] = {
                    "completed": len([t for t in entry["child_tracks"] if t.get("status_final") == "COMPLETED"]),
                    "error": len([t for t in entry["child_tracks"] if t.get("status_final") == "ERROR"]),
                    "skipped": len([t for t in entry["child_tracks"] if t.get("status_final") == "SKIPPED"])
                }

            entries.append(entry)

        return jsonify({
            "entries": entries,
            "total_count": total_count,
            "limit": limit,
            "offset": offset,
            "include_children": include_children
        })
        }

    except Exception as e:
        logger.error(f"Error in /api/history endpoint: {e}", exc_info=True)
        return jsonify({"error": "Failed to retrieve download history"}), 500
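The handler above caps limit at 500 and floors offset at 0; a hedged sketch of a client paginating through the full history under that contract (base URL assumed):

# Illustrative pagination loop; stops on the first short page.
import requests

BASE_URL = "http://localhost:7171/api/history/"  # assumed host/port

def iter_history(page_size=100):
    offset = 0
    while True:
        page = requests.get(BASE_URL, params={"limit": page_size, "offset": offset}).json()
        downloads = page.get("downloads", [])
        yield from downloads
        if len(downloads) < page_size:
            break  # a short page means the end of the history
        offset += page_size

for record in iter_history():
    pass  # process each history record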
@history_bp.route("/task/<task_id>", methods=["GET"])
|
||||
def get_task_details(task_id):
|
||||
"""API endpoint to retrieve detailed information about a specific task."""
|
||||
try:
|
||||
include_children = request.args.get("include_children", "true").lower() == "true"
|
||||
include_status_history = request.args.get("include_status_history", "false").lower() == "true"
|
||||
if download_type:
|
||||
response_data["filters"] = {"download_type": download_type}
|
||||
if status:
|
||||
if "filters" not in response_data:
|
||||
response_data["filters"] = {}
|
||||
response_data["filters"]["status"] = status
|
||||
|
||||
# Get the task
|
||||
tasks, _ = get_task_history(
|
||||
limit=1,
|
||||
offset=0,
|
||||
filters={"task_id": task_id},
|
||||
include_children=include_children
|
||||
return Response(
|
||||
json.dumps(response_data),
|
||||
status=200,
|
||||
mimetype="application/json"
|
||||
)
|
||||
|
||||
if not tasks:
|
||||
return jsonify({"error": f"Task {task_id} not found"}), 404
|
||||
|
||||
task = tasks[0]
|
||||
|
||||
# Add status history if requested
|
||||
if include_status_history:
|
||||
task["status_history"] = get_status_history(task_id)
|
||||
|
||||
return jsonify({
|
||||
"task": task,
|
||||
"include_children": include_children,
|
||||
"include_status_history": include_status_history
|
||||
})
|
||||
|
||||
except ValueError as e:
|
||||
return Response(
|
||||
json.dumps({"error": f"Invalid parameter value: {str(e)}"}),
|
||||
status=400,
|
||||
mimetype="application/json",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/task/{task_id} endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to retrieve task {task_id}"}), 500
|
||||
logger.error(f"Error retrieving download history: {e}", exc_info=True)
|
||||
return Response(
|
||||
json.dumps({"error": "Failed to retrieve download history", "details": str(e)}),
|
||||
status=500,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
|
||||
@history_bp.route("/tracks/<parent_task_id>", methods=["GET"])
|
||||
def get_tracks_for_parent(parent_task_id):
|
||||
"""API endpoint to retrieve all track entries for a specific parent task."""
|
||||
@history_bp.route("/<task_id>", methods=["GET"])
|
||||
def get_download_by_task_id(task_id):
|
||||
"""
|
||||
Retrieve specific download history by task ID.
|
||||
|
||||
Args:
|
||||
task_id: Celery task ID
|
||||
"""
|
||||
try:
|
||||
# First, verify the parent task exists and get its children table
|
||||
parent_tasks, _ = get_task_history(
|
||||
limit=1,
|
||||
offset=0,
|
||||
filters={"task_id": parent_task_id}
|
||||
download = history_manager.get_download_by_task_id(task_id)
|
||||
|
||||
if not download:
|
||||
return Response(
|
||||
json.dumps({"error": f"Download with task ID '{task_id}' not found"}),
|
||||
status=404,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
return Response(
|
||||
json.dumps(download),
|
||||
status=200,
|
||||
mimetype="application/json"
|
||||
)
|
||||
|
||||
if not parent_tasks:
|
||||
return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
|
||||
except Exception as e:
|
||||
logger.error(f"Error retrieving download for task {task_id}: {e}", exc_info=True)
|
||||
return Response(
|
||||
json.dumps({"error": "Failed to retrieve download", "details": str(e)}),
|
||||
status=500,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
|
||||
@history_bp.route("/<task_id>/children", methods=["GET"])
|
||||
def get_download_children(task_id):
|
||||
"""
|
||||
Retrieve children tracks for an album or playlist download.
|
||||
|
||||
Args:
|
||||
task_id: Celery task ID
|
||||
"""
|
||||
try:
|
||||
# First get the main download to find the children table
|
||||
download = history_manager.get_download_by_task_id(task_id)
|
||||
|
||||
parent_task = parent_tasks[0]
|
||||
children_table = parent_task.get("children_table")
|
||||
if not download:
|
||||
return Response(
|
||||
json.dumps({"error": f"Download with task ID '{task_id}' not found"}),
|
||||
status=404,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
children_table = download.get("children_table")
|
||||
if not children_table:
|
||||
return jsonify({
|
||||
"parent_task_id": parent_task_id,
|
||||
"tracks": [],
|
||||
"total_count": 0,
|
||||
"message": "No child tracks found for this task"
|
||||
})
|
||||
return Response(
|
||||
json.dumps({"error": f"Download '{task_id}' has no children tracks"}),
|
||||
status=404,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
# Get tracks from the child table
|
||||
tracks = get_child_tracks(children_table)
|
||||
# Get children tracks
|
||||
children = history_manager.get_children_history(children_table)
|
||||
|
||||
# Check if mini-histories should be included
|
||||
include_mini_histories = request.args.get("include_mini_histories", "false").lower() == "true"
|
||||
|
||||
# Sort tracks if requested
|
||||
sort_by = request.args.get("sort_by", "position")
|
||||
sort_order = request.args.get("sort_order", "ASC")
|
||||
|
||||
if sort_by == "position":
|
||||
tracks.sort(key=lambda x: x.get("position", 0), reverse=(sort_order.upper() == "DESC"))
|
||||
elif sort_by == "timestamp_completed":
|
||||
tracks.sort(key=lambda x: x.get("timestamp_completed", 0) or 0, reverse=(sort_order.upper() == "DESC"))
|
||||
|
||||
# Transform tracks for easier consumption
|
||||
transformed_tracks = []
|
||||
for track in tracks:
|
||||
track_info = {
|
||||
"track_id": track["track_id"],
|
||||
"parent_task_id": track["parent_task_id"],
|
||||
"position": track["position"],
|
||||
"status_current": track["status_current"],
|
||||
"status_final": track["status_final"],
|
||||
"timestamp_created": track["timestamp_created"],
|
||||
"timestamp_completed": track["timestamp_completed"],
|
||||
"error_info": track.get("error_info"),
|
||||
"config": track.get("config"),
|
||||
}
|
||||
|
||||
# Parse track data
|
||||
if track["track_data"]:
|
||||
track_data = track["track_data"]
|
||||
track_info.update({
|
||||
"title": track_data.get("title"),
|
||||
"artists": track_data.get("artists", []),
|
||||
"album": track_data.get("album", {}),
|
||||
"duration_ms": track_data.get("duration_ms"),
|
||||
"track_number": track_data.get("track_number"),
|
||||
"disc_number": track_data.get("disc_number"),
|
||||
"explicit": track_data.get("explicit"),
|
||||
"ids": track_data.get("ids", {})
|
||||
})
|
||||
|
||||
# Extract artist names for easier display
|
||||
if track_info["artists"]:
|
||||
track_info["artist_names"] = [artist.get("name", "") for artist in track_info["artists"]]
|
||||
|
||||
# Include mini-history if requested
|
||||
if include_mini_histories:
|
||||
mini_history = get_track_mini_history(track["track_id"], children_table)
|
||||
if mini_history:
|
||||
track_info["mini_history"] = mini_history
|
||||
# Add quick access to timeline and key metrics
|
||||
track_info["timeline"] = mini_history.get("timeline", [])
|
||||
track_info["retry_count"] = mini_history.get("retry_count", 0)
|
||||
track_info["time_elapsed"] = mini_history.get("time_elapsed")
|
||||
track_info["quality_achieved"] = mini_history.get("quality_achieved")
|
||||
track_info["file_size"] = mini_history.get("file_size")
|
||||
track_info["download_path"] = mini_history.get("download_path")
|
||||
|
||||
transformed_tracks.append(track_info)
|
||||
|
||||
return jsonify({
|
||||
"parent_task_id": parent_task_id,
|
||||
"parent_task_info": {
|
||||
"title": parent_task["title"],
|
||||
"task_type": parent_task["task_type"],
|
||||
"status_final": parent_task["status_final"]
|
||||
},
|
||||
"tracks": transformed_tracks,
|
||||
"total_count": len(transformed_tracks),
|
||||
"include_mini_histories": include_mini_histories
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/tracks/{parent_task_id} endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to retrieve tracks for parent task {parent_task_id}"}), 500
|
||||
|
||||
|
||||
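The children_table used above is a per-parent table created dynamically by the history manager; its schema lives in routes/utils/history_manager.py and is not part of this diff, so the following sqlite3 sketch of a child-track lookup is purely illustrative (file name, table layout, and ordering column are assumptions):

# Hypothetical shape of a child-track query; NOT the real implementation.
import sqlite3

def get_child_tracks_sketch(children_table, db_path="history.db"):
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    try:
        # The table name is dynamic, so it cannot be a bound parameter;
        # it must be validated/allowlisted before being interpolated.
        rows = conn.execute(f'SELECT * FROM "{children_table}" ORDER BY position').fetchall()
        return [dict(row) for row in rows]
    finally:
        conn.close()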
@history_bp.route("/status/<task_id>", methods=["GET"])
|
||||
def get_task_status_history(task_id):
|
||||
"""API endpoint to retrieve the complete status history for a task."""
|
||||
try:
|
||||
status_history = get_status_history(task_id)
|
||||
|
||||
if not status_history:
|
||||
return jsonify({
|
||||
"task_id": task_id,
|
||||
"status_history": [],
|
||||
"message": "No status history found for this task"
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
response_data = {
|
||||
"task_id": task_id,
|
||||
"status_history": status_history,
|
||||
"total_updates": len(status_history)
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/status/{task_id} endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to retrieve status history for task {task_id}"}), 500
|
||||
|
||||
|
||||
@history_bp.route("/summary", methods=["GET"])
|
||||
def get_history_summary():
|
||||
"""API endpoint to retrieve summary statistics about download history."""
|
||||
try:
|
||||
# Get overall statistics
|
||||
all_tasks, total_tasks = get_task_history(limit=10000, offset=0) # Get a large number to count
|
||||
|
||||
# Calculate statistics
|
||||
stats = {
|
||||
"total_tasks": total_tasks,
|
||||
"by_type": {},
|
||||
"by_status": {},
|
||||
"recent_activity": {
|
||||
"last_24h": 0,
|
||||
"last_7d": 0,
|
||||
"last_30d": 0
|
||||
}
|
||||
"download_type": download.get("download_type"),
|
||||
"title": download.get("title"),
|
||||
"children_table": children_table,
|
||||
"tracks": children,
|
||||
"track_count": len(children)
|
||||
}
|
||||
|
||||
import time
|
||||
current_time = time.time()
|
||||
day_seconds = 24 * 60 * 60
|
||||
|
||||
for task in all_tasks:
|
||||
# Count by type
|
||||
task_type = task.get("task_type", "unknown")
|
||||
stats["by_type"][task_type] = stats["by_type"].get(task_type, 0) + 1
|
||||
|
||||
# Count by status
|
||||
status = task.get("status_final", "unknown")
|
||||
stats["by_status"][status] = stats["by_status"].get(status, 0) + 1
|
||||
|
||||
# Count recent activity
|
||||
if task.get("timestamp_created"):
|
||||
time_diff = current_time - task["timestamp_created"]
|
||||
if time_diff <= day_seconds:
|
||||
stats["recent_activity"]["last_24h"] += 1
|
||||
if time_diff <= 7 * day_seconds:
|
||||
stats["recent_activity"]["last_7d"] += 1
|
||||
if time_diff <= 30 * day_seconds:
|
||||
stats["recent_activity"]["last_30d"] += 1
|
||||
|
||||
return jsonify(stats)
|
||||
return Response(
|
||||
json.dumps(response_data),
|
||||
status=200,
|
||||
mimetype="application/json"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/summary endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": "Failed to retrieve history summary"}), 500
|
||||
|
||||
|
||||
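The aggregation in get_history_summary() is a single counting pass; a self-contained rendition with invented sample data:

# Stand-alone rendition of the counting logic above; sample values invented.
import time

now = time.time()
tasks = [
    {"task_type": "album", "status_final": "COMPLETED", "timestamp_created": now - 3600},
    {"task_type": "track", "status_final": "ERROR", "timestamp_created": now - 10 * 86400},
]

stats = {"by_type": {}, "by_status": {}, "recent_activity": {"last_24h": 0, "last_7d": 0, "last_30d": 0}}
for t in tasks:
    task_type = t.get("task_type", "unknown")
    status = t.get("status_final", "unknown")
    stats["by_type"][task_type] = stats["by_type"].get(task_type, 0) + 1
    stats["by_status"][status] = stats["by_status"].get(status, 0) + 1
    age = now - t["timestamp_created"]
    for key, days in (("last_24h", 1), ("last_7d", 7), ("last_30d", 30)):
        if age <= days * 86400:
            stats["recent_activity"][key] += 1

print(stats)  # e.g. {'by_type': {'album': 1, 'track': 1}, ...}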
@history_bp.route("/track/<parent_task_id>/<track_id>/mini-history", methods=["GET"])
|
||||
def get_track_mini_history_api(parent_task_id, track_id):
|
||||
"""API endpoint to retrieve comprehensive mini-history for a specific track."""
|
||||
try:
|
||||
# First, verify the parent task exists and get its children table
|
||||
parent_tasks, _ = get_task_history(
|
||||
limit=1,
|
||||
offset=0,
|
||||
filters={"task_id": parent_task_id}
|
||||
logger.error(f"Error retrieving children for task {task_id}: {e}", exc_info=True)
|
||||
return Response(
|
||||
json.dumps({"error": "Failed to retrieve download children", "details": str(e)}),
|
||||
status=500,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
|
||||
@history_bp.route("/stats", methods=["GET"])
|
||||
def get_download_stats():
|
||||
"""
|
||||
Get download statistics and summary information.
|
||||
"""
|
||||
try:
|
||||
stats = history_manager.get_download_stats()
|
||||
|
||||
if not parent_tasks:
|
||||
return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
|
||||
|
||||
parent_task = parent_tasks[0]
|
||||
children_table = parent_task.get("children_table")
|
||||
|
||||
if not children_table:
|
||||
return jsonify({"error": f"No child tracks found for parent task {parent_task_id}"}), 404
|
||||
|
||||
# Get the track mini-history
|
||||
mini_history = get_track_mini_history(track_id, children_table)
|
||||
|
||||
if not mini_history:
|
||||
return jsonify({"error": f"Track {track_id} not found in parent task {parent_task_id}"}), 404
|
||||
|
||||
return jsonify({
|
||||
"parent_task_id": parent_task_id,
|
||||
"parent_task_info": {
|
||||
"title": parent_task["title"],
|
||||
"task_type": parent_task["task_type"]
|
||||
},
|
||||
"track_mini_history": mini_history
|
||||
})
|
||||
return Response(
|
||||
json.dumps(stats),
|
||||
status=200,
|
||||
mimetype="application/json"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/track/{parent_task_id}/{track_id}/mini-history endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to retrieve mini-history for track {track_id}"}), 500
|
||||
logger.error(f"Error retrieving download stats: {e}", exc_info=True)
|
||||
return Response(
|
||||
json.dumps({"error": "Failed to retrieve download statistics", "details": str(e)}),
|
||||
status=500,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
|
||||
@history_bp.route("/tracks/<parent_task_id>/mini-histories", methods=["GET"])
|
||||
def get_all_track_mini_histories(parent_task_id):
|
||||
"""API endpoint to retrieve mini-histories for all tracks in a parent task."""
|
||||
@history_bp.route("/search", methods=["GET"])
|
||||
def search_history():
|
||||
"""
|
||||
Search download history by title or artist.
|
||||
|
||||
Query parameters:
|
||||
- q: Search query (required)
|
||||
- limit: Maximum number of results (default: 50, max: 200)
|
||||
"""
|
||||
try:
|
||||
# Verify the parent task exists and get its children table
|
||||
parent_tasks, _ = get_task_history(
|
||||
limit=1,
|
||||
offset=0,
|
||||
filters={"task_id": parent_task_id}
|
||||
)
|
||||
query = request.args.get("q")
|
||||
if not query:
|
||||
return Response(
|
||||
json.dumps({"error": "Missing required parameter: q (search query)"}),
|
||||
status=400,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
if not parent_tasks:
|
||||
return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
|
||||
limit = min(int(request.args.get("limit", 50)), 200) # Cap at 200
|
||||
|
||||
parent_task = parent_tasks[0]
|
||||
children_table = parent_task.get("children_table")
|
||||
# Search history
|
||||
results = history_manager.search_history(query, limit)
|
||||
|
||||
if not children_table:
|
||||
return jsonify({
|
||||
"parent_task_id": parent_task_id,
|
||||
"track_mini_histories": [],
|
||||
"total_count": 0,
|
||||
"message": "No child tracks found for this task"
|
||||
})
|
||||
|
||||
# Get all child tracks
|
||||
tracks = get_child_tracks(children_table)
|
||||
|
||||
# Get mini-history for each track
|
||||
track_mini_histories = []
|
||||
for track in tracks:
|
||||
mini_history = get_track_mini_history(track["track_id"], children_table)
|
||||
if mini_history:
|
||||
track_mini_histories.append(mini_history)
|
||||
|
||||
# Sort by position or track number
|
||||
track_mini_histories.sort(key=lambda x: (
|
||||
x.get("disc_number", 1),
|
||||
x.get("track_number", 0),
|
||||
x.get("position", 0)
|
||||
))
|
||||
|
||||
return jsonify({
|
||||
"parent_task_id": parent_task_id,
|
||||
"parent_task_info": {
|
||||
"title": parent_task["title"],
|
||||
"task_type": parent_task["task_type"],
|
||||
"status_final": parent_task["status_final"]
|
||||
},
|
||||
"track_mini_histories": track_mini_histories,
|
||||
"total_count": len(track_mini_histories)
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/tracks/{parent_task_id}/mini-histories endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to retrieve mini-histories for parent task {parent_task_id}"}), 500
|
||||
|
||||
|
||||
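A hedged example call for the /search handler above (host/port and the query string are assumptions):

import requests

resp = requests.get(
    "http://localhost:7171/api/history/search",  # assumed base URL
    params={"q": "daft punk", "limit": 10},
)
print(resp.json().get("result_count"))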
@history_bp.route("/track/<parent_task_id>/<track_id>/status", methods=["POST"])
|
||||
def update_track_status(parent_task_id, track_id):
|
||||
"""API endpoint to update the status of a specific track (for testing/admin purposes)."""
|
||||
try:
|
||||
# Verify the parent task exists and get its children table
|
||||
parent_tasks, _ = get_task_history(
|
||||
limit=1,
|
||||
offset=0,
|
||||
filters={"task_id": parent_task_id}
|
||||
)
|
||||
|
||||
if not parent_tasks:
|
||||
return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
|
||||
|
||||
parent_task = parent_tasks[0]
|
||||
children_table = parent_task.get("children_table")
|
||||
|
||||
if not children_table:
|
||||
return jsonify({"error": f"No child tracks found for parent task {parent_task_id}"}), 404
|
||||
|
||||
# Parse request data
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return jsonify({"error": "Request body must contain JSON data"}), 400
|
||||
|
||||
status_type = data.get("status_type")
|
||||
if not status_type:
|
||||
return jsonify({"error": "status_type is required"}), 400
|
||||
|
||||
status_data = data.get("status_data", {})
|
||||
progress_info = data.get("progress_info")
|
||||
error_info = data.get("error_info")
|
||||
|
||||
# Update the track status
|
||||
add_track_status_update(
|
||||
track_id=track_id,
|
||||
table_name=children_table,
|
||||
status_type=status_type,
|
||||
status_data=status_data,
|
||||
progress_info=progress_info,
|
||||
error_info=error_info
|
||||
)
|
||||
|
||||
# Get updated mini-history
|
||||
updated_mini_history = get_track_mini_history(track_id, children_table)
|
||||
|
||||
return jsonify({
|
||||
"message": f"Track {track_id} status updated to {status_type}",
|
||||
"parent_task_id": parent_task_id,
|
||||
"track_id": track_id,
|
||||
"updated_mini_history": updated_mini_history
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/track/{parent_task_id}/{track_id}/status endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to update status for track {track_id}"}), 500
|
||||
|
||||
|
||||
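An illustrative request for the admin/testing endpoint above; the payload keys (status_type, status_data, progress_info, error_info) come from the handler, while the URL, IDs, and values are invented:

import requests

url = "http://localhost:7171/api/history/track/PARENT_TASK_ID/TRACK_ID/status"  # IDs are placeholders
payload = {
    "status_type": "done",                       # required by the handler
    "status_data": {"note": "manually marked"},  # optional extras
    "progress_info": {"percent": 100},
    "error_info": None,
}
print(requests.post(url, json=payload).json())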
@history_bp.route("/track/<parent_task_id>/<track_id>/timeline", methods=["GET"])
|
||||
def get_track_timeline(parent_task_id, track_id):
|
||||
"""API endpoint to get a simplified timeline view of a track's status progression."""
|
||||
try:
|
||||
# Verify the parent task exists and get its children table
|
||||
parent_tasks, _ = get_task_history(
|
||||
limit=1,
|
||||
offset=0,
|
||||
filters={"task_id": parent_task_id}
|
||||
)
|
||||
|
||||
if not parent_tasks:
|
||||
return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
|
||||
|
||||
parent_task = parent_tasks[0]
|
||||
children_table = parent_task.get("children_table")
|
||||
|
||||
if not children_table:
|
||||
return jsonify({"error": f"No child tracks found for parent task {parent_task_id}"}), 404
|
||||
|
||||
# Get the track mini-history
|
||||
mini_history = get_track_mini_history(track_id, children_table)
|
||||
|
||||
if not mini_history:
|
||||
return jsonify({"error": f"Track {track_id} not found in parent task {parent_task_id}"}), 404
|
||||
|
||||
# Extract timeline and add summary statistics
|
||||
timeline = mini_history.get("timeline", [])
|
||||
|
||||
# Calculate timeline statistics
|
||||
timeline_stats = {
|
||||
"total_status_changes": len(timeline),
|
||||
"duration_seconds": mini_history.get("time_elapsed"),
|
||||
"calculated_duration": mini_history.get("calculated_duration"),
|
||||
"retry_count": mini_history.get("retry_count", 0),
|
||||
"final_status": mini_history.get("status_final"),
|
||||
"quality_achieved": mini_history.get("quality_achieved"),
|
||||
"file_size": mini_history.get("file_size"),
|
||||
"download_path": mini_history.get("download_path")
|
||||
response_data = {
|
||||
"query": query,
|
||||
"results": results,
|
||||
"result_count": len(results),
|
||||
"limit": limit
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
"parent_task_id": parent_task_id,
|
||||
"track_id": track_id,
|
||||
"track_info": {
|
||||
"title": mini_history.get("title"),
|
||||
"disc_number": mini_history.get("disc_number"),
|
||||
"track_number": mini_history.get("track_number"),
|
||||
"position": mini_history.get("position"),
|
||||
"duration_ms": mini_history.get("duration_ms")
|
||||
},
|
||||
"timeline": timeline,
|
||||
"timeline_stats": timeline_stats
|
||||
})
|
||||
return Response(
|
||||
json.dumps(response_data),
|
||||
status=200,
|
||||
mimetype="application/json"
|
||||
)
|
||||
|
||||
except ValueError as e:
|
||||
return Response(
|
||||
json.dumps({"error": f"Invalid parameter value: {str(e)}"}),
|
||||
status=400,
|
||||
mimetype="application/json",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in /api/history/track/{parent_task_id}/{track_id}/timeline endpoint: {e}", exc_info=True)
|
||||
return jsonify({"error": f"Failed to retrieve timeline for track {track_id}"}), 500
|
||||
|
||||
|
||||
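For reference, an invented example of the timeline_stats object assembled above (every key appears in the handler; all values are made up):

timeline_stats_example = {
    "total_status_changes": 4,
    "duration_seconds": 12.8,
    "calculated_duration": 12.8,
    "retry_count": 1,
    "final_status": "COMPLETED",
    "quality_achieved": "FLAC",
    "file_size": 31457280,  # assumed to be bytes
    "download_path": "/music/Artist/Album/01 - Track.flac",
}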
# Legacy endpoint for backward compatibility
@history_bp.route("/legacy", methods=["GET"])
def get_download_history_legacy():
    """Legacy API endpoint using the old history system (for backward compatibility)."""
    try:
        limit = request.args.get("limit", 25, type=int)
        offset = request.args.get("offset", 0, type=int)
        sort_by = request.args.get("sort_by", "timestamp_completed")
        sort_order = request.args.get("sort_order", "DESC")

        filters = {}

        # Status filter
        status_filter = request.args.get("status_final")
        if status_filter:
            filters["status_final"] = status_filter

        # Download type filter
        type_filter = request.args.get("download_type")
        if type_filter:
            filters["download_type"] = type_filter

        # Parent task filter
        parent_task_filter = request.args.get("parent_task_id")
        if parent_task_filter:
            filters["parent_task_id"] = parent_task_filter

        entries, total_count = get_history_entries(
            limit, offset, sort_by, sort_order, filters
        logger.error(f"Error searching download history: {e}", exc_info=True)
        return Response(
            json.dumps({"error": "Failed to search download history", "details": str(e)}),
            status=500,
            mimetype="application/json",
        )

        return jsonify({
            "entries": entries,
            "total_count": total_count,
            "limit": limit,
            "offset": offset,
            "note": "This is the legacy endpoint. Consider migrating to /api/history"
        })

@history_bp.route("/recent", methods=["GET"])
def get_recent_downloads():
    """
    Get most recent downloads.

    Query parameters:
    - limit: Maximum number of results (default: 20, max: 100)
    """
    try:
        limit = min(int(request.args.get("limit", 20)), 100)  # Cap at 100

        recent = history_manager.get_recent_downloads(limit)

        response_data = {
            "downloads": recent,
            "count": len(recent),
            "limit": limit
        }

        return Response(
            json.dumps(response_data),
            status=200,
            mimetype="application/json"
        )

    except ValueError as e:
        return Response(
            json.dumps({"error": f"Invalid parameter value: {str(e)}"}),
            status=400,
            mimetype="application/json",
        )
    except Exception as e:
        logger.error(f"Error retrieving recent downloads: {e}", exc_info=True)
        return Response(
            json.dumps({"error": "Failed to retrieve recent downloads", "details": str(e)}),
            status=500,
            mimetype="application/json",
        )


@history_bp.route("/failed", methods=["GET"])
def get_failed_downloads():
    """
    Get failed downloads.

    Query parameters:
    - limit: Maximum number of results (default: 50, max: 200)
    """
    try:
        limit = min(int(request.args.get("limit", 50)), 200)  # Cap at 200

        failed = history_manager.get_failed_downloads(limit)

        response_data = {
            "downloads": failed,
            "count": len(failed),
            "limit": limit
        }

        return Response(
            json.dumps(response_data),
            status=200,
            mimetype="application/json"
        )

    except ValueError as e:
        return Response(
            json.dumps({"error": f"Invalid parameter value: {str(e)}"}),
            status=400,
            mimetype="application/json",
        )
    except Exception as e:
        logger.error(f"Error retrieving failed downloads: {e}", exc_info=True)
        return Response(
            json.dumps({"error": "Failed to retrieve failed downloads", "details": str(e)}),
            status=500,
            mimetype="application/json",
        )


@history_bp.route("/cleanup", methods=["POST"])
def cleanup_old_history():
    """
    Clean up old download history.

    JSON body:
    - days_old: Number of days old to keep (default: 30)
    """
    try:
        data = request.get_json() or {}
        days_old = data.get("days_old", 30)

        if not isinstance(days_old, int) or days_old <= 0:
            return Response(
                json.dumps({"error": "days_old must be a positive integer"}),
                status=400,
                mimetype="application/json",
            )

        deleted_count = history_manager.clear_old_history(days_old)

        response_data = {
            "message": f"Successfully cleaned up old download history",
            "deleted_records": deleted_count,
            "days_old": days_old
        }

        return Response(
            json.dumps(response_data),
            status=200,
            mimetype="application/json"
        )

    except Exception as e:
        logger.error(f"Error in /api/history/legacy endpoint: {e}", exc_info=True)
        return jsonify({"error": "Failed to retrieve download history"}), 500
        logger.error(f"Error cleaning up old history: {e}", exc_info=True)
        return Response(
            json.dumps({"error": "Failed to cleanup old history", "details": str(e)}),
            status=500,
            mimetype="application/json",
        )
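An illustrative call for the /cleanup handler above; days_old must be a positive integer per the validation, and the base URL is an assumption:

import requests

resp = requests.post(
    "http://localhost:7171/api/history/cleanup",  # assumed base URL
    json={"days_old": 90},
)
print(resp.json().get("deleted_records"))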
@@ -28,8 +28,8 @@ from routes.utils.watch.db import (
    add_or_update_album_for_artist,
)

# Import history manager function
from .history_manager import add_entry_to_history, add_tracks_from_summary
# Import for download history management
from routes.utils.history_manager import history_manager

# Create Redis connection for storing task data that's not part of the Celery result backend
import redis
@@ -217,131 +217,6 @@ def get_all_tasks():
        return []


# --- History Logging Helper ---
def _log_task_to_history(task_id, final_status_str, error_msg=None):
    """Helper function to gather task data and log it to the history database."""
    try:
        task_info = get_task_info(task_id)
        last_status_obj = get_last_task_status(task_id)

        if not task_info:
            logger.warning(
                f"History: No task_info found for task_id {task_id}. Cannot log to history."
            )
            return

        # Determine service_used and quality_profile
        main_service_name = str(
            task_info.get("main", "Unknown")
        ).capitalize()  # e.g. Spotify, Deezer from their respective .env values
        fallback_service_name = str(task_info.get("fallback", "")).capitalize()

        service_used_str = main_service_name
        if (
            task_info.get("fallback") and fallback_service_name
        ):  # Check if fallback was configured
            # Try to infer actual service used if possible, otherwise show configured.
            # This part is a placeholder for more accurate determination if deezspot gives explicit feedback.
            # For now, we assume 'main' was used unless an error hints otherwise.
            # A more robust solution would involve deezspot callback providing this.
            service_used_str = (
                f"{main_service_name} (Fallback: {fallback_service_name})"
            )
            # If error message indicates fallback, we could try to parse it.
            # e.g. if error_msg and "fallback" in error_msg.lower(): service_used_str = f"{fallback_service_name} (Used Fallback)"

        # Determine quality profile (primarily from the 'quality' field)
        # 'quality' usually holds the primary service's quality (e.g., spotifyQuality, deezerQuality)
        quality_profile_str = str(task_info.get("quality", "N/A"))

        # Get convertTo and bitrate
        convert_to_str = str(
            task_info.get("convertTo", "")
        )  # Empty string if None or not present
        bitrate_str = str(
            task_info.get("bitrate", "")
        )  # Empty string if None or not present

        # Extract Spotify ID from item URL if possible
        spotify_id = None
        item_url = task_info.get("url", "")
        if item_url:
            try:
                spotify_id = item_url.split("/")[-1]
                # Further validation if it looks like a Spotify ID (e.g., 22 chars, alphanumeric)
                if not (spotify_id and len(spotify_id) == 22 and spotify_id.isalnum()):
                    spotify_id = None  # Reset if not a valid-looking ID
            except Exception:
                spotify_id = None  # Ignore errors in parsing

        # Check for the new summary object in the last status
        summary_obj = last_status_obj.get("summary") if last_status_obj else None

        history_entry = {
            "task_id": task_id,
            "download_type": task_info.get("download_type"),
            "item_name": task_info.get("name"),
            "item_artist": task_info.get("artist"),
            "item_album": task_info.get(
                "album",
                task_info.get("name")
                if task_info.get("download_type") == "album"
                else None,
            ),
            "item_url": item_url,
            "spotify_id": spotify_id,
            "status_final": final_status_str,
            "error_message": error_msg
            if error_msg
            else (last_status_obj.get("error") if last_status_obj else None),
            "timestamp_added": task_info.get("created_at", time.time()),
            "timestamp_completed": last_status_obj.get("timestamp", time.time())
            if last_status_obj
            else time.time(),
            "original_request_json": json.dumps(task_info.get("original_request", {})),
            "last_status_obj_json": json.dumps(
                last_status_obj if last_status_obj else {}
            ),
            "service_used": service_used_str,
            "quality_profile": quality_profile_str,
            "convert_to": convert_to_str
            if convert_to_str
            else None,  # Store None if empty string
            "bitrate": bitrate_str
            if bitrate_str
            else None,  # Store None if empty string
            "summary_json": json.dumps(summary_obj) if summary_obj else None,
            "total_successful": summary_obj.get("total_successful")
            if summary_obj
            else None,
            "total_skipped": summary_obj.get("total_skipped") if summary_obj else None,
            "total_failed": summary_obj.get("total_failed") if summary_obj else None,
        }

        # Add the main history entry for the task
        add_entry_to_history(history_entry)

        # Process track-level entries from summary if this is a multi-track download
        if summary_obj and task_info.get("download_type") in ["album", "playlist"]:
            tracks_processed = add_tracks_from_summary(
                summary_data=summary_obj,
                parent_task_id=task_id,
                parent_history_data=history_entry
            )
            logger.info(
                f"Track-level history: Processed {tracks_processed['successful']} successful, "
                f"{tracks_processed['skipped']} skipped, and {tracks_processed['failed']} failed tracks for task {task_id}"
            )

    except Exception as e:
        logger.error(
            f"History: Error preparing or logging history for task {task_id}: {e}",
            exc_info=True,
        )
# --- End History Logging Helper ---
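The Spotify-ID extraction in the helper above reduces to a last-path-segment check; a standalone rendition (the sample URLs are illustrative, not from the commit):

def extract_spotify_id(item_url):
    candidate = item_url.split("/")[-1]
    # Spotify IDs are 22 alphanumeric (base62) characters
    return candidate if len(candidate) == 22 and candidate.isalnum() else None

print(extract_spotify_id("https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC"))  # prints the ID
print(extract_spotify_id("https://example.com/not-an-id"))  # prints None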
def cancel_task(task_id):
    """Cancel a task by its ID"""
    try:
@@ -358,9 +233,6 @@ def cancel_task(task_id):
        # Try to revoke the Celery task if it hasn't started yet
        celery_app.control.revoke(task_id, terminate=True, signal="SIGTERM")

        # Log cancellation to history
        _log_task_to_history(task_id, "CANCELLED", "Task cancelled by user")

        # Schedule deletion of task data after 30 seconds
        delayed_delete_task_data.apply_async(
            args=[task_id, "Task cancelled by user and auto-cleaned."], countdown=30
@@ -592,8 +464,12 @@ class ProgressTrackingTask(Task):
        if "timestamp" not in progress_data:
            progress_data["timestamp"] = time.time()

        status = progress_data.get("status", "unknown")
        # Extract status from status_info (deezspot callback format)
        status_info = progress_data.get("status_info", {})
        status = status_info.get("status", progress_data.get("status", "unknown"))
        task_info = get_task_info(task_id)

        logger.debug(f"Task {task_id}: Extracted status: '{status}' from callback")

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(
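Invented sketches of the two callback shapes the extraction above accepts: the newer deezspot format nests the status under status_info, while the legacy format carries a top-level status, hence the two-step lookup with a fallback:

new_style = {"status_info": {"status": "downloading"}, "timestamp": 1700000000.0}
old_style = {"status": "downloading", "timestamp": 1700000000.0}

for progress_data in (new_style, old_style):
    status_info = progress_data.get("status_info", {})
    status = status_info.get("status", progress_data.get("status", "unknown"))
    assert status == "downloading"  # both shapes resolve to the same status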
@@ -609,12 +485,18 @@ class ProgressTrackingTask(Task):
        elif status in ["real_time", "track_progress"]:
            self._handle_real_time(task_id, progress_data)
        elif status == "skipped":
            # Re-fetch task_info to ensure we have the latest children_table info
            task_info = get_task_info(task_id)
            self._handle_skipped(task_id, progress_data, task_info)
        elif status == "retrying":
            self._handle_retrying(task_id, progress_data, task_info)
        elif status == "error":
            # Re-fetch task_info to ensure we have the latest children_table info
            task_info = get_task_info(task_id)
            self._handle_error(task_id, progress_data, task_info)
        elif status == "done":
            # Re-fetch task_info to ensure we have the latest children_table info
            task_info = get_task_info(task_id)
            self._handle_done(task_id, progress_data, task_info)
        else:
            logger.info(
@@ -627,9 +509,46 @@ class ProgressTrackingTask(Task):
    def _handle_initializing(self, task_id, data, task_info):
        """Handle initializing status from deezspot"""
        logger.info(f"Task {task_id} initializing...")

        # Initializing object is now very basic, mainly for acknowledging the start.
        # More detailed info comes with 'progress' or 'downloading' states.
        data["status"] = ProgressState.INITIALIZING

        # Store initial history entry for download start
        try:
            # Check for album/playlist FIRST since their callbacks contain both parent and track info
            if "album" in data:
                # Album download - create children table and store name in task info
                logger.info(f"Task {task_id}: Creating album children table")
                children_table = history_manager.store_album_history(data, task_id, "in_progress")
                if children_table:
                    task_info["children_table"] = children_table
                    store_task_info(task_id, task_info)
                    logger.info(f"Task {task_id}: Created and stored children table '{children_table}' in task info")
                else:
                    logger.error(f"Task {task_id}: Failed to create album children table")
            elif "playlist" in data:
                # Playlist download - create children table and store name in task info
                logger.info(f"Task {task_id}: Creating playlist children table")
                children_table = history_manager.store_playlist_history(data, task_id, "in_progress")
                if children_table:
                    task_info["children_table"] = children_table
                    store_task_info(task_id, task_info)
                    logger.info(f"Task {task_id}: Created and stored children table '{children_table}' in task info")
                else:
                    logger.error(f"Task {task_id}: Failed to create playlist children table")
            elif "track" in data:
                # Individual track download - check if it's part of an album/playlist
                children_table = task_info.get("children_table")
                if children_table:
                    # Track is part of album/playlist - don't store in main table during initialization
                    logger.info(f"Task {task_id}: Skipping track initialization storage (part of album/playlist, children table: {children_table})")
                else:
                    # Individual track download - store in main table
                    logger.info(f"Task {task_id}: Storing individual track history (initializing)")
                    history_manager.store_track_history(data, task_id, "in_progress")
        except Exception as e:
            logger.error(f"Failed to store initial history for task {task_id}: {e}", exc_info=True)

    def _handle_downloading(self, task_id, data, task_info):
        """Handle downloading status from deezspot"""
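Invented payload sketches showing why "album"/"playlist" are tested before "track" in the handler above: a parent-level callback can carry track info as well, and must win the dispatch:

album_init = {
    "album": {"title": "Some Album"},   # parent info -> children table is created
    "track": {"title": "Track 1"},      # may be present alongside the parent info
}
single_track_init = {
    "track": {"title": "Standalone Track"},  # no parent -> stored in the main table
}

for data in (album_init, single_track_init):
    if "album" in data or "playlist" in data:
        print("parent-level entry")
    elif "track" in data:
        print("individual track entry")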
@@ -725,7 +644,7 @@ class ProgressTrackingTask(Task):
        )

        # Log at debug level
        logger.debug(f"Task {task_id} track progress: {title} by {artist}: {percent}%")
        logger.debug(f"Task {task_id} track progress: {track_name} by {artist}: {percent}%")

        # Set appropriate status
        # data["status"] = (
@@ -736,7 +655,25 @@ class ProgressTrackingTask(Task):
    def _handle_skipped(self, task_id, data, task_info):
        """Handle skipped status from deezspot"""
        # Extract track info

        # Store skipped history for deezspot callback format
        try:
            if "track" in data:
                # Individual track skipped - check if we should use children table
                children_table = task_info.get("children_table")
                logger.debug(f"Task {task_id}: Skipped track, children_table = '{children_table}'")
                if children_table:
                    # Part of album/playlist - store progressively in children table
                    logger.info(f"Task {task_id}: Storing skipped track in children table '{children_table}' (progressive)")
                    history_manager.store_track_history(data, task_id, "skipped", children_table)
                else:
                    # Individual track download - store in main table
                    logger.info(f"Task {task_id}: Storing skipped track in main table (individual download)")
                    history_manager.store_track_history(data, task_id, "skipped")
        except Exception as e:
            logger.error(f"Failed to store skipped history for task {task_id}: {e}")

        # Extract track info (legacy format support)
        title = data.get("song", "Unknown")
        artist = data.get("artist", "Unknown")
        reason = data.get("reason", "Unknown reason")
@@ -809,7 +746,34 @@ class ProgressTrackingTask(Task):
    def _handle_error(self, task_id, data, task_info):
        """Handle error status from deezspot"""
        # Extract error info

        # Store error history for deezspot callback format
        try:
            # Check for album/playlist FIRST since their callbacks contain both parent and track info
            if "album" in data:
                # Album failed - store in main table
                logger.info(f"Task {task_id}: Storing album history (failed)")
                history_manager.store_album_history(data, task_id, "failed")
            elif "playlist" in data:
                # Playlist failed - store in main table
                logger.info(f"Task {task_id}: Storing playlist history (failed)")
                history_manager.store_playlist_history(data, task_id, "failed")
            elif "track" in data:
                # Individual track failed - check if we should use children table
                children_table = task_info.get("children_table")
                logger.debug(f"Task {task_id}: Failed track, children_table = '{children_table}'")
                if children_table:
                    # Part of album/playlist - store progressively in children table
                    logger.info(f"Task {task_id}: Storing failed track in children table '{children_table}' (progressive)")
                    history_manager.store_track_history(data, task_id, "failed", children_table)
                else:
                    # Individual track download - store in main table
                    logger.info(f"Task {task_id}: Storing failed track in main table (individual download)")
                    history_manager.store_track_history(data, task_id, "failed")
        except Exception as e:
            logger.error(f"Failed to store error history for task {task_id}: {e}")

        # Extract error info (legacy format support)
        message = data.get("message", "Unknown error")

        # Log error
@@ -826,7 +790,34 @@ class ProgressTrackingTask(Task):
    def _handle_done(self, task_id, data, task_info):
        """Handle done status from deezspot"""
        # Extract data

        # Store completion history for deezspot callback format
        try:
            # Check for album/playlist FIRST since their callbacks contain both parent and track info
            if "album" in data:
                # Album completion with summary - store in main table
                logger.info(f"Task {task_id}: Storing album history (completed)")
                history_manager.store_album_history(data, task_id, "completed")
            elif "playlist" in data:
                # Playlist completion with summary - store in main table
                logger.info(f"Task {task_id}: Storing playlist history (completed)")
                history_manager.store_playlist_history(data, task_id, "completed")
            elif "track" in data:
                # Individual track completion - check if we should use children table
                children_table = task_info.get("children_table")
                logger.debug(f"Task {task_id}: Completed track, children_table = '{children_table}'")
                if children_table:
                    # Part of album/playlist - store progressively in children table
                    logger.info(f"Task {task_id}: Storing completed track in children table '{children_table}' (progressive)")
                    history_manager.store_track_history(data, task_id, "completed", children_table)
                else:
                    # Individual track download - store in main table
                    logger.info(f"Task {task_id}: Storing completed track in main table (individual download)")
                    history_manager.store_track_history(data, task_id, "completed")
        except Exception as e:
            logger.error(f"Failed to store completion history for task {task_id}: {e}", exc_info=True)

        # Extract data (legacy format support)
        content_type = data.get("type", "").lower()
        album = data.get("album", "")
        artist = data.get("artist", "")
@@ -1025,9 +1016,6 @@ def task_postrun_handler(
):
    """Signal handler when a task finishes"""
    try:
        # Define download task names
        download_task_names = ["download_track", "download_album", "download_playlist"]

        last_status_for_history = get_last_task_status(task_id)
        if last_status_for_history and last_status_for_history.get("status") in [
            ProgressState.COMPLETE,
@@ -1041,14 +1029,8 @@ def task_postrun_handler(
            and last_status_for_history.get("status") != ProgressState.CANCELLED
        ):
            logger.info(
                f"Task {task_id} was REVOKED (likely cancelled), logging to history."
                f"Task {task_id} was REVOKED (likely cancelled)."
            )
            if (
                task and task.name in download_task_names
            ):  # Check if it's a download task
                _log_task_to_history(
                    task_id, "CANCELLED", "Task was revoked/cancelled."
                )
            # return # Let status update proceed if necessary

        task_info = get_task_info(task_id)
@@ -1065,10 +1047,6 @@ def task_postrun_handler(
            logger.info(
                f"Task {task_id} completed successfully: {task_info.get('name', 'Unknown')}"
            )
            if (
                task and task.name in download_task_names
            ):  # Check if it's a download task
                _log_task_to_history(task_id, "COMPLETED")

            if (
                task_info.get("download_type") == "track"
@@ -1189,10 +1167,6 @@ def task_failure_handler(
        )

        logger.error(f"Task {task_id} failed: {str(exception)}")
        if (
            sender and sender.name in download_task_names
        ):  # Check if it's a download task
            _log_task_to_history(task_id, "ERROR", str(exception))

        if can_retry:
            logger.info(f"Task {task_id} can be retried ({retry_count}/{max_retries})")
@@ -1552,12 +1526,6 @@ def delete_task_data_and_log(task_id, reason="Task data deleted"):
            "timestamp": time.time(),
        },
    )
    # History logging for COMPLETION, CANCELLATION, or definitive ERROR should have occurred when those states were first reached.
    # If this cleanup is for a task that *wasn't* in such a state (e.g. stale, still processing), log it now.
    if final_redis_status == ProgressState.ERROR_AUTO_CLEANED:
        _log_task_to_history(
            task_id, "ERROR", error_message_for_status
        )  # Or a more specific status if desired

    # Delete Redis keys associated with the task
    redis_client.delete(f"task:{task_id}:info")
File diff suppressed because it is too large
File diff suppressed because it is too large