diff --git a/routes/history.py b/routes/history.py
index e34a328..7b8ee84 100644
--- a/routes/history.py
+++ b/routes/history.py
@@ -1,21 +1,620 @@
 from flask import Blueprint, jsonify, request
-from routes.utils.history_manager import get_history_entries
+from routes.utils.history_manager import (
+    get_task_history,
+    get_child_tracks,
+    get_status_history,
+    get_track_mini_history,
+    add_track_status_update,
+    # Legacy compatibility
+    get_history_entries
+)
 import logging
 
 logger = logging.getLogger(__name__)
 
 history_bp = Blueprint("history", __name__, url_prefix="/api/history")
 
+"""
+Enhanced History API Endpoints:
+
+Main History Endpoints:
+- GET /api/history - Get paginated download history with filtering
+- GET /api/history/task/<task_id> - Get detailed task information
+- GET /api/history/summary - Get summary statistics
+
+Track Management Endpoints:
+- GET /api/history/tracks/<parent_task_id> - Get all tracks for a parent task
+  ?include_mini_histories=true - Include comprehensive mini-histories for each track
+- GET /api/history/tracks/<parent_task_id>/mini-histories - Get mini-histories for all tracks
+
+Individual Track Endpoints:
+- GET /api/history/track/<parent_task_id>/<track_id>/mini-history - Get comprehensive mini-history for a specific track
+- GET /api/history/track/<parent_task_id>/<track_id>/timeline - Get simplified timeline view
+- POST /api/history/track/<parent_task_id>/<track_id>/status - Update track status (admin/testing)
+
+Status & Legacy:
+- GET /api/history/status/<task_id> - Get complete status history for a task
+- GET /api/history/legacy - Legacy endpoint for backward compatibility
+
+Mini-History Features:
+- Complete status progression timeline with timestamps
+- Progress tracking and retry information
+- File size, quality, and download path details
+- Error information and duration statistics
+- Human-readable timestamps and calculated metrics
+"""
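As a quick orientation to the endpoint list above, a client session against the main listing route might look like the following sketch (the host/port and a running server are assumptions, not part of this diff):

```python
import requests  # any HTTP client works; requests is used here for brevity

BASE_URL = "http://localhost:7171/api/history"  # assumed host/port

# Page through completed albums, newest first, with child tracks inlined
params = {
    "limit": 25,
    "offset": 0,
    "task_type": "album",
    "status_final": "COMPLETED",
    "sort_by": "timestamp_updated",
    "sort_order": "DESC",
    "include_children": "true",
}
resp = requests.get(BASE_URL, params=params, timeout=10)
resp.raise_for_status()
page = resp.json()
print(page["total_count"], "matching tasks")
for entry in page["entries"]:
    print(entry["task_id"], entry["title"], entry["status_final"])
```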
+
 
 @history_bp.route("", methods=["GET"])
 def get_download_history():
     """API endpoint to retrieve download history with pagination, sorting, and filtering."""
+    try:
+        limit = request.args.get("limit", 25, type=int)
+        offset = request.args.get("offset", 0, type=int)
+        sort_by = request.args.get("sort_by", "timestamp_updated")
+        sort_order = request.args.get("sort_order", "DESC")
+        include_children = request.args.get("include_children", "false").lower() == "true"
+
+        # Create filters dictionary for various filter options
+        filters = {}
+
+        # Status filter - support both old and new field names
+        status_filter = request.args.get("status_final")
+        if status_filter:
+            filters["status_final"] = status_filter
+
+        # Task type filter (renamed from download_type)
+        type_filter = request.args.get("task_type") or request.args.get("download_type")
+        if type_filter:
+            filters["task_type"] = type_filter
+
+        # Parent task filter
+        parent_task_filter = request.args.get("parent_task_id")
+        if parent_task_filter:
+            filters["parent_task_id"] = parent_task_filter
+
+        # Show/hide child tracks (tasks with parent_task_id)
+        hide_child_tracks = request.args.get("hide_child_tracks", "false").lower() == "true"
+        if hide_child_tracks:
+            filters["parent_task_id"] = None  # Only show parent entries or standalone tracks
+
+        # Show only child tracks
+        only_child_tracks = request.args.get("only_child_tracks", "false").lower() == "true"
+        if only_child_tracks and not parent_task_filter:
+            # This would require a NOT NULL filter, but we'll handle it differently
+            # by excluding tasks that don't have a parent_task_id
+            pass  # We'll implement this in the query logic
+
+        # Additional filters
+        current_status_filter = request.args.get("status_current")
+        if current_status_filter:
+            filters["status_current"] = current_status_filter
+
+        tasks, total_count = get_task_history(
+            limit=limit,
+            offset=offset,
+            sort_by=sort_by,
+            sort_order=sort_order,
+            filters=filters,
+            include_children=include_children
+        )
+
+        # Transform data for backward compatibility and add computed fields
+        entries = []
+        for task in tasks:
+            entry = {
+                # Core fields
+                "task_id": task["task_id"],
+                "task_type": task["task_type"],
+                "title": task["title"],
+                "status_current": task["status_current"],
+                "status_final": task["status_final"],
+                "timestamp_created": task["timestamp_created"],
+                "timestamp_updated": task["timestamp_updated"],
+                "timestamp_completed": task["timestamp_completed"],
+                "parent_task_id": task["parent_task_id"],
+                "position": task["position"],
+
+                # Legacy compatibility fields
+                "download_type": task["task_type"],
+                "item_name": task["title"],
+                "timestamp_added": task["timestamp_created"],
+
+                # Rich data fields (parsed JSON)
+                "artists": task.get("artists", []),
+                "ids": task.get("ids", {}),
+                "metadata": task.get("metadata", {}),
+                "config": task.get("config", {}),
+                "error_info": task.get("error_info", {}),
+                "progress": task.get("progress", {}),
+                "summary": task.get("summary", {}),
+
+                # Child information
+                "children_table": task["children_table"],
+                "has_children": bool(task["children_table"]),
+                "child_tracks": task.get("child_tracks", []) if include_children else []
+            }
+
+            # Extract commonly used fields for easier access
+            if entry["artists"]:
+                entry["artist_names"] = [artist.get("name", "") for artist in entry["artists"]]
+                entry["item_artist"] = ", ".join(entry["artist_names"])  # Legacy compatibility
+
+            if entry["config"]:
+                entry["service_used"] = entry["config"].get("service_used")
+                entry["quality_profile"] = entry["config"].get("quality_profile")
+                entry["convert_to"] = entry["config"].get("convert_to")
+                entry["bitrate"] = entry["config"].get("bitrate")
+
+            if entry["error_info"]:
+                entry["error_message"] = entry["error_info"].get("message")  # Legacy compatibility
+
+            # Extract album info from metadata if available
+            if entry["metadata"] and "album" in entry["metadata"]:
+                entry["item_album"] = entry["metadata"]["album"].get("title")
+
+            # Child track summary
+            if entry["child_tracks"]:
+                entry["child_track_count"] = len(entry["child_tracks"])
+                entry["child_track_summary"] = {
+                    "completed": len([t for t in entry["child_tracks"] if t.get("status_final") == "COMPLETED"]),
+                    "error": len([t for t in entry["child_tracks"] if t.get("status_final") == "ERROR"]),
+                    "skipped": len([t for t in entry["child_tracks"] if t.get("status_final") == "SKIPPED"])
+                }
+
+            entries.append(entry)
+
+        return jsonify({
+            "entries": entries,
+            "total_count": total_count,
+            "limit": limit,
+            "offset": offset,
+            "include_children": include_children
+        })
+
+    except Exception as e:
+        logger.error(f"Error in /api/history endpoint: {e}", exc_info=True)
+        return jsonify({"error": "Failed to retrieve download history"}), 500
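The `child_track_summary` field built in the handler above is a plain status tally over the child tracks. The same aggregation in isolation, with hypothetical sample data:

```python
from collections import Counter

# Hypothetical child tracks as returned with include_children=true
child_tracks = [
    {"track_id": "t1", "status_final": "COMPLETED"},
    {"track_id": "t2", "status_final": "COMPLETED"},
    {"track_id": "t3", "status_final": "SKIPPED"},
    {"track_id": "t4", "status_final": "ERROR"},
]

counts = Counter(t.get("status_final") for t in child_tracks)
child_track_summary = {
    "completed": counts.get("COMPLETED", 0),
    "error": counts.get("ERROR", 0),
    "skipped": counts.get("SKIPPED", 0),
}
print(child_track_summary)  # {'completed': 2, 'error': 1, 'skipped': 1}
```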
+
+
+@history_bp.route("/task/<task_id>", methods=["GET"])
+def get_task_details(task_id):
+    """API endpoint to retrieve detailed information about a specific task."""
+    try:
+        include_children = request.args.get("include_children", "true").lower() == "true"
+        include_status_history = request.args.get("include_status_history", "false").lower() == "true"
+
+        # Get the task
+        tasks, _ = get_task_history(
+            limit=1,
+            offset=0,
+            filters={"task_id": task_id},
+            include_children=include_children
+        )
+
+        if not tasks:
+            return jsonify({"error": f"Task {task_id} not found"}), 404
+
+        task = tasks[0]
+
+        # Add status history if requested
+        if include_status_history:
+            task["status_history"] = get_status_history(task_id)
+
+        return jsonify({
+            "task": task,
+            "include_children": include_children,
+            "include_status_history": include_status_history
+        })
+
+    except Exception as e:
+        logger.error(f"Error in /api/history/task/{task_id} endpoint: {e}", exc_info=True)
+        return jsonify({"error": f"Failed to retrieve task {task_id}"}), 500
+
+
+@history_bp.route("/tracks/<parent_task_id>", methods=["GET"])
+def get_tracks_for_parent(parent_task_id):
+    """API endpoint to retrieve all track entries for a specific parent task."""
+    try:
+        # First, verify the parent task exists and get its children table
+        parent_tasks, _ = get_task_history(
+            limit=1,
+            offset=0,
+            filters={"task_id": parent_task_id}
+        )
+
+        if not parent_tasks:
+            return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
+
+        parent_task = parent_tasks[0]
+        children_table = parent_task.get("children_table")
+
+        if not children_table:
+            return jsonify({
+                "parent_task_id": parent_task_id,
+                "tracks": [],
+                "total_count": 0,
+                "message": "No child tracks found for this task"
+            })
+
+        # Get tracks from the child table
+        tracks = get_child_tracks(children_table)
+
+        # Check if mini-histories should be included
+        include_mini_histories = request.args.get("include_mini_histories", "false").lower() == "true"
+
+        # Sort tracks if requested (None-safe defaults)
+        sort_by = request.args.get("sort_by", "position")
+        sort_order = request.args.get("sort_order", "ASC")
+
+        if sort_by == "position":
+            tracks.sort(key=lambda x: x.get("position") or 0, reverse=(sort_order.upper() == "DESC"))
+        elif sort_by == "timestamp_completed":
+            tracks.sort(key=lambda x: x.get("timestamp_completed", 0) or 0, reverse=(sort_order.upper() == "DESC"))
+
+        # Transform tracks for easier consumption
+        transformed_tracks = []
+        for track in tracks:
+            track_info = {
+                "track_id": track["track_id"],
+                "parent_task_id": track["parent_task_id"],
+                "position": track["position"],
+                "status_current": track["status_current"],
+                "status_final": track["status_final"],
+                "timestamp_created": track["timestamp_created"],
+                "timestamp_completed": track["timestamp_completed"],
+                "error_info": track.get("error_info"),
+                "config": track.get("config"),
+            }
+
+            # Parse track data
+            if track["track_data"]:
+                track_data = track["track_data"]
+                track_info.update({
+                    "title": track_data.get("title"),
+                    "artists": track_data.get("artists", []),
+                    "album": track_data.get("album", {}),
+                    "duration_ms": track_data.get("duration_ms"),
+                    "track_number": track_data.get("track_number"),
+                    "disc_number": track_data.get("disc_number"),
+                    "explicit": track_data.get("explicit"),
+                    "ids": track_data.get("ids", {})
+                })
+
+                # Extract artist names for easier display
+                if track_info["artists"]:
+                    track_info["artist_names"] = [artist.get("name", "") for artist in track_info["artists"]]
+
+            # Include mini-history if requested
+            if include_mini_histories:
+                mini_history = get_track_mini_history(track["track_id"], children_table)
+                if mini_history:
+                    track_info["mini_history"] = mini_history
+                    # Add quick access to timeline and key metrics
+                    track_info["timeline"] = mini_history.get("timeline", [])
+                    track_info["retry_count"] = mini_history.get("retry_count", 0)
+                    track_info["time_elapsed"] = mini_history.get("time_elapsed")
+                    track_info["quality_achieved"] = mini_history.get("quality_achieved")
+                    track_info["file_size"] = mini_history.get("file_size")
+                    track_info["download_path"] = mini_history.get("download_path")
+
+            transformed_tracks.append(track_info)
+
+        return jsonify({
+            "parent_task_id": parent_task_id,
+            "parent_task_info": {
+                "title": parent_task["title"],
+                "task_type": parent_task["task_type"],
+                "status_final": parent_task["status_final"]
+            },
+            "tracks": transformed_tracks,
+            "total_count": len(transformed_tracks),
+            "include_mini_histories": include_mini_histories
+        })
+
+    except Exception as e:
+        logger.error(f"Error in /api/history/tracks/{parent_task_id} endpoint: {e}", exc_info=True)
+        return jsonify({"error": f"Failed to retrieve tracks for parent task {parent_task_id}"}), 500
+
+
+@history_bp.route("/status/<task_id>", methods=["GET"])
+def get_task_status_history(task_id):
+    """API endpoint to retrieve the complete status history for a task."""
+    try:
+        status_history = get_status_history(task_id)
+
+        if not status_history:
+            return jsonify({
+                "task_id": task_id,
+                "status_history": [],
+                "message": "No status history found for this task"
+            })
+
+        return jsonify({
+            "task_id": task_id,
+            "status_history": status_history,
+            "total_updates": len(status_history)
+        })
+
+    except Exception as e:
+        logger.error(f"Error in /api/history/status/{task_id} endpoint: {e}", exc_info=True)
+        return jsonify({"error": f"Failed to retrieve status history for task {task_id}"}), 500
+
+
+@history_bp.route("/summary", methods=["GET"])
+def get_history_summary():
+    """API endpoint to retrieve summary statistics about download history."""
+    try:
+        # Get overall statistics
+        all_tasks, total_tasks = get_task_history(limit=10000, offset=0)  # Get a large number to count
+
+        # Calculate statistics
+        stats = {
+            "total_tasks": total_tasks,
+            "by_type": {},
+            "by_status": {},
+            "recent_activity": {
+                "last_24h": 0,
+                "last_7d": 0,
+                "last_30d": 0
+            }
+        }
+
+        import time
+        current_time = time.time()
+        day_seconds = 24 * 60 * 60
+
+        for task in all_tasks:
+            # Count by type
+            task_type = task.get("task_type", "unknown")
+            stats["by_type"][task_type] = stats["by_type"].get(task_type, 0) + 1
+
+            # Count by status
+            status = task.get("status_final", "unknown")
+            stats["by_status"][status] = stats["by_status"].get(status, 0) + 1
+
+            # Count recent activity
+            if task.get("timestamp_created"):
+                time_diff = current_time - task["timestamp_created"]
+                if time_diff <= day_seconds:
+                    stats["recent_activity"]["last_24h"] += 1
+                if time_diff <= 7 * day_seconds:
+                    stats["recent_activity"]["last_7d"] += 1
+                if time_diff <= 30 * day_seconds:
+                    stats["recent_activity"]["last_30d"] += 1
+
+        return jsonify(stats)
+
+    except Exception as e:
+        logger.error(f"Error in /api/history/summary endpoint: {e}", exc_info=True)
+        return jsonify({"error": "Failed to retrieve history summary"}), 500
+
+
+@history_bp.route("/track/<parent_task_id>/<track_id>/mini-history", methods=["GET"])
+def get_track_mini_history_api(parent_task_id, track_id):
+    """API endpoint to retrieve comprehensive mini-history for a specific track."""
+    try:
+        # First, verify the parent task exists and get its children table
+        parent_tasks, _ = get_task_history(
+            limit=1,
+            offset=0,
+            filters={"task_id": parent_task_id}
+        )
+
+        if not parent_tasks:
+            return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
+
+        parent_task = parent_tasks[0]
+        children_table = parent_task.get("children_table")
+
+        if not children_table:
+            return jsonify({"error": f"No child tracks found for parent task {parent_task_id}"}), 404
+
+        # Get the track mini-history
+        mini_history = get_track_mini_history(track_id, children_table)
+
+        if not mini_history:
jsonify({"error": f"Track {track_id} not found in parent task {parent_task_id}"}), 404 + + return jsonify({ + "parent_task_id": parent_task_id, + "parent_task_info": { + "title": parent_task["title"], + "task_type": parent_task["task_type"] + }, + "track_mini_history": mini_history + }) + + except Exception as e: + logger.error(f"Error in /api/history/track/{parent_task_id}/{track_id}/mini-history endpoint: {e}", exc_info=True) + return jsonify({"error": f"Failed to retrieve mini-history for track {track_id}"}), 500 + + +@history_bp.route("/tracks//mini-histories", methods=["GET"]) +def get_all_track_mini_histories(parent_task_id): + """API endpoint to retrieve mini-histories for all tracks in a parent task.""" + try: + # Verify the parent task exists and get its children table + parent_tasks, _ = get_task_history( + limit=1, + offset=0, + filters={"task_id": parent_task_id} + ) + + if not parent_tasks: + return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404 + + parent_task = parent_tasks[0] + children_table = parent_task.get("children_table") + + if not children_table: + return jsonify({ + "parent_task_id": parent_task_id, + "track_mini_histories": [], + "total_count": 0, + "message": "No child tracks found for this task" + }) + + # Get all child tracks + tracks = get_child_tracks(children_table) + + # Get mini-history for each track + track_mini_histories = [] + for track in tracks: + mini_history = get_track_mini_history(track["track_id"], children_table) + if mini_history: + track_mini_histories.append(mini_history) + + # Sort by position or track number + track_mini_histories.sort(key=lambda x: ( + x.get("disc_number", 1), + x.get("track_number", 0), + x.get("position", 0) + )) + + return jsonify({ + "parent_task_id": parent_task_id, + "parent_task_info": { + "title": parent_task["title"], + "task_type": parent_task["task_type"], + "status_final": parent_task["status_final"] + }, + "track_mini_histories": track_mini_histories, + "total_count": len(track_mini_histories) + }) + + except Exception as e: + logger.error(f"Error in /api/history/tracks/{parent_task_id}/mini-histories endpoint: {e}", exc_info=True) + return jsonify({"error": f"Failed to retrieve mini-histories for parent task {parent_task_id}"}), 500 + + +@history_bp.route("/track///status", methods=["POST"]) +def update_track_status(parent_task_id, track_id): + """API endpoint to update the status of a specific track (for testing/admin purposes).""" + try: + # Verify the parent task exists and get its children table + parent_tasks, _ = get_task_history( + limit=1, + offset=0, + filters={"task_id": parent_task_id} + ) + + if not parent_tasks: + return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404 + + parent_task = parent_tasks[0] + children_table = parent_task.get("children_table") + + if not children_table: + return jsonify({"error": f"No child tracks found for parent task {parent_task_id}"}), 404 + + # Parse request data + data = request.get_json() + if not data: + return jsonify({"error": "Request body must contain JSON data"}), 400 + + status_type = data.get("status_type") + if not status_type: + return jsonify({"error": "status_type is required"}), 400 + + status_data = data.get("status_data", {}) + progress_info = data.get("progress_info") + error_info = data.get("error_info") + + # Update the track status + add_track_status_update( + track_id=track_id, + table_name=children_table, + status_type=status_type, + status_data=status_data, + progress_info=progress_info, + 
+            error_info=error_info
+        )
+
+        # Get updated mini-history
+        updated_mini_history = get_track_mini_history(track_id, children_table)
+
+        return jsonify({
+            "message": f"Track {track_id} status updated to {status_type}",
+            "parent_task_id": parent_task_id,
+            "track_id": track_id,
+            "updated_mini_history": updated_mini_history
+        })
+
+    except Exception as e:
+        logger.error(f"Error in /api/history/track/{parent_task_id}/{track_id}/status endpoint: {e}", exc_info=True)
+        return jsonify({"error": f"Failed to update status for track {track_id}"}), 500
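The request body for the admin/testing endpoint above is plain JSON with a required `status_type`. A call that marks a track as finished could look like this (the IDs and host are placeholders, not values from this diff):

```python
import requests

# Placeholder IDs; substitute a real parent task and track from your database
url = "http://localhost:7171/api/history/track/PARENT_TASK_ID/TRACK_ID/status"

payload = {
    "status_type": "done",              # required; one of the status values listed above
    "status_data": {"status": "done"},  # stored verbatim in the track's mini-history
    "progress_info": {"progress": 100},
    # "error_info": {"message": "..."} # only meaningful for error/retrying updates
}
resp = requests.post(url, json=payload, timeout=10)
print(resp.status_code, resp.json().get("message"))
```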
+
+
+@history_bp.route("/track/<parent_task_id>/<track_id>/timeline", methods=["GET"])
+def get_track_timeline(parent_task_id, track_id):
+    """API endpoint to get a simplified timeline view of a track's status progression."""
+    try:
+        # Verify the parent task exists and get its children table
+        parent_tasks, _ = get_task_history(
+            limit=1,
+            offset=0,
+            filters={"task_id": parent_task_id}
+        )
+
+        if not parent_tasks:
+            return jsonify({"error": f"Parent task {parent_task_id} not found"}), 404
+
+        parent_task = parent_tasks[0]
+        children_table = parent_task.get("children_table")
+
+        if not children_table:
+            return jsonify({"error": f"No child tracks found for parent task {parent_task_id}"}), 404
+
+        # Get the track mini-history
+        mini_history = get_track_mini_history(track_id, children_table)
+
+        if not mini_history:
+            return jsonify({"error": f"Track {track_id} not found in parent task {parent_task_id}"}), 404
+
+        # Extract timeline and add summary statistics
+        timeline = mini_history.get("timeline", [])
+
+        # Calculate timeline statistics
+        timeline_stats = {
+            "total_status_changes": len(timeline),
+            "duration_seconds": mini_history.get("time_elapsed"),
+            "calculated_duration": mini_history.get("calculated_duration"),
+            "retry_count": mini_history.get("retry_count", 0),
+            "final_status": mini_history.get("status_final"),
+            "quality_achieved": mini_history.get("quality_achieved"),
+            "file_size": mini_history.get("file_size"),
+            "download_path": mini_history.get("download_path")
+        }
+
+        return jsonify({
+            "parent_task_id": parent_task_id,
+            "track_id": track_id,
+            "track_info": {
+                "title": mini_history.get("title"),
+                "disc_number": mini_history.get("disc_number"),
+                "track_number": mini_history.get("track_number"),
+                "position": mini_history.get("position"),
+                "duration_ms": mini_history.get("duration_ms")
+            },
+            "timeline": timeline,
+            "timeline_stats": timeline_stats
+        })
+
+    except Exception as e:
+        logger.error(f"Error in /api/history/track/{parent_task_id}/{track_id}/timeline endpoint: {e}", exc_info=True)
+        return jsonify({"error": f"Failed to retrieve timeline for track {track_id}"}), 500
+
+
+# Legacy endpoint for backward compatibility
+@history_bp.route("/legacy", methods=["GET"])
+def get_download_history_legacy():
+    """Legacy API endpoint using the old history system (for backward compatibility)."""
     try:
         limit = request.args.get("limit", 25, type=int)
         offset = request.args.get("offset", 0, type=int)
         sort_by = request.args.get("sort_by", "timestamp_completed")
         sort_order = request.args.get("sort_order", "DESC")
 
-        # Create filters dictionary for various filter options
         filters = {}
 
         # Status filter
@@ -32,65 +631,19 @@ def get_download_history():
         parent_task_filter = request.args.get("parent_task_id")
         if parent_task_filter:
             filters["parent_task_id"] = parent_task_filter
-
-        # Track status filter
-        track_status_filter = request.args.get("track_status")
-        if track_status_filter:
-            filters["track_status"] = track_status_filter
-
-        # Show/hide child tracks
-        hide_child_tracks = request.args.get("hide_child_tracks", "false").lower() == "true"
-        if hide_child_tracks:
-            filters["parent_task_id"] = None  # Only show parent entries or standalone tracks
-
-        # Show only tracks with specific parent
-        only_parent_tracks = request.args.get("only_parent_tracks", "false").lower() == "true"
-        if only_parent_tracks and not parent_task_filter:
-            filters["parent_task_id"] = "NOT_NULL"  # Special value to indicate we want only child tracks
 
         entries, total_count = get_history_entries(
             limit, offset, sort_by, sort_order, filters
         )
 
-        return jsonify(
-            {
-                "entries": entries,
-                "total_count": total_count,
-                "limit": limit,
-                "offset": offset,
-            }
-        )
+        return jsonify({
+            "entries": entries,
+            "total_count": total_count,
+            "limit": limit,
+            "offset": offset,
+            "note": "This is the legacy endpoint. Consider migrating to /api/history"
+        })
+
     except Exception as e:
-        logger.error(f"Error in /api/history endpoint: {e}", exc_info=True)
+        logger.error(f"Error in /api/history/legacy endpoint: {e}", exc_info=True)
         return jsonify({"error": "Failed to retrieve download history"}), 500
-
-
-@history_bp.route("/tracks/<parent_task_id>", methods=["GET"])
-def get_tracks_for_parent(parent_task_id):
-    """API endpoint to retrieve all track entries for a specific parent task."""
-    try:
-        # We don't need pagination for this endpoint as we want all tracks for a parent
-        filters = {"parent_task_id": parent_task_id}
-
-        # Optional sorting
-        sort_by = request.args.get("sort_by", "timestamp_completed")
-        sort_order = request.args.get("sort_order", "DESC")
-
-        entries, total_count = get_history_entries(
-            limit=1000,  # High limit to get all tracks
-            offset=0,
-            sort_by=sort_by,
-            sort_order=sort_order,
-            filters=filters
-        )
-
-        return jsonify(
-            {
-                "parent_task_id": parent_task_id,
-                "tracks": entries,
-                "total_count": total_count,
-            }
-        )
-    except Exception as e:
-        logger.error(f"Error in /api/history/tracks endpoint: {e}", exc_info=True)
-        return jsonify({"error": f"Failed to retrieve tracks for parent task {parent_task_id}"}), 500
diff --git a/routes/utils/history_manager.py b/routes/utils/history_manager.py
index b6072b4..a4d0895 100644
--- a/routes/utils/history_manager.py
+++ b/routes/utils/history_manager.py
@@ -4,420 +4,1056 @@ import time
 import logging
 import uuid
 from pathlib import Path
+from typing import Dict, List, Optional, Any, Union
+from datetime import datetime
 
 logger = logging.getLogger(__name__)
 
 HISTORY_DIR = Path("./data/history")
 HISTORY_DB_FILE = HISTORY_DIR / "download_history.db"
 
-EXPECTED_COLUMNS = {
+# Main tasks table schema
+MAIN_TASKS_SCHEMA = {
     "task_id": "TEXT PRIMARY KEY",
-    "download_type": "TEXT",
-    "item_name": "TEXT",
-    "item_artist": "TEXT",
-    "item_album": "TEXT",
-    "item_url": "TEXT",
-    "spotify_id": "TEXT",
-    "status_final": "TEXT",  # 'COMPLETED', 'ERROR', 'CANCELLED'
-    "error_message": "TEXT",
-    "timestamp_added": "REAL",
+    "task_type": "TEXT NOT NULL",  # 'track', 'album', 'playlist'
+    "title": "TEXT",
+    "artists": "TEXT",  # JSON array of artist objects
+    "ids": "TEXT",  # JSON object with spotify, deezer, isrc, upc
+    "status_current": "TEXT",  # Current status: initializing, retrying, real-time, skipped, error, done
+    "status_final": "TEXT",  # Final result: COMPLETED, ERROR, CANCELLED, SKIPPED
+    "timestamp_created": "REAL",
+    "timestamp_updated": "REAL",
     "timestamp_completed": "REAL",
-    "original_request_json": "TEXT",
-    "last_status_obj_json": "TEXT",
-    "service_used": "TEXT",
-    "quality_profile": "TEXT",
-    "convert_to": "TEXT",
-    "bitrate": "TEXT",
"children_table": "TEXT", # Table name for nested items (album_uuid, playlist_uuid) + "metadata": "TEXT", # JSON - Complete object data (albumObject, playlistObject, trackObject) + "config": "TEXT", # JSON - Download config (quality, convert_to, bitrate, service) + "error_info": "TEXT", # JSON - Error details + "progress": "TEXT", # JSON - Progress info (current/total, time_elapsed, etc.) + "summary": "TEXT", # JSON - Final summary for albums/playlists "parent_task_id": "TEXT", # Reference to parent task for individual tracks - "track_status": "TEXT", # 'SUCCESSFUL', 'SKIPPED', 'FAILED' - "summary_json": "TEXT", # JSON string of the summary object from task - "total_successful": "INTEGER", # Count of successful tracks - "total_skipped": "INTEGER", # Count of skipped tracks - "total_failed": "INTEGER", # Count of failed tracks + "position": "INTEGER", # Position in parent (for playlist tracks) + "original_request": "TEXT" # JSON - Original request data +} + +# Status history table for tracking all status changes +STATUS_HISTORY_SCHEMA = { + "status_id": "INTEGER PRIMARY KEY AUTOINCREMENT", + "task_id": "TEXT NOT NULL", + "status_type": "TEXT NOT NULL", # initializing, retrying, real-time, skipped, error, done + "status_data": "TEXT", # JSON - Complete status object + "timestamp": "REAL NOT NULL" +} + +# Schema for individual track tables within albums/playlists +CHILD_TRACK_SCHEMA = { + "track_id": "TEXT PRIMARY KEY", + "parent_task_id": "TEXT NOT NULL", + "position": "INTEGER", + "disc_number": "INTEGER", + "track_number": "INTEGER", + "title": "TEXT", + "duration_ms": "INTEGER", + "explicit": "BOOLEAN", + "track_data": "TEXT", # JSON - Complete trackObject (trackAlbumObject/trackPlaylistObject) + "artists_data": "TEXT", # JSON - Array of artist objects + "album_data": "TEXT", # JSON - Album context data (for playlist tracks) + "ids_data": "TEXT", # JSON - IDs object (spotify, deezer, isrc, etc.) + "status_current": "TEXT", # Current status: initializing, retrying, real-time, skipped, error, done + "status_final": "TEXT", # Final result: COMPLETED, ERROR, CANCELLED, SKIPPED + "status_history": "TEXT", # JSON - Array of all status updates for this track + "timestamp_created": "REAL", + "timestamp_started": "REAL", # When download actually started + "timestamp_completed": "REAL", + "time_elapsed": "REAL", # Total processing time in seconds + "retry_count": "INTEGER", # Number of retries attempted + "error_info": "TEXT", # JSON - Error details and reason + "progress_info": "TEXT", # JSON - Progress data during download + "config": "TEXT", # JSON - Download config inherited from parent + "download_path": "TEXT", # Final download path/filename + "file_size": "INTEGER", # File size in bytes + "quality_achieved": "TEXT" # Actual quality/bitrate achieved } def init_history_db(): - """Initializes the download history database, creates the table if it doesn't exist, - and adds any missing columns to an existing table.""" + """Initialize the improved history database with new schema.""" conn = None try: HISTORY_DIR.mkdir(parents=True, exist_ok=True) conn = sqlite3.connect(HISTORY_DB_FILE) cursor = conn.cursor() - # Create table if it doesn't exist (idempotent) - # The primary key constraint is handled by the initial CREATE TABLE. - # If 'task_id' is missing, it cannot be added as PRIMARY KEY to an existing table - # without complex migrations. We assume 'task_id' will exist if the table exists. 
- create_table_sql = """ - CREATE TABLE IF NOT EXISTS download_history ( - task_id TEXT PRIMARY KEY, - download_type TEXT, - item_name TEXT, - item_artist TEXT, - item_album TEXT, - item_url TEXT, - spotify_id TEXT, - status_final TEXT, - error_message TEXT, - timestamp_added REAL, - timestamp_completed REAL, - original_request_json TEXT, - last_status_obj_json TEXT, - service_used TEXT, - quality_profile TEXT, - convert_to TEXT, - bitrate TEXT, - parent_task_id TEXT, - track_status TEXT, - summary_json TEXT, - total_successful INTEGER, - total_skipped INTEGER, - total_failed INTEGER - ) - """ - cursor.execute(create_table_sql) - conn.commit() + # Create main tasks table + _create_table_from_schema(cursor, "download_tasks", MAIN_TASKS_SCHEMA) + + # Create status history table + _create_table_from_schema(cursor, "status_history", STATUS_HISTORY_SCHEMA) - # Check for missing columns and add them + # Check if we need to migrate from old schema + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='download_history'") + old_table_exists = cursor.fetchone() is not None + + if old_table_exists: + logger.info("Old schema detected. Starting migration...") + _migrate_from_old_schema(conn) + + conn.commit() + logger.info(f"History database initialized successfully at {HISTORY_DB_FILE}") + + except sqlite3.Error as e: + logger.error(f"Error initializing history database: {e}", exc_info=True) + finally: + if conn: + conn.close() + + +def _create_table_from_schema(cursor, table_name: str, schema: Dict[str, str]): + """Create a table from a schema dictionary.""" + columns = [] + + for col_name, col_def in schema.items(): + columns.append(f"{col_name} {col_def}") + + create_sql = f"CREATE TABLE IF NOT EXISTS {table_name} ({', '.join(columns)})" + + cursor.execute(create_sql) + logger.info(f"Created/verified table: {table_name}") + + +def _migrate_from_old_schema(conn): + """Migrate data from the old download_history table to the new schema.""" + cursor = conn.cursor() + + try: + # Get all data from old table + cursor.execute("SELECT * FROM download_history") + old_records = cursor.fetchall() + + # Get column names cursor.execute("PRAGMA table_info(download_history)") - existing_columns_info = cursor.fetchall() - existing_column_names = {col[1] for col in existing_columns_info} - - added_columns = False - for col_name, col_type in EXPECTED_COLUMNS.items(): - if col_name not in existing_column_names: - if "PRIMARY KEY" in col_type.upper() and col_name == "task_id": - # This case should be handled by CREATE TABLE, but as a safeguard: - # If task_id is somehow missing and table exists, this is a problem. - # Adding it as PK here is complex and might fail if data exists. - # For now, we assume CREATE TABLE handles the PK. - # If we were to add it, it would be 'ALTER TABLE download_history ADD COLUMN task_id TEXT;' - # and then potentially a separate step to make it PK if table is empty, which is non-trivial. - logger.warning( - f"Column '{col_name}' is part of PRIMARY KEY and was expected to be created by CREATE TABLE. Skipping explicit ADD COLUMN." - ) - continue - - # For other columns, just add them. - # Remove PRIMARY KEY from type definition if present, as it's only for table creation. - col_type_for_add = col_type.replace(" PRIMARY KEY", "").strip() - try: - cursor.execute( - f"ALTER TABLE download_history ADD COLUMN {col_name} {col_type_for_add}" - ) - logger.info( - f"Added missing column '{col_name} {col_type_for_add}' to download_history table." 
+
+
+def _migrate_from_old_schema(conn):
+    """Migrate data from the old download_history table to the new schema."""
+    cursor = conn.cursor()
+
+    try:
+        # Get all data from old table
+        cursor.execute("SELECT * FROM download_history")
+        old_records = cursor.fetchall()
+
+        # Get column names
         cursor.execute("PRAGMA table_info(download_history)")
-        existing_columns_info = cursor.fetchall()
-        existing_column_names = {col[1] for col in existing_columns_info}
-
-        added_columns = False
-        for col_name, col_type in EXPECTED_COLUMNS.items():
-            if col_name not in existing_column_names:
-                if "PRIMARY KEY" in col_type.upper() and col_name == "task_id":
-                    # This case should be handled by CREATE TABLE, but as a safeguard:
-                    # If task_id is somehow missing and table exists, this is a problem.
-                    # Adding it as PK here is complex and might fail if data exists.
-                    # For now, we assume CREATE TABLE handles the PK.
-                    # If we were to add it, it would be 'ALTER TABLE download_history ADD COLUMN task_id TEXT;'
-                    # and then potentially a separate step to make it PK if table is empty, which is non-trivial.
-                    logger.warning(
-                        f"Column '{col_name}' is part of PRIMARY KEY and was expected to be created by CREATE TABLE. Skipping explicit ADD COLUMN."
-                    )
-                    continue
-
-                # For other columns, just add them.
-                # Remove PRIMARY KEY from type definition if present, as it's only for table creation.
-                col_type_for_add = col_type.replace(" PRIMARY KEY", "").strip()
-                try:
-                    cursor.execute(
-                        f"ALTER TABLE download_history ADD COLUMN {col_name} {col_type_for_add}"
-                    )
-                    logger.info(
-                        f"Added missing column '{col_name} {col_type_for_add}' to download_history table."
-                    )
-                    added_columns = True
-                except sqlite3.OperationalError as alter_e:
-                    # This might happen if a column (e.g. task_id) without "PRIMARY KEY" is added by this loop
-                    # but the initial create table already made it a primary key.
-                    # Or other more complex scenarios.
-                    logger.warning(
-                        f"Could not add column '{col_name}': {alter_e}. It might already exist or there's a schema mismatch."
-                    )
-
-        # Add additional columns for summary data if they don't exist
-        for col_name, col_type in {
-            "summary_json": "TEXT",
-            "total_successful": "INTEGER",
-            "total_skipped": "INTEGER",
-            "total_failed": "INTEGER"
-        }.items():
-            if col_name not in existing_column_names and col_name not in EXPECTED_COLUMNS:
-                try:
-                    cursor.execute(
-                        f"ALTER TABLE download_history ADD COLUMN {col_name} {col_type}"
-                    )
-                    logger.info(
-                        f"Added missing column '{col_name} {col_type}' to download_history table."
-                    )
-                    added_columns = True
-                except sqlite3.OperationalError as alter_e:
-                    logger.warning(
-                        f"Could not add column '{col_name}': {alter_e}. It might already exist or there's a schema mismatch."
-                    )
-
-        if added_columns:
-            conn.commit()
-            logger.info(f"Download history table schema updated at {HISTORY_DB_FILE}")
-        else:
-            logger.info(
-                f"Download history database schema is up-to-date at {HISTORY_DB_FILE}"
-            )
-
-    except sqlite3.Error as e:
-        logger.error(
-            f"Error initializing download history database: {e}", exc_info=True
-        )
-    finally:
-        if conn:
-            conn.close()
-
-
-def add_entry_to_history(history_data: dict):
-    """Adds or replaces an entry in the download_history table.
-
-    Args:
-        history_data (dict): A dictionary containing the data for the history entry.
-                             Expected keys match the table columns.
-    """
-    required_keys = [
-        "task_id",
-        "download_type",
-        "item_name",
-        "item_artist",
-        "item_album",
-        "item_url",
-        "spotify_id",
-        "status_final",
-        "error_message",
-        "timestamp_added",
-        "timestamp_completed",
-        "original_request_json",
-        "last_status_obj_json",
-        "service_used",
-        "quality_profile",
-        "convert_to",
-        "bitrate",
-        "parent_task_id",
-        "track_status",
-        "summary_json",
-        "total_successful",
-        "total_skipped",
-        "total_failed",
-    ]
-    # Ensure all keys are present, filling with None if not
-    for key in required_keys:
-        history_data.setdefault(key, None)
-
-    conn = None
-    try:
-        conn = sqlite3.connect(HISTORY_DB_FILE)
-        cursor = conn.cursor()
-        cursor.execute(
-            """
-            INSERT OR REPLACE INTO download_history (
-                task_id, download_type, item_name, item_artist, item_album,
-                item_url, spotify_id, status_final, error_message,
-                timestamp_added, timestamp_completed, original_request_json,
-                last_status_obj_json, service_used, quality_profile,
-                convert_to, bitrate, parent_task_id, track_status,
-                summary_json, total_successful, total_skipped, total_failed
-            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
- """, - ( - history_data["task_id"], - history_data["download_type"], - history_data["item_name"], - history_data["item_artist"], - history_data["item_album"], - history_data["item_url"], - history_data["spotify_id"], - history_data["status_final"], - history_data["error_message"], - history_data["timestamp_added"], - history_data["timestamp_completed"], - history_data["original_request_json"], - history_data["last_status_obj_json"], - history_data["service_used"], - history_data["quality_profile"], - history_data["convert_to"], - history_data["bitrate"], - history_data["parent_task_id"], - history_data["track_status"], - history_data["summary_json"], - history_data["total_successful"], - history_data["total_skipped"], - history_data["total_failed"], - ), - ) - conn.commit() - logger.info( - f"Added/Updated history for task_id: {history_data['task_id']}, status: {history_data['status_final']}" - ) - except sqlite3.Error as e: - logger.error( - f"Error adding entry to download history for task_id {history_data.get('task_id')}: {e}", - exc_info=True, - ) + old_columns = [col[1] for col in cursor.fetchall()] + + logger.info(f"Migrating {len(old_records)} records from old schema...") + + # Create backup of old table + backup_table = f"download_history_backup_{int(time.time())}" + cursor.execute(f"CREATE TABLE {backup_table} AS SELECT * FROM download_history") + + migrated_count = 0 + for record in old_records: + old_data = dict(zip(old_columns, record)) + + # Convert old record to new format + new_task = _convert_old_record_to_new(old_data) + if new_task: + add_task_to_history(new_task) + migrated_count += 1 + + logger.info(f"Successfully migrated {migrated_count} records. Old table backed up as {backup_table}") + except Exception as e: - logger.error( - f"Unexpected error adding to history for task_id {history_data.get('task_id')}: {e}", - exc_info=True, - ) + logger.error(f"Error during migration: {e}", exc_info=True) + + +def _convert_old_record_to_new(old_data: Dict) -> Optional[Dict]: + """Convert an old history record to the new format.""" + try: + # Create basic task structure + task_data = { + "task_id": old_data.get("task_id"), + "task_type": old_data.get("download_type", "track"), + "title": old_data.get("item_name", ""), + "timestamp_created": old_data.get("timestamp_added"), + "timestamp_completed": old_data.get("timestamp_completed"), + "status_final": old_data.get("status_final"), + "parent_task_id": old_data.get("parent_task_id"), + "original_request": old_data.get("original_request_json") + } + + # Build artists array + if old_data.get("item_artist"): + task_data["artists"] = json.dumps([{"name": old_data["item_artist"]}]) + + # Build IDs object + ids = {} + if old_data.get("spotify_id"): + ids["spotify"] = old_data["spotify_id"] + if ids: + task_data["ids"] = json.dumps(ids) + + # Build config object + config = {} + if old_data.get("service_used"): + config["service_used"] = old_data["service_used"] + if old_data.get("quality_profile"): + config["quality_profile"] = old_data["quality_profile"] + if old_data.get("convert_to"): + config["convert_to"] = old_data["convert_to"] + if old_data.get("bitrate"): + config["bitrate"] = old_data["bitrate"] + if config: + task_data["config"] = json.dumps(config) + + # Handle error information + if old_data.get("error_message"): + task_data["error_info"] = json.dumps({"message": old_data["error_message"]}) + + # Build basic metadata object + metadata = { + "type": task_data["task_type"], + "title": task_data["title"], + "url": 
old_data.get("item_url") + } + + if old_data.get("item_album"): + metadata["album"] = {"title": old_data["item_album"]} + + task_data["metadata"] = json.dumps(metadata) + + return task_data + + except Exception as e: + logger.warning(f"Failed to convert old record {old_data.get('task_id')}: {e}") + return None + + +def create_child_table(parent_task_id: str, task_type: str) -> str: + """Create a child table for album or playlist tracks using UUID-based naming.""" + # Generate a shorter UUID for the table name to avoid database identifier length limits + import uuid as uuid_mod + table_uuid = uuid_mod.uuid4().hex[:12] # Use first 12 characters of UUID + table_name = f"{task_type}_{table_uuid}" + + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + # Create the child table + _create_table_from_schema(cursor, table_name, CHILD_TRACK_SCHEMA) + + # Create an index on parent_task_id for faster queries + cursor.execute(f"CREATE INDEX IF NOT EXISTS idx_{table_name}_parent ON {table_name}(parent_task_id)") + + # Create an index on position for proper ordering + cursor.execute(f"CREATE INDEX IF NOT EXISTS idx_{table_name}_position ON {table_name}(position)") + + conn.commit() + + logger.info(f"Created child table: {table_name} for parent task: {parent_task_id}") + return table_name + + except sqlite3.Error as e: + logger.error(f"Error creating child table {table_name}: {e}") + return "" finally: if conn: conn.close() -def get_history_entries( - limit=25, offset=0, sort_by="timestamp_completed", sort_order="DESC", filters=None -): - """Retrieves entries from the download_history table with pagination, sorting, and filtering. - - Args: - limit (int): Maximum number of entries to return. - offset (int): Number of entries to skip (for pagination). - sort_by (str): Column name to sort by. - sort_order (str): 'ASC' or 'DESC'. - filters (dict, optional): A dictionary of column_name: value to filter by. - Currently supports exact matches. - - Returns: - tuple: (list of history entries as dicts, total_count of matching entries) - """ +def add_task_to_history(task_data: Dict): + """Add or update a main task in the history.""" conn = None try: conn = sqlite3.connect(HISTORY_DB_FILE) - conn.row_factory = sqlite3.Row # Access columns by name cursor = conn.cursor() + + # Ensure required fields are present + required_fields = ["task_id", "task_type"] + for field in required_fields: + if field not in task_data: + raise ValueError(f"Missing required field: {field}") + + # Set default timestamps + current_time = time.time() + task_data.setdefault("timestamp_created", current_time) + task_data.setdefault("timestamp_updated", current_time) + + # Convert all values to appropriate types + processed_data = {} + for col_name in MAIN_TASKS_SCHEMA.keys(): + if col_name in task_data: + value = task_data[col_name] + # Convert objects to JSON strings + if isinstance(value, (dict, list)): + processed_data[col_name] = json.dumps(value) + else: + processed_data[col_name] = value + else: + processed_data[col_name] = None + + # Create INSERT OR REPLACE query + columns = list(processed_data.keys()) + placeholders = ["?" 
+
+
+def create_child_table(parent_task_id: str, task_type: str) -> str:
+    """Create a child table for album or playlist tracks using UUID-based naming."""
+    # Generate a shorter UUID for the table name to avoid database identifier length limits
+    # (uuid is already imported at module level)
+    table_uuid = uuid.uuid4().hex[:12]  # Use first 12 characters of UUID
+    table_name = f"{task_type}_{table_uuid}"
+
+    conn = None
+    try:
+        conn = sqlite3.connect(HISTORY_DB_FILE)
+        cursor = conn.cursor()
+
+        # Create the child table
+        _create_table_from_schema(cursor, table_name, CHILD_TRACK_SCHEMA)
+
+        # Create an index on parent_task_id for faster queries
+        cursor.execute(f"CREATE INDEX IF NOT EXISTS idx_{table_name}_parent ON {table_name}(parent_task_id)")
+
+        # Create an index on position for proper ordering
+        cursor.execute(f"CREATE INDEX IF NOT EXISTS idx_{table_name}_position ON {table_name}(position)")
+
+        conn.commit()
+
+        logger.info(f"Created child table: {table_name} for parent task: {parent_task_id}")
+        return table_name
+
+    except sqlite3.Error as e:
+        logger.error(f"Error creating child table {table_name}: {e}")
+        return ""
     finally:
         if conn:
             conn.close()
 
 
-def get_history_entries(
-    limit=25, offset=0, sort_by="timestamp_completed", sort_order="DESC", filters=None
-):
-    """Retrieves entries from the download_history table with pagination, sorting, and filtering.
-
-    Args:
-        limit (int): Maximum number of entries to return.
-        offset (int): Number of entries to skip (for pagination).
-        sort_by (str): Column name to sort by.
-        sort_order (str): 'ASC' or 'DESC'.
-        filters (dict, optional): A dictionary of column_name: value to filter by.
-                                  Currently supports exact matches.
-
-    Returns:
-        tuple: (list of history entries as dicts, total_count of matching entries)
-    """
+def add_task_to_history(task_data: Dict):
+    """Add or update a main task in the history."""
     conn = None
     try:
         conn = sqlite3.connect(HISTORY_DB_FILE)
-        conn.row_factory = sqlite3.Row  # Access columns by name
         cursor = conn.cursor()
+
+        # Ensure required fields are present
+        required_fields = ["task_id", "task_type"]
+        for field in required_fields:
+            if field not in task_data:
+                raise ValueError(f"Missing required field: {field}")
+
+        # Set default timestamps
+        current_time = time.time()
+        task_data.setdefault("timestamp_created", current_time)
+        task_data.setdefault("timestamp_updated", current_time)
+
+        # Convert all values to appropriate types
+        processed_data = {}
+        for col_name in MAIN_TASKS_SCHEMA.keys():
+            if col_name in task_data:
+                value = task_data[col_name]
+                # Convert objects to JSON strings
+                if isinstance(value, (dict, list)):
+                    processed_data[col_name] = json.dumps(value)
+                else:
+                    processed_data[col_name] = value
+            else:
+                processed_data[col_name] = None
+
+        # Create INSERT OR REPLACE query
+        columns = list(processed_data.keys())
+        placeholders = ["?" for _ in columns]
+        values = [processed_data[col] for col in columns]
+
+        query = f"""
+            INSERT OR REPLACE INTO download_tasks ({', '.join(columns)})
+            VALUES ({', '.join(placeholders)})
+        """
+
+        cursor.execute(query, values)
+        conn.commit()
+
+        logger.info(f"Added/updated task: {task_data['task_id']} ({task_data['task_type']})")
+
+    except Exception as e:
+        logger.error(f"Error adding task to history: {e}", exc_info=True)
+    finally:
+        if conn:
+            conn.close()
-
-        base_query = "FROM download_history"
-        count_query = "SELECT COUNT(*) " + base_query
-        select_query = "SELECT * " + base_query
+
+
+def add_status_update(task_id: str, status_type: str, status_data: Dict):
+    """Add a status update to the status history."""
+    conn = None
+    try:
+        conn = sqlite3.connect(HISTORY_DB_FILE)
+        cursor = conn.cursor()
+
+        cursor.execute("""
+            INSERT INTO status_history (task_id, status_type, status_data, timestamp)
+            VALUES (?, ?, ?, ?)
+        """, (task_id, status_type, json.dumps(status_data), time.time()))
+
+        # Also update the current status in main table
+        cursor.execute("""
+            UPDATE download_tasks
+            SET status_current = ?, timestamp_updated = ?
+            WHERE task_id = ?
+        """, (status_type, time.time(), task_id))
+
+        conn.commit()
+        logger.debug(f"Added status update for {task_id}: {status_type}")
+
+    except Exception as e:
+        logger.error(f"Error adding status update: {e}", exc_info=True)
+    finally:
+        if conn:
+            conn.close()
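Together these two helpers form the basic write path: upsert the task row, then append status rows as the download progresses. A usage sketch (this assumes the module is importable as `routes.utils.history_manager` and the data directory is writable):

```python
from routes.utils.history_manager import (
    init_history_db, add_task_to_history, add_status_update,
)

init_history_db()

add_task_to_history({
    "task_id": "demo-task-1",  # hypothetical ID
    "task_type": "track",
    "title": "Example Track",
    "artists": [{"name": "Example Artist"}],  # dicts/lists are JSON-encoded automatically
    "status_current": "initializing",
})

# Each call appends a status_history row and refreshes status_current
add_status_update("demo-task-1", "real-time", {"status": "real-time", "progress": 42})
add_status_update("demo-task-1", "done", {"status": "done"})
```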
"error_info": json.dumps(track_data.get("error_info", {})) if track_data.get("error_info") else None, + "progress_info": json.dumps(track_data.get("progress_info", {})) if track_data.get("progress_info") else None, + "config": json.dumps(track_data.get("config", {})) if track_data.get("config") else None, + "download_path": track_data.get("download_path"), + "file_size": track_data.get("file_size"), + "quality_achieved": track_data.get("quality_achieved") + } + + # Filter out None values to avoid issues + track_record = {k: v for k, v in track_record.items() if v is not None} + + # Insert into child table + columns = list(track_record.keys()) + placeholders = ["?" for _ in columns] + values = [track_record[col] for col in columns] + + query = f""" + INSERT OR REPLACE INTO {table_name} ({', '.join(columns)}) + VALUES ({', '.join(placeholders)}) + """ + + cursor.execute(query, values) + conn.commit() + + logger.info(f"Added track to {table_name}: {track_id} - {track_record.get('title', 'Unknown')}") + + return track_id + + except Exception as e: + logger.error(f"Error adding child track: {e}", exc_info=True) + return None + finally: + if conn: + conn.close() + + +def get_task_history( + limit: int = 25, + offset: int = 0, + sort_by: str = "timestamp_updated", + sort_order: str = "DESC", + filters: Optional[Dict] = None, + include_children: bool = False +) -> tuple[List[Dict], int]: + """Get task history with enhanced filtering and optional child data.""" + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + + # Build query + base_query = "FROM download_tasks" where_clauses = [] params = [] - + if filters: for column, value in filters.items(): - # Basic security: ensure column is a valid one (alphanumeric + underscore) - if column.replace("_", "").isalnum(): - # Special case for 'NOT_NULL' value for parent_task_id - if column == "parent_task_id" and value == "NOT_NULL": - where_clauses.append(f"{column} IS NOT NULL") - # Regular case for NULL value - elif value is None: + if column in MAIN_TASKS_SCHEMA: + if value is None: where_clauses.append(f"{column} IS NULL") - # Regular case for exact match else: where_clauses.append(f"{column} = ?") params.append(value) - + if where_clauses: - where_sql = " WHERE " + " AND ".join(where_clauses) - count_query += where_sql - select_query += where_sql - - # Get total count for pagination - cursor.execute(count_query, params) + base_query += " WHERE " + " AND ".join(where_clauses) + + # Get total count + cursor.execute(f"SELECT COUNT(*) {base_query}", params) total_count = cursor.fetchone()[0] - - # Validate sort_by and sort_order to prevent SQL injection - valid_sort_columns = [ - "task_id", - "download_type", - "item_name", - "item_artist", - "item_album", - "item_url", - "status_final", - "timestamp_added", - "timestamp_completed", - "service_used", - "quality_profile", - "convert_to", - "bitrate", - "parent_task_id", - "track_status", - "total_successful", - "total_skipped", - "total_failed", - ] - if sort_by not in valid_sort_columns: - sort_by = "timestamp_completed" # Default sort - - sort_order_upper = sort_order.upper() - if sort_order_upper not in ["ASC", "DESC"]: - sort_order_upper = "DESC" - - select_query += f" ORDER BY {sort_by} {sort_order_upper} LIMIT ? OFFSET ?" 
-        params.extend([limit, offset])
-
-        cursor.execute(select_query, params)
-        rows = cursor.fetchall()
-
-        # Convert rows to list of dicts
-        entries = [dict(row) for row in rows]
-        return entries, total_count
-
-    except sqlite3.Error as e:
-        logger.error(f"Error retrieving history entries: {e}", exc_info=True)
+
+        # Validate sort parameters
+        if sort_by not in MAIN_TASKS_SCHEMA:
+            sort_by = "timestamp_updated"
+        if sort_order.upper() not in ["ASC", "DESC"]:
+            sort_order = "DESC"
+
+        # Get paginated results
+        query = f"SELECT * {base_query} ORDER BY {sort_by} {sort_order} LIMIT ? OFFSET ?"
+        cursor.execute(query, params + [limit, offset])
+
+        tasks = []
+        for row in cursor.fetchall():
+            task = dict(row)
+
+            # Parse JSON fields
+            json_fields = ["artists", "ids", "metadata", "config", "error_info", "progress", "summary"]
+            for field in json_fields:
+                if task[field]:
+                    try:
+                        task[field] = json.loads(task[field])
+                    except json.JSONDecodeError:
+                        pass
+
+            # Include child tracks if requested
+            if include_children and task["children_table"]:
+                task["child_tracks"] = get_child_tracks(task["children_table"])
+
+            tasks.append(task)
+
+        return tasks, total_count
+
+    except Exception as e:
+        logger.error(f"Error getting task history: {e}", exc_info=True)
         return [], 0
     finally:
         if conn:
             conn.close()
 
 
-def add_track_entry_to_history(track_name, artist_name, parent_task_id, track_status, parent_history_data=None):
-    """Adds a track-specific entry to the history database.
-
-    Args:
-        track_name (str): The name of the track
-        artist_name (str): The artist name
-        parent_task_id (str): The ID of the parent task (album or playlist)
-        track_status (str): The status of the track ('SUCCESSFUL', 'SKIPPED', 'FAILED')
-        parent_history_data (dict, optional): The history data of the parent task
-
-    Returns:
-        str: The task_id of the created track entry
-    """
-    # Generate a unique ID for this track entry
-    track_task_id = f"{parent_task_id}_track_{uuid.uuid4().hex[:8]}"
-
-    # Create a copy of parent data or initialize empty dict
-    track_history_data = {}
-    if parent_history_data:
-        # Copy relevant fields from parent
-        for key in EXPECTED_COLUMNS:
-            if key in parent_history_data and key not in ['task_id', 'item_name', 'item_artist']:
-                track_history_data[key] = parent_history_data[key]
-
-    # Set track-specific fields
-    track_history_data.update({
-        "task_id": track_task_id,
-        "download_type": "track",
-        "item_name": track_name,
-        "item_artist": artist_name,
-        "parent_task_id": parent_task_id,
-        "track_status": track_status,
-        "status_final": "COMPLETED" if track_status == "SUCCESSFUL" else
-                        "SKIPPED" if track_status == "SKIPPED" else "ERROR",
-        "timestamp_completed": time.time()
-    })
-
-    # Extract track URL if possible (from last_status_obj_json)
-    if parent_history_data and parent_history_data.get("last_status_obj_json"):
-        try:
-            last_status = json.loads(parent_history_data["last_status_obj_json"])
+def add_track_status_update(track_id: str, table_name: str, status_type: str, status_data: Dict,
+                            progress_info: Dict = None, error_info: Dict = None):
+    """Add a status update to a track's mini-history."""
+    conn = None
+    try:
+        conn = sqlite3.connect(HISTORY_DB_FILE)
+        cursor = conn.cursor()
+
+        # Get current status history
+        cursor.execute(f"SELECT status_history, retry_count FROM {table_name} WHERE track_id = ?", (track_id,))
+        result = cursor.fetchone()
+
+        if not result:
+            logger.warning(f"Track {track_id} not found in table {table_name}")
+            return
+
+        current_history = []
+        retry_count = result[1] or 0
+
+        if result[0]:
+            try:
+                current_history = json.loads(result[0])
+            except json.JSONDecodeError:
+                current_history = []
+
+        # Add new status update
+        status_update = {
+            "timestamp": time.time(),
+            "status_type": status_type,
+            "status_data": status_data
+        }
+
+        if progress_info:
+            status_update["progress_info"] = progress_info
+        if error_info:
+            status_update["error_info"] = error_info
-            # Try to match track name in the tracks lists to find URL
-            track_key = f"{track_name} - {artist_name}"
-            if "raw_callback" in last_status and last_status["raw_callback"].get("url"):
-                track_history_data["item_url"] = last_status["raw_callback"].get("url")
+
+        current_history.append(status_update)
+
+        # Update fields based on status
+        # (note: child track tables have no timestamp_updated column, so it is not set here)
+        update_fields = {
+            "status_current": status_type,
+            "status_history": json.dumps(current_history)
+        }
+
+        # Handle specific status transitions
+        if status_type == "real-time":
+            # Record the start time once, on the first real-time update
+            cursor.execute(f"SELECT timestamp_started FROM {table_name} WHERE track_id = ?", (track_id,))
+            started = cursor.fetchone()
+            if not started or not started[0]:
+                update_fields["timestamp_started"] = time.time()
+            if progress_info:
+                update_fields["progress_info"] = json.dumps(progress_info)
-
-                # Extract Spotify ID from URL if possible
-                url = last_status["raw_callback"].get("url", "")
-                if url and "spotify.com" in url:
+
+        elif status_type == "retrying":
+            update_fields["retry_count"] = retry_count + 1
+            if error_info:
+                update_fields["error_info"] = json.dumps(error_info)
+
+        elif status_type in ["done", "error", "skipped"]:
+            update_fields["timestamp_completed"] = time.time()
+            update_fields["status_final"] = {
+                "done": "COMPLETED",
+                "error": "ERROR",
+                "skipped": "SKIPPED"
+            }[status_type]
+
+            if error_info:
+                update_fields["error_info"] = json.dumps(error_info)
+
+            # Calculate time elapsed if we have start time
+            cursor.execute(f"SELECT timestamp_started FROM {table_name} WHERE track_id = ?", (track_id,))
+            start_result = cursor.fetchone()
+            if start_result and start_result[0]:
+                update_fields["time_elapsed"] = time.time() - start_result[0]
+
+        # Update the track record
+        set_clauses = [f"{key} = ?" for key in update_fields.keys()]
+        values = list(update_fields.values()) + [track_id]
+
+        query = f"UPDATE {table_name} SET {', '.join(set_clauses)} WHERE track_id = ?"
+        cursor.execute(query, values)
+        conn.commit()
+
+        logger.debug(f"Updated track {track_id} status to {status_type}")
+
+    except Exception as e:
+        logger.error(f"Error updating track status: {e}", exc_info=True)
+    finally:
+        if conn:
+            conn.close()
+
+
+def get_child_tracks(table_name: str) -> List[Dict]:
+    """Get all tracks from a child table with parsed JSON fields."""
+    conn = None
+    try:
+        conn = sqlite3.connect(HISTORY_DB_FILE)
+        conn.row_factory = sqlite3.Row
+        cursor = conn.cursor()
+
+        cursor.execute(f"SELECT * FROM {table_name} ORDER BY disc_number, track_number, position")
+        tracks = []
+
+        for row in cursor.fetchall():
+            track = dict(row)
+
+            # Parse JSON fields
+            json_fields = ["track_data", "artists_data", "album_data", "ids_data",
+                           "status_history", "error_info", "progress_info", "config"]
+
+            for field in json_fields:
+                if track.get(field):
                     try:
-                        spotify_id = url.split("/")[-1]
-                        if spotify_id and len(spotify_id) == 22 and spotify_id.isalnum():
-                            track_history_data["spotify_id"] = spotify_id
-                    except Exception:
+                        track[field] = json.loads(track[field])
+                    except json.JSONDecodeError:
                         pass
-        except (json.JSONDecodeError, KeyError, AttributeError) as e:
-            logger.warning(f"Could not extract track URL for {track_name}: {e}")
+
+            tracks.append(track)
+
+        return tracks
+
+    except Exception as e:
+        logger.error(f"Error getting child tracks from {table_name}: {e}")
+        return []
+    finally:
+        if conn:
+            conn.close()
+
+
+def get_status_history(task_id: str) -> List[Dict]:
+    """Get complete status history for a task."""
+    conn = None
+    try:
+        conn = sqlite3.connect(HISTORY_DB_FILE)
+        conn.row_factory = sqlite3.Row
+        cursor = conn.cursor()
+
+        cursor.execute("""
+            SELECT * FROM status_history
+            WHERE task_id = ?
+            ORDER BY timestamp ASC
+        """, (task_id,))
+
+        history = []
+        for row in cursor.fetchall():
+            entry = dict(row)
+            if entry["status_data"]:
+                try:
+                    entry["status_data"] = json.loads(entry["status_data"])
+                except json.JSONDecodeError:
+                    pass
+            history.append(entry)
+
+        return history
+
+    except Exception as e:
+        logger.error(f"Error getting status history for {task_id}: {e}")
+        return []
+    finally:
+        if conn:
+            conn.close()
+
+
+def process_callback_object(callback_obj: Dict, task_id: str = None):
+    """Process a callback object and update history accordingly."""
+    try:
+        if not task_id:
+            task_id = str(uuid.uuid4())
+
+        # Determine callback type and extract data
+        if "track" in callback_obj:
+            _process_track_callback(callback_obj, task_id)
+        elif "album" in callback_obj:
+            _process_album_callback(callback_obj, task_id)
+        elif "playlist" in callback_obj:
+            _process_playlist_callback(callback_obj, task_id)
+        else:
+            logger.warning(f"Unknown callback object type for task {task_id}")
-
-    # Add entry to history
-    add_entry_to_history(track_history_data)
+    except Exception as e:
+        logger.error(f"Error processing callback object: {e}", exc_info=True)
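The dispatcher above keys off which of `track`/`album`/`playlist` is present in the callback. A plausible trackCallbackObject for a child track, matching the fields read by the handlers below (all values are placeholders):

```python
# Hypothetical callback for one track inside an album download
callback_obj = {
    "track": {
        "title": "Example Track",
        "artists": [{"name": "Example Artist"}],
        "ids": {"spotify": "0000000000000000000000"},
    },
    "status_info": {
        "status": "real-time",  # initializing | retrying | real-time | skipped | error | done
        "time_elapsed": 3.2,
        "progress": 57,
    },
    "parent": {"task_id": "album-task-uuid"},  # present only for child tracks
}

# process_callback_object(callback_obj, task_id="track-row-id") would route this
# to _process_track_callback and append an entry to the track's mini-history.
```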
WHERE task_id = ?", (parent_task_id,)) + result = cursor.fetchone() + + if result and result[0]: + table_name = result[0] + + # Extract progress and error info + progress_info = None + error_info = None + + if status_info.get("status") == "real-time": + progress_info = { + "time_elapsed": status_info.get("time_elapsed", 0), + "progress": status_info.get("progress", 0) + } + elif status_info.get("status") == "retrying": + error_info = { + "retry_count": status_info.get("retry_count", 0), + "seconds_left": status_info.get("seconds_left", 0), + "error": status_info.get("error", "") + } + elif status_info.get("status") == "error": + error_info = { + "message": status_info.get("error", "Unknown error") + } + elif status_info.get("status") == "skipped": + error_info = { + "reason": status_info.get("reason", "Unknown reason") + } + + # Update track status in child table + add_track_status_update( + track_id=task_id, + table_name=table_name, + status_type=status_info.get("status", "initializing"), + status_data=status_info, + progress_info=progress_info, + error_info=error_info + ) + + except Exception as e: + logger.error(f"Error processing child track callback: {e}", exc_info=True) + finally: + if conn: + conn.close() + else: + # Handle standalone track + task_entry = { + "task_id": task_id, + "task_type": "track", + "title": track_data.get("title", ""), + "artists": [{"name": artist.get("name", "")} for artist in track_data.get("artists", [])], + "ids": track_data.get("ids", {}), + "metadata": track_data, + "status_current": status_info.get("status", "initializing"), + "position": callback_obj.get("current_track") + } + + # Set final status based on status_info + if status_info.get("status") == "done": + task_entry["status_final"] = "COMPLETED" + task_entry["timestamp_completed"] = time.time() + elif status_info.get("status") == "error": + task_entry["status_final"] = "ERROR" + task_entry["error_info"] = {"message": status_info.get("error", "")} + elif status_info.get("status") == "skipped": + task_entry["status_final"] = "SKIPPED" + + add_task_to_history(task_entry) + add_status_update(task_id, status_info.get("status", "initializing"), status_info) + + +def _process_album_callback(callback_obj: Dict, task_id: str): + """Process an albumCallbackObject with comprehensive track management.""" + album_data = callback_obj.get("album", {}) + status_info = callback_obj.get("status_info", {}) + + # Create children table for tracks + children_table = create_child_table(task_id, "album") + + # Create main task entry + task_entry = { + "task_id": task_id, + "task_type": "album", + "title": album_data.get("title", ""), + "artists": [{"name": artist.get("name", "")} for artist in album_data.get("artists", [])], + "ids": album_data.get("ids", {}), + "metadata": album_data, + "children_table": children_table, + "status_current": status_info.get("status", "initializing") + } + + # Initialize tracks in child table when album processing starts + if status_info.get("status") == "initializing" and album_data.get("tracks"): + for i, track in enumerate(album_data["tracks"]): + track_data = { + "track_data": track, + "position": i + 1, + "status_current": "initializing", + "timestamp_created": time.time() + } + add_child_track(task_id, track_data) + + # Handle completion with summary + if status_info.get("status") == "done" and status_info.get("summary"): + task_entry["status_final"] = "COMPLETED" + task_entry["timestamp_completed"] = time.time() + task_entry["summary"] = status_info["summary"] + + # Update 
individual tracks in child table based on summary + summary = status_info["summary"] + + # Process successful tracks + for track in summary.get("successful_tracks", []): + if isinstance(track, dict): + # Find matching track in child table and update status + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + # Try to match by title and artist + track_title = track.get("title", "") + cursor.execute( + f"SELECT track_id FROM {children_table} WHERE title = ? AND parent_task_id = ?", + (track_title, task_id) + ) + result = cursor.fetchone() + + if result: + add_track_status_update( + track_id=result[0], + table_name=children_table, + status_type="done", + status_data={"status": "done"}, + progress_info={"progress": 100} + ) + except Exception as e: + logger.error(f"Error updating successful track: {e}") + finally: + if conn: + conn.close() + + # Process skipped tracks + for track in summary.get("skipped_tracks", []): + if isinstance(track, dict): + # Similar matching and update logic + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + track_title = track.get("title", "") + cursor.execute( + f"SELECT track_id FROM {children_table} WHERE title = ? AND parent_task_id = ?", + (track_title, task_id) + ) + result = cursor.fetchone() + + if result: + add_track_status_update( + track_id=result[0], + table_name=children_table, + status_type="skipped", + status_data={"status": "skipped"}, + error_info={"reason": "Skipped during processing"} + ) + except Exception as e: + logger.error(f"Error updating skipped track: {e}") + finally: + if conn: + conn.close() + + # Process failed tracks + for failed_track in summary.get("failed_tracks", []): + track = failed_track.get("track", {}) if isinstance(failed_track, dict) else failed_track + reason = failed_track.get("reason", "Unknown error") if isinstance(failed_track, dict) else "Download failed" + + if isinstance(track, dict): + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + track_title = track.get("title", "") + cursor.execute( + f"SELECT track_id FROM {children_table} WHERE title = ? 
AND parent_task_id = ?", + (track_title, task_id) + ) + result = cursor.fetchone() + + if result: + add_track_status_update( + track_id=result[0], + table_name=children_table, + status_type="error", + status_data={"status": "error"}, + error_info={"message": reason} + ) + except Exception as e: + logger.error(f"Error updating failed track: {e}") + finally: + if conn: + conn.close() + + add_task_to_history(task_entry) + add_status_update(task_id, status_info.get("status", "initializing"), status_info) + + +def _process_playlist_callback(callback_obj: Dict, task_id: str): + """Process a playlistCallbackObject with comprehensive track management.""" + playlist_data = callback_obj.get("playlist", {}) + status_info = callback_obj.get("status_info", {}) + + # Create children table for tracks + children_table = create_child_table(task_id, "playlist") + + # Create main task entry + task_entry = { + "task_id": task_id, + "task_type": "playlist", + "title": playlist_data.get("title", ""), + "metadata": playlist_data, + "children_table": children_table, + "status_current": status_info.get("status", "initializing") + } + + # Add playlist owner info to metadata if available + if playlist_data.get("owner"): + task_entry["metadata"]["owner_info"] = playlist_data["owner"] + + # Initialize tracks in child table when playlist processing starts + if status_info.get("status") == "initializing" and playlist_data.get("tracks"): + for track in playlist_data["tracks"]: + track_data = { + "track_data": track, + "position": track.get("position", 0), + "status_current": "initializing", + "timestamp_created": time.time() + } + add_child_track(task_id, track_data) + + # Handle completion with summary + if status_info.get("status") == "done" and status_info.get("summary"): + task_entry["status_final"] = "COMPLETED" + task_entry["timestamp_completed"] = time.time() + task_entry["summary"] = status_info["summary"] + + # Update individual tracks in child table based on summary + summary = status_info["summary"] + + # Process successful tracks + for track in summary.get("successful_tracks", []): + if isinstance(track, dict): + # Find matching track in child table and update status + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + # Try to match by title and position + track_title = track.get("title", "") + track_position = track.get("position", 0) + cursor.execute( + f"SELECT track_id FROM {children_table} WHERE title = ? AND position = ? AND parent_task_id = ?", + (track_title, track_position, task_id) + ) + result = cursor.fetchone() + + if result: + add_track_status_update( + track_id=result[0], + table_name=children_table, + status_type="done", + status_data={"status": "done"}, + progress_info={"progress": 100} + ) + except Exception as e: + logger.error(f"Error updating successful playlist track: {e}") + finally: + if conn: + conn.close() + + # Process skipped tracks + for track in summary.get("skipped_tracks", []): + if isinstance(track, dict): + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + track_title = track.get("title", "") + track_position = track.get("position", 0) + cursor.execute( + f"SELECT track_id FROM {children_table} WHERE title = ? AND position = ? 
AND parent_task_id = ?", + (track_title, track_position, task_id) + ) + result = cursor.fetchone() + + if result: + add_track_status_update( + track_id=result[0], + table_name=children_table, + status_type="skipped", + status_data={"status": "skipped"}, + error_info={"reason": "Skipped during processing"} + ) + except Exception as e: + logger.error(f"Error updating skipped playlist track: {e}") + finally: + if conn: + conn.close() + + # Process failed tracks + for failed_track in summary.get("failed_tracks", []): + track = failed_track.get("track", {}) if isinstance(failed_track, dict) else failed_track + reason = failed_track.get("reason", "Unknown error") if isinstance(failed_track, dict) else "Download failed" + + if isinstance(track, dict): + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + track_title = track.get("title", "") + track_position = track.get("position", 0) + cursor.execute( + f"SELECT track_id FROM {children_table} WHERE title = ? AND position = ? AND parent_task_id = ?", + (track_title, track_position, task_id) + ) + result = cursor.fetchone() + + if result: + add_track_status_update( + track_id=result[0], + table_name=children_table, + status_type="error", + status_data={"status": "error"}, + error_info={"message": reason} + ) + except Exception as e: + logger.error(f"Error updating failed playlist track: {e}") + finally: + if conn: + conn.close() + + add_task_to_history(task_entry) + add_status_update(task_id, status_info.get("status", "initializing"), status_info) + + +# Legacy compatibility functions +def add_entry_to_history(history_data: dict): + """Legacy compatibility function - converts old format to new.""" + logger.warning("Using legacy add_entry_to_history - consider migrating to add_task_to_history") + + converted = _convert_old_record_to_new(history_data) + if converted: + add_task_to_history(converted) + def add_tracks_from_summary(summary_data, parent_task_id, parent_history_data=None): - """Processes a summary object from a completed task and adds individual track entries. + """Legacy compatibility function - processes a summary object from a completed task and adds individual track entries. Args: summary_data (dict): The summary data containing track lists @@ -427,6 +1063,8 @@ def add_tracks_from_summary(summary_data, parent_task_id, parent_history_data=No Returns: dict: Summary of processed tracks """ + logger.warning("Using legacy add_tracks_from_summary - consider migrating to add_child_track and process_callback_object") + processed = { "successful": 0, "skipped": 0, @@ -437,61 +1075,154 @@ def add_tracks_from_summary(summary_data, parent_task_id, parent_history_data=No logger.warning(f"No summary data provided for task {parent_task_id}") return processed + # Check if parent task has a children table, if not create one + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + cursor = conn.cursor() + + cursor.execute("SELECT children_table, task_type FROM download_tasks WHERE task_id = ?", (parent_task_id,)) + result = cursor.fetchone() + + children_table = None + if result: + children_table = result[0] + task_type = result[1] or "album" + + # Create children table if it doesn't exist + if not children_table: + children_table = create_child_table(parent_task_id, task_type) + cursor.execute("UPDATE download_tasks SET children_table = ? 
WHERE task_id = ?", + (children_table, parent_task_id)) + conn.commit() + else: + # Parent task doesn't exist, create a basic one + logger.warning(f"Parent task {parent_task_id} not found, creating basic entry...") + task_data = { + "task_id": parent_task_id, + "task_type": "album", + "title": "Unknown Album", + "status_final": "COMPLETED", + "children_table": create_child_table(parent_task_id, "album") + } + add_task_to_history(task_data) + children_table = task_data["children_table"] + + except Exception as e: + logger.error(f"Error setting up children table for {parent_task_id}: {e}") + finally: + if conn: + conn.close() + # Process successful tracks for track_entry in summary_data.get("successful_tracks", []): try: - # Parse "track_name - artist_name" format - parts = track_entry.split(" - ", 1) - if len(parts) == 2: - track_name, artist_name = parts - add_track_entry_to_history( - track_name=track_name, - artist_name=artist_name, - parent_task_id=parent_task_id, - track_status="SUCCESSFUL", - parent_history_data=parent_history_data - ) - processed["successful"] += 1 + # Parse "track_name - artist_name" format or handle trackObject + if isinstance(track_entry, dict): + # Handle trackObject + track_data = { + "track_data": track_entry, + "status_final": "COMPLETED", + "timestamp_completed": time.time() + } else: - logger.warning(f"Could not parse track entry: {track_entry}") + # Handle string format "track_name - artist_name" + parts = track_entry.split(" - ", 1) + if len(parts) == 2: + track_name, artist_name = parts + track_data = { + "track_data": { + "title": track_name, + "artists": [{"name": artist_name}] + }, + "status_final": "COMPLETED", + "timestamp_completed": time.time() + } + else: + logger.warning(f"Could not parse track entry: {track_entry}") + continue + + add_child_track(parent_task_id, track_data) + processed["successful"] += 1 + except Exception as e: logger.error(f"Error processing successful track {track_entry}: {e}", exc_info=True) # Process skipped tracks for track_entry in summary_data.get("skipped_tracks", []): try: - parts = track_entry.split(" - ", 1) - if len(parts) == 2: - track_name, artist_name = parts - add_track_entry_to_history( - track_name=track_name, - artist_name=artist_name, - parent_task_id=parent_task_id, - track_status="SKIPPED", - parent_history_data=parent_history_data - ) - processed["skipped"] += 1 + if isinstance(track_entry, dict): + # Handle trackObject + track_data = { + "track_data": track_entry, + "status_final": "SKIPPED", + "timestamp_completed": time.time() + } else: - logger.warning(f"Could not parse skipped track entry: {track_entry}") + # Handle string format + parts = track_entry.split(" - ", 1) + if len(parts) == 2: + track_name, artist_name = parts + track_data = { + "track_data": { + "title": track_name, + "artists": [{"name": artist_name}] + }, + "status_final": "SKIPPED", + "timestamp_completed": time.time() + } + else: + logger.warning(f"Could not parse skipped track entry: {track_entry}") + continue + + add_child_track(parent_task_id, track_data) + processed["skipped"] += 1 + except Exception as e: logger.error(f"Error processing skipped track {track_entry}: {e}", exc_info=True) - # Process failed tracks + # Process failed tracks for track_entry in summary_data.get("failed_tracks", []): try: - parts = track_entry.split(" - ", 1) - if len(parts) == 2: - track_name, artist_name = parts - add_track_entry_to_history( - track_name=track_name, - artist_name=artist_name, - parent_task_id=parent_task_id, - track_status="FAILED", 
- parent_history_data=parent_history_data - ) - processed["failed"] += 1 + if isinstance(track_entry, dict): + # Handle failedTrackObject or trackObject + if "track" in track_entry: + # failedTrackObject format + track_obj = track_entry["track"] + error_reason = track_entry.get("reason", "Unknown error") + track_data = { + "track_data": track_obj, + "status_final": "ERROR", + "error_info": {"message": error_reason}, + "timestamp_completed": time.time() + } + else: + # Plain trackObject + track_data = { + "track_data": track_entry, + "status_final": "ERROR", + "timestamp_completed": time.time() + } else: - logger.warning(f"Could not parse failed track entry: {track_entry}") + # Handle string format + parts = track_entry.split(" - ", 1) + if len(parts) == 2: + track_name, artist_name = parts + track_data = { + "track_data": { + "title": track_name, + "artists": [{"name": artist_name}] + }, + "status_final": "ERROR", + "timestamp_completed": time.time() + } + else: + logger.warning(f"Could not parse failed track entry: {track_entry}") + continue + + add_child_track(parent_task_id, track_data) + processed["failed"] += 1 + except Exception as e: logger.error(f"Error processing failed track {track_entry}: {e}", exc_info=True) @@ -503,101 +1234,181 @@ def add_tracks_from_summary(summary_data, parent_task_id, parent_history_data=No return processed +def get_history_entries(limit=25, offset=0, sort_by="timestamp_completed", sort_order="DESC", filters=None): + """Legacy compatibility function.""" + logger.warning("Using legacy get_history_entries - consider migrating to get_task_history") + + # Map old sort_by to new fields + sort_mapping = { + "timestamp_completed": "timestamp_completed", + "timestamp_added": "timestamp_created", + "item_name": "title" + } + + new_sort_by = sort_mapping.get(sort_by, "timestamp_updated") + return get_task_history(limit, offset, new_sort_by, sort_order, filters) + + +def get_track_mini_history(track_id: str, table_name: str) -> Dict: + """Get comprehensive mini-history for a specific track.""" + conn = None + try: + conn = sqlite3.connect(HISTORY_DB_FILE) + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + + cursor.execute(f"SELECT * FROM {table_name} WHERE track_id = ?", (track_id,)) + result = cursor.fetchone() + + if not result: + return {} + + track_info = dict(result) + + # Parse JSON fields + json_fields = ["track_data", "artists_data", "album_data", "ids_data", + "status_history", "error_info", "progress_info", "config"] + + for field in json_fields: + if track_info.get(field): + try: + track_info[field] = json.loads(track_info[field]) + except json.JSONDecodeError: + pass + + # Calculate duration statistics + if track_info.get("timestamp_started") and track_info.get("timestamp_completed"): + track_info["calculated_duration"] = track_info["timestamp_completed"] - track_info["timestamp_started"] + + # Add progress timeline + if track_info.get("status_history"): + track_info["timeline"] = [] + for entry in track_info["status_history"]: + timeline_entry = { + "timestamp": entry.get("timestamp"), + "status": entry.get("status_type"), + "readable_time": datetime.fromtimestamp(entry.get("timestamp", 0)).isoformat() if entry.get("timestamp") else None + } + if entry.get("progress_info"): + timeline_entry["progress"] = entry["progress_info"] + if entry.get("error_info"): + timeline_entry["error"] = entry["error_info"] + track_info["timeline"].append(timeline_entry) + + return track_info + + except Exception as e: + logger.error(f"Error getting track 
mini-history: {e}") + return {} + finally: + if conn: + conn.close() + + if __name__ == "__main__": - # For testing purposes + # Test the enhanced system logging.basicConfig(level=logging.INFO) init_history_db() - - sample_data_complete = { - "task_id": "test_task_123", - "download_type": "track", - "item_name": "Test Song", - "item_artist": "Test Artist", - "item_album": "Test Album", - "item_url": "http://spotify.com/track/123", - "spotify_id": "123", + + # Test track task + track_task = { + "task_id": "test_track_001", + "task_type": "track", + "title": "Test Song", + "artists": [{"name": "Test Artist"}], + "ids": {"spotify": "track123"}, "status_final": "COMPLETED", - "error_message": None, - "timestamp_added": time.time() - 3600, - "timestamp_completed": time.time(), - "original_request_json": json.dumps({"param1": "value1"}), - "last_status_obj_json": json.dumps( - {"status": "complete", "message": "Finished!"} - ), - "service_used": "Spotify (Primary)", - "quality_profile": "NORMAL", - "convert_to": None, - "bitrate": None, + "metadata": { + "type": "track", + "title": "Test Song", + "duration_ms": 240000, + "artists": [{"name": "Test Artist"}] + }, + "config": {"quality_profile": "NORMAL", "service_used": "Spotify"} } - add_entry_to_history(sample_data_complete) - - sample_data_error = { - "task_id": "test_task_456", - "download_type": "album", - "item_name": "Another Album", - "item_artist": "Another Artist", - "item_album": "Another Album", # For albums, item_name and item_album are often the same - "item_url": "http://spotify.com/album/456", - "spotify_id": "456", - "status_final": "ERROR", - "error_message": "Download failed due to network issue.", - "timestamp_added": time.time() - 7200, - "timestamp_completed": time.time() - 60, - "original_request_json": json.dumps({"param2": "value2"}), - "last_status_obj_json": json.dumps( - {"status": "error", "error": "Network issue"} - ), - "service_used": "Deezer", - "quality_profile": "MP3_320", - "convert_to": "mp3", - "bitrate": "320", + + add_task_to_history(track_task) + + # Test album task with comprehensive track management + album_task = { + "task_id": "test_album_001", + "task_type": "album", + "title": "Test Album", + "artists": [{"name": "Test Artist"}], + "ids": {"spotify": "album123"}, + "children_table": create_child_table("test_album_001", "album") } - add_entry_to_history(sample_data_error) - - # Test updating an entry - updated_data_complete = { - "task_id": "test_task_123", - "download_type": "track", - "item_name": "Test Song (Updated)", - "item_artist": "Test Artist", - "item_album": "Test Album II", - "item_url": "http://spotify.com/track/123", - "spotify_id": "123", - "status_final": "COMPLETED", - "error_message": None, - "timestamp_added": time.time() - 3600, - "timestamp_completed": time.time() + 100, # Updated completion time - "original_request_json": json.dumps({"param1": "value1", "new_param": "added"}), - "last_status_obj_json": json.dumps( - {"status": "complete", "message": "Finished! 
With update."} - ), - "service_used": "Spotify (Deezer Fallback)", - "quality_profile": "HIGH", - "convert_to": "flac", - "bitrate": None, - } - add_entry_to_history(updated_data_complete) - - print(f"Test entries added/updated in {HISTORY_DB_FILE}") - - print("\nFetching all history entries (default sort):") - entries, total = get_history_entries(limit=5) - print(f"Total entries: {total}") - for entry in entries: - print(entry) - - print("\nFetching history entries (sorted by item_name ASC, limit 2, offset 1):") - entries_sorted, total_sorted = get_history_entries( - limit=2, offset=1, sort_by="item_name", sort_order="ASC" - ) - print(f"Total entries (should be same as above): {total_sorted}") - for entry in entries_sorted: - print(entry) - - print("\nFetching history entries with filter (status_final = COMPLETED):") - entries_filtered, total_filtered = get_history_entries( - filters={"status_final": "COMPLETED"} - ) - print(f"Total COMPLETED entries: {total_filtered}") - for entry in entries_filtered: - print(entry) + + add_task_to_history(album_task) + + # Add tracks with comprehensive data to the album + for i in range(3): + track_data = { + "track_data": { + "title": f"Track {i+1}", + "track_number": i+1, + "disc_number": 1, + "duration_ms": 180000 + (i * 20000), + "explicit": False, + "artists": [{"name": "Test Artist", "ids": {"spotify": f"artist{i}"}}], + "ids": {"spotify": f"track{i}", "isrc": f"TEST{i:03d}"} + }, + "position": i+1, + "status_current": "initializing", + "status_history": [ + { + "timestamp": time.time() - 300, + "status_type": "initializing", + "status_data": {"status": "initializing"} + }, + { + "timestamp": time.time() - 200, + "status_type": "real-time", + "status_data": {"status": "real-time", "progress": 50}, + "progress_info": {"progress": 50, "time_elapsed": 100} + }, + { + "timestamp": time.time() - 100, + "status_type": "done", + "status_data": {"status": "done"}, + "progress_info": {"progress": 100} + } + ], + "timestamp_started": time.time() - 300, + "timestamp_completed": time.time() - 100, + "status_final": "COMPLETED", + "time_elapsed": 200, + "quality_achieved": "FLAC 1411kbps", + "file_size": 45000000 + (i * 5000000), + "download_path": f"/downloads/Test Album/Track {i+1}.flac" + } + track_id = add_child_track("test_album_001", track_data) + print(f"Added track with comprehensive data: {track_id}") + + # Test retrieval + tasks, total = get_task_history(limit=10, include_children=True) + print(f"\nFound {total} tasks:") + for task in tasks: + print(f"- {task['title']} ({task['task_type']}) - {task.get('status_final', 'N/A')}") + if task.get('child_tracks'): + print(f" {len(task['child_tracks'])} child tracks:") + for child in task['child_tracks'][:2]: # Show first 2 tracks + print(f" • {child.get('title', 'Unknown')} - {child.get('status_final', 'N/A')}") + if child.get("status_history"): + print(f" Status changes: {len(child['status_history'])}") + if child.get("quality_achieved"): + print(f" Quality: {child['quality_achieved']}") + + # Test track mini-history + if tasks: + for task in tasks: + if task.get('child_tracks'): + first_track = task['child_tracks'][0] + mini_history = get_track_mini_history(first_track['track_id'], task['children_table']) + if mini_history.get('timeline'): + print(f"\nMini-history for '{mini_history.get('title', 'Unknown')}':") + for event in mini_history['timeline']: + print(f" {event['readable_time']}: {event['status']}") + if event.get('progress'): + print(f" Progress: {event['progress']}") + break diff --git 
a/spotizerr-ui/src/contexts/QueueProvider.tsx b/spotizerr-ui/src/contexts/QueueProvider.tsx index bb6c62e..6e43eaa 100644 --- a/spotizerr-ui/src/contexts/QueueProvider.tsx +++ b/spotizerr-ui/src/contexts/QueueProvider.tsx @@ -1,4 +1,4 @@ -import { useState, useCallback, type ReactNode, useEffect, useRef } from "react"; +import { useState, useCallback, type ReactNode, useEffect, useRef, useMemo } from "react"; import apiClient from "../lib/api-client"; import { QueueContext, @@ -41,6 +41,11 @@ export function QueueProvider({ children }: { children: ReactNode }) { const [isVisible, setIsVisible] = useState(false); const pollingIntervals = useRef>({}); + // Calculate active downloads count + const activeCount = useMemo(() => { + return items.filter(item => !isTerminalStatus(item.status)).length; + }, [items]); + const stopPolling = useCallback((internalId: string) => { if (pollingIntervals.current[internalId]) { clearInterval(pollingIntervals.current[internalId]); @@ -180,7 +185,6 @@ export function QueueProvider({ children }: { children: ReactNode }) { status: "initializing", }; setItems(prev => [newItem, ...prev]); - setIsVisible(true); try { const response = await apiClient.get<{ task_id: string }>( @@ -398,6 +402,7 @@ export function QueueProvider({ children }: { children: ReactNode }) { const value = { items, isVisible, + activeCount, addItem, removeItem, retryItem, diff --git a/spotizerr-ui/src/contexts/queue-context.ts b/spotizerr-ui/src/contexts/queue-context.ts index 7b57737..fd75688 100644 --- a/spotizerr-ui/src/contexts/queue-context.ts +++ b/spotizerr-ui/src/contexts/queue-context.ts @@ -44,6 +44,7 @@ export interface QueueItem { export interface QueueContextType { items: QueueItem[]; isVisible: boolean; + activeCount: number; addItem: (item: { name: string; type: DownloadType; spotifyId: string; artist?: string }) => void; removeItem: (id: string) => void; retryItem: (id: string) => void; diff --git a/spotizerr-ui/src/routes/history.tsx b/spotizerr-ui/src/routes/history.tsx index bd042ec..2507847 100644 --- a/spotizerr-ui/src/routes/history.tsx +++ b/spotizerr-ui/src/routes/history.tsx @@ -11,26 +11,127 @@ import { } from "@tanstack/react-table"; // --- Type Definitions --- +type TimelineEntry = { + status_type: string; + timestamp: number; + human_readable: string; + status_data: any; +}; + +type TrackMiniHistory = { + track_id: string; + parent_task_id: string; + position: number; + disc_number?: number; + track_number?: number; + title: string; + duration_ms?: number; + explicit?: boolean; + artists_data?: Array<{ name: string; [key: string]: any }>; + album_data?: any; + ids_data?: { spotify?: string; deezer?: string; isrc?: string; upc?: string }; + status_current: string; + status_final: string; + timestamp_created: number; + timestamp_completed?: number; + timestamp_started?: number; + time_elapsed?: number; + calculated_duration?: string; + retry_count: number; + progress_info?: any; + download_path?: string; + file_size?: number; + quality_achieved?: string; + error_info?: { message?: string; [key: string]: any }; + config?: any; + status_history: Array; + timeline: TimelineEntry[]; +}; + type HistoryEntry = { task_id: string; - item_name: string; - item_artist: string; + task_type: "track" | "album" | "playlist" | "artist"; + title: string; + status_current?: string; + status_final?: "COMPLETED" | "ERROR" | "CANCELLED" | "SKIPPED"; + timestamp_created?: number; + timestamp_updated?: number; + timestamp_completed?: number; + parent_task_id?: string; + position?: number; 
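+  // The fields above mirror the new download_tasks schema; the optional
+  // groups below carry parsed-JSON payloads plus computed/legacy aliases
+  // kept so older UI code continues to work.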
+ + // Rich data fields + artists?: Array<{ name: string; [key: string]: any }>; + ids?: { spotify?: string; deezer?: string; isrc?: string; upc?: string }; + metadata?: any; + config?: { + service_used?: string; + quality_profile?: string; + convert_to?: string; + bitrate?: string; + [key: string]: any; + }; + error_info?: { message?: string; [key: string]: any }; + progress?: any; + summary?: { + total_successful?: number; + total_skipped?: number; + total_failed?: number; + [key: string]: any; + }; + + // Child information + children_table?: string; + has_children?: boolean; + child_tracks?: Array; + child_track_count?: number; + child_track_summary?: { + completed: number; + error: number; + skipped: number; + }; + + // Mini-history fields (when included) + mini_history?: TrackMiniHistory; + timeline?: TimelineEntry[]; + retry_count?: number; + time_elapsed?: number; + quality_achieved?: string; + file_size?: number; + download_path?: string; + + // Computed/Legacy compatibility fields + artist_names?: string[]; + item_name?: string; + item_artist?: string; + item_album?: string; item_url?: string; - download_type: "track" | "album" | "playlist" | "artist"; - service_used: string; - quality_profile: string; + download_type?: string; + service_used?: string; + quality_profile?: string; convert_to?: string; bitrate?: string; - status_final: "COMPLETED" | "ERROR" | "CANCELLED" | "SKIPPED"; - timestamp_completed: number; error_message?: string; - parent_task_id?: string; - track_status?: "SUCCESSFUL" | "SKIPPED" | "FAILED"; + timestamp_added?: number; + track_status?: string; total_successful?: number; total_skipped?: number; total_failed?: number; }; +type TaskDetails = { + task: HistoryEntry & { + status_history?: Array<{ + status_id: number; + status_type: string; + status_data: any; + timestamp: number; + }>; + }; + include_children: boolean; + include_status_history: boolean; +}; + const STATUS_CLASS: Record = { COMPLETED: "text-success", ERROR: "text-error", @@ -52,29 +153,63 @@ const QUALITY_MAP: Record> = { }; const getDownloadSource = (entry: HistoryEntry): "Spotify" | "Deezer" | "Unknown" => { + // Check metadata first + if (entry.metadata?.url) { + const url = entry.metadata.url.toLowerCase(); + if (url.includes("spotify.com")) return "Spotify"; + if (url.includes("deezer.com")) return "Deezer"; + } + + // Check legacy fields const url = entry.item_url?.toLowerCase() || ""; - const service = entry.service_used?.toLowerCase() || ""; - if (url.includes("spotify.com")) return "Spotify"; - if (url.includes("deezer.com")) return "Deezer"; - if (service.includes("spotify")) return "Spotify"; - if (service.includes("deezer")) return "Deezer"; + const service = entry.service_used?.toLowerCase() || entry.config?.service_used?.toLowerCase() || ""; + if (url.includes("spotify.com") || service.includes("spotify")) return "Spotify"; + if (url.includes("deezer.com") || service.includes("deezer")) return "Deezer"; + + // Check IDs + if (entry.ids?.spotify) return "Spotify"; + if (entry.ids?.deezer) return "Deezer"; + return "Unknown"; }; const formatQuality = (entry: HistoryEntry): string => { const sourceName = getDownloadSource(entry).toLowerCase(); - const profile = entry.quality_profile || "N/A"; + const profile = entry.quality_profile || entry.config?.quality_profile || "N/A"; const sourceQuality = sourceName !== "unknown" ? 
QUALITY_MAP[sourceName]?.[profile] || profile : profile; let qualityDisplay = sourceQuality; - if (entry.convert_to && entry.convert_to !== "None") { - qualityDisplay += ` → ${entry.convert_to.toUpperCase()}`; - if (entry.bitrate && entry.bitrate !== "None") { - qualityDisplay += ` ${entry.bitrate}`; + + const convertTo = entry.convert_to || entry.config?.convert_to; + const bitrate = entry.bitrate || entry.config?.bitrate; + + if (convertTo && convertTo !== "None") { + qualityDisplay += ` → ${convertTo.toUpperCase()}`; + if (bitrate && bitrate !== "None") { + qualityDisplay += ` ${bitrate}`; } } return qualityDisplay; }; +const formatFileSize = (bytes?: number): string => { + if (!bytes) return "N/A"; + const sizes = ['B', 'KB', 'MB', 'GB']; + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + return `${(bytes / Math.pow(1024, i)).toFixed(1)} ${sizes[i]}`; +}; + +const formatDuration = (seconds?: number): string => { + if (!seconds) return "N/A"; + const hours = Math.floor(seconds / 3600); + const minutes = Math.floor((seconds % 3600) / 60); + const secs = Math.floor(seconds % 60); + + if (hours > 0) { + return `${hours}:${minutes.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`; + } + return `${minutes}:${secs.toString().padStart(2, '0')}`; +}; + // --- Column Definitions --- const columnHelper = createColumnHelper(); @@ -82,9 +217,13 @@ export const History = () => { const [data, setData] = useState([]); const [totalEntries, setTotalEntries] = useState(0); const [isLoading, setIsLoading] = useState(true); + const [selectedTask, setSelectedTask] = useState(null); + const [selectedTrackMiniHistory, setSelectedTrackMiniHistory] = useState(null); + const [showMiniHistories, setShowMiniHistories] = useState(false); + const [isMiniHistoryLoading, setIsMiniHistoryLoading] = useState(false); // State for TanStack Table - const [sorting, setSorting] = useState([{ id: "timestamp_completed", desc: true }]); + const [sorting, setSorting] = useState([{ id: "timestamp_updated", desc: true }]); const [{ pageIndex, pageSize }, setPagination] = useState({ pageIndex: 0, pageSize: 25, @@ -93,42 +232,205 @@ export const History = () => { // State for filters const [statusFilter, setStatusFilter] = useState(""); const [typeFilter, setTypeFilter] = useState(""); - const [trackStatusFilter, setTrackStatusFilter] = useState(""); - const [showChildTracks, setShowChildTracks] = useState(false); + const [currentStatusFilter, setCurrentStatusFilter] = useState(""); + const [hideChildTracks, setHideChildTracks] = useState(true); + const [includeChildren, setIncludeChildren] = useState(false); const [parentTaskId, setParentTaskId] = useState(null); const [parentTask, setParentTask] = useState(null); const pagination = useMemo(() => ({ pageIndex, pageSize }), [pageIndex, pageSize]); const viewTracksForParent = useCallback( - (parentEntry: HistoryEntry) => { - setPagination({ pageIndex: 0, pageSize }); - setParentTaskId(parentEntry.task_id); - setParentTask(parentEntry); - setStatusFilter(""); - setTypeFilter(""); - setTrackStatusFilter(""); + async (parentEntry: HistoryEntry) => { + try { + const response = await apiClient.get<{ + parent_task_id: string; + parent_task_info: { + title: string; + task_type: string; + status_final: string; + }; + tracks: Array; + total_count: number; + }>(`/history/tracks/${parentEntry.task_id}`); + + // Transform tracks to match our HistoryEntry structure + const transformedTracks = response.data.tracks.map(track => ({ + task_id: track.track_id, + task_type: 
"track" as const, + title: track.title || "Unknown Track", + status_final: track.status_final, + timestamp_completed: track.timestamp_completed, + parent_task_id: track.parent_task_id, + position: track.position, + artists: track.artists || [], + artist_names: track.artist_names || [], + item_name: track.title || "Unknown Track", + item_artist: track.artist_names?.join(", ") || "", + download_type: "track", + config: track.config, + error_info: track.error_info, + // Mini-history fields if available + mini_history: track.mini_history, + timeline: track.timeline, + retry_count: track.retry_count, + time_elapsed: track.time_elapsed, + quality_achieved: track.quality_achieved, + file_size: track.file_size, + download_path: track.download_path, + // Legacy compatibility + service_used: track.config?.service_used, + quality_profile: track.config?.quality_profile, + convert_to: track.config?.convert_to, + bitrate: track.config?.bitrate, + error_message: track.error_info?.message, + })); + + setPagination({ pageIndex: 0, pageSize }); + setParentTaskId(parentEntry.task_id); + setParentTask({ + ...parentEntry, + item_name: parentEntry.title || parentEntry.item_name, + item_artist: parentEntry.artist_names?.join(", ") || parentEntry.item_artist, + }); + setData(transformedTracks); + setTotalEntries(response.data.total_count); + setStatusFilter(""); + setTypeFilter(""); + setCurrentStatusFilter(""); + } catch (error) { + toast.error("Failed to load tracks for this task."); + console.error("Error loading tracks:", error); + } }, [pageSize], ); + const viewTaskDetails = useCallback( + async (taskId: string) => { + try { + const response = await apiClient.get( + `/history/task/${taskId}?include_children=true&include_status_history=true` + ); + setSelectedTask(response.data); + } catch (error) { + toast.error("Failed to load task details."); + console.error("Error loading task details:", error); + } + }, + [], + ); + + const viewTrackMiniHistory = useCallback( + async (parentTaskId: string, trackId: string) => { + setIsMiniHistoryLoading(true); + try { + const response = await apiClient.get<{ + parent_task_id: string; + parent_task_info: any; + track_mini_history: TrackMiniHistory; + }>(`/history/track/${parentTaskId}/${trackId}/mini-history`); + setSelectedTrackMiniHistory(response.data.track_mini_history); + } catch (error) { + toast.error("Failed to load track mini-history."); + console.error("Error loading track mini-history:", error); + } finally { + setIsMiniHistoryLoading(false); + } + }, + [], + ); + + const loadTracksWithMiniHistories = useCallback( + async (parentTaskId: string) => { + try { + const response = await apiClient.get<{ + parent_task_id: string; + parent_task_info: any; + tracks: Array; + total_count: number; + include_mini_histories: boolean; + }>(`/history/tracks/${parentTaskId}?include_mini_histories=true`); + + const transformedTracks = response.data.tracks.map(track => ({ + task_id: track.track_id, + task_type: "track" as const, + title: track.title || "Unknown Track", + status_final: track.status_final, + timestamp_completed: track.timestamp_completed, + parent_task_id: track.parent_task_id, + position: track.position, + artists: track.artists || [], + artist_names: track.artist_names || [], + item_name: track.title || "Unknown Track", + item_artist: track.artist_names?.join(", ") || "", + download_type: "track", + config: track.config, + error_info: track.error_info, + // Mini-history fields if available + mini_history: track.mini_history, + timeline: track.timeline, + 
retry_count: track.retry_count, + time_elapsed: track.time_elapsed, + quality_achieved: track.quality_achieved, + file_size: track.file_size, + download_path: track.download_path, + // Legacy compatibility + service_used: track.config?.service_used, + quality_profile: track.config?.quality_profile, + convert_to: track.config?.convert_to, + bitrate: track.config?.bitrate, + error_message: track.error_info?.message, + })); + + setData(transformedTracks); + setTotalEntries(response.data.total_count); + setShowMiniHistories(true); + } catch (error) { + toast.error("Failed to load tracks with mini-histories."); + console.error("Error loading tracks with mini-histories:", error); + } + }, + [], + ); + const columns = useMemo( () => [ - columnHelper.accessor("item_name", { + columnHelper.accessor("title", { header: "Name", - cell: (info) => - info.row.original.parent_task_id ? ( - └─ {info.getValue()} + cell: (info) => { + const entry = info.row.original; + const displayName = entry.title || entry.item_name || "Unknown"; + return entry.parent_task_id ? ( + └─ {displayName} ) : ( - {info.getValue()} - ), +
+ {displayName} + {entry.has_children && ( + + {entry.child_track_count || "N/A"} tracks + + )} +
+ ); + }, }), - columnHelper.accessor("item_artist", { header: "Artist" }), - columnHelper.accessor("download_type", { + columnHelper.accessor("artist_names", { + header: "Artist", + cell: (info) => { + const entry = info.row.original; + return entry.artist_names?.join(", ") || entry.item_artist || "Unknown Artist"; + }, + }), + columnHelper.accessor("task_type", { header: "Type", - cell: (info) => {info.getValue()}, + cell: (info) => { + const type = info.getValue() || info.row.original.download_type || "unknown"; + return {type}; + }, }), - columnHelper.accessor("quality_profile", { + columnHelper.accessor("config", { + id: "quality", header: "Quality", cell: (info) => formatQuality(info.row.original), }), @@ -136,29 +438,51 @@ export const History = () => { header: "Status", cell: (info) => { const entry = info.row.original; - const status = entry.parent_task_id ? entry.track_status : entry.status_final; + const status = entry.status_final || entry.track_status; const statusKey = (status || "").toUpperCase(); - const statusClass = - { - COMPLETED: "text-success", - SUCCESSFUL: "text-success", - ERROR: "text-error", - FAILED: "text-error", - CANCELLED: "text-content-muted dark:text-content-muted-dark", - SKIPPED: "text-warning", - }[statusKey] || "text-gray-500"; + const statusClass = STATUS_CLASS[statusKey] || "text-gray-500"; - return {status}; + return ( +
+ {status || "Unknown"} + {entry.status_current && entry.status_current !== status && ( + + ({entry.status_current}) + + )} +
+ ); }, }), - columnHelper.accessor("item_url", { + columnHelper.display({ id: "source", header: parentTaskId ? "Download Source" : "Search Source", cell: (info) => getDownloadSource(info.row.original), }), + ...(showMiniHistories && parentTaskId ? [ + columnHelper.accessor("retry_count", { + header: "Retries", + cell: (info) => info.getValue() || 0, + }), + columnHelper.accessor("time_elapsed", { + header: "Duration", + cell: (info) => formatDuration(info.getValue()), + }), + columnHelper.accessor("file_size", { + header: "File Size", + cell: (info) => formatFileSize(info.getValue()), + }), + columnHelper.accessor("quality_achieved", { + header: "Quality", + cell: (info) => info.getValue() || "N/A", + }), + ] : []), columnHelper.accessor("timestamp_completed", { header: "Date Completed", - cell: (info) => new Date(info.getValue() * 1000).toLocaleString(), + cell: (info) => { + const timestamp = info.getValue() || info.row.original.timestamp_updated; + return timestamp ? new Date(timestamp * 1000).toLocaleString() : "N/A"; + }, }), ...(!parentTaskId ? [ @@ -167,111 +491,120 @@ export const History = () => { header: "Actions", cell: ({ row }) => { const entry = row.original; - if (!entry.parent_task_id && (entry.download_type === "album" || entry.download_type === "playlist")) { - const hasChildren = + if (!entry.parent_task_id && (entry.task_type === "album" || entry.task_type === "playlist" || entry.download_type === "album" || entry.download_type === "playlist")) { + const hasChildren = entry.has_children || (entry.total_successful ?? 0) > 0 || (entry.total_skipped ?? 0) > 0 || (entry.total_failed ?? 0) > 0; - if (hasChildren) { - return ( -
- - - {entry.total_successful ?? 0} /{" "} - {entry.total_skipped ?? 0} /{" "} - {entry.total_failed ?? 0} - -
- ); - } + + return ( +
+ {hasChildren && ( + <> + + + + + {entry.child_track_summary?.completed || entry.total_successful || 0} + /{" "} + + {entry.child_track_summary?.skipped || entry.total_skipped || 0} + /{" "} + + {entry.child_track_summary?.error || entry.total_failed || 0} + + + + )} +
+ ); } - return null; + + // For tracks in parent task view with mini-histories + if (parentTaskId && entry.task_type === "track") { + return ( +
+ + {showMiniHistories && ( + + )} +
+ ); + } + + return ( + + ); }, }), ] : []), ], - [viewTracksForParent, parentTaskId], + [viewTracksForParent, viewTaskDetails, loadTracksWithMiniHistories, viewTrackMiniHistory, parentTaskId, showMiniHistories], ); useEffect(() => { const fetchHistory = async () => { + if (parentTaskId) return; // Skip if we're viewing parent tracks (handled separately) + setIsLoading(true); setData([]); try { const params = new URLSearchParams({ limit: `${pageSize}`, offset: `${pageIndex * pageSize}`, - sort_by: sorting[0]?.id ?? "timestamp_completed", + sort_by: sorting[0]?.id ?? "timestamp_updated", sort_order: sorting[0]?.desc ? "DESC" : "ASC", + include_children: includeChildren.toString(), }); + if (statusFilter) params.append("status_final", statusFilter); - if (typeFilter) params.append("download_type", typeFilter); - if (trackStatusFilter) params.append("track_status", trackStatusFilter); - if (!parentTaskId && !showChildTracks) { - params.append("hide_child_tracks", "true"); - } - if (parentTaskId) params.append("parent_task_id", parentTaskId); + if (typeFilter) params.append("task_type", typeFilter); + if (currentStatusFilter) params.append("status_current", currentStatusFilter); + if (hideChildTracks) params.append("hide_child_tracks", "true"); const response = await apiClient.get<{ entries: HistoryEntry[]; total_count: number; + include_children: boolean; }>(`/history?${params.toString()}`); - const originalEntries = response.data.entries; - let processedEntries = originalEntries; - - // If including child tracks in the main history, group them with their parents - if (showChildTracks && !parentTaskId) { - const parents = originalEntries.filter((e) => !e.parent_task_id); - const childrenByParentId = originalEntries - .filter((e) => e.parent_task_id) - .reduce( - (acc, child) => { - const parentId = child.parent_task_id!; - if (!acc[parentId]) { - acc[parentId] = []; - } - acc[parentId].push(child); - return acc; - }, - {} as Record, - ); - - const groupedEntries: HistoryEntry[] = []; - parents.forEach((parent) => { - groupedEntries.push(parent); - const children = childrenByParentId[parent.task_id]; - if (children) { - groupedEntries.push(...children); - } - }); - processedEntries = groupedEntries; - } - - // If viewing child tracks for a specific parent, filter out the parent entry from the list - const finalEntries = parentTaskId - ? 
processedEntries.filter((entry) => entry.task_id !== parentTaskId) - : processedEntries; - - setData(finalEntries); - - // Adjust total count to reflect filtered entries for accurate pagination - const numFiltered = originalEntries.length - finalEntries.length; - setTotalEntries(response.data.total_count - numFiltered); - } catch { + setData(response.data.entries); + setTotalEntries(response.data.total_count); + } catch (error) { toast.error("Failed to load history."); + console.error("Error loading history:", error); } finally { setIsLoading(false); } }; fetchHistory(); - }, [pageIndex, pageSize, sorting, statusFilter, typeFilter, trackStatusFilter, showChildTracks, parentTaskId]); + }, [pageIndex, pageSize, sorting, statusFilter, typeFilter, currentStatusFilter, hideChildTracks, includeChildren, parentTaskId]); const table = useReactTable({ data, @@ -289,19 +622,263 @@ export const History = () => { const clearFilters = () => { setStatusFilter(""); setTypeFilter(""); - setTrackStatusFilter(""); - setShowChildTracks(false); + setCurrentStatusFilter(""); + setHideChildTracks(true); + setIncludeChildren(false); }; const viewParentTask = () => { setPagination({ pageIndex: 0, pageSize }); setParentTaskId(null); setParentTask(null); + setShowMiniHistories(false); clearFilters(); }; + const closeTaskDetails = () => { + setSelectedTask(null); + }; + + const closeMiniHistory = () => { + setSelectedTrackMiniHistory(null); + }; + return (
+      {/* Task Details Modal */}
+      {selectedTask && (
+        <div>
+          <div>
+            <div>
+              <h2>Task Details</h2>
+              <button onClick={closeTaskDetails}>×</button>
+            </div>
+
+            <div>
+              <div>
+                <h3>Basic Info</h3>
+                <div>ID: {selectedTask.task.task_id}</div>
+                <div>Type: {selectedTask.task.task_type}</div>
+                <div>Title: {selectedTask.task.title}</div>
+                <div>Artists: {selectedTask.task.artist_names?.join(", ") || "N/A"}</div>
+                <div>Status: {selectedTask.task.status_final || "N/A"}</div>
+              </div>
+              <div>
+                <h3>Timestamps</h3>
+                <div>Created: {selectedTask.task.timestamp_created ? new Date(selectedTask.task.timestamp_created * 1000).toLocaleString() : "N/A"}</div>
+                <div>Updated: {selectedTask.task.timestamp_updated ? new Date(selectedTask.task.timestamp_updated * 1000).toLocaleString() : "N/A"}</div>
+                <div>Completed: {selectedTask.task.timestamp_completed ? new Date(selectedTask.task.timestamp_completed * 1000).toLocaleString() : "N/A"}</div>
+              </div>
+            </div>
+
+            {selectedTask.task.config && (
+              <div>
+                <h3>Configuration</h3>
+                <pre>{JSON.stringify(selectedTask.task.config, null, 2)}</pre>
+              </div>
+            )}
+
+            {selectedTask.task.error_info && (
+              <div>
+                <h3>Error Information</h3>
+                <pre>{JSON.stringify(selectedTask.task.error_info, null, 2)}</pre>
+              </div>
+            )}
+
+            {selectedTask.task.child_tracks && selectedTask.task.child_tracks.length > 0 && (
+              <div>
+                <h3>Child Tracks ({selectedTask.task.child_tracks.length})</h3>
+                <div>
+                  {selectedTask.task.child_tracks.map((track, index) => (
+                    <div key={index}>
+                      <div>{track.track_data?.title || "Unknown Track"}</div>
+                      <div>
+                        Status: {track.status_final} | Position: {track.position}
+                      </div>
+                    </div>
+                  ))}
+                </div>
+              </div>
+            )}
+
+            {selectedTask.task.status_history && selectedTask.task.status_history.length > 0 && (
+              <div>
+                <h3>Status History</h3>
+                <div>
+                  {selectedTask.task.status_history.map((status) => (
+                    <div key={status.status_id}>
+                      <div>
+                        <span>{status.status_type}</span>
+                        <span>{new Date(status.timestamp * 1000).toLocaleString()}</span>
+                      </div>
+                      {status.status_data && (
+                        <pre>{JSON.stringify(status.status_data, null, 2)}</pre>
+                      )}
+                    </div>
+                  ))}
+                </div>
+              </div>
+            )}
+          </div>
+        </div>
+      )}
+
+      {/* Track Mini-History Modal */}
+      {selectedTrackMiniHistory && (
+        <div>
+          <div>
+            <div>
+              <h2>Track Mini-History: {selectedTrackMiniHistory.title}</h2>
+              <button onClick={closeMiniHistory}>×</button>
+            </div>
+
+            {isMiniHistoryLoading ? (
+              <div>Loading mini-history...</div>
+            ) : (
+              <div>
+                {/* Track Summary */}
+                <div>
+                  <div>
+                    <h4>Status</h4>
+                    <div>{selectedTrackMiniHistory.status_final}</div>
+                    <div>Current: {selectedTrackMiniHistory.status_current}</div>
+                  </div>
+                  <div>
+                    <h4>Duration</h4>
+                    <div>{formatDuration(selectedTrackMiniHistory.time_elapsed)}</div>
+                    <div>{selectedTrackMiniHistory.calculated_duration}</div>
+                  </div>
+                  <div>
+                    <h4>File Info</h4>
+                    <div>{formatFileSize(selectedTrackMiniHistory.file_size)}</div>
+                    <div>{selectedTrackMiniHistory.quality_achieved || "N/A"}</div>
+                  </div>
+                  <div>
+                    <h4>Attempts</h4>
+                    <div>{selectedTrackMiniHistory.retry_count + 1}</div>
+                    <div>{selectedTrackMiniHistory.retry_count > 0 ? `${selectedTrackMiniHistory.retry_count} retries` : "No retries"}</div>
+                  </div>
+                </div>
+
+                {/* Track Details */}
+                <div>
+                  <div>
+                    <h4>Track Info</h4>
+                    <div>Position: {selectedTrackMiniHistory.disc_number}-{selectedTrackMiniHistory.track_number} (#{selectedTrackMiniHistory.position})</div>
+                    <div>Duration: {selectedTrackMiniHistory.duration_ms ? `${Math.floor(selectedTrackMiniHistory.duration_ms / 60000)}:${Math.floor((selectedTrackMiniHistory.duration_ms % 60000) / 1000).toString().padStart(2, '0')}` : "N/A"}</div>
+                    <div>Artists: {selectedTrackMiniHistory.artists_data?.map(a => a.name).join(", ") || "N/A"}</div>
+                    <div>Explicit: {selectedTrackMiniHistory.explicit ? "Yes" : "No"}</div>
+                  </div>
+                  <div>
+                    <h4>Download Info</h4>
+                    <div>Started: {selectedTrackMiniHistory.timestamp_started ? new Date(selectedTrackMiniHistory.timestamp_started * 1000).toLocaleString() : "N/A"}</div>
+                    <div>Completed: {selectedTrackMiniHistory.timestamp_completed ? new Date(selectedTrackMiniHistory.timestamp_completed * 1000).toLocaleString() : "N/A"}</div>
+                    <div>Path: {selectedTrackMiniHistory.download_path || "N/A"}</div>
+                  </div>
+                </div>
+
+                {/* Timeline */}
+                <div>
+                  <h4>Status Timeline ({selectedTrackMiniHistory.timeline.length} events)</h4>
+                  <div>
+                    {selectedTrackMiniHistory.timeline.map((event, index) => (
+                      <div key={index}>
+                        <div>
+                          <span>{event.status_type}</span>
+                          <span>{event.human_readable}</span>
+                          <span>{new Date(event.timestamp * 1000).toLocaleString()}</span>
+                        </div>
+                        {event.status_data && Object.keys(event.status_data).length > 0 && (
+                          <pre>{JSON.stringify(event.status_data, null, 2)}</pre>
+                        )}
+                      </div>
+                    ))}
+                  </div>
+                </div>
+
+                {/* Error Information */}
+                {selectedTrackMiniHistory.error_info && (
+                  <div>
+                    <h4>Error Information</h4>
+                    <pre>{JSON.stringify(selectedTrackMiniHistory.error_info, null, 2)}</pre>
+                  </div>
+                )}
+              </div>
+            )}
+          </div>
+        </div>
+      )}
+
+      {parentTaskId && parentTask ? (
+        <div>
+          <button onClick={viewParentTask}>← Back to History</button>
+          <div>
+            <h1>{parentTask.item_name}</h1>
+            <p>{parentTask.item_artist}</p>
+          </div>
+        </div>
      ) : (
        <div>
          <h1>Download History</h1>
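For reviewers poking at these endpoints outside the UI, here is a minimal sketch of the same per-track calls over HTTP in Python. The base URL and parent task ID are assumptions; the paths and response keys are the ones the components above consume. The timeline timestamp label is hedged deliberately, since the backend builds `readable_time` while the frontend type declares `human_readable`.

```python
import requests

BASE = "http://localhost:8000/api"   # assumption: adjust to your deployment
parent_id = "test_album_001"         # assumption: an album task present in history

# All child tracks of a parent task, with mini-histories attached
tracks = requests.get(
    f"{BASE}/history/tracks/{parent_id}",
    params={"include_mini_histories": "true"},
).json()

for t in tracks["tracks"]:
    print(t["title"], t.get("status_final"), "retries:", t.get("retry_count", 0))

# Full mini-history (timeline, retries, file info) for a single track
track_id = tracks["tracks"][0]["track_id"]
mini = requests.get(
    f"{BASE}/history/track/{parent_id}/{track_id}/mini-history"
).json()["track_mini_history"]

for event in mini["timeline"]:
    # backend emits "readable_time"; the UI type expects "human_readable"
    when = event.get("human_readable") or event.get("readable_time")
    print(when, event["status_type"])
```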

@@ -347,13 +948,13 @@ export const History = () => {
       {!parentTaskId && (
         <div>
           {/* Mobile: Stacked filters */}
-          {/* selects for status_final, download_type and track_status; show-child-tracks toggle */}
+          {/* selects for status_final, task_type and status_current; hide-child-tracks and include-children toggles */}
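These filter controls map one-to-one onto query parameters of the list endpoint, so the table's request can be reproduced outside the UI. A sketch of the equivalent request from Python (the base URL is an assumption; the parameter names come from the fetchHistory call above):

```python
import requests

BASE = "http://localhost:8000/api"  # assumption: adjust to your deployment

params = {
    "limit": 25,
    "offset": 0,
    "sort_by": "timestamp_updated",
    "sort_order": "DESC",
    "include_children": "false",
    # optional filters, mirroring the UI state
    "status_final": "COMPLETED",
    "task_type": "album",          # replaces the legacy download_type param
    "hide_child_tracks": "true",   # parents and standalone tracks only
}

data = requests.get(f"{BASE}/history", params=params).json()
print(f"{data['total_count']} matching tasks")
for entry in data["entries"]:
    print(entry["title"], entry["task_type"], entry.get("status_final"))
```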
@@ -435,7 +1047,7 @@ export const History = () => { table.getRowModel().rows.map((row) => { const isParent = !row.original.parent_task_id && - (row.original.download_type === "album" || row.original.download_type === "playlist"); + (row.original.task_type === "album" || row.original.task_type === "playlist" || row.original.download_type === "album" || row.original.download_type === "playlist"); const isChild = !!row.original.parent_task_id; let rowClass = "hover:bg-surface-muted dark:hover:bg-surface-muted-dark"; if (isParent) { @@ -472,18 +1084,11 @@ export const History = () => { ) : ( table.getRowModel().rows.map((row) => { const entry = row.original; - const isParent = !entry.parent_task_id && (entry.download_type === "album" || entry.download_type === "playlist"); + const isParent = !entry.parent_task_id && (entry.task_type === "album" || entry.task_type === "playlist" || entry.download_type === "album" || entry.download_type === "playlist"); const isChild = !!entry.parent_task_id; - const status = entry.parent_task_id ? entry.track_status : entry.status_final; + const status = entry.status_final || entry.track_status; const statusKey = (status || "").toUpperCase(); - const statusClass = { - COMPLETED: "text-success", - SUCCESSFUL: "text-success", - ERROR: "text-error", - FAILED: "text-error", - CANCELLED: "text-content-muted dark:text-content-muted-dark", - SKIPPED: "text-warning", - }[statusKey] || "text-gray-500"; + const statusClass = STATUS_CLASS[statusKey] || "text-gray-500"; let cardClass = "bg-surface dark:bg-surface-secondary-dark rounded-lg border border-border dark:border-border-dark p-4"; if (isParent) { @@ -498,10 +1103,10 @@ export const History = () => {

- {isChild ? `└─ ${entry.item_name}` : entry.item_name} + {isChild ? `└─ ${entry.title || entry.item_name}` : entry.title || entry.item_name}

- {entry.item_artist} + {entry.artist_names?.join(", ") || entry.item_artist}

@@ -514,7 +1119,7 @@ export const History = () => {
Type: - {entry.download_type} + {entry.task_type || entry.download_type}
@@ -526,34 +1131,96 @@ export const History = () => {
Quality: - {formatQuality(entry)} + {entry.quality_achieved || formatQuality(entry)}
+ {showMiniHistories && parentTaskId && ( + <> +
+ Retries: + + {entry.retry_count || 0} + +
+
+ Duration: + + {formatDuration(entry.time_elapsed)} + +
+
+ File Size: + + {formatFileSize(entry.file_size)} + +
+ + )}
Completed: - {new Date(entry.timestamp_completed * 1000).toLocaleString()} + {entry.timestamp_completed ? new Date(entry.timestamp_completed * 1000).toLocaleString() : "N/A"}
{/* Actions for parent entries */} {!parentTaskId && isParent && ( - entry.total_successful || entry.total_skipped || entry.total_failed + entry.has_children || entry.total_successful || entry.total_skipped || entry.total_failed ) ? (
- {entry.total_successful ?? 0} ✓ - {entry.total_skipped ?? 0} ⊘ - {entry.total_failed ?? 0} ✗ + + {entry.child_track_summary?.completed || entry.total_successful || 0} ✓ + + + {entry.child_track_summary?.skipped || entry.total_skipped || 0} ⊘ + + + {entry.child_track_summary?.error || entry.total_failed || 0} ✗ +
+
+ + +
+
+ ) : !parentTaskId ? ( +
+ ) : parentTaskId ? ( +
+ + {showMiniHistories && ( + + )} +
) : null}
); diff --git a/spotizerr-ui/src/routes/root.tsx b/spotizerr-ui/src/routes/root.tsx index 6cda271..585d110 100644 --- a/spotizerr-ui/src/routes/root.tsx +++ b/spotizerr-ui/src/routes/root.tsx @@ -77,7 +77,7 @@ function ThemeToggle() { } function AppLayout() { - const { toggleVisibility } = useContext(QueueContext) || {}; + const { toggleVisibility, activeCount } = useContext(QueueContext) || {}; return (
@@ -98,8 +98,13 @@ function AppLayout() {
             Settings
-          {/* queue toggle button */}
+          {/* queue toggle button, now with an active-downloads badge rendered from activeCount */}
@@ -144,8 +149,13 @@ function AppLayout() {
             Settings
-          {/* queue toggle button */}
+          {/* queue toggle button, same activeCount badge for the mobile layout */}
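Stepping back to the backend half of the diff: the new history manager is driven entirely by callback objects. The sketch below walks one album through the intended flow; the import path matches this repo's layout, but the payloads are illustrative stand-ins for the real albumCallbackObject shape, and `init_history_db` is assumed to be safe to call repeatedly.

```python
from routes.utils.history_manager import (
    init_history_db,
    process_callback_object,
    get_task_history,
    get_child_tracks,
    get_track_mini_history,
)

init_history_db()
task_id = "demo_album_001"  # illustrative ID

# 1) Album starts: creates the parent row and a child table with one
#    "initializing" row per track.
process_callback_object({
    "album": {
        "title": "Demo Album",
        "artists": [{"name": "Demo Artist"}],
        "ids": {"spotify": "albumXYZ"},
        "tracks": [{"title": "Intro"}, {"title": "Outro"}],
    },
    "status_info": {"status": "initializing"},
}, task_id=task_id)

# 2) Album finishes: the summary fans out per-track status updates
#    (done / skipped / error) into the child table.
process_callback_object({
    "album": {"title": "Demo Album"},
    "status_info": {
        "status": "done",
        "summary": {
            "successful_tracks": [{"title": "Intro"}],
            "failed_tracks": [{"track": {"title": "Outro"}, "reason": "Network error"}],
        },
    },
}, task_id=task_id)

# 3) Inspect what the UI will render.
tasks, _ = get_task_history(limit=1, filters={"task_id": task_id})
children_table = tasks[0]["children_table"]
for track in get_child_tracks(children_table):
    mini = get_track_mini_history(track["track_id"], children_table)
    print(track.get("title"), track.get("status_final"), len(mini.get("timeline", [])))
```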