Add download history tracking, server-side Spotify metadata lookup, and delayed task cleanup

cool.gitter.not.me.again.duh
2025-05-29 18:10:37 -06:00
parent ba33e10afc
commit 1a39af3730
30 changed files with 1374 additions and 496 deletions

app.py

@@ -8,6 +8,7 @@ from routes.playlist import playlist_bp
from routes.prgs import prgs_bp
from routes.config import config_bp
from routes.artist import artist_bp
from routes.history import history_bp
import logging
import logging.handlers
import time
@@ -149,6 +150,7 @@ def create_app():
app.register_blueprint(playlist_bp, url_prefix='/api/playlist')
app.register_blueprint(artist_bp, url_prefix='/api/artist')
app.register_blueprint(prgs_bp, url_prefix='/api/prgs')
app.register_blueprint(history_bp, url_prefix='/api/history')
# Serve frontend
@app.route('/')
@@ -186,6 +188,10 @@ def create_app():
# The id parameter is captured, but you can use it as needed.
return render_template('artist.html')
@app.route('/history')
def serve_history_page():
return render_template('history.html')
@app.route('/static/<path:path>')
def serve_static(path):
return send_from_directory('static', path)
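Taken together, the pattern is one blueprint for the JSON API under /api/history plus a page route whose template's script calls that API. A minimal, self-contained sketch of the same wiring (Flask assumed; module and template names are illustrative, not this project's exact layout):

from flask import Blueprint, Flask, jsonify, render_template

# Hypothetical stand-in for routes.history.history_bp
history_bp = Blueprint('history', __name__)

@history_bp.route('', methods=['GET'])
def get_download_history():
    return jsonify({'entries': [], 'total_count': 0})  # placeholder payload

app = Flask(__name__)
app.register_blueprint(history_bp, url_prefix='/api/history')

@app.route('/history')
def serve_history_page():
    # The page's script (src/js/history.ts) then fetches /api/history
    return render_template('history.html')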

requirements.txt

@@ -42,6 +42,7 @@ six==1.17.0
sniffio==1.3.1
spotipy==2.25.1
spotipy_anon==1.4
sse-starlette==2.3.5
starlette==0.46.2
tqdm==4.67.1
typing-inspection==0.4.1

routes/__init__.py

@@ -3,7 +3,7 @@ import atexit
# Configure basic logging for the application if not already configured
# This is a good place for it if routes are a central part of your app structure.
-logging.basicConfig(level=logging.DEBUG,
+logging.basicConfig(level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
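The DEBUG-to-INFO change only raises the root threshold; a quick sketch of the effect in isolation:

import logging

logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log = logging.getLogger(__name__)

log.debug("suppressed: below the INFO threshold")
log.info("emitted: at or above the INFO threshold")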

routes/album.py

@@ -6,18 +6,39 @@ import uuid
import time
from routes.utils.celery_queue_manager import download_queue_manager
from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState
from routes.utils.get_info import get_spotify_info
album_bp = Blueprint('album', __name__)
@album_bp.route('/download/<album_id>', methods=['GET'])
def handle_download(album_id):
# Retrieve essential parameters from the request.
-name = request.args.get('name')
-artist = request.args.get('artist')
+# name = request.args.get('name')
+# artist = request.args.get('artist')
# Construct the URL from album_id
url = f"https://open.spotify.com/album/{album_id}"
# Fetch metadata from Spotify
try:
album_info = get_spotify_info(album_id, "album")
if not album_info or not album_info.get('name') or not album_info.get('artists'):
return Response(
json.dumps({"error": f"Could not retrieve metadata for album ID: {album_id}"}),
status=404,
mimetype='application/json'
)
name_from_spotify = album_info.get('name')
artist_from_spotify = album_info['artists'][0].get('name') if album_info['artists'] else "Unknown Artist"
except Exception as e:
return Response(
json.dumps({"error": f"Failed to fetch metadata for album {album_id}: {str(e)}"}),
status=500,
mimetype='application/json'
)
# Validate required parameters
if not url:
return Response(
@@ -35,8 +56,8 @@ def handle_download(album_id):
task_id = download_queue_manager.add_task({
"download_type": "album",
"url": url,
"name": name,
"artist": artist,
"name": name_from_spotify,
"artist": artist_from_spotify,
"orig_request": orig_params
})
except Exception as e:
@@ -47,8 +68,8 @@ def handle_download(album_id):
store_task_info(error_task_id, {
"download_type": "album",
"url": url,
"name": name,
"artist": artist,
"name": name_from_spotify,
"artist": artist_from_spotify,
"original_request": orig_params,
"created_at": time.time(),
"is_submission_error_task": True

routes/artist.py

@@ -124,7 +124,7 @@ def get_artist_info():
try:
from routes.utils.get_info import get_spotify_info
artist_info = get_spotify_info(spotify_id, "artist")
artist_info = get_spotify_info(spotify_id, "artist_discography")
# If artist_info is successfully fetched (it contains album items),
# check if the artist is watched and augment album items with is_locally_known status
@@ -166,11 +166,11 @@ def add_artist_to_watchlist(artist_spotify_id):
return jsonify({"message": f"Artist {artist_spotify_id} is already being watched."}), 200
# This call returns an album list-like structure based on logs
-artist_album_list_data = get_spotify_info(artist_spotify_id, "artist")
+artist_album_list_data = get_spotify_info(artist_spotify_id, "artist_discography")
# Check if we got any data and if it has items
if not artist_album_list_data or not isinstance(artist_album_list_data.get('items'), list):
logger.error(f"Could not fetch album list details for artist {artist_spotify_id} from Spotify using get_spotify_info('artist'). Data: {artist_album_list_data}")
logger.error(f"Could not fetch album list details for artist {artist_spotify_id} from Spotify using get_spotify_info('artist_discography'). Data: {artist_album_list_data}")
return jsonify({"error": f"Could not fetch sufficient details for artist {artist_spotify_id} to initiate watch."}), 404
# Attempt to extract artist name and verify ID

routes/history.py Normal file

@@ -0,0 +1,42 @@
from flask import Blueprint, jsonify, request
from routes.utils.history_manager import get_history_entries
import logging
logger = logging.getLogger(__name__)
history_bp = Blueprint('history', __name__, url_prefix='/api/history')
@history_bp.route('', methods=['GET'])
def get_download_history():
"""API endpoint to retrieve download history with pagination, sorting, and filtering."""
try:
limit = request.args.get('limit', 25, type=int)
offset = request.args.get('offset', 0, type=int)
sort_by = request.args.get('sort_by', 'timestamp_completed')
sort_order = request.args.get('sort_order', 'DESC')
# Basic filtering example: filter by status_final or download_type
filters = {}
status_filter = request.args.get('status_final')
if status_filter:
filters['status_final'] = status_filter
type_filter = request.args.get('download_type')
if type_filter:
filters['download_type'] = type_filter
# Add more filters as needed, e.g., by item_name (would need LIKE for partial match)
# search_term = request.args.get('search')
# if search_term:
# filters['item_name'] = f'%{search_term}%' # This would require LIKE in get_history_entries
entries, total_count = get_history_entries(limit, offset, sort_by, sort_order, filters)
return jsonify({
'entries': entries,
'total_count': total_count,
'limit': limit,
'offset': offset
})
except Exception as e:
logger.error(f"Error in /api/history endpoint: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve download history"}), 500

routes/playlist.py

@@ -28,13 +28,35 @@ playlist_bp = Blueprint('playlist', __name__, url_prefix='/api/playlist')
@playlist_bp.route('/download/<playlist_id>', methods=['GET'])
def handle_download(playlist_id):
# Retrieve essential parameters from the request.
-name = request.args.get('name')
-artist = request.args.get('artist')
+# name = request.args.get('name') # Removed
+# artist = request.args.get('artist') # Removed
orig_params = request.args.to_dict()
# Construct the URL from playlist_id
url = f"https://open.spotify.com/playlist/{playlist_id}"
orig_params["original_url"] = url # Update original_url to the constructed one
orig_params["original_url"] = request.url # Update original_url to the constructed one
# Fetch metadata from Spotify
try:
playlist_info = get_spotify_info(playlist_id, "playlist")
if not playlist_info or not playlist_info.get('name') or not playlist_info.get('owner'):
return Response(
json.dumps({"error": f"Could not retrieve metadata for playlist ID: {playlist_id}"}),
status=404,
mimetype='application/json'
)
name_from_spotify = playlist_info.get('name')
# Use owner's display_name as the 'artist' for playlists
owner_info = playlist_info.get('owner', {})
artist_from_spotify = owner_info.get('display_name', "Unknown Owner")
except Exception as e:
return Response(
json.dumps({"error": f"Failed to fetch metadata for playlist {playlist_id}: {str(e)}"}),
status=500,
mimetype='application/json'
)
# Validate required parameters
if not url: # This check might be redundant now but kept for safety
@@ -48,8 +70,8 @@ def handle_download(playlist_id):
task_id = download_queue_manager.add_task({
"download_type": "playlist",
"url": url,
"name": name,
"artist": artist,
"name": name_from_spotify, # Use fetched name
"artist": artist_from_spotify, # Use fetched owner name as artist
"orig_request": orig_params
})
# Removed DuplicateDownloadError handling, add_task now manages this by creating an error task.
@@ -59,8 +81,8 @@ def handle_download(playlist_id):
store_task_info(error_task_id, {
"download_type": "playlist",
"url": url,
"name": name,
"artist": artist,
"name": name_from_spotify, # Use fetched name
"artist": artist_from_spotify, # Use fetched owner name as artist
"original_request": orig_params,
"created_at": time.time(),
"is_submission_error_task": True

routes/prgs.py

@@ -1,4 +1,4 @@
-from flask import Blueprint, abort, jsonify, Response, stream_with_context
+from flask import Blueprint, abort, jsonify, Response, stream_with_context, request
import os
import json
import logging
@@ -38,11 +38,38 @@ def get_prg_file(task_id):
task_info = get_task_info(task_id)
if not task_info:
abort(404, "Task not found")
original_request = task_info.get("original_request", {})
# Dynamically construct original_url
dynamic_original_url = ""
download_type = task_info.get("download_type")
# The 'url' field in task_info stores the Spotify/Deezer URL of the item
# e.g., https://open.spotify.com/album/albumId or https://www.deezer.com/track/trackId
item_url = task_info.get("url")
if download_type and item_url:
try:
# Extract the ID from the item_url (last part of the path)
item_id = item_url.split('/')[-1]
if item_id: # Ensure item_id is not empty
base_url = request.host_url.rstrip('/')
dynamic_original_url = f"{base_url}/api/{download_type}/download/{item_id}"
else:
logger.warning(f"Could not extract item ID from URL: {item_url} for task {task_id}. Falling back for original_url.")
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")
except Exception as e:
logger.error(f"Error constructing dynamic original_url for task {task_id}: {e}", exc_info=True)
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "") # Fallback on any error
else:
logger.warning(f"Missing download_type ('{download_type}') or item_url ('{item_url}') in task_info for task {task_id}. Falling back for original_url.")
original_request_obj = task_info.get("original_request", {})
dynamic_original_url = original_request_obj.get("original_url", "")
last_status = get_last_task_status(task_id)
status_count = len(get_task_status(task_id))
response = {
"original_url": original_request.get("original_url", ""),
"original_url": dynamic_original_url,
"last_line": last_status,
"timestamp": time.time(),
"task_id": task_id,
@@ -75,12 +102,53 @@ def delete_prg_file(task_id):
def list_prg_files():
"""
Retrieve a list of all tasks in the system.
-Combines results from both the old PRG file system and the new task ID based system.
+Returns a detailed list of task objects including status and metadata.
"""
-# List only new system tasks
-tasks = get_all_tasks()
-task_ids = [task["task_id"] for task in tasks]
-return jsonify(task_ids)
try:
tasks = get_all_tasks() # This already gets summary data
detailed_tasks = []
for task_summary in tasks:
task_id = task_summary.get("task_id")
if not task_id:
continue
task_info = get_task_info(task_id)
last_status = get_last_task_status(task_id)
if task_info and last_status:
detailed_tasks.append({
"task_id": task_id,
"type": task_info.get("type", task_summary.get("type", "unknown")),
"name": task_info.get("name", task_summary.get("name", "Unknown")),
"artist": task_info.get("artist", task_summary.get("artist", "")),
"download_type": task_info.get("download_type", task_summary.get("download_type", "unknown")),
"status": last_status.get("status", "unknown"), # Keep summary status for quick access
"last_status_obj": last_status, # Full last status object
"original_request": task_info.get("original_request", {}),
"created_at": task_info.get("created_at", 0),
"timestamp": last_status.get("timestamp", task_info.get("created_at", 0))
})
elif task_info: # If last_status is somehow missing, still provide some info
detailed_tasks.append({
"task_id": task_id,
"type": task_info.get("type", "unknown"),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"download_type": task_info.get("download_type", "unknown"),
"status": "unknown",
"last_status_obj": None,
"original_request": task_info.get("original_request", {}),
"created_at": task_info.get("created_at", 0),
"timestamp": task_info.get("created_at", 0)
})
# Sort tasks by creation time (newest first, or by timestamp if creation time is missing)
detailed_tasks.sort(key=lambda x: x.get('timestamp', x.get('created_at', 0)), reverse=True)
return jsonify(detailed_tasks)
except Exception as e:
logger.error(f"Error in /api/prgs/list: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve task list"}), 500
@prgs_bp.route('/retry/<task_id>', methods=['POST'])

routes/track.py

@@ -7,19 +7,40 @@ import time # For timestamps
from routes.utils.celery_queue_manager import download_queue_manager
from routes.utils.celery_tasks import store_task_info, store_task_status, ProgressState # For error task creation
from urllib.parse import urlparse # for URL validation
from routes.utils.get_info import get_spotify_info # Added import
track_bp = Blueprint('track', __name__)
@track_bp.route('/download/<track_id>', methods=['GET'])
def handle_download(track_id):
# Retrieve essential parameters from the request.
-name = request.args.get('name')
-artist = request.args.get('artist')
+# name = request.args.get('name') # Removed
+# artist = request.args.get('artist') # Removed
orig_params = request.args.to_dict()
# Construct the URL from track_id
url = f"https://open.spotify.com/track/{track_id}"
orig_params["original_url"] = url # Update original_url to the constructed one
# Fetch metadata from Spotify
try:
track_info = get_spotify_info(track_id, "track")
if not track_info or not track_info.get('name') or not track_info.get('artists'):
return Response(
json.dumps({"error": f"Could not retrieve metadata for track ID: {track_id}"}),
status=404,
mimetype='application/json'
)
name_from_spotify = track_info.get('name')
artist_from_spotify = track_info['artists'][0].get('name') if track_info['artists'] else "Unknown Artist"
except Exception as e:
return Response(
json.dumps({"error": f"Failed to fetch metadata for track {track_id}: {str(e)}"}),
status=500,
mimetype='application/json'
)
# Validate required parameters
if not url:
@@ -42,8 +63,8 @@ def handle_download(track_id):
task_id = download_queue_manager.add_task({
"download_type": "track",
"url": url,
"name": name,
"artist": artist,
"name": name_from_spotify, # Use fetched name
"artist": artist_from_spotify, # Use fetched artist
"orig_request": orig_params
})
# Removed DuplicateDownloadError handling, add_task now manages this by creating an error task.
@@ -53,8 +74,8 @@ def handle_download(track_id):
store_task_info(error_task_id, {
"download_type": "track",
"url": url,
"name": name,
"artist": artist,
"name": name_from_spotify, # Use fetched name
"artist": artist_from_spotify, # Use fetched artist
"original_request": orig_params,
"created_at": time.time(),
"is_submission_error_task": True

routes/utils/celery_manager.py

@@ -18,9 +18,12 @@ from .celery_tasks import (
get_last_task_status,
store_task_status,
get_all_tasks as get_all_celery_tasks_info,
-cleanup_stale_errors
+cleanup_stale_errors,
+delayed_delete_task_data
)
from .celery_config import get_config_params
# Import history manager
from .history_manager import init_history_db
# Configure logging
logger = logging.getLogger(__name__)
@@ -91,11 +94,73 @@ class CeleryManager:
}
store_task_status(task_id, error_payload)
stale_tasks_count += 1
# Schedule deletion for this interrupted task
logger.info(f"Task {task_id} was interrupted. Data scheduled for deletion in 30s.")
delayed_delete_task_data.apply_async(
args=[task_id, "Task interrupted by application restart and auto-cleaned."],
countdown=30
)
if stale_tasks_count > 0:
logger.info(f"Marked {stale_tasks_count} stale tasks as 'error'.")
else:
logger.info("No stale tasks found that needed cleanup.")
logger.info("No stale tasks found that needed cleanup (active states).")
# NEW: Check for tasks that are already terminal but might have missed their cleanup
logger.info("Checking for terminal tasks (COMPLETE, CANCELLED, terminal ERROR) that might have missed cleanup...")
cleaned_during_this_pass = 0
# `tasks` variable is from `get_all_celery_tasks_info()` called at the beginning of the method
for task_summary in tasks:
task_id = task_summary.get("task_id")
if not task_id:
continue
last_status_data = get_last_task_status(task_id)
if last_status_data:
current_status_str = last_status_data.get("status")
task_info_details = get_task_info(task_id) # Get full info for download_type etc.
cleanup_reason = ""
schedule_cleanup = False
if current_status_str == ProgressState.COMPLETE:
# If a task is COMPLETE (any download_type) and still here, its original scheduled deletion was missed.
logger.warning(f"Task {task_id} ('{task_summary.get('name', 'Unknown')}', type: {task_info_details.get('download_type')}) is COMPLETE and still in Redis. Re-scheduling cleanup.")
cleanup_reason = f"Task ({task_info_details.get('download_type')}) was COMPLETE; re-scheduling auto-cleanup."
schedule_cleanup = True
elif current_status_str == ProgressState.CANCELLED:
logger.warning(f"Task {task_id} ('{task_summary.get('name', 'Unknown')}') is CANCELLED and still in Redis. Re-scheduling cleanup.")
cleanup_reason = "Task was CANCELLED; re-scheduling auto-cleanup."
schedule_cleanup = True
elif current_status_str == ProgressState.ERROR:
can_retry_flag = last_status_data.get("can_retry", False)
# is_submission_error_task and is_duplicate_error_task are flags on task_info, not typically on last_status
is_submission_error = task_info_details.get("is_submission_error_task", False)
is_duplicate_error = task_info_details.get("is_duplicate_error_task", False)
# Check if it's an error state that should have been cleaned up
if not can_retry_flag or is_submission_error or is_duplicate_error or last_status_data.get("status") == ProgressState.ERROR_RETRIED:
# ERROR_RETRIED means the original task is done and should be cleaned.
logger.warning(f"Task {task_id} ('{task_summary.get('name', 'Unknown')}') is in a terminal ERROR state ('{last_status_data.get('error')}') and still in Redis. Re-scheduling cleanup.")
cleanup_reason = f"Task was in terminal ERROR state ('{last_status_data.get('error', 'Unknown error')}'); re-scheduling auto-cleanup."
schedule_cleanup = True
elif current_status_str == ProgressState.ERROR_RETRIED:
# This state itself implies the task is terminal and its data can be cleaned.
logger.warning(f"Task {task_id} ('{task_summary.get('name', 'Unknown')}') is ERROR_RETRIED and still in Redis. Re-scheduling cleanup.")
cleanup_reason = "Task was ERROR_RETRIED; re-scheduling auto-cleanup."
schedule_cleanup = True
if schedule_cleanup:
delayed_delete_task_data.apply_async(
args=[task_id, cleanup_reason],
countdown=30 # Schedule with 30s delay
)
cleaned_during_this_pass += 1
if cleaned_during_this_pass > 0:
logger.info(f"Re-scheduled cleanup for {cleaned_during_this_pass} terminal tasks that were still in Redis.")
else:
logger.info("No additional terminal tasks found in Redis needing cleanup re-scheduling.")
except Exception as e:
logger.error(f"Error during stale task cleanup: {e}", exc_info=True)
@@ -107,6 +172,9 @@ class CeleryManager:
self.running = True
# Initialize history database
init_history_db()
# Clean up stale tasks BEFORE starting/restarting workers
self._cleanup_stale_tasks()
@@ -221,7 +289,7 @@ class CeleryManager:
'worker',
'--loglevel=info',
f'--concurrency={new_worker_count}',
-'-Q', 'downloads',
+'-Q', 'downloads,default',
'--logfile=-', # Output logs to stdout
'--without-heartbeat', # Reduce log noise
'--without-gossip', # Reduce log noise
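The queue list matters because delayed_delete_task_data is routed to the "default" queue (see celery_tasks.py below); a worker consuming only "downloads" would never execute the scheduled cleanups. Roughly the equivalent invocation as a sketch (the -A module path is an assumption):

import subprocess

subprocess.Popen([
    'celery', '-A', 'routes.utils.celery_tasks', 'worker',
    '--loglevel=info', '--concurrency=3',
    '-Q', 'downloads,default',  # both queues, so cleanup tasks are consumed
    '--without-heartbeat', '--without-gossip',
])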

routes/utils/celery_tasks.py

@@ -18,6 +18,9 @@ from routes.utils.celery_config import REDIS_URL, REDIS_BACKEND, REDIS_PASSWORD,
# Import for playlist watch DB update
from routes.utils.watch.db import add_single_track_to_playlist_db
# Import history manager function
from .history_manager import add_entry_to_history
# Initialize Celery app
celery_app = Celery('download_tasks',
broker=REDIS_URL,
@@ -146,6 +149,50 @@ def get_task_info(task_id):
logger.error(f"Error getting task info: {e}")
return {}
# --- History Logging Helper ---
def _log_task_to_history(task_id, final_status_str, error_msg=None):
"""Helper function to gather task data and log it to the history database."""
try:
task_info = get_task_info(task_id)
last_status_obj = get_last_task_status(task_id)
if not task_info:
logger.warning(f"History: No task_info found for task_id {task_id}. Cannot log to history.")
return
# Extract Spotify ID from item URL if possible
spotify_id = None
item_url = task_info.get('url', '')
if item_url:
try:
spotify_id = item_url.split('/')[-1]
# Further validation if it looks like a Spotify ID (e.g., 22 chars, alphanumeric)
if not (spotify_id and len(spotify_id) == 22 and spotify_id.isalnum()):
spotify_id = None # Reset if not a valid-looking ID
except Exception:
spotify_id = None # Ignore errors in parsing
history_entry = {
'task_id': task_id,
'download_type': task_info.get('download_type'),
'item_name': task_info.get('name'),
'item_artist': task_info.get('artist'),
'item_album': task_info.get('album', task_info.get('name') if task_info.get('download_type') == 'album' else None),
'item_url': item_url,
'spotify_id': spotify_id,
'status_final': final_status_str,
'error_message': error_msg if error_msg else (last_status_obj.get('error') if last_status_obj else None),
'timestamp_added': task_info.get('created_at', time.time()),
'timestamp_completed': last_status_obj.get('timestamp', time.time()) if last_status_obj else time.time(),
'original_request_json': json.dumps(task_info.get('original_request', {})),
'last_status_obj_json': json.dumps(last_status_obj if last_status_obj else {})
}
add_entry_to_history(history_entry)
except Exception as e:
logger.error(f"History: Error preparing or logging history for task {task_id}: {e}", exc_info=True)
# --- End History Logging Helper ---
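The ID check above leans on Spotify IDs being 22 base-62 (alphanumeric) characters. The same logic in isolation:

def extract_spotify_id(item_url: str):
    # Keep the last path segment only if it looks like a Spotify ID
    candidate = item_url.rstrip('/').split('/')[-1] if item_url else ''
    return candidate if len(candidate) == 22 and candidate.isalnum() else None

print(extract_spotify_id("https://open.spotify.com/track/4uLU6hMCjMI75M1A2tKUQC"))
# -> 4uLU6hMCjMI75M1A2tKUQC
print(extract_spotify_id("https://www.deezer.com/track/3135556"))  # -> None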
def cancel_task(task_id):
"""Cancel a task by its ID"""
try:
@@ -159,7 +206,16 @@ def cancel_task(task_id):
# Try to revoke the Celery task if it hasn't started yet
celery_app.control.revoke(task_id, terminate=True, signal='SIGTERM')
logger.info(f"Task {task_id} cancelled by user")
# Log cancellation to history
_log_task_to_history(task_id, 'CANCELLED', "Task cancelled by user")
# Schedule deletion of task data after 30 seconds
delayed_delete_task_data.apply_async(
args=[task_id, "Task cancelled by user and auto-cleaned."],
countdown=30
)
logger.info(f"Task {task_id} cancelled by user. Data scheduled for deletion in 30s.")
return {"status": "cancelled", "task_id": task_id}
except Exception as e:
logger.error(f"Error cancelling task {task_id}: {e}")
@@ -440,17 +496,6 @@ class ProgressTrackingTask(Task):
# Store the processed status update
store_task_status(task_id, stored_data)
-# Immediately delete task info from Redis after marking as complete
-if stored_data.get("status") == ProgressState.COMPLETE:
-logger.info(f"Task {task_id} completed. Deleting task data from Redis.")
-try:
-redis_client.delete(f"task:{task_id}:info")
-redis_client.delete(f"task:{task_id}:status")
-redis_client.delete(f"task:{task_id}:status:next_id") # Also delete the counter
-logger.info(f"Successfully deleted Redis data for completed task {task_id}.")
-except Exception as e:
-logger.error(f"Error deleting Redis data for completed task {task_id}: {e}", exc_info=True)
def _handle_initializing(self, task_id, data, task_info):
"""Handle initializing status from deezspot"""
# Extract relevant fields
@@ -789,6 +834,11 @@ class ProgressTrackingTask(Task):
# Log summary
logger.info(f"Task {task_id} summary: {completed_tracks} completed, {skipped_tracks} skipped, {error_count} errors")
# Schedule deletion for completed multi-track downloads
delayed_delete_task_data.apply_async(
args=[task_id, "Task completed successfully and auto-cleaned."],
countdown=30 # Delay in seconds
)
else:
# Generic done for other types
@@ -796,20 +846,6 @@ class ProgressTrackingTask(Task):
data["status"] = ProgressState.COMPLETE
data["message"] = "Download complete"
# Store the processed status update
store_task_status(task_id, data)
-# Immediately delete task info from Redis after marking as complete
-if data.get("status") == ProgressState.COMPLETE:
-logger.info(f"Task {task_id} ({task_info.get('name', 'Unknown')}) completed. Deleting task data from Redis.")
-try:
-redis_client.delete(f"task:{task_id}:info")
-redis_client.delete(f"task:{task_id}:status")
-redis_client.delete(f"task:{task_id}:status:next_id") # Also delete the counter
-logger.info(f"Successfully deleted Redis data for completed task {task_id}.")
-except Exception as e:
-logger.error(f"Error deleting Redis data for completed task {task_id}: {e}", exc_info=True)
# Celery signal handlers
@task_prerun.connect
def task_prerun_handler(task_id=None, task=None, *args, **kwargs):
@@ -834,25 +870,40 @@ def task_prerun_handler(task_id=None, task=None, *args, **kwargs):
def task_postrun_handler(task_id=None, task=None, retval=None, state=None, *args, **kwargs):
"""Signal handler when a task finishes"""
try:
# Skip if task is already marked as complete or error in Redis
last_status = get_last_task_status(task_id)
if last_status and last_status.get("status") in [ProgressState.COMPLETE, ProgressState.ERROR]:
return
# Get task info
# Skip if task is already marked as complete or error in Redis for history logging purposes
last_status_for_history = get_last_task_status(task_id)
if last_status_for_history and last_status_for_history.get("status") in [ProgressState.COMPLETE, ProgressState.ERROR, ProgressState.CANCELLED, "ERROR_RETRIED", "ERROR_AUTO_CLEANED"]:
# Check if it was a REVOKED (cancelled) task, if so, ensure it's logged.
if state == states.REVOKED and last_status_for_history.get("status") != ProgressState.CANCELLED:
logger.info(f"Task {task_id} was REVOKED (likely cancelled), logging to history.")
_log_task_to_history(task_id, 'CANCELLED', "Task was revoked/cancelled.")
# else:
# logger.debug(f"History: Task {task_id} already in terminal state {last_status_for_history.get('status')} in Redis. History logging likely handled.")
# return # Do not return here, let the normal status update proceed for Redis if necessary
task_info = get_task_info(task_id)
current_redis_status = last_status_for_history.get("status") if last_status_for_history else None
# Update task status based on Celery task state
if state == states.SUCCESS:
store_task_status(task_id, {
"status": ProgressState.COMPLETE,
"timestamp": time.time(),
"type": task_info.get("type", "unknown"),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"message": "Download completed successfully."
})
if current_redis_status != ProgressState.COMPLETE:
store_task_status(task_id, {
"status": ProgressState.COMPLETE,
"timestamp": time.time(),
"type": task_info.get("type", "unknown"),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"message": "Download completed successfully."
})
logger.info(f"Task {task_id} completed successfully: {task_info.get('name', 'Unknown')}")
_log_task_to_history(task_id, 'COMPLETED')
# If the task was a single track, schedule its data for deletion after a delay
if task_info.get("download_type") == "track":
delayed_delete_task_data.apply_async(
args=[task_id, "Task completed successfully and auto-cleaned."],
countdown=30 # Delay in seconds
)
# If from playlist_watch and successful, add track to DB
original_request = task_info.get("original_request", {})
@@ -896,24 +947,34 @@ def task_failure_handler(task_id=None, exception=None, traceback=None, *args, **
# Check if we can retry
can_retry = retry_count < max_retries
# Update task status to error
error_message_str = str(exception)
store_task_status(task_id, {
"status": ProgressState.ERROR,
"timestamp": time.time(),
"type": task_info.get("type", "unknown"),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"error": error_message_str,
"traceback": str(traceback),
"can_retry": can_retry,
"retry_count": retry_count,
"max_retries": max_retries
})
# Update task status to error in Redis if not already an error
if last_status and last_status.get("status") != ProgressState.ERROR:
store_task_status(task_id, {
"status": ProgressState.ERROR,
"timestamp": time.time(),
"type": task_info.get("type", "unknown"),
"name": task_info.get("name", "Unknown"),
"artist": task_info.get("artist", ""),
"error": str(exception),
"traceback": str(traceback),
"can_retry": can_retry,
"retry_count": retry_count,
"max_retries": max_retries
})
logger.error(f"Task {task_id} failed: {error_message_str}")
logger.error(f"Task {task_id} failed: {str(exception)}")
_log_task_to_history(task_id, 'ERROR', str(exception))
if can_retry:
logger.info(f"Task {task_id} can be retried ({retry_count}/{max_retries})")
else:
# If task cannot be retried, schedule its data for deletion
logger.info(f"Task {task_id} failed and cannot be retried. Data scheduled for deletion in 30s.")
delayed_delete_task_data.apply_async(
args=[task_id, f"Task failed ({str(exception)}) and max retries reached. Auto-cleaned."],
countdown=30
)
except Exception as e:
logger.error(f"Error in task_failure_handler: {e}")
@@ -1147,15 +1208,40 @@ def delete_task_data_and_log(task_id, reason="Task data deleted"):
try:
task_info = get_task_info(task_id) # Get info before deleting
last_status = get_last_task_status(task_id)
current_status_val = last_status.get("status") if last_status else None
-# Update status to cancelled if it's not already in a terminal state that implies deletion is okay
-if not last_status or last_status.get("status") not in [ProgressState.CANCELLED, ProgressState.ERROR_RETRIED, ProgressState.ERROR_AUTO_CLEANED]:
# Determine the final status for Redis before deletion
# The reason passed to this function indicates why it's being deleted.
final_redis_status = ProgressState.ERROR_AUTO_CLEANED # Default for most cleanup scenarios
error_message_for_status = reason
if reason == "Task completed successfully and auto-cleaned.":
final_redis_status = ProgressState.COMPLETE # It was already complete
error_message_for_status = "Task completed and auto-cleaned."
elif reason == "Task cancelled by user and auto-cleaned.":
final_redis_status = ProgressState.CANCELLED # It was already cancelled
error_message_for_status = "Task cancelled and auto-cleaned."
elif "Task failed" in reason and "max retries reached" in reason:
final_redis_status = ProgressState.ERROR # It was already an error (non-retryable)
error_message_for_status = reason
elif reason == "Task interrupted by application restart and auto-cleaned.":
final_redis_status = ProgressState.ERROR # It was marked as ERROR (interrupted)
error_message_for_status = reason
# Add more specific conditions if needed based on other reasons `delayed_delete_task_data` might be called with.
# Update Redis status one last time if it's not already reflecting the final intended state for this cleanup.
# This is mainly for cases where cleanup is initiated for tasks not yet in a fully terminal state by other handlers.
if current_status_val not in [ProgressState.COMPLETE, ProgressState.CANCELLED, ProgressState.ERROR_RETRIED, ProgressState.ERROR_AUTO_CLEANED, final_redis_status]:
store_task_status(task_id, {
"status": ProgressState.ERROR_AUTO_CLEANED, # Use specific status
"error": reason,
"status": final_redis_status,
"error": error_message_for_status, # Use the reason as the error/message for this status
"timestamp": time.time()
})
# History logging for COMPLETION, CANCELLATION, or definitive ERROR should have occurred when those states were first reached.
# If this cleanup is for a task that *wasn't* in such a state (e.g. stale, still processing), log it now.
if final_redis_status == ProgressState.ERROR_AUTO_CLEANED:
_log_task_to_history(task_id, 'ERROR', error_message_for_status) # Or a more specific status if desired
# Delete Redis keys associated with the task
redis_client.delete(f"task:{task_id}:info")
redis_client.delete(f"task:{task_id}:status")
@@ -1204,4 +1290,12 @@ def cleanup_stale_errors():
return {"status": "complete", "cleaned_count": cleaned_count}
except Exception as e:
logger.error(f"Error during cleanup_stale_errors: {e}", exc_info=True)
return {"status": "error", "error": str(e)}
return {"status": "error", "error": str(e)}
@celery_app.task(name="delayed_delete_task_data", queue="default") # Use default queue for utility tasks
def delayed_delete_task_data(task_id, reason):
"""
Celery task to delete task data after a delay.
"""
logger.info(f"Executing delayed deletion for task {task_id}. Reason: {reason}")
delete_task_data_and_log(task_id, reason)
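Call sites throughout this commit follow the same pattern: enqueue the cleanup with a countdown instead of deleting Redis keys inline. A sketch of the call (the task ID is illustrative):

from routes.utils.celery_tasks import delayed_delete_task_data

# Fire-and-forget: a worker on the "default" queue deletes the task's
# Redis keys roughly 30 seconds from now
delayed_delete_task_data.apply_async(
    args=["some-task-id", "Task completed successfully and auto-cleaned."],
    countdown=30,
)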

routes/utils/get_info.py

@@ -52,15 +52,17 @@ def get_spotify_info(spotify_id, spotify_type, limit=None, offset=None):
return Spo.get_album(spotify_id)
elif spotify_type == "playlist":
return Spo.get_playlist(spotify_id)
elif spotify_type == "artist":
elif spotify_type == "artist_discography":
if limit is not None and offset is not None:
return Spo.get_artist(spotify_id, limit=limit, offset=offset)
return Spo.get_artist_discography(spotify_id, limit=limit, offset=offset)
elif limit is not None:
return Spo.get_artist(spotify_id, limit=limit)
return Spo.get_artist_discography(spotify_id, limit=limit)
elif offset is not None:
return Spo.get_artist(spotify_id, offset=offset)
return Spo.get_artist_discography(spotify_id, offset=offset)
else:
return Spo.get_artist(spotify_id)
return Spo.get_artist_discography(spotify_id)
elif spotify_type == "artist":
return Spo.get_artist(spotify_id)
elif spotify_type == "episode":
return Spo.get_episode(spotify_id)
else:
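After the rename, the two artist lookups are distinct: "artist_discography" pages through album items, while plain "artist" returns the artist object itself. Illustrative calls (the artist ID is an example):

from routes.utils.get_info import get_spotify_info

# Pages of album items, as used by the artist routes and the watch manager
albums_page = get_spotify_info("0TnOYISbd1XYRBk9myaseg", "artist_discography",
                               limit=50, offset=0)
# The artist object itself, via the plain "artist" branch
artist_obj = get_spotify_info("0TnOYISbd1XYRBk9myaseg", "artist")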

routes/utils/history_manager.py Normal file

@@ -0,0 +1,235 @@
import sqlite3
import json
import time
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
HISTORY_DIR = Path('./data/history')
HISTORY_DB_FILE = HISTORY_DIR / 'download_history.db'
def init_history_db():
"""Initializes the download history database and creates the table if it doesn't exist."""
try:
HISTORY_DIR.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(HISTORY_DB_FILE)
cursor = conn.cursor()
cursor.execute("""
CREATE TABLE IF NOT EXISTS download_history (
task_id TEXT PRIMARY KEY,
download_type TEXT,
item_name TEXT,
item_artist TEXT,
item_album TEXT,
item_url TEXT,
spotify_id TEXT,
status_final TEXT, -- 'COMPLETED', 'ERROR', 'CANCELLED'
error_message TEXT,
timestamp_added REAL,
timestamp_completed REAL,
original_request_json TEXT,
last_status_obj_json TEXT
)
""")
conn.commit()
logger.info(f"Download history database initialized at {HISTORY_DB_FILE}")
except sqlite3.Error as e:
logger.error(f"Error initializing download history database: {e}", exc_info=True)
finally:
if conn:
conn.close()
def add_entry_to_history(history_data: dict):
"""Adds or replaces an entry in the download_history table.
Args:
history_data (dict): A dictionary containing the data for the history entry.
Expected keys match the table columns.
"""
required_keys = [
'task_id', 'download_type', 'item_name', 'item_artist', 'item_album',
'item_url', 'spotify_id', 'status_final', 'error_message',
'timestamp_added', 'timestamp_completed', 'original_request_json',
'last_status_obj_json'
]
# Ensure all keys are present, filling with None if not
for key in required_keys:
history_data.setdefault(key, None)
conn = None
try:
conn = sqlite3.connect(HISTORY_DB_FILE)
cursor = conn.cursor()
cursor.execute("""
INSERT OR REPLACE INTO download_history (
task_id, download_type, item_name, item_artist, item_album,
item_url, spotify_id, status_final, error_message,
timestamp_added, timestamp_completed, original_request_json,
last_status_obj_json
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
history_data['task_id'], history_data['download_type'], history_data['item_name'],
history_data['item_artist'], history_data['item_album'], history_data['item_url'],
history_data['spotify_id'], history_data['status_final'], history_data['error_message'],
history_data['timestamp_added'], history_data['timestamp_completed'],
history_data['original_request_json'], history_data['last_status_obj_json']
))
conn.commit()
logger.info(f"Added/Updated history for task_id: {history_data['task_id']}, status: {history_data['status_final']}")
except sqlite3.Error as e:
logger.error(f"Error adding entry to download history for task_id {history_data.get('task_id')}: {e}", exc_info=True)
except Exception as e:
logger.error(f"Unexpected error adding to history for task_id {history_data.get('task_id')}: {e}", exc_info=True)
finally:
if conn:
conn.close()
def get_history_entries(limit=25, offset=0, sort_by='timestamp_completed', sort_order='DESC', filters=None):
"""Retrieves entries from the download_history table with pagination, sorting, and filtering.
Args:
limit (int): Maximum number of entries to return.
offset (int): Number of entries to skip (for pagination).
sort_by (str): Column name to sort by.
sort_order (str): 'ASC' or 'DESC'.
filters (dict, optional): A dictionary of column_name: value to filter by.
Currently supports exact matches.
Returns:
tuple: (list of history entries as dicts, total_count of matching entries)
"""
conn = None
try:
conn = sqlite3.connect(HISTORY_DB_FILE)
conn.row_factory = sqlite3.Row # Access columns by name
cursor = conn.cursor()
base_query = "FROM download_history"
count_query = "SELECT COUNT(*) " + base_query
select_query = "SELECT * " + base_query
where_clauses = []
params = []
if filters:
for column, value in filters.items():
# Basic security: ensure column is a valid one (alphanumeric + underscore)
if column.replace('_', '').isalnum():
where_clauses.append(f"{column} = ?")
params.append(value)
if where_clauses:
where_sql = " WHERE " + " AND ".join(where_clauses)
count_query += where_sql
select_query += where_sql
# Get total count for pagination
cursor.execute(count_query, params)
total_count = cursor.fetchone()[0]
# Validate sort_by and sort_order to prevent SQL injection
valid_sort_columns = [
'task_id', 'download_type', 'item_name', 'item_artist', 'item_album',
'item_url', 'status_final', 'timestamp_added', 'timestamp_completed'
]
if sort_by not in valid_sort_columns:
sort_by = 'timestamp_completed' # Default sort
sort_order_upper = sort_order.upper()
if sort_order_upper not in ['ASC', 'DESC']:
sort_order_upper = 'DESC'
select_query += f" ORDER BY {sort_by} {sort_order_upper} LIMIT ? OFFSET ?"
params.extend([limit, offset])
cursor.execute(select_query, params)
rows = cursor.fetchall()
# Convert rows to list of dicts
entries = [dict(row) for row in rows]
return entries, total_count
except sqlite3.Error as e:
logger.error(f"Error retrieving history entries: {e}", exc_info=True)
return [], 0
finally:
if conn:
conn.close()
if __name__ == '__main__':
# For testing purposes
logging.basicConfig(level=logging.INFO)
init_history_db()
sample_data_complete = {
'task_id': 'test_task_123',
'download_type': 'track',
'item_name': 'Test Song',
'item_artist': 'Test Artist',
'item_album': 'Test Album',
'item_url': 'http://spotify.com/track/123',
'spotify_id': '123',
'status_final': 'COMPLETED',
'error_message': None,
'timestamp_added': time.time() - 3600,
'timestamp_completed': time.time(),
'original_request_json': json.dumps({'param1': 'value1'}),
'last_status_obj_json': json.dumps({'status': 'complete', 'message': 'Finished!'})
}
add_entry_to_history(sample_data_complete)
sample_data_error = {
'task_id': 'test_task_456',
'download_type': 'album',
'item_name': 'Another Album',
'item_artist': 'Another Artist',
'item_album': 'Another Album', # For albums, item_name and item_album are often the same
'item_url': 'http://spotify.com/album/456',
'spotify_id': '456',
'status_final': 'ERROR',
'error_message': 'Download failed due to network issue.',
'timestamp_added': time.time() - 7200,
'timestamp_completed': time.time() - 60,
'original_request_json': json.dumps({'param2': 'value2'}),
'last_status_obj_json': json.dumps({'status': 'error', 'error': 'Network issue'})
}
add_entry_to_history(sample_data_error)
# Test updating an entry
updated_data_complete = {
'task_id': 'test_task_123',
'download_type': 'track',
'item_name': 'Test Song (Updated)',
'item_artist': 'Test Artist',
'item_album': 'Test Album II',
'item_url': 'http://spotify.com/track/123',
'spotify_id': '123',
'status_final': 'COMPLETED',
'error_message': None,
'timestamp_added': time.time() - 3600,
'timestamp_completed': time.time() + 100, # Updated completion time
'original_request_json': json.dumps({'param1': 'value1', 'new_param': 'added'}),
'last_status_obj_json': json.dumps({'status': 'complete', 'message': 'Finished! With update.'})
}
add_entry_to_history(updated_data_complete)
print(f"Test entries added/updated in {HISTORY_DB_FILE}")
print("\nFetching all history entries (default sort):")
entries, total = get_history_entries(limit=5)
print(f"Total entries: {total}")
for entry in entries:
print(entry)
print("\nFetching history entries (sorted by item_name ASC, limit 2, offset 1):")
entries_sorted, total_sorted = get_history_entries(limit=2, offset=1, sort_by='item_name', sort_order='ASC')
print(f"Total entries (should be same as above): {total_sorted}")
for entry in entries_sorted:
print(entry)
print("\nFetching history entries with filter (status_final = COMPLETED):")
entries_filtered, total_filtered = get_history_entries(filters={'status_final': 'COMPLETED'})
print(f"Total COMPLETED entries: {total_filtered}")
for entry in entries_filtered:
print(entry)

routes/utils/watch/manager.py

@@ -231,7 +231,7 @@ def check_watched_artists(specific_artist_id: str = None):
# The 'artist-albums' type for get_spotify_info needs to support pagination params.
# And return a list of album objects.
logger.debug(f"Artist Watch Manager: Fetching albums for {artist_spotify_id}. Limit: {limit}, Offset: {offset}")
-artist_albums_page = get_spotify_info(artist_spotify_id, "artist", limit=limit, offset=offset)
+artist_albums_page = get_spotify_info(artist_spotify_id, "artist_discography", limit=limit, offset=offset)
if not artist_albums_page or not isinstance(artist_albums_page.get('items'), list):
logger.warning(f"Artist Watch Manager: No album items found or invalid format for artist {artist_spotify_id} (name: '{artist_name}') at offset {offset}. Response: {artist_albums_page}")

src/js/history.ts Normal file

@@ -0,0 +1,160 @@
document.addEventListener('DOMContentLoaded', () => {
const historyTableBody = document.getElementById('history-table-body') as HTMLTableSectionElement | null;
const prevButton = document.getElementById('prev-page') as HTMLButtonElement | null;
const nextButton = document.getElementById('next-page') as HTMLButtonElement | null;
const pageInfo = document.getElementById('page-info') as HTMLSpanElement | null;
const limitSelect = document.getElementById('limit-select') as HTMLSelectElement | null;
const statusFilter = document.getElementById('status-filter') as HTMLSelectElement | null;
const typeFilter = document.getElementById('type-filter') as HTMLSelectElement | null;
let currentPage = 1;
let limit = 25;
let totalEntries = 0;
let currentSortBy = 'timestamp_completed';
let currentSortOrder = 'DESC';
async function fetchHistory(page = 1) {
if (!historyTableBody || !prevButton || !nextButton || !pageInfo || !limitSelect || !statusFilter || !typeFilter) {
console.error('One or more critical UI elements are missing for history page.');
return;
}
const offset = (page - 1) * limit;
let apiUrl = `/api/history?limit=${limit}&offset=${offset}&sort_by=${currentSortBy}&sort_order=${currentSortOrder}`;
const statusVal = statusFilter.value;
if (statusVal) {
apiUrl += `&status_final=${statusVal}`;
}
const typeVal = typeFilter.value;
if (typeVal) {
apiUrl += `&download_type=${typeVal}`;
}
try {
const response = await fetch(apiUrl);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const data = await response.json();
renderHistory(data.entries);
totalEntries = data.total_count;
currentPage = Math.floor(offset / limit) + 1;
updatePagination();
} catch (error) {
console.error('Error fetching history:', error);
if (historyTableBody) {
historyTableBody.innerHTML = '<tr><td colspan="7">Error loading history.</td></tr>';
}
}
}
function renderHistory(entries: any[]) {
if (!historyTableBody) return;
historyTableBody.innerHTML = ''; // Clear existing rows
if (!entries || entries.length === 0) {
historyTableBody.innerHTML = '<tr><td colspan="7">No history entries found.</td></tr>';
return;
}
entries.forEach(entry => {
const row = historyTableBody.insertRow();
row.insertCell().textContent = entry.item_name || 'N/A';
row.insertCell().textContent = entry.item_artist || 'N/A';
row.insertCell().textContent = entry.download_type ? entry.download_type.charAt(0).toUpperCase() + entry.download_type.slice(1) : 'N/A';
const statusCell = row.insertCell();
statusCell.textContent = entry.status_final || 'N/A';
statusCell.className = `status-${entry.status_final}`;
row.insertCell().textContent = entry.timestamp_added ? new Date(entry.timestamp_added * 1000).toLocaleString() : 'N/A';
row.insertCell().textContent = entry.timestamp_completed ? new Date(entry.timestamp_completed * 1000).toLocaleString() : 'N/A';
const detailsCell = row.insertCell();
const detailsButton = document.createElement('button');
detailsButton.innerHTML = `<img src="/static/images/info.svg" alt="Details">`;
detailsButton.className = 'details-btn btn-icon';
detailsButton.title = 'Show Details';
detailsButton.onclick = () => showDetailsModal(entry);
detailsCell.appendChild(detailsButton);
if (entry.status_final === 'ERROR' && entry.error_message) {
const errorSpan = document.createElement('span');
errorSpan.textContent = ' (Show Error)';
errorSpan.className = 'error-message-toggle';
errorSpan.style.marginLeft = '5px';
errorSpan.onclick = (e) => {
e.stopPropagation(); // Prevent click on row if any
let errorDetailsDiv = row.querySelector('.error-details') as HTMLElement | null;
if (!errorDetailsDiv) {
errorDetailsDiv = document.createElement('div');
errorDetailsDiv.className = 'error-details';
const newCell = row.insertCell(); // This will append to the end of the row
newCell.colSpan = 7; // Span across all columns
newCell.appendChild(errorDetailsDiv);
// Visually, this new cell will be after the 'Details' button cell.
// To make it appear as part of the status cell or below the row, more complex DOM manipulation or CSS would be needed.
}
errorDetailsDiv.textContent = entry.error_message;
// Toggle display by directly manipulating the style of the details div
errorDetailsDiv.style.display = errorDetailsDiv.style.display === 'none' ? 'block' : 'none';
};
statusCell.appendChild(errorSpan);
}
});
}
function updatePagination() {
if (!pageInfo || !prevButton || !nextButton) return;
const totalPages = Math.ceil(totalEntries / limit) || 1;
pageInfo.textContent = `Page ${currentPage} of ${totalPages}`;
prevButton.disabled = currentPage === 1;
nextButton.disabled = currentPage === totalPages;
}
function showDetailsModal(entry: any) {
const details = `Task ID: ${entry.task_id}\n` +
`Type: ${entry.download_type}\n` +
`Name: ${entry.item_name}\n` +
`Artist: ${entry.item_artist}\n` +
`Album: ${entry.item_album || 'N/A'}\n` +
`URL: ${entry.item_url}\n` +
`Spotify ID: ${entry.spotify_id || 'N/A'}\n` +
`Status: ${entry.status_final}\n` +
`Error: ${entry.error_message || 'None'}\n` +
`Added: ${new Date(entry.timestamp_added * 1000).toLocaleString()}\n` +
`Completed/Ended: ${new Date(entry.timestamp_completed * 1000).toLocaleString()}\n\n` +
`Original Request: ${JSON.stringify(JSON.parse(entry.original_request_json || '{}'), null, 2)}\n\n` +
`Last Status Object: ${JSON.stringify(JSON.parse(entry.last_status_obj_json || '{}'), null, 2)}`;
alert(details);
}
document.querySelectorAll('th[data-sort]').forEach(headerCell => {
headerCell.addEventListener('click', () => {
const sortField = (headerCell as HTMLElement).dataset.sort;
if (!sortField) return;
if (currentSortBy === sortField) {
currentSortOrder = currentSortOrder === 'ASC' ? 'DESC' : 'ASC';
} else {
currentSortBy = sortField;
currentSortOrder = 'DESC';
}
fetchHistory(1);
});
});
prevButton?.addEventListener('click', () => fetchHistory(currentPage - 1));
nextButton?.addEventListener('click', () => fetchHistory(currentPage + 1));
limitSelect?.addEventListener('change', (e) => {
limit = parseInt((e.target as HTMLSelectElement).value, 10);
fetchHistory(1);
});
statusFilter?.addEventListener('change', () => fetchHistory(1));
typeFilter?.addEventListener('change', () => fetchHistory(1));
// Initial fetch
fetchHistory();
});

src/js/queue.ts

@@ -162,7 +162,6 @@ export class DownloadQueue {
// Load the saved visible count (or default to 10)
visibleCount: number;
-globalSyncIntervalId: number | null = null; // For the new global sync
constructor() {
const storedVisibleCount = localStorage.getItem("downloadQueueVisibleCount");
@@ -203,10 +202,9 @@ export class DownloadQueue {
// Wait for initDOM to complete before setting up event listeners and loading existing PRG files.
this.initDOM().then(() => {
this.initEventListeners();
-this.loadExistingPrgFiles().then(() => { // Ensure loadExistingPrgFiles completes
-// Start global task list synchronization after initial load
-this.startGlobalTaskSync();
-});
+this.loadExistingPrgFiles();
+// Start periodic sync
+setInterval(() => this.periodicSyncWithServer(), 10000); // Sync every 10 seconds
});
}
@@ -391,18 +389,6 @@ export class DownloadQueue {
* Adds a new download entry.
*/
addDownload(item: QueueItem, type: string, prgFile: string, requestUrl: string | null = null, startMonitoring: boolean = false): string {
-// Check if an entry with this prgFile already exists
-const existingQueueId = this.findQueueIdByPrgFile(prgFile);
-if (existingQueueId) {
-console.log(`addDownload: Entry for prgFile ${prgFile} already exists with queueId ${existingQueueId}. Ensuring monitoring.`);
-const existingEntry = this.queueEntries[existingQueueId];
-if (existingEntry && !existingEntry.hasEnded && startMonitoring && !this.pollingIntervals[existingQueueId]) {
-// If it exists, is not ended, needs monitoring, and isn't currently polled, start its individual polling.
-this.startDownloadStatusMonitoring(existingQueueId);
-}
-return existingQueueId; // Return existing ID
-}
const queueId = this.generateQueueId();
const entry = this.createQueueEntry(item, type, prgFile, queueId, requestUrl);
this.queueEntries[queueId] = entry;
@@ -988,16 +974,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
return index >= 0 && index < this.visibleCount;
}
-findQueueIdByPrgFile(prgFile: string): string | undefined {
-for (const queueId in this.queueEntries) {
-if (this.queueEntries[queueId].prgFile === prgFile) {
-return queueId;
-}
-}
-return undefined;
-}
-async cleanupEntry(queueId: string /* Parameter deleteFromServer removed */) {
+async cleanupEntry(queueId: string) {
const entry = this.queueEntries[queueId];
if (entry) {
// Close any polling interval
@@ -1023,9 +1000,6 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
localStorage.setItem("downloadQueueCache", JSON.stringify(this.queueCache));
}
// The block for deleting from server has been removed.
// console.log(`Entry ${queueId} (${entry.prgFile}) cleaned up from UI and local cache.`);
// Update the queue display
this.updateQueueOrder();
}
@@ -1319,22 +1293,16 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
// Stop polling
this.clearPollingInterval(queueId);
const statusData = typeof progress === 'object' ? progress : entry.lastStatus;
-if (statusData && (statusData.status === 'complete' || statusData.status === 'done')) {
-// For completed tasks, show for 2 seconds then remove from UI only
-setTimeout(() => {
-this.cleanupEntry(queueId); // Pass only queueId
-}, 2000);
-} else {
-// For other terminal states (error, cancelled), use existing cleanup logic (default 10s)
-// The server-side delete for these will be handled by backend mechanisms or specific cancel actions
-const cleanupDelay = 10000;
-setTimeout(() => {
-this.cleanupEntry(queueId); // Pass only queueId
-}, cleanupDelay);
-}
+// Use 3 seconds cleanup delay for completed, 10 seconds for other terminal states like errors
+const cleanupDelay = (progress && typeof progress !== 'number' && (progress.status === 'complete' || progress.status === 'done')) ? 3000 :
+(progress && typeof progress !== 'number' && (progress.status === 'cancelled' || progress.status === 'cancel' || progress.status === 'skipped')) ? 20000 :
+10000; // Default for other errors if not caught by the more specific error handler delay
+// Clean up after the appropriate delay
+setTimeout(() => {
+this.cleanupEntry(queueId);
+}, cleanupDelay);
}
handleInactivity(entry: QueueEntry, queueId: string, logElement: HTMLElement | null) { // Add types
@@ -1519,9 +1487,9 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
// Prepare query parameters
const queryParams = new URLSearchParams();
-// Add item.name and item.artist only if they are not empty or undefined
-if (item.name && item.name.trim() !== '') queryParams.append('name', item.name);
-if (item.artist && item.artist.trim() !== '') queryParams.append('artist', item.artist);
+// item.name and item.artist are no longer sent as query parameters
+// if (item.name && item.name.trim() !== '') queryParams.append('name', item.name);
+// if (item.artist && item.artist.trim() !== '') queryParams.append('artist', item.artist);
// For artist downloads, include album_type as it may still be needed
if (type === 'artist' && albumType) {
@@ -1663,202 +1631,107 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
// Clear existing queue entries first to avoid duplicates when refreshing
for (const queueId in this.queueEntries) {
const entry = this.queueEntries[queueId];
// Close any active connections
this.clearPollingInterval(queueId);
// Don't remove the entry from DOM - we'll rebuild it entirely
delete this.queueEntries[queueId];
}
// Fetch detailed task list from the new endpoint
const response = await fetch('/api/prgs/list');
-const prgFiles: string[] = await response.json(); // Add type
-// Sort filenames by the numeric portion (assumes format "type_number.prg").
-prgFiles.sort((a, b) => {
-const numA = parseInt(a.split('_')[1]);
-const numB = parseInt(b.split('_')[1]);
-return numA - numB;
-});
+if (!response.ok) {
+console.error("Failed to load existing tasks:", response.status, await response.text());
+return;
+}
+const existingTasks: any[] = await response.json(); // We expect an array of detailed task objects
// Iterate through each PRG file and add it as a dummy queue entry.
for (const prgFile of prgFiles) {
try {
const prgResponse = await fetch(`/api/prgs/${prgFile}`);
if (!prgResponse.ok) continue;
const prgData: StatusData = await prgResponse.json(); // Add type
// Skip prg files that are marked as cancelled, completed, or interrupted
if (prgData.last_line &&
(prgData.last_line.status === "cancel" ||
prgData.last_line.status === "cancelled" ||
prgData.last_line.status === "interrupted" ||
prgData.last_line.status === "complete")) {
// Delete old completed or cancelled PRG files
try {
await fetch(`/api/prgs/delete/${prgFile}`, { method: 'DELETE' });
console.log(`Cleaned up old PRG file: ${prgFile}`);
} catch (error) {
console.error(`Failed to delete completed/cancelled PRG file ${prgFile}:`, error);
}
continue;
const terminalStates = ['complete', 'done', 'cancelled', 'ERROR_AUTO_CLEANED', 'ERROR_RETRIED', 'cancel', 'interrupted', 'error'];
for (const taskData of existingTasks) {
const prgFile = taskData.task_id; // Use task_id as prgFile identifier
const lastStatus = taskData.last_status_obj;
const originalRequest = taskData.original_request || {};
// Skip adding to UI if the task is already in a terminal state
if (lastStatus && terminalStates.includes(lastStatus.status)) {
console.log(`Skipping UI addition for terminal task ${prgFile}, status: ${lastStatus.status}`);
// Also ensure it's cleaned from local cache if it was there
if (this.queueCache[prgFile]) {
delete this.queueCache[prgFile];
}
// Check cached status - if we marked it cancelled locally, delete it and skip
const cachedStatus: StatusData | undefined = this.queueCache[prgFile]; // Add type
if (cachedStatus &&
(cachedStatus.status === 'cancelled' ||
cachedStatus.status === 'cancel' ||
cachedStatus.status === 'interrupted' ||
cachedStatus.status === 'complete')) {
try {
await fetch(`/api/prgs/delete/${prgFile}`, { method: 'DELETE' });
console.log(`Cleaned up cached cancelled PRG file: ${prgFile}`);
} catch (error) {
console.error(`Failed to delete cached cancelled PRG file ${prgFile}:`, error);
}
continue;
}
// Use the enhanced original request info from the first line
const originalRequest = prgData.original_request || {};
let lastLineData: StatusData = prgData.last_line || {}; // Add type
// First check if this is a track with a parent (part of an album/playlist)
let itemType = lastLineData.type || prgData.display_type || originalRequest.display_type || originalRequest.type || 'unknown';
let dummyItem: QueueItem = {}; // Add type
// If this is a track with a parent, treat it as the parent type for UI purposes
if (lastLineData.type === 'track' && lastLineData.parent) {
const parent = lastLineData.parent;
if (parent.type === 'album') {
itemType = 'album';
dummyItem = {
name: parent.title || 'Unknown Album',
artist: parent.artist || 'Unknown Artist',
type: 'album',
url: parent.url || '',
// Keep track of the current track info for progress display
current_track: lastLineData.current_track,
total_tracks: (typeof parent.total_tracks === 'string' ? parseInt(parent.total_tracks, 10) : parent.total_tracks) || (typeof lastLineData.total_tracks === 'string' ? parseInt(lastLineData.total_tracks, 10) : lastLineData.total_tracks) || 0,
// Store parent info directly in the item
parent: parent
};
} else if (parent.type === 'playlist') {
itemType = 'playlist';
dummyItem = {
name: parent.name || 'Unknown Playlist',
owner: parent.owner || 'Unknown Creator',
type: 'playlist',
url: parent.url || '',
// Keep track of the current track info for progress display
current_track: lastLineData.current_track,
total_tracks: (typeof parent.total_tracks === 'string' ? parseInt(parent.total_tracks, 10) : parent.total_tracks) || (typeof lastLineData.total_tracks === 'string' ? parseInt(lastLineData.total_tracks, 10) : lastLineData.total_tracks) || 0,
// Store parent info directly in the item
parent: parent
};
}
} else {
// Use the explicit display fields if available, or fall back to other fields
continue;
}
let itemType = taskData.type || originalRequest.type || 'unknown';
let dummyItem: QueueItem = {
name: taskData.name || originalRequest.name || prgFile,
artist: taskData.artist || originalRequest.artist || '',
type: itemType,
url: originalRequest.url || lastStatus?.url || '',
endpoint: originalRequest.endpoint || '',
download_type: taskData.download_type || originalRequest.download_type || '',
total_tracks: lastStatus?.total_tracks || originalRequest.total_tracks,
current_track: lastStatus?.current_track,
};
// If this is a track with a parent from the last_status, adjust item and type
if (lastStatus && lastStatus.type === 'track' && lastStatus.parent) {
const parent = lastStatus.parent;
if (parent.type === 'album') {
itemType = 'album';
dummyItem = {
name: prgData.display_title || originalRequest.display_title || lastLineData.name || lastLineData.song || lastLineData.title || originalRequest.name || prgFile,
artist: prgData.display_artist || originalRequest.display_artist || lastLineData.artist || originalRequest.artist || '',
type: itemType,
url: originalRequest.url || lastLineData.url || '',
endpoint: originalRequest.endpoint || '',
download_type: originalRequest.download_type || '',
// Include any available track info
song: lastLineData.song,
title: lastLineData.title,
total_tracks: typeof lastLineData.total_tracks === 'string' ? parseInt(lastLineData.total_tracks, 10) : lastLineData.total_tracks,
current_track: lastLineData.current_track
name: parent.title || 'Unknown Album',
artist: parent.artist || 'Unknown Artist',
type: 'album',
url: parent.url || '',
total_tracks: parent.total_tracks || lastStatus.total_tracks,
parent: parent
};
};
// Check if this is a retry file and get the retry count
let retryCount = 0;
if (prgFile.includes('_retry')) {
} else if (parent.type === 'playlist') {
itemType = 'playlist';
dummyItem = {
name: parent.name || 'Unknown Playlist',
owner: parent.owner || 'Unknown Creator',
type: 'playlist',
url: parent.url || '',
total_tracks: parent.total_tracks || lastStatus.total_tracks,
parent: parent
};
}
}
let retryCount = 0;
if (lastStatus && lastStatus.retry_count) {
retryCount = lastStatus.retry_count;
} else if (prgFile.includes('_retry')) {
const retryMatch = prgFile.match(/_retry(\d+)/);
if (retryMatch && retryMatch[1]) {
retryCount = parseInt(retryMatch[1], 10);
} else if (prgData.last_line && prgData.last_line.retry_count) {
retryCount = prgData.last_line.retry_count;
}
} else if (prgData.last_line && prgData.last_line.retry_count) {
retryCount = prgData.last_line.retry_count;
}
// Build a potential requestUrl from the original information
let requestUrl: string | null = null; // Add type
if (dummyItem.endpoint && dummyItem.url) {
const params = new CustomURLSearchParams();
params.append('url', dummyItem.url);
if (dummyItem.name) params.append('name', dummyItem.name);
if (dummyItem.artist) params.append('artist', dummyItem.artist);
// Add any other parameters from the original request
for (const [key, value] of Object.entries(originalRequest)) {
if (!['url', 'name', 'artist', 'type', 'endpoint', 'download_type',
'display_title', 'display_type', 'display_artist', 'service'].includes(key)) {
params.append(key, value as string); // Cast value to string
}
}
requestUrl = `${dummyItem.endpoint}?${params.toString()}`;
}
// Add to download queue
const queueId = this.generateQueueId();
const entry = this.createQueueEntry(dummyItem, itemType, prgFile, queueId, requestUrl);
entry.retryCount = retryCount;
// Set the entry's last status from the PRG file
if (prgData.last_line) {
entry.lastStatus = prgData.last_line;
// If this is a track that's part of an album/playlist
if (prgData.last_line.parent) {
entry.parentInfo = prgData.last_line.parent;
}
// Make sure to save the status to the cache for persistence
this.queueCache[prgFile] = prgData.last_line;
// Apply proper status classes
this.applyStatusClasses(entry, prgData.last_line);
// Update log display with current info
const logElement = entry.element.querySelector('.log') as HTMLElement | null;
if (logElement) {
if (prgData.last_line.song && prgData.last_line.artist &&
['progress', 'real-time', 'real_time', 'processing', 'downloading'].includes(prgData.last_line.status || '')) { // Add null check
logElement.textContent = `Currently downloading: ${prgData.last_line.song} by ${prgData.last_line.artist}`;
} else if (entry.parentInfo && !['done', 'complete', 'error', 'skipped'].includes(prgData.last_line.status || '')) {
// Show parent info for non-terminal states
if (entry.parentInfo.type === 'album') {
logElement.textContent = `From album: "${entry.parentInfo.title}"`;
} else if (entry.parentInfo.type === 'playlist') {
logElement.textContent = `From playlist: "${entry.parentInfo.name}" by ${entry.parentInfo.owner}`;
}
}
}
}
this.queueEntries[queueId] = entry;
} catch (error) {
console.error("Error fetching details for", prgFile, error);
}
const requestUrl = originalRequest.url ? `/api/${itemType}/download/${originalRequest.url.split('/').pop()}?name=${encodeURIComponent(dummyItem.name || '')}&artist=${encodeURIComponent(dummyItem.artist || '')}` : null;
const queueId = this.generateQueueId();
const entry = this.createQueueEntry(dummyItem, itemType, prgFile, queueId, requestUrl);
entry.retryCount = retryCount;
if (lastStatus) {
entry.lastStatus = lastStatus;
if (lastStatus.parent) {
entry.parentInfo = lastStatus.parent;
}
this.queueCache[prgFile] = lastStatus; // Cache the last known status
this.applyStatusClasses(entry, lastStatus);
const logElement = entry.element.querySelector('.log') as HTMLElement | null;
if (logElement) {
logElement.textContent = this.getStatusMessage(lastStatus);
}
}
this.queueEntries[queueId] = entry;
}
// Save updated cache to localStorage
localStorage.setItem("downloadQueueCache", JSON.stringify(this.queueCache));
// After adding all entries, update the queue
this.updateQueueOrder();
// Start monitoring for all active entries that are visible
// This is the key change to ensure continued status updates after page refresh
this.startMonitoringActiveEntries();
} catch (error) {
console.error("Error loading existing PRG files:", error);
@@ -2028,32 +1901,27 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
if (data.last_line.status === 'cancelled' || data.last_line.status === 'cancel') {
console.log('Cleaning up cancelled download immediately');
this.clearPollingInterval(queueId);
this.cleanupEntry(queueId);
return; // No need to process further
}
// For terminal states, start a short UI timer before cleanup
if (data.last_line.status === 'complete' || data.last_line.status === 'done') {
this.clearPollingInterval(queueId);
} else {
// For other terminal states like 'error':
// only stop polling if this is not an error that we're in the process of retrying
const isRetrying = entry.isRetrying ||
(data.last_line.status === 'error' &&
entry.element.querySelector('.log')?.textContent?.includes('Retry'));
if (!isRetrying) {
// Errors use the error display and auto-cleanup logic below;
// we just ensure polling stops here for errors too if not retrying
this.clearPollingInterval(queueId);
}
}
setTimeout(() => {
// Double-check the entry still exists and has not been retried before cleaning up
const currentEntry = this.queueEntries[queueId];
if (currentEntry &&
!currentEntry.isRetrying &&
currentEntry.hasEnded) {
this.clearPollingInterval(queueId);
this.cleanupEntry(queueId);
}
}, data.last_line.status === 'complete' || data.last_line.status === 'done' ? 3000 : 5000); // 3s for complete/done, 5s for others
}
}
@@ -2247,7 +2115,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
const closeErrorBtn = errorLogElement.querySelector('.close-error-btn') as HTMLButtonElement | null;
if (closeErrorBtn) {
closeErrorBtn.addEventListener('click', () => {
this.cleanupEntry(queueId);
});
}
@@ -2273,7 +2141,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
!currentEntryForCleanup.isRetrying) {
this.cleanupEntry(queueId);
}
}, 20000); // Changed from 15000 to 20000
} else { // Error UI already exists, just update the message text if it's different
if (errorMessageElement.textContent !== errMsg) {
@@ -2286,21 +2154,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
// Handle terminal states for non-error cases
if (['complete', 'cancel', 'cancelled', 'done', 'skipped'].includes(status)) {
entry.hasEnded = true;
this.handleDownloadCompletion(entry, queueId, statusData);
}
// Cache the status for potential page reloads
@@ -2774,79 +2628,127 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
for (const queueId in this.pollingIntervals) {
this.clearPollingInterval(queueId);
}
if (this.globalSyncIntervalId !== null) {
clearInterval(this.globalSyncIntervalId as number);
this.globalSyncIntervalId = null;
console.log('Stopped global task sync polling.');
}
}
/* New method for periodic server sync */
async periodicSyncWithServer() {
console.log("Performing periodic sync with server...");
try {
const response = await fetch('/api/prgs/list');
if (!response.ok) {
console.error("Periodic sync: Failed to fetch task list from server", response.status);
return;
}
const serverTasks: any[] = await response.json();
const localTaskPrgFiles = new Set(Object.values(this.queueEntries).map(entry => entry.prgFile));
const serverTaskPrgFiles = new Set(serverTasks.map(task => task.task_id));
const terminalStates = ['complete', 'done', 'cancelled', 'ERROR_AUTO_CLEANED', 'ERROR_RETRIED', 'cancel', 'interrupted', 'error'];
// 1. Add new tasks from server not known locally or update existing ones
for (const serverTask of serverTasks) {
const taskId = serverTask.task_id; // This is the prgFile
const lastStatus = serverTask.last_status_obj;
const originalRequest = serverTask.original_request || {};
if (terminalStates.includes(lastStatus?.status)) {
// If server says it's terminal, and we have it locally, ensure it's cleaned up
const localEntry = Object.values(this.queueEntries).find(e => e.prgFile === taskId);
if (localEntry && !localEntry.hasEnded) {
console.log(`Periodic sync: Server task ${taskId} is terminal (${lastStatus.status}), cleaning up local entry.`);
// Use a status object for handleDownloadCompletion
this.handleDownloadCompletion(localEntry, localEntry.uniqueId, lastStatus);
}
continue; // Skip adding terminal tasks to UI if not already there
}
if (!localTaskPrgFiles.has(taskId)) {
console.log(`Periodic sync: Found new non-terminal task ${taskId} on server. Adding to queue.`);
let itemType = serverTask.type || originalRequest.type || 'unknown';
let dummyItem: QueueItem = {
name: serverTask.name || originalRequest.name || taskId,
artist: serverTask.artist || originalRequest.artist || '',
type: itemType,
url: originalRequest.url || lastStatus?.url || '',
endpoint: originalRequest.endpoint || '',
download_type: serverTask.download_type || originalRequest.download_type || '',
total_tracks: lastStatus?.total_tracks || originalRequest.total_tracks,
current_track: lastStatus?.current_track,
};
if (lastStatus && lastStatus.type === 'track' && lastStatus.parent) {
const parent = lastStatus.parent;
if (parent.type === 'album') {
itemType = 'album';
dummyItem = {
name: parent.title || 'Unknown Album',
artist: parent.artist || 'Unknown Artist',
type: 'album', url: parent.url || '',
total_tracks: parent.total_tracks || lastStatus.total_tracks,
parent: parent };
} else if (parent.type === 'playlist') {
itemType = 'playlist';
dummyItem = {
name: parent.name || 'Unknown Playlist',
owner: parent.owner || 'Unknown Creator',
type: 'playlist', url: parent.url || '',
total_tracks: parent.total_tracks || lastStatus.total_tracks,
parent: parent };
}
}
const requestUrl = originalRequest.url ? `/api/${itemType}/download/${originalRequest.url.split('/').pop()}?name=${encodeURIComponent(dummyItem.name || '')}&artist=${encodeURIComponent(dummyItem.artist || '')}` : null;
// Add with startMonitoring = true
const queueId = this.addDownload(dummyItem, itemType, taskId, requestUrl, true);
const newEntry = this.queueEntries[queueId];
if (newEntry && lastStatus) {
// Manually set lastStatus and update UI as addDownload might not have full server info yet
newEntry.lastStatus = lastStatus;
if(lastStatus.parent) newEntry.parentInfo = lastStatus.parent;
this.applyStatusClasses(newEntry, lastStatus);
const logEl = newEntry.element.querySelector('.log') as HTMLElement | null;
if(logEl) logEl.textContent = this.getStatusMessage(lastStatus);
// Ensure polling is active for this newly added item
this.setupPollingInterval(newEntry.uniqueId);
}
} else {
// Task exists locally, check if status needs update from server list
const localEntry = Object.values(this.queueEntries).find(e => e.prgFile === taskId);
if (localEntry && lastStatus && JSON.stringify(localEntry.lastStatus) !== JSON.stringify(lastStatus)) {
if (!localEntry.hasEnded) {
console.log(`Periodic sync: Updating status for existing task ${taskId} from ${localEntry.lastStatus?.status} to ${lastStatus.status}`);
// Create a data object that handleStatusUpdate expects
const updateData: StatusData = { ...serverTask, last_line: lastStatus };
this.handleStatusUpdate(localEntry.uniqueId, updateData);
}
}
}
}
// 2. Remove local tasks that are no longer on the server or are now terminal on server
for (const localEntry of Object.values(this.queueEntries)) {
if (!serverTaskPrgFiles.has(localEntry.prgFile)) {
if (!localEntry.hasEnded) {
console.log(`Periodic sync: Local task ${localEntry.prgFile} not found on server. Assuming completed/cleaned. Removing.`);
this.cleanupEntry(localEntry.uniqueId);
}
} else {
const serverEquivalent = serverTasks.find(st => st.task_id === localEntry.prgFile);
if (serverEquivalent && serverEquivalent.last_status_obj && terminalStates.includes(serverEquivalent.last_status_obj.status)) {
if (!localEntry.hasEnded) {
console.log(`Periodic sync: Local task ${localEntry.prgFile} is now terminal on server (${serverEquivalent.last_status_obj.status}). Cleaning up.`);
this.handleDownloadCompletion(localEntry, localEntry.uniqueId, serverEquivalent.last_status_obj);
}
}
}
}
this.updateQueueOrder();
} catch (error) {
console.error("Error during periodic sync with server:", error);
}
}
startGlobalTaskSync() {
if (this.globalSyncIntervalId !== null) {
clearInterval(this.globalSyncIntervalId as number);
}
this.periodicSyncWithServer(); // Initial sync
this.globalSyncIntervalId = setInterval(() => {
this.periodicSyncWithServer();
}, 5000) as unknown as number; // Poll every 5 seconds
console.log('Started global task sync polling every 5 seconds.');
}
}
// Singleton instance
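One follow-up this refactor suggests: the terminalStates literal is now duplicated verbatim in loadExistingTasks and periodicSyncWithServer. A minimal sketch of hoisting it to a shared module-level constant follows; the name TERMINAL_STATES and the helper are my own, not part of this commit.
// Hypothetical shared constant; both methods could reference this
// instead of re-declaring the same array literal.
const TERMINAL_STATES: ReadonlyArray<string> = [
  'complete', 'done', 'cancelled', 'ERROR_AUTO_CLEANED',
  'ERROR_RETRIED', 'cancel', 'interrupted', 'error',
];

function isTerminalStatus(status?: string): boolean {
  return status !== undefined && TERMINAL_STATES.includes(status);
}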


@@ -65,36 +65,6 @@ body {
font-weight: bold;
}
/* Back button as floating icon - keep this for our floating button */
.back-button.floating-icon {
position: fixed;
width: 56px;
height: 56px;
bottom: 20px;
left: 20px;
background-color: var(--color-primary);
border-radius: 50%;
box-shadow: var(--shadow-lg);
z-index: 9999;
display: flex;
align-items: center;
justify-content: center;
transition: transform 0.2s ease, background-color 0.2s ease;
text-decoration: none !important;
}
.back-button.floating-icon:hover {
background-color: var(--color-primary-hover);
transform: scale(1.05);
}
.back-button.floating-icon img {
width: 24px;
height: 24px;
filter: brightness(0) invert(1);
margin: 0;
}
/* Queue Sidebar for Config Page */
#downloadQueue {
position: fixed;

static/css/history/history.css Normal file

@@ -0,0 +1,121 @@
body {
font-family: sans-serif;
margin: 0;
background-color: #121212;
color: #e0e0e0;
}
.container {
padding: 20px;
max-width: 1200px;
margin: auto;
}
h1 {
color: #1DB954; /* Spotify Green */
text-align: center;
}
table {
width: 100%;
border-collapse: collapse;
margin-top: 20px;
background-color: #1e1e1e;
}
th, td {
border: 1px solid #333;
padding: 10px 12px;
text-align: left;
}
th {
background-color: #282828;
cursor: pointer;
}
tr:nth-child(even) {
background-color: #222;
}
.pagination {
margin-top: 20px;
text-align: center;
}
.pagination button, .pagination select {
padding: 8px 12px;
margin: 0 5px;
background-color: #1DB954;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
}
.pagination button:disabled {
background-color: #555;
cursor: not-allowed;
}
.filters {
margin-bottom: 20px;
display: flex;
gap: 15px;
align-items: center;
}
.filters label, .filters select, .filters input {
margin-right: 5px;
}
.filters select, .filters input {
padding: 8px;
background-color: #282828;
color: #e0e0e0;
border: 1px solid #333;
border-radius: 4px;
}
.status-COMPLETED { color: #1DB954; font-weight: bold; }
.status-ERROR { color: #FF4136; font-weight: bold; }
.status-CANCELLED { color: #AAAAAA; }
.error-message-toggle {
cursor: pointer;
color: #FF4136; /* Red for error indicator */
text-decoration: underline;
}
.error-details {
display: none; /* Hidden by default */
white-space: pre-wrap; /* Preserve formatting */
background-color: #303030;
padding: 5px;
margin-top: 5px;
border-radius: 3px;
font-size: 0.9em;
}
/* Styling for the Details icon button in the table */
.details-btn {
background-color: transparent; /* Or a subtle color like #282828 */
border: none;
border-radius: 50%; /* Make it circular */
padding: 5px; /* Adjust padding to control size */
cursor: pointer;
display: inline-flex; /* Important for aligning the image */
align-items: center;
justify-content: center;
transition: background-color 0.2s ease;
}
.details-btn img {
width: 16px; /* Icon size */
height: 16px;
filter: invert(1); /* Make icon white if it's dark, adjust if needed */
}
.details-btn:hover {
background-color: #333; /* Darker on hover */
}
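The .error-message-toggle / .error-details pair above implies a small click handler in the page script to reveal the hidden details block. A hypothetical wiring, assuming each toggle is rendered immediately before its details div (neither the handler nor that DOM layout is shown in this commit):
// Hypothetical: toggle the error details block that follows each
// .error-message-toggle element in the history table.
document.addEventListener('click', (ev) => {
  const target = ev.target as HTMLElement;
  if (!target.classList.contains('error-message-toggle')) return;
  const details = target.nextElementSibling as HTMLElement | null;
  if (details && details.classList.contains('error-details')) {
    details.style.display = details.style.display === 'block' ? 'none' : 'block';
  }
});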


@@ -158,44 +158,36 @@ a:hover, a:focus {
background-color: var(--color-surface-hover);
}
/* General styles for floating action buttons (FABs) */
.floating-icon {
position: fixed;
z-index: 1000; /* Base z-index, can be overridden */
border-radius: 50%;
box-shadow: 0 2px 5px rgba(0,0,0,0.2);
display: flex;
align-items: center;
justify-content: center;
width: 48px; /* Standard size */
height: 48px; /* Standard size */
background-color: #282828; /* Dark background */
transition: background-color 0.3s ease, transform 0.2s ease;
text-decoration: none !important; /* Ensure no underline for <a> tags */
}
.floating-icon:hover {
background-color: #333; /* Slightly lighter on hover */
transform: scale(1.05);
}
.floating-icon:active {
transform: scale(0.98);
}
.floating-icon img {
width: 24px;
height: 24px;
filter: invert(1); /* White icon */
margin: 0; /* Reset any margin if inherited */
}
/* Home button */
@@ -221,20 +213,17 @@ a:hover, a:focus {
transform: scale(0.98);
}
/* Styles for buttons that are specifically floating icons (like home button when it's a FAB) */
/* This ensures that if a .home-btn also has .floating-icon, it gets the correct FAB styling. */
.home-btn.floating-icon,
.settings-icon.floating-icon, /* If settings button is an <a> or <button> with this class */
.back-button.floating-icon, /* If back button is an <a> or <button> with this class */
.history-nav-btn.floating-icon, /* If history button is an <a> or <button> with this class */
.queue-icon.floating-icon, /* If queue button is an <a> or <button> with this class */
.watch-nav-btn.floating-icon { /* If watch button is an <a> or <button> with this class */
/* Specific overrides if needed, but mostly inherits from .floating-icon */
/* For example, if a specific button needs a different background */
/* background-color: var(--color-primary); */ /* Example if some should use primary color */
}
/* Download button */


@@ -162,3 +162,44 @@
margin: 4px; /* Reduce margins to better fit mobile layouts */
}
}
/* Positioning for floating action buttons */
/* Base .floating-icon style is now in base.css */
/* Left-aligned buttons (Home, Settings, Back, History) */
.home-btn, .settings-icon, .back-button, .history-nav-btn {
left: 20px;
}
.settings-icon { /* Covers config, main */
bottom: 20px;
}
.home-btn { /* Covers album, artist, playlist, track, watch, history */
bottom: 20px;
}
.back-button { /* Specific to config page */
bottom: 20px;
}
/* New History button specific positioning - above other left buttons */
.history-nav-btn {
bottom: 80px; /* Positioned 60px above the buttons at 20px (48px button height + 12px margin) */
}
/* Right-aligned buttons (Queue, Watch) */
.queue-icon, .watch-nav-btn {
right: 20px;
z-index: 1002; /* Ensure these are above the sidebar (z-index: 1001) and other FABs (z-index: 1000) */
}
.queue-icon {
bottom: 20px;
}
/* Watch button specific positioning - above Queue */
.watch-nav-btn {
bottom: 80px; /* Positioned 60px above the queue button (48px button height + 12px margin) */
}


@@ -46,6 +46,9 @@
</div>
<!-- Fixed floating buttons for home and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon home-btn" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<button id="homeButton" class="btn-icon home-btn floating-icon settings-icon" aria-label="Return to home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home">
</button>


@@ -51,6 +51,9 @@
</div>
<!-- Fixed floating buttons for home and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon home-btn" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<button id="homeButton" class="btn-icon home-btn floating-icon settings-icon" aria-label="Return to home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home">
</button>


@@ -317,6 +317,9 @@
</div>
<!-- Fixed floating buttons for back and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon settings-icon" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<a href="/" class="back-button floating-icon settings-icon" aria-label="Back to app">
<img src="{{ url_for('static', filename='images/arrow-left.svg') }}" alt="Back" />
</a>

static/html/history.html Normal file

@@ -0,0 +1,83 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Download History</title>
<!-- Link to global stylesheets first -->
<link rel="stylesheet" href="{{ url_for('static', filename='css/main/base.css') }}">
<link rel="stylesheet" href="{{ url_for('static', filename='css/main/icons.css') }}">
<!-- Link to page-specific stylesheet -->
<link rel="stylesheet" href="{{ url_for('static', filename='css/history/history.css') }}">
<!-- Helper function for image errors, if not already in base.css or loaded globally -->
<script>
function handleImageError(img) {
img.onerror = null; // Prevent infinite loop if placeholder also fails
img.src = "{{ url_for('static', filename='images/placeholder.jpg') }}";
}
</script>
</head>
<body>
<div class="container">
<h1>Download History</h1>
<div class="filters">
<label for="status-filter">Status:</label>
<select id="status-filter">
<option value="">All</option>
<option value="COMPLETED">Completed</option>
<option value="ERROR">Error</option>
<option value="CANCELLED">Cancelled</option>
</select>
<label for="type-filter">Type:</label>
<select id="type-filter">
<option value="">All</option>
<option value="track">Track</option>
<option value="album">Album</option>
<option value="playlist">Playlist</option>
<option value="artist">Artist</option>
</select>
</div>
<table>
<thead>
<tr>
<th data-sort="item_name">Name</th>
<th data-sort="item_artist">Artist</th>
<th data-sort="download_type">Type</th>
<th data-sort="status_final">Status</th>
<th data-sort="timestamp_added">Date Added</th>
<th data-sort="timestamp_completed">Date Completed/Ended</th>
<th>Details</th>
</tr>
</thead>
<tbody id="history-table-body">
<!-- Rows will be inserted here by JavaScript -->
</tbody>
</table>
<div class="pagination">
<button id="prev-page" disabled>Previous</button>
<span id="page-info">Page 1 of 1</span>
<button id="next-page" disabled>Next</button>
<select id="limit-select">
<option value="10">10 per page</option>
<option value="25" selected>25 per page</option>
<option value="50">50 per page</option>
<option value="100">100 per page</option>
</select>
</div>
</div>
<!-- Fixed floating buttons for home and queue -->
<a href="/" class="btn-icon home-btn floating-icon" aria-label="Return to home" title="Go to Home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home" onerror="handleImageError(this)"/>
</a>
<!-- Link to the new TypeScript file (compiled to JS) -->
<script type="module" src="{{ url_for('static', filename='js/history.js') }}"></script>
<!-- Queue icon, assuming queue.js handles its own initialization if included -->
<!-- You might want to include queue.js here if the queue icon is desired on this page -->
<!-- <script type="module" src="{{ url_for('static', filename='js/queue.js') }}"></script> -->
</body>
</html>


@@ -59,6 +59,9 @@
</div>
<!-- Fixed floating buttons for settings and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon settings-icon" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<a href="/config" class="btn-icon settings-icon floating-icon" aria-label="Settings">
<img src="{{ url_for('static', filename='images/settings.svg') }}" alt="Settings" onerror="handleImageError(this)"/>
</a>


@@ -58,6 +58,9 @@
</div>
<!-- Fixed floating buttons for home and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon home-btn" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<button id="homeButton" class="btn-icon home-btn floating-icon settings-icon" aria-label="Return to home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home">
</button>


@@ -45,6 +45,9 @@
</div>
<!-- Fixed floating buttons for home and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon home-btn" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<button id="homeButton" class="btn-icon home-btn floating-icon settings-icon" aria-label="Return to home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home">
</button>


@@ -43,6 +43,9 @@
</div>
<!-- Fixed floating buttons for settings and queue -->
<a href="/history" class="btn-icon history-nav-btn floating-icon home-btn" aria-label="Download History" title="Go to Download History">
<img src="{{ url_for('static', filename='images/history.svg') }}" alt="History" onerror="handleImageError(this)"/>
</a>
<a href="/" class="btn-icon home-btn floating-icon settings-icon" aria-label="Return to Home" title="Return to Home">
<img src="{{ url_for('static', filename='images/home.svg') }}" alt="Home" onerror="handleImageError(this)"/>
</a>

static/images/history.svg Normal file

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M5.07868 5.06891C8.87402 1.27893 15.0437 1.31923 18.8622 5.13778C22.6824 8.95797 22.7211 15.1313 18.9262 18.9262C15.1312 22.7211 8.95793 22.6824 5.13774 18.8622C2.87389 16.5984 1.93904 13.5099 2.34047 10.5812C2.39672 10.1708 2.775 9.88377 3.18537 9.94002C3.59575 9.99627 3.88282 10.3745 3.82658 10.7849C3.4866 13.2652 4.27782 15.881 6.1984 17.8016C9.44288 21.0461 14.6664 21.0646 17.8655 17.8655C21.0646 14.6664 21.046 9.44292 17.8015 6.19844C14.5587 2.95561 9.33889 2.93539 6.13935 6.12957L6.88705 6.13333C7.30126 6.13541 7.63535 6.47288 7.63327 6.88709C7.63119 7.3013 7.29372 7.63539 6.87951 7.63331L4.33396 7.62052C3.92269 7.61845 3.58981 7.28556 3.58774 6.8743L3.57495 4.32874C3.57286 3.91454 3.90696 3.57707 4.32117 3.57498C4.73538 3.5729 5.07285 3.907 5.07493 4.32121L5.07868 5.06891ZM11.9999 7.24992C12.4141 7.24992 12.7499 7.58571 12.7499 7.99992V11.6893L15.0302 13.9696C15.3231 14.2625 15.3231 14.7374 15.0302 15.0302C14.7373 15.3231 14.2624 15.3231 13.9696 15.0302L11.2499 12.3106V7.99992C11.2499 7.58571 11.5857 7.24992 11.9999 7.24992Z" fill="#1C274C"/>
</svg>

static/images/info.svg Normal file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="Warning / Info">
<path id="Vector" d="M12 11V16M12 21C7.02944 21 3 16.9706 3 12C3 7.02944 7.02944 3 12 3C16.9706 3 21 7.02944 21 12C21 16.9706 16.9706 21 12 21ZM12.0498 8V8.1L11.9502 8.1002V8H12.0498Z" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>

After

Width:  |  Height:  |  Size: 531 B