From e97efb6b19642ba523d3d6ea71bb3b85c7f5e0ad Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Sat, 7 Jun 2025 14:01:38 -0600 Subject: [PATCH 1/7] Clean log format --- app.py | 2 +- routes/__init__.py | 2 +- routes/utils/celery_manager.py | 14 +++++++++++--- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/app.py b/app.py index 3a54956..5373b93 100755 --- a/app.py +++ b/app.py @@ -46,7 +46,7 @@ def setup_logging(): # Log formatting log_format = logging.Formatter( - "%(asctime)s [%(processName)s:%(threadName)s] [%(name)s] [%(levelname)s] - %(message)s", + "%(asctime)s [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S", ) diff --git a/routes/__init__.py b/routes/__init__.py index 9f959dc..4d89a25 100755 --- a/routes/__init__.py +++ b/routes/__init__.py @@ -4,7 +4,7 @@ import atexit # Configure basic logging for the application if not already configured # This is a good place for it if routes are a central part of your app structure. logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" + level=logging.INFO, format="%(message)s" ) logger = logging.getLogger(__name__) diff --git a/routes/utils/celery_manager.py b/routes/utils/celery_manager.py index f2fcf40..1fc0504 100644 --- a/routes/utils/celery_manager.py +++ b/routes/utils/celery_manager.py @@ -69,8 +69,16 @@ class CeleryManager: try: for line in iter(stream.readline, ""): if line: - log_method = logger.error if error else logger.info - log_method(f"{log_prefix}: {line.strip()}") + line_stripped = line.strip() + log_method = logger.info # Default log method + + if error: # This is a stderr stream + if " - ERROR - " in line_stripped or " - CRITICAL - " in line_stripped: + log_method = logger.error + elif " - WARNING - " in line_stripped: + log_method = logger.warning + + log_method(f"{log_prefix}: {line_stripped}") elif ( self.stop_event.is_set() ): # If empty line and stop is set, likely EOF @@ -359,7 +367,7 @@ celery_manager = CeleryManager() if 
__name__ == "__main__": logging.basicConfig( level=logging.INFO, - format="%(asctime)s [%(levelname)s] [%(threadName)s] [%(name)s] - %(message)s", + format="%(message)s", ) logger.info("Starting Celery Manager example...") celery_manager.start() From e81ee40a1d2c8ee749973036fc0cfd1ee8aea6b2 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Sat, 7 Jun 2025 14:56:13 -0600 Subject: [PATCH 2/7] test suite --- .dockerignore | 1 + routes/album.py | 12 +-- routes/playlist.py | 12 +-- routes/prgs.py | 14 ++- routes/track.py | 12 +-- src/js/queue.ts | 202 ++++++++++++++++++++-------------------- tests/README.md | 44 +++++++++ tests/__init__.py | 1 + tests/conftest.py | 149 +++++++++++++++++++++++++++++ tests/test_config.py | 94 +++++++++++++++++++ tests/test_downloads.py | 128 +++++++++++++++++++++++++ tests/test_history.py | 61 ++++++++++++ tests/test_prgs.py | 93 ++++++++++++++++++ tests/test_search.py | 35 +++++++ tests/test_watch.py | 117 +++++++++++++++++++++++ 15 files changed, 846 insertions(+), 129 deletions(-) create mode 100644 tests/README.md create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_config.py create mode 100644 tests/test_downloads.py create mode 100644 tests/test_history.py create mode 100644 tests/test_prgs.py create mode 100644 tests/test_search.py create mode 100644 tests/test_watch.py diff --git a/.dockerignore b/.dockerignore index 617bfc5..790e94d 100755 --- a/.dockerignore +++ b/.dockerignore @@ -24,3 +24,4 @@ logs/ .env .venv data +tests/ \ No newline at end of file diff --git a/routes/album.py b/routes/album.py index 98f6d6d..6b24c4a 100755 --- a/routes/album.py +++ b/routes/album.py @@ -111,25 +111,25 @@ def handle_download(album_id): ) return Response( - json.dumps({"prg_file": task_id}), status=202, mimetype="application/json" + json.dumps({"task_id": task_id}), status=202, mimetype="application/json" ) @album_bp.route("/download/cancel", methods=["GET"]) def cancel_download(): """ - Cancel a 
running download process by its prg file name. + Cancel a running download process by its task id. """ - prg_file = request.args.get("prg_file") - if not prg_file: + task_id = request.args.get("task_id") + if not task_id: return Response( - json.dumps({"error": "Missing process id (prg_file) parameter"}), + json.dumps({"error": "Missing task id (task_id) parameter"}), status=400, mimetype="application/json", ) # Use the queue manager's cancellation method. - result = download_queue_manager.cancel_task(prg_file) + result = download_queue_manager.cancel_task(task_id) status_code = 200 if result.get("status") == "cancelled" else 404 return Response(json.dumps(result), status=status_code, mimetype="application/json") diff --git a/routes/playlist.py b/routes/playlist.py index 268b772..a17a98f 100755 --- a/routes/playlist.py +++ b/routes/playlist.py @@ -133,7 +133,7 @@ def handle_download(playlist_id): ) return Response( - json.dumps({"prg_file": task_id}), # prg_file is the old name for task_id + json.dumps({"task_id": task_id}), status=202, mimetype="application/json", ) @@ -142,18 +142,18 @@ def handle_download(playlist_id): @playlist_bp.route("/download/cancel", methods=["GET"]) def cancel_download(): """ - Cancel a running playlist download process by its prg file name. + Cancel a running playlist download process by its task id. """ - prg_file = request.args.get("prg_file") - if not prg_file: + task_id = request.args.get("task_id") + if not task_id: return Response( - json.dumps({"error": "Missing process id (prg_file) parameter"}), + json.dumps({"error": "Missing task id (task_id) parameter"}), status=400, mimetype="application/json", ) # Use the queue manager's cancellation method. 
- result = download_queue_manager.cancel_task(prg_file) + result = download_queue_manager.cancel_task(task_id) status_code = 200 if result.get("status") == "cancelled" else 404 return Response(json.dumps(result), status=status_code, mimetype="application/json") diff --git a/routes/prgs.py b/routes/prgs.py index 5795ee8..c6d0d92 100755 --- a/routes/prgs.py +++ b/routes/prgs.py @@ -21,16 +21,15 @@ prgs_bp = Blueprint("prgs", __name__, url_prefix="/api/prgs") @prgs_bp.route("/", methods=["GET"]) -def get_prg_file(task_id): +def get_task_details(task_id): """ Return a JSON object with the resource type, its name (title), the last progress update, and, if available, the original request parameters. - This function works with both the old PRG file system (for backward compatibility) - and the new task ID based system. + This function works with the new task ID based system. Args: - task_id: Either a task UUID from Celery or a PRG filename from the old system + task_id: A task UUID from Celery """ # Only support new task IDs task_info = get_task_info(task_id) @@ -88,13 +87,12 @@ def get_prg_file(task_id): @prgs_bp.route("/delete/", methods=["DELETE"]) -def delete_prg_file(task_id): +def delete_task(task_id): """ Delete a task's information and history. - Works with both the old PRG file system and the new task ID based system. Args: - task_id: Either a task UUID from Celery or a PRG filename from the old system + task_id: A task UUID from Celery """ # Only support new task IDs task_info = get_task_info(task_id) @@ -107,7 +105,7 @@ def delete_prg_file(task_id): @prgs_bp.route("/list", methods=["GET"]) -def list_prg_files(): +def list_tasks(): """ Retrieve a list of all tasks in the system. Returns a detailed list of task objects including status and metadata. 
diff --git a/routes/track.py b/routes/track.py index 01406c9..3f86828 100755 --- a/routes/track.py +++ b/routes/track.py @@ -127,7 +127,7 @@ def handle_download(track_id): ) return Response( - json.dumps({"prg_file": task_id}), # prg_file is the old name for task_id + json.dumps({"task_id": task_id}), status=202, mimetype="application/json", ) @@ -136,18 +136,18 @@ def handle_download(track_id): @track_bp.route("/download/cancel", methods=["GET"]) def cancel_download(): """ - Cancel a running track download process by its process id (prg file name). + Cancel a running track download process by its task id. """ - prg_file = request.args.get("prg_file") - if not prg_file: + task_id = request.args.get("task_id") + if not task_id: return Response( - json.dumps({"error": "Missing process id (prg_file) parameter"}), + json.dumps({"error": "Missing task id (task_id) parameter"}), status=400, mimetype="application/json", ) # Use the queue manager's cancellation method. - result = download_queue_manager.cancel_task(prg_file) + result = download_queue_manager.cancel_task(task_id) status_code = 200 if result.get("status") == "cancelled" else 404 return Response(json.dumps(result), status=status_code, mimetype="application/json") diff --git a/src/js/queue.ts b/src/js/queue.ts index eeab46f..9db4fdf 100644 --- a/src/js/queue.ts +++ b/src/js/queue.ts @@ -71,7 +71,7 @@ interface StatusData { retry_count?: number; max_retries?: number; // from config potentially seconds_left?: number; - prg_file?: string; + task_id?: string; url?: string; reason?: string; // for skipped parent?: ParentInfo; @@ -100,7 +100,7 @@ interface StatusData { interface QueueEntry { item: QueueItem; type: string; - prgFile: string; + taskId: string; requestUrl: string | null; element: HTMLElement; lastStatus: StatusData; @@ -196,13 +196,13 @@ export class DownloadQueue { // const storedVisibleCount = localStorage.getItem("downloadQueueVisibleCount"); // this.visibleCount = storedVisibleCount ? 
parseInt(storedVisibleCount, 10) : 10; - // Load the cached status info (object keyed by prgFile) - This is also redundant + // Load the cached status info (object keyed by taskId) - This is also redundant // this.queueCache = JSON.parse(localStorage.getItem("downloadQueueCache") || "{}"); // Wait for initDOM to complete before setting up event listeners and loading existing PRG files. this.initDOM().then(() => { this.initEventListeners(); - this.loadExistingPrgFiles(); + this.loadExistingTasks(); // Start periodic sync setInterval(() => this.periodicSyncWithServer(), 10000); // Sync every 10 seconds }); @@ -278,8 +278,8 @@ export class DownloadQueue { cancelAllBtn.addEventListener('click', () => { for (const queueId in this.queueEntries) { const entry = this.queueEntries[queueId]; - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; - if (entry && !entry.hasEnded && entry.prgFile) { + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; + if (entry && !entry.hasEnded && entry.taskId) { // Mark as cancelling visually if (entry.element) { entry.element.classList.add('cancelling'); @@ -289,7 +289,7 @@ export class DownloadQueue { } // Cancel each active download - fetch(`/api/${entry.type}/download/cancel?prg_file=${entry.prgFile}`) + fetch(`/api/${entry.type}/download/cancel?task_id=${entry.taskId}`) .then(response => response.json()) .then(data => { // API returns status 'cancelled' when cancellation succeeds @@ -388,9 +388,9 @@ export class DownloadQueue { /** * Adds a new download entry. 
*/ - addDownload(item: QueueItem, type: string, prgFile: string, requestUrl: string | null = null, startMonitoring: boolean = false): string { + addDownload(item: QueueItem, type: string, taskId: string, requestUrl: string | null = null, startMonitoring: boolean = false): string { const queueId = this.generateQueueId(); - const entry = this.createQueueEntry(item, type, prgFile, queueId, requestUrl); + const entry = this.createQueueEntry(item, type, taskId, queueId, requestUrl); this.queueEntries[queueId] = entry; // Re-render and update which entries are processed. this.updateQueueOrder(); @@ -417,17 +417,17 @@ export class DownloadQueue { // Show a preparing message for new entries if (entry.isNew) { - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (logElement) { logElement.textContent = "Initializing download..."; } } - console.log(`Starting monitoring for ${entry.type} with PRG file: ${entry.prgFile}`); + console.log(`Starting monitoring for ${entry.type} with task ID: ${entry.taskId}`); // For backward compatibility, first try to get initial status from the REST API try { - const response = await fetch(`/api/prgs/${entry.prgFile}`); + const response = await fetch(`/api/prgs/${entry.taskId}`); if (response.ok) { const data: StatusData = await response.json(); // Add type to data @@ -464,7 +464,7 @@ export class DownloadQueue { entry.status = data.last_line.status || 'unknown'; // Ensure status is not undefined // Update status message without recreating the element - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (logElement) { const statusMessage = this.getStatusMessage(data.last_line); logElement.textContent = 
statusMessage; @@ -474,7 +474,7 @@ export class DownloadQueue { this.applyStatusClasses(entry, data.last_line); // Save updated status to cache, ensuring we preserve parent data - this.queueCache[entry.prgFile] = { + this.queueCache[entry.taskId] = { ...data.last_line, // Ensure parent data is preserved parent: data.last_line.parent || entry.lastStatus?.parent @@ -540,11 +540,11 @@ export class DownloadQueue { /** * Creates a new queue entry. It checks localStorage for any cached info. */ - createQueueEntry(item: QueueItem, type: string, prgFile: string, queueId: string, requestUrl: string | null): QueueEntry { + createQueueEntry(item: QueueItem, type: string, taskId: string, queueId: string, requestUrl: string | null): QueueEntry { console.log(`Creating queue entry with initial type: ${type}`); // Get cached data if it exists - const cachedData: StatusData | undefined = this.queueCache[prgFile]; // Add type + const cachedData: StatusData | undefined = this.queueCache[taskId]; // Add type // If we have cached data, use it to determine the true type and item properties if (cachedData) { @@ -588,9 +588,9 @@ export class DownloadQueue { const entry: QueueEntry = { // Add type to entry item, type, - prgFile, + taskId, requestUrl, // for potential retry - element: this.createQueueItem(item, type, prgFile, queueId), + element: this.createQueueItem(item, type, taskId, queueId), lastStatus: { // Initialize with basic item metadata for immediate display type, @@ -615,7 +615,7 @@ export class DownloadQueue { realTimeStallDetector: { count: 0, lastStatusJson: '' } // For detecting stalled real_time downloads }; - // If cached info exists for this PRG file, use it. + // If cached info exists for this task, use it. if (cachedData) { entry.lastStatus = cachedData; const logEl = entry.element.querySelector('.log') as HTMLElement | null; @@ -640,7 +640,7 @@ export class DownloadQueue { /** * Returns an HTML element for the queue entry with modern UI styling. 
*/ -createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string): HTMLElement { +createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): HTMLElement { // Track whether this is a multi-track item (album or playlist) const isMultiTrack = type === 'album' || type === 'playlist'; const defaultMessage = (type === 'playlist') ? 'Reading track list' : 'Initializing download...'; @@ -664,26 +664,26 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) ${displayArtist ? `
${displayArtist}
` : ''}
${displayType}
-
-
${defaultMessage}
+
${defaultMessage}
- +
-
-
+
-
+
`; @@ -693,10 +693,10 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string)
Overall Progress - 0/0 + 0/0
-
`; @@ -745,7 +745,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) case 'error': entry.element.classList.add('error'); // Hide error-details to prevent duplicate error display - const errorDetailsContainer = entry.element.querySelector(`#error-details-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const errorDetailsContainer = entry.element.querySelector(`#error-details-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (errorDetailsContainer) { errorDetailsContainer.style.display = 'none'; } @@ -755,7 +755,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) entry.element.classList.add('complete'); // Hide error details if present if (entry.element) { - const errorDetailsContainer = entry.element.querySelector(`#error-details-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const errorDetailsContainer = entry.element.querySelector(`#error-details-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (errorDetailsContainer) { errorDetailsContainer.style.display = 'none'; } @@ -765,7 +765,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) entry.element.classList.add('cancelled'); // Hide error details if present if (entry.element) { - const errorDetailsContainer = entry.element.querySelector(`#error-details-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const errorDetailsContainer = entry.element.querySelector(`#error-details-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (errorDetailsContainer) { errorDetailsContainer.style.display = 'none'; } @@ -778,8 +778,8 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) const btn = (e.target as HTMLElement).closest('button') as HTMLButtonElement | null; // Add types and null check if (!btn) return; // Guard clause btn.style.display = 'none'; - const { prg, type, queueid } = btn.dataset; - if (!prg || !type || 
!queueid) return; // Guard against undefined dataset properties + const { taskid, type, queueid } = btn.dataset; + if (!taskid || !type || !queueid) return; // Guard against undefined dataset properties try { // Get the queue item element @@ -790,13 +790,13 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) } // Show cancellation in progress - const logElement = document.getElementById(`log-${queueid}-${prg}`) as HTMLElement | null; + const logElement = document.getElementById(`log-${queueid}-${taskid}`) as HTMLElement | null; if (logElement) { logElement.textContent = "Cancelling..."; } // First cancel the download - const response = await fetch(`/api/${type}/download/cancel?prg_file=${prg}`); + const response = await fetch(`/api/${type}/download/cancel?task_id=${taskid}`); const data = await response.json(); // API returns status 'cancelled' when cancellation succeeds if (data.status === "cancelled" || data.status === "cancel") { @@ -813,7 +813,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Mark as cancelled in the cache to prevent re-loading on page refresh entry.status = "cancelled"; - this.queueCache[prg] = { status: "cancelled" }; + this.queueCache[taskid] = { status: "cancelled" }; localStorage.setItem("downloadQueueCache", JSON.stringify(this.queueCache)); // Immediately remove the item from the UI @@ -924,7 +924,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // This is important for items that become visible after "Show More" or other UI changes Object.values(this.queueEntries).forEach(entry => { if (this.isEntryVisible(entry.uniqueId) && !entry.hasEnded && !this.pollingIntervals[entry.uniqueId]) { - console.log(`updateQueueOrder: Ensuring polling for visible/active entry ${entry.uniqueId} (${entry.prgFile})`); + console.log(`updateQueueOrder: Ensuring polling for visible/active entry ${entry.uniqueId} (${entry.taskId})`); 
this.setupPollingInterval(entry.uniqueId); } }); @@ -995,8 +995,8 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) delete this.queueEntries[queueId]; // Remove the cached info - if (this.queueCache[entry.prgFile]) { - delete this.queueCache[entry.prgFile]; + if (this.queueCache[entry.taskId]) { + delete this.queueCache[entry.taskId]; localStorage.setItem("downloadQueueCache", JSON.stringify(this.queueCache)); } @@ -1025,13 +1025,13 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Find the queue item this status belongs to let queueItem: QueueEntry | null = null; - const prgFile = data.prg_file || Object.keys(this.queueCache).find(key => + const taskId = data.task_id || Object.keys(this.queueCache).find(key => this.queueCache[key].status === data.status && this.queueCache[key].type === data.type ); - if (prgFile) { + if (taskId) { const queueId = Object.keys(this.queueEntries).find(id => - this.queueEntries[id].prgFile === prgFile + this.queueEntries[id].taskId === taskId ); if (queueId) { queueItem = this.queueEntries[queueId]; @@ -1408,17 +1408,17 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) const retryData: StatusData = await retryResponse.json(); // Add type - if (retryData.prg_file) { - const newPrgFile = retryData.prg_file; + if (retryData.task_id) { + const newTaskId = retryData.task_id; - // Clean up the old entry from UI, memory, cache, and server (PRG file) + // Clean up the old entry from UI, memory, cache, and server (task file) // logElement and retryBtn are part of the old entry's DOM structure and will be removed. await this.cleanupEntry(queueId); // Add the new download entry. This will create a new element, start monitoring, etc. 
- this.addDownload(originalItem, apiTypeForNewEntry, newPrgFile, requestUrlForNewEntry, true); + this.addDownload(originalItem, apiTypeForNewEntry, newTaskId, requestUrlForNewEntry, true); - // The old setTimeout block for deleting oldPrgFile is no longer needed as cleanupEntry handles it. + // The old setTimeout block for deleting old task file is no longer needed as cleanupEntry handles it. } else { if (errorMessageDiv) errorMessageDiv.textContent = 'Retry failed: invalid response from server.'; const currentEntry = this.queueEntries[queueId]; // Check if old entry still exists @@ -1574,8 +1574,8 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Make queue visible this.toggleVisibility(true); - // Just load existing PRG files as a fallback - await this.loadExistingPrgFiles(); + // Just load existing task files as a fallback + await this.loadExistingTasks(); // Force start monitoring for all loaded entries for (const queueId in this.queueEntries) { @@ -1590,12 +1590,12 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) } // Handle single-file downloads (tracks, albums, playlists) - if ('prg_file' in data && data.prg_file) { // Type guard - console.log(`Adding ${type} PRG file: ${data.prg_file}`); + if ('task_id' in data && data.task_id) { // Type guard + console.log(`Adding ${type} task with ID: ${data.task_id}`); // Store the initial metadata in the cache so it's available // even before the first status update - this.queueCache[data.prg_file] = { + this.queueCache[data.task_id] = { type, status: 'initializing', name: item.name || 'Unknown', @@ -1606,7 +1606,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) }; // Use direct monitoring for all downloads for consistency - const queueId = this.addDownload(item, type, data.prg_file, apiUrl, true); + const queueId = this.addDownload(item, type, data.task_id, apiUrl, true); // Make queue visible to show progress if 
not already visible if (this.config && !this.config.downloadQueueVisible) { // Add null check for config @@ -1624,9 +1624,9 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) } /** - * Loads existing PRG files from the /api/prgs/list endpoint and adds them as queue entries. + * Loads existing task files from the /api/prgs/list endpoint and adds them as queue entries. */ - async loadExistingPrgFiles() { + async loadExistingTasks() { try { // Clear existing queue entries first to avoid duplicates when refreshing for (const queueId in this.queueEntries) { @@ -1646,23 +1646,23 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) const terminalStates = ['complete', 'done', 'cancelled', 'ERROR_AUTO_CLEANED', 'ERROR_RETRIED', 'cancel', 'interrupted', 'error']; for (const taskData of existingTasks) { - const prgFile = taskData.task_id; // Use task_id as prgFile identifier + const taskId = taskData.task_id; // Use task_id as taskId identifier const lastStatus = taskData.last_status_obj; const originalRequest = taskData.original_request || {}; // Skip adding to UI if the task is already in a terminal state if (lastStatus && terminalStates.includes(lastStatus.status)) { - console.log(`Skipping UI addition for terminal task ${prgFile}, status: ${lastStatus.status}`); + console.log(`Skipping UI addition for terminal task ${taskId}, status: ${lastStatus.status}`); // Also ensure it's cleaned from local cache if it was there - if (this.queueCache[prgFile]) { - delete this.queueCache[prgFile]; + if (this.queueCache[taskId]) { + delete this.queueCache[taskId]; } continue; } let itemType = taskData.type || originalRequest.type || 'unknown'; let dummyItem: QueueItem = { - name: taskData.name || originalRequest.name || prgFile, + name: taskData.name || originalRequest.name || taskId, artist: taskData.artist || originalRequest.artist || '', type: itemType, url: originalRequest.url || lastStatus?.url || '', @@ -1680,29 
+1680,25 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) dummyItem = { name: parent.title || 'Unknown Album', artist: parent.artist || 'Unknown Artist', - type: 'album', - url: parent.url || '', + type: 'album', url: parent.url || '', total_tracks: parent.total_tracks || lastStatus.total_tracks, - parent: parent - }; + parent: parent }; } else if (parent.type === 'playlist') { itemType = 'playlist'; dummyItem = { name: parent.name || 'Unknown Playlist', owner: parent.owner || 'Unknown Creator', - type: 'playlist', - url: parent.url || '', + type: 'playlist', url: parent.url || '', total_tracks: parent.total_tracks || lastStatus.total_tracks, - parent: parent - }; + parent: parent }; } } let retryCount = 0; if (lastStatus && lastStatus.retry_count) { retryCount = lastStatus.retry_count; - } else if (prgFile.includes('_retry')) { - const retryMatch = prgFile.match(/_retry(\d+)/); + } else if (taskId.includes('_retry')) { + const retryMatch = taskId.match(/_retry(\d+)/); if (retryMatch && retryMatch[1]) { retryCount = parseInt(retryMatch[1], 10); } @@ -1711,7 +1707,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) const requestUrl = originalRequest.url ? 
`/api/${itemType}/download/${originalRequest.url.split('/').pop()}?name=${encodeURIComponent(dummyItem.name || '')}&artist=${encodeURIComponent(dummyItem.artist || '')}` : null; const queueId = this.generateQueueId(); - const entry = this.createQueueEntry(dummyItem, itemType, prgFile, queueId, requestUrl); + const entry = this.createQueueEntry(dummyItem, itemType, taskId, queueId, requestUrl); entry.retryCount = retryCount; if (lastStatus) { @@ -1719,7 +1715,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) if (lastStatus.parent) { entry.parentInfo = lastStatus.parent; } - this.queueCache[prgFile] = lastStatus; // Cache the last known status + this.queueCache[taskId] = lastStatus; // Cache the last known status this.applyStatusClasses(entry, lastStatus); const logElement = entry.element.querySelector('.log') as HTMLElement | null; @@ -1734,7 +1730,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) this.updateQueueOrder(); this.startMonitoringActiveEntries(); } catch (error) { - console.error("Error loading existing PRG files:", error); + console.error("Error loading existing task files:", error); } } @@ -1792,8 +1788,8 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) setupPollingInterval(queueId: string) { // Add type console.log(`Setting up polling for ${queueId}`); const entry = this.queueEntries[queueId]; - if (!entry || !entry.prgFile) { - console.warn(`No entry or prgFile for ${queueId}`); + if (!entry || !entry.taskId) { + console.warn(`No entry or taskId for ${queueId}`); return; } @@ -1813,7 +1809,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) this.pollingIntervals[queueId] = intervalId as unknown as number; // Cast to number via unknown } catch (error) { console.error(`Error creating polling for ${queueId}:`, error); - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as 
HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (logElement) { logElement.textContent = `Error with download: ${(error as Error).message}`; // Cast to Error entry.element.classList.add('error'); @@ -1823,13 +1819,13 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) async fetchDownloadStatus(queueId: string) { // Add type const entry = this.queueEntries[queueId]; - if (!entry || !entry.prgFile) { - console.warn(`No entry or prgFile for ${queueId}`); + if (!entry || !entry.taskId) { + console.warn(`No entry or taskId for ${queueId}`); return; } try { - const response = await fetch(`/api/prgs/${entry.prgFile}`); + const response = await fetch(`/api/prgs/${entry.taskId}`); if (!response.ok) { throw new Error(`HTTP error: ${response.status}`); } @@ -1929,7 +1925,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) console.error(`Error fetching status for ${queueId}:`, error); // Show error in log - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (logElement) { logElement.textContent = `Error updating status: ${(error as Error).message}`; // Cast to Error } @@ -2010,7 +2006,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) const STALL_THRESHOLD = 600; // Approx 5 minutes (600 polls * 0.5s/poll) if (detector.count >= STALL_THRESHOLD) { - console.warn(`Download ${queueId} (${entry.prgFile}) appears stalled in real_time state. Metrics: ${detector.lastStatusJson}. Stall count: ${detector.count}. Forcing error.`); + console.warn(`Download ${queueId} (${entry.taskId}) appears stalled in real_time state. Metrics: ${detector.lastStatusJson}. Stall count: ${detector.count}. 
Forcing error.`); statusData.status = 'error'; statusData.error = 'Download stalled (no progress updates for 5 minutes)'; statusData.can_retry = true; // Allow manual retry for stalled items @@ -2045,7 +2041,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Update log message - but only if we're not handling a track update for an album/playlist // That case is handled separately in updateItemMetadata to ensure we show the right track info - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (logElement && status !== 'error' && !(statusData.type === 'track' && statusData.parent && (entry.type === 'album' || entry.type === 'playlist'))) { logElement.textContent = message; @@ -2076,12 +2072,12 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) if (cancelBtn) cancelBtn.style.display = 'none'; // Hide progress bars for errored items - const trackProgressContainer = entry.element.querySelector(`#track-progress-container-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const trackProgressContainer = entry.element.querySelector(`#track-progress-container-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (trackProgressContainer) trackProgressContainer.style.display = 'none'; const overallProgressContainer = entry.element.querySelector('.overall-progress-container') as HTMLElement | null; if (overallProgressContainer) overallProgressContainer.style.display = 'none'; // Hide time elapsed for errored items - const timeElapsedContainer = entry.element.querySelector(`#time-elapsed-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const timeElapsedContainer = entry.element.querySelector(`#time-elapsed-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; if (timeElapsedContainer) 
timeElapsedContainer.style.display = 'none'; // Extract error details @@ -2094,7 +2090,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) console.log(`Error for ${entry.type} download. Can retry: ${!!entry.requestUrl}. Retry URL: ${entry.requestUrl}`); - const errorLogElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; // Use a different variable name + const errorLogElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; // Use a different variable name if (errorLogElement) { // Check errorLogElement let errorMessageElement = errorLogElement.querySelector('.error-message') as HTMLElement | null; @@ -2158,7 +2154,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) } // Cache the status for potential page reloads - this.queueCache[entry.prgFile] = statusData; + this.queueCache[entry.taskId] = statusData; localStorage.setItem("downloadQueueCache", JSON.stringify(this.queueCache)); } @@ -2212,8 +2208,8 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Update real-time progress for track downloads updateRealTimeProgress(entry: QueueEntry, statusData: StatusData) { // Add types // Get track progress bar - const trackProgressBar = entry.element.querySelector('#track-progress-bar-' + entry.uniqueId + '-' + entry.prgFile) as HTMLElement | null; - const timeElapsedEl = entry.element.querySelector('#time-elapsed-' + entry.uniqueId + '-' + entry.prgFile) as HTMLElement | null; + const trackProgressBar = entry.element.querySelector('#track-progress-bar-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; + const timeElapsedEl = entry.element.querySelector('#time-elapsed-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; if (trackProgressBar && statusData.progress !== undefined) { // Update track progress bar @@ -2242,8 +2238,8 @@ createQueueItem(item: QueueItem, 
type: string, prgFile: string, queueId: string) // Update progress for single track downloads updateSingleTrackProgress(entry: QueueEntry, statusData: StatusData) { // Add types // Get track progress bar and other UI elements - const trackProgressBar = entry.element.querySelector('#track-progress-bar-' + entry.uniqueId + '-' + entry.prgFile) as HTMLElement | null; - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const trackProgressBar = entry.element.querySelector('#track-progress-bar-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; const titleElement = entry.element.querySelector('.title') as HTMLElement | null; const artistElement = entry.element.querySelector('.artist') as HTMLElement | null; let progress = 0; // Declare progress here @@ -2348,7 +2344,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) trackProgressBar.setAttribute('aria-valuenow', safeProgress.toString()); // Use string // Make sure progress bar is visible - const trackProgressContainer = entry.element.querySelector('#track-progress-container-' + entry.uniqueId + '-' + entry.prgFile) as HTMLElement | null; + const trackProgressContainer = entry.element.querySelector('#track-progress-container-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; if (trackProgressContainer) { trackProgressContainer.style.display = 'block'; } @@ -2365,10 +2361,10 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Update progress for multi-track downloads (albums and playlists) updateMultiTrackProgress(entry: QueueEntry, statusData: StatusData) { // Add types // Get progress elements - const progressCounter = document.getElementById(`progress-count-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; - const overallProgressBar = 
document.getElementById(`overall-bar-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; - const trackProgressBar = entry.element.querySelector('#track-progress-bar-' + entry.uniqueId + '-' + entry.prgFile) as HTMLElement | null; - const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`) as HTMLElement | null; + const progressCounter = document.getElementById(`progress-count-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; + const overallProgressBar = document.getElementById(`overall-bar-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; + const trackProgressBar = entry.element.querySelector('#track-progress-bar-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; const titleElement = entry.element.querySelector('.title') as HTMLElement | null; const artistElement = entry.element.querySelector('.artist') as HTMLElement | null; let progress = 0; // Declare progress here for this function's scope @@ -2465,7 +2461,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // Update the track-level progress bar if (trackProgressBar) { // Make sure progress bar container is visible - const trackProgressContainer = entry.element.querySelector('#track-progress-container-' + entry.uniqueId + '-' + entry.prgFile) as HTMLElement | null; + const trackProgressContainer = entry.element.querySelector('#track-progress-container-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; if (trackProgressContainer) { trackProgressContainer.style.display = 'block'; } @@ -2641,7 +2637,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) } const serverTasks: any[] = await response.json(); - const localTaskPrgFiles = new Set(Object.values(this.queueEntries).map(entry => entry.prgFile)); + const localTaskPrgFiles = new 
Set(Object.values(this.queueEntries).map(entry => entry.taskId)); const serverTaskPrgFiles = new Set(serverTasks.map(task => task.task_id)); const terminalStates = ['complete', 'done', 'cancelled', 'ERROR_AUTO_CLEANED', 'ERROR_RETRIED', 'cancel', 'interrupted', 'error']; @@ -2654,7 +2650,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) if (terminalStates.includes(lastStatus?.status)) { // If server says it's terminal, and we have it locally, ensure it's cleaned up - const localEntry = Object.values(this.queueEntries).find(e => e.prgFile === taskId); + const localEntry = Object.values(this.queueEntries).find(e => e.taskId === taskId); if (localEntry && !localEntry.hasEnded) { console.log(`Periodic sync: Server task ${taskId} is terminal (${lastStatus.status}), cleaning up local entry.`); // Use a status object for handleDownloadCompletion @@ -2713,7 +2709,7 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) } } else { // Task exists locally, check if status needs update from server list - const localEntry = Object.values(this.queueEntries).find(e => e.prgFile === taskId); + const localEntry = Object.values(this.queueEntries).find(e => e.taskId === taskId); if (localEntry && lastStatus && JSON.stringify(localEntry.lastStatus) !== JSON.stringify(lastStatus)) { if (!localEntry.hasEnded) { console.log(`Periodic sync: Updating status for existing task ${taskId} from ${localEntry.lastStatus?.status} to ${lastStatus.status}`); @@ -2727,16 +2723,16 @@ createQueueItem(item: QueueItem, type: string, prgFile: string, queueId: string) // 2. Remove local tasks that are no longer on the server or are now terminal on server for (const localEntry of Object.values(this.queueEntries)) { - if (!serverTaskPrgFiles.has(localEntry.prgFile)) { + if (!serverTaskPrgFiles.has(localEntry.taskId)) { if (!localEntry.hasEnded) { - console.log(`Periodic sync: Local task ${localEntry.prgFile} not found on server. 
Assuming completed/cleaned. Removing.`); + console.log(`Periodic sync: Local task ${localEntry.taskId} not found on server. Assuming completed/cleaned. Removing.`); this.cleanupEntry(localEntry.uniqueId); } } else { - const serverEquivalent = serverTasks.find(st => st.task_id === localEntry.prgFile); + const serverEquivalent = serverTasks.find(st => st.task_id === localEntry.taskId); if (serverEquivalent && serverEquivalent.last_status_obj && terminalStates.includes(serverEquivalent.last_status_obj.status)) { if (!localEntry.hasEnded) { - console.log(`Periodic sync: Local task ${localEntry.prgFile} is now terminal on server (${serverEquivalent.last_status_obj.status}). Cleaning up.`); + console.log(`Periodic sync: Local task ${localEntry.taskId} is now terminal on server (${serverEquivalent.last_status_obj.status}). Cleaning up.`); this.handleDownloadCompletion(localEntry, localEntry.uniqueId, serverEquivalent.last_status_obj); } } diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..a64a48e --- /dev/null +++ b/tests/README.md @@ -0,0 +1,44 @@ +# Spotizerr Backend Tests + +This directory contains automated tests for the Spotizerr backend API. + +## Prerequisites + +1. **Running Backend**: Ensure the Spotizerr Flask application is running and accessible at `http://localhost:7171`. You can start it with `python app.py`. + +2. **Python Dependencies**: Install the necessary Python packages for testing. + ```bash + pip install pytest requests python-dotenv + ``` + +3. **Credentials**: These tests require valid Spotify and Deezer credentials. Create a file named `.env` in the root directory of the project (`spotizerr`) and add your credentials to it. The tests will load this file automatically. 
+ + **Example `.env` file:** + ``` + SPOTIFY_API_CLIENT_ID="your_spotify_client_id" + SPOTIFY_API_CLIENT_SECRET="your_spotify_client_secret" + # This should be the full JSON content of your credentials blob as a single line string + SPOTIFY_BLOB_CONTENT_STR='{"username": "your_spotify_username", "password": "your_spotify_password", ...}' + DEEZER_ARL="your_deezer_arl" + ``` + + The tests will automatically use these credentials to create and manage test accounts named `test-spotify-account` and `test-deezer-account`. + + ## Running Tests + + To run all tests, navigate to the root directory of the project (`spotizerr`) and run `pytest`: + + ```bash + pytest + ``` + + To run a specific test file: + + ```bash + pytest tests/test_downloads.py + ``` + + For more detailed output, use the `-v` (verbose) and `-s` (show print statements) flags: + ```bash + pytest -v -s + ``` \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..0519ecb --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..8cdc020 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,149 @@ +import pytest +import requests +import time +import os +import json +from dotenv import load_dotenv + +# Load environment variables from .env file in the project root +load_dotenv() + +# --- Environment-based secrets for testing --- +SPOTIFY_API_CLIENT_ID = os.environ.get("SPOTIFY_API_CLIENT_ID", "your_spotify_client_id") +SPOTIFY_API_CLIENT_SECRET = os.environ.get("SPOTIFY_API_CLIENT_SECRET", "your_spotify_client_secret") +SPOTIFY_BLOB_CONTENT_STR = os.environ.get("SPOTIFY_BLOB_CONTENT_STR", '{}') +try: + SPOTIFY_BLOB_CONTENT = json.loads(SPOTIFY_BLOB_CONTENT_STR) +except json.JSONDecodeError: + SPOTIFY_BLOB_CONTENT = {} + +DEEZER_ARL = os.environ.get("DEEZER_ARL", "your_deezer_arl") + +# --- Standard names for test accounts --- +SPOTIFY_ACCOUNT_NAME =
"test-spotify-account" +DEEZER_ACCOUNT_NAME = "test-deezer-account" + + +@pytest.fixture(scope="session") +def base_url(): + """Provides the base URL for the API tests.""" + return "http://localhost:7171/api" + + +def wait_for_task(base_url, task_id, timeout=600): + """ + Waits for a Celery task to reach a terminal state (complete, error, etc.). + Polls the progress endpoint and prints status updates. + """ + print(f"\n--- Waiting for task {task_id} (timeout: {timeout}s) ---") + start_time = time.time() + while time.time() - start_time < timeout: + try: + response = requests.get(f"{base_url}/prgs/{task_id}") + if response.status_code == 404: + time.sleep(1) + continue + + response.raise_for_status() # Raise an exception for bad status codes + + statuses = response.json() + if not statuses: + time.sleep(1) + continue + + last_status = statuses[-1] + status = last_status.get("status") + + # More verbose logging for debugging during tests + message = last_status.get('message', '') + track = last_status.get('track', '') + progress = last_status.get('overall_progress', '') + print(f"Task {task_id} | Status: {status:<12} | Progress: {progress or 'N/A':>3}% | Track: {track:<30} | Message: {message}") + + if status in ["complete", "ERROR", "cancelled", "ERROR_RETRIED", "ERROR_AUTO_CLEANED"]: + print(f"--- Task {task_id} finished with status: {status} ---") + return last_status + + time.sleep(2) + except requests.exceptions.RequestException as e: + print(f"Warning: Request to fetch task status for {task_id} failed: {e}. 
Retrying...") + time.sleep(5) + + raise TimeoutError(f"Task {task_id} did not complete within {timeout} seconds.") + + +@pytest.fixture(scope="session") +def task_waiter(base_url): + """Provides a fixture that returns the wait_for_task helper function.""" + def _waiter(task_id, timeout=600): + return wait_for_task(base_url, task_id, timeout) + return _waiter + + +@pytest.fixture(scope="session", autouse=True) +def setup_credentials_for_tests(base_url): + """ + A session-wide, automatic fixture to set up all necessary credentials. + It runs once before any tests, and tears down the credentials after all tests are complete. + """ + print("\n--- Setting up credentials for test session ---") + + print("\n--- DEBUGGING CREDENTIALS ---") + print(f"SPOTIFY_API_CLIENT_ID: {SPOTIFY_API_CLIENT_ID}") + print(f"SPOTIFY_API_CLIENT_SECRET: {SPOTIFY_API_CLIENT_SECRET}") + print(f"DEEZER_ARL: {DEEZER_ARL}") + print(f"SPOTIFY_BLOB_CONTENT {SPOTIFY_BLOB_CONTENT}") + print("--- END DEBUGGING ---\n") + + # Skip all tests if secrets are not provided in the environment + if SPOTIFY_API_CLIENT_ID == "your_spotify_client_id" or \ + SPOTIFY_API_CLIENT_SECRET == "your_spotify_client_secret" or \ + not SPOTIFY_BLOB_CONTENT or \ + DEEZER_ARL == "your_deezer_arl": + pytest.skip("Required credentials not provided in .env file or environment. Skipping credential-dependent tests.") + + # 1. Set global Spotify API creds + data = {"client_id": SPOTIFY_API_CLIENT_ID, "client_secret": SPOTIFY_API_CLIENT_SECRET} + response = requests.put(f"{base_url}/credentials/spotify_api_config", json=data) + if response.status_code != 200: + pytest.fail(f"Failed to set global Spotify API creds: {response.text}") + print("Global Spotify API credentials set.") + + # 2. 
Delete any pre-existing test credentials to ensure a clean state + requests.delete(f"{base_url}/credentials/spotify/{SPOTIFY_ACCOUNT_NAME}") + requests.delete(f"{base_url}/credentials/deezer/{DEEZER_ACCOUNT_NAME}") + print("Cleaned up any old test credentials.") + + # 3. Create Deezer credential + data = {"name": DEEZER_ACCOUNT_NAME, "arl": DEEZER_ARL, "region": "US"} + response = requests.post(f"{base_url}/credentials/deezer/{DEEZER_ACCOUNT_NAME}", json=data) + if response.status_code != 201: + pytest.fail(f"Failed to create Deezer credential: {response.text}") + print("Deezer test credential created.") + + # 4. Create Spotify credential + data = {"name": SPOTIFY_ACCOUNT_NAME, "blob_content": SPOTIFY_BLOB_CONTENT, "region": "US"} + response = requests.post(f"{base_url}/credentials/spotify/{SPOTIFY_ACCOUNT_NAME}", json=data) + if response.status_code != 201: + pytest.fail(f"Failed to create Spotify credential: {response.text}") + print("Spotify test credential created.") + + # 5. Set main config to use these accounts for downloads + config_payload = { + "spotify": SPOTIFY_ACCOUNT_NAME, + "deezer": DEEZER_ACCOUNT_NAME, + } + response = requests.post(f"{base_url}/config", json=config_payload) + if response.status_code != 200: + pytest.fail(f"Failed to set main config for tests: {response.text}") + print("Main config set to use test credentials.") + + yield # This is where the tests will run + + # --- Teardown --- + print("\n--- Tearing down test credentials ---") + response = requests.delete(f"{base_url}/credentials/spotify/{SPOTIFY_ACCOUNT_NAME}") + assert response.status_code in [200, 404] + response = requests.delete(f"{base_url}/credentials/deezer/{DEEZER_ACCOUNT_NAME}") + assert response.status_code in [200, 404] + print("Test credentials deleted.") \ No newline at end of file diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..00f81bf --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,94 @@ +import requests +import 
pytest + +@pytest.fixture +def reset_config(base_url): + """A fixture to ensure the main config is reset after a test case.""" + response = requests.get(f"{base_url}/config") + assert response.status_code == 200 + original_config = response.json() + yield + response = requests.post(f"{base_url}/config", json=original_config) + assert response.status_code == 200 + +def test_get_main_config(base_url): + """Tests if the main configuration can be retrieved.""" + response = requests.get(f"{base_url}/config") + assert response.status_code == 200 + config = response.json() + assert "service" in config + assert "maxConcurrentDownloads" in config + assert "spotify" in config # Should be set by conftest + assert "deezer" in config # Should be set by conftest + +def test_update_main_config(base_url, reset_config): + """Tests updating various fields in the main configuration.""" + new_settings = { + "maxConcurrentDownloads": 5, + "spotifyQuality": "HIGH", + "deezerQuality": "FLAC", + "customDirFormat": "%artist%/%album%", + "customTrackFormat": "%tracknum% %title%", + "save_cover": False, + "fallback": True, + } + + response = requests.post(f"{base_url}/config", json=new_settings) + assert response.status_code == 200 + updated_config = response.json() + + for key, value in new_settings.items(): + assert updated_config[key] == value + +def test_get_watch_config(base_url): + """Tests if the watch-specific configuration can be retrieved.""" + response = requests.get(f"{base_url}/config/watch") + assert response.status_code == 200 + config = response.json() + assert "delay_between_playlists_seconds" in config + assert "delay_between_artists_seconds" in config + +def test_update_watch_config(base_url): + """Tests updating the watch-specific configuration.""" + response = requests.get(f"{base_url}/config/watch") + original_config = response.json() + + new_settings = { + "delay_between_playlists_seconds": 120, + "delay_between_artists_seconds": 240, + 
"auto_add_new_releases_to_queue": False, + } + + response = requests.post(f"{base_url}/config/watch", json=new_settings) + assert response.status_code == 200 + updated_config = response.json() + + for key, value in new_settings.items(): + assert updated_config[key] == value + + # Revert to original + requests.post(f"{base_url}/config/watch", json=original_config) + +def test_update_conversion_config(base_url, reset_config): + """ + Iterates through all supported conversion formats and bitrates, + updating the config and verifying the changes for each combination. + """ + conversion_formats = ["mp3", "flac", "ogg", "opus", "m4a"] + bitrates = { + "mp3": ["320", "256", "192", "128"], + "ogg": ["500", "320", "192", "160"], + "opus": ["256", "192", "128", "96"], + "m4a": ["320k", "256k", "192k", "128k"], + "flac": [None] # Bitrate is not applicable for FLAC + } + + for format in conversion_formats: + for br in bitrates.get(format, [None]): + print(f"Testing conversion config: format={format}, bitrate={br}") + new_settings = {"convertTo": format, "bitrate": br} + response = requests.post(f"{base_url}/config", json=new_settings) + assert response.status_code == 200 + updated_config = response.json() + assert updated_config["convertTo"] == format + assert updated_config["bitrate"] == br \ No newline at end of file diff --git a/tests/test_downloads.py b/tests/test_downloads.py new file mode 100644 index 0000000..6cca963 --- /dev/null +++ b/tests/test_downloads.py @@ -0,0 +1,128 @@ +import requests +import pytest + +# URLs provided by the user for testing +SPOTIFY_TRACK_URL = "https://open.spotify.com/track/1Cts4YV9aOXVAP3bm3Ro6r" +SPOTIFY_ALBUM_URL = "https://open.spotify.com/album/4K0JVP5veNYTVI6IMamlla" +SPOTIFY_PLAYLIST_URL = "https://open.spotify.com/playlist/26CiMxIxdn5WhXyccMCPOB" +SPOTIFY_ARTIST_URL = "https://open.spotify.com/artist/7l6cdPhOLYO7lehz5xfzLV" + +# Corresponding IDs extracted from URLs +TRACK_ID = SPOTIFY_TRACK_URL.split('/')[-1].split('?')[0] 
+ALBUM_ID = SPOTIFY_ALBUM_URL.split('/')[-1].split('?')[0] +PLAYLIST_ID = SPOTIFY_PLAYLIST_URL.split('/')[-1].split('?')[0] +ARTIST_ID = SPOTIFY_ARTIST_URL.split('/')[-1].split('?')[0] + +@pytest.fixture +def reset_config(base_url): + """Fixture to reset the main config after a test to avoid side effects.""" + response = requests.get(f"{base_url}/config") + original_config = response.json() + yield + requests.post(f"{base_url}/config", json=original_config) + +def test_download_track_spotify_only(base_url, task_waiter, reset_config): + """Tests downloading a single track from Spotify with real-time download enabled.""" + print("\n--- Testing Spotify-only track download ---") + config_payload = { + "service": "spotify", + "fallback": False, + "realTime": True, + "spotifyQuality": "NORMAL" # Simulating free account quality + } + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + final_status = task_waiter(task_id) + assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" + +def test_download_album_spotify_only(base_url, task_waiter, reset_config): + """Tests downloading a full album from Spotify with real-time download enabled.""" + print("\n--- Testing Spotify-only album download ---") + config_payload = {"service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"} + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/album/download/{ALBUM_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + final_status = task_waiter(task_id, timeout=900) + assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" + +def test_download_playlist_spotify_only(base_url, task_waiter, reset_config): + """Tests downloading a full playlist from Spotify with real-time 
download enabled.""" + print("\n--- Testing Spotify-only playlist download ---") + config_payload = {"service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"} + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/playlist/download/{PLAYLIST_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + final_status = task_waiter(task_id, timeout=1200) + assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" + +def test_download_artist_spotify_only(base_url, task_waiter, reset_config): + """Tests queuing downloads for an artist's entire discography from Spotify.""" + print("\n--- Testing Spotify-only artist download ---") + config_payload = {"service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"} + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/artist/download/{ARTIST_ID}?album_type=album,single") + assert response.status_code == 202 + response_data = response.json() + queued_albums = response_data.get("successfully_queued_albums", []) + assert len(queued_albums) > 0, "No albums were queued for the artist." 
+ + for album in queued_albums: + task_id = album["task_id"] + print(f"--- Waiting for artist album: {album['name']} ({task_id}) ---") + final_status = task_waiter(task_id, timeout=900) + assert final_status["status"] == "complete", f"Artist album task {album['name']} failed: {final_status.get('error')}" + +def test_download_track_with_fallback(base_url, task_waiter, reset_config): + """Tests downloading a Spotify track with Deezer fallback enabled.""" + print("\n--- Testing track download with Deezer fallback ---") + config_payload = { + "service": "spotify", + "fallback": True, + "deezerQuality": "MP3_320" # Simulating higher quality from Deezer free + } + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + final_status = task_waiter(task_id) + assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" + +@pytest.mark.parametrize("format,bitrate", [ + ("mp3", "320"), ("mp3", "128"), + ("flac", None), + ("ogg", "160"), + ("opus", "128"), + ("m4a", "128k") +]) +def test_download_with_conversion(base_url, task_waiter, reset_config, format, bitrate): + """Tests downloading a track with various conversion formats and bitrates.""" + print(f"\n--- Testing conversion: {format} @ {bitrate or 'default'} ---") + config_payload = { + "service": "spotify", + "fallback": False, + "realTime": True, + "spotifyQuality": "NORMAL", + "convertTo": format, + "bitrate": bitrate + } + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + final_status = task_waiter(task_id) + assert final_status["status"] == "complete", f"Download failed for format {format} bitrate {bitrate}: {final_status.get('error')}" \ No newline at end of file diff --git 
a/tests/test_history.py b/tests/test_history.py new file mode 100644 index 0000000..bd6228b --- /dev/null +++ b/tests/test_history.py @@ -0,0 +1,61 @@ +import requests +import pytest +import time + +TRACK_ID = "1Cts4YV9aOXVAP3bm3Ro6r" # Use a known, short track + +@pytest.fixture +def reset_config(base_url): + """Fixture to reset the main config after a test.""" + response = requests.get(f"{base_url}/config") + original_config = response.json() + yield + requests.post(f"{base_url}/config", json=original_config) + +def test_history_logging_and_filtering(base_url, task_waiter, reset_config): + """ + Tests if a completed download appears in the history and + verifies that history filtering works correctly. + """ + # First, complete a download task to ensure there's a history entry + config_payload = {"service": "spotify", "fallback": False, "realTime": True} + requests.post(f"{base_url}/config", json=config_payload) + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + task_waiter(task_id) # Wait for the download to complete + + # Give a moment for history to be written if it's asynchronous + time.sleep(2) + + # 1. Get all history and check if our task is present + print("\n--- Verifying task appears in general history ---") + response = requests.get(f"{base_url}/history") + assert response.status_code == 200 + history_data = response.json() + assert "entries" in history_data + assert "total" in history_data + assert history_data["total"] > 0 + + # Find our specific task in the history + history_entry = next((entry for entry in history_data["entries"] if entry['task_id'] == task_id), None) + assert history_entry is not None, f"Task {task_id} not found in download history." + assert history_entry["status_final"] == "COMPLETED" + + # 2. 
Test filtering for COMPLETED tasks + print("\n--- Verifying history filtering for COMPLETED status ---") + response = requests.get(f"{base_url}/history?filters[status_final]=COMPLETED") + assert response.status_code == 200 + completed_history = response.json() + assert completed_history["total"] > 0 + assert any(entry['task_id'] == task_id for entry in completed_history["entries"]) + assert all(entry['status_final'] == 'COMPLETED' for entry in completed_history["entries"]) + + # 3. Test filtering for an item name + print(f"\n--- Verifying history filtering for item_name: {history_entry['item_name']} ---") + item_name_query = requests.utils.quote(history_entry['item_name']) + response = requests.get(f"{base_url}/history?filters[item_name]={item_name_query}") + assert response.status_code == 200 + named_history = response.json() + assert named_history["total"] > 0 + assert any(entry['task_id'] == task_id for entry in named_history["entries"]) \ No newline at end of file diff --git a/tests/test_prgs.py b/tests/test_prgs.py new file mode 100644 index 0000000..39dd902 --- /dev/null +++ b/tests/test_prgs.py @@ -0,0 +1,93 @@ +import requests +import pytest +import time + +# Use a known, short track for quick tests +TRACK_ID = "1Cts4YV9aOXVAP3bm3Ro6r" +# Use a long playlist to ensure there's time to cancel it +LONG_PLAYLIST_ID = "6WsyUEITURbQXZsqtEewb1" # Today's Top Hits on Spotify + +@pytest.fixture +def reset_config(base_url): + """Fixture to reset the main config after a test.""" + response = requests.get(f"{base_url}/config") + original_config = response.json() + yield + requests.post(f"{base_url}/config", json=original_config) + +def test_list_tasks(base_url, reset_config): + """Tests listing all active tasks.""" + config_payload = {"service": "spotify", "fallback": False, "realTime": True} + requests.post(f"{base_url}/config", json=config_payload) + + # Start a task + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 
202 + task_id = response.json()["task_id"] + + # Check the list to see if our task appears + response = requests.get(f"{base_url}/prgs/list") + assert response.status_code == 200 + tasks = response.json() + assert isinstance(tasks, list) + assert any(t['task_id'] == task_id for t in tasks) + + # Clean up by cancelling the task + requests.post(f"{base_url}/prgs/cancel/{task_id}") + +def test_get_task_progress_and_log(base_url, task_waiter, reset_config): + """Tests getting progress for a running task and retrieving its log after completion.""" + config_payload = {"service": "spotify", "fallback": False, "realTime": True} + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + # Poll progress a few times while it's running to check the endpoint + for _ in range(3): + time.sleep(1) + res = requests.get(f"{base_url}/prgs/{task_id}") + if res.status_code == 200 and res.json(): + statuses = res.json() + assert isinstance(statuses, list) + assert "status" in statuses[-1] + break + else: + pytest.fail("Could not get a valid task status in time.") + + # Wait for completion + final_status = task_waiter(task_id) + assert final_status["status"] == "complete" + + # After completion, check the task log endpoint + res = requests.get(f"{base_url}/prgs/{task_id}?log=true") + assert res.status_code == 200 + log_data = res.json() + assert "task_log" in log_data + assert len(log_data["task_log"]) > 0 + assert "status" in log_data["task_log"][0] + +def test_cancel_task(base_url, reset_config): + """Tests cancelling a task shortly after it has started.""" + config_payload = {"service": "spotify", "fallback": False, "realTime": True} + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/playlist/download/{LONG_PLAYLIST_ID}") + assert response.status_code == 202 + task_id = 
response.json()["task_id"] + + # Give it a moment to ensure it has started processing + time.sleep(3) + + # Cancel the task + response = requests.post(f"{base_url}/prgs/cancel/{task_id}") + assert response.status_code == 200 + assert response.json()["status"] == "cancelled" + + # Check the final status to confirm it's marked as cancelled + time.sleep(2) # Allow time for the final status to propagate + res = requests.get(f"{base_url}/prgs/{task_id}") + assert res.status_code == 200 + last_status = res.json()[-1] + assert last_status["status"] == "cancelled" \ No newline at end of file diff --git a/tests/test_search.py b/tests/test_search.py new file mode 100644 index 0000000..0d6072f --- /dev/null +++ b/tests/test_search.py @@ -0,0 +1,35 @@ +import requests +import pytest + +def test_search_spotify_artist(base_url): + """Tests searching for an artist on Spotify.""" + response = requests.get(f"{base_url}/search?q=Daft+Punk&search_type=artist") + assert response.status_code == 200 + results = response.json() + assert "items" in results + assert len(results["items"]) > 0 + assert "Daft Punk" in results["items"][0]["name"] + +def test_search_spotify_track(base_url): + """Tests searching for a track on Spotify.""" + response = requests.get(f"{base_url}/search?q=Get+Lucky&search_type=track") + assert response.status_code == 200 + results = response.json() + assert "items" in results + assert len(results["items"]) > 0 + +def test_search_deezer_track(base_url): + """Tests searching for a track on Deezer.""" + response = requests.get(f"{base_url}/search?q=Instant+Crush&search_type=track") + assert response.status_code == 200 + results = response.json() + assert "items" in results + assert len(results["items"]) > 0 + +def test_search_deezer_album(base_url): + """Tests searching for an album on Deezer.""" + response = requests.get(f"{base_url}/search?q=Random+Access+Memories&search_type=album") + assert response.status_code == 200 + results = response.json() + assert "items" 
in results + assert len(results["items"]) > 0 \ No newline at end of file diff --git a/tests/test_watch.py b/tests/test_watch.py new file mode 100644 index 0000000..fba8a31 --- /dev/null +++ b/tests/test_watch.py @@ -0,0 +1,117 @@ +import requests +import pytest +import time + +SPOTIFY_PLAYLIST_ID = "26CiMxIxdn5WhXyccMCPOB" +SPOTIFY_ARTIST_ID = "7l6cdPhOLYO7lehz5xfzLV" + +@pytest.fixture(autouse=True) +def setup_and_cleanup_watch_tests(base_url): + """ + A fixture that enables watch mode, cleans the watchlist before each test, + and then restores original state and cleans up after each test. + """ + # Get original watch config to restore it later + response = requests.get(f"{base_url}/config/watch") + assert response.status_code == 200 + original_config = response.json() + + # Enable watch mode for testing if it's not already + if not original_config.get("enabled"): + response = requests.post(f"{base_url}/config/watch", json={"enabled": True}) + assert response.status_code == 200 + + # Cleanup any existing watched items before the test + requests.delete(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}") + requests.delete(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}") + + yield + + # Cleanup watched items created during the test + requests.delete(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}") + requests.delete(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}") + + # Restore original watch config + response = requests.post(f"{base_url}/config/watch", json=original_config) + assert response.status_code == 200 + +def test_add_and_list_playlist_to_watch(base_url): + """Tests adding a playlist to the watch list and verifying it appears in the list.""" + response = requests.put(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}") + assert response.status_code == 200 + assert "Playlist added to watch list" in response.json()["message"] + + # Verify it's in the watched list + response = requests.get(f"{base_url}/playlist/watch/list") + assert response.status_code == 200 
+ watched_playlists = response.json() + assert any(p['spotify_id'] == SPOTIFY_PLAYLIST_ID for p in watched_playlists) + +def test_add_and_list_artist_to_watch(base_url): + """Tests adding an artist to the watch list and verifying it appears in the list.""" + response = requests.put(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}") + assert response.status_code == 200 + assert "Artist added to watch list" in response.json()["message"] + + # Verify it's in the watched list + response = requests.get(f"{base_url}/artist/watch/list") + assert response.status_code == 200 + watched_artists = response.json() + assert any(a['spotify_id'] == SPOTIFY_ARTIST_ID for a in watched_artists) + +def test_trigger_playlist_check(base_url): + """Tests the endpoint for manually triggering a check on a watched playlist.""" + # First, add the playlist to the watch list + requests.put(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}") + + # Trigger the check + response = requests.post(f"{base_url}/playlist/watch/trigger_check/{SPOTIFY_PLAYLIST_ID}") + assert response.status_code == 200 + assert "Check triggered for playlist" in response.json()["message"] + + # A full verification would require inspecting the database or new tasks, + # but for an API test, confirming the trigger endpoint responds correctly is the key goal. + print("Playlist check triggered. Note: This does not verify new downloads were queued.") + +def test_trigger_artist_check(base_url): + """Tests the endpoint for manually triggering a check on a watched artist.""" + # First, add the artist to the watch list + requests.put(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}") + + # Trigger the check + response = requests.post(f"{base_url}/artist/watch/trigger_check/{SPOTIFY_ARTIST_ID}") + assert response.status_code == 200 + assert "Check triggered for artist" in response.json()["message"] + print("Artist check triggered. 
Note: This does not verify new downloads were queued.") + +def test_remove_playlist_from_watch(base_url): + """Tests removing a playlist from the watch list.""" + # Add the playlist first to ensure it exists + requests.put(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}") + + # Now, remove it + response = requests.delete(f"{base_url}/playlist/watch/{SPOTIFY_PLAYLIST_ID}") + assert response.status_code == 200 + assert "Playlist removed from watch list" in response.json()["message"] + + # Verify it's no longer in the list + response = requests.get(f"{base_url}/playlist/watch/list") + assert response.status_code == 200 + watched_playlists = response.json() + assert not any(p['spotify_id'] == SPOTIFY_PLAYLIST_ID for p in watched_playlists) + +def test_remove_artist_from_watch(base_url): + """Tests removing an artist from the watch list.""" + # Add the artist first to ensure it exists + requests.put(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}") + + # Now, remove it + response = requests.delete(f"{base_url}/artist/watch/{SPOTIFY_ARTIST_ID}") + assert response.status_code == 200 + assert "Artist removed from watch list" in response.json()["message"] + + # Verify it's no longer in the list + response = requests.get(f"{base_url}/artist/watch/list") + assert response.status_code == 200 + watched_artists = response.json() + assert not any(a['spotify_id'] == SPOTIFY_ARTIST_ID for a in watched_artists) \ No newline at end of file From 1cdb6dc9157b103a6493ea2995ebea885d06790d Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Sun, 8 Jun 2025 09:12:37 -0600 Subject: [PATCH 3/7] 2.4.0 --- tests/test_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_config.py b/tests/test_config.py index 00f81bf..f27bd52 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -26,7 +26,7 @@ def test_update_main_config(base_url, reset_config): new_settings = { "maxConcurrentDownloads": 5, "spotifyQuality": "HIGH", - "deezerQuality": "FLAC", + 
"deezerQuality": "MP3_128", "customDirFormat": "%artist%/%album%", "customTrackFormat": "%tracknum% %title%", "save_cover": False, From ca77c0e9f381fe82e112e1304a3da921dd74101a Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Sun, 8 Jun 2025 18:08:13 -0600 Subject: [PATCH 4/7] improve test scripts, bump deezspot and fix playlist issues --- requirements.txt | 2 +- routes/utils/celery_tasks.py | 16 +-- routes/utils/credentials.py | 3 + src/js/queue.ts | 2 - tests/conftest.py | 8 +- tests/test_config.py | 61 +++++++---- tests/test_downloads.py | 190 +++++++++++++++++++++++++---------- tests/test_history.py | 2 +- tests/test_prgs.py | 93 ----------------- 9 files changed, 194 insertions(+), 183 deletions(-) delete mode 100644 tests/test_prgs.py diff --git a/requirements.txt b/requirements.txt index 65b8527..3dd0973 100755 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,4 @@ waitress==3.0.2 celery==5.5.3 Flask==3.1.1 flask_cors==6.0.0 -deezspot-spotizerr==1.7.0 +deezspot-spotizerr==1.8.0 diff --git a/routes/utils/celery_tasks.py b/routes/utils/celery_tasks.py index 155ba30..2b19f80 100644 --- a/routes/utils/celery_tasks.py +++ b/routes/utils/celery_tasks.py @@ -366,8 +366,8 @@ def retry_task(task_id): # Update service settings if service == "spotify": if fallback_enabled: - task_info["main"] = config_params.get("deezer", "") - task_info["fallback"] = config_params.get("spotify", "") + task_info["main"] = config_params.get("spotify", "") + task_info["fallback"] = config_params.get("deezer", "") task_info["quality"] = config_params.get("deezerQuality", "MP3_128") task_info["fall_quality"] = config_params.get( "spotifyQuality", "NORMAL" @@ -1335,8 +1335,8 @@ def download_track(self, **task_data): # Determine service parameters if service == "spotify": if fallback_enabled: - main = config_params.get("deezer", "") - fallback = config_params.get("spotify", "") + main = config_params.get("spotify", "") + fallback = config_params.get("deezer", "") quality = 
config_params.get("deezerQuality", "MP3_128") fall_quality = config_params.get("spotifyQuality", "NORMAL") else: @@ -1421,8 +1421,8 @@ def download_album(self, **task_data): # Determine service parameters if service == "spotify": if fallback_enabled: - main = config_params.get("deezer", "") - fallback = config_params.get("spotify", "") + main = config_params.get("spotify", "") + fallback = config_params.get("deezer", "") quality = config_params.get("deezerQuality", "MP3_128") fall_quality = config_params.get("spotifyQuality", "NORMAL") else: @@ -1507,8 +1507,8 @@ def download_playlist(self, **task_data): # Determine service parameters if service == "spotify": if fallback_enabled: - main = config_params.get("deezer", "") - fallback = config_params.get("spotify", "") + main = config_params.get("spotify", "") + fallback = config_params.get("deezer", "") quality = config_params.get("deezerQuality", "MP3_128") fall_quality = config_params.get("spotifyQuality", "NORMAL") else: diff --git a/routes/utils/credentials.py b/routes/utils/credentials.py index 23a5cef..3b9e953 100755 --- a/routes/utils/credentials.py +++ b/routes/utils/credentials.py @@ -403,6 +403,9 @@ def get_credential(service, name): "name": data.get("name"), "region": data.get("region"), "blob_content": data.get("blob_content"), + "blob_file_path": data.get( + "blob_file_path" + ), # Ensure blob_file_path is returned } return cleaned_data diff --git a/src/js/queue.ts b/src/js/queue.ts index 9db4fdf..8fbc310 100644 --- a/src/js/queue.ts +++ b/src/js/queue.ts @@ -1,4 +1,3 @@ -// --- MODIFIED: Custom URLSearchParams class that does not encode anything --- class CustomURLSearchParams { params: Record; constructor() { @@ -13,7 +12,6 @@ class CustomURLSearchParams { .join('&'); } } -// --- END MODIFIED --- // Interfaces for complex objects interface QueueItem { diff --git a/tests/conftest.py b/tests/conftest.py index 8cdc020..ea1c6ec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,7 +11,7 @@ 
load_dotenv() # --- Environment-based secrets for testing --- SPOTIFY_API_CLIENT_ID = os.environ.get("SPOTIFY_API_CLIENT_ID", "your_spotify_client_id") SPOTIFY_API_CLIENT_SECRET = os.environ.get("SPOTIFY_API_CLIENT_SECRET", "your_spotify_client_secret") -SPOTIFY_BLOB_CONTENT_STR = os.environ.get("SPOTIFY_BLOB_CONTENT_STR", '{}') +SPOTIFY_BLOB_CONTENT_STR = os.environ.get("SPOTIFY_BLOB_CONTENT", '{}') try: SPOTIFY_BLOB_CONTENT = json.loads(SPOTIFY_BLOB_CONTENT_STR) except json.JSONDecodeError: @@ -46,12 +46,12 @@ def wait_for_task(base_url, task_id, timeout=600): response.raise_for_status() # Raise an exception for bad status codes - statuses = response.json() - if not statuses: + data = response.json() + if not data or not data.get("last_line"): time.sleep(1) continue - last_status = statuses[-1] + last_status = data["last_line"] status = last_status.get("status") # More verbose logging for debugging during tests diff --git a/tests/test_config.py b/tests/test_config.py index f27bd52..914c24f 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -20,17 +20,25 @@ def test_get_main_config(base_url): assert "maxConcurrentDownloads" in config assert "spotify" in config # Should be set by conftest assert "deezer" in config # Should be set by conftest + assert "fallback" in config + assert "realTime" in config + assert "maxRetries" in config def test_update_main_config(base_url, reset_config): - """Tests updating various fields in the main configuration.""" + """Tests updating various fields in the main configuration based on frontend capabilities.""" new_settings = { "maxConcurrentDownloads": 5, "spotifyQuality": "HIGH", - "deezerQuality": "MP3_128", + "deezerQuality": "FLAC", "customDirFormat": "%artist%/%album%", "customTrackFormat": "%tracknum% %title%", "save_cover": False, "fallback": True, + "realTime": False, + "maxRetries": 5, + "retryDelaySeconds": 10, + "retry_delay_increase": 10, + "tracknum_padding": False, } response = 
requests.post(f"{base_url}/config", json=new_settings) @@ -45,8 +53,9 @@ def test_get_watch_config(base_url): response = requests.get(f"{base_url}/config/watch") assert response.status_code == 200 config = response.json() - assert "delay_between_playlists_seconds" in config - assert "delay_between_artists_seconds" in config + assert "enabled" in config + assert "watchPollIntervalSeconds" in config + assert "watchedArtistAlbumGroup" in config def test_update_watch_config(base_url): """Tests updating the watch-specific configuration.""" @@ -54,14 +63,19 @@ def test_update_watch_config(base_url): original_config = response.json() new_settings = { - "delay_between_playlists_seconds": 120, - "delay_between_artists_seconds": 240, - "auto_add_new_releases_to_queue": False, + "enabled": False, + "watchPollIntervalSeconds": 7200, + "watchedArtistAlbumGroup": ["album", "single"], } response = requests.post(f"{base_url}/config/watch", json=new_settings) assert response.status_code == 200 - updated_config = response.json() + + # The response for updating watch config is just a success message, + # so we need to GET the config again to verify. + verify_response = requests.get(f"{base_url}/config/watch") + assert verify_response.status_code == 200 + updated_config = verify_response.json() for key, value in new_settings.items(): assert updated_config[key] == value @@ -71,24 +85,29 @@ def test_update_watch_config(base_url): def test_update_conversion_config(base_url, reset_config): """ - Iterates through all supported conversion formats and bitrates, - updating the config and verifying the changes for each combination. + Iterates through supported conversion formats and bitrates from the frontend, + updating the config and verifying the changes. 
""" - conversion_formats = ["mp3", "flac", "ogg", "opus", "m4a"] + # Formats and bitrates aligned with src/js/config.ts + conversion_formats = ["MP3", "AAC", "OGG", "OPUS", "FLAC", "WAV", "ALAC"] bitrates = { - "mp3": ["320", "256", "192", "128"], - "ogg": ["500", "320", "192", "160"], - "opus": ["256", "192", "128", "96"], - "m4a": ["320k", "256k", "192k", "128k"], - "flac": [None] # Bitrate is not applicable for FLAC + "MP3": ["128k", "320k"], + "AAC": ["128k", "256k"], + "OGG": ["128k", "320k"], + "OPUS": ["96k", "256k"], + "FLAC": [None], + "WAV": [None], + "ALAC": [None], } - for format in conversion_formats: - for br in bitrates.get(format, [None]): - print(f"Testing conversion config: format={format}, bitrate={br}") - new_settings = {"convertTo": format, "bitrate": br} + for format_val in conversion_formats: + for br in bitrates.get(format_val, [None]): + print(f"Testing conversion config: format={format_val}, bitrate={br}") + new_settings = {"convertTo": format_val, "bitrate": br} response = requests.post(f"{base_url}/config", json=new_settings) + assert response.status_code == 200 updated_config = response.json() - assert updated_config["convertTo"] == format + assert updated_config["convertTo"] == format_val + # The backend might return null for empty bitrate, which is fine assert updated_config["bitrate"] == br \ No newline at end of file diff --git a/tests/test_downloads.py b/tests/test_downloads.py index 6cca963..74db406 100644 --- a/tests/test_downloads.py +++ b/tests/test_downloads.py @@ -1,7 +1,9 @@ import requests import pytest +import os +import shutil -# URLs provided by the user for testing +# URLs for testing SPOTIFY_TRACK_URL = "https://open.spotify.com/track/1Cts4YV9aOXVAP3bm3Ro6r" SPOTIFY_ALBUM_URL = "https://open.spotify.com/album/4K0JVP5veNYTVI6IMamlla" SPOTIFY_PLAYLIST_URL = "https://open.spotify.com/playlist/26CiMxIxdn5WhXyccMCPOB" @@ -13,68 +15,101 @@ ALBUM_ID = SPOTIFY_ALBUM_URL.split('/')[-1].split('?')[0] PLAYLIST_ID = 
SPOTIFY_PLAYLIST_URL.split('/')[-1].split('?')[0] ARTIST_ID = SPOTIFY_ARTIST_URL.split('/')[-1].split('?')[0] +DOWNLOAD_DIR = "downloads/" + + +def get_downloaded_files(directory=DOWNLOAD_DIR): + """Walks a directory and returns a list of all file paths.""" + file_paths = [] + if not os.path.isdir(directory): + return file_paths + for root, _, files in os.walk(directory): + for file in files: + # Ignore hidden files like .DS_Store + if not file.startswith('.'): + file_paths.append(os.path.join(root, file)) + return file_paths + + +@pytest.fixture(autouse=True) +def cleanup_downloads_dir(): + """ + Ensures the download directory is removed and recreated, providing a clean + slate before and after each test. + """ + if os.path.exists(DOWNLOAD_DIR): + shutil.rmtree(DOWNLOAD_DIR) + os.makedirs(DOWNLOAD_DIR, exist_ok=True) + yield + if os.path.exists(DOWNLOAD_DIR): + shutil.rmtree(DOWNLOAD_DIR) + + @pytest.fixture def reset_config(base_url): - """Fixture to reset the main config after a test to avoid side effects.""" + """ + Fixture to get original config, set single concurrent download for test + isolation, and restore the original config after the test. + """ response = requests.get(f"{base_url}/config") original_config = response.json() + + # Set max concurrent downloads to 1 for all tests using this fixture. 
+ requests.post(f"{base_url}/config", json={"maxConcurrentDownloads": 1}) + yield + + # Restore original config requests.post(f"{base_url}/config", json=original_config) -def test_download_track_spotify_only(base_url, task_waiter, reset_config): - """Tests downloading a single track from Spotify with real-time download enabled.""" - print("\n--- Testing Spotify-only track download ---") + +@pytest.mark.parametrize("download_type, item_id, timeout, expected_files_min", [ + ("track", TRACK_ID, 600, 1), + ("album", ALBUM_ID, 900, 14), # "After Hours" has 14 tracks + ("playlist", PLAYLIST_ID, 1200, 4), # Test playlist has 4 tracks +]) +def test_spotify_download_and_verify_files(base_url, task_waiter, reset_config, download_type, item_id, timeout, expected_files_min): + """ + Tests downloading a track, album, or playlist and verifies that the + expected number of files are created on disk. + """ + print(f"\n--- Testing Spotify-only '{download_type}' download and verifying files ---") config_payload = { "service": "spotify", "fallback": False, "realTime": True, - "spotifyQuality": "NORMAL" # Simulating free account quality + "spotifyQuality": "NORMAL" } requests.post(f"{base_url}/config", json=config_payload) - response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + response = requests.get(f"{base_url}/{download_type}/download/{item_id}") assert response.status_code == 202 task_id = response.json()["task_id"] - final_status = task_waiter(task_id) - assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" + final_status = task_waiter(task_id, timeout=timeout) + assert final_status["status"] == "complete", f"Task failed for {download_type} {item_id}: {final_status.get('error')}" -def test_download_album_spotify_only(base_url, task_waiter, reset_config): - """Tests downloading a full album from Spotify with real-time download enabled.""" - print("\n--- Testing Spotify-only album download ---") - config_payload = {"service": 
"spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"} - requests.post(f"{base_url}/config", json=config_payload) + # Verify that the correct number of files were downloaded + downloaded_files = get_downloaded_files() + assert len(downloaded_files) >= expected_files_min, ( + f"Expected at least {expected_files_min} file(s) for {download_type} {item_id}, " + f"but found {len(downloaded_files)}." + ) - response = requests.get(f"{base_url}/album/download/{ALBUM_ID}") - assert response.status_code == 202 - task_id = response.json()["task_id"] - - final_status = task_waiter(task_id, timeout=900) - assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" -def test_download_playlist_spotify_only(base_url, task_waiter, reset_config): - """Tests downloading a full playlist from Spotify with real-time download enabled.""" - print("\n--- Testing Spotify-only playlist download ---") - config_payload = {"service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"} - requests.post(f"{base_url}/config", json=config_payload) - - response = requests.get(f"{base_url}/playlist/download/{PLAYLIST_ID}") - assert response.status_code == 202 - task_id = response.json()["task_id"] - - final_status = task_waiter(task_id, timeout=1200) - assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" - -def test_download_artist_spotify_only(base_url, task_waiter, reset_config): - """Tests queuing downloads for an artist's entire discography from Spotify.""" - print("\n--- Testing Spotify-only artist download ---") +def test_artist_download_and_verify_files(base_url, task_waiter, reset_config): + """ + Tests queuing an artist download and verifies that files are created. + Does not check for exact file count due to the variability of artist discographies. 
+ """ + print("\n--- Testing Spotify-only artist download and verifying files ---") config_payload = {"service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL"} requests.post(f"{base_url}/config", json=config_payload) response = requests.get(f"{base_url}/artist/download/{ARTIST_ID}?album_type=album,single") assert response.status_code == 202 response_data = response.json() - queued_albums = response_data.get("successfully_queued_albums", []) + queued_albums = response_data.get("queued_albums", []) assert len(queued_albums) > 0, "No albums were queued for the artist." for album in queued_albums: @@ -83,13 +118,18 @@ def test_download_artist_spotify_only(base_url, task_waiter, reset_config): final_status = task_waiter(task_id, timeout=900) assert final_status["status"] == "complete", f"Artist album task {album['name']} failed: {final_status.get('error')}" -def test_download_track_with_fallback(base_url, task_waiter, reset_config): - """Tests downloading a Spotify track with Deezer fallback enabled.""" - print("\n--- Testing track download with Deezer fallback ---") + # After all tasks complete, verify that at least some files were downloaded. + downloaded_files = get_downloaded_files() + assert len(downloaded_files) > 0, "Artist download ran but no files were found in the download directory." 
+ + +def test_download_with_deezer_fallback_and_verify_files(base_url, task_waiter, reset_config): + """Tests downloading with Deezer fallback and verifies the file exists.""" + print("\n--- Testing track download with Deezer fallback and verifying files ---") config_payload = { "service": "spotify", "fallback": True, - "deezerQuality": "MP3_320" # Simulating higher quality from Deezer free + "deezerQuality": "FLAC" # Test with high quality fallback } requests.post(f"{base_url}/config", json=config_payload) @@ -98,24 +138,58 @@ def test_download_track_with_fallback(base_url, task_waiter, reset_config): task_id = response.json()["task_id"] final_status = task_waiter(task_id) - assert final_status["status"] == "complete", f"Task failed: {final_status.get('error')}" + assert final_status["status"] == "complete", f"Task failed with fallback: {final_status.get('error')}" -@pytest.mark.parametrize("format,bitrate", [ - ("mp3", "320"), ("mp3", "128"), - ("flac", None), - ("ogg", "160"), - ("opus", "128"), - ("m4a", "128k") + # Verify that at least one file was downloaded. + downloaded_files = get_downloaded_files() + assert len(downloaded_files) >= 1, "Fallback download completed but no file was found." + + +def test_download_without_realtime_and_verify_files(base_url, task_waiter, reset_config): + """Tests a non-realtime download and verifies the file exists.""" + print("\n--- Testing download with realTime: False and verifying files ---") + config_payload = { + "service": "spotify", + "fallback": False, + "realTime": False, + "spotifyQuality": "NORMAL" + } + requests.post(f"{base_url}/config", json=config_payload) + + response = requests.get(f"{base_url}/track/download/{TRACK_ID}") + assert response.status_code == 202 + task_id = response.json()["task_id"] + + final_status = task_waiter(task_id) + assert final_status["status"] == "complete", f"Task failed with realTime=False: {final_status.get('error')}" + + # Verify that at least one file was downloaded. 
+ downloaded_files = get_downloaded_files() + assert len(downloaded_files) >= 1, "Non-realtime download completed but no file was found." + + +# Aligned with formats in src/js/config.ts's CONVERSION_FORMATS +@pytest.mark.parametrize("format_name,bitrate,expected_ext", [ + ("mp3", "320k", ".mp3"), + ("aac", "256k", ".m4a"), # AAC is typically in an M4A container + ("ogg", "320k", ".ogg"), + ("opus", "256k", ".opus"), + ("flac", None, ".flac"), + ("wav", None, ".wav"), + ("alac", None, ".m4a"), # ALAC is also in an M4A container ]) -def test_download_with_conversion(base_url, task_waiter, reset_config, format, bitrate): - """Tests downloading a track with various conversion formats and bitrates.""" - print(f"\n--- Testing conversion: {format} @ {bitrate or 'default'} ---") +def test_download_with_conversion_and_verify_format(base_url, task_waiter, reset_config, format_name, bitrate, expected_ext): + """ + Tests downloading a track with various conversion formats and verifies + that the created file has the correct extension. + """ + print(f"\n--- Testing conversion: {format_name.upper()} @ {bitrate or 'default'} ---") config_payload = { "service": "spotify", "fallback": False, "realTime": True, "spotifyQuality": "NORMAL", - "convertTo": format, + "convertTo": format_name.upper(), "bitrate": bitrate } requests.post(f"{base_url}/config", json=config_payload) @@ -125,4 +199,14 @@ def test_download_with_conversion(base_url, task_waiter, reset_config, format, b task_id = response.json()["task_id"] final_status = task_waiter(task_id) - assert final_status["status"] == "complete", f"Download failed for format {format} bitrate {bitrate}: {final_status.get('error')}" \ No newline at end of file + assert final_status["status"] == "complete", f"Download failed for format {format_name} bitrate {bitrate}: {final_status.get('error')}" + + # Verify that a file with the correct extension was created. 
+ downloaded_files = get_downloaded_files() + assert len(downloaded_files) >= 1, "Conversion download completed but no file was found." + + found_correct_format = any(f.lower().endswith(expected_ext) for f in downloaded_files) + assert found_correct_format, ( + f"No file with expected extension '{expected_ext}' found for format '{format_name}'. " + f"Found files: {downloaded_files}" + ) \ No newline at end of file diff --git a/tests/test_history.py b/tests/test_history.py index bd6228b..efcd6fd 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -21,7 +21,7 @@ def test_history_logging_and_filtering(base_url, task_waiter, reset_config): config_payload = {"service": "spotify", "fallback": False, "realTime": True} requests.post(f"{base_url}/config", json=config_payload) response = requests.get(f"{base_url}/track/download/{TRACK_ID}") - assert response.status_code == 202 + assert response.status_code == 200 task_id = response.json()["task_id"] task_waiter(task_id) # Wait for the download to complete diff --git a/tests/test_prgs.py b/tests/test_prgs.py deleted file mode 100644 index 39dd902..0000000 --- a/tests/test_prgs.py +++ /dev/null @@ -1,93 +0,0 @@ -import requests -import pytest -import time - -# Use a known, short track for quick tests -TRACK_ID = "1Cts4YV9aOXVAP3bm3Ro6r" -# Use a long playlist to ensure there's time to cancel it -LONG_PLAYLIST_ID = "6WsyUEITURbQXZsqtEewb1" # Today's Top Hits on Spotify - -@pytest.fixture -def reset_config(base_url): - """Fixture to reset the main config after a test.""" - response = requests.get(f"{base_url}/config") - original_config = response.json() - yield - requests.post(f"{base_url}/config", json=original_config) - -def test_list_tasks(base_url, reset_config): - """Tests listing all active tasks.""" - config_payload = {"service": "spotify", "fallback": False, "realTime": True} - requests.post(f"{base_url}/config", json=config_payload) - - # Start a task - response = 
requests.get(f"{base_url}/track/download/{TRACK_ID}") - assert response.status_code == 202 - task_id = response.json()["task_id"] - - # Check the list to see if our task appears - response = requests.get(f"{base_url}/prgs/list") - assert response.status_code == 200 - tasks = response.json() - assert isinstance(tasks, list) - assert any(t['task_id'] == task_id for t in tasks) - - # Clean up by cancelling the task - requests.post(f"{base_url}/prgs/cancel/{task_id}") - -def test_get_task_progress_and_log(base_url, task_waiter, reset_config): - """Tests getting progress for a running task and retrieving its log after completion.""" - config_payload = {"service": "spotify", "fallback": False, "realTime": True} - requests.post(f"{base_url}/config", json=config_payload) - - response = requests.get(f"{base_url}/track/download/{TRACK_ID}") - assert response.status_code == 202 - task_id = response.json()["task_id"] - - # Poll progress a few times while it's running to check the endpoint - for _ in range(3): - time.sleep(1) - res = requests.get(f"{base_url}/prgs/{task_id}") - if res.status_code == 200 and res.json(): - statuses = res.json() - assert isinstance(statuses, list) - assert "status" in statuses[-1] - break - else: - pytest.fail("Could not get a valid task status in time.") - - # Wait for completion - final_status = task_waiter(task_id) - assert final_status["status"] == "complete" - - # After completion, check the task log endpoint - res = requests.get(f"{base_url}/prgs/{task_id}?log=true") - assert res.status_code == 200 - log_data = res.json() - assert "task_log" in log_data - assert len(log_data["task_log"]) > 0 - assert "status" in log_data["task_log"][0] - -def test_cancel_task(base_url, reset_config): - """Tests cancelling a task shortly after it has started.""" - config_payload = {"service": "spotify", "fallback": False, "realTime": True} - requests.post(f"{base_url}/config", json=config_payload) - - response = 
requests.get(f"{base_url}/playlist/download/{LONG_PLAYLIST_ID}") - assert response.status_code == 202 - task_id = response.json()["task_id"] - - # Give it a moment to ensure it has started processing - time.sleep(3) - - # Cancel the task - response = requests.post(f"{base_url}/prgs/cancel/{task_id}") - assert response.status_code == 200 - assert response.json()["status"] == "cancelled" - - # Check the final status to confirm it's marked as cancelled - time.sleep(2) # Allow time for the final status to propagate - res = requests.get(f"{base_url}/prgs/{task_id}") - assert res.status_code == 200 - last_status = res.json()[-1] - assert last_status["status"] == "cancelled" \ No newline at end of file From f39b248b21853b54736f545c49478a94bdc95bac Mon Sep 17 00:00:00 2001 From: Miguel Oliveira Date: Mon, 9 Jun 2025 19:05:20 -0300 Subject: [PATCH 5/7] feat: implement multi-stage Docker build for TypeScript and Python and added label org.opencontainers.image.source --- Dockerfile | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6d6a417..113d9e6 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,22 @@ -# Use an official Python runtime as a parent image -FROM python:3.12-slim +# Stage 1: TypeScript build +FROM node:22.16.0-slim AS typescript-builder + +# Set working directory +WORKDIR /app + +# Copy necessary files for TypeScript build +COPY tsconfig.json ./tsconfig.json +COPY src/js ./src/js + +# Install TypeScript globally +RUN npm install -g typescript + +# Compile TypeScript +RUN tsc + +# Stage 2: Final image +FROM python:3.12-slim AS python-builder +LABEL org.opencontainers.image.source="https://github.com/Xoconoch/spotizerr" # Set the working directory in the container WORKDIR /app @@ -10,28 +27,19 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gosu \ git \ ffmpeg \ - nodejs \ - npm \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -# Copy requirements file 
-COPY requirements.txt . - # Install Python dependencies +COPY requirements.txt . RUN pip install --no-cache-dir -r requirements.txt +# Copy static files generated by TypeScript +COPY --from=typescript-builder /app/static/js ./static/js + # Copy application code COPY . . -# Install TypeScript globally -RUN npm install -g typescript - -# Compile TypeScript -# tsc will use tsconfig.json from the current directory (/app) -# It will read from /app/src/js and output to /app/static/js -RUN tsc - # Create necessary directories with proper permissions RUN mkdir -p downloads data/config data/creds data/watch data/history logs/tasks && \ chmod -R 777 downloads data logs From 66da8cef5c98ca240e89130f72b3328c0e3d9fa3 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Mon, 9 Jun 2025 18:18:19 -0600 Subject: [PATCH 6/7] Fix #167, #161 and #156, implemented #155 --- requirements.txt | 2 +- routes/history.py | 61 +++- routes/prgs.py | 65 ++-- routes/utils/celery_queue_manager.py | 7 +- routes/utils/celery_tasks.py | 131 +++++--- routes/utils/history_manager.py | 221 +++++++++++++- routes/utils/playlist.py | 8 + src/js/history.ts | 194 ++++++++++-- src/js/queue.ts | 440 ++++++++++++++++++--------- static/css/history/history.css | 90 +++++- static/css/queue/queue.css | 79 +++++ static/html/history.html | 19 +- static/images/list.svg | 3 + static/images/skip.svg | 4 + 14 files changed, 1051 insertions(+), 273 deletions(-) create mode 100644 static/images/list.svg create mode 100644 static/images/skip.svg diff --git a/requirements.txt b/requirements.txt index 3dd0973..c8da5ad 100755 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,4 @@ waitress==3.0.2 celery==5.5.3 Flask==3.1.1 flask_cors==6.0.0 -deezspot-spotizerr==1.8.0 +deezspot-spotizerr==1.10.0 \ No newline at end of file diff --git a/routes/history.py b/routes/history.py index 4c2f238..e34a328 100644 --- a/routes/history.py +++ b/routes/history.py @@ -15,20 +15,38 @@ def get_download_history(): sort_by = 
request.args.get("sort_by", "timestamp_completed") sort_order = request.args.get("sort_order", "DESC") - # Basic filtering example: filter by status_final or download_type + # Create filters dictionary for various filter options filters = {} + + # Status filter status_filter = request.args.get("status_final") if status_filter: filters["status_final"] = status_filter + # Download type filter type_filter = request.args.get("download_type") if type_filter: filters["download_type"] = type_filter - - # Add more filters as needed, e.g., by item_name (would need LIKE for partial match) - # search_term = request.args.get('search') - # if search_term: - # filters['item_name'] = f'%{search_term}%' # This would require LIKE in get_history_entries + + # Parent task filter + parent_task_filter = request.args.get("parent_task_id") + if parent_task_filter: + filters["parent_task_id"] = parent_task_filter + + # Track status filter + track_status_filter = request.args.get("track_status") + if track_status_filter: + filters["track_status"] = track_status_filter + + # Show/hide child tracks + hide_child_tracks = request.args.get("hide_child_tracks", "false").lower() == "true" + if hide_child_tracks: + filters["parent_task_id"] = None # Only show parent entries or standalone tracks + + # Show only tracks with specific parent + only_parent_tracks = request.args.get("only_parent_tracks", "false").lower() == "true" + if only_parent_tracks and not parent_task_filter: + filters["parent_task_id"] = "NOT_NULL" # Special value to indicate we want only child tracks entries, total_count = get_history_entries( limit, offset, sort_by, sort_order, filters @@ -45,3 +63,34 @@ def get_download_history(): except Exception as e: logger.error(f"Error in /api/history endpoint: {e}", exc_info=True) return jsonify({"error": "Failed to retrieve download history"}), 500 + + +@history_bp.route("/tracks/", methods=["GET"]) +def get_tracks_for_parent(parent_task_id): + """API endpoint to retrieve all track 
entries for a specific parent task.""" + try: + # We don't need pagination for this endpoint as we want all tracks for a parent + filters = {"parent_task_id": parent_task_id} + + # Optional sorting + sort_by = request.args.get("sort_by", "timestamp_completed") + sort_order = request.args.get("sort_order", "DESC") + + entries, total_count = get_history_entries( + limit=1000, # High limit to get all tracks + offset=0, + sort_by=sort_by, + sort_order=sort_order, + filters=filters + ) + + return jsonify( + { + "parent_task_id": parent_task_id, + "tracks": entries, + "total_count": total_count, + } + ) + except Exception as e: + logger.error(f"Error in /api/history/tracks endpoint: {e}", exc_info=True) + return jsonify({"error": f"Failed to retrieve tracks for parent task {parent_task_id}"}), 500 diff --git a/routes/prgs.py b/routes/prgs.py index c6d0d92..23ae233 100755 --- a/routes/prgs.py +++ b/routes/prgs.py @@ -76,13 +76,21 @@ def get_task_details(task_id): last_status = get_last_task_status(task_id) status_count = len(get_task_status(task_id)) + + # Default to the full last_status object, then check for the raw callback + last_line_content = last_status + if last_status and "raw_callback" in last_status: + last_line_content = last_status["raw_callback"] + response = { "original_url": dynamic_original_url, - "last_line": last_status, + "last_line": last_line_content, "timestamp": time.time(), "task_id": task_id, "status_count": status_count, } + if last_status and last_status.get("summary"): + response["summary"] = last_status["summary"] return jsonify(response) @@ -122,33 +130,34 @@ def list_tasks(): last_status = get_last_task_status(task_id) if task_info and last_status: - detailed_tasks.append( - { - "task_id": task_id, - "type": task_info.get( - "type", task_summary.get("type", "unknown") - ), - "name": task_info.get( - "name", task_summary.get("name", "Unknown") - ), - "artist": task_info.get( - "artist", task_summary.get("artist", "") - ), - "download_type": 
task_info.get( - "download_type", - task_summary.get("download_type", "unknown"), - ), - "status": last_status.get( - "status", "unknown" - ), # Keep summary status for quick access - "last_status_obj": last_status, # Full last status object - "original_request": task_info.get("original_request", {}), - "created_at": task_info.get("created_at", 0), - "timestamp": last_status.get( - "timestamp", task_info.get("created_at", 0) - ), - } - ) + task_details = { + "task_id": task_id, + "type": task_info.get( + "type", task_summary.get("type", "unknown") + ), + "name": task_info.get( + "name", task_summary.get("name", "Unknown") + ), + "artist": task_info.get( + "artist", task_summary.get("artist", "") + ), + "download_type": task_info.get( + "download_type", + task_summary.get("download_type", "unknown"), + ), + "status": last_status.get( + "status", "unknown" + ), # Keep summary status for quick access + "last_status_obj": last_status, # Full last status object + "original_request": task_info.get("original_request", {}), + "created_at": task_info.get("created_at", 0), + "timestamp": last_status.get( + "timestamp", task_info.get("created_at", 0) + ), + } + if last_status.get("summary"): + task_details["summary"] = last_status["summary"] + detailed_tasks.append(task_details) elif ( task_info ): # If last_status is somehow missing, still provide some info diff --git a/routes/utils/celery_queue_manager.py b/routes/utils/celery_queue_manager.py index 548f00e..b472f70 100644 --- a/routes/utils/celery_queue_manager.py +++ b/routes/utils/celery_queue_manager.py @@ -127,6 +127,7 @@ class CeleryDownloadQueueManager: NON_BLOCKING_STATES = [ ProgressState.COMPLETE, + ProgressState.DONE, ProgressState.CANCELLED, ProgressState.ERROR, ProgressState.ERROR_RETRIED, @@ -354,7 +355,11 @@ class CeleryDownloadQueueManager: status = task.get("status") # Only cancel tasks that are not already completed or cancelled - if status not in [ProgressState.COMPLETE, ProgressState.CANCELLED]: + if 
status not in [ + ProgressState.COMPLETE, + ProgressState.DONE, + ProgressState.CANCELLED, + ]: result = cancel_celery_task(task_id) if result.get("status") == "cancelled": cancelled_count += 1 diff --git a/routes/utils/celery_tasks.py b/routes/utils/celery_tasks.py index 2b19f80..26d5e8d 100644 --- a/routes/utils/celery_tasks.py +++ b/routes/utils/celery_tasks.py @@ -29,7 +29,7 @@ from routes.utils.watch.db import ( ) # Import history manager function -from .history_manager import add_entry_to_history +from .history_manager import add_entry_to_history, add_tracks_from_summary # Create Redis connection for storing task data that's not part of the Celery result backend import redis @@ -238,6 +238,9 @@ def _log_task_to_history(task_id, final_status_str, error_msg=None): except Exception: spotify_id = None # Ignore errors in parsing + # Check for the new summary object in the last status + summary_obj = last_status_obj.get("summary") if last_status_obj else None + history_entry = { "task_id": task_id, "download_type": task_info.get("download_type"), @@ -271,15 +274,34 @@ def _log_task_to_history(task_id, final_status_str, error_msg=None): "bitrate": bitrate_str if bitrate_str else None, # Store None if empty string + "summary_json": json.dumps(summary_obj) if summary_obj else None, + "total_successful": summary_obj.get("total_successful") + if summary_obj + else None, + "total_skipped": summary_obj.get("total_skipped") if summary_obj else None, + "total_failed": summary_obj.get("total_failed") if summary_obj else None, } + + # Add the main history entry for the task add_entry_to_history(history_entry) + + # Process track-level entries from summary if this is a multi-track download + if summary_obj and task_info.get("download_type") in ["album", "playlist"]: + tracks_processed = add_tracks_from_summary( + summary_data=summary_obj, + parent_task_id=task_id, + parent_history_data=history_entry + ) + logger.info( + f"Track-level history: Processed 
{tracks_processed['successful']} successful, " + f"{tracks_processed['skipped']} skipped, and {tracks_processed['failed']} failed tracks for task {task_id}" + ) + except Exception as e: logger.error( f"History: Error preparing or logging history for task {task_id}: {e}", exc_info=True, ) - - # --- End History Logging Helper --- @@ -536,6 +558,9 @@ class ProgressTrackingTask(Task): Args: progress_data: Dictionary containing progress information from deezspot """ + # Store a copy of the original, unprocessed callback data + raw_callback_data = progress_data.copy() + task_id = self.request.id # Ensure ./logs/tasks directory exists @@ -570,9 +595,6 @@ class ProgressTrackingTask(Task): # Get status type status = progress_data.get("status", "unknown") - # Create a work copy of the data to avoid modifying the original - stored_data = progress_data.copy() - # Get task info for context task_info = get_task_info(task_id) @@ -585,44 +607,47 @@ class ProgressTrackingTask(Task): # Process based on status type using a more streamlined approach if status == "initializing": # --- INITIALIZING: Start of a download operation --- - self._handle_initializing(task_id, stored_data, task_info) + self._handle_initializing(task_id, progress_data, task_info) elif status == "downloading": # --- DOWNLOADING: Track download started --- - self._handle_downloading(task_id, stored_data, task_info) + self._handle_downloading(task_id, progress_data, task_info) elif status == "progress": # --- PROGRESS: Album/playlist track progress --- - self._handle_progress(task_id, stored_data, task_info) + self._handle_progress(task_id, progress_data, task_info) elif status == "real_time" or status == "track_progress": # --- REAL_TIME/TRACK_PROGRESS: Track download real-time progress --- - self._handle_real_time(task_id, stored_data) + self._handle_real_time(task_id, progress_data) elif status == "skipped": # --- SKIPPED: Track was skipped --- - self._handle_skipped(task_id, stored_data, task_info) + 
self._handle_skipped(task_id, progress_data, task_info) elif status == "retrying": # --- RETRYING: Download failed and being retried --- - self._handle_retrying(task_id, stored_data, task_info) + self._handle_retrying(task_id, progress_data, task_info) elif status == "error": # --- ERROR: Error occurred during download --- - self._handle_error(task_id, stored_data, task_info) + self._handle_error(task_id, progress_data, task_info) elif status == "done": # --- DONE: Download operation completed --- - self._handle_done(task_id, stored_data, task_info) + self._handle_done(task_id, progress_data, task_info) else: # --- UNKNOWN: Unrecognized status --- logger.info( - f"Task {task_id} {status}: {stored_data.get('message', 'No details')}" + f"Task {task_id} {status}: {progress_data.get('message', 'No details')}" ) + # Embed the raw callback data into the status object before storing + progress_data["raw_callback"] = raw_callback_data + # Store the processed status update - store_task_status(task_id, stored_data) + store_task_status(task_id, progress_data) def _handle_initializing(self, task_id, data, task_info): """Handle initializing status from deezspot""" @@ -663,7 +688,7 @@ class ProgressTrackingTask(Task): store_task_info(task_id, task_info) # Update status in data - data["status"] = ProgressState.INITIALIZING + # data["status"] = ProgressState.INITIALIZING def _handle_downloading(self, task_id, data, task_info): """Handle downloading status from deezspot""" @@ -720,7 +745,7 @@ class ProgressTrackingTask(Task): logger.info(f"Task {task_id} downloading: '{track_name}'") # Update status - data["status"] = ProgressState.DOWNLOADING + # data["status"] = ProgressState.DOWNLOADING def _handle_progress(self, task_id, data, task_info): """Handle progress status from deezspot""" @@ -776,7 +801,7 @@ class ProgressTrackingTask(Task): logger.error(f"Error parsing track numbers '{current_track_raw}': {e}") # Ensure correct status - data["status"] = ProgressState.PROGRESS + # 
data["status"] = ProgressState.PROGRESS def _handle_real_time(self, task_id, data): """Handle real-time progress status from deezspot""" @@ -818,11 +843,11 @@ class ProgressTrackingTask(Task): logger.debug(f"Task {task_id} track progress: {title} by {artist}: {percent}%") # Set appropriate status - data["status"] = ( - ProgressState.REAL_TIME - if data.get("status") == "real_time" - else ProgressState.TRACK_PROGRESS - ) + # data["status"] = ( + # ProgressState.REAL_TIME + # if data.get("status") == "real_time" + # else ProgressState.TRACK_PROGRESS + # ) def _handle_skipped(self, task_id, data, task_info): """Handle skipped status from deezspot""" @@ -872,7 +897,7 @@ class ProgressTrackingTask(Task): store_task_status(task_id, progress_update) # Set status - data["status"] = ProgressState.SKIPPED + # data["status"] = ProgressState.SKIPPED def _handle_retrying(self, task_id, data, task_info): """Handle retrying status from deezspot""" @@ -895,7 +920,7 @@ class ProgressTrackingTask(Task): store_task_info(task_id, task_info) # Set status - data["status"] = ProgressState.RETRYING + # data["status"] = ProgressState.RETRYING def _handle_error(self, task_id, data, task_info): """Handle error status from deezspot""" @@ -911,7 +936,7 @@ class ProgressTrackingTask(Task): store_task_info(task_id, task_info) # Set status and error message - data["status"] = ProgressState.ERROR + # data["status"] = ProgressState.ERROR data["error"] = message def _handle_done(self, task_id, data, task_info): @@ -931,7 +956,7 @@ class ProgressTrackingTask(Task): logger.info(f"Task {task_id} completed: Track '{song}'") # Update status to track_complete - data["status"] = ProgressState.TRACK_COMPLETE + # data["status"] = ProgressState.TRACK_COMPLETE # Update task info completed_tracks = task_info.get("completed_tracks", 0) + 1 @@ -989,15 +1014,28 @@ class ProgressTrackingTask(Task): logger.info(f"Task {task_id} completed: {content_type.upper()}") # Add summary - data["status"] = 
ProgressState.COMPLETE - data["message"] = ( - f"Download complete: {completed_tracks} tracks downloaded, {skipped_tracks} skipped" - ) + # data["status"] = ProgressState.COMPLETE + summary_obj = data.get("summary") - # Log summary - logger.info( - f"Task {task_id} summary: {completed_tracks} completed, {skipped_tracks} skipped, {error_count} errors" - ) + if summary_obj: + total_successful = summary_obj.get("total_successful", 0) + total_skipped = summary_obj.get("total_skipped", 0) + total_failed = summary_obj.get("total_failed", 0) + # data[ + # "message" + # ] = f"Download complete: {total_successful} tracks downloaded, {total_skipped} skipped, {total_failed} failed." + # Log summary from the summary object + logger.info( + f"Task {task_id} summary: {total_successful} successful, {total_skipped} skipped, {total_failed} failed." + ) + else: + # data["message"] = ( + # f"Download complete: {completed_tracks} tracks downloaded, {skipped_tracks} skipped" + # ) + # Log summary + logger.info( + f"Task {task_id} summary: {completed_tracks} completed, {skipped_tracks} skipped, {error_count} errors" + ) # Schedule deletion for completed multi-track downloads delayed_delete_task_data.apply_async( args=[task_id, "Task completed successfully and auto-cleaned."], @@ -1066,8 +1104,8 @@ class ProgressTrackingTask(Task): else: # Generic done for other types logger.info(f"Task {task_id} completed: {content_type.upper()}") - data["status"] = ProgressState.COMPLETE - data["message"] = "Download complete" + # data["status"] = ProgressState.COMPLETE + # data["message"] = "Download complete" # Celery signal handlers @@ -1134,18 +1172,11 @@ def task_postrun_handler( ) if state == states.SUCCESS: - if current_redis_status != ProgressState.COMPLETE: - store_task_status( - task_id, - { - "status": ProgressState.COMPLETE, - "timestamp": time.time(), - "type": task_info.get("type", "unknown"), - "name": task_info.get("name", "Unknown"), - "artist": task_info.get("artist", ""), - 
"message": "Download completed successfully.", - }, - ) + if current_redis_status not in [ProgressState.COMPLETE, "done"]: + # The final status is now set by the 'done' callback from deezspot. + # We no longer need to store a generic 'COMPLETE' status here. + # This ensures the raw callback data is the last thing in the log. + pass logger.info( f"Task {task_id} completed successfully: {task_info.get('name', 'Unknown')}" ) diff --git a/routes/utils/history_manager.py b/routes/utils/history_manager.py index 2dba42c..b6072b4 100644 --- a/routes/utils/history_manager.py +++ b/routes/utils/history_manager.py @@ -2,6 +2,7 @@ import sqlite3 import json import time import logging +import uuid from pathlib import Path logger = logging.getLogger(__name__) @@ -27,6 +28,12 @@ EXPECTED_COLUMNS = { "quality_profile": "TEXT", "convert_to": "TEXT", "bitrate": "TEXT", + "parent_task_id": "TEXT", # Reference to parent task for individual tracks + "track_status": "TEXT", # 'SUCCESSFUL', 'SKIPPED', 'FAILED' + "summary_json": "TEXT", # JSON string of the summary object from task + "total_successful": "INTEGER", # Count of successful tracks + "total_skipped": "INTEGER", # Count of skipped tracks + "total_failed": "INTEGER", # Count of failed tracks } @@ -61,7 +68,13 @@ def init_history_db(): service_used TEXT, quality_profile TEXT, convert_to TEXT, - bitrate TEXT + bitrate TEXT, + parent_task_id TEXT, + track_status TEXT, + summary_json TEXT, + total_successful INTEGER, + total_skipped INTEGER, + total_failed INTEGER ) """ cursor.execute(create_table_sql) @@ -106,6 +119,27 @@ def init_history_db(): f"Could not add column '{col_name}': {alter_e}. It might already exist or there's a schema mismatch." 
) + # Add additional columns for summary data if they don't exist + for col_name, col_type in { + "summary_json": "TEXT", + "total_successful": "INTEGER", + "total_skipped": "INTEGER", + "total_failed": "INTEGER" + }.items(): + if col_name not in existing_column_names and col_name not in EXPECTED_COLUMNS: + try: + cursor.execute( + f"ALTER TABLE download_history ADD COLUMN {col_name} {col_type}" + ) + logger.info( + f"Added missing column '{col_name} {col_type}' to download_history table." + ) + added_columns = True + except sqlite3.OperationalError as alter_e: + logger.warning( + f"Could not add column '{col_name}': {alter_e}. It might already exist or there's a schema mismatch." + ) + if added_columns: conn.commit() logger.info(f"Download history table schema updated at {HISTORY_DB_FILE}") @@ -148,6 +182,12 @@ def add_entry_to_history(history_data: dict): "quality_profile", "convert_to", "bitrate", + "parent_task_id", + "track_status", + "summary_json", + "total_successful", + "total_skipped", + "total_failed", ] # Ensure all keys are present, filling with None if not for key in required_keys: @@ -164,8 +204,9 @@ def add_entry_to_history(history_data: dict): item_url, spotify_id, status_final, error_message, timestamp_added, timestamp_completed, original_request_json, last_status_obj_json, service_used, quality_profile, - convert_to, bitrate - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + convert_to, bitrate, parent_task_id, track_status, + summary_json, total_successful, total_skipped, total_failed + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
""", ( history_data["task_id"], @@ -185,6 +226,12 @@ def add_entry_to_history(history_data: dict): history_data["quality_profile"], history_data["convert_to"], history_data["bitrate"], + history_data["parent_task_id"], + history_data["track_status"], + history_data["summary_json"], + history_data["total_successful"], + history_data["total_skipped"], + history_data["total_failed"], ), ) conn.commit() @@ -239,8 +286,16 @@ def get_history_entries( for column, value in filters.items(): # Basic security: ensure column is a valid one (alphanumeric + underscore) if column.replace("_", "").isalnum(): - where_clauses.append(f"{column} = ?") - params.append(value) + # Special case for 'NOT_NULL' value for parent_task_id + if column == "parent_task_id" and value == "NOT_NULL": + where_clauses.append(f"{column} IS NOT NULL") + # Regular case for NULL value + elif value is None: + where_clauses.append(f"{column} IS NULL") + # Regular case for exact match + else: + where_clauses.append(f"{column} = ?") + params.append(value) if where_clauses: where_sql = " WHERE " + " AND ".join(where_clauses) @@ -266,6 +321,11 @@ def get_history_entries( "quality_profile", "convert_to", "bitrate", + "parent_task_id", + "track_status", + "total_successful", + "total_skipped", + "total_failed", ] if sort_by not in valid_sort_columns: sort_by = "timestamp_completed" # Default sort @@ -292,6 +352,157 @@ def get_history_entries( conn.close() +def add_track_entry_to_history(track_name, artist_name, parent_task_id, track_status, parent_history_data=None): + """Adds a track-specific entry to the history database. 
+ + Args: + track_name (str): The name of the track + artist_name (str): The artist name + parent_task_id (str): The ID of the parent task (album or playlist) + track_status (str): The status of the track ('SUCCESSFUL', 'SKIPPED', 'FAILED') + parent_history_data (dict, optional): The history data of the parent task + + Returns: + str: The task_id of the created track entry + """ + # Generate a unique ID for this track entry + track_task_id = f"{parent_task_id}_track_{uuid.uuid4().hex[:8]}" + + # Create a copy of parent data or initialize empty dict + track_history_data = {} + if parent_history_data: + # Copy relevant fields from parent + for key in EXPECTED_COLUMNS: + if key in parent_history_data and key not in ['task_id', 'item_name', 'item_artist']: + track_history_data[key] = parent_history_data[key] + + # Set track-specific fields + track_history_data.update({ + "task_id": track_task_id, + "download_type": "track", + "item_name": track_name, + "item_artist": artist_name, + "parent_task_id": parent_task_id, + "track_status": track_status, + "status_final": "COMPLETED" if track_status == "SUCCESSFUL" else + "SKIPPED" if track_status == "SKIPPED" else "ERROR", + "timestamp_completed": time.time() + }) + + # Extract track URL if possible (from last_status_obj_json) + if parent_history_data and parent_history_data.get("last_status_obj_json"): + try: + last_status = json.loads(parent_history_data["last_status_obj_json"]) + + # Try to match track name in the tracks lists to find URL + track_key = f"{track_name} - {artist_name}" + if "raw_callback" in last_status and last_status["raw_callback"].get("url"): + track_history_data["item_url"] = last_status["raw_callback"].get("url") + + # Extract Spotify ID from URL if possible + url = last_status["raw_callback"].get("url", "") + if url and "spotify.com" in url: + try: + spotify_id = url.split("/")[-1] + if spotify_id and len(spotify_id) == 22 and spotify_id.isalnum(): + track_history_data["spotify_id"] = spotify_id + 
except Exception: + pass + except (json.JSONDecodeError, KeyError, AttributeError) as e: + logger.warning(f"Could not extract track URL for {track_name}: {e}") + + # Add entry to history + add_entry_to_history(track_history_data) + + return track_task_id + +def add_tracks_from_summary(summary_data, parent_task_id, parent_history_data=None): + """Processes a summary object from a completed task and adds individual track entries. + + Args: + summary_data (dict): The summary data containing track lists + parent_task_id (str): The ID of the parent task + parent_history_data (dict, optional): The history data of the parent task + + Returns: + dict: Summary of processed tracks + """ + processed = { + "successful": 0, + "skipped": 0, + "failed": 0 + } + + if not summary_data: + logger.warning(f"No summary data provided for task {parent_task_id}") + return processed + + # Process successful tracks + for track_entry in summary_data.get("successful_tracks", []): + try: + # Parse "track_name - artist_name" format + parts = track_entry.split(" - ", 1) + if len(parts) == 2: + track_name, artist_name = parts + add_track_entry_to_history( + track_name=track_name, + artist_name=artist_name, + parent_task_id=parent_task_id, + track_status="SUCCESSFUL", + parent_history_data=parent_history_data + ) + processed["successful"] += 1 + else: + logger.warning(f"Could not parse track entry: {track_entry}") + except Exception as e: + logger.error(f"Error processing successful track {track_entry}: {e}", exc_info=True) + + # Process skipped tracks + for track_entry in summary_data.get("skipped_tracks", []): + try: + parts = track_entry.split(" - ", 1) + if len(parts) == 2: + track_name, artist_name = parts + add_track_entry_to_history( + track_name=track_name, + artist_name=artist_name, + parent_task_id=parent_task_id, + track_status="SKIPPED", + parent_history_data=parent_history_data + ) + processed["skipped"] += 1 + else: + logger.warning(f"Could not parse skipped track entry: 
{track_entry}") + except Exception as e: + logger.error(f"Error processing skipped track {track_entry}: {e}", exc_info=True) + + # Process failed tracks + for track_entry in summary_data.get("failed_tracks", []): + try: + parts = track_entry.split(" - ", 1) + if len(parts) == 2: + track_name, artist_name = parts + add_track_entry_to_history( + track_name=track_name, + artist_name=artist_name, + parent_task_id=parent_task_id, + track_status="FAILED", + parent_history_data=parent_history_data + ) + processed["failed"] += 1 + else: + logger.warning(f"Could not parse failed track entry: {track_entry}") + except Exception as e: + logger.error(f"Error processing failed track {track_entry}: {e}", exc_info=True) + + logger.info( + f"Added {processed['successful']} successful, {processed['skipped']} skipped, " + f"and {processed['failed']} failed track entries for task {parent_task_id}" + ) + + return processed + + if __name__ == "__main__": # For testing purposes logging.basicConfig(level=logging.INFO) diff --git a/routes/utils/playlist.py b/routes/utils/playlist.py index 3266e17..5605a51 100755 --- a/routes/utils/playlist.py +++ b/routes/utils/playlist.py @@ -124,6 +124,10 @@ def download_playlist( "spotify", main ) # For blob path blob_file_path = spotify_main_creds.get("blob_file_path") + if blob_file_path is None: + raise ValueError( + f"Spotify credentials for account '{main}' don't contain a blob_file_path. Please check your credentials configuration." + ) if not Path(blob_file_path).exists(): raise FileNotFoundError( f"Spotify credentials blob file not found at {blob_file_path} for account '{main}'" @@ -180,6 +184,10 @@ def download_playlist( spotify_main_creds = get_credential("spotify", main) # For blob path blob_file_path = spotify_main_creds.get("blob_file_path") + if blob_file_path is None: + raise ValueError( + f"Spotify credentials for account '{main}' don't contain a blob_file_path. Please check your credentials configuration." 
+ ) if not Path(blob_file_path).exists(): raise FileNotFoundError( f"Spotify credentials blob file not found at {blob_file_path} for account '{main}'" diff --git a/src/js/history.ts b/src/js/history.ts index 0ec9984..d274ce0 100644 --- a/src/js/history.ts +++ b/src/js/history.ts @@ -6,12 +6,15 @@ document.addEventListener('DOMContentLoaded', () => { const limitSelect = document.getElementById('limit-select') as HTMLSelectElement | null; const statusFilter = document.getElementById('status-filter') as HTMLSelectElement | null; const typeFilter = document.getElementById('type-filter') as HTMLSelectElement | null; + const trackFilter = document.getElementById('track-filter') as HTMLSelectElement | null; + const hideChildTracksCheckbox = document.getElementById('hide-child-tracks') as HTMLInputElement | null; let currentPage = 1; let limit = 25; let totalEntries = 0; let currentSortBy = 'timestamp_completed'; let currentSortOrder = 'DESC'; + let currentParentTaskId: string | null = null; async function fetchHistory(page = 1) { if (!historyTableBody || !prevButton || !nextButton || !pageInfo || !limitSelect || !statusFilter || !typeFilter) { @@ -30,6 +33,21 @@ document.addEventListener('DOMContentLoaded', () => { if (typeVal) { apiUrl += `&download_type=${typeVal}`; } + + // Add track status filter if present + if (trackFilter && trackFilter.value) { + apiUrl += `&track_status=${trackFilter.value}`; + } + + // Add parent task filter if viewing a specific parent's tracks + if (currentParentTaskId) { + apiUrl += `&parent_task_id=${currentParentTaskId}`; + } + + // Add hide child tracks filter if checkbox is checked + if (hideChildTracksCheckbox && hideChildTracksCheckbox.checked) { + apiUrl += `&hide_child_tracks=true`; + } try { const response = await fetch(apiUrl); @@ -42,10 +60,13 @@ document.addEventListener('DOMContentLoaded', () => { currentPage = Math.floor(offset / limit) + 1; updatePagination(); updateSortIndicators(); + + // Update page title if viewing tracks 
for a parent + updatePageTitle(); } catch (error) { console.error('Error fetching history:', error); if (historyTableBody) { - historyTableBody.innerHTML = 'Error loading history.'; + historyTableBody.innerHTML = 'Error loading history.'; } } } @@ -55,17 +76,43 @@ document.addEventListener('DOMContentLoaded', () => { historyTableBody.innerHTML = ''; // Clear existing rows if (!entries || entries.length === 0) { - historyTableBody.innerHTML = 'No history entries found.'; + historyTableBody.innerHTML = 'No history entries found.'; return; } entries.forEach(entry => { const row = historyTableBody.insertRow(); - row.insertCell().textContent = entry.item_name || 'N/A'; + + // Add class for parent/child styling + if (entry.parent_task_id) { + row.classList.add('child-track-row'); + } else if (entry.download_type === 'album' || entry.download_type === 'playlist') { + row.classList.add('parent-task-row'); + } + + // Item name with indentation for child tracks + const nameCell = row.insertCell(); + if (entry.parent_task_id) { + nameCell.innerHTML = `└─ ${entry.item_name || 'N/A'}`; + } else { + nameCell.textContent = entry.item_name || 'N/A'; + } + row.insertCell().textContent = entry.item_artist || 'N/A'; - row.insertCell().textContent = entry.download_type ? entry.download_type.charAt(0).toUpperCase() + entry.download_type.slice(1) : 'N/A'; + + // Type cell - show track status for child tracks + const typeCell = row.insertCell(); + if (entry.parent_task_id && entry.track_status) { + typeCell.textContent = entry.track_status; + typeCell.classList.add(`track-status-${entry.track_status.toLowerCase()}`); + } else { + typeCell.textContent = entry.download_type ? 
entry.download_type.charAt(0).toUpperCase() + entry.download_type.slice(1) : 'N/A'; + } + row.insertCell().textContent = entry.service_used || 'N/A'; + // Construct Quality display string + const qualityCell = row.insertCell(); let qualityDisplay = entry.quality_profile || 'N/A'; if (entry.convert_to) { qualityDisplay = `${entry.convert_to.toUpperCase()}`; @@ -76,22 +123,47 @@ document.addEventListener('DOMContentLoaded', () => { } else if (entry.bitrate) { // Case where convert_to might not be set, but bitrate is (e.g. for OGG Vorbis quality settings) qualityDisplay = `${entry.bitrate}k (${entry.quality_profile || 'Profile'})`; } - row.insertCell().textContent = qualityDisplay; + qualityCell.textContent = qualityDisplay; const statusCell = row.insertCell(); statusCell.textContent = entry.status_final || 'N/A'; - statusCell.className = `status-${entry.status_final}`; + statusCell.className = `status-${entry.status_final?.toLowerCase() || 'unknown'}`; row.insertCell().textContent = entry.timestamp_added ? new Date(entry.timestamp_added * 1000).toLocaleString() : 'N/A'; row.insertCell().textContent = entry.timestamp_completed ? 
new Date(entry.timestamp_completed * 1000).toLocaleString() : 'N/A'; - const detailsCell = row.insertCell(); + const actionsCell = row.insertCell(); + + // Add details button const detailsButton = document.createElement('button'); detailsButton.innerHTML = `Details`; detailsButton.className = 'details-btn btn-icon'; detailsButton.title = 'Show Details'; detailsButton.onclick = () => showDetailsModal(entry); - detailsCell.appendChild(detailsButton); + actionsCell.appendChild(detailsButton); + + // Add view tracks button for album/playlist entries with child tracks + if (!entry.parent_task_id && (entry.download_type === 'album' || entry.download_type === 'playlist') && + (entry.total_successful > 0 || entry.total_skipped > 0 || entry.total_failed > 0)) { + const viewTracksButton = document.createElement('button'); + viewTracksButton.innerHTML = `Tracks`; + viewTracksButton.className = 'tracks-btn btn-icon'; + viewTracksButton.title = 'View Tracks'; + viewTracksButton.setAttribute('data-task-id', entry.task_id); + viewTracksButton.onclick = () => viewTracksForParent(entry.task_id); + actionsCell.appendChild(viewTracksButton); + + // Add track counts display + const trackCountsSpan = document.createElement('span'); + trackCountsSpan.className = 'track-counts'; + trackCountsSpan.title = `Successful: ${entry.total_successful || 0}, Skipped: ${entry.total_skipped || 0}, Failed: ${entry.total_failed || 0}`; + trackCountsSpan.innerHTML = ` + ${entry.total_successful || 0} / + ${entry.total_skipped || 0} / + ${entry.total_failed || 0} + `; + actionsCell.appendChild(trackCountsSpan); + } if (entry.status_final === 'ERROR' && entry.error_message) { const errorSpan = document.createElement('span'); @@ -105,10 +177,8 @@ document.addEventListener('DOMContentLoaded', () => { errorDetailsDiv = document.createElement('div'); errorDetailsDiv.className = 'error-details'; const newCell = row.insertCell(); // This will append to the end of the row - newCell.colSpan = 9; // Span across 
all columns + newCell.colSpan = 10; // Span across all columns newCell.appendChild(errorDetailsDiv); - // Visually, this new cell will be after the 'Details' button cell. - // To make it appear as part of the status cell or below the row, more complex DOM manipulation or CSS would be needed. } errorDetailsDiv.textContent = entry.error_message; // Toggle display by directly manipulating the style of the details div @@ -127,27 +197,92 @@ document.addEventListener('DOMContentLoaded', () => { prevButton.disabled = currentPage === 1; nextButton.disabled = currentPage === totalPages; } + + function updatePageTitle() { + const titleElement = document.getElementById('history-title'); + if (!titleElement) return; + + if (currentParentTaskId) { + titleElement.textContent = 'Download History - Viewing Tracks'; + + // Add back button + if (!document.getElementById('back-to-history')) { + const backButton = document.createElement('button'); + backButton.id = 'back-to-history'; + backButton.className = 'btn btn-secondary'; + backButton.innerHTML = '← Back to All History'; + backButton.onclick = () => { + currentParentTaskId = null; + updatePageTitle(); + fetchHistory(1); + }; + titleElement.parentNode?.insertBefore(backButton, titleElement); + } + } else { + titleElement.textContent = 'Download History'; + + // Remove back button if it exists + const backButton = document.getElementById('back-to-history'); + if (backButton) { + backButton.remove(); + } + } + } function showDetailsModal(entry: any) { - const details = `Task ID: ${entry.task_id}\n` + - `Type: ${entry.download_type}\n` + - `Name: ${entry.item_name}\n` + - `Artist: ${entry.item_artist}\n` + - `Album: ${entry.item_album || 'N/A'}\n` + - `URL: ${entry.item_url}\n` + - `Spotify ID: ${entry.spotify_id || 'N/A'}\n` + - `Service Used: ${entry.service_used || 'N/A'}\n` + - `Quality Profile (Original): ${entry.quality_profile || 'N/A'}\n` + - `ConvertTo: ${entry.convert_to || 'N/A'}\n` + - `Bitrate: ${entry.bitrate ? 
entry.bitrate + 'k' : 'N/A'}\n` + - `Status: ${entry.status_final}\n` + - `Error: ${entry.error_message || 'None'}\n` + - `Added: ${new Date(entry.timestamp_added * 1000).toLocaleString()}\n` + - `Completed/Ended: ${new Date(entry.timestamp_completed * 1000).toLocaleString()}\n\n` + - `Original Request: ${JSON.stringify(JSON.parse(entry.original_request_json || '{}'), null, 2)}\n\n` + - `Last Status Object: ${JSON.stringify(JSON.parse(entry.last_status_obj_json || '{}'), null, 2)}`; + // Create more detailed modal content with new fields + let details = `Task ID: ${entry.task_id}\n` + + `Type: ${entry.download_type}\n` + + `Name: ${entry.item_name}\n` + + `Artist: ${entry.item_artist}\n` + + `Album: ${entry.item_album || 'N/A'}\n` + + `URL: ${entry.item_url || 'N/A'}\n` + + `Spotify ID: ${entry.spotify_id || 'N/A'}\n` + + `Service Used: ${entry.service_used || 'N/A'}\n` + + `Quality Profile (Original): ${entry.quality_profile || 'N/A'}\n` + + `ConvertTo: ${entry.convert_to || 'N/A'}\n` + + `Bitrate: ${entry.bitrate ? 
entry.bitrate + 'k' : 'N/A'}\n` + + `Status: ${entry.status_final}\n` + + `Error: ${entry.error_message || 'None'}\n` + + `Added: ${new Date(entry.timestamp_added * 1000).toLocaleString()}\n` + + `Completed/Ended: ${new Date(entry.timestamp_completed * 1000).toLocaleString()}\n`; + + // Add track-specific details if this is a track + if (entry.parent_task_id) { + details += `Parent Task ID: ${entry.parent_task_id}\n` + + `Track Status: ${entry.track_status || 'N/A'}\n`; + } + + // Add summary details if this is a parent task + if (entry.total_successful !== null || entry.total_skipped !== null || entry.total_failed !== null) { + details += `\nTrack Summary:\n` + + `Successful: ${entry.total_successful || 0}\n` + + `Skipped: ${entry.total_skipped || 0}\n` + + `Failed: ${entry.total_failed || 0}\n`; + } + + details += `\nOriginal Request: ${JSON.stringify(JSON.parse(entry.original_request_json || '{}'), null, 2)}\n\n` + + `Last Status Object: ${JSON.stringify(JSON.parse(entry.last_status_obj_json || '{}'), null, 2)}`; + + // Try to parse and display summary if available + if (entry.summary_json) { + try { + const summary = JSON.parse(entry.summary_json); + details += `\nSummary: ${JSON.stringify(summary, null, 2)}`; + } catch (e) { + console.error('Error parsing summary JSON:', e); + } + } + alert(details); } + + // Function to view tracks for a parent task + async function viewTracksForParent(taskId: string) { + currentParentTaskId = taskId; + currentPage = 1; + fetchHistory(1); + } document.querySelectorAll('th[data-sort]').forEach(headerCell => { headerCell.addEventListener('click', () => { @@ -174,6 +309,7 @@ document.addEventListener('DOMContentLoaded', () => { }); } + // Event listeners for pagination and filters prevButton?.addEventListener('click', () => fetchHistory(currentPage - 1)); nextButton?.addEventListener('click', () => fetchHistory(currentPage + 1)); limitSelect?.addEventListener('change', (e) => { @@ -182,6 +318,8 @@ 
document.addEventListener('DOMContentLoaded', () => { }); statusFilter?.addEventListener('change', () => fetchHistory(1)); typeFilter?.addEventListener('change', () => fetchHistory(1)); + trackFilter?.addEventListener('change', () => fetchHistory(1)); + hideChildTracksCheckbox?.addEventListener('change', () => fetchHistory(1)); // Initial fetch fetchHistory(); diff --git a/src/js/queue.ts b/src/js/queue.ts index 8fbc310..03fd167 100644 --- a/src/js/queue.ts +++ b/src/js/queue.ts @@ -46,35 +46,49 @@ interface ParentInfo { } interface StatusData { - type?: string; - status?: string; - name?: string; - song?: string; - music?: string; - title?: string; - artist?: string; - artist_name?: string; - album?: string; - owner?: string; - total_tracks?: number | string; - current_track?: number | string; - parsed_current_track?: string; // Make sure these are handled if they are strings - parsed_total_tracks?: string; // Make sure these are handled if they are strings - progress?: number | string; // Can be string initially - percentage?: number | string; // Can be string initially - percent?: number | string; // Can be string initially - time_elapsed?: number; - error?: string; - can_retry?: boolean; - retry_count?: number; - max_retries?: number; // from config potentially - seconds_left?: number; - task_id?: string; + type?: 'track' | 'album' | 'playlist' | 'episode' | string; + status?: 'initializing' | 'skipped' | 'retrying' | 'real-time' | 'error' | 'done' | 'processing' | 'queued' | 'progress' | 'track_progress' | 'complete' | 'cancelled' | 'cancel' | 'interrupted' | string; + + // --- Standardized Fields --- url?: string; - reason?: string; // for skipped + convert_to?: string; + bitrate?: string; + + // Item metadata + song?: string; + artist?: string; + album?: string; + title?: string; // for album + name?: string; // for playlist/track + owner?: string; // for playlist parent?: ParentInfo; + + // Progress indicators + current_track?: number | string; + 
total_tracks?: number | string; + progress?: number | string; // 0-100 + time_elapsed?: number; // ms + + // Status-specific details + reason?: string; // for 'skipped' + error?: string; // for 'error', 'retrying' + retry_count?: number; + seconds_left?: number; + summary?: { + successful_tracks?: string[]; + skipped_tracks?: string[]; + failed_tracks?: { track: string; reason: string }[]; + total_successful?: number; + total_skipped?: number; + total_failed?: number; + }; + + // --- Fields for internal FE logic or from API wrapper --- + task_id?: string; + can_retry?: boolean; + max_retries?: number; // from config original_url?: string; - position?: number; // For queued items + position?: number; original_request?: { url?: string; retry_url?: string; @@ -87,11 +101,12 @@ interface StatusData { display_type?: string; display_artist?: string; service?: string; - [key: string]: any; // For other potential original_request params + [key: string]: any; }; - event?: string; // from SSE + event?: string; overall_progress?: number; - display_type?: string; // from PRG data + display_type?: string; + [key: string]: any; // Allow other properties } @@ -229,26 +244,20 @@ export class DownloadQueue { // Load initial config from the server. await this.loadConfig(); - // Override the server value with locally persisted queue visibility (if present). 
+ // Use localStorage for queue visibility const storedVisible = localStorage.getItem("downloadQueueVisible"); - if (storedVisible !== null) { - // Ensure config is not null before assigning - if (this.config) { - this.config.downloadQueueVisible = storedVisible === "true"; - } - } + const isVisible = storedVisible === "true"; const queueSidebar = document.getElementById('downloadQueue'); - // Ensure config is not null and queueSidebar exists - if (this.config && queueSidebar) { - queueSidebar.hidden = !this.config.downloadQueueVisible; - queueSidebar.classList.toggle('active', !!this.config.downloadQueueVisible); + if (queueSidebar) { + queueSidebar.hidden = !isVisible; + queueSidebar.classList.toggle('active', isVisible); } // Initialize the queue icon based on sidebar visibility const queueIcon = document.getElementById('queueIcon'); - if (queueIcon && this.config) { - if (this.config.downloadQueueVisible) { + if (queueIcon) { + if (isVisible) { queueIcon.innerHTML = 'Close queue'; queueIcon.setAttribute('aria-expanded', 'true'); queueIcon.classList.add('queue-icon-active'); // Add red tint class @@ -326,7 +335,7 @@ export class DownloadQueue { // Update the queue icon to show X when visible or queue icon when hidden const queueIcon = document.getElementById('queueIcon'); - if (queueIcon && this.config) { + if (queueIcon) { if (isVisible) { // Replace the image with an X and add red tint queueIcon.innerHTML = 'Close queue'; @@ -340,34 +349,9 @@ export class DownloadQueue { } } - // Persist the state locally so it survives refreshes. 
+ // Only persist the state in localStorage, not on the server localStorage.setItem("downloadQueueVisible", String(isVisible)); - - try { - await this.loadConfig(); - const updatedConfig = { ...this.config, downloadQueueVisible: isVisible }; - await this.saveConfig(updatedConfig); - this.dispatchEvent('queueVisibilityChanged', { visible: isVisible }); - } catch (error) { - console.error('Failed to save queue visibility:', error); - // Revert UI if save failed. - queueSidebar.classList.toggle('active', !isVisible); - queueSidebar.hidden = isVisible; - // Also revert the icon back - if (queueIcon && this.config) { - if (!isVisible) { - queueIcon.innerHTML = 'Close queue'; - queueIcon.setAttribute('aria-expanded', 'true'); - queueIcon.classList.add('queue-icon-active'); // Add red tint class - } else { - queueIcon.innerHTML = 'Close queue'; - queueIcon.setAttribute('aria-expanded', 'true'); - queueIcon.classList.add('queue-icon-active'); // Add red tint class - } - } - this.dispatchEvent('queueVisibilityChanged', { visible: !isVisible }); - this.showError('Failed to save queue visibility'); - } + this.dispatchEvent('queueVisibilityChanged', { visible: isVisible }); if (isVisible) { // If the queue is now visible, ensure all visible items are being polled. @@ -644,7 +628,7 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): const defaultMessage = (type === 'playlist') ? 
'Reading track list' : 'Initializing download...'; // Use display values if available, or fall back to standard fields - const displayTitle = item.name || item.music || item.song || 'Unknown'; + const displayTitle = item.name || item.song || 'Unknown'; const displayArtist = item.artist || ''; const displayType = type.charAt(0).toUpperCase() + type.slice(1); @@ -1037,9 +1021,9 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): } // Extract common fields - const trackName = data.song || data.music || data.name || data.title || + const trackName = data.song || data.name || data.title || (queueItem?.item?.name) || 'Unknown'; - const artist = data.artist || data.artist_name || + const artist = data.artist || (queueItem?.item?.artist) || ''; const albumTitle = data.title || data.album || data.parent?.title || data.name || (queueItem?.item?.name) || ''; @@ -1047,18 +1031,14 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): (queueItem?.item?.name) || ''; const playlistOwner = data.owner || data.parent?.owner || (queueItem?.item?.owner) || ''; // Add type check if item.owner is object - const currentTrack = data.current_track || data.parsed_current_track || ''; - const totalTracks = data.total_tracks || data.parsed_total_tracks || data.parent?.total_tracks || + const currentTrack = data.current_track || ''; + const totalTracks = data.total_tracks || data.parent?.total_tracks || (queueItem?.item?.total_tracks) || ''; // Format percentage for display when available let formattedPercentage = '0'; if (data.progress !== undefined) { - formattedPercentage = parseFloat(data.progress as string).toFixed(1); // Cast to string - } else if (data.percentage) { - formattedPercentage = (parseFloat(data.percentage as string) * 100).toFixed(1); // Cast to string - } else if (data.percent) { - formattedPercentage = (parseFloat(data.percent as string) * 100).toFixed(1); // Cast to string + formattedPercentage = 
Number(data.progress).toFixed(1); } // Helper for constructing info about the parent item @@ -1204,11 +1184,37 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): case 'done': case 'complete': - if (data.type === 'track') { - return `Downloaded "${trackName}"${artist ? ` by ${artist}` : ''} successfully${getParentInfo()}`; - } else if (data.type === 'album') { + // Final summary for album/playlist + if (data.summary && (data.type === 'album' || data.type === 'playlist')) { + const { total_successful = 0, total_skipped = 0, total_failed = 0, failed_tracks = [] } = data.summary; + const name = data.type === 'album' ? (data.title || albumTitle) : (data.name || playlistName); + return `Finished ${data.type} "${name}". Success: ${total_successful}, Skipped: ${total_skipped}, Failed: ${total_failed}.`; + } + + // Final status for a single track (without a parent) + if (data.type === 'track' && !data.parent) { + return `Downloaded "${trackName}"${artist ? ` by ${artist}` : ''} successfully`; + } + + // A 'done' status for a track *within* a parent collection is just an intermediate step. + if (data.type === 'track' && data.parent) { + const parentType = data.parent.type === 'album' ? 'album' : 'playlist'; + const parentName = data.parent.type === 'album' ? (data.parent.title || '') : (data.parent.name || ''); + const nextTrack = Number(data.current_track || 0) + 1; + const totalTracks = Number(data.total_tracks || 0); + + if (nextTrack > totalTracks) { + return `Finalizing ${parentType} "${parentName}"... (${data.current_track}/${totalTracks} tracks completed)`; + } else { + return `Completed track ${data.current_track}/${totalTracks}: "${trackName}" by ${artist}. Preparing next track...`; + } + } + + // Fallback for album/playlist without summary + if (data.type === 'album') { return `Downloaded album "${albumTitle}"${artist ? 
` by ${artist}` : ''} successfully (${totalTracks} tracks)`; - } else if (data.type === 'playlist') { + } + if (data.type === 'playlist') { return `Downloaded playlist "${playlistName}"${playlistOwner ? ` by ${playlistOwner}` : ''} successfully (${totalTracks} tracks)`; } return `Downloaded ${data.type} successfully`; @@ -1276,6 +1282,12 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): /* New Methods to Handle Terminal State, Inactivity and Auto-Retry */ handleDownloadCompletion(entry: QueueEntry, queueId: string, progress: StatusData | number) { // Add types + // SAFETY CHECK: Never mark a track with a parent as completed + if (typeof progress !== 'number' && progress.type === 'track' && progress.parent) { + console.log(`Prevented completion of track ${progress.song} that is part of ${progress.parent.type}`); + return; // Exit early and don't mark as complete + } + // Mark the entry as ended entry.hasEnded = true; @@ -1292,10 +1304,11 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Stop polling this.clearPollingInterval(queueId); - // Use 3 seconds cleanup delay for completed, 10 seconds for other terminal states like errors + // Use 3 seconds cleanup delay for completed, 10 seconds for errors, and 20 seconds for cancelled/skipped const cleanupDelay = (progress && typeof progress !== 'number' && (progress.status === 'complete' || progress.status === 'done')) ? 3000 : + (progress && typeof progress !== 'number' && progress.status === 'error') ? 10000 : (progress && typeof progress !== 'number' && (progress.status === 'cancelled' || progress.status === 'cancel' || progress.status === 'skipped')) ? 
20000 : - 10000; // Default for other errors if not caught by the more specific error handler delay + 10000; // Default for other cases if not caught by the more specific conditions // Clean up after the appropriate delay setTimeout(() => { @@ -1655,7 +1668,7 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): if (this.queueCache[taskId]) { delete this.queueCache[taskId]; } - continue; + continue; // Skip adding terminal tasks to UI if not already there } let itemType = taskData.type || originalRequest.type || 'unknown'; @@ -1753,7 +1766,6 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): } catch (error) { console.error('Error loading config:', error); this.config = { // Initialize with a default structure on error - downloadQueueVisible: false, maxRetries: 3, retryDelaySeconds: 5, retry_delay_increase: 5, @@ -1762,21 +1774,6 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): } } - async saveConfig(updatedConfig: AppConfig) { // Add type - try { - const response = await fetch('/api/config', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(updatedConfig) - }); - if (!response.ok) throw new Error('Failed to save config'); - this.config = await response.json(); - } catch (error) { - console.error('Error saving config:', error); - throw error; - } - } - // Add a method to check if explicit filter is enabled isExplicitFilterEnabled(): boolean { // Add return type return !!this.config.explicitFilter; @@ -1889,6 +1886,15 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Handle terminal states if (data.last_line && ['complete', 'error', 'cancelled', 'done'].includes(data.last_line.status || '')) { // Add null check console.log(`Terminal state detected: ${data.last_line.status} for ${queueId}`); + + // SAFETY CHECK: Don't mark track as ended if it has a parent + if (data.last_line.type === 'track' && 
data.last_line.parent) { + console.log(`Not marking track ${data.last_line.song} as ended because it has a parent ${data.last_line.parent.type}`); + // Still update the UI + this.handleStatusUpdate(queueId, data); + return; + } + entry.hasEnded = true; // For cancelled downloads, clean up immediately @@ -1953,22 +1959,42 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Extract the actual status data from the API response const statusData: StatusData = data.last_line || {}; // Add type - // Special handling for track status updates that are part of an album/playlist - // We want to keep these for showing the track-by-track progress - if (statusData.type === 'track' && statusData.parent) { - // If this is a track that's part of our album/playlist, keep it - if ((entry.type === 'album' && statusData.parent.type === 'album') || - (entry.type === 'playlist' && statusData.parent.type === 'playlist')) { - console.log(`Processing track status update for ${entry.type}: ${statusData.song}`); - } + // --- Normalize statusData to conform to expected types --- + const numericFields = ['current_track', 'total_tracks', 'progress', 'retry_count', 'seconds_left', 'time_elapsed']; + for (const field of numericFields) { + if (statusData[field] !== undefined && typeof statusData[field] === 'string') { + statusData[field] = parseFloat(statusData[field] as string); + } } - // Only skip updates where type doesn't match AND there's no relevant parent relationship - else if (statusData.type && entry.type && statusData.type !== entry.type && - (!statusData.parent || statusData.parent.type !== entry.type)) { - console.log(`Skipping mismatched type: update=${statusData.type}, entry=${entry.type}`); - return; + + const entryType = entry.type; + const updateType = statusData.type; + + if (!updateType) { + console.warn("Status update received without a 'type'. 
Ignoring.", statusData); + return; } + // --- Filtering logic based on download type --- + // A status update is relevant if its type matches the queue entry's type, + // OR if it's a 'track' update that belongs to an 'album' or 'playlist' entry. + let isRelevantUpdate = false; + if (updateType === entryType) { + isRelevantUpdate = true; + } else if (updateType === 'track' && statusData.parent) { + if (entryType === 'album' && statusData.parent.type === 'album') { + isRelevantUpdate = true; + } else if (entryType === 'playlist' && statusData.parent.type === 'playlist') { + isRelevantUpdate = true; + } + } + + if (!isRelevantUpdate) { + console.log(`Skipping status update with type '${updateType}' for entry of type '${entryType}'.`, statusData); + return; + } + + // Get primary status let status = statusData.status || data.event || 'unknown'; // Define status *before* potential modification @@ -2062,6 +2088,32 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Apply appropriate status classes this.applyStatusClasses(entry, statusData); // Pass statusData instead of status string + if (status === 'done' || status === 'complete') { + if (statusData.summary && (entry.type === 'album' || entry.type === 'playlist')) { + const { total_successful = 0, total_skipped = 0, total_failed = 0, failed_tracks = [] } = statusData.summary; + const summaryDiv = document.createElement('div'); + summaryDiv.className = 'download-summary'; + + let summaryHTML = ` +
+ Finished: + Success ${total_successful} + Skipped ${total_skipped} + Failed ${total_failed} +
+ `; + + // Remove the individual failed tracks list + // The user only wants to see the count, not the names + + summaryDiv.innerHTML = summaryHTML; + if (logElement) { + logElement.innerHTML = ''; // Clear previous message + logElement.appendChild(summaryDiv); + } + } + } + // Special handling for error status based on new API response format if (status === 'error') { entry.hasEnded = true; @@ -2146,9 +2198,23 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): } // Handle terminal states for non-error cases - if (['complete', 'cancel', 'cancelled', 'done', 'skipped'].includes(status)) { - entry.hasEnded = true; - this.handleDownloadCompletion(entry, queueId, statusData); + if (['complete', 'done', 'skipped', 'cancelled', 'cancel'].includes(status)) { + // Only mark as ended if the update type matches the entry type. + // e.g., an album download is only 'done' when an 'album' status says so, + // not when an individual 'track' within it is 'done'. + if (statusData.type === entry.type) { + entry.hasEnded = true; + this.handleDownloadCompletion(entry, queueId, statusData); + } + // IMPORTANT: Never mark a track as ended if it has a parent + else if (statusData.type === 'track' && statusData.parent) { + console.log(`Track ${statusData.song} in ${statusData.parent.type} has completed, but not ending the parent download.`); + // Update UI but don't trigger completion + const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.taskId}`) as HTMLElement | null; + if (logElement) { + logElement.textContent = this.getStatusMessage(statusData); + } + } } // Cache the status for potential page reloads @@ -2211,7 +2277,7 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): if (trackProgressBar && statusData.progress !== undefined) { // Update track progress bar - const progress = parseFloat(statusData.progress as string); // Cast to string + const progress = Number(statusData.progress); 
trackProgressBar.style.width = `${progress}%`; trackProgressBar.setAttribute('aria-valuenow', progress.toString()); // Use string @@ -2321,11 +2387,7 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Real-time progress for direct track download if (statusData.status === 'real-time' && statusData.progress !== undefined) { - progress = parseFloat(statusData.progress as string); // Cast to string - } else if (statusData.percent !== undefined) { - progress = parseFloat(statusData.percent as string) * 100; // Cast to string - } else if (statusData.percentage !== undefined) { - progress = parseFloat(statusData.percentage as string) * 100; // Cast to string + progress = Number(statusData.progress); } else if (statusData.status === 'done' || statusData.status === 'complete') { progress = 100; } else if (statusData.current_track && statusData.total_tracks) { @@ -2372,6 +2434,44 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): let totalTracks = 0; let trackProgress = 0; + // SPECIAL CASE: If this is the final 'done' status for the entire album/playlist (not a track) + if ((statusData.status === 'done' || statusData.status === 'complete') && + (statusData.type === 'album' || statusData.type === 'playlist') && + statusData.type === entry.type && + statusData.total_tracks) { + + console.log('Final album/playlist completion. 
Setting progress to 100%'); + + // Extract total tracks + totalTracks = parseInt(String(statusData.total_tracks), 10); + // Force current track to equal total tracks for completion + currentTrack = totalTracks; + + // Update counter to show n/n + if (progressCounter) { + progressCounter.textContent = `${totalTracks}/${totalTracks}`; + } + + // Set progress bar to 100% + if (overallProgressBar) { + overallProgressBar.style.width = '100%'; + overallProgressBar.setAttribute('aria-valuenow', '100'); + overallProgressBar.classList.add('complete'); + } + + // Hide track progress or set to complete + if (trackProgressBar) { + const trackProgressContainer = entry.element.querySelector('#track-progress-container-' + entry.uniqueId + '-' + entry.taskId) as HTMLElement | null; + if (trackProgressContainer) { + trackProgressContainer.style.display = 'none'; // Optionally hide or set to 100% + } + } + + // Store for later use + entry.progress = 100; + return; + } + // Handle track-level updates for album/playlist downloads if (statusData.type === 'track' && statusData.parent && (entry.type === 'album' || entry.type === 'playlist')) { @@ -2399,6 +2499,12 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Get current track and total tracks from the status data if (statusData.current_track !== undefined) { currentTrack = parseInt(String(statusData.current_track), 10); + + // For completed tracks, use the track number rather than one less + if (statusData.status === 'done' || statusData.status === 'complete') { + // The current track is the one that just completed + currentTrack = parseInt(String(statusData.current_track), 10); + } // Get total tracks - try from statusData first, then from parent if (statusData.total_tracks !== undefined) { @@ -2412,7 +2518,10 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Get track progress for real-time updates if (statusData.status === 'real-time' && statusData.progress 
!== undefined) { - trackProgress = parseFloat(statusData.progress as string); // Cast to string + trackProgress = Number(statusData.progress); // Cast to number + } else if (statusData.status === 'done' || statusData.status === 'complete') { + // For a completed track, set trackProgress to 100% + trackProgress = 100; } // Update the track progress counter display @@ -2424,7 +2533,9 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): if (logElement && statusData.song && statusData.artist) { let progressInfo = ''; if (statusData.status === 'real-time' && trackProgress > 0) { - progressInfo = ` - ${trackProgress.toFixed(1)}%`; + progressInfo = ` - ${trackProgress}%`; + } else if (statusData.status === 'done' || statusData.status === 'complete') { + progressInfo = ' - Complete'; } logElement.textContent = `Currently downloading: ${statusData.song} by ${statusData.artist} (${currentTrack}/${totalTracks}${progressInfo})`; } @@ -2432,16 +2543,23 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): // Calculate and update the overall progress bar if (totalTracks > 0) { let overallProgress = 0; - // Always compute overall based on trackProgress if available, using album/playlist real-time formula - if (trackProgress !== undefined) { + + // For completed tracks, use completed/total + if (statusData.status === 'done' || statusData.status === 'complete') { + // For completed tracks, this track is fully complete + overallProgress = (currentTrack / totalTracks) * 100; + } + // For in-progress tracks, use the real-time formula + else if (trackProgress !== undefined) { const completedTracksProgress = (currentTrack - 1) / totalTracks; const currentTrackContribution = (1 / totalTracks) * (trackProgress / 100); overallProgress = (completedTracksProgress + currentTrackContribution) * 100; - console.log(`Overall progress: ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks}, Progress: ${trackProgress}%)`); } 
else { + // Fallback to track count method overallProgress = (currentTrack / totalTracks) * 100; - console.log(`Overall progress (non-real-time): ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks})`); } + + console.log(`Overall progress: ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks}, Progress: ${trackProgress}%)`); // Update the progress bar if (overallProgressBar) { @@ -2464,23 +2582,36 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): trackProgressContainer.style.display = 'block'; } - if (statusData.status === 'real-time') { - // Real-time progress for the current track - const safeTrackProgress = Math.max(0, Math.min(100, trackProgress)); - trackProgressBar.style.width = `${safeTrackProgress}%`; - trackProgressBar.setAttribute('aria-valuenow', safeTrackProgress.toString()); // Use string + if (statusData.status === 'real-time' || statusData.status === 'real_time') { + // For real-time updates, use the track progress for the small green progress bar + // This shows download progress for the current track only + const safeProgress = isNaN(trackProgress) ? 
0 : Math.max(0, Math.min(100, trackProgress)); + trackProgressBar.style.width = `${safeProgress}%`; + trackProgressBar.setAttribute('aria-valuenow', String(safeProgress)); trackProgressBar.classList.add('real-time'); - if (safeTrackProgress >= 100) { + if (safeProgress >= 100) { trackProgressBar.classList.add('complete'); } else { trackProgressBar.classList.remove('complete'); } - } else { - // Indeterminate progress animation for non-real-time updates + } else if (statusData.status === 'done' || statusData.status === 'complete') { + // For completed tracks, show 100% + trackProgressBar.style.width = '100%'; + trackProgressBar.setAttribute('aria-valuenow', '100'); + trackProgressBar.classList.add('complete'); + } else if (['progress', 'processing'].includes(statusData.status || '')) { + // For non-real-time progress updates, show an indeterminate-style progress + // by using a pulsing animation via CSS trackProgressBar.classList.add('progress-pulse'); trackProgressBar.style.width = '100%'; - trackProgressBar.setAttribute('aria-valuenow', "50"); // Use string + trackProgressBar.setAttribute('aria-valuenow', String(50)); // indicate in-progress + } else { + // For other status updates, use current track position + trackProgressBar.classList.remove('progress-pulse'); + const trackPositionPercent = currentTrack > 0 ? 
100 : 0; + trackProgressBar.style.width = `${trackPositionPercent}%`; + trackProgressBar.setAttribute('aria-valuenow', String(trackPositionPercent)); } } @@ -2523,18 +2654,21 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): totalTracks = parseInt(parts[1], 10); } + // For completed albums/playlists, ensure current track equals total tracks + if ((statusData.status === 'done' || statusData.status === 'complete') && + (statusData.type === 'album' || statusData.type === 'playlist') && + statusData.type === entry.type && + totalTracks > 0) { + currentTrack = totalTracks; + } + // Get track progress for real-time downloads if (statusData.status === 'real-time' && statusData.progress !== undefined) { // For real-time downloads, progress comes as a percentage value (0-100) - trackProgress = parseFloat(statusData.progress as string); // Cast to string - } else if (statusData.percent !== undefined) { - // Handle percent values (0-1) - trackProgress = parseFloat(statusData.percent as string) * 100; // Cast to string - } else if (statusData.percentage !== undefined) { - // Handle percentage values (0-1) - trackProgress = parseFloat(statusData.percentage as string) * 100; // Cast to string + trackProgress = Number(statusData.progress); // Cast to number } else if (statusData.status === 'done' || statusData.status === 'complete') { progress = 100; + trackProgress = 100; // Also set trackProgress to 100% for completed status } else if (statusData.current_track && statusData.total_tracks) { // If we don't have real-time progress but do have track position progress = (parseInt(statusData.current_track as string, 10) / parseInt(statusData.total_tracks as string, 10)) * 100; // Cast to string @@ -2730,6 +2864,18 @@ createQueueItem(item: QueueItem, type: string, taskId: string, queueId:string): const serverEquivalent = serverTasks.find(st => st.task_id === localEntry.taskId); if (serverEquivalent && serverEquivalent.last_status_obj && 
terminalStates.includes(serverEquivalent.last_status_obj.status)) { if (!localEntry.hasEnded) { + // Don't clean up if this is a track with a parent + if (serverEquivalent.last_status_obj.type === 'track' && serverEquivalent.last_status_obj.parent) { + console.log(`Periodic sync: Not cleaning up track ${serverEquivalent.last_status_obj.song} with parent ${serverEquivalent.last_status_obj.parent.type}`); + continue; + } + + // Only clean up if the types match (e.g., don't clean up an album when a track is done) + if (serverEquivalent.last_status_obj.type !== localEntry.type) { + console.log(`Periodic sync: Not cleaning up ${localEntry.type} entry due to ${serverEquivalent.last_status_obj.type} status update`); + continue; + } + console.log(`Periodic sync: Local task ${localEntry.taskId} is now terminal on server (${serverEquivalent.last_status_obj.status}). Cleaning up.`); this.handleDownloadCompletion(localEntry, localEntry.uniqueId, serverEquivalent.last_status_obj); } diff --git a/static/css/history/history.css b/static/css/history/history.css index 5267b84..42c1522 100644 --- a/static/css/history/history.css +++ b/static/css/history/history.css @@ -38,6 +38,71 @@ tr:nth-child(even) { background-color: #222; } +/* Parent and child track styling */ +.parent-task-row { + background-color: #282828 !important; + font-weight: bold; +} + +.child-track-row { + background-color: #1a1a1a !important; + font-size: 0.9em; +} + +.child-track-indent { + color: #1DB954; + margin-right: 5px; +} + +/* Track status styling */ +.track-status-successful { + color: #1DB954; + font-weight: bold; +} + +.track-status-skipped { + color: #FFD700; + font-weight: bold; +} + +.track-status-failed { + color: #FF4136; + font-weight: bold; +} + +/* Track counts display */ +.track-counts { + margin-left: 10px; + font-size: 0.85em; +} + +.track-count.success { + color: #1DB954; +} + +.track-count.skipped { + color: #FFD700; +} + +.track-count.failed { + color: #FF4136; +} + +/* Back button */ 
+#back-to-history { + margin-right: 15px; + padding: 5px 10px; + background-color: #333; + color: white; + border: none; + border-radius: 4px; + cursor: pointer; +} + +#back-to-history:hover { + background-color: #444; +} + .pagination { margin-top: 20px; text-align: center; @@ -63,6 +128,7 @@ tr:nth-child(even) { display: flex; gap: 15px; align-items: center; + flex-wrap: wrap; } .filters label, .filters select, .filters input { @@ -77,9 +143,16 @@ tr:nth-child(even) { border-radius: 4px; } +.checkbox-filter { + display: flex; + align-items: center; + gap: 5px; +} + .status-COMPLETED { color: #1DB954; font-weight: bold; } .status-ERROR { color: #FF4136; font-weight: bold; } .status-CANCELLED { color: #AAAAAA; } +.status-skipped { color: #FFD700; font-weight: bold; } .error-message-toggle { cursor: pointer; @@ -97,8 +170,8 @@ tr:nth-child(even) { font-size: 0.9em; } -/* Styling for the Details icon button in the table */ -.details-btn { +/* Styling for the buttons in the table */ +.btn-icon { background-color: transparent; /* Or a subtle color like #282828 */ border: none; border-radius: 50%; /* Make it circular */ @@ -108,14 +181,23 @@ tr:nth-child(even) { align-items: center; justify-content: center; transition: background-color 0.2s ease; + margin-right: 5px; } -.details-btn img { +.btn-icon img { width: 16px; /* Icon size */ height: 16px; filter: invert(1); /* Make icon white if it's dark, adjust if needed */ } -.details-btn:hover { +.btn-icon:hover { background-color: #333; /* Darker on hover */ +} + +.details-btn:hover img { + filter: invert(0.8) sepia(1) saturate(5) hue-rotate(175deg); /* Make icon blue on hover */ +} + +.tracks-btn:hover img { + filter: invert(0.8) sepia(1) saturate(5) hue-rotate(90deg); /* Make icon green on hover */ } \ No newline at end of file diff --git a/static/css/queue/queue.css b/static/css/queue/queue.css index f52c882..7765c27 100644 --- a/static/css/queue/queue.css +++ b/static/css/queue/queue.css @@ -573,6 +573,85 @@ 
margin-top: 8px; } +/* ----------------------------- */ +/* DOWNLOAD SUMMARY ICONS */ +/* ----------------------------- */ + +/* Base styles for all summary icons */ +.summary-icon { + width: 14px; + height: 14px; + vertical-align: middle; + margin-right: 4px; + margin-top: -2px; +} + +/* Download summary formatting */ +.download-summary { + background: rgba(255, 255, 255, 0.05); + border-radius: 6px; + padding: 12px; + margin-top: 5px; +} + +.summary-line { + display: flex; + align-items: center; + gap: 12px; + margin-bottom: 8px; +} + +.summary-line span { + display: flex; + align-items: center; + padding: 3px 8px; + border-radius: 4px; + font-weight: 500; +} + +/* Specific icon background colors */ +.summary-line span:nth-child(2) { + background: rgba(29, 185, 84, 0.1); /* Success background */ +} + +.summary-line span:nth-child(3) { + background: rgba(230, 126, 34, 0.1); /* Skip background */ +} + +.summary-line span:nth-child(4) { + background: rgba(255, 85, 85, 0.1); /* Failed background */ +} + +/* Failed tracks list styling */ +.failed-tracks-title { + color: #ff5555; + font-weight: 600; + margin: 10px 0 5px; + font-size: 13px; +} + +.failed-tracks-list { + list-style-type: none; + padding-left: 10px; + margin: 0; + font-size: 12px; + color: #b3b3b3; + max-height: 100px; + overflow-y: auto; +} + +.failed-tracks-list li { + padding: 3px 0; + position: relative; +} + +.failed-tracks-list li::before { + content: "•"; + color: #ff5555; + position: absolute; + left: -10px; +} + /* Base styles for error buttons */ .error-buttons button { border: none; diff --git a/static/html/history.html b/static/html/history.html index 044ae57..36001d8 100644 --- a/static/html/history.html +++ b/static/html/history.html @@ -19,7 +19,7 @@
-

Download History

+

Download History

@@ -38,6 +38,19 @@ + + + + +
+ + +
@@ -45,13 +58,13 @@ - + - + diff --git a/static/images/list.svg b/static/images/list.svg new file mode 100644 index 0000000..53fe06c --- /dev/null +++ b/static/images/list.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/static/images/skip.svg b/static/images/skip.svg new file mode 100644 index 0000000..b9a2ae2 --- /dev/null +++ b/static/images/skip.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file From 6159d1509b3c64923ad22267ffe8e9ae0a6599d4 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Mon, 9 Jun 2025 18:28:03 -0600 Subject: [PATCH 7/7] 2.5.0 hotfix --- src/js/history.ts | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/js/history.ts b/src/js/history.ts index d274ce0..f0e01bb 100644 --- a/src/js/history.ts +++ b/src/js/history.ts @@ -114,15 +114,19 @@ document.addEventListener('DOMContentLoaded', () => { // Construct Quality display string const qualityCell = row.insertCell(); let qualityDisplay = entry.quality_profile || 'N/A'; - if (entry.convert_to) { + + // Check if convert_to exists and is not "None" + if (entry.convert_to && entry.convert_to !== "None") { qualityDisplay = `${entry.convert_to.toUpperCase()}`; - if (entry.bitrate) { + // Check if bitrate exists and is not "None" + if (entry.bitrate && entry.bitrate !== "None") { qualityDisplay += ` ${entry.bitrate}k`; } qualityDisplay += ` (${entry.quality_profile || 'Original'})`; - } else if (entry.bitrate) { // Case where convert_to might not be set, but bitrate is (e.g. for OGG Vorbis quality settings) - qualityDisplay = `${entry.bitrate}k (${entry.quality_profile || 'Profile'})`; + } else if (entry.bitrate && entry.bitrate !== "None") { // Case where convert_to might not be set, but bitrate is (e.g. 
for OGG Vorbis quality settings) + qualityDisplay = `${entry.bitrate}k (${entry.quality_profile || 'Profile'})`; } + // If both are "None" or null, it will just use the quality_profile value set above qualityCell.textContent = qualityDisplay; const statusCell = row.insertCell();
Name ArtistTypeType/Status Service Quality Status Date Added Date Completed/EndedDetailsActions