diff --git a/routes/album.py b/routes/album.py
index 86957b5..c143374 100755
--- a/routes/album.py
+++ b/routes/album.py
@@ -40,7 +40,11 @@ class FlushingFileWrapper:
     def flush(self):
         self.file.flush()
 
-def download_task(service, url, main, fallback, quality, fall_quality, real_time, prg_path):
+def download_task(service, url, main, fallback, quality, fall_quality, real_time, prg_path, orig_request):
+    """
+    The download task writes out the original request data into the progress file
+    and then runs the album download.
+    """
     try:
         from routes.utils.album import download_album
         with open(prg_path, 'w') as f:
@@ -48,6 +52,15 @@ def download_task(service, url, main, fallback, quality, fall_quality, real_time
             original_stdout = sys.stdout
             sys.stdout = flushing_file  # Redirect stdout
 
+            # Write the original request data into the progress file.
+            try:
+                flushing_file.write(json.dumps({"original_request": orig_request}) + "\n")
+            except Exception as e:
+                flushing_file.write(json.dumps({
+                    "status": "error",
+                    "message": f"Failed to write original request data: {str(e)}"
+                }) + "\n")
+
             try:
                 download_album(
                     service=service,
@@ -158,10 +171,13 @@ def handle_download():
     os.makedirs(prg_dir, exist_ok=True)
     prg_path = os.path.join(prg_dir, filename)
 
+    # Capture the original request parameters as a dictionary.
+    orig_request = request.args.to_dict()
+
     # Create and start the download process, and track it in the global dictionary.
     process = Process(
         target=download_task,
-        args=(service, url, main, fallback, quality, fall_quality, real_time, prg_path)
+        args=(service, url, main, fallback, quality, fall_quality, real_time, prg_path, orig_request)
     )
     process.start()
     download_processes[filename] = process
diff --git a/routes/artist.py b/routes/artist.py
index 4595bb9..25443b3 100644
--- a/routes/artist.py
+++ b/routes/artist.py
@@ -44,9 +44,10 @@ class FlushingFileWrapper:
     def flush(self):
         self.file.flush()
 
-def download_artist_task(service, artist_url, main, fallback, quality, fall_quality, real_time, album_type, prg_path):
+def download_artist_task(service, artist_url, main, fallback, quality, fall_quality, real_time, album_type, prg_path, orig_request):
     """
-    This function wraps the call to download_artist_albums and writes JSON status to the prg file.
+    This function wraps the call to download_artist_albums, writes the original
+    request data to the progress file, and then writes JSON status updates.
     """
     try:
         from routes.utils.artist import download_artist_albums
@@ -55,6 +56,15 @@ def download_artist_task(service, artist_url, main, fallback, quality, fall_qual
             original_stdout = sys.stdout
             sys.stdout = flushing_file  # Redirect stdout to our flushing file wrapper
 
+            # Write the original request data to the progress file.
+            try:
+                flushing_file.write(json.dumps({"original_request": orig_request}) + "\n")
+            except Exception as e:
+                flushing_file.write(json.dumps({
+                    "status": "error",
+                    "message": f"Failed to write original request data: {str(e)}"
+                }) + "\n")
+
             try:
                 download_artist_albums(
                     service=service,
@@ -179,10 +189,13 @@ def handle_artist_download():
     os.makedirs(prg_dir, exist_ok=True)
     prg_path = os.path.join(prg_dir, filename)
 
+    # Capture the original request parameters as a dictionary.
+    orig_request = request.args.to_dict()
+
     # Create and start the download process.
     process = Process(
         target=download_artist_task,
-        args=(service, artist_url, main, fallback, quality, fall_quality, real_time, album_type, prg_path)
+        args=(service, artist_url, main, fallback, quality, fall_quality, real_time, album_type, prg_path, orig_request)
     )
     process.start()
     download_processes[filename] = process
diff --git a/routes/playlist.py b/routes/playlist.py
index c8eba7b..dc2d578 100755
--- a/routes/playlist.py
+++ b/routes/playlist.py
@@ -40,14 +40,23 @@ class FlushingFileWrapper:
     def flush(self):
         self.file.flush()
 
-def download_task(service, url, main, fallback, quality, fall_quality, real_time, prg_path):
+def download_task(service, url, main, fallback, quality, fall_quality, real_time, prg_path, orig_request):
     try:
         from routes.utils.playlist import download_playlist
         with open(prg_path, 'w') as f:
             flushing_file = FlushingFileWrapper(f)
             original_stdout = sys.stdout
             sys.stdout = flushing_file  # Process-specific stdout
-
+
+            # Write the original request data into the progress file.
+            try:
+                flushing_file.write(json.dumps({"original_request": orig_request}) + "\n")
+            except Exception as e:
+                flushing_file.write(json.dumps({
+                    "status": "error",
+                    "message": f"Failed to write original request data: {str(e)}"
+                }) + "\n")
+
             try:
                 download_playlist(
                     service=service,
@@ -103,9 +112,12 @@ def handle_download():
     os.makedirs(prg_dir, exist_ok=True)
     prg_path = os.path.join(prg_dir, filename)
 
+    # Capture the original request parameters as a dictionary.
+    orig_request = request.args.to_dict()
+
     process = Process(
         target=download_task,
-        args=(service, url, main, fallback, quality, fall_quality, real_time, prg_path)
+        args=(service, url, main, fallback, quality, fall_quality, real_time, prg_path, orig_request)
     )
     process.start()
     # Track the running process using the generated filename.
diff --git a/routes/prgs.py b/routes/prgs.py
index 71929e0..86ec1ef 100755
--- a/routes/prgs.py
+++ b/routes/prgs.py
@@ -10,8 +10,11 @@ PRGS_DIR = os.path.join(os.getcwd(), 'prgs')
 @prgs_bp.route('/<filename>', methods=['GET'])
 def get_prg_file(filename):
     """
-    Return a JSON object with the resource type, its name (title) and the last line (progress update) of the PRG file.
-    If the file is empty, return default values.
+    Return a JSON object with the resource type, its name (title),
+    the last progress update (last line) of the PRG file, and, if available,
+    the original request parameters (from the first line of the file).
+
+    For resource type and name, the second line of the file is used.
     """
     try:
         # Security check to prevent path traversal attacks.
@@ -29,41 +32,53 @@ def get_prg_file(filename):
             return jsonify({
                 "type": "",
                 "name": "",
-                "last_line": None
+                "last_line": None,
+                "original_request": None
             })
 
-        # Process the initialization line (first line) to extract type and name.
+        # Attempt to extract the original request from the first line.
+        original_request = None
         try:
-            init_data = json.loads(lines[0])
-        except Exception as e:
-            # If parsing fails, use defaults.
-            init_data = {}
+            first_line = json.loads(lines[0])
+            if "original_request" in first_line:
+                original_request = first_line["original_request"]
+        except Exception:
+            original_request = None
 
-        resource_type = init_data.get("type", "")
-        # Determine the name based on type.
-        if resource_type == "track":
-            resource_name = init_data.get("song", "")
-        elif resource_type == "album":
-            resource_name = init_data.get("album", "")
-        elif resource_type == "playlist":
-            resource_name = init_data.get("name", "")
-        elif resource_type == "artist":
-            resource_name = init_data.get("artist", "")
+        # For resource type and name, use the second line if available.
+        if len(lines) > 1:
+            try:
+                second_line = json.loads(lines[1])
+                resource_type = second_line.get("type", "")
+                if resource_type == "track":
+                    resource_name = second_line.get("song", "")
+                elif resource_type == "album":
+                    resource_name = second_line.get("album", "")
+                elif resource_type == "playlist":
+                    resource_name = second_line.get("name", "")
+                elif resource_type == "artist":
+                    resource_name = second_line.get("artist", "")
+                else:
+                    resource_name = ""
+            except Exception:
+                resource_type = ""
+                resource_name = ""
         else:
+            resource_type = ""
             resource_name = ""
 
         # Get the last line from the file.
         last_line_raw = lines[-1]
 
-        # Try to parse the last line as JSON.
         try:
             last_line_parsed = json.loads(last_line_raw)
         except Exception:
-            last_line_parsed = last_line_raw  # Fallback to returning raw string if JSON parsing fails.
+            last_line_parsed = last_line_raw  # Fallback to raw string if JSON parsing fails.
 
         return jsonify({
             "type": resource_type,
             "name": resource_name,
-            "last_line": last_line_parsed
+            "last_line": last_line_parsed,
+            "original_request": original_request
         })
     except FileNotFoundError:
         abort(404, "File not found")
diff --git a/routes/track.py b/routes/track.py
index 7ffb95a..f19bf22 100755
--- a/routes/track.py
+++ b/routes/track.py
@@ -30,14 +30,23 @@ class FlushingFileWrapper:
     def flush(self):
         self.file.flush()
 
-def download_task(service, url, main, fallback, quality, fall_quality, real_time, prg_path):
+def download_task(service, url, main, fallback, quality, fall_quality, real_time, prg_path, orig_request):
     try:
         from routes.utils.track import download_track
         with open(prg_path, 'w') as f:
             flushing_file = FlushingFileWrapper(f)
             original_stdout = sys.stdout
             sys.stdout = flushing_file  # Redirect stdout for this process
-
+
+            # Write the original request data into the progress file.
+            try:
+                flushing_file.write(json.dumps({"original_request": orig_request}) + "\n")
+            except Exception as e:
+                flushing_file.write(json.dumps({
+                    "status": "error",
+                    "message": f"Failed to write original request data: {str(e)}"
+                }) + "\n")
+
             try:
                 download_track(
                     service=service,
@@ -148,9 +157,12 @@ def handle_download():
     os.makedirs(prg_dir, exist_ok=True)
     prg_path = os.path.join(prg_dir, filename)
 
+    # Capture the original request parameters as a dictionary.
+    orig_request = request.args.to_dict()
+
     process = Process(
         target=download_task,
-        args=(service, url, main, fallback, quality, fall_quality, real_time, prg_path)
+        args=(service, url, main, fallback, quality, fall_quality, real_time, prg_path, orig_request)
     )
     process.start()
     # Track the running process using the generated filename.
diff --git a/static/js/queue.js b/static/js/queue.js
index 7bd2326..fec67f6 100644
--- a/static/js/queue.js
+++ b/static/js/queue.js
@@ -48,9 +48,12 @@ class DownloadQueue {
     this.dispatchEvent('queueVisibilityChanged', { visible: queueSidebar.classList.contains('active') });
   }
 
-  addDownload(item, type, prgFile) {
+  /**
+   * Now accepts an extra argument "requestUrl" which is the same API call used to initiate the download.
+   */
+  addDownload(item, type, prgFile, requestUrl = null) {
     const queueId = this.generateQueueId();
-    const entry = this.createQueueEntry(item, type, prgFile, queueId);
+    const entry = this.createQueueEntry(item, type, prgFile, queueId, requestUrl);
     this.downloadQueue[queueId] = entry;
     document.getElementById('queueItems').appendChild(entry.element);
@@ -73,6 +76,14 @@ class DownloadQueue {
       try {
         const response = await fetch(`/api/prgs/${entry.prgFile}`);
         const data = await response.json();
+
+        // If the prg file info contains the original_request parameters and we haven't stored a retry URL yet,
+        // build one using the type and original_request parameters.
+        if (!entry.requestUrl && data.original_request) {
+          const params = new URLSearchParams(data.original_request).toString();
+          entry.requestUrl = `/api/${entry.type}/download?${params}`;
+        }
+
         const progress = data.last_line;
 
         if (!progress) {
@@ -80,6 +91,7 @@ class DownloadQueue {
           return;
         }
 
+        // If the new progress is the same as the last, also treat it as inactivity.
         if (JSON.stringify(entry.lastStatus) === JSON.stringify(progress)) {
           this.handleInactivity(entry, queueId, logElement);
           return;
         }
@@ -108,11 +120,15 @@ class DownloadQueue {
     return Date.now().toString() + Math.random().toString(36).substr(2, 9);
   }
 
-  createQueueEntry(item, type, prgFile, queueId) {
+  /**
+   * Now accepts a fifth parameter "requestUrl" and stores it in the entry.
+   */
+  createQueueEntry(item, type, prgFile, queueId, requestUrl) {
     return {
       item,
       type,
       prgFile,
+      requestUrl, // store the original API request URL so we can retry later
      element: this.createQueueItem(item, type, prgFile, queueId),
      lastStatus: null,
      lastUpdated: Date.now(),
@@ -174,6 +190,7 @@ class DownloadQueue {
         const prgResponse = await fetch(`/api/prgs/${prgFile}`);
         const prgData = await prgResponse.json();
         const dummyItem = { name: prgData.name || prgFile, external_urls: {} };
+        // In this case, no original request URL is available.
         this.addDownload(dummyItem, prgData.type || "unknown", prgFile);
       }
     } catch (error) {
@@ -210,13 +227,11 @@ class DownloadQueue {
     // Helper function for a simple pluralization:
     function pluralize(word) {
-      // If the word already ends with an "s", assume it's plural.
       return word.endsWith('s') ? word : word + 's';
     }
 
     switch (data.status) {
       case 'downloading':
-        // For track downloads only.
         if (data.type === 'track') {
           return `Downloading track "${data.song}" by ${data.artist}...`;
         }
@@ -229,12 +244,9 @@
           return `Initializing album download "${data.album}" by ${data.artist}...`;
         } else if (data.type === 'artist') {
           let subsets = [];
-          // Prefer an explicit subsets array if available.
           if (data.subsets && Array.isArray(data.subsets) && data.subsets.length > 0) {
             subsets = data.subsets;
-          }
-          // Otherwise, if album_type is provided, split it into an array.
-          else if (data.album_type) {
+          } else if (data.album_type) {
             subsets = data.album_type
               .split(',')
               .map(item => item.trim())
@@ -244,15 +256,12 @@
             const subsetsMessage = formatList(subsets);
             return `Initializing download for ${data.artist}'s ${subsetsMessage}`;
           }
-          // Fallback message if neither subsets nor album_type are provided.
           return `Initializing download for ${data.artist} with ${data.total_albums} album(s) [${data.album_type}]...`;
         }
         return `Initializing ${data.type} download...`;
 
       case 'progress':
-        // Expect progress messages for playlists, albums (or artist’s albums) to include a "track" and "current_track".
         if (data.track && data.current_track) {
-          // current_track is a string in the format "current/total"
          const parts = data.current_track.split('/');
          const current = parts[0];
          const total = parts[1] || '?';

          if (data.type === 'playlist') {
            return `Downloading playlist: Track ${current} of ${total} - ${data.track}`;
          } else if (data.type === 'album') {
-            // For album progress, the "album" and "artist" fields may be available on a done message.
-            // In some cases (like artist downloads) only track info is passed.
            if (data.album && data.artist) {
              return `Downloading album "${data.album}" by ${data.artist}: track ${current} of ${total} - ${data.track}`;
            } else {
@@ -269,7 +276,6 @@
          }
        }
-        // Fallback if fields are missing:
        return `Progress: ${data.status}...`;

      case 'done':
@@ -297,7 +303,6 @@
        return `Track "${data.song}" skipped, it already exists!`;

      case 'real_time': {
-        // Convert milliseconds to minutes and seconds.
        const totalMs = data.time_elapsed;
        const minutes = Math.floor(totalMs / 60000);
        const seconds = Math.floor((totalMs % 60000) / 1000);
@@ -313,19 +318,80 @@
  /* New Methods to Handle Terminal State and Inactivity */

  handleTerminalState(entry, queueId, progress) {
-    // Mark the entry as ended and clear its monitoring interval
+    // Mark the entry as ended and clear its monitoring interval.
    entry.hasEnded = true;
    clearInterval(entry.intervalId);
    const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`);
-    if (logElement) {
+    if (!logElement) return;
+
+    // If the terminal state is an error, hide the cancel button and add error buttons.
+    if (progress.status === 'error') {
+      // Hide the cancel button.
+      const cancelBtn = entry.element.querySelector('.cancel-btn');
+      if (cancelBtn) {
+        cancelBtn.style.display = 'none';
+      }
+
+      logElement.innerHTML = `
+        <div class="error-message">${this.getStatusMessage(progress)}</div>
+        <div class="error-buttons">
+          <button class="close-error-btn">&times;</button>
+          <button class="retry-btn">Retry</button>
+        </div>
+      `;
+      // Close (X) button: immediately remove the queue entry.
+      logElement.querySelector('.close-error-btn').addEventListener('click', () => {
+        this.cleanupEntry(queueId);
+      });
+
+      // Retry button: re-send the original API request.
+      logElement.querySelector('.retry-btn').addEventListener('click', async () => {
+        logElement.textContent = 'Retrying download...';
+        if (!entry.requestUrl) {
+          logElement.textContent = 'Retry not available: missing original request information.';
+          return;
+        }
+        try {
+          const retryResponse = await fetch(entry.requestUrl);
+          const retryData = await retryResponse.json();
+          if (retryData.prg_file) {
+            // Delete the failed prg file before updating to the new one.
+            const oldPrgFile = entry.prgFile;
+            await fetch(`/api/prgs/delete/${encodeURIComponent(oldPrgFile)}`, { method: 'DELETE' });
+
+            // Update the log element's id to reflect the new prg_file.
+            const logEl = entry.element.querySelector('.log');
+            logEl.id = `log-${entry.uniqueId}-${retryData.prg_file}`;
+
+            // Update the entry with the new prg_file and reset its state.
+            entry.prgFile = retryData.prg_file;
+            entry.lastStatus = null;
+            entry.hasEnded = false;
+            entry.lastUpdated = Date.now();
+            logEl.textContent = 'Retry initiated...';
+
+            // Restart monitoring using the new prg_file.
+            this.startEntryMonitoring(queueId);
+          } else {
+            logElement.textContent = 'Retry failed: invalid response from server';
+          }
+        } catch (error) {
+          logElement.textContent = 'Retry failed: ' + error.message;
+        }
+      });
+      // Do not automatically clean up if an error occurred.
+      return;
+    } else {
+      // For non-error terminal states, update the message and then clean up after 5 seconds.
      logElement.textContent = this.getStatusMessage(progress);
+      setTimeout(() => this.cleanupEntry(queueId), 5000);
    }
-    // Optionally, perform cleanup after a delay
-    setTimeout(() => this.cleanupEntry(queueId), 5000);
  }
+
  handleInactivity(entry, queueId, logElement) {
-    // Check if a significant time has elapsed since the last update (e.g., 10 seconds)
+    // If no update in 10 seconds, treat as an error.
    const now = Date.now();
    if (now - entry.lastUpdated > 10000) {
      const progress = { status: 'error', message: 'Inactivity timeout' };
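
Note on the data flow above (illustration only, not part of the patch): each download route now writes {"original_request": ...} as the first line of the .prg file, /api/prgs/<filename> returns that object alongside the type and name taken from the second line, and queue.js rebuilds a retry URL of the form /api/<type>/download?<params>. The minimal Python sketch below mirrors that flow; the sample lines and parameter values are invented for the example and do not come from a real progress file.

import json
from urllib.parse import urlencode

# Made-up stand-ins for the first, second, and last lines of a .prg file.
sample_prg_lines = [
    '{"original_request": {"service": "spotify", "url": "https://example.com/album/xyz"}}',
    '{"type": "album", "album": "Example Album", "artist": "Example Artist"}',
    '{"status": "error", "message": "Inactivity timeout"}',
]

original_request = json.loads(sample_prg_lines[0]).get("original_request")  # first line
init_data = json.loads(sample_prg_lines[1])                                 # second line: type/name
last_line = json.loads(sample_prg_lines[-1])                                # latest progress update

# Same shape as the retry URL assembled in static/js/queue.js.
retry_url = f"/api/{init_data.get('type', 'track')}/download?{urlencode(original_request)}"
print(retry_url)  # /api/album/download?service=spotify&url=https%3A%2F%2Fexample.com%2Falbum%2Fxyz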