Improved everything

cool.gitter.choco
2025-01-26 20:14:29 -06:00
parent 8509a123a6
commit feaab332c8
8 changed files with 313 additions and 262 deletions

View File

@@ -4,15 +4,18 @@ import os
import random
import string
import sys
from threading import Thread
import traceback
from multiprocessing import Process  # Use multiprocessing instead of threading

# Define the Blueprint for album-related routes
album_bp = Blueprint('album', __name__)

# Function to generate random filenames
def generate_random_filename(length=6):
    chars = string.ascii_lowercase + string.digits
    return ''.join(random.choice(chars) for _ in range(length)) + '.prg'

# File wrapper to flush writes immediately
class FlushingFileWrapper:
    def __init__(self, file):
        self.file = file
@@ -24,13 +27,53 @@ class FlushingFileWrapper:
    def flush(self):
        self.file.flush()

# Define the download task as a top-level function for picklability
def download_task(service, url, main, fallback, prg_path):
    try:
        from routes.utils.album import download_album
        with open(prg_path, 'w') as f:
            flushing_file = FlushingFileWrapper(f)
            original_stdout = sys.stdout
            sys.stdout = flushing_file  # Redirect stdout to the file
            try:
                # Execute the download process
                download_album(
                    service=service,
                    url=url,
                    main=main,
                    fallback=fallback
                )
                flushing_file.write(json.dumps({"status": "complete"}) + "\n")
            except Exception as e:
                # Capture exceptions and write to file
                error_data = json.dumps({
                    "status": "error",
                    "message": str(e),
                    "traceback": traceback.format_exc()
                })
                flushing_file.write(error_data + "\n")
            finally:
                sys.stdout = original_stdout  # Restore original stdout
    except Exception as e:
        # Handle exceptions that occur outside the main download process
        with open(prg_path, 'w') as f:
            error_data = json.dumps({
                "status": "error",
                "message": str(e),
                "traceback": traceback.format_exc()
            })
            f.write(error_data + "\n")

# Define the route to handle album download requests
@album_bp.route('/download', methods=['GET'])
def handle_download():
    # Extract query parameters
    service = request.args.get('service')
    url = request.args.get('url')
    main = request.args.get('main')
    fallback = request.args.get('fallback')  # New fallback parameter
    fallback = request.args.get('fallback')  # Optional parameter

    # Validate required parameters
    if not all([service, url, main]):
        return Response(
            json.dumps({"error": "Missing parameters"}),
@@ -38,48 +81,19 @@ def handle_download():
            mimetype='application/json'
        )

    # Generate a unique file for storing the download progress
    filename = generate_random_filename()
    prg_dir = './prgs'
    os.makedirs(prg_dir, exist_ok=True)
    prg_path = os.path.join(prg_dir, filename)

    def download_task():
        try:
            from routes.utils.album import download_album
            with open(prg_path, 'w') as f:
                flushing_file = FlushingFileWrapper(f)
                original_stdout = sys.stdout
                sys.stdout = flushing_file
                try:
                    # Pass fallback parameter to download_album
                    download_album(
                        service=service,
                        url=url,
                        main=main,
                        fallback=fallback
                    )
                    flushing_file.write(json.dumps({"status": "complete"}) + "\n")
                except Exception as e:
                    error_data = json.dumps({
                        "status": "error",
                        "message": str(e),
                        "traceback": traceback.format_exc()
                    })
                    flushing_file.write(error_data + "\n")
                finally:
                    sys.stdout = original_stdout
        except Exception as e:
            with open(prg_path, 'w') as f:
                error_data = json.dumps({
                    "status": "error",
                    "message": str(e),
                    "traceback": traceback.format_exc()
                })
                f.write(error_data + "\n")

    Thread(target=download_task).start()
    # Start a new process for each download task
    Process(
        target=download_task,
        args=(service, url, main, fallback, prg_path)
    ).start()

    # Return the filename to the client for progress tracking
    return Response(
        json.dumps({"prg_file": filename}),
        status=202,
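Note on the switch from Thread to Process in this file: multiprocessing pickles the target callable, which is why the task moved from a closure inside handle_download to a module-level function that receives everything it needs as arguments. A minimal standalone sketch of that constraint (not part of this commit; the file name 'demo.prg' is illustrative) — it works because task is defined at module level, and would fail under the "spawn" start method if task were nested inside another function:

import json
from multiprocessing import Process

def task(path, status):  # module-level, so multiprocessing can pickle it
    with open(path, 'w') as f:
        f.write(json.dumps({"status": status}) + "\n")

if __name__ == '__main__':
    p = Process(target=task, args=('demo.prg', 'complete'))  # illustrative path
    p.start()
    p.join()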

View File

@@ -4,11 +4,13 @@ import os
import random
import string
import sys
from threading import Thread
import traceback
from multiprocessing import Process  # Changed from Thread to Process

playlist_bp = Blueprint('playlist', __name__)

# Removed thread-local stdout setup since we're using process isolation

def generate_random_filename(length=6):
    chars = string.ascii_lowercase + string.digits
    return ''.join(random.choice(chars) for _ in range(length)) + '.prg'
@@ -24,14 +26,49 @@ class FlushingFileWrapper:
    def flush(self):
        self.file.flush()

# Moved download_task to top-level for picklability
def download_task(service, url, main, fallback, prg_path):
    try:
        from routes.utils.playlist import download_playlist
        with open(prg_path, 'w') as f:
            flushing_file = FlushingFileWrapper(f)
            original_stdout = sys.stdout
            sys.stdout = flushing_file  # Process-specific stdout
            try:
                download_playlist(
                    service=service,
                    url=url,
                    main=main,
                    fallback=fallback
                )
                flushing_file.write(json.dumps({"status": "complete"}) + "\n")
            except Exception as e:
                error_data = json.dumps({
                    "status": "error",
                    "message": str(e),
                    "traceback": traceback.format_exc()
                })
                flushing_file.write(error_data + "\n")
            finally:
                sys.stdout = original_stdout  # Restore original stdout
    except Exception as e:
        # Handle exceptions outside the main logic
        with open(prg_path, 'w') as f:
            error_data = json.dumps({
                "status": "error",
                "message": str(e),
                "traceback": traceback.format_exc()
            })
            f.write(error_data + "\n")

@playlist_bp.route('/download', methods=['GET'])
def handle_download():
    service = request.args.get('service')
    url = request.args.get('url')
    main = request.args.get('main')  # Changed from 'account'
    fallback = request.args.get('fallback')  # New parameter
    main = request.args.get('main')
    fallback = request.args.get('fallback')

    # Validate required parameters (main instead of account)
    if not all([service, url, main]):
        return Response(
            json.dumps({"error": "Missing parameters"}),
@@ -44,42 +81,11 @@ def handle_download():
    os.makedirs(prg_dir, exist_ok=True)
    prg_path = os.path.join(prg_dir, filename)

    def download_task():
        try:
            from routes.utils.playlist import download_playlist
            with open(prg_path, 'w') as f:
                flushing_file = FlushingFileWrapper(f)
                original_stdout = sys.stdout
                sys.stdout = flushing_file
                try:
                    # Updated call with main/fallback parameters
                    download_playlist(
                        service=service,
                        url=url,
                        main=main,
                        fallback=fallback
                    )
                    flushing_file.write(json.dumps({"status": "complete"}) + "\n")
                except Exception as e:
                    error_data = json.dumps({
                        "status": "error",
                        "message": str(e),
                        "traceback": traceback.format_exc()
                    })
                    flushing_file.write(error_data + "\n")
                finally:
                    sys.stdout = original_stdout
        except Exception as e:
            with open(prg_path, 'w') as f:
                error_data = json.dumps({
                    "status": "error",
                    "message": str(e),
                    "traceback": traceback.format_exc()
                })
                f.write(error_data + "\n")

    Thread(target=download_task).start()
    # Start a new process with required arguments
    Process(
        target=download_task,
        args=(service, url, main, fallback, prg_path)
    ).start()

    return Response(
        json.dumps({"prg_file": filename}),

View File

@@ -1,23 +1,30 @@
from flask import Blueprint, send_from_directory, abort
from flask import Blueprint, abort
import os

prgs_bp = Blueprint('prgs', __name__, url_prefix='/api/prgs')

# Base directory for .prg files
# Base directory for files
PRGS_DIR = os.path.join(os.getcwd(), 'prgs')

@prgs_bp.route('/<filename>', methods=['GET'])
def get_prg_file(filename):
    """
    Serve a .prg file from the prgs directory.
    Return the last line of the specified file from the prgs directory.
    """
    try:
        # Security check to prevent path traversal attacks
        if not filename.endswith('.prg') or '..' in filename or '/' in filename:
        if '..' in filename or '/' in filename:
            abort(400, "Invalid file request")

        # Ensure the file exists in the directory
        return send_from_directory(PRGS_DIR, filename)
        filepath = os.path.join(PRGS_DIR, filename)

        # Read the last line of the file
        with open(filepath, 'r') as f:
            content = f.read()
            lines = content.splitlines()
            last_line = lines[-1] if lines else ''

        return last_line
    except FileNotFoundError:
        abort(404, "File not found")
    except Exception as e:
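With this change the endpoint returns only the most recent status line, so a client can poll it and parse a single JSON object per request instead of re-reading the whole progress log. A minimal polling sketch (assumptions: the app is reachable at http://localhost:5000, the requests package is installed, and prg_file is the name returned by the /download endpoints):

import json
import time
import requests

def wait_for_download(prg_file, base_url='http://localhost:5000'):
    # Poll the progress endpoint until a terminal status line appears.
    while True:
        resp = requests.get(f'{base_url}/api/prgs/{prg_file}')
        resp.raise_for_status()
        last_line = resp.text.strip()
        if last_line:
            status = json.loads(last_line)
            if status.get('status') in ('complete', 'error'):
                return status
        time.sleep(2)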

View File

@@ -4,8 +4,8 @@ import os
import random
import string
import sys
from threading import Thread
import traceback
from multiprocessing import Process  # Changed from threading import Thread

track_bp = Blueprint('track', __name__)
@@ -24,12 +24,47 @@ class FlushingFileWrapper:
    def flush(self):
        self.file.flush()

# Moved download_task to top-level for multiprocessing compatibility
def download_task(service, url, main, fallback, prg_path):
    try:
        from routes.utils.track import download_track
        with open(prg_path, 'w') as f:
            flushing_file = FlushingFileWrapper(f)
            original_stdout = sys.stdout
            sys.stdout = flushing_file  # Redirect stdout per process
            try:
                download_track(
                    service=service,
                    url=url,
                    main=main,
                    fallback=fallback
                )
                flushing_file.write(json.dumps({"status": "complete"}) + "\n")
            except Exception as e:
                error_data = json.dumps({
                    "status": "error",
                    "message": str(e),
                    "traceback": traceback.format_exc()
                })
                flushing_file.write(error_data + "\n")
            finally:
                sys.stdout = original_stdout  # Restore original stdout
    except Exception as e:
        with open(prg_path, 'w') as f:
            error_data = json.dumps({
                "status": "error",
                "message": str(e),
                "traceback": traceback.format_exc()
            })
            f.write(error_data + "\n")

@track_bp.route('/download', methods=['GET'])
def handle_download():
    service = request.args.get('service')
    url = request.args.get('url')
    main = request.args.get('main')
    fallback = request.args.get('fallback')  # New fallback parameter
    fallback = request.args.get('fallback')

    if not all([service, url, main]):
        return Response(
@@ -43,42 +78,11 @@ def handle_download():
    os.makedirs(prg_dir, exist_ok=True)
    prg_path = os.path.join(prg_dir, filename)

    def download_task():
        try:
            from routes.utils.track import download_track
            with open(prg_path, 'w') as f:
                flushing_file = FlushingFileWrapper(f)
                original_stdout = sys.stdout
                sys.stdout = flushing_file
                try:
                    # Pass all parameters including fallback
                    download_track(
                        service=service,
                        url=url,
                        main=main,
                        fallback=fallback
                    )
                    flushing_file.write(json.dumps({"status": "complete"}) + "\n")
                except Exception as e:
                    error_data = json.dumps({
                        "status": "error",
                        "message": str(e),
                        "traceback": traceback.format_exc()
                    })
                    flushing_file.write(error_data + "\n")
                finally:
                    sys.stdout = original_stdout
        except Exception as e:
            with open(prg_path, 'w') as f:
                error_data = json.dumps({
                    "status": "error",
                    "message": str(e),
                    "traceback": traceback.format_exc()
                })
                f.write(error_data + "\n")

    Thread(target=download_task).start()
    # Start a new process with required arguments
    Process(
        target=download_task,
        args=(service, url, main, fallback, prg_path)
    ).start()

    return Response(
        json.dumps({"prg_file": filename}),

View File

@@ -33,8 +33,6 @@ def download_album(service, url, main, fallback=None):
            method_save=1
        )
    except Exception as e:
        # If the first attempt fails, use the fallback Spotify main
        print(f"Failed to download via Deezer fallback: {e}. Trying Spotify fallback main.")
        # Load fallback Spotify credentials and attempt download
        try:
            spo_creds_dir = os.path.join('./creds/spotify', fallback)
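The surrounding hunk implements a main/fallback account flow: the download is attempted with the primary credentials and, if that raises, retried with the fallback credentials. A generic sketch of that shape (illustrative only; download_fn and account are hypothetical names, not the project's actual parameters):

def run_with_fallback(download_fn, main, fallback, **kwargs):
    # Try the primary account first; retry once with the fallback account.
    try:
        return download_fn(account=main, **kwargs)
    except Exception as exc:
        if not fallback:
            raise
        print(f"Primary account failed ({exc}); retrying with fallback.")
        return download_fn(account=fallback, **kwargs)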

View File

@@ -420,10 +420,6 @@ input:checked + .slider:before {
    transition: transform 0.2s ease;
}

.queue-item:hover {
    transform: translateX(5px);
}

.queue-item .title {
    font-weight: 500;
    margin-bottom: 4px;
@@ -953,17 +949,11 @@ html {
/* Queue Item Animations */
.queue-item {
    transition:
        transform 0.2s ease,
        opacity 0.3s ease,
        background-color 0.3s ease;
    opacity: 1;
}

.queue-item:not(.active):hover {
    transform: translateX(5px);
    background-color: #333;
}

.queue-item.entering {
    opacity: 0;
    transform: translateX(20px);

View File

@@ -50,6 +50,7 @@ document.addEventListener('DOMContentLoaded', () => {
    settingsIcon.addEventListener('click', () => {
        sidebar.classList.add('active');
        loadCredentials(currentService);
        updateFormFields();
    });

    closeSidebar.addEventListener('click', () => {
@@ -166,6 +167,30 @@ function performSearch() {
        return;
    }

    // Handle direct Spotify URLs
    if (isSpotifyUrl(query)) {
        try {
            const type = getResourceTypeFromUrl(query);
            if (!['track', 'album', 'playlist'].includes(type)) {
                throw new Error('Unsupported URL type');
            }

            const item = {
                name: `Direct URL (${type})`,
                external_urls: { spotify: query }
            };

            startDownload(query, type, item);
            document.getElementById('searchInput').value = '';
            return;
        } catch (error) {
            showError(`Invalid Spotify URL: ${error.message}`);
            return;
        }
    }

    // Existing search functionality
    resultsContainer.innerHTML = '<div class="loading">Searching...</div>';
    fetch(`/api/search?q=${encodeURIComponent(query)}&search_type=${searchType}&limit=30`)
@@ -183,7 +208,6 @@ function performSearch() {
        const cards = resultsContainer.querySelectorAll('.result-card');
        cards.forEach((card, index) => {
            // Add download handler
            card.querySelector('.download-btn').addEventListener('click', async (e) => {
                e.stopPropagation();
                const url = e.target.dataset.url;
@@ -196,6 +220,7 @@ function performSearch() {
        .catch(error => showError(error.message));
}

function createResultCard(item, type) {
    let imageUrl, title, subtitle, details;
@@ -262,151 +287,149 @@ async function startDownload(url, type, item) {
        const data = await response.json();
        addToQueue(item, type, data.prg_file);
        startMonitoringQueue();
    } catch (error) {
        showError('Download failed: ' + error.message);
    }
}

function addToQueue(item, type, prgFile) {
    const queueId = Date.now().toString();
    downloadQueue[queueId] = {
    const queueId = Date.now().toString() + Math.random().toString(36).substr(2, 9);
    const entry = {
        item,
        type,
        prgFile,
        element: createQueueItem(item, type, prgFile),
        lastLineCount: 0,
        element: createQueueItem(item, type, prgFile, queueId),
        lastStatus: null,
        lastUpdated: Date.now(),
        hasEnded: false
        hasEnded: false,
        intervalId: null,
        uniqueId: queueId // Add unique identifier
    };
    document.getElementById('queueItems').appendChild(downloadQueue[queueId].element);
    downloadQueue[queueId] = entry;
    document.getElementById('queueItems').appendChild(entry.element);
    startEntryMonitoring(queueId);
}
async function startEntryMonitoring(queueId) {
    const entry = downloadQueue[queueId];
    if (!entry || entry.hasEnded) return;

function createQueueItem(item, type, prgFile) {
    entry.intervalId = setInterval(async () => {
        const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`);

        if (entry.hasEnded) {
            clearInterval(entry.intervalId);
            return;
        }

        try {
            const response = await fetch(`/api/prgs/${entry.prgFile}`);
            const lastLine = (await response.text()).trim();

            // Handle empty response
            if (!lastLine) {
                handleInactivity(entry, queueId, logElement);
                return;
            }

            try {
                const data = JSON.parse(lastLine);

                // Check for status changes
                if (JSON.stringify(entry.lastStatus) === JSON.stringify(data)) {
                    handleInactivity(entry, queueId, logElement);
                    return;
                }

                // Update entry state
                entry.lastStatus = data;
                entry.lastUpdated = Date.now();
                entry.status = data.status;
                logElement.textContent = getStatusMessage(data);

                // Handle terminal states
                if (data.status === 'error' || data.status === 'complete') {
                    handleTerminalState(entry, queueId, data);
                }
            } catch (e) {
                console.error('Invalid PRG line:', lastLine);
                logElement.textContent = 'Error parsing status update';
                handleTerminalState(entry, queueId, {
                    status: 'error',
                    message: 'Invalid status format'
                });
            }
        } catch (error) {
            console.error('Status check failed:', error);
            handleTerminalState(entry, queueId, {
                status: 'error',
                message: 'Status check error'
            });
        }
    }, 2000);
}

function handleInactivity(entry, queueId, logElement) {
    if (Date.now() - entry.lastUpdated > 180000) {
        logElement.textContent = 'Download timed out (3 minutes inactivity)';
        handleTerminalState(entry, queueId, { status: 'timeout' });
    }
}

function handleTerminalState(entry, queueId, data) {
    const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`);
    entry.hasEnded = true;
    entry.status = data.status;

    if (data.status === 'error') {
        logElement.innerHTML = `
            <span class="error-status">${getStatusMessage(data)}</span>
            <button class="retry-btn">Retry</button>
            <button class="close-btn">×</button>
        `;
        logElement.querySelector('.retry-btn').addEventListener('click', () => {
            startDownload(entry.item.external_urls.spotify, entry.type, entry.item);
            cleanupEntry(queueId);
        });
        logElement.querySelector('.close-btn').addEventListener('click', () => {
            cleanupEntry(queueId);
        });
        entry.element.classList.add('failed');
    }

    if (data.status === 'complete') {
        setTimeout(() => cleanupEntry(queueId), 5000);
    }

    clearInterval(entry.intervalId);
}

function cleanupEntry(queueId) {
    const entry = downloadQueue[queueId];
    if (entry) {
        clearInterval(entry.intervalId);
        entry.element.remove();
        delete downloadQueue[queueId];
    }
}

function createQueueItem(item, type, prgFile, queueId) {
    const div = document.createElement('div');
    div.className = 'queue-item';
    div.innerHTML = `
        <div class="title">${item.name}</div>
        <div class="type">${type.charAt(0).toUpperCase() + type.slice(1)}</div>
        <div class="log" id="log-${prgFile}">Initializing download...</div>
        <div class="log" id="log-${queueId}-${prgFile}">Initializing download...</div>
    `;
    return div;
}
function startMonitoringQueue() {
    if (!prgInterval) {
        prgInterval = setInterval(async () => {
            const queueEntries = Object.entries(downloadQueue);
            if (queueEntries.length === 0) {
                clearInterval(prgInterval);
                prgInterval = null;
                return;
            }

            let activeEntries = 0;
            for (const [id, entry] of queueEntries) {
                if (entry.hasEnded) continue;
                activeEntries++;

                try {
                    const response = await fetch(`/api/prgs/${entry.prgFile}`);
                    const log = await response.text();
                    const lines = log.split('\n').filter(line => line.trim() !== '');
                    const logElement = document.getElementById(`log-${entry.prgFile}`);

                    // Process new lines
                    if (lines.length > entry.lastLineCount) {
                        const newLines = lines.slice(entry.lastLineCount);
                        entry.lastLineCount = lines.length;
                        entry.lastUpdated = Date.now();

                        for (const line of newLines) {
                            try {
                                const data = JSON.parse(line);

                                // Store status in queue entry
                                if (data.status === 'error' || data.status === 'complete') {
                                    entry.status = data.status;
                                }

                                // Handle error status with retry button
                                if (data.status === 'error') {
                                    logElement.innerHTML = `
                                        <span class="error-status">${getStatusMessage(data)}</span>
                                        <button class="retry-btn">Retry</button>
                                        <button class="close-btn">×</button>
                                    `;

                                    // Retry handler
                                    logElement.querySelector('.retry-btn').addEventListener('click', (e) => {
                                        e.stopPropagation();
                                        startDownload(entry.item.external_urls.spotify, entry.type, entry.item);
                                        delete downloadQueue[id];
                                        entry.element.remove();
                                    });

                                    // Close handler
                                    logElement.querySelector('.close-btn').addEventListener('click', (e) => {
                                        e.stopPropagation();
                                        delete downloadQueue[id];
                                        entry.element.remove();
                                    });

                                    entry.element.classList.add('failed');
                                    entry.hasEnded = true;
                                } else {
                                    logElement.textContent = getStatusMessage(data);
                                }

                                // Handle terminal statuses
                                if (data.status === 'error' || data.status === 'complete') {
                                    entry.hasEnded = true;
                                    entry.status = data.status;
                                    if (data.status === 'error' && data.traceback) {
                                        console.error('Server error:', data.traceback);
                                    }
                                    break;
                                }
                            } catch (e) {
                                console.error('Invalid PRG line:', line);
                            }
                        }
                    }

                    // Handle timeout
                    if (Date.now() - entry.lastUpdated > 180000) {
                        logElement.textContent = 'Download timed out (3 minutes inactivity)';
                        entry.hasEnded = true;
                        entry.status = 'timeout';
                    }

                    // Cleanup completed entries only
                    if (entry.hasEnded && entry.status === 'complete') {
                        setTimeout(() => {
                            delete downloadQueue[id];
                            entry.element.remove();
                        }, 5000);
                    }
                } catch (error) {
                    console.error('Status check failed:', error);
                    entry.hasEnded = true;
                    entry.status = 'error';
                    document.getElementById(`log-${entry.prgFile}`).textContent = 'Status check error';
                }
            }

            // Stop interval if no active entries
            if (activeEntries === 0) {
                clearInterval(prgInterval);
                prgInterval = null;
            }
        }, 2000);
    }
}

async function loadCredentials(service) {
    try {
@@ -627,3 +650,12 @@ function loadConfig() {
    const fallbackToggle = document.getElementById('fallbackToggle');
    if (fallbackToggle) fallbackToggle.checked = !!saved.fallback;
}

function isSpotifyUrl(url) {
    return url.startsWith('https://open.spotify.com/');
}

function getResourceTypeFromUrl(url) {
    const pathParts = new URL(url).pathname.split('/');
    return pathParts[1]; // Returns 'track', 'album', or 'playlist'
}