Add Redis AUTH support, remove legacy .prg file system, and fix queue UI cancellation/progress handling

This commit is contained in:
architect.in.git
2025-04-23 12:47:00 -06:00
parent 948e424fde
commit af2401dd39
11 changed files with 114 additions and 217 deletions

13
.env Normal file
View File

@@ -0,0 +1,13 @@
# Docker Compose environment variables
# Redis connection (external or internal)
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_DB=0
REDIS_PASSWORD=CHANGE_ME
EXPLICIT_FILTER=false # Set to true to filter out explicit content
PUID=1000 # User ID for the container
PGID=1000 # Group ID for the container
UMASK=0022 # Optional: Sets the default file permissions for newly created files within the container.

16
app.py
View File

@@ -17,6 +17,7 @@ import atexit
import sys
import redis
import socket
from urllib.parse import urlparse
# Import Celery configuration and manager
from routes.utils.celery_tasks import celery_app
@@ -85,11 +86,16 @@ def check_redis_connection():
redis_port = 6379 # default
# Parse from REDIS_URL if possible
if REDIS_URL and "://" in REDIS_URL:
parts = REDIS_URL.split("://")[1].split(":")
if len(parts) >= 2:
redis_host = parts[0]
redis_port = int(parts[1].split("/")[0])
if REDIS_URL:
# parse hostname and port (handles optional auth)
try:
parsed = urlparse(REDIS_URL)
if parsed.hostname:
redis_host = parsed.hostname
if parsed.port:
redis_port = parsed.port
except Exception:
pass
# Log Redis connection details
logging.info(f"Checking Redis connection to {redis_host}:{redis_port}")

View File

@@ -1 +1 @@
docker buildx build --push --platform linux/amd64,linux/arm64 --build-arg CACHE_BUST=$(date +%s) --tag cooldockerizer93/spotizerr:latest .
docker buildx build --push --load --platform linux/amd64,linux/arm64 --build-arg CACHE_BUST=$(date +%s) --tag cooldockerizer93/spotizerr:latest .

View File

@@ -9,19 +9,20 @@ services:
- ./logs:/app/logs # <-- Volume for persistent logs
ports:
- 7171:7171
image: cooldockerizer93/spotizerr
image: cooldockerizer93/spotizerr:dev
container_name: spotizerr-app
restart: unless-stopped
environment:
- PUID=1000 # Replace with your desired user ID | Remove both if you want to run as root (not recommended, might result in unreadable files)
- PGID=1000 # Replace with your desired group ID | The user must have write permissions in the volume mapped to /app/downloads
- UMASK=0022 # Optional: Sets the default file permissions for newly created files within the container.
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_DB=0
- REDIS_URL=redis://redis:6379/0
- REDIS_BACKEND=redis://redis:6379/0
- EXPLICIT_FILTER=false # Set to true to filter out explicit content
- PUID=${PUID} # Replace with your desired user ID | Remove both if you want to run as root (not recommended, might result in unreadable files)
- PGID=${PGID} # Replace with your desired group ID | The user must have write permissions in the volume mapped to /app/downloads
- UMASK=${UMASK} # Optional: Sets the default file permissions for newly created files within the container.
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_DB=${REDIS_DB}
- REDIS_PASSWORD=${REDIS_PASSWORD} # Optional, Redis AUTH password. Leave empty if not using authentication
- REDIS_URL=redis://:${REDIS_PASSWORD}@${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}
- REDIS_BACKEND=redis://:${REDIS_PASSWORD}@${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}
- EXPLICIT_FILTER=${EXPLICIT_FILTER} # Set to true to filter out explicit content
depends_on:
- redis
@@ -29,9 +30,11 @@ services:
image: redis:alpine
container_name: spotizerr-redis
restart: unless-stopped
environment:
- REDIS_PASSWORD=${REDIS_PASSWORD}
volumes:
- redis-data:/data
command: redis-server --appendonly yes
command: redis-server --requirepass ${REDIS_PASSWORD} --appendonly yes
volumes:
redis-data:

View File

@@ -52,7 +52,10 @@ else
# Ensure proper permissions for all app directories
echo "Setting permissions for /app directories..."
chown -R "${USER_NAME}:${GROUP_NAME}" /app/downloads /app/config /app/creds /app/logs || true
chown -R "${USER_NAME}:${GROUP_NAME}" /app/downloads /app/config /app/creds /app/logs /app/cache || true
# Ensure Spotipy cache file exists and is writable
touch /app/.cache || true
chown "${USER_NAME}:${GROUP_NAME}" /app/.cache || true
# Run as specified user
echo "Starting application as ${USER_NAME}..."

View File

@@ -20,8 +20,7 @@ logger = logging.getLogger(__name__)
prgs_bp = Blueprint('prgs', __name__, url_prefix='/api/prgs')
# The old path for PRG files (keeping for backward compatibility during transition)
PRGS_DIR = os.path.join(os.getcwd(), 'prgs')
# (Old .prg file system removed. Using new task system only.)
@prgs_bp.route('/<task_id>', methods=['GET'])
def get_prg_file(task_id):
@@ -35,116 +34,21 @@ def get_prg_file(task_id):
Args:
task_id: Either a task UUID from Celery or a PRG filename from the old system
"""
try:
# First check if this is a task ID in the new system
task_info = get_task_info(task_id)
if task_info:
# This is a task ID in the new system
original_request = task_info.get("original_request", {})
# Get the latest status update for this task
last_status = get_last_task_status(task_id)
logger.debug(f"API: Got last_status for {task_id}: {json.dumps(last_status) if last_status else None}")
# Get all status updates for debugging
all_statuses = get_task_status(task_id)
status_count = len(all_statuses)
logger.debug(f"API: Task {task_id} has {status_count} status updates")
# Prepare the simplified response with just the requested info
response = {
"original_url": original_request.get("original_url", ""),
"last_line": last_status,
"timestamp": time.time(),
"task_id": task_id,
"status_count": status_count
}
return jsonify(response)
# If not found in new system, try the old PRG file system
# Security check to prevent path traversal attacks.
if '..' in task_id or '/' in task_id:
abort(400, "Invalid file request")
filepath = os.path.join(PRGS_DIR, task_id)
with open(filepath, 'r') as f:
content = f.read()
lines = content.splitlines()
# If the file is empty, return default values with simplified format.
if not lines:
return jsonify({
"last_line": None,
"timestamp": time.time(),
"task_id": task_id,
"status_count": 0
})
# Attempt to extract the original request from the first line.
original_request = None
display_title = ""
display_type = ""
display_artist = ""
try:
first_line = json.loads(lines[0])
if isinstance(first_line, dict):
if "original_request" in first_line:
original_request = first_line["original_request"]
else:
# The first line might be the original request itself
original_request = first_line
# Extract display information from the original request
if original_request:
display_title = original_request.get("display_title", original_request.get("name", ""))
display_type = original_request.get("display_type", original_request.get("type", ""))
display_artist = original_request.get("display_artist", original_request.get("artist", ""))
except Exception as e:
print(f"Error parsing first line of PRG file: {e}")
original_request = None
# For resource type and name, use the second line if available.
resource_type = ""
resource_name = ""
resource_artist = ""
if len(lines) > 1:
try:
second_line = json.loads(lines[1])
# Directly extract 'type' and 'name' from the JSON
resource_type = second_line.get("type", "")
resource_name = second_line.get("name", "")
resource_artist = second_line.get("artist", "")
except Exception:
resource_type = ""
resource_name = ""
resource_artist = ""
# Get the last line from the file.
last_line_raw = lines[-1]
try:
last_line_parsed = json.loads(last_line_raw)
except Exception:
last_line_parsed = last_line_raw # Fallback to raw string if JSON parsing fails.
# Calculate status_count for old PRG files (number of lines in the file)
status_count = len(lines)
# Return simplified response format
return jsonify({
"original_url": original_request.get("original_url", "") if original_request else "",
"last_line": last_line_parsed,
"timestamp": time.time(),
"task_id": task_id,
"status_count": status_count
})
except FileNotFoundError:
abort(404, "Task or file not found")
except Exception as e:
abort(500, f"An error occurred: {e}")
# Only support new task IDs
task_info = get_task_info(task_id)
if not task_info:
abort(404, "Task not found")
original_request = task_info.get("original_request", {})
last_status = get_last_task_status(task_id)
status_count = len(get_task_status(task_id))
response = {
"original_url": original_request.get("original_url", ""),
"last_line": last_status,
"timestamp": time.time(),
"task_id": task_id,
"status_count": status_count
}
return jsonify(response)
@prgs_bp.route('/delete/<task_id>', methods=['DELETE'])
@@ -156,42 +60,15 @@ def delete_prg_file(task_id):
Args:
task_id: Either a task UUID from Celery or a PRG filename from the old system
"""
try:
# First try to delete from Redis if it's a task ID
task_info = get_task_info(task_id)
if task_info:
# This is a task ID in the new system - we should cancel it first
# if it's still running, then clear its data from Redis
cancel_result = cancel_task(task_id)
# Use Redis connection to delete the task data
from routes.utils.celery_tasks import redis_client
# Delete task info and status
redis_client.delete(f"task:{task_id}:info")
redis_client.delete(f"task:{task_id}:status")
return {'message': f'Task {task_id} deleted successfully'}, 200
# If not found in Redis, try the old PRG file system
# Security checks to prevent path traversal and ensure correct file type.
if '..' in task_id or '/' in task_id:
abort(400, "Invalid file request")
if not task_id.endswith('.prg'):
abort(400, "Only .prg files can be deleted")
filepath = os.path.join(PRGS_DIR, task_id)
if not os.path.isfile(filepath):
abort(404, "File not found")
os.remove(filepath)
return {'message': f'File {task_id} deleted successfully'}, 200
except FileNotFoundError:
abort(404, "Task or file not found")
except Exception as e:
abort(500, f"An error occurred: {e}")
# Only support new task IDs
task_info = get_task_info(task_id)
if not task_info:
abort(404, "Task not found")
cancel_task(task_id)
from routes.utils.celery_tasks import redis_client
redis_client.delete(f"task:{task_id}:info")
redis_client.delete(f"task:{task_id}:status")
return {'message': f'Task {task_id} deleted successfully'}, 200
@prgs_bp.route('/list', methods=['GET'])
@@ -200,25 +77,10 @@ def list_prg_files():
Retrieve a list of all tasks in the system.
Combines results from both the old PRG file system and the new task ID based system.
"""
try:
# Get tasks from the new system
tasks = get_all_tasks()
task_ids = [task["task_id"] for task in tasks]
# Get PRG files from the old system
prg_files = []
if os.path.isdir(PRGS_DIR):
with os.scandir(PRGS_DIR) as entries:
for entry in entries:
if entry.is_file() and entry.name.endswith('.prg'):
prg_files.append(entry.name)
# Combine both lists
all_ids = task_ids + prg_files
return jsonify(all_ids)
except Exception as e:
abort(500, f"An error occurred: {e}")
# List only new system tasks
tasks = get_all_tasks()
task_ids = [task["task_id"] for task in tasks]
return jsonify(task_ids)
@prgs_bp.route('/retry/<task_id>', methods=['POST'])

View File

@@ -3,6 +3,7 @@ import traceback
from pathlib import Path
import os
import logging
from flask import Blueprint, Response, request, url_for
from routes.utils.celery_queue_manager import download_queue_manager, get_config_params
from routes.utils.get_info import get_spotify_info
@@ -167,6 +168,9 @@ def download_artist_albums(url, album_type="album,single,compilation", request_a
"parent_request_type": "artist"
}
# Include original download URL for this album task
album_request_args["original_url"] = url_for('album.handle_download', url=album_url, _external=True)
# Create task for this album
task_data = {
"download_type": "album",

View File

@@ -10,7 +10,12 @@ logger = logging.getLogger(__name__)
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = os.getenv('REDIS_PORT', '6379')
REDIS_DB = os.getenv('REDIS_DB', '0')
REDIS_URL = os.getenv('REDIS_URL', f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
# Optional Redis password
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
# Build default URL with password if provided
_password_part = f":{REDIS_PASSWORD}@" if REDIS_PASSWORD else ""
default_redis_url = f"redis://{_password_part}{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
REDIS_URL = os.getenv('REDIS_URL', default_redis_url)
REDIS_BACKEND = os.getenv('REDIS_BACKEND', REDIS_URL)
# Log Redis connection details

View File

@@ -12,7 +12,7 @@ from celery.exceptions import Retry
logger = logging.getLogger(__name__)
# Setup Redis and Celery
from routes.utils.celery_config import REDIS_URL, REDIS_BACKEND, get_config_params
from routes.utils.celery_config import REDIS_URL, REDIS_BACKEND, REDIS_PASSWORD, get_config_params
# Initialize Celery app
celery_app = Celery('download_tasks',

View File

@@ -42,6 +42,18 @@ document.addEventListener('DOMContentLoaded', function() {
});
}
// Restore last search type if no URL override
const savedType = localStorage.getItem('lastSearchType');
if (savedType && ['track','album','playlist','artist'].includes(savedType)) {
searchType.value = savedType;
}
// Save last selection on change
if (searchType) {
searchType.addEventListener('change', () => {
localStorage.setItem('lastSearchType', searchType.value);
});
}
// Check for URL parameters
const urlParams = new URLSearchParams(window.location.search);
const query = urlParams.get('q');
@@ -341,7 +353,8 @@ document.addEventListener('DOMContentLoaded', function() {
* Extracts details from a Spotify URL
*/
function getSpotifyResourceDetails(url) {
const regex = /spotify\.com\/(track|album|playlist|artist)\/([a-zA-Z0-9]+)/;
// Allow optional path segments (e.g. intl-fr) before resource type
const regex = /spotify\.com\/(?:[^\/]+\/)??(track|album|playlist|artist)\/([a-zA-Z0-9]+)/i;
const match = url.match(regex);
if (match) {

View File

@@ -131,13 +131,14 @@ class DownloadQueue {
fetch(`/api/${entry.type}/download/cancel?prg_file=${entry.prgFile}`)
.then(response => response.json())
.then(data => {
if (data.status === "cancel") {
// API returns status 'cancelled' when cancellation succeeds
if (data.status === "cancelled" || data.status === "cancel") {
entry.hasEnded = true;
if (entry.intervalId) {
clearInterval(entry.intervalId);
entry.intervalId = null;
}
// Clean up immediately
// Remove the entry as soon as the API confirms cancellation
this.cleanupEntry(queueId);
}
})
@@ -624,7 +625,8 @@ createQueueItem(item, type, prgFile, queueId) {
// First cancel the download
const response = await fetch(`/api/${type}/download/cancel?prg_file=${prg}`);
const data = await response.json();
if (data.status === "cancel") {
// API returns status 'cancelled' when cancellation succeeds
if (data.status === "cancelled" || data.status === "cancel") {
if (entry) {
entry.hasEnded = true;
@@ -641,14 +643,6 @@ createQueueItem(item, type, prgFile, queueId) {
this.queueCache[prg] = { status: "cancelled" };
localStorage.setItem("downloadQueueCache", JSON.stringify(this.queueCache));
// Immediately delete from server
try {
await fetch(`/api/prgs/delete/${prg}`, { method: 'DELETE' });
console.log(`Deleted cancelled task from server: ${prg}`);
} catch (deleteError) {
console.error('Error deleting cancelled task:', deleteError);
}
// Immediately remove the item from the UI
this.cleanupEntry(queueid);
}
@@ -1639,9 +1633,9 @@ createQueueItem(item, type, prgFile, queueId) {
} else if (entry.parentInfo && !['done', 'complete', 'error', 'skipped'].includes(prgData.last_line.status)) {
// Show parent info for non-terminal states
if (entry.parentInfo.type === 'album') {
logElement.textContent = `From album: ${entry.parentInfo.title}`;
logElement.textContent = `From album: "${entry.parentInfo.title}"`;
} else if (entry.parentInfo.type === 'playlist') {
logElement.textContent = `From playlist: ${entry.parentInfo.name} by ${entry.parentInfo.owner}`;
logElement.textContent = `From playlist: "${entry.parentInfo.name}" by ${entry.parentInfo.owner}`;
}
}
}
@@ -2281,19 +2275,17 @@ createQueueItem(item, type, prgFile, queueId) {
// Calculate and update the overall progress bar
if (totalTracks > 0) {
let overallProgress = 0;
if (statusData.status === 'real-time' && trackProgress !== undefined) {
// Use the formula: ((current_track-1)/(total_tracks))+(1/total_tracks*progress)
// Always compute overall based on trackProgress if available, using album/playlist real-time formula
if (trackProgress !== undefined) {
const completedTracksProgress = (currentTrack - 1) / totalTracks;
const currentTrackContribution = (1 / totalTracks) * (trackProgress / 100);
overallProgress = (completedTracksProgress + currentTrackContribution) * 100;
console.log(`Real-time overall progress: ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks}, Progress: ${trackProgress}%)`);
console.log(`Overall progress: ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks}, Progress: ${trackProgress}%)`);
} else {
// Standard progress calculation based on current track position
overallProgress = (currentTrack / totalTracks) * 100;
console.log(`Standard overall progress: ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks})`);
console.log(`Overall progress (non-real-time): ${overallProgress.toFixed(2)}% (Track ${currentTrack}/${totalTracks})`);
}
// Update the progress bar
if (overallProgressBar) {
const safeProgress = Math.max(0, Math.min(100, overallProgress));
@@ -2394,21 +2386,17 @@ createQueueItem(item, type, prgFile, queueId) {
// Calculate overall progress
let overallProgress = 0;
if (totalTracks > 0) {
// If we have an explicit overall_progress, use it
// Use explicit overall_progress if provided
if (statusData.overall_progress !== undefined) {
overallProgress = parseFloat(statusData.overall_progress);
} else if (statusData.status === 'real-time' && trackProgress !== undefined) {
// Calculate based on formula: ((current_track-1)/(total_tracks))+(1/total_tracks*progress)
// This gives a precise calculation for real-time downloads
} else if (trackProgress !== undefined) {
// For both real-time and standard multi-track downloads, use same formula
const completedTracksProgress = (currentTrack - 1) / totalTracks;
const currentTrackContribution = (1 / totalTracks) * (trackProgress / 100);
overallProgress = (completedTracksProgress + currentTrackContribution) * 100;
console.log(`Real-time progress: Track ${currentTrack}/${totalTracks}, Track progress: ${trackProgress}%, Overall: ${overallProgress.toFixed(2)}%`);
console.log(`Progress: Track ${currentTrack}/${totalTracks}, Track progress: ${trackProgress}%, Overall: ${overallProgress.toFixed(2)}%`);
} else {
// For non-real-time downloads, show percentage of tracks downloaded
// Using current_track relative to total_tracks
overallProgress = (currentTrack / totalTracks) * 100;
console.log(`Standard progress: Track ${currentTrack}/${totalTracks}, Overall: ${overallProgress.toFixed(2)}%`);
overallProgress = 0;
}
// Update overall progress bar