lots of shit

architect.in.git
2025-04-23 12:47:00 -06:00
parent 948e424fde
commit af2401dd39
11 changed files with 114 additions and 217 deletions

View File

@@ -20,8 +20,7 @@ logger = logging.getLogger(__name__)
prgs_bp = Blueprint('prgs', __name__, url_prefix='/api/prgs')
# The old path for PRG files (keeping for backward compatibility during transition)
PRGS_DIR = os.path.join(os.getcwd(), 'prgs')
# (Old .prg file system removed. Using new task system only.)
@prgs_bp.route('/<task_id>', methods=['GET'])
def get_prg_file(task_id):
@@ -35,116 +34,21 @@ def get_prg_file(task_id):
    Args:
        task_id: Either a task UUID from Celery or a PRG filename from the old system
    """
    try:
        # First check if this is a task ID in the new system
        task_info = get_task_info(task_id)
        if task_info:
            # This is a task ID in the new system
            original_request = task_info.get("original_request", {})

            # Get the latest status update for this task
            last_status = get_last_task_status(task_id)
            logger.debug(f"API: Got last_status for {task_id}: {json.dumps(last_status) if last_status else None}")

            # Get all status updates for debugging
            all_statuses = get_task_status(task_id)
            status_count = len(all_statuses)
            logger.debug(f"API: Task {task_id} has {status_count} status updates")

            # Prepare the simplified response with just the requested info
            response = {
                "original_url": original_request.get("original_url", ""),
                "last_line": last_status,
                "timestamp": time.time(),
                "task_id": task_id,
                "status_count": status_count
            }
            return jsonify(response)

        # If not found in new system, try the old PRG file system
        # Security check to prevent path traversal attacks.
        if '..' in task_id or '/' in task_id:
            abort(400, "Invalid file request")
        filepath = os.path.join(PRGS_DIR, task_id)

        with open(filepath, 'r') as f:
            content = f.read()
        lines = content.splitlines()

        # If the file is empty, return default values with simplified format.
        if not lines:
            return jsonify({
                "last_line": None,
                "timestamp": time.time(),
                "task_id": task_id,
                "status_count": 0
            })

        # Attempt to extract the original request from the first line.
        original_request = None
        display_title = ""
        display_type = ""
        display_artist = ""
        try:
            first_line = json.loads(lines[0])
            if isinstance(first_line, dict):
                if "original_request" in first_line:
                    original_request = first_line["original_request"]
                else:
                    # The first line might be the original request itself
                    original_request = first_line

                # Extract display information from the original request
                if original_request:
                    display_title = original_request.get("display_title", original_request.get("name", ""))
                    display_type = original_request.get("display_type", original_request.get("type", ""))
                    display_artist = original_request.get("display_artist", original_request.get("artist", ""))
        except Exception as e:
            print(f"Error parsing first line of PRG file: {e}")
            original_request = None

        # For resource type and name, use the second line if available.
        resource_type = ""
        resource_name = ""
        resource_artist = ""
        if len(lines) > 1:
            try:
                second_line = json.loads(lines[1])
                # Directly extract 'type' and 'name' from the JSON
                resource_type = second_line.get("type", "")
                resource_name = second_line.get("name", "")
                resource_artist = second_line.get("artist", "")
            except Exception:
                resource_type = ""
                resource_name = ""
                resource_artist = ""

        # Get the last line from the file.
        last_line_raw = lines[-1]
        try:
            last_line_parsed = json.loads(last_line_raw)
        except Exception:
            last_line_parsed = last_line_raw  # Fallback to raw string if JSON parsing fails.

        # Calculate status_count for old PRG files (number of lines in the file)
        status_count = len(lines)

        # Return simplified response format
        return jsonify({
            "original_url": original_request.get("original_url", "") if original_request else "",
            "last_line": last_line_parsed,
            "timestamp": time.time(),
            "task_id": task_id,
            "status_count": status_count
        })
    except FileNotFoundError:
        abort(404, "Task or file not found")
    except Exception as e:
        abort(500, f"An error occurred: {e}")
    # Only support new task IDs
    task_info = get_task_info(task_id)
    if not task_info:
        abort(404, "Task not found")

    original_request = task_info.get("original_request", {})
    last_status = get_last_task_status(task_id)
    status_count = len(get_task_status(task_id))

    response = {
        "original_url": original_request.get("original_url", ""),
        "last_line": last_status,
        "timestamp": time.time(),
        "task_id": task_id,
        "status_count": status_count
    }
    return jsonify(response)
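For illustration, a minimal client-side sketch of fetching this simplified status payload (hypothetical base URL and task ID; assumes the third-party requests package is available):

import requests  # assumed available

BASE_URL = "http://localhost:5000"           # hypothetical host/port for the Flask app
task_id = "4f0c2a9e-example-task-uuid"       # hypothetical Celery task UUID

resp = requests.get(f"{BASE_URL}/api/prgs/{task_id}")
resp.raise_for_status()
data = resp.json()
# Fields returned by the endpoint above
print(data["original_url"], data["status_count"])
print(data["last_line"])  # latest status update recorded for the task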
@prgs_bp.route('/delete/<task_id>', methods=['DELETE'])
@@ -156,42 +60,15 @@ def delete_prg_file(task_id):
    Args:
        task_id: Either a task UUID from Celery or a PRG filename from the old system
    """
    try:
        # First try to delete from Redis if it's a task ID
        task_info = get_task_info(task_id)
        if task_info:
            # This is a task ID in the new system - we should cancel it first
            # if it's still running, then clear its data from Redis
            cancel_result = cancel_task(task_id)

            # Use Redis connection to delete the task data
            from routes.utils.celery_tasks import redis_client

            # Delete task info and status
            redis_client.delete(f"task:{task_id}:info")
            redis_client.delete(f"task:{task_id}:status")

            return {'message': f'Task {task_id} deleted successfully'}, 200

        # If not found in Redis, try the old PRG file system
        # Security checks to prevent path traversal and ensure correct file type.
        if '..' in task_id or '/' in task_id:
            abort(400, "Invalid file request")
        if not task_id.endswith('.prg'):
            abort(400, "Only .prg files can be deleted")

        filepath = os.path.join(PRGS_DIR, task_id)
        if not os.path.isfile(filepath):
            abort(404, "File not found")

        os.remove(filepath)
        return {'message': f'File {task_id} deleted successfully'}, 200
    except FileNotFoundError:
        abort(404, "Task or file not found")
    except Exception as e:
        abort(500, f"An error occurred: {e}")
    # Only support new task IDs
    task_info = get_task_info(task_id)
    if not task_info:
        abort(404, "Task not found")

    cancel_task(task_id)

    from routes.utils.celery_tasks import redis_client
    redis_client.delete(f"task:{task_id}:info")
    redis_client.delete(f"task:{task_id}:status")

    return {'message': f'Task {task_id} deleted successfully'}, 200
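A matching client-side sketch for cancelling and removing a task through this endpoint (same hypothetical base URL and task ID as above):

import requests  # assumed available

BASE_URL = "http://localhost:5000"      # hypothetical host/port
task_id = "4f0c2a9e-example-task-uuid"  # hypothetical task UUID

resp = requests.delete(f"{BASE_URL}/api/prgs/delete/{task_id}")
if resp.status_code == 200:
    print(resp.json()["message"])   # e.g. "Task <id> deleted successfully"
elif resp.status_code == 404:
    print("Task not found")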
@prgs_bp.route('/list', methods=['GET'])
@@ -200,25 +77,10 @@ def list_prg_files():
    Retrieve a list of all tasks in the system.
    Combines results from both the old PRG file system and the new task ID based system.
    """
    try:
        # Get tasks from the new system
        tasks = get_all_tasks()
        task_ids = [task["task_id"] for task in tasks]

        # Get PRG files from the old system
        prg_files = []
        if os.path.isdir(PRGS_DIR):
            with os.scandir(PRGS_DIR) as entries:
                for entry in entries:
                    if entry.is_file() and entry.name.endswith('.prg'):
                        prg_files.append(entry.name)

        # Combine both lists
        all_ids = task_ids + prg_files
        return jsonify(all_ids)
    except Exception as e:
        abort(500, f"An error occurred: {e}")
    # List only new system tasks
    tasks = get_all_tasks()
    task_ids = [task["task_id"] for task in tasks]
    return jsonify(task_ids)
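And a small sketch that combines the list and status endpoints to report the latest status line for every known task (same hypothetical base URL as above):

import requests  # assumed available

BASE_URL = "http://localhost:5000"  # hypothetical host/port

task_ids = requests.get(f"{BASE_URL}/api/prgs/list").json()
for task_id in task_ids:
    status = requests.get(f"{BASE_URL}/api/prgs/{task_id}").json()
    print(task_id, status["status_count"], status["last_line"])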
@prgs_bp.route('/retry/<task_id>', methods=['POST'])

View File

@@ -3,6 +3,7 @@ import traceback
from pathlib import Path
import os
import logging
from flask import Blueprint, Response, request, url_for
from routes.utils.celery_queue_manager import download_queue_manager, get_config_params
from routes.utils.get_info import get_spotify_info
@@ -167,6 +168,9 @@ def download_artist_albums(url, album_type="album,single,compilation", request_a
"parent_request_type": "artist"
}
# Include original download URL for this album task
album_request_args["original_url"] = url_for('album.handle_download', url=album_url, _external=True)
# Create task for this album
task_data = {
"download_type": "album",

View File

@@ -10,7 +10,12 @@ logger = logging.getLogger(__name__)
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = os.getenv('REDIS_PORT', '6379')
REDIS_DB = os.getenv('REDIS_DB', '0')
REDIS_URL = os.getenv('REDIS_URL', f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
# Optional Redis password
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
# Build default URL with password if provided
_password_part = f":{REDIS_PASSWORD}@" if REDIS_PASSWORD else ""
default_redis_url = f"redis://{_password_part}{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
REDIS_URL = os.getenv('REDIS_URL', default_redis_url)
REDIS_BACKEND = os.getenv('REDIS_BACKEND', REDIS_URL)
# Log Redis connection details
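A quick sketch of the connection URLs this logic produces (values are illustrative only):

# Illustrative defaults (no REDIS_PASSWORD, no explicit REDIS_URL):
#   redis://localhost:6379/0
# With REDIS_PASSWORD="s3cret" and the other defaults:
#   redis://:s3cret@localhost:6379/0
# An explicit REDIS_URL environment variable still overrides the built default.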

View File

@@ -12,7 +12,7 @@ from celery.exceptions import Retry
logger = logging.getLogger(__name__)
# Setup Redis and Celery
from routes.utils.celery_config import REDIS_URL, REDIS_BACKEND, get_config_params
from routes.utils.celery_config import REDIS_URL, REDIS_BACKEND, REDIS_PASSWORD, get_config_params
# Initialize Celery app
celery_app = Celery('download_tasks',