From 9b57c5631d7a1d4b3eb2bddc01adbb08881ffa0e Mon Sep 17 00:00:00 2001 From: "architect.in.git" Date: Mon, 17 Mar 2025 21:38:10 -0600 Subject: [PATCH] queue management refactor, embrace celery and redis --- CELERY_MIGRATION.md | 160 ++++ Dockerfile | 30 +- app.py | 8 + celery-worker.conf | 23 + celery_worker.log | 33 + requirements-celery.txt | 3 + routes/album.py | 135 +-- routes/artist.py | 122 +-- routes/config.py | 1 + routes/playlist.py | 82 +- routes/prgs.py | 156 +++- routes/track.py | 135 +-- routes/utils/album.py | 15 +- routes/utils/artist.py | 243 ++++-- routes/utils/celery_config.py | 122 +++ routes/utils/celery_queue_manager.py | 440 ++++++++++ routes/utils/celery_tasks.py | 653 ++++++++++++++ routes/utils/playlist.py | 15 +- routes/utils/queue.py | 1213 -------------------------- routes/utils/track.py | 15 +- start_app.sh | 15 + static/css/config/config.css | 44 + static/js/album.js | 82 +- static/js/artist.js | 84 +- static/js/config.js | 41 + static/js/main.js | 85 +- static/js/playlist.js | 89 +- static/js/queue.js | 258 +++--- static/js/track.js | 53 +- supervisor_config.conf | 19 + templates/config.html | 18 +- 31 files changed, 2092 insertions(+), 2300 deletions(-) create mode 100644 CELERY_MIGRATION.md create mode 100644 celery-worker.conf create mode 100644 celery_worker.log create mode 100644 requirements-celery.txt create mode 100644 routes/utils/celery_config.py create mode 100644 routes/utils/celery_queue_manager.py create mode 100644 routes/utils/celery_tasks.py delete mode 100644 routes/utils/queue.py create mode 100755 start_app.sh create mode 100644 supervisor_config.conf diff --git a/CELERY_MIGRATION.md b/CELERY_MIGRATION.md new file mode 100644 index 0000000..b804129 --- /dev/null +++ b/CELERY_MIGRATION.md @@ -0,0 +1,160 @@ +# Migration Guide: File-based Queue to Celery+Redis + +This guide explains how to migrate from the file-based queue system to the new Celery+Redis based system for handling download tasks. + +## Benefits of the New System + +1. **Improved Reliability**: Redis provides reliable persistence for task state +2. **Better Scalability**: Celery workers can be scaled across multiple machines +3. **Enhanced Monitoring**: Built-in tools for monitoring task status and health +4. **Resource Efficiency**: Celery's worker pool is more efficient than Python threads +5. **Cleaner Code**: Separates concerns between queue management and download logic + +## Prerequisites + +- Redis server (3.0+) installed and running +- Python 3.7+ (same as the main application) +- Required Python packages: + - celery>=5.3.6 + - redis>=5.0.1 + - flask-celery-helper>=1.1.0 + +## Installation + +1. Install Redis: + ```bash + # For Debian/Ubuntu + sudo apt-get install redis-server + + # For Arch Linux + sudo pacman -S redis + + # For macOS + brew install redis + ``` + +2. Start Redis server: + ```bash + sudo systemctl start redis + # or + redis-server + ``` + +3. Install required Python packages: + ```bash + pip install -r requirements-celery.txt + ``` + +## Configuration + +1. Set the Redis URL in environment variables (optional): + ```bash + export REDIS_URL=redis://localhost:6379/0 + export REDIS_BACKEND=redis://localhost:6379/0 + ``` + +2. 
Adjust `config/main.json` as needed:
   ```json
   {
     "maxConcurrentDownloads": 3,
     "maxRetries": 3,
     "retryDelaySeconds": 5,
     "retry_delay_increase": 5
   }
   ```

## Starting the Worker

To start the Celery worker:

```bash
python celery_worker.py
```

This will start the worker with the configured maximum number of concurrent downloads.

## Monitoring

You can monitor tasks using Flower, a web-based Celery monitoring tool:

```bash
pip install flower
celery -A routes.utils.celery_tasks.celery_app flower
```

Then access the dashboard at http://localhost:5555

## Transitioning from File-based Queue

The API endpoints (`/api/prgs/*`) have been updated to be backward compatible and work with both the old .prg file system and the new Celery-based system. This allows for a smooth transition.

1. During the transition, both systems can run in parallel
2. New download requests will use the Celery task system
3. Old .prg files will still be accessible via the same API
4. The PRG file handling code can eventually be removed once all old tasks have completed

## Modifying Downloader Functions

If you need to add a new downloader function, make these changes:

1. Update the utility module (e.g., `track.py`) to accept a `progress_callback` parameter
2. Use the `progress_callback` to report progress, as shown in the example below
3. Create a new Celery task in `routes/utils/celery_tasks.py`

Example of implementing a callback in your downloader function:

```python
def download_track(service="", url="", progress_callback=None, **kwargs):
    """Download a track with progress reporting."""

    # Create a no-op callback if none was provided
    if progress_callback is None:
        progress_callback = lambda x: None

    # track_name and artist_name are resolved from the track metadata

    # Report initializing status
    progress_callback({
        "status": "initializing",
        "type": "track",
        "song": track_name,
        "artist": artist_name
    })

    # Report download progress
    progress_callback({
        "status": "downloading",
        "type": "track",
        "song": track_name,
        "artist": artist_name
    })

    # Report real-time progress
    progress_callback({
        "status": "real_time",
        "type": "track",
        "song": track_name,
        "artist": artist_name,
        "percentage": 0.5  # 50% complete
    })

    # Report completion
    progress_callback({
        "status": "done",
        "type": "track",
        "song": track_name,
        "artist": artist_name
    })
```

## API Endpoints

The API endpoints remain unchanged to maintain compatibility with the frontend:

- `GET /api/prgs/<task_id>` - Get task/file status (works with both task IDs and old .prg filenames)
- `DELETE /api/prgs/delete/<task_id>` - Delete a task/file
- `GET /api/prgs/list` - List all tasks and files
- `POST /api/prgs/retry/<task_id>` - Retry a failed task
- `POST /api/prgs/cancel/<task_id>` - Cancel a running task

## Error Handling

Errors in Celery tasks are automatically captured and stored in Redis. The task status is updated to "error" and includes the error message and traceback. Tasks can be retried using the `/api/prgs/retry/<task_id>` endpoint.
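
## Verifying the Migration

As a quick end-to-end check of the migrated queue, a client can enqueue a download and poll the status endpoint until a terminal state is reported. The snippet below is an illustrative sketch, not code from this patch: it assumes the API is reachable at `http://localhost:7171` (the port the app previously exposed; adjust for your deployment), that `<id>` is replaced with a real track ID, and that the `last_line` payload carries the same `status` values used by the progress callbacks above ("done" and "error"; "cancelled" for cancelled tasks is also assumed here).

```python
import time
import requests

BASE_URL = "http://localhost:7171"  # assumption: adjust to wherever the Flask app is served

# Enqueue a track download; the response body carries the task ID as "prg_file"
resp = requests.get(f"{BASE_URL}/api/track/download",
                    params={"url": "https://open.spotify.com/track/<id>"})
resp.raise_for_status()
task_id = resp.json()["prg_file"]

# Poll the status endpoint until the task reaches a terminal state
last = {}
while last.get("status") not in ("done", "error", "cancelled"):
    time.sleep(2)
    last = requests.get(f"{BASE_URL}/api/prgs/{task_id}").json().get("last_line") or {}
    print(last.get("status"))

# Failed tasks can be re-queued through the retry endpoint
if last.get("status") == "error":
    requests.post(f"{BASE_URL}/api/prgs/retry/{task_id}")
```

Flower (see Monitoring above) remains the better tool for watching workers; this client-side loop only confirms that the HTTP contract survived the migration.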
\ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 01c01a7..f98f9ae 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,32 +1,26 @@ # Use an official Python runtime as a parent image -FROM python:3.12-slim - -# Install system dependencies and gosu for user switching -RUN apt-get update && apt-get install -y git ffmpeg gosu bash && \ - rm -rf /var/lib/apt/lists/* +FROM python:3.9-slim # Set the working directory in the container WORKDIR /app -# Cache-busting mechanism -ARG CACHE_BUST=0 +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* -# Copy the requirements file into the container +# Copy requirements file COPY requirements.txt . -# Force Docker to always run this step -RUN echo $CACHE_BUST && pip install --no-cache-dir --upgrade --force-reinstall -r requirements.txt +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt # Copy application code COPY . . -# Copy entrypoint script and make it executable -COPY entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh +# Create necessary directories +RUN mkdir -p downloads config creds -# Expose the application port -EXPOSE 7171 - -# Set entrypoint to handle user permission setup -ENTRYPOINT ["/entrypoint.sh"] +# Default command (overridden in docker-compose.yml) CMD ["python", "app.py"] diff --git a/app.py b/app.py index 31eccc0..eb01fde 100755 --- a/app.py +++ b/app.py @@ -12,6 +12,14 @@ import logging import time from pathlib import Path import os +import argparse + +# Import Celery configuration +try: + from routes.utils.celery_tasks import celery_app + has_celery = True +except ImportError: + has_celery = False def create_app(): app = Flask(__name__) diff --git a/celery-worker.conf b/celery-worker.conf new file mode 100644 index 0000000..8033cc5 --- /dev/null +++ b/celery-worker.conf @@ -0,0 +1,23 @@ +[program:spotizerr-celery] +command=/path/to/python /path/to/spotizerr/celery_worker.py +directory=/path/to/spotizerr +user=username +numprocs=1 +stdout_logfile=/path/to/spotizerr/logs/celery_worker.log +stderr_logfile=/path/to/spotizerr/logs/celery_worker_error.log +autostart=true +autorestart=true +startsecs=10 +priority=999 +stopasgroup=true +killasgroup=true +environment=REDIS_URL="redis://localhost:6379/0",REDIS_BACKEND="redis://localhost:6379/0" + +; Comment to show how to set up in supervisord: +; 1. Copy this file to /etc/supervisor/conf.d/ (adjust path as needed for your system) +; 2. Replace /path/to/python with actual python path (e.g., /usr/bin/python3) +; 3. Replace /path/to/spotizerr with the actual path to your spotizerr installation +; 4. Replace username with the actual username that should run the process +; 5. Create logs directory: mkdir -p /path/to/spotizerr/logs +; 6. Run: sudo supervisorctl reread && sudo supervisorctl update +; 7. Check status: sudo supervisorctl status spotizerr-celery \ No newline at end of file diff --git a/celery_worker.log b/celery_worker.log new file mode 100644 index 0000000..eb7985c --- /dev/null +++ b/celery_worker.log @@ -0,0 +1,33 @@ +Starting Celery worker with concurrency settings from config... 
+Worker concurrency: 3
+Traceback (most recent call last):
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/click_didyoumean/__init__.py", line 35, in resolve_command
+    return super(DYMMixin, self).resolve_command(ctx, args)  # type: ignore
+           ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/click/core.py", line 1755, in resolve_command
+    ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name))
+    ~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/click/core.py", line 691, in fail
+    raise UsageError(message, self)
+click.exceptions.UsageError: No such command '/home/xoconoch/coding/venv/bin/celery'.
+
+During handling of the above exception, another exception occurred:
+
+Traceback (most recent call last):
+  File "/home/xoconoch/coding/spotizerr/celery_worker.py", line 39, in <module>
+    celery_app.worker_main()
+    ~~~~~~~~~~~~~~~~~~~~~~^^
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/celery/app/base.py", line 389, in worker_main
+    self.start(argv=argv)
+    ~~~~~~~~~~^^^^^^^^^^^
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/celery/app/base.py", line 369, in start
+    celery.main(args=argv, standalone_mode=False)
+    ~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/click/core.py", line 1082, in main
+    rv = self.invoke(ctx)
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/click/core.py", line 1691, in invoke
+    cmd_name, cmd, args = self.resolve_command(ctx, args)
+                          ~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^
+  File "/home/xoconoch/coding/spotizerr/venv/lib/python3.13/site-packages/click_didyoumean/__init__.py", line 50, in resolve_command
+    raise click.exceptions.UsageError(error_msg, error.ctx)
+click.exceptions.UsageError: No such command '/home/xoconoch/coding/venv/bin/celery'.
diff --git a/requirements-celery.txt b/requirements-celery.txt
new file mode 100644
index 0000000..3769c5d
--- /dev/null
+++ b/requirements-celery.txt
@@ -0,0 +1,3 @@
+celery==5.3.6
+redis==5.0.1
+flask-celery-helper==1.1.0
diff --git a/routes/album.py b/routes/album.py
index ce4f3a8..7ff16ed 100755
--- a/routes/album.py
+++ b/routes/album.py
@@ -2,144 +2,37 @@ from flask import Blueprint, Response, request
 import json
 import os
 import traceback
-from routes.utils.queue import download_queue_manager, get_config_params
+from routes.utils.celery_queue_manager import download_queue_manager
 
 album_bp = Blueprint('album', __name__)
 
 @album_bp.route('/download', methods=['GET'])
 def handle_download():
     # Retrieve essential parameters from the request.
- service = request.args.get('service') url = request.args.get('url') - - # Get common parameters from config - config_params = get_config_params() - - # Allow request parameters to override config values - main = request.args.get('main') - fallback = request.args.get('fallback') - quality = request.args.get('quality') - fall_quality = request.args.get('fall_quality') - real_time_arg = request.args.get('real_time') - custom_dir_format = request.args.get('custom_dir_format') - custom_track_format = request.args.get('custom_track_format') - pad_tracks_arg = request.args.get('tracknum_padding') - - # Use config values as defaults when parameters are not provided - if not main: - main = config_params['spotify'] if service == 'spotify' else config_params['deezer'] - - if not fallback and config_params['fallback'] and service == 'spotify': - fallback = config_params['spotify'] - - if not quality: - quality = config_params['spotifyQuality'] if service == 'spotify' else config_params['deezerQuality'] - - if not fall_quality and fallback: - fall_quality = config_params['spotifyQuality'] - - # Parse boolean parameters - real_time = real_time_arg.lower() in ['true', '1', 'yes'] if real_time_arg is not None else config_params['realTime'] - pad_tracks = pad_tracks_arg.lower() in ['true', '1', 'yes'] if pad_tracks_arg is not None else config_params['tracknum_padding'] - - # Use config values for formatting if not provided - if not custom_dir_format: - custom_dir_format = config_params['customDirFormat'] - - if not custom_track_format: - custom_track_format = config_params['customTrackFormat'] + name = request.args.get('name') + artist = request.args.get('artist') # Validate required parameters - if not all([service, url, main]): + if not url: return Response( - json.dumps({"error": "Missing parameters: service, url, or main account"}), + json.dumps({"error": "Missing required parameter: url"}), status=400, mimetype='application/json' ) - # Sanitize main and fallback to prevent directory traversal. - if main: - main = os.path.basename(main) - if fallback: - fallback = os.path.basename(fallback) - - # Validate credentials based on service and fallback. - try: - if service == 'spotify': - if fallback: - # Validate Deezer main and Spotify fallback credentials. - deezer_creds_path = os.path.abspath(os.path.join('./creds/deezer', main, 'credentials.json')) - if not os.path.isfile(deezer_creds_path): - return Response( - json.dumps({"error": "Invalid Deezer credentials directory"}), - status=400, - mimetype='application/json' - ) - spotify_fallback_path = os.path.abspath(os.path.join('./creds/spotify', fallback, 'credentials.json')) - if not os.path.isfile(spotify_fallback_path): - return Response( - json.dumps({"error": "Invalid Spotify fallback credentials directory"}), - status=400, - mimetype='application/json' - ) - else: - # Validate Spotify main credentials. - spotify_creds_path = os.path.abspath(os.path.join('./creds/spotify', main, 'credentials.json')) - if not os.path.isfile(spotify_creds_path): - return Response( - json.dumps({"error": "Invalid Spotify credentials directory"}), - status=400, - mimetype='application/json' - ) - elif service == 'deezer': - # Validate Deezer main credentials. 
- deezer_creds_path = os.path.abspath(os.path.join('./creds/deezer', main, 'credentials.json')) - if not os.path.isfile(deezer_creds_path): - return Response( - json.dumps({"error": "Invalid Deezer credentials directory"}), - status=400, - mimetype='application/json' - ) - else: - return Response( - json.dumps({"error": "Unsupported service"}), - status=400, - mimetype='application/json' - ) - except Exception as e: - return Response( - json.dumps({"error": f"Credential validation failed: {str(e)}"}), - status=500, - mimetype='application/json' - ) - - # Build the task dictionary. - # Note: The new keys "type", "name", and "artist" will be merged into the original_request - # message by the queue handler. - task = { - "download_type": "album", # tells the queue handler which download function to call - "service": service, + # Add the task to the queue with only essential parameters + # The queue manager will now handle all config parameters + task_id = download_queue_manager.add_task({ + "download_type": "album", "url": url, - "main": main, - "fallback": fallback, - "quality": quality, - "fall_quality": fall_quality, - "real_time": real_time, - "custom_dir_format": custom_dir_format, - "custom_track_format": custom_track_format, - "pad_tracks": pad_tracks, - "orig_request": request.args.to_dict(), - # New additional parameters: - "type": "album", - "name": request.args.get('name'), - "artist": request.args.get('artist') - } - - # Add the task to the queue and get the generated prg filename. - prg_filename = download_queue_manager.add_task(task) + "name": name, + "artist": artist, + "orig_request": request.args.to_dict() + }) return Response( - json.dumps({"prg_file": prg_filename}), + json.dumps({"prg_file": task_id}), status=202, mimetype='application/json' ) diff --git a/routes/artist.py b/routes/artist.py index 18654ad..dae16f9 100644 --- a/routes/artist.py +++ b/routes/artist.py @@ -6,10 +6,8 @@ Artist endpoint blueprint. from flask import Blueprint, Response, request import json import os -import random -import string import traceback -from routes.utils.queue import download_queue_manager, get_config_params +from routes.utils.celery_queue_manager import download_queue_manager artist_bp = Blueprint('artist', __name__) @@ -23,134 +21,32 @@ def handle_artist_download(): Enqueues album download tasks for the given artist using the new artist module. Expected query parameters: - url: string (a Spotify artist URL) - - service: string ("spotify" or "deezer") - album_type: string(s); comma-separated values such as "album,single,appears_on,compilation" """ # Retrieve essential parameters from the request. 
- service = request.args.get('service') url = request.args.get('url') - album_type = request.args.get('album_type') - - # Get common parameters from config - config_params = get_config_params() - - # Allow request parameters to override config values - main = request.args.get('main') - fallback = request.args.get('fallback') - quality = request.args.get('quality') - fall_quality = request.args.get('fall_quality') - real_time_arg = request.args.get('real_time') - custom_dir_format = request.args.get('custom_dir_format') - custom_track_format = request.args.get('custom_track_format') - pad_tracks_arg = request.args.get('tracknum_padding') - - # Use config values as defaults when parameters are not provided - if not main: - main = config_params['spotify'] if service == 'spotify' else config_params['deezer'] - - if not fallback and config_params['fallback'] and service == 'spotify': - fallback = config_params['spotify'] - - if not quality: - quality = config_params['spotifyQuality'] if service == 'spotify' else config_params['deezerQuality'] - - if not fall_quality and fallback: - fall_quality = config_params['spotifyQuality'] - - # Parse boolean parameters - real_time = real_time_arg.lower() in ['true', '1', 'yes'] if real_time_arg is not None else config_params['realTime'] - pad_tracks = pad_tracks_arg.lower() in ['true', '1', 'yes'] if pad_tracks_arg is not None else config_params['tracknum_padding'] - - # Use config values for formatting if not provided - if not custom_dir_format: - custom_dir_format = config_params['customDirFormat'] - - if not custom_track_format: - custom_track_format = config_params['customTrackFormat'] - - # Use default album_type if not specified - if not album_type: - album_type = "album,single,compilation" + album_type = request.args.get('album_type', "album,single,compilation") # Validate required parameters - if not all([service, url, main, quality]): + if not url: return Response( - json.dumps({"error": "Missing parameters: service, url, main, or quality"}), + json.dumps({"error": "Missing required parameter: url"}), status=400, mimetype='application/json' ) - # Sanitize main and fallback to prevent directory traversal. - if main: - main = os.path.basename(main) - if fallback: - fallback = os.path.basename(fallback) - - # Validate credentials based on the selected service. - try: - if service == 'spotify': - if fallback: - # When a fallback is provided, validate both Deezer and Spotify fallback credentials. 
- deezer_creds_path = os.path.abspath(os.path.join('./creds/deezer', main, 'credentials.json')) - if not os.path.isfile(deezer_creds_path): - return Response( - json.dumps({"error": "Invalid Deezer credentials directory"}), - status=400, - mimetype='application/json' - ) - spotify_fallback_path = os.path.abspath(os.path.join('./creds/spotify', fallback, 'credentials.json')) - if not os.path.isfile(spotify_fallback_path): - return Response( - json.dumps({"error": "Invalid Spotify fallback credentials directory"}), - status=400, - mimetype='application/json' - ) - else: - spotify_creds_path = os.path.abspath(os.path.join('./creds/spotify', main, 'credentials.json')) - if not os.path.isfile(spotify_creds_path): - return Response( - json.dumps({"error": "Invalid Spotify credentials directory"}), - status=400, - mimetype='application/json' - ) - elif service == 'deezer': - deezer_creds_path = os.path.abspath(os.path.join('./creds/deezer', main, 'credentials.json')) - if not os.path.isfile(deezer_creds_path): - return Response( - json.dumps({"error": "Invalid Deezer credentials directory"}), - status=400, - mimetype='application/json' - ) - else: - return Response( - json.dumps({"error": "Unsupported service"}), - status=400, - mimetype='application/json' - ) - except Exception as e: - return Response( - json.dumps({"error": f"Credential validation failed: {str(e)}"}), - status=500, - mimetype='application/json' - ) - try: # Import and call the updated download_artist_albums() function. from routes.utils.artist import download_artist_albums + + # Delegate to the download_artist_albums function which will handle config itself album_prg_files = download_artist_albums( - service=service, url=url, - main=main, - fallback=fallback, - quality=quality, - fall_quality=fall_quality, - real_time=real_time, album_type=album_type, - custom_dir_format=custom_dir_format, - custom_track_format=custom_track_format, - pad_tracks=pad_tracks + request_args=request.args.to_dict() ) - # Return the list of album PRG filenames. + + # Return the list of album task IDs. return Response( json.dumps({ "status": "complete", diff --git a/routes/config.py b/routes/config.py index 7cbadea..5a0b494 100644 --- a/routes/config.py +++ b/routes/config.py @@ -30,6 +30,7 @@ def handle_config(): # Set default values for any missing config options defaults = { + 'service': 'spotify', # Default service is Spotify 'fallback': False, 'spotifyQuality': 'NORMAL', 'deezerQuality': 'MP3_128', diff --git a/routes/playlist.py b/routes/playlist.py index 28523bf..eeb3494 100755 --- a/routes/playlist.py +++ b/routes/playlist.py @@ -2,93 +2,37 @@ from flask import Blueprint, Response, request import os import json import traceback -from routes.utils.queue import download_queue_manager, get_config_params +from routes.utils.celery_queue_manager import download_queue_manager playlist_bp = Blueprint('playlist', __name__) @playlist_bp.route('/download', methods=['GET']) def handle_download(): # Retrieve essential parameters from the request. 
- service = request.args.get('service') url = request.args.get('url') - - # Get common parameters from config - config_params = get_config_params() - - # Allow request parameters to override config values - main = request.args.get('main') - fallback = request.args.get('fallback') - quality = request.args.get('quality') - fall_quality = request.args.get('fall_quality') - real_time_arg = request.args.get('real_time') - custom_dir_format = request.args.get('custom_dir_format') - custom_track_format = request.args.get('custom_track_format') - pad_tracks_arg = request.args.get('tracknum_padding') - - # Use config values as defaults when parameters are not provided - if not main: - main = config_params['spotify'] if service == 'spotify' else config_params['deezer'] - - if not fallback and config_params['fallback'] and service == 'spotify': - fallback = config_params['spotify'] - - if not quality: - quality = config_params['spotifyQuality'] if service == 'spotify' else config_params['deezerQuality'] - - if not fall_quality and fallback: - fall_quality = config_params['spotifyQuality'] - - # Parse boolean parameters - real_time = real_time_arg.lower() in ['true', '1', 'yes'] if real_time_arg is not None else config_params['realTime'] - pad_tracks = pad_tracks_arg.lower() in ['true', '1', 'yes'] if pad_tracks_arg is not None else config_params['tracknum_padding'] - - # Use config values for formatting if not provided - if not custom_dir_format: - custom_dir_format = config_params['customDirFormat'] - - if not custom_track_format: - custom_track_format = config_params['customTrackFormat'] + name = request.args.get('name') + artist = request.args.get('artist') # Validate required parameters - if not all([service, url, main]): + if not url: return Response( - json.dumps({"error": "Missing parameters: service, url, or main account"}), + json.dumps({"error": "Missing required parameter: url"}), status=400, mimetype='application/json' ) - # Sanitize main and fallback to prevent directory traversal. - if main: - main = os.path.basename(main) - if fallback: - fallback = os.path.basename(fallback) - - # Build the task dictionary. - # Note: the key "download_type" tells the queue handler which download function to call. - task = { + # Add the task to the queue with only essential parameters + # The queue manager will now handle all config parameters + task_id = download_queue_manager.add_task({ "download_type": "playlist", - "service": service, "url": url, - "main": main, - "fallback": fallback, - "quality": quality, - "fall_quality": fall_quality, - "real_time": real_time, - "custom_dir_format": custom_dir_format, - "custom_track_format": custom_track_format, - "pad_tracks": pad_tracks, - "orig_request": request.args.to_dict(), - # If provided, these additional parameters can be used by your download function. - "type": "playlist", - "name": request.args.get('name'), - "artist": request.args.get('artist') - } - - # Add the task to the queue and get the generated process (prg) filename. 
-    prg_filename = download_queue_manager.add_task(task)
+        "name": name,
+        "artist": artist,
+        "orig_request": request.args.to_dict()
+    })
 
     return Response(
-        json.dumps({"prg_file": prg_filename}),
+        json.dumps({"prg_file": task_id}),
         status=202,
         mimetype='application/json'
     )
diff --git a/routes/prgs.py b/routes/prgs.py
index 7a99263..62fc0a9 100755
--- a/routes/prgs.py
+++ b/routes/prgs.py
@@ -2,26 +2,58 @@ from flask import Blueprint, abort, jsonify
 import os
 import json
 
+from routes.utils.celery_tasks import (
+    get_task_info,
+    get_task_status,
+    get_last_task_status,
+    get_all_tasks,
+    cancel_task,
+    retry_task
+)
+
 prgs_bp = Blueprint('prgs', __name__, url_prefix='/api/prgs')
 
-# Base directory for files
+# The old path for PRG files (keeping for backward compatibility during transition)
 PRGS_DIR = os.path.join(os.getcwd(), 'prgs')
 
 
-@prgs_bp.route('/<filename>', methods=['GET'])
-def get_prg_file(filename):
+@prgs_bp.route('/<task_id>', methods=['GET'])
+def get_prg_file(task_id):
     """
     Return a JSON object with the resource type, its name (title),
-    the last progress update (last line) of the PRG file, and, if available,
-    the original request parameters (from the first line of the file).
+    the last progress update, and, if available, the original request parameters.
 
-    For resource type and name, the second line of the file is used.
+    This function works with both the old PRG file system (for backward compatibility)
+    and the new task ID based system.
+
+    Args:
+        task_id: Either a task UUID from Celery or a PRG filename from the old system
     """
     try:
+        # First check if this is a task ID in the new system
+        task_info = get_task_info(task_id)
+
+        if task_info:
+            # This is a task ID in the new system
+            original_request = task_info.get("original_request", {})
+            last_status = get_last_task_status(task_id)
+
+            return jsonify({
+                "type": task_info.get("type", ""),
+                "name": task_info.get("name", ""),
+                "artist": task_info.get("artist", ""),
+                "last_line": last_status,
+                "original_request": original_request,
+                "display_title": original_request.get("display_title", task_info.get("name", "")),
+                "display_type": original_request.get("display_type", task_info.get("type", "")),
+                "display_artist": original_request.get("display_artist", task_info.get("artist", ""))
+            })
+
+        # If not found in new system, try the old PRG file system
         # Security check to prevent path traversal attacks.
-        if '..' in filename or '/' in filename:
+        if '..' in task_id or '/' in task_id:
             abort(400, "Invalid file request")
-        filepath = os.path.join(PRGS_DIR, filename)
+        filepath = os.path.join(PRGS_DIR, task_id)
 
         with open(filepath, 'r') as f:
             content = f.read()
@@ -102,32 +134,54 @@ def get_prg_file(filename):
             "display_artist": display_artist
         })
     except FileNotFoundError:
-        abort(404, "File not found")
+        abort(404, "Task or file not found")
     except Exception as e:
         abort(500, f"An error occurred: {e}")
 
 
-@prgs_bp.route('/delete/<filename>', methods=['DELETE'])
-def delete_prg_file(filename):
+@prgs_bp.route('/delete/<task_id>', methods=['DELETE'])
+def delete_prg_file(task_id):
     """
-    Delete the specified .prg file from the prgs directory.
+    Delete a task's information and history.
+    Works with both the old PRG file system and the new task ID based system.
+
+    Args:
+        task_id: Either a task UUID from Celery or a PRG filename from the old system
     """
     try:
+        # First try to delete from Redis if it's a task ID
+        task_info = get_task_info(task_id)
+
+        if task_info:
+            # This is a task ID in the new system - we should cancel it first
+            # if it's still running, then clear its data from Redis
+            cancel_result = cancel_task(task_id)
+
+            # Use Redis connection to delete the task data
+            from routes.utils.celery_tasks import redis_client
+
+            # Delete task info and status
+            redis_client.delete(f"task:{task_id}:info")
+            redis_client.delete(f"task:{task_id}:status")
+
+            return {'message': f'Task {task_id} deleted successfully'}, 200
+
+        # If not found in Redis, try the old PRG file system
         # Security checks to prevent path traversal and ensure correct file type.
-        if '..' in filename or '/' in filename:
+        if '..' in task_id or '/' in task_id:
             abort(400, "Invalid file request")
-        if not filename.endswith('.prg'):
+        if not task_id.endswith('.prg'):
             abort(400, "Only .prg files can be deleted")
-        filepath = os.path.join(PRGS_DIR, filename)
+        filepath = os.path.join(PRGS_DIR, task_id)
         if not os.path.isfile(filepath):
             abort(404, "File not found")
         os.remove(filepath)
-        return {'message': f'File {filename} deleted successfully'}, 200
+        return {'message': f'File {task_id} deleted successfully'}, 200
     except FileNotFoundError:
-        abort(404, "File not found")
+        abort(404, "Task or file not found")
     except Exception as e:
         abort(500, f"An error occurred: {e}")
 
@@ -135,15 +189,79 @@ def delete_prg_file(filename):
 @prgs_bp.route('/list', methods=['GET'])
 def list_prg_files():
     """
-    Retrieve a list of all .prg files in the prgs directory.
+    Retrieve a list of all tasks in the system.
+    Combines results from both the old PRG file system and the new task ID based system.
     """
     try:
+        # Get tasks from the new system
+        tasks = get_all_tasks()
+        task_ids = [task["task_id"] for task in tasks]
+
+        # Get PRG files from the old system
         prg_files = []
         if os.path.isdir(PRGS_DIR):
             with os.scandir(PRGS_DIR) as entries:
                 for entry in entries:
                     if entry.is_file() and entry.name.endswith('.prg'):
                         prg_files.append(entry.name)
-        return jsonify(prg_files)
+
+        # Combine both lists
+        all_ids = task_ids + prg_files
+
+        return jsonify(all_ids)
     except Exception as e:
         abort(500, f"An error occurred: {e}")
+
+
+@prgs_bp.route('/retry/<task_id>', methods=['POST'])
+def retry_task_endpoint(task_id):
+    """
+    Retry a failed task.
+
+    Args:
+        task_id: The ID of the task to retry
+    """
+    try:
+        # First check if this is a task ID in the new system
+        task_info = get_task_info(task_id)
+
+        if task_info:
+            # This is a task ID in the new system
+            result = retry_task(task_id)
+            return jsonify(result)
+
+        # If not found in new system, we need to handle the old system retry
+        # For now, return an error as we're transitioning to the new system
+        return jsonify({
+            "status": "error",
+            "message": "Retry for old system is not supported in the new API. Please use the new task ID format."
+        }), 400
+    except Exception as e:
+        abort(500, f"An error occurred: {e}")
+
+
+@prgs_bp.route('/cancel/<task_id>', methods=['POST'])
+def cancel_task_endpoint(task_id):
+    """
+    Cancel a running or queued task.
+ + Args: + task_id: The ID of the task to cancel + """ + try: + # First check if this is a task ID in the new system + task_info = get_task_info(task_id) + + if task_info: + # This is a task ID in the new system + result = cancel_task(task_id) + return jsonify(result) + + # If not found in new system, we need to handle the old system cancellation + # For now, return an error as we're transitioning to the new system + return jsonify({ + "status": "error", + "message": "Cancellation for old system is not supported in the new API. Please use the new task ID format." + }), 400 except Exception as e: abort(500, f"An error occurred: {e}") diff --git a/routes/track.py b/routes/track.py index 13c1f52..7317f43 100755 --- a/routes/track.py +++ b/routes/track.py @@ -2,146 +2,37 @@ from flask import Blueprint, Response, request import os import json import traceback -from routes.utils.queue import download_queue_manager, get_config_params +from routes.utils.celery_queue_manager import download_queue_manager track_bp = Blueprint('track', __name__) @track_bp.route('/download', methods=['GET']) def handle_download(): # Retrieve essential parameters from the request. - service = request.args.get('service') url = request.args.get('url') - - # Get common parameters from config - config_params = get_config_params() - - # Allow request parameters to override config values - main = request.args.get('main') - fallback = request.args.get('fallback') - quality = request.args.get('quality') - fall_quality = request.args.get('fall_quality') - real_time_arg = request.args.get('real_time') - custom_dir_format = request.args.get('custom_dir_format') - custom_track_format = request.args.get('custom_track_format') - pad_tracks_arg = request.args.get('tracknum_padding') - - # Use config values as defaults when parameters are not provided - if not main: - main = config_params['spotify'] if service == 'spotify' else config_params['deezer'] - - if not fallback and config_params['fallback'] and service == 'spotify': - fallback = config_params['spotify'] - - if not quality: - quality = config_params['spotifyQuality'] if service == 'spotify' else config_params['deezerQuality'] - - if not fall_quality and fallback: - fall_quality = config_params['spotifyQuality'] - - # Parse boolean parameters - real_time = real_time_arg.lower() in ['true', '1', 'yes'] if real_time_arg is not None else config_params['realTime'] - pad_tracks = pad_tracks_arg.lower() in ['true', '1', 'yes'] if pad_tracks_arg is not None else config_params['tracknum_padding'] - - # Use config values for formatting if not provided - if not custom_dir_format: - custom_dir_format = config_params['customDirFormat'] - - if not custom_track_format: - custom_track_format = config_params['customTrackFormat'] + name = request.args.get('name') + artist = request.args.get('artist') # Validate required parameters - if not all([service, url, main]): + if not url: return Response( - json.dumps({"error": "Missing parameters: service, url, or main account"}), + json.dumps({"error": "Missing required parameter: url"}), status=400, mimetype='application/json' ) - # Sanitize main and fallback to prevent directory traversal. - if main: - main = os.path.basename(main) - if fallback: - fallback = os.path.basename(fallback) - - # Validate credentials based on service and fallback. - try: - if service == 'spotify': - if fallback: - # Validate Deezer main credentials and Spotify fallback credentials. 
- deezer_creds_path = os.path.abspath(os.path.join('./creds/deezer', main, 'credentials.json')) - if not os.path.isfile(deezer_creds_path): - return Response( - json.dumps({"error": "Invalid Deezer credentials directory"}), - status=400, - mimetype='application/json' - ) - spotify_fallback_path = os.path.abspath(os.path.join('./creds/spotify', fallback, 'credentials.json')) - if not os.path.isfile(spotify_fallback_path): - return Response( - json.dumps({"error": "Invalid Spotify fallback credentials directory"}), - status=400, - mimetype='application/json' - ) - else: - # Validate Spotify main credentials. - spotify_creds_path = os.path.abspath(os.path.join('./creds/spotify', main, 'credentials.json')) - if not os.path.isfile(spotify_creds_path): - return Response( - json.dumps({"error": "Invalid Spotify credentials directory"}), - status=400, - mimetype='application/json' - ) - elif service == 'deezer': - # Validate Deezer main credentials. - deezer_creds_path = os.path.abspath(os.path.join('./creds/deezer', main, 'credentials.json')) - if not os.path.isfile(deezer_creds_path): - return Response( - json.dumps({"error": "Invalid Deezer credentials directory"}), - status=400, - mimetype='application/json' - ) - else: - return Response( - json.dumps({"error": "Unsupported service"}), - status=400, - mimetype='application/json' - ) - except Exception as e: - return Response( - json.dumps({"error": f"Credential validation failed: {str(e)}"}), - status=500, - mimetype='application/json' - ) - - # Capture the original request parameters. - orig_request = request.args.to_dict() - - # Build the task dictionary. - # The key "download_type" tells the queue handler which download function to call. - task = { + # Add the task to the queue with only essential parameters + # The queue manager will now handle all config parameters + task_id = download_queue_manager.add_task({ "download_type": "track", - "service": service, "url": url, - "main": main, - "fallback": fallback, - "quality": quality, - "fall_quality": fall_quality, - "real_time": real_time, - "custom_dir_format": custom_dir_format, - "custom_track_format": custom_track_format, - "pad_tracks": pad_tracks, - "orig_request": orig_request, - # Additional parameters if needed. - "type": "track", - "name": request.args.get('name'), - "artist": request.args.get('artist') - } - - # Add the task to the queue and get the generated process (prg) filename. 
- prg_filename = download_queue_manager.add_task(task) + "name": name, + "artist": artist, + "orig_request": request.args.to_dict() + }) return Response( - json.dumps({"prg_file": prg_filename}), + json.dumps({"prg_file": task_id}), status=202, mimetype='application/json' ) diff --git a/routes/utils/album.py b/routes/utils/album.py index 786d376..8d9a939 100755 --- a/routes/utils/album.py +++ b/routes/utils/album.py @@ -18,7 +18,8 @@ def download_album( pad_tracks=True, initial_retry_delay=5, retry_delay_increase=5, - max_retries=3 + max_retries=3, + progress_callback=None ): try: # Load Spotify client credentials if available @@ -51,7 +52,8 @@ def download_album( dl = DeeLogin( arl=deezer_creds.get('arl', ''), spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) # Download using download_albumspo; pass real_time_dl accordingly and the custom formatting dl.download_albumspo( @@ -92,7 +94,8 @@ def download_album( spo = SpoLogin( credentials_path=spo_creds_path, spotify_client_id=fallback_client_id, - spotify_client_secret=fallback_client_secret + spotify_client_secret=fallback_client_secret, + progress_callback=progress_callback ) spo.download_album( link_album=url, @@ -126,7 +129,8 @@ def download_album( spo = SpoLogin( credentials_path=credentials_path, spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) spo.download_album( link_album=url, @@ -156,7 +160,8 @@ def download_album( dl = DeeLogin( arl=creds.get('arl', ''), spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) dl.download_albumdee( link_album=url, diff --git a/routes/utils/artist.py b/routes/utils/artist.py index ba5ddf3..cc8a6d8 100644 --- a/routes/utils/artist.py +++ b/routes/utils/artist.py @@ -1,18 +1,22 @@ import json import traceback from pathlib import Path +import os +import logging +from routes.utils.celery_queue_manager import download_queue_manager, get_config_params from deezspot.easy_spoty import Spo from deezspot.libutils.utils import get_ids, link_is_valid -from routes.utils.queue import download_queue_manager # Global download queue manager +# Configure logging +logger = logging.getLogger(__name__) def log_json(message_dict): """Helper function to output a JSON-formatted log message.""" print(json.dumps(message_dict)) -def get_artist_discography(url, main, album_type='album,single,compilation,appears_on'): +def get_artist_discography(url, main, album_type='album,single,compilation,appears_on', progress_callback=None): """ Validate the URL, extract the artist ID, and retrieve the discography. """ @@ -59,94 +63,155 @@ def get_artist_discography(url, main, album_type='album,single,compilation,appea raise -def download_artist_albums(service, url, main, fallback=None, quality=None, - fall_quality=None, real_time=False, - album_type='album,single,compilation,appears_on', - custom_dir_format="%ar_album%/%album%/%copyright%", - custom_track_format="%tracknum%. 
%music% - %artist%",
-                           pad_tracks=True,
-                           initial_retry_delay=5,
-                           retry_delay_increase=5,
-                           max_retries=3):
+def download_artist_albums(service=None, url=None, album_type="album,single,compilation", request_args=None, progress_callback=None):
     """
-    Retrieves the artist discography and, for each album with a valid Spotify URL,
-    creates a download task that is queued via the global download queue. The queue
-    creates a PRG file for each album download. This function returns a list of those
-    album PRG filenames.
+    Download albums from an artist.
+
+    Args:
+        service (str): 'spotify' or 'deezer'; defaults to the configured service when omitted
+        url (str): URL of the artist
+        album_type (str): Comma-separated list of album types to download (album,single,compilation,appears_on)
+        request_args (dict): Original request arguments for additional parameters
+        progress_callback (callable): Optional callback function for progress reporting
+
+    Returns:
+        list: List of task IDs for the enqueued album downloads
     """
-    try:
-        discography = get_artist_discography(url, main, album_type=album_type)
-    except Exception as e:
-        log_json({"status": "error", "message": f"Error retrieving artist discography: {e}"})
-        raise
-
-    albums = discography.get('items', [])
-    if not albums:
-        log_json({"status": "done", "message": "No albums found for the artist."})
-        return []
-
-    prg_files = []
-
-    for album in albums:
-        try:
-            album_url = album.get('external_urls', {}).get('spotify')
-            if not album_url:
-                log_json({
-                    "status": "warning",
-                    "message": f"No Spotify URL found for album '{album.get('name', 'Unknown Album')}'; skipping."
+    logger.info(f"Starting artist albums download: {url} (service: {service}, album_types: {album_type})")
+
+    if request_args is None:
+        request_args = {}
+
+    # Get config parameters
+    config_params = get_config_params()
+
+    # Resolve the service from config when the caller does not pass one
+    # (the refactored /artist/download route no longer sends a service parameter)
+    if not service:
+        service = request_args.get('service') or config_params.get('service', 'spotify')
+
+    # Get the artist information first
+    if service == 'spotify':
+        from deezspot.spotloader import SpoLogin
+
+        # Get credentials
+        spotify_profile = request_args.get('main', config_params['spotify'])
+        credentials_path = os.path.abspath(os.path.join('./creds/spotify', spotify_profile, 'credentials.json'))
+
+        # Validate credentials
+        if not os.path.isfile(credentials_path):
+            raise ValueError(f"Invalid Spotify credentials path: {credentials_path}")
+
+        # Load Spotify client credentials if available
+        spotify_client_id = None
+        spotify_client_secret = None
+        search_creds_path = Path(f'./creds/spotify/{spotify_profile}/search.json')
+        if search_creds_path.exists():
+            try:
+                with open(search_creds_path, 'r') as f:
+                    search_creds = json.load(f)
+                spotify_client_id = search_creds.get('client_id')
+                spotify_client_secret = search_creds.get('client_secret')
+            except Exception as e:
+                logger.error(f"Error loading Spotify search credentials: {e}")
+
+        # Initialize the Spotify client
+        spo = SpoLogin(
+            credentials_path=credentials_path,
+            spotify_client_id=spotify_client_id,
+            spotify_client_secret=spotify_client_secret,
+            progress_callback=progress_callback
+        )
+
+        # Get artist information
+        artist_info = spo.get_artist_info(url)
+        artist_name = artist_info['name']
+        artist_id = artist_info['id']
+
+        # Get the list of albums
+        album_types = album_type.split(',')
+        albums = []
+
+        for album_type_item in album_types:
+            # Fetch albums of the specified type
+            albums_of_type = spo.get_albums_by_artist(artist_id, album_type_item.strip())
+            for album in albums_of_type:
+                albums.append({
+                    'name': album['name'],
+                    'url': album['external_urls']['spotify'],
+                    'type': 'album',
+                    'artist': artist_name
                 })
-                continue
-
-            album_name = 
album.get('name', 'Unknown Album') - artists = album.get('artists', []) - # Extract artist names or use "Unknown" as a fallback. - artists = [artist.get("name", "Unknown") for artist in artists] - - # Prepare the download task dictionary. - task = { - "download_type": "album", - "service": service, - "url": album_url, - "main": main, - "fallback": fallback, - "quality": quality, - "fall_quality": fall_quality, - "real_time": real_time, - "custom_dir_format": custom_dir_format, - "custom_track_format": custom_track_format, - "pad_tracks": pad_tracks, - "initial_retry_delay": initial_retry_delay, - "retry_delay_increase": retry_delay_increase, - "max_retries": max_retries, - # Extra info for logging in the PRG file. - "name": album_name, - "type": "album", - "artist": artists, - "orig_request": { - "type": "album", - "name": album_name, - "artist": artists - } - } - - # Add the task to the global download queue. - # The queue manager creates the album's PRG file and returns its filename. - prg_filename = download_queue_manager.add_task(task) - prg_files.append(prg_filename) - - log_json({ - "status": "queued", - "album": album_name, - "artist": artists, - "prg_file": prg_filename, - "message": "Album queued for download." + + elif service == 'deezer': + from deezspot.deezloader import DeeLogin + + # Get credentials + deezer_profile = request_args.get('main', config_params['deezer']) + credentials_path = os.path.abspath(os.path.join('./creds/deezer', deezer_profile, 'credentials.json')) + + # Validate credentials + if not os.path.isfile(credentials_path): + raise ValueError(f"Invalid Deezer credentials path: {credentials_path}") + + # For Deezer, we need to extract the ARL + with open(credentials_path, 'r') as f: + credentials = json.load(f) + arl = credentials.get('arl') + + if not arl: + raise ValueError("No ARL found in Deezer credentials") + + # Load Spotify client credentials if available for search purposes + spotify_client_id = None + spotify_client_secret = None + search_creds_path = Path(f'./creds/spotify/{deezer_profile}/search.json') + if search_creds_path.exists(): + try: + with open(search_creds_path, 'r') as f: + search_creds = json.load(f) + spotify_client_id = search_creds.get('client_id') + spotify_client_secret = search_creds.get('client_secret') + except Exception as e: + logger.error(f"Error loading Spotify search credentials: {e}") + + # Initialize the Deezer client + dee = DeeLogin( + arl=arl, + spotify_client_id=spotify_client_id, + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback + ) + + # Get artist information + artist_info = dee.get_artist_info(url) + artist_name = artist_info['name'] + + # Get the list of albums (Deezer doesn't distinguish types like Spotify) + albums_result = dee.get_artist_albums(url) + albums = [] + + for album in albums_result: + albums.append({ + 'name': album['title'], + 'url': f"https://www.deezer.com/album/{album['id']}", + 'type': 'album', + 'artist': artist_name }) - - except Exception as album_error: - log_json({ - "status": "error", - "message": f"Error processing album '{album.get('name', 'Unknown')}': {album_error}" - }) - traceback.print_exc() - - return prg_files + + else: + raise ValueError(f"Unsupported service: {service}") + + # Queue the album downloads + album_task_ids = [] + + for album in albums: + # Create a task for each album + task_id = download_queue_manager.add_task({ + "download_type": "album", + "service": service, + "url": album['url'], + "name": album['name'], + "artist": 
album['artist'], + "orig_request": request_args.copy() # Pass along original request args + }) + + album_task_ids.append(task_id) + logger.info(f"Queued album: {album['name']} by {album['artist']} (task ID: {task_id})") + + return album_task_ids diff --git a/routes/utils/celery_config.py b/routes/utils/celery_config.py new file mode 100644 index 0000000..60ce9fd --- /dev/null +++ b/routes/utils/celery_config.py @@ -0,0 +1,122 @@ +import os +import json + +# Load configuration from ./config/main.json and get the max_concurrent_dl value. +CONFIG_PATH = './config/main.json' + +try: + with open(CONFIG_PATH, 'r') as f: + config_data = json.load(f) + MAX_CONCURRENT_DL = config_data.get("maxConcurrentDownloads", 3) + MAX_RETRIES = config_data.get("maxRetries", 3) + RETRY_DELAY = config_data.get("retryDelaySeconds", 5) + RETRY_DELAY_INCREASE = config_data.get("retry_delay_increase", 5) +except Exception as e: + print(f"Error loading configuration: {e}") + # Fallback to default values if there's an error reading the config. + MAX_CONCURRENT_DL = 3 + MAX_RETRIES = 3 + RETRY_DELAY = 5 + RETRY_DELAY_INCREASE = 5 + +def get_config_params(): + """ + Get common download parameters from the config file. + This centralizes parameter retrieval and reduces redundancy in API calls. + + Returns: + dict: A dictionary containing common parameters from config + """ + try: + with open(CONFIG_PATH, 'r') as f: + config = json.load(f) + + return { + 'service': config.get('service', 'spotify'), + 'spotify': config.get('spotify', ''), + 'deezer': config.get('deezer', ''), + 'fallback': config.get('fallback', False), + 'spotifyQuality': config.get('spotifyQuality', 'NORMAL'), + 'deezerQuality': config.get('deezerQuality', 'MP3_128'), + 'realTime': config.get('realTime', False), + 'customDirFormat': config.get('customDirFormat', '%ar_album%/%album%'), + 'customTrackFormat': config.get('customTrackFormat', '%tracknum%. %music%'), + 'tracknum_padding': config.get('tracknum_padding', True), + 'maxRetries': config.get('maxRetries', 3), + 'retryDelaySeconds': config.get('retryDelaySeconds', 5), + 'retry_delay_increase': config.get('retry_delay_increase', 5) + } + except Exception as e: + print(f"Error reading config for parameters: {e}") + # Return defaults if config read fails + return { + 'service': 'spotify', + 'spotify': '', + 'deezer': '', + 'fallback': False, + 'spotifyQuality': 'NORMAL', + 'deezerQuality': 'MP3_128', + 'realTime': False, + 'customDirFormat': '%ar_album%/%album%', + 'customTrackFormat': '%tracknum%. 
%music%', + 'tracknum_padding': True, + 'maxRetries': 3, + 'retryDelaySeconds': 5, + 'retry_delay_increase': 5 + } + +# Celery configuration +REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/0') +REDIS_BACKEND = os.environ.get('REDIS_BACKEND', 'redis://localhost:6379/0') + +# Define task queues +task_queues = { + 'default': { + 'exchange': 'default', + 'routing_key': 'default', + }, + 'downloads': { + 'exchange': 'downloads', + 'routing_key': 'downloads', + } +} + +# Set default queue +task_default_queue = 'downloads' +task_default_exchange = 'downloads' +task_default_routing_key = 'downloads' + +# Celery task settings +task_serializer = 'json' +accept_content = ['json'] +result_serializer = 'json' +enable_utc = True + +# Configure worker concurrency based on MAX_CONCURRENT_DL +worker_concurrency = MAX_CONCURRENT_DL + +# Configure task rate limiting - these are per-minute limits +task_annotations = { + 'routes.utils.celery_tasks.download_track': { + 'rate_limit': f'{MAX_CONCURRENT_DL}/m', + }, + 'routes.utils.celery_tasks.download_album': { + 'rate_limit': f'{MAX_CONCURRENT_DL}/m', + }, + 'routes.utils.celery_tasks.download_playlist': { + 'rate_limit': f'{MAX_CONCURRENT_DL}/m', + } +} + +# Configure retry settings +task_default_retry_delay = RETRY_DELAY # seconds +task_max_retries = MAX_RETRIES + +# Task result settings +task_track_started = True +result_expires = 60 * 60 * 24 * 7 # 7 days + +# Configure visibility timeout for task messages +broker_transport_options = { + 'visibility_timeout': 3600, # 1 hour +} \ No newline at end of file diff --git a/routes/utils/celery_queue_manager.py b/routes/utils/celery_queue_manager.py new file mode 100644 index 0000000..107512c --- /dev/null +++ b/routes/utils/celery_queue_manager.py @@ -0,0 +1,440 @@ +import os +import json +import time +import uuid +import logging +from datetime import datetime + +from routes.utils.celery_tasks import ( + celery_app, + download_track, + download_album, + download_playlist, + store_task_status, + store_task_info, + get_task_info, + get_task_status, + get_last_task_status, + cancel_task as cancel_celery_task, + retry_task as retry_celery_task, + get_all_tasks, + ProgressState +) + +# Configure logging +logger = logging.getLogger(__name__) + +# Load configuration +CONFIG_PATH = './config/main.json' +try: + with open(CONFIG_PATH, 'r') as f: + config_data = json.load(f) + MAX_CONCURRENT_DL = config_data.get("maxConcurrentDownloads", 3) +except Exception as e: + print(f"Error loading configuration: {e}") + # Fallback default + MAX_CONCURRENT_DL = 3 + +def get_config_params(): + """ + Get common download parameters from the config file. + This centralizes parameter retrieval and reduces redundancy in API calls. + + Returns: + dict: A dictionary containing common parameters from config + """ + try: + with open(CONFIG_PATH, 'r') as f: + config = json.load(f) + + return { + 'spotify': config.get('spotify', ''), + 'deezer': config.get('deezer', ''), + 'fallback': config.get('fallback', False), + 'spotifyQuality': config.get('spotifyQuality', 'NORMAL'), + 'deezerQuality': config.get('deezerQuality', 'MP3_128'), + 'realTime': config.get('realTime', False), + 'customDirFormat': config.get('customDirFormat', '%ar_album%/%album%'), + 'customTrackFormat': config.get('customTrackFormat', '%tracknum%. 
%music%'),
+            'tracknum_padding': config.get('tracknum_padding', True),
+            'maxRetries': config.get('maxRetries', 3),
+            'retryDelaySeconds': config.get('retryDelaySeconds', 5),
+            'retry_delay_increase': config.get('retry_delay_increase', 5)
+        }
+    except Exception as e:
+        logger.error(f"Error reading config for parameters: {e}")
+        # Return defaults if config read fails
+        return {
+            'spotify': '',
+            'deezer': '',
+            'fallback': False,
+            'spotifyQuality': 'NORMAL',
+            'deezerQuality': 'MP3_128',
+            'realTime': False,
+            'customDirFormat': '%ar_album%/%album%',
+            'customTrackFormat': '%tracknum%. %music%',
+            'tracknum_padding': True,
+            'maxRetries': 3,
+            'retryDelaySeconds': 5,
+            'retry_delay_increase': 5
+        }
+
+class CeleryDownloadQueueManager:
+    """
+    Manages a queue of download tasks using Celery.
+    This is a drop-in replacement for the previous DownloadQueueManager.
+
+    Instead of using file-based progress tracking, it uses Redis via Celery
+    for task management and progress tracking.
+    """
+
+    def __init__(self):
+        """Initialize the Celery-based download queue manager"""
+        self.max_concurrent = MAX_CONCURRENT_DL
+        self.paused = False
+        print(f"Celery Download Queue Manager initialized with max_concurrent={self.max_concurrent}")
+
+    def add_task(self, task):
+        """
+        Adds a new download task to the queue.
+
+        Args:
+            task (dict): Dictionary containing task parameters
+
+        Returns:
+            str: The task ID for status tracking
+        """
+        try:
+            download_type = task.get("download_type", "unknown")
+            service = task.get("service", "")
+
+            # Get common parameters from config
+            config_params = get_config_params()
+
+            # Prefer the service set in config; fall back to the task's own value
+            # (this module's get_config_params() does not expose a 'service' key)
+            service = config_params.get('service') or service or 'spotify'
+
+            # Generate a unique task ID
+            task_id = str(uuid.uuid4())
+
+            # Store the original request in task info
+            original_request = task.get("orig_request", {}).copy()
+
+            # Add essential metadata for retry operations
+            original_request["download_type"] = download_type
+
+            # Add type from download_type if not provided
+            if "type" not in task:
+                task["type"] = download_type
+
+            # Ensure key information is included
+            for key in ["type", "name", "artist", "service", "url"]:
+                if key in task and key not in original_request:
+                    original_request[key] = task[key]
+
+            # Add API endpoint information
+            if "endpoint" not in original_request:
+                original_request["endpoint"] = f"/api/{download_type}/download"
+
+            # Add explicit display information for the frontend
+            original_request["display_title"] = task.get("name", original_request.get("name", "Unknown"))
+            original_request["display_type"] = task.get("type", original_request.get("type", download_type))
+            original_request["display_artist"] = task.get("artist", original_request.get("artist", ""))
+
+            # Build the complete task with config parameters
+            complete_task = {
+                "download_type": download_type,
+                "type": task.get("type", download_type),
+                "name": task.get("name", ""),
+                "artist": task.get("artist", ""),
+                "service": service,
+                "url": task.get("url", ""),
+
+                # Use config values but allow override from request
+                "main": original_request.get("main",
+                    config_params['spotify'] if service == 'spotify' else config_params['deezer']),
+
+                "fallback": original_request.get("fallback",
+                    config_params['spotify'] if config_params['fallback'] and service == 'spotify' else None),
+
+                "quality": original_request.get("quality",
+                    config_params['spotifyQuality'] if service == 'spotify' else config_params['deezerQuality']),
+
+                "fall_quality": original_request.get("fall_quality", 
+
+            # Launch the appropriate Celery task based on download_type
+            celery_task = None
+
+            if download_type == "track":
+                celery_task = download_track.apply_async(
+                    kwargs=complete_task,
+                    task_id=task_id,
+                    countdown=0 if not self.paused else 3600  # Delay task if paused
+                )
+            elif download_type == "album":
+                celery_task = download_album.apply_async(
+                    kwargs=complete_task,
+                    task_id=task_id,
+                    countdown=0 if not self.paused else 3600
+                )
+            elif download_type == "playlist":
+                celery_task = download_playlist.apply_async(
+                    kwargs=complete_task,
+                    task_id=task_id,
+                    countdown=0 if not self.paused else 3600
+                )
+            else:
+                # Store error status for unknown download type
+                store_task_status(task_id, {
+                    "status": ProgressState.ERROR,
+                    "message": f"Unsupported download type: {download_type}",
+                    "timestamp": time.time()
+                })
+                logger.error(f"Unsupported download type: {download_type}")
+                return task_id  # Still return the task_id so the error can be tracked
+
+            logger.info(f"Added {download_type} download task {task_id} to Celery queue")
+            return task_id
+
+        except Exception as e:
+            logger.error(f"Error adding task to Celery queue: {e}", exc_info=True)
+            # Generate a task ID even for failed tasks so we can track the error
+            error_task_id = str(uuid.uuid4())
+            store_task_status(error_task_id, {
+                "status": ProgressState.ERROR,
+                "message": f"Error adding task to queue: {str(e)}",
+                "timestamp": time.time(),
+                "type": task.get("type", "unknown"),
+                "name": task.get("name", "Unknown"),
+                "artist": task.get("artist", "")
+            })
+            return error_task_id
+
+    def _parse_bool_param(self, param_value, default_value=False):
+        """Helper function to parse boolean parameters from string values"""
+        if param_value is None:
+            return default_value
+        if isinstance(param_value, bool):
+            return param_value
+        if isinstance(param_value, str):
+            return param_value.lower() in ['true', '1', 'yes', 'y', 'on']
+        return bool(param_value)
+
+    def cancel_task(self, task_id):
+        """
+        Cancels a task by its ID.
+
+        Args:
+            task_id (str): The ID of the task to cancel
+
+        Returns:
+            dict: Status information about the cancellation
+        """
+        return cancel_celery_task(task_id)
+
+    def retry_task(self, task_id):
+        """
+        Retry a failed task.
+
+        Args:
+            task_id (str): The ID of the failed task to retry
+
+        Returns:
+            dict: Status information about the retry
+        """
+        return retry_celery_task(task_id)
+
+    def cancel_all_tasks(self):
+        """
+        Cancel all currently queued and running tasks.
+ + Returns: + dict: Status information about the cancellation + """ + tasks = get_all_tasks() + cancelled_count = 0 + + for task in tasks: + task_id = task.get("task_id") + status = task.get("status") + + # Only cancel tasks that are not already completed or cancelled + if status not in [ProgressState.COMPLETE, ProgressState.CANCELLED]: + result = cancel_celery_task(task_id) + if result.get("status") == "cancelled": + cancelled_count += 1 + + return { + "status": "all_cancelled", + "cancelled_count": cancelled_count, + "total_tasks": len(tasks) + } + + def get_queue_status(self): + """ + Get the current status of the queue. + + Returns: + dict: Status information about the queue + """ + tasks = get_all_tasks() + + # Count tasks by status + running_count = 0 + pending_count = 0 + failed_count = 0 + + running_tasks = [] + failed_tasks = [] + + for task in tasks: + status = task.get("status") + + if status == ProgressState.PROCESSING: + running_count += 1 + running_tasks.append({ + "task_id": task.get("task_id"), + "name": task.get("name", "Unknown"), + "type": task.get("type", "unknown"), + "download_type": task.get("download_type", "unknown") + }) + elif status == ProgressState.QUEUED: + pending_count += 1 + elif status == ProgressState.ERROR: + failed_count += 1 + + # Get task info for retry information + task_info = get_task_info(task.get("task_id")) + last_status = get_last_task_status(task.get("task_id")) + + retry_count = 0 + if last_status: + retry_count = last_status.get("retry_count", 0) + + failed_tasks.append({ + "task_id": task.get("task_id"), + "name": task.get("name", "Unknown"), + "type": task.get("type", "unknown"), + "download_type": task.get("download_type", "unknown"), + "retry_count": retry_count + }) + + return { + "running": running_count, + "pending": pending_count, + "failed": failed_count, + "max_concurrent": self.max_concurrent, + "paused": self.paused, + "running_tasks": running_tasks, + "failed_tasks": failed_tasks + } + + def pause(self): + """Pause processing of new tasks.""" + self.paused = True + + # Get all queued tasks + tasks = get_all_tasks() + for task in tasks: + if task.get("status") == ProgressState.QUEUED: + # Update status to indicate the task is paused + store_task_status(task.get("task_id"), { + "status": ProgressState.QUEUED, + "paused": True, + "message": "Queue is paused, task will run when queue is resumed", + "timestamp": time.time() + }) + + logger.info("Download queue processing paused") + return {"status": "paused"} + + def resume(self): + """Resume processing of tasks.""" + self.paused = False + + # Get all queued tasks + tasks = get_all_tasks() + for task in tasks: + if task.get("status") == ProgressState.QUEUED: + task_id = task.get("task_id") + + # Get the task info + task_info = get_task_info(task_id) + if not task_info: + continue + + # Update status to indicate the task is no longer paused + store_task_status(task_id, { + "status": ProgressState.QUEUED, + "paused": False, + "message": "Queue resumed, task will run soon", + "timestamp": time.time() + }) + + # Reschedule the task to run immediately + download_type = task_info.get("download_type", "unknown") + + if download_type == "track": + download_track.apply_async( + kwargs=task_info, + task_id=task_id + ) + elif download_type == "album": + download_album.apply_async( + kwargs=task_info, + task_id=task_id + ) + elif download_type == "playlist": + download_playlist.apply_async( + kwargs=task_info, + task_id=task_id + ) + + logger.info("Download queue processing resumed") + return 
{"status": "resumed"}
+
+    def start(self):
+        """Start the queue manager (no-op for Celery implementation)."""
+        logger.info("Celery Download Queue Manager started")
+        return {"status": "started"}
+
+    def stop(self):
+        """Stop the queue manager (graceful shutdown)."""
+        logger.info("Celery Download Queue Manager stopping...")
+
+        # Let running tasks finish on their own; we only log the shutdown here
+
+        logger.info("Celery Download Queue Manager stopped")
+        return {"status": "stopped"}
+
+# Create the global instance
+download_queue_manager = CeleryDownloadQueueManager()
\ No newline at end of file
diff --git a/routes/utils/celery_tasks.py b/routes/utils/celery_tasks.py
new file mode 100644
index 0000000..215bb2b
--- /dev/null
+++ b/routes/utils/celery_tasks.py
@@ -0,0 +1,653 @@
+import time
+import json
+import uuid
+import logging
+import traceback
+from datetime import datetime
+from celery import Celery, Task, states
+from celery.signals import task_prerun, task_postrun, task_failure, worker_ready
+from celery.exceptions import Retry
+
+# Setup Redis and Celery
+from routes.utils.celery_config import REDIS_URL, REDIS_BACKEND, get_config_params
+
+# Configure logging
+logger = logging.getLogger(__name__)
+
+# Initialize Celery app
+celery_app = Celery('download_tasks',
+                    broker=REDIS_URL,
+                    backend=REDIS_BACKEND)
+
+# Load Celery config
+celery_app.config_from_object('routes.utils.celery_config')
+
+# Create Redis connection for storing task data that's not part of the Celery result backend
+import redis
+redis_client = redis.Redis.from_url(REDIS_URL)
+
+class ProgressState:
+    """Enum-like class for progress states"""
+    QUEUED = "queued"
+    PROCESSING = "processing"
+    COMPLETE = "complete"
+    ERROR = "error"
+    RETRYING = "retrying"
+    CANCELLED = "cancel"
+
+def store_task_status(task_id, status_data):
+    """Store task status information in Redis"""
+    # Add timestamp if not present
+    if 'timestamp' not in status_data:
+        status_data['timestamp'] = time.time()
+
+    # Convert to JSON and store in Redis
+    try:
+        redis_client.rpush(f"task:{task_id}:status", json.dumps(status_data))
+        # Set expiry for the list to avoid filling up Redis with old data
+        redis_client.expire(f"task:{task_id}:status", 60 * 60 * 24 * 7)  # 7 days
+    except Exception as e:
+        logger.error(f"Error storing task status: {e}")
+        traceback.print_exc()
+
+def get_task_status(task_id):
+    """Get all task status updates from Redis"""
+    try:
+        status_list = redis_client.lrange(f"task:{task_id}:status", 0, -1)
+        return [json.loads(s.decode('utf-8')) for s in status_list]
+    except Exception as e:
+        logger.error(f"Error getting task status: {e}")
+        return []
+
+def get_last_task_status(task_id):
+    """Get the most recent task status update from Redis"""
+    try:
+        last_status = redis_client.lindex(f"task:{task_id}:status", -1)
+        if last_status:
+            return json.loads(last_status.decode('utf-8'))
+        return None
+    except Exception as e:
+        logger.error(f"Error getting last task status: {e}")
+        return None
+
+def store_task_info(task_id, task_info):
+    """Store task information in Redis"""
+    try:
+        redis_client.set(f"task:{task_id}:info", json.dumps(task_info))
+        redis_client.expire(f"task:{task_id}:info", 60 * 60 * 24 * 7)  # 7 days
+    except Exception as e:
+        logger.error(f"Error storing task info: {e}")
+
+def get_task_info(task_id):
+    """Get task information from Redis"""
+    try:
+        task_info = redis_client.get(f"task:{task_id}:info")
+        if task_info:
+            return json.loads(task_info.decode('utf-8'))
+        return {}
+    except Exception as e:
+        logger.error(f"Error getting task info: {e}")
+        return {}
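+
+# Redis layout shared by the helpers above (one pair of keys per task):
+#   task:<task_id>:status -> list of JSON status snapshots (RPUSH, 7-day TTL)
+#   task:<task_id>:info   -> JSON blob of the task's parameters (7-day TTL)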
+
+def cancel_task(task_id):
+    """Cancel a task by its ID"""
+    try:
+        # Mark the task as cancelled in Redis
+        store_task_status(task_id, {
+            "status": ProgressState.CANCELLED,
+            "message": "Task cancelled by user",
+            "timestamp": time.time()
+        })
+
+        # Revoke the Celery task; terminate=True also stops it if it is already running
+        celery_app.control.revoke(task_id, terminate=True, signal='SIGTERM')
+
+        return {"status": "cancelled", "task_id": task_id}
+    except Exception as e:
+        logger.error(f"Error cancelling task {task_id}: {e}")
+        return {"status": "error", "message": str(e)}
+
+def retry_task(task_id):
+    """Retry a failed task"""
+    try:
+        # Get task info
+        task_info = get_task_info(task_id)
+        if not task_info:
+            return {"status": "error", "message": f"Task {task_id} not found"}
+
+        # Check if task has retry_count information
+        last_status = get_last_task_status(task_id)
+        if last_status and last_status.get("status") == "error":
+            # Get current retry count
+            retry_count = last_status.get("retry_count", 0)
+
+            # Get retry configuration from config
+            config_params = get_config_params()
+            max_retries = config_params.get('maxRetries', 3)
+            initial_retry_delay = config_params.get('retryDelaySeconds', 5)
+            retry_delay_increase = config_params.get('retry_delay_increase', 5)
+
+            # Check if we've exceeded max retries
+            if retry_count >= max_retries:
+                return {
+                    "status": "error",
+                    "message": f"Maximum retry attempts ({max_retries}) exceeded"
+                }
+
+            # Calculate retry delay
+            retry_delay = initial_retry_delay + (retry_count * retry_delay_increase)
+
+            # Create a new task_id for the retry
+            new_task_id = f"{task_id}_retry{retry_count + 1}"
+
+            # Update task info for the retry
+            task_info["retry_count"] = retry_count + 1
+            task_info["retry_of"] = task_id
+
+            # Get the service and fallback configuration from config
+            service = config_params.get("service")
+            fallback_enabled = config_params.get("fallback", False)
+
+            # Update main, fallback, and quality parameters based on service and fallback setting
+            if service == 'spotify':
+                if fallback_enabled:
+                    # If fallback is enabled with Spotify service:
+                    # - main becomes the Deezer account
+                    # - fallback becomes the Spotify account
+                    task_info["main"] = config_params.get("deezer", "")
+                    task_info["fallback"] = config_params.get("spotify", "")
+                    task_info["quality"] = config_params.get("deezerQuality", "MP3_128")
+                    task_info["fall_quality"] = config_params.get("spotifyQuality", "NORMAL")
+                else:
+                    # If fallback is disabled with Spotify service:
+                    # - main is the Spotify account
+                    # - no fallback
+                    task_info["main"] = config_params.get("spotify", "")
+                    task_info["fallback"] = None
+                    task_info["quality"] = config_params.get("spotifyQuality", "NORMAL")
+                    task_info["fall_quality"] = None
+            elif service == 'deezer':
+                # For Deezer service:
+                # - main is the Deezer account
+                # - no fallback (even if enabled in config)
+                task_info["main"] = config_params.get("deezer", "")
+                task_info["fallback"] = None
+                task_info["quality"] = config_params.get("deezerQuality", "MP3_128")
+                task_info["fall_quality"] = None
+            else:
+                # Default to Spotify if unknown service
+                task_info["main"] = config_params.get("spotify", "")
+                task_info["fallback"] = None
+                task_info["quality"] = config_params.get("spotifyQuality", "NORMAL")
+                task_info["fall_quality"] = None
+
+            # Ensure service comes from config for the retry
+            task_info["service"] = service
+
+            # Update other config-derived parameters
+            task_info["real_time"] = task_info.get("real_time", config_params.get("realTime", False))
+            task_info["custom_dir_format"] = task_info.get("custom_dir_format", config_params.get("customDirFormat", "%ar_album%/%album%"))
+            task_info["custom_track_format"] = task_info.get("custom_track_format", config_params.get("customTrackFormat", "%tracknum%. %music%"))
+            task_info["pad_tracks"] = task_info.get("pad_tracks", config_params.get("tracknum_padding", True))
+
+            # Store the updated task info
+            store_task_info(new_task_id, task_info)
+
+            # Create a queued status
+            store_task_status(new_task_id, {
+                "status": ProgressState.QUEUED,
+                "type": task_info.get("type", "unknown"),
+                "name": task_info.get("name", "Unknown"),
+                "artist": task_info.get("artist", ""),
+                "retry_count": retry_count + 1,
+                "max_retries": max_retries,
+                "retry_delay": retry_delay,
+                "timestamp": time.time()
+            })
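+
+            # Honour the advertised delay: schedule the retried task with a
+            # countdown so it does not start immediately (assumed intent of
+            # the retry_delay computed above).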
+
+            # Launch the appropriate task based on download_type
+            download_type = task_info.get("download_type", "unknown")
+            task = None
+
+            if download_type == "track":
+                task = download_track.apply_async(
+                    kwargs=task_info,
+                    task_id=new_task_id,
+                    queue='downloads',
+                    countdown=retry_delay
+                )
+            elif download_type == "album":
+                task = download_album.apply_async(
+                    kwargs=task_info,
+                    task_id=new_task_id,
+                    queue='downloads',
+                    countdown=retry_delay
+                )
+            elif download_type == "playlist":
+                task = download_playlist.apply_async(
+                    kwargs=task_info,
+                    task_id=new_task_id,
+                    queue='downloads',
+                    countdown=retry_delay
+                )
+            else:
+                return {
+                    "status": "error",
+                    "message": f"Unknown download type: {download_type}"
+                }
+
+            return {
+                "status": "requeued",
+                "task_id": new_task_id,
+                "retry_count": retry_count + 1,
+                "max_retries": max_retries,
+                "retry_delay": retry_delay
+            }
+        else:
+            return {
+                "status": "error",
+                "message": "Task is not in a failed state"
+            }
+    except Exception as e:
+        logger.error(f"Error retrying task {task_id}: {e}")
+        traceback.print_exc()
+        return {"status": "error", "message": str(e)}
+
+def get_all_tasks():
+    """Get all active task IDs"""
+    try:
+        # Get all keys matching the task info pattern
+        task_keys = redis_client.keys("task:*:info")
+
+        # Extract task IDs from the keys
+        task_ids = [key.decode('utf-8').split(':')[1] for key in task_keys]
+
+        # Get info for each task
+        tasks = []
+        for task_id in task_ids:
+            task_info = get_task_info(task_id)
+            last_status = get_last_task_status(task_id)
+
+            if task_info and last_status:
+                tasks.append({
+                    "task_id": task_id,
+                    "type": task_info.get("type", "unknown"),
+                    "name": task_info.get("name", "Unknown"),
+                    "artist": task_info.get("artist", ""),
+                    "download_type": task_info.get("download_type", "unknown"),
+                    "status": last_status.get("status", "unknown"),
+                    "timestamp": last_status.get("timestamp", 0)
+                })
+
+        return tasks
+    except Exception as e:
+        logger.error(f"Error getting all tasks: {e}")
+        return []
+
+class ProgressTrackingTask(Task):
+    """Base task class that tracks progress through callbacks"""
+
+    def progress_callback(self, progress_data):
+        """
+        Process progress data from deezspot library callbacks
+
+        Args:
+            progress_data: Dictionary containing progress information
+        """
+        task_id = self.request.id
+
+        # Add timestamp if not present
+        if 'timestamp' not in progress_data:
+            progress_data['timestamp'] = time.time()
+
+        # Store the progress update in Redis as-is; status values emitted by
+        # the library are persisted without remapping
+        store_task_status(task_id, progress_data)
+
+        # Log the progress update
+        logger.info(f"Task {task_id} progress: {progress_data}")
+
+# Celery signal handlers
+@task_prerun.connect
+def 
task_prerun_handler(task_id=None, task=None, *args, **kwargs): + """Signal handler when a task begins running""" + try: + # Get task info from Redis + task_info = get_task_info(task_id) + + # Update task status to processing + store_task_status(task_id, { + "status": ProgressState.PROCESSING, + "timestamp": time.time(), + "type": task_info.get("type", "unknown"), + "name": task_info.get("name", "Unknown"), + "artist": task_info.get("artist", "") + }) + + logger.info(f"Task {task_id} started processing: {task_info.get('name', 'Unknown')}") + except Exception as e: + logger.error(f"Error in task_prerun_handler: {e}") + +@task_postrun.connect +def task_postrun_handler(task_id=None, task=None, retval=None, state=None, *args, **kwargs): + """Signal handler when a task finishes""" + try: + # Skip if task is already marked as complete or error in Redis + last_status = get_last_task_status(task_id) + if last_status and last_status.get("status") in [ProgressState.COMPLETE, ProgressState.ERROR]: + return + + # Get task info from Redis + task_info = get_task_info(task_id) + + # Update task status based on Celery task state + if state == states.SUCCESS: + store_task_status(task_id, { + "status": ProgressState.COMPLETE, + "timestamp": time.time(), + "type": task_info.get("type", "unknown"), + "name": task_info.get("name", "Unknown"), + "artist": task_info.get("artist", ""), + "message": "Download completed successfully." + }) + logger.info(f"Task {task_id} completed successfully: {task_info.get('name', 'Unknown')}") + except Exception as e: + logger.error(f"Error in task_postrun_handler: {e}") + +@task_failure.connect +def task_failure_handler(task_id=None, exception=None, traceback=None, *args, **kwargs): + """Signal handler when a task fails""" + try: + # Skip if Retry exception (will be handled by the retry mechanism) + if isinstance(exception, Retry): + return + + # Get task info and last status from Redis + task_info = get_task_info(task_id) + last_status = get_last_task_status(task_id) + + # Get retry count + retry_count = 0 + if last_status: + retry_count = last_status.get("retry_count", 0) + + # Get retry configuration + config_params = get_config_params() + max_retries = config_params.get('maxRetries', 3) + + # Check if we can retry + can_retry = retry_count < max_retries + + # Update task status to error + store_task_status(task_id, { + "status": ProgressState.ERROR, + "timestamp": time.time(), + "type": task_info.get("type", "unknown"), + "name": task_info.get("name", "Unknown"), + "artist": task_info.get("artist", ""), + "error": str(exception), + "traceback": str(traceback), + "can_retry": can_retry, + "retry_count": retry_count, + "max_retries": max_retries, + "message": f"Error: {str(exception)}" + }) + + logger.error(f"Task {task_id} failed: {str(exception)}") + except Exception as e: + logger.error(f"Error in task_failure_handler: {e}") + +@worker_ready.connect +def worker_ready_handler(**kwargs): + """Signal handler when a worker starts up""" + logger.info("Celery worker ready and listening for tasks") + + # Check Redis connection + try: + redis_client.ping() + logger.info("Redis connection successful") + except Exception as e: + logger.error(f"Redis connection failed: {e}") + +# Define the download tasks +@celery_app.task(bind=True, base=ProgressTrackingTask, name="download_track", queue="downloads") +def download_track(self, **task_data): + """ + Task to download a track + + Args: + **task_data: Dictionary containing all task parameters + """ + try: + logger.info(f"Processing track 
download task: {task_data.get('name', 'Unknown')}")
+        from routes.utils.track import download_track as download_track_func
+
+        # Get config parameters including service
+        config_params = get_config_params()
+
+        # Get the service from config
+        service = config_params.get("service")
+
+        # Determine main, fallback, and quality parameters based on service and fallback setting
+        fallback_enabled = config_params.get("fallback", False)
+
+        if service == 'spotify':
+            if fallback_enabled:
+                # If fallback is enabled with Spotify service:
+                # - main becomes the Deezer account
+                # - fallback becomes the Spotify account
+                main = config_params.get("deezer", "")
+                fallback = config_params.get("spotify", "")
+                quality = config_params.get("deezerQuality", "MP3_128")
+                fall_quality = config_params.get("spotifyQuality", "NORMAL")
+            else:
+                # If fallback is disabled with Spotify service:
+                # - main is the Spotify account
+                # - no fallback
+                main = config_params.get("spotify", "")
+                fallback = None
+                quality = config_params.get("spotifyQuality", "NORMAL")
+                fall_quality = None
+        elif service == 'deezer':
+            # For Deezer service:
+            # - main is the Deezer account
+            # - no fallback (even if enabled in config)
+            main = config_params.get("deezer", "")
+            fallback = None
+            quality = config_params.get("deezerQuality", "MP3_128")
+            fall_quality = None
+        else:
+            # Default to Spotify if unknown service
+            main = config_params.get("spotify", "")
+            fallback = None
+            quality = config_params.get("spotifyQuality", "NORMAL")
+            fall_quality = None
+
+        # Get remaining parameters from task_data or config
+        url = task_data.get("url", "")
+        real_time = task_data.get("real_time", config_params.get("realTime", False))
+        custom_dir_format = task_data.get("custom_dir_format", config_params.get("customDirFormat", "%ar_album%/%album%"))
+        custom_track_format = task_data.get("custom_track_format", config_params.get("customTrackFormat", "%tracknum%. %music%"))
+        pad_tracks = task_data.get("pad_tracks", config_params.get("tracknum_padding", True))
+
+        # Execute the download function with progress callback
+        download_track_func(
+            service=service,
+            url=url,
+            main=main,
+            fallback=fallback,
+            quality=quality,
+            fall_quality=fall_quality,
+            real_time=real_time,
+            custom_dir_format=custom_dir_format,
+            custom_track_format=custom_track_format,
+            pad_tracks=pad_tracks,
+            progress_callback=self.progress_callback  # Pass the callback from our ProgressTrackingTask
+        )
+
+        return {"status": "success", "message": "Track download completed"}
+    except Exception as e:
+        logger.error(f"Error in download_track task: {e}")
+        traceback.print_exc()
+        raise
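+
+# download_album and download_playlist below intentionally mirror the
+# account-resolution branching used in download_track: the configured
+# service supplies the "main" credentials, and the Spotify-with-fallback
+# combination swaps Deezer in as main with the Spotify account as backup.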
+
+@celery_app.task(bind=True, base=ProgressTrackingTask, name="download_album", queue="downloads")
+def download_album(self, **task_data):
+    """
+    Task to download an album
+
+    Args:
+        **task_data: Dictionary containing all task parameters
+    """
+    try:
+        logger.info(f"Processing album download task: {task_data.get('name', 'Unknown')}")
+        from routes.utils.album import download_album as download_album_func
+
+        # Get config parameters including service
+        config_params = get_config_params()
+
+        # Get the service from config
+        service = config_params.get("service")
+
+        # Determine main, fallback, and quality parameters based on service and fallback setting
+        fallback_enabled = config_params.get("fallback", False)
+
+        if service == 'spotify':
+            if fallback_enabled:
+                # If fallback is enabled with Spotify service:
+                # - main becomes the Deezer account
+                # - fallback becomes the Spotify account
+                main = config_params.get("deezer", "")
+                fallback = config_params.get("spotify", "")
+                quality = config_params.get("deezerQuality", "MP3_128")
+                fall_quality = config_params.get("spotifyQuality", "NORMAL")
+            else:
+                # If fallback is disabled with Spotify service:
+                # - main is the Spotify account
+                # - no fallback
+                main = config_params.get("spotify", "")
+                fallback = None
+                quality = config_params.get("spotifyQuality", "NORMAL")
+                fall_quality = None
+        elif service == 'deezer':
+            # For Deezer service:
+            # - main is the Deezer account
+            # - no fallback (even if enabled in config)
+            main = config_params.get("deezer", "")
+            fallback = None
+            quality = config_params.get("deezerQuality", "MP3_128")
+            fall_quality = None
+        else:
+            # Default to Spotify if unknown service
+            main = config_params.get("spotify", "")
+            fallback = None
+            quality = config_params.get("spotifyQuality", "NORMAL")
+            fall_quality = None
+
+        # Get remaining parameters from task_data or config
+        url = task_data.get("url", "")
+        real_time = task_data.get("real_time", config_params.get("realTime", False))
+        custom_dir_format = task_data.get("custom_dir_format", config_params.get("customDirFormat", "%ar_album%/%album%"))
+        custom_track_format = task_data.get("custom_track_format", config_params.get("customTrackFormat", "%tracknum%. 
%music%")) + pad_tracks = task_data.get("pad_tracks", config_params.get("tracknum_padding", True)) + + # Execute the download function with progress callback + download_album_func( + service=service, + url=url, + main=main, + fallback=fallback, + quality=quality, + fall_quality=fall_quality, + real_time=real_time, + custom_dir_format=custom_dir_format, + custom_track_format=custom_track_format, + pad_tracks=pad_tracks, + progress_callback=self.progress_callback # Pass the callback from our ProgressTrackingTask + ) + + return {"status": "success", "message": "Album download completed"} + except Exception as e: + logger.error(f"Error in download_album task: {e}") + traceback.print_exc() + raise + +@celery_app.task(bind=True, base=ProgressTrackingTask, name="download_playlist", queue="downloads") +def download_playlist(self, **task_data): + """ + Task to download a playlist + + Args: + **task_data: Dictionary containing all task parameters + """ + try: + logger.info(f"Processing playlist download task: {task_data.get('name', 'Unknown')}") + from routes.utils.playlist import download_playlist as download_playlist_func + + # Get config parameters including service + config_params = get_config_params() + + # Get the service from config + service = config_params.get("service") + + # Determine main, fallback, and quality parameters based on service and fallback setting + fallback_enabled = config_params.get("fallback", False) + + if service == 'spotify': + if fallback_enabled: + # If fallback is enabled with Spotify service: + # - main becomes the Deezer account + # - fallback becomes the Spotify account + main = config_params.get("deezer", "") + fallback = config_params.get("spotify", "") + quality = config_params.get("deezerQuality", "MP3_128") + fall_quality = config_params.get("spotifyQuality", "NORMAL") + else: + # If fallback is disabled with Spotify service: + # - main is the Spotify account + # - no fallback + main = config_params.get("spotify", "") + fallback = None + quality = config_params.get("spotifyQuality", "NORMAL") + fall_quality = None + elif service == 'deezer': + # For Deezer service: + # - main is the Deezer account + # - no fallback (even if enabled in config) + main = config_params.get("deezer", "") + fallback = None + quality = config_params.get("deezerQuality", "MP3_128") + fall_quality = None + else: + # Default to Spotify if unknown service + main = config_params.get("spotify", "") + fallback = None + quality = config_params.get("spotifyQuality", "NORMAL") + fall_quality = None + + # Get remaining parameters from task_data or config + url = task_data.get("url", "") + real_time = task_data.get("real_time", config_params.get("realTime", False)) + custom_dir_format = task_data.get("custom_dir_format", config_params.get("customDirFormat", "%ar_album%/%album%")) + custom_track_format = task_data.get("custom_track_format", config_params.get("customTrackFormat", "%tracknum%. 
%music%")) + pad_tracks = task_data.get("pad_tracks", config_params.get("tracknum_padding", True)) + + # Execute the download function with progress callback + download_playlist_func( + service=service, + url=url, + main=main, + fallback=fallback, + quality=quality, + fall_quality=fall_quality, + real_time=real_time, + custom_dir_format=custom_dir_format, + custom_track_format=custom_track_format, + pad_tracks=pad_tracks, + progress_callback=self.progress_callback # Pass the callback from our ProgressTrackingTask + ) + + return {"status": "success", "message": "Playlist download completed"} + except Exception as e: + logger.error(f"Error in download_playlist task: {e}") + traceback.print_exc() + raise \ No newline at end of file diff --git a/routes/utils/playlist.py b/routes/utils/playlist.py index e008489..9c05a2b 100755 --- a/routes/utils/playlist.py +++ b/routes/utils/playlist.py @@ -18,7 +18,8 @@ def download_playlist( pad_tracks=True, initial_retry_delay=5, retry_delay_increase=5, - max_retries=3 + max_retries=3, + progress_callback=None ): try: # Load Spotify client credentials if available @@ -51,7 +52,8 @@ def download_playlist( dl = DeeLogin( arl=deezer_creds.get('arl', ''), spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) # Download using download_playlistspo; pass the custom formatting parameters. dl.download_playlistspo( @@ -92,7 +94,8 @@ def download_playlist( spo = SpoLogin( credentials_path=spo_creds_path, spotify_client_id=fallback_client_id, - spotify_client_secret=fallback_client_secret + spotify_client_secret=fallback_client_secret, + progress_callback=progress_callback ) spo.download_playlist( link_playlist=url, @@ -126,7 +129,8 @@ def download_playlist( spo = SpoLogin( credentials_path=credentials_path, spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) spo.download_playlist( link_playlist=url, @@ -156,7 +160,8 @@ def download_playlist( dl = DeeLogin( arl=creds.get('arl', ''), spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) dl.download_playlistdee( link_playlist=url, diff --git a/routes/utils/queue.py b/routes/utils/queue.py deleted file mode 100644 index 1a8a0eb..0000000 --- a/routes/utils/queue.py +++ /dev/null @@ -1,1213 +0,0 @@ -import os -import sys -import json -import time -import string -import random -import traceback -import threading -import signal -import atexit -from multiprocessing import Process, Event -from queue import Queue, Empty - -# ------------------------------------------------------------------------------ -# Configuration -# ------------------------------------------------------------------------------ - -# Load configuration from ./config/main.json and get the max_concurrent_dl value. 
-CONFIG_PATH = './config/main.json' -try: - with open(CONFIG_PATH, 'r') as f: - config_data = json.load(f) - MAX_CONCURRENT_DL = config_data.get("maxConcurrentDownloads", 3) - MAX_RETRIES = config_data.get("maxRetries", 3) - RETRY_DELAY = config_data.get("retryDelaySeconds", 5) - RETRY_DELAY_INCREASE = config_data.get("retry_delay_increase", 5) - # Hardcode the queue state file to be in the config/state directory - QUEUE_STATE_FILE = "./config/state/queue_state.json" -except Exception as e: - print(f"Error loading configuration: {e}") - # Fallback to default values if there's an error reading the config. - MAX_CONCURRENT_DL = 3 - MAX_RETRIES = 3 - RETRY_DELAY = 5 - RETRY_DELAY_INCREASE = 5 - QUEUE_STATE_FILE = "./config/state/queue_state.json" - -PRG_DIR = './prgs' # directory where .prg files will be stored - -# ------------------------------------------------------------------------------ -# Utility Functions and Classes -# ------------------------------------------------------------------------------ - -def get_config_params(): - """ - Get common download parameters from the config file. - This centralizes parameter retrieval and reduces redundancy in API calls. - - Returns: - dict: A dictionary containing common parameters from config - """ - try: - with open(CONFIG_PATH, 'r') as f: - config = json.load(f) - - return { - 'spotify': config.get('spotify', ''), - 'deezer': config.get('deezer', ''), - 'fallback': config.get('fallback', False), - 'spotifyQuality': config.get('spotifyQuality', 'NORMAL'), - 'deezerQuality': config.get('deezerQuality', 'MP3_128'), - 'realTime': config.get('realTime', False), - 'customDirFormat': config.get('customDirFormat', '%ar_album%/%album%'), - 'customTrackFormat': config.get('customTrackFormat', '%tracknum%. %music%'), - 'tracknum_padding': config.get('tracknum_padding', True), - 'maxRetries': config.get('maxRetries', 3), - 'retryDelaySeconds': config.get('retryDelaySeconds', 5), - 'retry_delay_increase': config.get('retry_delay_increase', 5) - } - except Exception as e: - print(f"Error reading config for parameters: {e}") - # Return defaults if config read fails - return { - 'spotify': '', - 'deezer': '', - 'fallback': False, - 'spotifyQuality': 'NORMAL', - 'deezerQuality': 'MP3_128', - 'realTime': False, - 'customDirFormat': '%ar_album%/%album%', - 'customTrackFormat': '%tracknum%. %music%', - 'tracknum_padding': True, - 'maxRetries': 3, - 'retryDelaySeconds': 5, - 'retry_delay_increase': 5 - } - -def generate_random_filename(length=6, extension=".prg"): - """Generate a random filename with the given extension.""" - chars = string.ascii_lowercase + string.digits - return ''.join(random.choice(chars) for _ in range(length)) + extension - -class FlushingFileWrapper: - """ - A file wrapper that flushes after writing each line and - skips lines whose JSON content has a "type" of "track". 
- """ - def __init__(self, file): - self.file = file - - def write(self, text): - for line in text.split('\n'): - line = line.strip() - if line and line.startswith('{'): - try: - obj = json.loads(line) - if obj.get("type") == "track": - continue # skip lines that represent track messages - except ValueError: - pass # not valid JSON; write the line as is - if line: # Only write non-empty lines - try: - self.file.write(line + '\n') - self.file.flush() - except (IOError, OSError) as e: - print(f"Error writing to file: {e}") - - def flush(self): - try: - self.file.flush() - except (IOError, OSError) as e: - print(f"Error flushing file: {e}") - - def close(self): - """ - Close the underlying file object. - """ - try: - self.file.flush() - self.file.close() - except (IOError, OSError) as e: - print(f"Error closing file: {e}") - -def handle_termination(signum, frame): - """ - Signal handler for graceful termination of download processes. - Called when a SIGTERM signal is received. - - Args: - signum: The signal number - frame: The current stack frame - """ - try: - print(f"Process received termination signal {signum}") - sys.exit(0) - except Exception as e: - print(f"Error during termination: {e}") - sys.exit(1) - -class StdoutRedirector: - """ - Class that redirects stdout/stderr to a file. - All print statements will be captured and written directly to the target file. - """ - def __init__(self, file_wrapper): - self.file_wrapper = file_wrapper - - def write(self, message): - if message and not message.isspace(): - # Pass the message directly without wrapping it in JSON - self.file_wrapper.write(message.rstrip()) - - def flush(self): - self.file_wrapper.flush() - -def run_download_task(task, prg_path, stop_event=None): - """ - Process a download task based on its type (album, track, playlist, artist). - This function is run in a separate process. - - Args: - task (dict): The task details - prg_path (str): Path to the .prg file for progress updates - stop_event (threading.Event, optional): Used to signal the process to stop gracefully. - """ - # Register signal handler for graceful termination - signal.signal(signal.SIGTERM, handle_termination) - - # Extract common parameters from the task - download_type = task.get("download_type", "unknown") - service = task.get("service", "") - url = task.get("url", "") - main = task.get("main", "") - fallback = task.get("fallback", None) - quality = task.get("quality", None) - fall_quality = task.get("fall_quality", None) - real_time = task.get("real_time", False) - custom_dir_format = task.get("custom_dir_format", "%ar_album%/%album%/%copyright%") - custom_track_format = task.get("custom_track_format", "%tracknum%. 
%music% - %artist%") - pad_tracks = task.get("pad_tracks", True) - - # Extract retry configuration parameters from the task or use defaults - max_retries = task.get("max_retries", MAX_RETRIES) - initial_retry_delay = task.get("initial_retry_delay", RETRY_DELAY) - retry_delay_increase = task.get("retry_delay_increase", RETRY_DELAY_INCREASE) - - # Get the current retry count (or 0 if not set) - retry_count = task.get("retry_count", 0) - - # Calculate current retry delay based on the retry count - current_retry_delay = initial_retry_delay + (retry_count * retry_delay_increase) - - # Initialize variables for cleanup in finally block - wrapper = None - original_stdout = sys.stdout - original_stderr = sys.stderr - - try: - # Initialize a FlushingFileWrapper for real-time progress updates - try: - prg_file = open(prg_path, 'a') - wrapper = FlushingFileWrapper(prg_file) - except Exception as e: - print(f"Error opening PRG file {prg_path}: {e}") - return - - # If this is a retry, log the retry and delay - if retry_count > 0: - wrapper.write(json.dumps({ - "status": "retrying", - "retry_count": retry_count, - "max_retries": max_retries, - "retry_delay": current_retry_delay, - "timestamp": time.time(), - "message": f"Retry attempt {retry_count}/{max_retries} after {current_retry_delay}s delay" - }) + "\n") - - # Sleep for the calculated delay before attempting retry - time.sleep(current_retry_delay) - - # Redirect stdout and stderr to the progress file - stdout_redirector = StdoutRedirector(wrapper) - sys.stdout = stdout_redirector - sys.stderr = stdout_redirector - - # Check for early termination - if stop_event and stop_event.is_set(): - wrapper.write(json.dumps({ - "status": "interrupted", - "message": "Task was interrupted before starting the download", - "timestamp": time.time() - }) + "\n") - return - - # Dispatch to the appropriate download function based on download_type - if download_type == "track": - from routes.utils.track import download_track - download_track( - service=service, - url=url, - main=main, - fallback=fallback, - quality=quality, - fall_quality=fall_quality, - real_time=real_time, - custom_dir_format=custom_dir_format, - custom_track_format=custom_track_format, - pad_tracks=pad_tracks, - initial_retry_delay=initial_retry_delay, - retry_delay_increase=retry_delay_increase, - max_retries=max_retries - ) - elif download_type == "album": - from routes.utils.album import download_album - download_album( - service=service, - url=url, - main=main, - fallback=fallback, - quality=quality, - fall_quality=fall_quality, - real_time=real_time, - custom_dir_format=custom_dir_format, - custom_track_format=custom_track_format, - pad_tracks=pad_tracks, - initial_retry_delay=initial_retry_delay, - retry_delay_increase=retry_delay_increase, - max_retries=max_retries - ) - elif download_type == "playlist": - from routes.utils.playlist import download_playlist - download_playlist( - service=service, - url=url, - main=main, - fallback=fallback, - quality=quality, - fall_quality=fall_quality, - real_time=real_time, - custom_dir_format=custom_dir_format, - custom_track_format=custom_track_format, - pad_tracks=pad_tracks, - initial_retry_delay=initial_retry_delay, - retry_delay_increase=retry_delay_increase, - max_retries=max_retries - ) - else: - wrapper.write(json.dumps({ - "status": "error", - "message": f"Unsupported download type: {download_type}", - "can_retry": False, - "timestamp": time.time() - }) + "\n") - return - - # If we got here, the download completed successfully - 
wrapper.write(json.dumps({ - "status": "complete", - "message": f"Download completed successfully.", - "timestamp": time.time() - }) + "\n") - - except Exception as e: - if wrapper: - traceback.print_exc() - - # Check if we can retry the task - can_retry = retry_count < max_retries - - # Log the error and if it can be retried - try: - wrapper.write(json.dumps({ - "status": "error", - "error": str(e), - "traceback": traceback.format_exc(), - "can_retry": can_retry, - "retry_count": retry_count, - "max_retries": max_retries, - "retry_delay": current_retry_delay + retry_delay_increase if can_retry else None, - "timestamp": time.time(), - "message": f"Error: {str(e)}" - }) + "\n") - except Exception as inner_error: - print(f"Error writing error status to PRG file: {inner_error}") - else: - print(f"Error in download task (wrapper not available): {e}") - traceback.print_exc() - finally: - # Restore original stdout and stderr - sys.stdout = original_stdout - sys.stderr = original_stderr - - # Safely clean up wrapper and file - if wrapper: - try: - wrapper.flush() - wrapper.close() - except Exception as e: - print(f"Error closing wrapper: {e}") - - # Try to close the underlying file directly if wrapper close fails - try: - if hasattr(wrapper, 'file') and wrapper.file and not wrapper.file.closed: - wrapper.file.close() - except Exception as file_error: - print(f"Error directly closing file: {file_error}") - -# ------------------------------------------------------------------------------ -# Download Queue Manager Class -# ------------------------------------------------------------------------------ - -class DownloadQueueManager: - """ - Manages a queue of download tasks, ensuring that no more than - MAX_CONCURRENT_DL downloads run concurrently. - """ - def __init__(self, max_concurrent=MAX_CONCURRENT_DL, prg_dir=PRG_DIR): - self.max_concurrent = max_concurrent - self.prg_dir = prg_dir - os.makedirs(self.prg_dir, exist_ok=True) - - self.pending_tasks = Queue() # holds tasks waiting to run - self.running_downloads = {} # maps prg_filename -> (Process instance, task data, stop_event) - self.cancelled_tasks = set() # holds prg_filenames of tasks that have been cancelled - self.failed_tasks = {} # maps prg_filename -> (task data, failure count) - self.lock = threading.Lock() # protects access to shared data structures - self.worker_thread = threading.Thread(target=self.queue_worker, daemon=True) - self.running = False - self.paused = False - - # Print manager configuration for debugging - print(f"Download Queue Manager initialized with max_concurrent={self.max_concurrent}, using prg_dir={self.prg_dir}") - - # Load persisted queue state if available - self.load_queue_state() - - # Register cleanup on application exit - atexit.register(self.cleanup) - - def start(self): - """Start the worker thread that monitors the queue.""" - self.running = True - self.worker_thread.start() - print("Download queue manager started") - - def pause(self): - """Pause processing of new tasks.""" - self.paused = True - print("Download queue processing paused") - - def resume(self): - """Resume processing of tasks.""" - self.paused = False - print("Download queue processing resumed") - - def stop(self): - """Stop the worker thread gracefully.""" - print("Stopping download queue manager...") - self.running = False - self.save_queue_state() - - # Wait for the worker thread to finish - if self.worker_thread.is_alive(): - self.worker_thread.join(timeout=5) - - # Clean up any running processes - self.terminate_all_downloads() - 
print("Download queue manager stopped") - - def cleanup(self): - """Clean up resources when the application exits.""" - if self.running: - self.stop() - - def save_queue_state(self): - """Save the current queue state to a file for persistence.""" - try: - # Build a serializable state object - with self.lock: - # Get current pending tasks (without removing them) - pending_tasks = [] - with self.pending_tasks.mutex: - for item in list(self.pending_tasks.queue): - prg_filename, task = item - pending_tasks.append({"prg_filename": prg_filename, "task": task}) - - # Get failed tasks - failed_tasks = {} - for prg_filename, (task, retry_count) in self.failed_tasks.items(): - failed_tasks[prg_filename] = {"task": task, "retry_count": retry_count} - - state = { - "pending_tasks": pending_tasks, - "failed_tasks": failed_tasks, - "cancelled_tasks": list(self.cancelled_tasks) - } - - # Write state to file - with open(QUEUE_STATE_FILE, 'w') as f: - json.dump(state, f) - except Exception as e: - print(f"Error saving queue state: {e}") - - def load_queue_state(self): - """Load queue state from a persistent file if available.""" - try: - if os.path.exists(QUEUE_STATE_FILE): - with open(QUEUE_STATE_FILE, 'r') as f: - state = json.load(f) - - # Restore state - with self.lock: - # Restore pending tasks - for task_info in state.get("pending_tasks", []): - self.pending_tasks.put((task_info["prg_filename"], task_info["task"])) - - # Restore failed tasks - for prg_filename, task_info in state.get("failed_tasks", {}).items(): - self.failed_tasks[prg_filename] = (task_info["task"], task_info["retry_count"]) - - # Restore cancelled tasks - self.cancelled_tasks = set(state.get("cancelled_tasks", [])) - - print(f"Loaded queue state: {len(state.get('pending_tasks', []))} pending tasks, {len(state.get('failed_tasks', {}))} failed tasks") - except Exception as e: - print(f"Error loading queue state: {e}") - - def add_task(self, task): - """ - Adds a new download task to the queue. - The task is expected to be a dictionary with all necessary parameters, - including a "download_type" key (album, track, playlist, or artist). - - A .prg file is created for progress logging with an initial entries: - 1. The original request (merged with the extra keys: type, name, artist) - 2. A queued status entry (including type, name, artist, and the task's position in the queue) - - Returns the generated prg filename so that the caller can later check the status or request cancellation. - """ - download_type = task.get("download_type", "unknown") - - # Determine the new task's position by scanning the PRG_DIR for files matching the naming scheme. - existing_positions = [] - try: - for filename in os.listdir(self.prg_dir): - if filename.startswith(f"{download_type}_") and filename.endswith(".prg"): - try: - # Filename format: download_type_.prg - number_part = filename[len(download_type) + 1:-4] - pos_num = int(number_part) - existing_positions.append(pos_num) - except ValueError: - continue # Skip files that do not conform to the naming scheme. - except Exception as e: - print(f"Error scanning directory: {e}") - # If we can't scan the directory, generate a random filename instead - return self._add_task_with_random_filename(task) - - position = max(existing_positions, default=0) + 1 - - # Generate the prg filename based on the download type and determined position. 
- prg_filename = f"{download_type}_{position}.prg" - prg_path = os.path.join(self.prg_dir, prg_filename) - task['prg_path'] = prg_path - - # Initialize retry count and add retry parameters - task['retry_count'] = 0 - - # Get retry configuration from config, or use what's provided in the task - config_params = get_config_params() - task['max_retries'] = task.get('max_retries', config_params.get('maxRetries', MAX_RETRIES)) - task['initial_retry_delay'] = task.get('initial_retry_delay', config_params.get('retryDelaySeconds', RETRY_DELAY)) - task['retry_delay_increase'] = task.get('retry_delay_increase', config_params.get('retry_delay_increase', RETRY_DELAY_INCREASE)) - - # Create and immediately write the initial entries to the .prg file. - try: - with open(prg_path, 'w') as f: - # Merge extra keys into the original request. - original_request = task.get("orig_request", {}).copy() - - # Add essential metadata for retry operations - original_request["download_type"] = download_type - - # Ensure key information is included - for key in ["type", "name", "artist", "service", "url"]: - if key in task and key not in original_request: - original_request[key] = task[key] - - # Add API endpoint information - if "endpoint" not in original_request: - original_request["endpoint"] = f"/api/{download_type}/download" - - # Add explicit display information for the frontend - original_request["display_title"] = task.get("name", original_request.get("name", "Unknown")) - original_request["display_type"] = task.get("type", original_request.get("type", download_type)) - original_request["display_artist"] = task.get("artist", original_request.get("artist", "")) - - # Write the first entry - the enhanced original request params - f.write(json.dumps(original_request) + "\n") - - # Write the second entry - the queued status - f.write(json.dumps({ - "status": "queued", - "timestamp": time.time(), - "type": task.get("type", ""), - "name": task.get("name", ""), - "artist": task.get("artist", ""), - "retry_count": 0, - "max_retries": task.get('max_retries', MAX_RETRIES), - "initial_retry_delay": task.get('initial_retry_delay', RETRY_DELAY), - "retry_delay_increase": task.get('retry_delay_increase', RETRY_DELAY_INCREASE), - "queue_position": self.pending_tasks.qsize() + 1 - }) + "\n") - except Exception as e: - print(f"Error writing to PRG file: {e}") - # If we can't create the file, try with a random filename - return self._add_task_with_random_filename(task) - - # Add the task to the pending queue - self.pending_tasks.put((prg_filename, task)) - self.save_queue_state() - - print(f"Added task {prg_filename} to download queue") - return prg_filename - - def _add_task_with_random_filename(self, task): - """ - Helper method to create a task with a random filename - in case we can't generate a sequential filename. 
- """ - try: - download_type = task.get("download_type", "unknown") - random_id = generate_random_filename(extension="") - prg_filename = f"{download_type}_{random_id}.prg" - prg_path = os.path.join(self.prg_dir, prg_filename) - task['prg_path'] = prg_path - - # Initialize retry count and add retry parameters - task['retry_count'] = 0 - - # Get retry configuration from config, or use what's provided in the task - config_params = get_config_params() - task['max_retries'] = task.get('max_retries', config_params.get('maxRetries', MAX_RETRIES)) - task['initial_retry_delay'] = task.get('initial_retry_delay', config_params.get('retryDelaySeconds', RETRY_DELAY)) - task['retry_delay_increase'] = task.get('retry_delay_increase', config_params.get('retry_delay_increase', RETRY_DELAY_INCREASE)) - - with open(prg_path, 'w') as f: - # Merge extra keys into the original request - original_request = task.get("orig_request", {}).copy() - - # Add essential metadata for retry operations - original_request["download_type"] = download_type - - # Ensure key information is included - for key in ["type", "name", "artist", "service", "url"]: - if key in task and key not in original_request: - original_request[key] = task[key] - - # Add API endpoint information - if "endpoint" not in original_request: - original_request["endpoint"] = f"/api/{download_type}/download" - - # Add explicit display information for the frontend - original_request["display_title"] = task.get("name", original_request.get("name", "Unknown")) - original_request["display_type"] = task.get("type", original_request.get("type", download_type)) - original_request["display_artist"] = task.get("artist", original_request.get("artist", "")) - - # Write the first entry - the enhanced original request params - f.write(json.dumps(original_request) + "\n") - - # Write the second entry - the queued status - f.write(json.dumps({ - "status": "queued", - "timestamp": time.time(), - "type": task.get("type", ""), - "name": task.get("name", ""), - "artist": task.get("artist", ""), - "retry_count": 0, - "max_retries": task.get('max_retries', MAX_RETRIES), - "initial_retry_delay": task.get('initial_retry_delay', RETRY_DELAY), - "retry_delay_increase": task.get('retry_delay_increase', RETRY_DELAY_INCREASE), - "queue_position": self.pending_tasks.qsize() + 1 - }) + "\n") - - self.pending_tasks.put((prg_filename, task)) - self.save_queue_state() - - print(f"Added task {prg_filename} to download queue (with random filename)") - return prg_filename - except Exception as e: - print(f"Error adding task with random filename: {e}") - return None - - def retry_task(self, prg_filename): - """ - Retry a failed task by creating a new PRG file and adding it back to the queue. 
- """ - with self.lock: - # Check if the task is in failed_tasks - if prg_filename not in self.failed_tasks: - return { - "status": "error", - "message": f"Task {prg_filename} not found in failed tasks" - } - - task, retry_count = self.failed_tasks.pop(prg_filename) - # Increment the retry count - task["retry_count"] = retry_count + 1 - - # Get retry configuration parameters from config, not from the task - config_params = get_config_params() - max_retries = config_params.get('maxRetries', MAX_RETRIES) - initial_retry_delay = config_params.get('retryDelaySeconds', RETRY_DELAY) - retry_delay_increase = config_params.get('retry_delay_increase', RETRY_DELAY_INCREASE) - - # Update task with the latest config values - task["max_retries"] = max_retries - task["initial_retry_delay"] = initial_retry_delay - task["retry_delay_increase"] = retry_delay_increase - - # Calculate the new retry delay - current_retry_delay = initial_retry_delay + (task["retry_count"] * retry_delay_increase) - - # If we've exceeded the maximum retries, return an error - if task["retry_count"] > max_retries: - return { - "status": "error", - "message": f"Maximum retry attempts ({max_retries}) exceeded" - } - - # Use the same download type as the original task. - download_type = task.get("download_type", "unknown") - - # Generate a new task with a new PRG filename for the retry. - # We're using the original file name with a retry count suffix. - original_name = os.path.splitext(prg_filename)[0] - new_prg_filename = f"{original_name}_retry{task['retry_count']}.prg" - new_prg_path = os.path.join(self.prg_dir, new_prg_filename) - task["prg_path"] = new_prg_path - - # Try to load the original request information from the original PRG file - original_request = {} - original_prg_path = os.path.join(self.prg_dir, prg_filename) - try: - if os.path.exists(original_prg_path): - with open(original_prg_path, 'r') as f: - first_line = f.readline().strip() - if first_line: - try: - original_request = json.loads(first_line) - except json.JSONDecodeError: - pass - except Exception as e: - print(f"Error reading original request from {prg_filename}: {e}") - - # If we couldn't get the original request, use what we have in the task - if not original_request: - original_request = task.get("orig_request", {}).copy() - # Add essential metadata for retry operations - original_request["download_type"] = download_type - for key in ["type", "name", "artist", "service", "url"]: - if key in task and key not in original_request: - original_request[key] = task[key] - # Add API endpoint information - if "endpoint" not in original_request: - original_request["endpoint"] = f"/api/{download_type}/download" - - # Add explicit display information for the frontend - original_request["display_title"] = task.get("name", "Unknown") - original_request["display_type"] = task.get("type", download_type) - original_request["display_artist"] = task.get("artist", "") - elif not any(key in original_request for key in ["display_title", "display_type", "display_artist"]): - # Ensure display fields exist if they weren't in the original request - original_request["display_title"] = original_request.get("name", task.get("name", "Unknown")) - original_request["display_type"] = original_request.get("type", task.get("type", download_type)) - original_request["display_artist"] = original_request.get("artist", task.get("artist", "")) - - # Create and immediately write the retry information to the new PRG file. 
- try: - with open(new_prg_path, 'w') as f: - # First, write the original request information - f.write(json.dumps(original_request) + "\n") - - # Then write the queued status with retry information - f.write(json.dumps({ - "status": "queued", - "type": task.get("type", "unknown"), - "name": task.get("name", "Unknown"), - "artist": task.get("artist", "Unknown"), - "retry_count": task["retry_count"], - "max_retries": max_retries, - "retry_delay": current_retry_delay, - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error creating retry PRG file: {e}") - return { - "status": "error", - "message": f"Failed to create retry file: {str(e)}" - } - - # Add the task to the pending_tasks queue. - self.pending_tasks.put((new_prg_filename, task)) - print(f"Requeued task {new_prg_filename} for retry (attempt {task['retry_count']})") - - # Save updated queue state - self.save_queue_state() - - return { - "status": "requeued", - "prg_file": new_prg_filename, - "retry_count": task["retry_count"], - "max_retries": max_retries, - "retry_delay": current_retry_delay, - } - - def cancel_task(self, prg_filename): - """ - Cancels a running or queued download task by its PRG filename. - Returns a status dictionary that should be returned to the client. - """ - prg_path = os.path.join(self.prg_dir, prg_filename) - - # First, check if the task is even valid (file exists) - if not os.path.exists(prg_path): - return {"status": "error", "message": f"Task {prg_filename} not found"} - - with self.lock: - # Check if task is currently running - if prg_filename in self.running_downloads: - # Get the process and stop event - process, task, stop_event = self.running_downloads[prg_filename] - - # Signal the process to stop gracefully using the event - stop_event.set() - - # Give the process a short time to terminate gracefully - process.join(timeout=2) - - # If the process is still alive, terminate it forcefully - if process.is_alive(): - print(f"Terminating process for {prg_filename} forcefully") - process.terminate() - process.join(timeout=1) - - # If still alive after terminate, kill it - if process.is_alive(): - print(f"Process for {prg_filename} not responding to terminate, killing") - try: - if hasattr(process, 'kill'): - process.kill() - else: - os.kill(process.pid, signal.SIGKILL) - except: - print(f"Error killing process for {prg_filename}") - - # Clean up by removing from running downloads - del self.running_downloads[prg_filename] - - # Update the PRG file to indicate cancellation - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "cancel", - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing cancel status: {e}") - - print(f"Cancelled running task: {prg_filename}") - return {"status": "cancelled", "prg_file": prg_filename} - - # If not running, check if it's a planned retry - if prg_filename in self.failed_tasks: - del self.failed_tasks[prg_filename] - - # Update the PRG file to indicate cancellation - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "cancel", - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing cancel status: {e}") - - print(f"Cancelled retry task: {prg_filename}") - return {"status": "cancelled", "prg_file": prg_filename} - - # If not running, it might be queued; mark as cancelled - self.cancelled_tasks.add(prg_filename) - - # If it's in the queue, try to update its status in the PRG file - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - 
"status": "cancel", - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing cancel status: {e}") - - print(f"Marked queued task as cancelled: {prg_filename}") - return {"status": "cancelled", "prg_file": prg_filename} - - def cancel_all_tasks(self): - """Cancel all currently queued and running tasks.""" - with self.lock: - # First, mark all pending tasks as cancelled - with self.pending_tasks.mutex: - for item in list(self.pending_tasks.queue): - prg_filename, _ = item - self.cancelled_tasks.add(prg_filename) - prg_path = os.path.join(self.prg_dir, prg_filename) - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "cancel", - "message": "Task was cancelled by user", - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing cancelled status for {prg_filename}: {e}") - # Clear the queue - self.pending_tasks.queue.clear() - - # Next, terminate all running tasks - for prg_filename, (process, _, stop_event) in list(self.running_downloads.items()): - if stop_event: - stop_event.set() - - if process and process.is_alive(): - try: - process.terminate() - prg_path = os.path.join(self.prg_dir, prg_filename) - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "cancel", - "message": "Task was cancelled by user", - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error cancelling task {prg_filename}: {e}") - - # Clear all running downloads - self.running_downloads.clear() - - # Clear failed tasks - self.failed_tasks.clear() - - self.save_queue_state() - return {"status": "all_cancelled"} - - def terminate_all_downloads(self): - """Terminate all running download processes.""" - with self.lock: - for prg_filename, (process, _, stop_event) in list(self.running_downloads.items()): - if stop_event: - stop_event.set() - - if process and process.is_alive(): - try: - process.terminate() - process.join(timeout=2) - if process.is_alive(): - print(f"Process for {prg_filename} did not terminate, forcing kill") - process.kill() - process.join(timeout=1) - except Exception as e: - print(f"Error terminating process: {e}") - - self.running_downloads.clear() - - def get_queue_status(self): - """Get the current status of the queue.""" - with self.lock: - running_count = len(self.running_downloads) - pending_count = self.pending_tasks.qsize() - failed_count = len(self.failed_tasks) - - # Get info about current running tasks - running_tasks = [] - for prg_filename, (_, task, _) in self.running_downloads.items(): - running_tasks.append({ - "prg_filename": prg_filename, - "name": task.get("name", "Unknown"), - "type": task.get("type", "unknown"), - "download_type": task.get("download_type", "unknown") - }) - - # Get info about failed tasks - failed_tasks = [] - for prg_filename, (task, retry_count) in self.failed_tasks.items(): - failed_tasks.append({ - "prg_filename": prg_filename, - "name": task.get("name", "Unknown"), - "type": task.get("type", "unknown"), - "download_type": task.get("download_type", "unknown"), - "retry_count": retry_count - }) - - return { - "running": running_count, - "pending": pending_count, - "failed": failed_count, - "max_concurrent": self.max_concurrent, - "paused": self.paused, - "running_tasks": running_tasks, - "failed_tasks": failed_tasks - } - - def check_for_stuck_tasks(self): - """ - Scan for tasks that appear to be stuck and requeue them if necessary. - Called periodically by the queue worker. 
- """ - print("Checking for stuck tasks...") - - # First, scan the running tasks to see if any processes are defunct - with self.lock: - defunct_tasks = [] - stalled_tasks = [] - current_time = time.time() - - for prg_filename, (process, task, stop_event) in list(self.running_downloads.items()): - if not process.is_alive(): - # Process is no longer alive but wasn't cleaned up - defunct_tasks.append((prg_filename, task)) - print(f"Found defunct task {prg_filename}, process is no longer alive") - - # Check task prg file timestamp to detect stalled tasks - prg_path = os.path.join(self.prg_dir, prg_filename) - try: - last_modified = os.path.getmtime(prg_path) - if current_time - last_modified > 300: # 5 minutes - print(f"Task {prg_filename} may be stalled, last activity: {current_time - last_modified:.1f} seconds ago") - # Add to stalled tasks list for potential termination - stalled_tasks.append((prg_filename, process, task, stop_event)) - except Exception as e: - print(f"Error checking task timestamp: {e}") - - # Clean up defunct tasks - for prg_filename, task in defunct_tasks: - print(f"Cleaning up defunct task: {prg_filename}") - del self.running_downloads[prg_filename] - - # If task still has retries left, requeue it - retry_count = task.get("retry_count", 0) - if retry_count < MAX_RETRIES: - task["retry_count"] = retry_count + 1 - print(f"Requeuing task {prg_filename}, retry count: {task['retry_count']}") - - # Update the PRG file to indicate the task is being requeued - prg_path = os.path.join(self.prg_dir, prg_filename) - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "requeued", - "message": "Task was automatically requeued after process died", - "retry_count": task["retry_count"], - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing to PRG file for requeued task: {e}") - - self.pending_tasks.put((prg_filename, task)) - else: - # No more retries - mark as failed - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "error", - "message": "Task failed - maximum retry count reached", - "can_retry": False, - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing to PRG file for failed task: {e}") - - # Handle stalled tasks - for prg_filename, process, task, stop_event in stalled_tasks: - print(f"Terminating stalled task {prg_filename}") - - # Signal the process to stop gracefully - if stop_event: - stop_event.set() - - # Give it a short time to terminate gracefully - process.join(timeout=2) - - # If still alive, terminate forcefully - if process.is_alive(): - process.terminate() - process.join(timeout=1) - - # If still alive after terminate, kill it - if process.is_alive(): - try: - if hasattr(process, 'kill'): - process.kill() - else: - os.kill(process.pid, signal.SIGKILL) - except Exception as e: - print(f"Error killing process for {prg_filename}: {e}") - - # Remove from running downloads - del self.running_downloads[prg_filename] - - # If task still has retries left, requeue it - retry_count = task.get("retry_count", 0) - if retry_count < MAX_RETRIES: - task["retry_count"] = retry_count + 1 - print(f"Requeuing stalled task {prg_filename}, retry count: {task['retry_count']}") - - # Update the PRG file to indicate the task is being requeued - prg_path = os.path.join(self.prg_dir, prg_filename) - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "requeued", - "message": "Task was automatically requeued after stalling", - "retry_count": 
task["retry_count"], - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing to PRG file for requeued task: {e}") - - self.pending_tasks.put((prg_filename, task)) - else: - # No more retries - mark as failed - prg_path = os.path.join(self.prg_dir, prg_filename) - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "error", - "message": "Task stalled - maximum retry count reached", - "can_retry": False, - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing to PRG file for failed task: {e}") - - # Save queue state after processing stuck tasks - if defunct_tasks or stalled_tasks: - self.save_queue_state() - - def queue_worker(self): - """ - Worker thread that continuously monitors the pending_tasks queue. - It cleans up finished download processes and starts new ones if the - number of running downloads is less than the allowed maximum. - """ - last_stuck_check = time.time() - - while self.running: - try: - # Periodically check for stuck tasks - current_time = time.time() - if current_time - last_stuck_check > 60: # Check every minute - self.check_for_stuck_tasks() - last_stuck_check = current_time - - # First, clean up any finished processes. - with self.lock: - finished = [] - for prg_filename, (process, task, _) in list(self.running_downloads.items()): - if not process.is_alive(): - finished.append((prg_filename, task)) - - for prg_filename, task in finished: - del self.running_downloads[prg_filename] - - # Check if the task completed successfully or failed - prg_path = os.path.join(self.prg_dir, prg_filename) - try: - # Read the last line of the prg file to check status - with open(prg_path, 'r') as f: - lines = f.readlines() - if lines: - last_line = lines[-1].strip() - try: - status = json.loads(last_line) - # Check if the task failed and can be retried - if status.get("status") == "error" and status.get("can_retry", False): - retry_count = task.get("retry_count", 0) - if retry_count < MAX_RETRIES: - # Add to failed tasks for potential retry - self.failed_tasks[prg_filename] = (task, retry_count) - print(f"Task {prg_filename} failed and can be retried. Current retry count: {retry_count}") - except json.JSONDecodeError: - # Not valid JSON, ignore - pass - except Exception as e: - print(f"Error checking task completion status: {e}") - - # Get the current count of running downloads with the lock held - running_count = len(self.running_downloads) - - # Log current capacity for debugging - print(f"Queue status: {running_count}/{self.max_concurrent} running, {self.pending_tasks.qsize()} pending, paused: {self.paused}") - - # Start new tasks if there is available capacity and not paused. - if running_count < self.max_concurrent and not self.paused: - try: - # Try to get a new task, but don't block for too long - prg_filename, task = self.pending_tasks.get(timeout=1) - except Empty: - time.sleep(0.5) - continue - - # Check if the task was cancelled while it was still queued. - with self.lock: - if prg_filename in self.cancelled_tasks: - # Task has been cancelled; remove it from the set and skip processing. 
- self.cancelled_tasks.remove(prg_filename) - print(f"Task {prg_filename} was cancelled while queued, skipping") - continue - - prg_path = task.get('prg_path') - - # Write a status update that the task is now processing - try: - with open(prg_path, 'a') as f: - f.write(json.dumps({ - "status": "processing", - "timestamp": time.time() - }) + "\n") - except Exception as e: - print(f"Error writing processing status: {e}") - - # Create a stop event for graceful shutdown - stop_event = Event() - - # Create and start a new process for the task. - p = Process( - target=run_download_task, - args=(task, prg_path, stop_event) - ) - with self.lock: - self.running_downloads[prg_filename] = (p, task, stop_event) - p.start() - print(f"Started download process for {prg_filename}") - else: - # At capacity or paused; sleep briefly. - time.sleep(1) - except Exception as e: - print(f"Error in queue worker: {e}") - traceback.print_exc() - - # Small sleep to avoid a tight loop. - time.sleep(0.1) - - # Periodically save queue state - if random.randint(1, 100) == 1: # ~1% chance each iteration - self.save_queue_state() - -# ------------------------------------------------------------------------------ -# Global Instance -# ------------------------------------------------------------------------------ - -# Create and start a global instance of the queue manager. -download_queue_manager = DownloadQueueManager() -download_queue_manager.start() diff --git a/routes/utils/track.py b/routes/utils/track.py index 22a9a44..bc92634 100755 --- a/routes/utils/track.py +++ b/routes/utils/track.py @@ -18,7 +18,8 @@ def download_track( pad_tracks=True, initial_retry_delay=5, retry_delay_increase=5, - max_retries=3 + max_retries=3, + progress_callback=None ): try: # Load Spotify client credentials if available @@ -49,7 +50,8 @@ def download_track( dl = DeeLogin( arl=deezer_creds.get('arl', ''), spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) dl.download_trackspo( link_track=url, @@ -86,7 +88,8 @@ def download_track( spo = SpoLogin( credentials_path=spo_creds_path, spotify_client_id=fallback_client_id, - spotify_client_secret=fallback_client_secret + spotify_client_secret=fallback_client_secret, + progress_callback=progress_callback ) spo.download_track( link_track=url, @@ -113,7 +116,8 @@ def download_track( spo = SpoLogin( credentials_path=credentials_path, spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) spo.download_track( link_track=url, @@ -142,7 +146,8 @@ def download_track( dl = DeeLogin( arl=creds.get('arl', ''), spotify_client_id=spotify_client_id, - spotify_client_secret=spotify_client_secret + spotify_client_secret=spotify_client_secret, + progress_callback=progress_callback ) dl.download_trackdee( link_track=url, diff --git a/start_app.sh b/start_app.sh new file mode 100755 index 0000000..1f1da8f --- /dev/null +++ b/start_app.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# Start Flask app in the background +echo "Starting Flask application..." +python app.py & + +# Wait a moment for Flask to initialize +sleep 2 + +# Start Celery worker +echo "Starting Celery worker..." 
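+# -A points the worker at the Celery app object in routes/utils/celery_tasks.py;
+# --concurrency falls back to 3 when MAX_CONCURRENT_DL is unset; -Q binds the
+# worker to the dedicated "downloads" queue.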
+celery -A routes.utils.celery_tasks.celery_app worker --loglevel=info --concurrency=${MAX_CONCURRENT_DL:-3} -Q downloads & + +# Keep the script running +wait \ No newline at end of file diff --git a/static/css/config/config.css b/static/css/config/config.css index 5e1d15d..7c4959c 100644 --- a/static/css/config/config.css +++ b/static/css/config/config.css @@ -261,6 +261,50 @@ body { transition: transform 0.3s ease; } +/* Service selection highlight */ +#defaultServiceSelect { + border-left: 3px solid #1db954; + box-shadow: 0 0 8px rgba(29, 185, 84, 0.1); + transition: all 0.3s ease; +} + +#defaultServiceSelect:focus { + border-color: #1db954; + box-shadow: 0 0 12px rgba(29, 185, 84, 0.2); +} + +/* Highlighted service-specific options */ +.config-item.highlighted-option { + background-color: rgba(29, 185, 84, 0.05); + border-radius: 8px; + padding: 10px; + margin-left: -10px; + margin-right: -10px; + position: relative; + overflow: hidden; + transition: all 0.3s ease; +} + +.config-item.highlighted-option::before { + content: ''; + position: absolute; + left: 0; + top: 0; + height: 100%; + width: 3px; + background-color: #1db954; +} + +.config-item.highlighted-option label { + color: #ffffff; + font-weight: 500; +} + +/* Add subtle animation on hover */ +.config-item:hover #defaultServiceSelect { + box-shadow: 0 0 12px rgba(29, 185, 84, 0.15); +} + .account-config:hover { transform: translateY(-2px); } diff --git a/static/js/album.js b/static/js/album.js index b1456b0..b923132 100644 --- a/static/js/album.js +++ b/static/js/album.js @@ -170,8 +170,8 @@ async function downloadWholeAlbum(album) { } try { - // Use local startDownload function instead of downloadQueue.startAlbumDownload - await startDownload(url, 'album', { name: album.name || 'Unknown Album' }); + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, 'album', { name: album.name || 'Unknown Album' }); // Make the queue visible after queueing downloadQueue.toggleVisibility(true); } catch (error) { @@ -213,82 +213,12 @@ async function startDownload(url, type, item, albumType) { return; } - const service = url.includes('open.spotify.com') ? 
'spotify' : 'deezer'; - let apiUrl = `/api/${type}/download?service=${service}&url=${encodeURIComponent(url)}`; - - // Add name and artist if available for better progress display - if (item.name) { - apiUrl += `&name=${encodeURIComponent(item.name)}`; - } - if (item.artist) { - apiUrl += `&artist=${encodeURIComponent(item.artist)}`; - } - - // For artist downloads, include album_type - if (type === 'artist' && albumType) { - apiUrl += `&album_type=${encodeURIComponent(albumType)}`; - } - try { - const response = await fetch(apiUrl); - if (!response.ok) { - throw new Error(`Server returned ${response.status}`); - } + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, type, item, albumType); - const data = await response.json(); - - // Handle artist downloads which return multiple album_prg_files - if (type === 'artist' && data.album_prg_files && Array.isArray(data.album_prg_files)) { - // Add each album to the download queue separately - const queueIds = []; - data.album_prg_files.forEach(prgFile => { - const queueId = downloadQueue.addDownload(item, 'album', prgFile, apiUrl, false); - queueIds.push({queueId, prgFile}); - }); - - // Wait a short time before checking the status to give server time to create files - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Start monitoring each entry after confirming PRG files exist - for (const {queueId, prgFile} of queueIds) { - try { - const statusResponse = await fetch(`/api/prgs/${prgFile}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log(`Initial status check pending for ${prgFile}, will retry on next interval`); - } - } - } else if (data.prg_file) { - // Handle single-file downloads (tracks, albums, playlists) - const queueId = downloadQueue.addDownload(item, type, data.prg_file, apiUrl, false); - - // Wait a short time before checking the status to give server time to create the file - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } - } else { - throw new Error('Invalid response format from server'); - } + // Make the queue visible after queueing + downloadQueue.toggleVisibility(true); } catch (error) { showError('Download failed: ' + (error?.message || 'Unknown error')); throw error; diff --git a/static/js/artist.js b/static/js/artist.js index 20e46bf..627473f 100644 --- a/static/js/artist.js +++ b/static/js/artist.js @@ -195,8 +195,8 @@ function attachDownloadListeners() { const type = e.currentTarget.dataset.type || 'album'; e.currentTarget.remove(); - // Use our local startDownload function instead of downloadQueue.startAlbumDownload - startDownload(url, type, { name, type }) + // Use the centralized downloadQueue.download method + downloadQueue.download(url, type, { name, type }) .catch(err => showError('Download 
failed: ' + (err?.message || 'Unknown error'))); }); }); @@ -204,7 +204,7 @@ function attachDownloadListeners() { // Add startDownload function (similar to track.js and main.js) /** - * Starts the download process via API + * Starts the download process via centralized download queue */ async function startDownload(url, type, item, albumType) { if (!url || !type) { @@ -212,82 +212,12 @@ async function startDownload(url, type, item, albumType) { return; } - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - let apiUrl = `/api/${type}/download?service=${service}&url=${encodeURIComponent(url)}`; - - // Add name and artist if available for better progress display - if (item.name) { - apiUrl += `&name=${encodeURIComponent(item.name)}`; - } - if (item.artist) { - apiUrl += `&artist=${encodeURIComponent(item.artist)}`; - } - - // For artist downloads, include album_type - if (type === 'artist' && albumType) { - apiUrl += `&album_type=${encodeURIComponent(albumType)}`; - } - try { - const response = await fetch(apiUrl); - if (!response.ok) { - throw new Error(`Server returned ${response.status}`); - } + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, type, item, albumType); - const data = await response.json(); - - // Handle artist downloads which return multiple album_prg_files - if (type === 'artist' && data.album_prg_files && Array.isArray(data.album_prg_files)) { - // Add each album to the download queue separately - const queueIds = []; - data.album_prg_files.forEach(prgFile => { - const queueId = downloadQueue.addDownload(item, 'album', prgFile, apiUrl, false); - queueIds.push({queueId, prgFile}); - }); - - // Wait a short time before checking the status to give server time to create files - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Start monitoring each entry after confirming PRG files exist - for (const {queueId, prgFile} of queueIds) { - try { - const statusResponse = await fetch(`/api/prgs/${prgFile}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log(`Initial status check pending for ${prgFile}, will retry on next interval`); - } - } - } else if (data.prg_file) { - // Handle single-file downloads (tracks, albums, playlists) - const queueId = downloadQueue.addDownload(item, type, data.prg_file, apiUrl, false); - - // Wait a short time before checking the status to give server time to create the file - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } - } else { - throw new Error('Invalid response format from server'); - } + // Make the queue visible after queueing + downloadQueue.toggleVisibility(true); } catch (error) { showError('Download failed: ' + (error?.message || 'Unknown error')); throw error; diff --git 
a/static/js/config.js b/static/js/config.js index 180c806..4cc0c51 100644 --- a/static/js/config.js +++ b/static/js/config.js @@ -79,6 +79,10 @@ function setupEventListeners() { document.getElementById('credentialForm').addEventListener('submit', handleCredentialSubmit); // Config change listeners + document.getElementById('defaultServiceSelect').addEventListener('change', function() { + updateServiceSpecificOptions(); + saveConfig(); + }); document.getElementById('fallbackToggle').addEventListener('change', saveConfig); document.getElementById('realTimeToggle').addEventListener('change', saveConfig); document.getElementById('spotifyQualitySelect').addEventListener('change', saveConfig); @@ -105,6 +109,36 @@ function setupEventListeners() { document.getElementById('maxConcurrentDownloads').addEventListener('change', saveConfig); } +function updateServiceSpecificOptions() { + // Get the selected service + const selectedService = document.getElementById('defaultServiceSelect').value; + + // Get all service-specific sections + const spotifyOptions = document.querySelectorAll('.config-item.spotify-specific'); + const deezerOptions = document.querySelectorAll('.config-item.deezer-specific'); + + // Handle Spotify specific options + if (selectedService === 'spotify') { + // Highlight Spotify section + document.getElementById('spotifyQualitySelect').closest('.config-item').classList.add('highlighted-option'); + document.getElementById('spotifyAccountSelect').closest('.config-item').classList.add('highlighted-option'); + + // Remove highlight from Deezer + document.getElementById('deezerQualitySelect').closest('.config-item').classList.remove('highlighted-option'); + document.getElementById('deezerAccountSelect').closest('.config-item').classList.remove('highlighted-option'); + } + // Handle Deezer specific options (for future use) + else if (selectedService === 'deezer') { + // Highlight Deezer section + document.getElementById('deezerQualitySelect').closest('.config-item').classList.add('highlighted-option'); + document.getElementById('deezerAccountSelect').closest('.config-item').classList.add('highlighted-option'); + + // Remove highlight from Spotify + document.getElementById('spotifyQualitySelect').closest('.config-item').classList.remove('highlighted-option'); + document.getElementById('spotifyAccountSelect').closest('.config-item').classList.remove('highlighted-option'); + } +} + async function updateAccountSelectors() { try { const [spotifyResponse, deezerResponse] = await Promise.all([ @@ -561,6 +595,7 @@ function resetForm() { async function saveConfig() { // Read active account values directly from the DOM (or from the globals which are kept in sync) const config = { + service: document.getElementById('defaultServiceSelect').value, spotify: document.getElementById('spotifyAccountSelect').value, deezer: document.getElementById('deezerAccountSelect').value, fallback: document.getElementById('fallbackToggle').checked, @@ -599,6 +634,12 @@ async function loadConfig() { const savedConfig = await response.json(); + // Set default service selection + document.getElementById('defaultServiceSelect').value = savedConfig.service || 'spotify'; + + // Update the service-specific options based on selected service + updateServiceSpecificOptions(); + // Use the "spotify" and "deezer" properties from the API response to set the active accounts. 
activeSpotifyAccount = savedConfig.spotify || ''; activeDeezerAccount = savedConfig.deezer || ''; diff --git a/static/js/main.js b/static/js/main.js index 2b6faa3..e673bd5 100644 --- a/static/js/main.js +++ b/static/js/main.js @@ -200,89 +200,12 @@ document.addEventListener('DOMContentLoaded', function() { return; } - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - let apiUrl = `/api/${type}/download?service=${service}&url=${encodeURIComponent(url)}`; - - // Add name and artist if available for better progress display - if (item.name) { - apiUrl += `&name=${encodeURIComponent(item.name)}`; - } - if (item.artist) { - apiUrl += `&artist=${encodeURIComponent(item.artist)}`; - } - - // For artist downloads, include album_type - if (type === 'artist' && albumType) { - apiUrl += `&album_type=${encodeURIComponent(albumType)}`; - } - try { - const response = await fetch(apiUrl); + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, type, item, albumType); - if (!response.ok) { - const errorData = await response.json().catch(() => ({})); - throw new Error(errorData.error || 'Download request failed'); - } - - const data = await response.json(); - - // Handle artist downloads which return multiple album_prg_files - if (type === 'artist' && data.album_prg_files && Array.isArray(data.album_prg_files)) { - // Add each album to the download queue separately - const queueIds = []; - data.album_prg_files.forEach(prgFile => { - const queueId = downloadQueue.addDownload(item, 'album', prgFile, apiUrl, false); - queueIds.push({queueId, prgFile}); - }); - - // Wait a short time before checking the status to give server time to create files - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Start monitoring each entry after confirming PRG files exist - for (const {queueId, prgFile} of queueIds) { - try { - const statusResponse = await fetch(`/api/prgs/${prgFile}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log(`Initial status check pending for ${prgFile}, will retry on next interval`); - } - } - - // Show success message for artist download - if (data.message) { - showSuccess(data.message); - } - } else if (data.prg_file) { - // Handle single-file downloads (tracks, albums, playlists) - const queueId = downloadQueue.addDownload(item, type, data.prg_file, apiUrl, false); - - // Wait a short time before checking the status to give server time to create the file - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } - } else { - throw new Error('Invalid response format from server'); - } + // Make the queue visible after queueing + downloadQueue.toggleVisibility(true); } catch (error) { showError('Download failed: ' + (error.message || 'Unknown error')); throw error; 
diff --git a/static/js/playlist.js b/static/js/playlist.js index ac28545..010c860 100644 --- a/static/js/playlist.js +++ b/static/js/playlist.js @@ -240,8 +240,8 @@ async function downloadWholePlaylist(playlist) { } try { - // Use our local startDownload function instead of downloadQueue.startPlaylistDownload - await startDownload(url, 'playlist', { name: playlist.name || 'Unknown Playlist' }); + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, 'playlist', { name: playlist.name || 'Unknown Playlist' }); // Make the queue visible after queueing downloadQueue.toggleVisibility(true); } catch (error) { @@ -295,8 +295,8 @@ async function downloadPlaylistAlbums(playlist) { const albumUrl = album.external_urls?.spotify || ''; if (!albumUrl) continue; - // Use our local startDownload function instead of downloadQueue.startAlbumDownload - await startDownload( + // Use the centralized downloadQueue.download method + await downloadQueue.download( albumUrl, 'album', { name: album.name || 'Unknown Album' } @@ -325,8 +325,7 @@ async function downloadPlaylistAlbums(playlist) { } /** - * Starts the download process by building a minimal API URL with only the necessary parameters, - * since the server will use config defaults for others. + * Starts the download process using the centralized download method from the queue. */ async function startDownload(url, type, item, albumType) { if (!url || !type) { @@ -334,82 +333,12 @@ async function startDownload(url, type, item, albumType) { return; } - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - let apiUrl = `/api/${type}/download?service=${service}&url=${encodeURIComponent(url)}`; - - // Add name and artist if available for better progress display - if (item.name) { - apiUrl += `&name=${encodeURIComponent(item.name)}`; - } - if (item.artist) { - apiUrl += `&artist=${encodeURIComponent(item.artist)}`; - } - - // For artist downloads, include album_type - if (type === 'artist' && albumType) { - apiUrl += `&album_type=${encodeURIComponent(albumType)}`; - } - try { - const response = await fetch(apiUrl); - if (!response.ok) { - throw new Error(`Server returned ${response.status}`); - } + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, type, item, albumType); - const data = await response.json(); - - // Handle artist downloads which return multiple album_prg_files - if (type === 'artist' && data.album_prg_files && Array.isArray(data.album_prg_files)) { - // Add each album to the download queue separately - const queueIds = []; - data.album_prg_files.forEach(prgFile => { - const queueId = downloadQueue.addDownload(item, 'album', prgFile, apiUrl, false); - queueIds.push({queueId, prgFile}); - }); - - // Wait a short time before checking the status to give server time to create files - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Start monitoring each entry after confirming PRG files exist - for (const {queueId, prgFile} of queueIds) { - try { - const statusResponse = await fetch(`/api/prgs/${prgFile}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log(`Initial status check pending for ${prgFile}, will retry on next interval`); - } - } - } else if (data.prg_file) { - // Handle single-file 
downloads (tracks, albums, playlists) - const queueId = downloadQueue.addDownload(item, type, data.prg_file, apiUrl, false); - - // Wait a short time before checking the status to give server time to create the file - await new Promise(resolve => setTimeout(resolve, 1000)); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } - } else { - throw new Error('Invalid response format from server'); - } + // Make the queue visible after queueing + downloadQueue.toggleVisibility(true); } catch (error) { showError('Download failed: ' + (error?.message || 'Unknown error')); throw error; diff --git a/static/js/queue.js b/static/js/queue.js index 1616e6b..b2e61c6 100644 --- a/static/js/queue.js +++ b/static/js/queue.js @@ -215,7 +215,7 @@ class DownloadQueue { if (entry.isNew) { const logElement = document.getElementById(`log-${entry.uniqueId}-${entry.prgFile}`); if (logElement) { - logElement.textContent = "Preparing download..."; + logElement.textContent = "Reading status..."; } } @@ -944,19 +944,24 @@ class DownloadQueue { entry.statusCheckFailures = 0; // Reset failure counter logEl.textContent = 'Retry initiated...'; - // Verify the PRG file exists before starting monitoring + // Make sure any existing interval is cleared before starting a new one + if (entry.intervalId) { + clearInterval(entry.intervalId); + entry.intervalId = null; + } + + // Always start monitoring right away - don't wait for verification + this.startEntryMonitoring(queueId); + + // Verify the PRG file exists as a secondary check, but don't wait for it to start monitoring try { const verifyResponse = await fetch(`/api/prgs/${retryData.prg_file}`); - if (verifyResponse.ok) { - // Start monitoring the new PRG file - this.startEntryMonitoring(queueId); - } else { - // If verification fails, wait a moment and then start monitoring anyway - setTimeout(() => this.startEntryMonitoring(queueId), 2000); + // Just log the verification result, monitoring is already started + if (!verifyResponse.ok) { + console.log(`PRG file verification failed for ${retryData.prg_file}, but monitoring already started`); } } catch (verifyError) { - // If verification fails, wait a moment and then start monitoring anyway - setTimeout(() => this.startEntryMonitoring(queueId), 2000); + console.log(`PRG file verification error for ${retryData.prg_file}, but monitoring already started:`, verifyError); } } else { logElement.textContent = 'Retry failed: invalid response from server'; @@ -967,100 +972,46 @@ class DownloadQueue { } } - async startTrackDownload(url, item) { - await this.loadConfig(); - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - - // Use minimal parameters in the URL, letting server use config for defaults - const apiUrl = `/api/track/download?service=${service}&url=${encodeURIComponent(url)}` + - (item.name ? `&name=${encodeURIComponent(item.name)}` : '') + - (item.artist ? 
`&artist=${encodeURIComponent(item.artist)}` : ''); - - try { - // Show a loading indicator - if (document.getElementById('queueIcon')) { - document.getElementById('queueIcon').classList.add('queue-icon-active'); + /** + * Start monitoring for all active entries in the queue that are visible + */ + startMonitoringActiveEntries() { + for (const queueId in this.downloadQueue) { + const entry = this.downloadQueue[queueId]; + // Only start monitoring if the entry is not in a terminal state and is visible + if (!entry.hasEnded && this.isEntryVisible(queueId) && !entry.intervalId) { + this.startEntryMonitoring(queueId); } - - // First create the queue entry with a preparation message - const tempItem = {...item, name: item.name || 'Preparing...'}; - const response = await fetch(apiUrl); - - if (!response.ok) throw new Error('Network error'); - const data = await response.json(); - - // Add the download to the queue but don't start monitoring yet - const queueId = this.addDownload(item, 'track', data.prg_file, apiUrl, false); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = this.downloadQueue[queueId]; - if (entry && this.isEntryVisible(queueId)) { - this.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } - } catch (error) { - this.dispatchEvent('downloadError', { error, item }); - throw error; } } - async startPlaylistDownload(url, item) { - await this.loadConfig(); - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - - // Use minimal parameters in the URL, letting server use config for defaults - const apiUrl = `/api/playlist/download?service=${service}&url=${encodeURIComponent(url)}` + - (item.name ? `&name=${encodeURIComponent(item.name)}` : '') + - (item.artist ? `&artist=${encodeURIComponent(item.artist)}` : ''); - - try { - // Show a loading indicator - if (document.getElementById('queueIcon')) { - document.getElementById('queueIcon').classList.add('queue-icon-active'); - } - - const response = await fetch(apiUrl); - if (!response.ok) throw new Error('Network error'); - const data = await response.json(); - - // Add the download to the queue but don't start monitoring yet - const queueId = this.addDownload(item, 'playlist', data.prg_file, apiUrl, false); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = this.downloadQueue[queueId]; - if (entry && this.isEntryVisible(queueId)) { - this.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } - } catch (error) { - this.dispatchEvent('downloadError', { error, item }); - throw error; + /** + * Centralized download method for all content types. + * This method replaces the individual startTrackDownload, startAlbumDownload, etc. methods. + * It will be called by all the other JS files. 
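+   * Returns the new queue ID, or an array of queue IDs when an artist
+   * download fans out into one entry per album_prg_file.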
+ */ + async download(url, type, item, albumType = null) { + if (!url) { + throw new Error('Missing URL for download'); } - } - - async startArtistDownload(url, item, albumType = 'album,single,compilation') { - await this.loadConfig(); - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - // Use minimal parameters in the URL, letting server use config for defaults - const apiUrl = `/api/artist/download?service=${service}&url=${encodeURIComponent(url)}` + - `&album_type=${albumType}` + - (item.name ? `&name=${encodeURIComponent(item.name)}` : '') + - (item.artist ? `&artist=${encodeURIComponent(item.artist)}` : ''); + await this.loadConfig(); + + // Build the API URL with only necessary parameters + let apiUrl = `/api/${type}/download?url=${encodeURIComponent(url)}`; + + // Add name and artist if available for better progress display + if (item.name) { + apiUrl += `&name=${encodeURIComponent(item.name)}`; + } + if (item.artist) { + apiUrl += `&artist=${encodeURIComponent(item.artist)}`; + } + + // For artist downloads, include album_type + if (type === 'artist' && albumType) { + apiUrl += `&album_type=${encodeURIComponent(albumType)}`; + } try { // Show a loading indicator @@ -1069,76 +1020,67 @@ class DownloadQueue { } const response = await fetch(apiUrl); - if (!response.ok) throw new Error('Network error'); + if (!response.ok) { + throw new Error(`Server returned ${response.status}`); + } + const data = await response.json(); - // Track all queue IDs created - const queueIds = []; - - if (data.album_prg_files && Array.isArray(data.album_prg_files)) { + // Handle artist downloads which return multiple album_prg_files + if (type === 'artist' && data.album_prg_files && Array.isArray(data.album_prg_files)) { + // Add each album to the download queue separately + const queueIds = []; data.album_prg_files.forEach(prgFile => { const queueId = this.addDownload(item, 'album', prgFile, apiUrl, false); queueIds.push({queueId, prgFile}); }); + + // Wait a short time before checking the status to give server time to create files + await new Promise(resolve => setTimeout(resolve, 1000)); + + // Start monitoring each entry after confirming PRG files exist + for (const {queueId, prgFile} of queueIds) { + try { + const statusResponse = await fetch(`/api/prgs/${prgFile}`); + if (statusResponse.ok) { + // Only start monitoring after confirming the PRG file exists + const entry = this.downloadQueue[queueId]; + if (entry) { + // Start monitoring regardless of visibility + this.startEntryMonitoring(queueId); + } + } + } catch (statusError) { + console.log(`Initial status check pending for ${prgFile}, will retry on next interval`); + } + } + + return queueIds.map(({queueId}) => queueId); } else if (data.prg_file) { - const queueId = this.addDownload(item, 'album', data.prg_file, apiUrl, false); - queueIds.push({queueId, prgFile: data.prg_file}); - } - - // Start monitoring each entry after confirming PRG files exist - for (const {queueId, prgFile} of queueIds) { + // Handle single-file downloads (tracks, albums, playlists) + const queueId = this.addDownload(item, type, data.prg_file, apiUrl, false); + + // Wait a short time before checking the status to give server time to create the file + await new Promise(resolve => setTimeout(resolve, 1000)); + + // Ensure the PRG file exists and has initial data by making a status check try { - const statusResponse = await fetch(`/api/prgs/${prgFile}`); + const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); if (statusResponse.ok) { + // 
Only start monitoring after confirming the PRG file exists const entry = this.downloadQueue[queueId]; - if (entry && this.isEntryVisible(queueId)) { + if (entry) { + // Start monitoring regardless of visibility this.startEntryMonitoring(queueId); } } } catch (statusError) { - console.log(`Initial status check pending for ${prgFile}, will retry on next interval`); + console.log('Initial status check pending, will retry on next interval'); } - } - } catch (error) { - this.dispatchEvent('downloadError', { error, item }); - throw error; - } - } - - async startAlbumDownload(url, item) { - await this.loadConfig(); - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - - // Use minimal parameters in the URL, letting server use config for defaults - const apiUrl = `/api/album/download?service=${service}&url=${encodeURIComponent(url)}` + - (item.name ? `&name=${encodeURIComponent(item.name)}` : '') + - (item.artist ? `&artist=${encodeURIComponent(item.artist)}` : ''); - - try { - // Show a loading indicator - if (document.getElementById('queueIcon')) { - document.getElementById('queueIcon').classList.add('queue-icon-active'); - } - - const response = await fetch(apiUrl); - if (!response.ok) throw new Error('Network error'); - const data = await response.json(); - - // Add the download to the queue but don't start monitoring yet - const queueId = this.addDownload(item, 'album', data.prg_file, apiUrl, false); - - // Ensure the PRG file exists and has initial data by making a status check - try { - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = this.downloadQueue[queueId]; - if (entry && this.isEntryVisible(queueId)) { - this.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); + + return queueId; + } else { + throw new Error('Invalid response format from server'); } } catch (error) { this.dispatchEvent('downloadError', { error, item }); @@ -1190,7 +1132,6 @@ class DownloadQueue { name: prgData.display_title || originalRequest.display_title || originalRequest.name || prgFile, artist: prgData.display_artist || originalRequest.display_artist || originalRequest.artist || '', type: prgData.display_type || originalRequest.display_type || originalRequest.type || 'unknown', - service: originalRequest.service || '', url: originalRequest.url || '', endpoint: originalRequest.endpoint || '', download_type: originalRequest.download_type || '' @@ -1213,7 +1154,6 @@ class DownloadQueue { let requestUrl = null; if (dummyItem.endpoint && dummyItem.url) { const params = new CustomURLSearchParams(); - params.append('service', dummyItem.service); params.append('url', dummyItem.url); if (dummyItem.name) params.append('name', dummyItem.name); @@ -1221,8 +1161,8 @@ class DownloadQueue { // Add any other parameters from the original request for (const [key, value] of Object.entries(originalRequest)) { - if (!['service', 'url', 'name', 'artist', 'type', 'endpoint', 'download_type', - 'display_title', 'display_type', 'display_artist'].includes(key)) { + if (!['url', 'name', 'artist', 'type', 'endpoint', 'download_type', + 'display_title', 'display_type', 'display_artist', 'service'].includes(key)) { params.append(key, value); } } @@ -1257,6 +1197,10 @@ class DownloadQueue { // After adding all entries, update the queue this.updateQueueOrder(); + + // Start monitoring for all active entries that 
are visible + // This is the key change to ensure continued status updates after page refresh + this.startMonitoringActiveEntries(); } catch (error) { console.error("Error loading existing PRG files:", error); } diff --git a/static/js/track.js b/static/js/track.js index ca05ff4..3500d55 100644 --- a/static/js/track.js +++ b/static/js/track.js @@ -110,9 +110,8 @@ function renderTrack(track) { return; } - // Create a local download function that uses our own API call instead of downloadQueue.startTrackDownload - // This mirrors the approach used in main.js that works properly - startDownload(trackUrl, 'track', { name: track.name || 'Unknown Track', artist: track.artists?.[0]?.name }) + // Use the centralized downloadQueue.download method + downloadQueue.download(trackUrl, 'track', { name: track.name || 'Unknown Track', artist: track.artists?.[0]?.name }) .then(() => { downloadBtn.innerHTML = `Queued!`; // Make the queue visible to show the download @@ -153,8 +152,7 @@ function showError(message) { } /** - * Starts the download process by building a minimal API URL with only the necessary parameters, - * since the server will use config defaults for others. + * Starts the download process by calling the centralized downloadQueue method */ async function startDownload(url, type, item) { if (!url || !type) { @@ -162,49 +160,12 @@ async function startDownload(url, type, item) { return; } - const service = url.includes('open.spotify.com') ? 'spotify' : 'deezer'; - let apiUrl = `/api/${type}/download?service=${service}&url=${encodeURIComponent(url)}`; - - // Add name and artist if available for better progress display - if (item.name) { - apiUrl += `&name=${encodeURIComponent(item.name)}`; - } - if (item.artist) { - apiUrl += `&artist=${encodeURIComponent(item.artist)}`; - } - try { - const response = await fetch(apiUrl); - if (!response.ok) { - throw new Error(`Server returned ${response.status}`); - } + // Use the centralized downloadQueue.download method + await downloadQueue.download(url, type, item); - const data = await response.json(); - - if (!data.prg_file) { - throw new Error('Server did not return a valid PRG file'); - } - - // Add the download to the queue but don't start monitoring yet - const queueId = downloadQueue.addDownload(item, type, data.prg_file, apiUrl, false); - - // Ensure the PRG file exists and has initial data by making a status check - try { - // Wait a short time before checking the status to give server time to create the file - await new Promise(resolve => setTimeout(resolve, 1000)); - - const statusResponse = await fetch(`/api/prgs/${data.prg_file}`); - if (statusResponse.ok) { - // Only start monitoring after confirming the PRG file exists - const entry = downloadQueue.downloadQueue[queueId]; - if (entry) { - // Start monitoring regardless of visibility - downloadQueue.startEntryMonitoring(queueId); - } - } - } catch (statusError) { - console.log('Initial status check pending, will retry on next interval'); - } + // Make the queue visible after queueing + downloadQueue.toggleVisibility(true); } catch (error) { showError('Download failed: ' + (error?.message || 'Unknown error')); throw error; diff --git a/supervisor_config.conf b/supervisor_config.conf new file mode 100644 index 0000000..34f47bd --- /dev/null +++ b/supervisor_config.conf @@ -0,0 +1,19 @@ +[program:spotizerr_flask] +directory=/home/xoconoch/coding/spotizerr +command=python app.py +autostart=true +autorestart=true +stderr_logfile=/var/log/spotizerr/flask.err.log 
+stdout_logfile=/var/log/spotizerr/flask.out.log
+
+[program:spotizerr_celery]
+directory=/home/xoconoch/coding/spotizerr
+; Note: %(ENV_MAX_CONCURRENT_DL)s is expanded from the environment supervisord
+; itself was started with; the environment= line below only reaches the child
+; process, so also export MAX_CONCURRENT_DL before launching supervisord.
+command=celery -A routes.utils.celery_tasks.celery_app worker --loglevel=info --concurrency=%(ENV_MAX_CONCURRENT_DL)s -Q downloads
+environment=MAX_CONCURRENT_DL=3
+autostart=true
+autorestart=true
+stderr_logfile=/var/log/spotizerr/celery.err.log
+stdout_logfile=/var/log/spotizerr/celery.out.log
+
+[group:spotizerr]
+programs=spotizerr_flask,spotizerr_celery
\ No newline at end of file
diff --git a/templates/config.html b/templates/config.html
index 7144e7b..23e93bd 100644
--- a/templates/config.html
+++ b/templates/config.html
@@ -18,12 +18,22 @@