@@ -1,7 +1,7 @@
 fastapi==0.116.1
 uvicorn[standard]==0.35.0
 celery==5.5.3
-deezspot-spotizerr==2.2.4
+deezspot-spotizerr==2.2.6
 httpx==0.28.1
 bcrypt==4.2.1
 PyJWT==2.10.1
@@ -27,13 +27,17 @@ def download_album(
     progress_callback=None,
     convert_to=None,
     bitrate=None,
+    artist_separator="; ",
+    recursive_quality=True,
     _is_celery_task_execution=False, # Added to skip duplicate check from Celery task
 ):
     if not _is_celery_task_execution:
-        existing_task = get_existing_task_id(url) # Check for duplicates only if not called by Celery task
+        existing_task = get_existing_task_id(
+            url
+        ) # Check for duplicates only if not called by Celery task
         if existing_task:
             raise DuplicateDownloadError(
-                f"Download for this URL is already in progress.",
+                "Download for this URL is already in progress.",
                 existing_task=existing_task,
             )
     try:
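Note: the hunk above is a re-entrancy escape hatch — only the user-facing entry point runs the duplicate check, and the Celery task calls back in with `_is_celery_task_execution=True` so the check is not repeated. A minimal, self-contained sketch of the pattern (the stubs stand in for the project's real helpers and are illustrative only):

```python
# Hypothetical stand-ins for the project's real helpers, for illustration only.
def get_existing_task_id(url):
    return None  # stub: the real helper queries stored task state


class DuplicateDownloadError(Exception):
    def __init__(self, message, existing_task=None):
        super().__init__(message)
        self.existing_task = existing_task


def download_album(url, _is_celery_task_execution=False, **kwargs):
    # Only the user-facing entry point performs the duplicate check;
    # the Celery task re-enters with _is_celery_task_execution=True.
    if not _is_celery_task_execution:
        existing_task = get_existing_task_id(url)
        if existing_task:
            raise DuplicateDownloadError(
                "Download for this URL is already in progress.",
                existing_task=existing_task,
            )
    # ... proceed with the actual download ...
```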
@@ -96,7 +100,7 @@ def download_album(
                 link_album=url, # Spotify URL
                 output_dir="./downloads",
                 quality_download=quality, # Deezer quality
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 make_zip=False,
@@ -109,6 +113,7 @@ def download_album(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: album.py - Album download via Deezer (account: {fallback}) successful for Spotify URL."
@@ -151,7 +156,7 @@ def download_album(
                 link_album=url, # Spotify URL
                 output_dir="./downloads",
                 quality_download=fall_quality, # Spotify quality
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 make_zip=False,
@@ -165,6 +170,7 @@ def download_album(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: album.py - Spotify direct download (account: {main} for blob) successful."
@@ -205,7 +211,7 @@ def download_album(
                 link_album=url,
                 output_dir="./downloads",
                 quality_download=quality,
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 make_zip=False,
@@ -219,6 +225,7 @@ def download_album(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: album.py - Direct Spotify download (account: {main} for blob) successful."
@@ -246,7 +253,7 @@ def download_album(
                 link_album=url,
                 output_dir="./downloads",
                 quality_download=quality,
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 make_zip=False,
                 custom_dir_format=custom_dir_format,
@@ -258,6 +265,7 @@ def download_album(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: album.py - Direct Deezer download (account: {main}) successful."

@@ -44,6 +44,7 @@ DEFAULT_MAIN_CONFIG = {
     "retry_delay_increase": 5,
     "convertTo": None,
     "bitrate": None,
+    "artist_separator": "; ",
 }

@@ -123,12 +124,12 @@ task_default_routing_key = "downloads"
 
 # Task routing - ensure SSE and utility tasks go to utility_tasks queue
 task_routes = {
-    'routes.utils.celery_tasks.trigger_sse_update_task': {'queue': 'utility_tasks'},
-    'routes.utils.celery_tasks.cleanup_stale_errors': {'queue': 'utility_tasks'},
-    'routes.utils.celery_tasks.delayed_delete_task_data': {'queue': 'utility_tasks'},
-    'routes.utils.celery_tasks.download_track': {'queue': 'downloads'},
-    'routes.utils.celery_tasks.download_album': {'queue': 'downloads'},
-    'routes.utils.celery_tasks.download_playlist': {'queue': 'downloads'},
+    "routes.utils.celery_tasks.trigger_sse_update_task": {"queue": "utility_tasks"},
+    "routes.utils.celery_tasks.cleanup_stale_errors": {"queue": "utility_tasks"},
+    "routes.utils.celery_tasks.delayed_delete_task_data": {"queue": "utility_tasks"},
+    "routes.utils.celery_tasks.download_track": {"queue": "downloads"},
+    "routes.utils.celery_tasks.download_album": {"queue": "downloads"},
+    "routes.utils.celery_tasks.download_playlist": {"queue": "downloads"},
 }
 
 # Celery task settings
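Note: the routing table only has an effect if workers actually consume both queues. A hedged sketch of the same `task_routes` shape on a minimal Celery app (the app name and broker URL are illustrative, not taken from the diff):

```python
from celery import Celery

# Broker URL and app name are placeholders; the project's real config lives elsewhere.
app = Celery("spotizerr", broker="redis://localhost:6379/0")

app.conf.task_routes = {
    "routes.utils.celery_tasks.trigger_sse_update_task": {"queue": "utility_tasks"},
    "routes.utils.celery_tasks.download_album": {"queue": "downloads"},
}

# Each queue then needs a worker consuming it, e.g.:
#   celery -A app worker -Q downloads
#   celery -A app worker -Q utility_tasks
```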
@@ -193,8 +194,8 @@ worker_disable_rate_limits = False
 
 # Celery Beat schedule
 beat_schedule = {
-    'cleanup-old-tasks': {
-        'task': 'routes.utils.celery_tasks.cleanup_old_tasks',
-        'schedule': 3600.0, # Run every hour
+    "cleanup-old-tasks": {
+        "task": "routes.utils.celery_tasks.cleanup_old_tasks",
+        "schedule": 3600.0, # Run every hour
     },
 }
 
@@ -60,6 +60,8 @@ def get_config_params():
             "retry_delay_increase": config.get("retry_delay_increase", 5),
             "convertTo": config.get("convertTo", None),
             "bitrate": config.get("bitrate", None),
+            "artist_separator": config.get("artist_separator", "; "),
+            "recursive_quality": config.get("recursive_quality", False),
         }
     except Exception as e:
         logger.error(f"Error reading config for parameters: {e}")
@@ -80,6 +82,8 @@ def get_config_params():
         "retry_delay_increase": 5,
         "convertTo": None, # Default for conversion
         "bitrate": None, # Default for bitrate
+        "artist_separator": "; ",
+        "recursive_quality": False,
     }

@@ -95,7 +99,9 @@ def get_existing_task_id(url, download_type=None):
     Returns:
         str | None: The task ID of the existing active task, or None if no active duplicate is found.
     """
-    logger.debug(f"GET_EXISTING_TASK_ID: Checking for URL='{url}', type='{download_type}'")
+    logger.debug(
+        f"GET_EXISTING_TASK_ID: Checking for URL='{url}', type='{download_type}'"
+    )
     if not url:
         logger.debug("GET_EXISTING_TASK_ID: No URL provided, returning None.")
         return None
@@ -119,8 +125,12 @@ def get_existing_task_id(url, download_type=None):
     }
     logger.debug(f"GET_EXISTING_TASK_ID: Terminal states defined as: {TERMINAL_STATES}")
 
-    all_existing_tasks_summary = get_all_tasks() # This function already filters by default based on its own TERMINAL_STATES
-    logger.debug(f"GET_EXISTING_TASK_ID: Found {len(all_existing_tasks_summary)} tasks from get_all_tasks(). Iterating...")
+    all_existing_tasks_summary = (
+        get_all_tasks()
+    ) # This function already filters by default based on its own TERMINAL_STATES
+    logger.debug(
+        f"GET_EXISTING_TASK_ID: Found {len(all_existing_tasks_summary)} tasks from get_all_tasks(). Iterating..."
+    )
 
     for task_summary in all_existing_tasks_summary:
         existing_task_id = task_summary.get("task_id")
@@ -128,55 +138,82 @@ def get_existing_task_id(url, download_type=None):
             logger.debug("GET_EXISTING_TASK_ID: Skipping summary with no task_id.")
             continue
 
-        logger.debug(f"GET_EXISTING_TASK_ID: Processing existing task_id='{existing_task_id}' from summary.")
+        logger.debug(
+            f"GET_EXISTING_TASK_ID: Processing existing task_id='{existing_task_id}' from summary."
+        )
 
         # First, check the status of the task directly from its latest status record.
         # get_all_tasks() might have its own view of terminal, but we re-check here for absolute certainty.
         existing_last_status_obj = get_last_task_status(existing_task_id)
         if not existing_last_status_obj:
-            logger.debug(f"GET_EXISTING_TASK_ID: No last status object for task_id='{existing_task_id}'. Skipping.")
+            logger.debug(
+                f"GET_EXISTING_TASK_ID: No last status object for task_id='{existing_task_id}'. Skipping."
+            )
             continue
 
         # Extract status from standard structure (status_info.status) or fallback to top-level status
         existing_status = None
-        if "status_info" in existing_last_status_obj and existing_last_status_obj["status_info"]:
+        if (
+            "status_info" in existing_last_status_obj
+            and existing_last_status_obj["status_info"]
+        ):
             existing_status = existing_last_status_obj["status_info"].get("status")
         if not existing_status:
             existing_status = existing_last_status_obj.get("status")
 
-        logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}', last_status_obj='{existing_last_status_obj}', extracted status='{existing_status}'.")
+        logger.debug(
+            f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}', last_status_obj='{existing_last_status_obj}', extracted status='{existing_status}'."
+        )
 
         # If the task is in a terminal state, ignore it and move to the next one.
         if existing_status in TERMINAL_STATES:
-            logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' has terminal status='{existing_status}'. Skipping.")
+            logger.debug(
+                f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' has terminal status='{existing_status}'. Skipping."
+            )
             continue
 
-        logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' has ACTIVE status='{existing_status}'. Proceeding to check URL/type.")
+        logger.debug(
+            f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' has ACTIVE status='{existing_status}'. Proceeding to check URL/type."
+        )
 
         # If the task is active, then check if its URL and type match.
         existing_task_info = get_task_info(existing_task_id)
         if not existing_task_info:
-            logger.debug(f"GET_EXISTING_TASK_ID: No task info for active task_id='{existing_task_id}'. Skipping.")
+            logger.debug(
+                f"GET_EXISTING_TASK_ID: No task info for active task_id='{existing_task_id}'. Skipping."
+            )
             continue
 
         existing_url = existing_task_info.get("url")
-        logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}', info_url='{existing_url}'. Comparing with target_url='{url}'.")
+        logger.debug(
+            f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}', info_url='{existing_url}'. Comparing with target_url='{url}'."
+        )
         if existing_url != url:
-            logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' URL mismatch. Skipping.")
+            logger.debug(
+                f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' URL mismatch. Skipping."
+            )
             continue
 
         if download_type:
             existing_type = existing_task_info.get("download_type")
-            logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}', info_type='{existing_type}'. Comparing with target_type='{download_type}'.")
+            logger.debug(
+                f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}', info_type='{existing_type}'. Comparing with target_type='{download_type}'."
+            )
             if existing_type != download_type:
-                logger.debug(f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' type mismatch. Skipping.")
+                logger.debug(
+                    f"GET_EXISTING_TASK_ID: Task_id='{existing_task_id}' type mismatch. Skipping."
+                )
                 continue
 
         # Found an active task that matches the criteria.
-        logger.info(f"GET_EXISTING_TASK_ID: Found ACTIVE duplicate: task_id='{existing_task_id}' for URL='{url}', type='{download_type}'. Returning this ID.")
+        logger.info(
+            f"GET_EXISTING_TASK_ID: Found ACTIVE duplicate: task_id='{existing_task_id}' for URL='{url}', type='{download_type}'. Returning this ID."
+        )
         return existing_task_id
 
-    logger.debug(f"GET_EXISTING_TASK_ID: No active duplicate found for URL='{url}', type='{download_type}'. Returning None.")
+    logger.debug(
+        f"GET_EXISTING_TASK_ID: No active duplicate found for URL='{url}', type='{download_type}'. Returning None."
+    )
     return None
 
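Note: stripped of its logging, the scan above reduces to three filters — skip summaries with no id, skip tasks whose latest status is terminal, then match URL and (optionally) download type. A condensed restatement (the helper functions are assumed from the diff, and the exact terminal-state values are an assumption):

```python
TERMINAL_STATES = {"complete", "cancelled", "error"}  # assumed values for this sketch


def find_active_duplicate(url, download_type=None):
    """Condensed restatement of get_existing_task_id without the logging."""
    for summary in get_all_tasks():  # helper from the diff, assumed available
        task_id = summary.get("task_id")
        if not task_id:
            continue
        last = get_last_task_status(task_id) or {}
        # Prefer the nested status_info.status, fall back to the top-level field.
        status = (last.get("status_info") or {}).get("status") or last.get("status")
        if status in TERMINAL_STATES:
            continue  # finished tasks never count as duplicates
        info = get_task_info(task_id) or {}
        if info.get("url") != url:
            continue
        if download_type and info.get("download_type") != download_type:
            continue
        return task_id
    return None
```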
@@ -258,8 +295,13 @@ class CeleryDownloadQueueManager:
 
         # Extract status from standard structure (status_info.status) or fallback to top-level status
         existing_status = None
-        if "status_info" in existing_last_status_obj and existing_last_status_obj["status_info"]:
-            existing_status = existing_last_status_obj["status_info"].get("status")
+        if (
+            "status_info" in existing_last_status_obj
+            and existing_last_status_obj["status_info"]
+        ):
+            existing_status = existing_last_status_obj["status_info"].get(
+                "status"
+            )
         if not existing_status:
             existing_status = existing_last_status_obj.get("status")
 
@@ -350,6 +392,13 @@ class CeleryDownloadQueueManager:
                 "bitrate": original_request.get(
                     "bitrate", config_params.get("bitrate")
                 ),
+                "artist_separator": original_request.get(
+                    "artist_separator", config_params.get("artist_separator", "; ")
+                ),
+                "recursive_quality": self._parse_bool_param(
+                    original_request.get("recursive_quality"),
+                    config_params.get("recursive_quality", False),
+                ),
                 "retry_count": 0,
                 "original_request": original_request,
                 "created_at": time.time(),
@@ -2,7 +2,6 @@ import time
 import json
 import logging
 import traceback
-import asyncio
 from celery import Celery, Task, states
 from celery.signals import (
     task_prerun,
@@ -35,6 +34,7 @@ from routes.utils.history_manager import history_manager
 # Create Redis connection for storing task data that's not part of the Celery result backend
 import redis
 
 
+# --- Helpers to build partial summaries from task logs ---
 def _read_task_log_json_lines(task_id: str) -> list:
     log_file_path = Path("./logs/tasks") / f"{task_id}.log"
@@ -69,7 +69,10 @@ def _extract_parent_initial_tracks(log_lines: list, parent_type: str) -> dict:
         if album and isinstance(album, dict) and album.get("tracks"):
             for t in album.get("tracks", []):
                 ids = (t or {}).get("ids", {}) or {}
-                key = ids.get("spotify") or f"{(t or {}).get('track_number', 0)}:{(t or {}).get('title', '')}"
+                key = (
+                    ids.get("spotify")
+                    or f"{(t or {}).get('track_number', 0)}:{(t or {}).get('title', '')}"
+                )
                 track_map[key] = t
             break
     elif parent_type == "playlist":
@@ -79,13 +82,18 @@ def _extract_parent_initial_tracks(log_lines: list, parent_type: str) -> dict:
             for t in playlist.get("tracks", []):
                 ids = (t or {}).get("ids", {}) or {}
                 # TrackPlaylistObject uses position
-                key = ids.get("spotify") or f"{(t or {}).get('position', 0)}:{(t or {}).get('title', '')}"
+                key = (
+                    ids.get("spotify")
+                    or f"{(t or {}).get('position', 0)}:{(t or {}).get('title', '')}"
+                )
                 track_map[key] = t
             break
     return track_map
 
 
-def _extract_completed_and_skipped_from_logs(log_lines: list) -> tuple[set, set, dict, dict]:
+def _extract_completed_and_skipped_from_logs(
+    log_lines: list,
+) -> tuple[set, set, dict, dict]:
     """
     Returns (completed_keys, skipped_keys, completed_objects_by_key, skipped_objects_by_key)
     Keys prefer ids.spotify, falling back to index+title scheme consistent with initial map.
@@ -102,7 +110,9 @@ def _extract_completed_and_skipped_from_logs(log_lines: list) -> tuple[set, set,
             status = status_info.get("status")
             ids = (track or {}).get("ids", {}) or {}
             # Fallback keys try track_number:title and position:title
-            fallback_key = f"{(track or {}).get('track_number', 0)}:{(track or {}).get('title', '')}"
+            fallback_key = (
+                f"{(track or {}).get('track_number', 0)}:{(track or {}).get('title', '')}"
+            )
             key = ids.get("spotify") or fallback_key
             if status == "done":
                 completed_keys.add(key)
@@ -128,11 +138,13 @@ def _to_track_object_from_initial(initial_track: dict, parent_type: str) -> dict
     artists_conv = []
     for a in artists_src:
         if isinstance(a, dict):
-            artists_conv.append({
+            artists_conv.append(
+                {
                     "type": "artistTrack",
                     "name": a.get("name", ""),
                     "ids": a.get("ids", {}) or {},
-            })
+                }
+            )
 
     # Convert album to AlbumTrackObject-like shape
     album_src = initial_track.get("album", {}) or {}
@@ -177,16 +189,23 @@ def build_partial_summary_from_task_log(task_id: str, parent_type: str) -> dict:
     """
     log_lines = _read_task_log_json_lines(task_id)
     initial_tracks_map = _extract_parent_initial_tracks(log_lines, parent_type)
-    completed_keys, skipped_keys, completed_objs, skipped_objs = _extract_completed_and_skipped_from_logs(log_lines)
+    completed_keys, skipped_keys, completed_objs, skipped_objs = (
+        _extract_completed_and_skipped_from_logs(log_lines)
+    )
 
     # Determine failed as initial - completed - skipped
     initial_keys = set(initial_tracks_map.keys())
     failed_keys = initial_keys.difference(completed_keys.union(skipped_keys))
 
-    successful_tracks = [completed_objs[k] for k in completed_keys if k in completed_objs]
+    successful_tracks = [
+        completed_objs[k] for k in completed_keys if k in completed_objs
+    ]
     skipped_tracks = [skipped_objs[k] for k in skipped_keys if k in skipped_objs]
     failed_tracks = [
-        {"track": _to_track_object_from_initial(initial_tracks_map[k], parent_type), "reason": "cancelled"}
+        {
+            "track": _to_track_object_from_initial(initial_tracks_map[k], parent_type),
+            "reason": "cancelled",
+        }
        for k in failed_keys
        if k in initial_tracks_map
     ]
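Note: the failed set is plain set arithmetic — anything announced in the initial callback that never reached "done" or "skipped" is reported as cancelled. For example:

```python
initial_keys = {"t1", "t2", "t3", "t4"}
completed_keys = {"t1", "t2"}
skipped_keys = {"t3"}

failed_keys = initial_keys.difference(completed_keys.union(skipped_keys))
assert failed_keys == {"t4"}  # t4 becomes a "cancelled" failed track
```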
@@ -224,16 +243,15 @@ def trigger_sse_event(task_id: str, reason: str = "status_change"):
         trigger_sse_update_task.apply_async(
             args=[task_id, reason],
             queue="utility_tasks",
-            priority=9 # High priority for real-time updates
+            priority=9, # High priority for real-time updates
         )
         # Only log at debug level to reduce verbosity
         logger.debug(f"SSE: Submitted SSE update task for {task_id} (reason: {reason})")
 
     except Exception as e:
-        logger.error(f"Error submitting SSE update task for task {task_id}: {e}", exc_info=True)
+        logger.error(
+            f"Error submitting SSE update task for task {task_id}: {e}", exc_info=True
+        )
 
 
 class ProgressState:
@@ -421,7 +439,7 @@ def cancel_task(task_id):
                     "status": ProgressState.CANCELLED,
                     "error": "Task cancelled by user",
                     "timestamp": time.time(),
-                }
+                },
             },
         )
 
@@ -616,6 +634,7 @@ def retry_task(task_id):
         logger.error(f"Error retrying task {task_id}: {e}", exc_info=True)
         return {"status": "error", "error": str(e)}
 
+
 class ProgressTrackingTask(Task):
     """Base task class that tracks progress through callbacks"""
 
@@ -715,35 +734,54 @@ class ProgressTrackingTask(Task):
                 if "album" in data:
                     # Album download - create children table and store name in task info
                     logger.info(f"Task {task_id}: Creating album children table")
-                    children_table = history_manager.store_album_history(data, task_id, "in_progress")
+                    children_table = history_manager.store_album_history(
+                        data, task_id, "in_progress"
+                    )
                     if children_table:
                         task_info["children_table"] = children_table
                         store_task_info(task_id, task_info)
-                        logger.info(f"Task {task_id}: Created and stored children table '{children_table}' in task info")
+                        logger.info(
+                            f"Task {task_id}: Created and stored children table '{children_table}' in task info"
+                        )
                     else:
-                        logger.error(f"Task {task_id}: Failed to create album children table")
+                        logger.error(
+                            f"Task {task_id}: Failed to create album children table"
+                        )
                 elif "playlist" in data:
                     # Playlist download - create children table and store name in task info
                     logger.info(f"Task {task_id}: Creating playlist children table")
-                    children_table = history_manager.store_playlist_history(data, task_id, "in_progress")
+                    children_table = history_manager.store_playlist_history(
+                        data, task_id, "in_progress"
+                    )
                     if children_table:
                         task_info["children_table"] = children_table
                         store_task_info(task_id, task_info)
-                        logger.info(f"Task {task_id}: Created and stored children table '{children_table}' in task info")
+                        logger.info(
+                            f"Task {task_id}: Created and stored children table '{children_table}' in task info"
+                        )
                     else:
-                        logger.error(f"Task {task_id}: Failed to create playlist children table")
+                        logger.error(
+                            f"Task {task_id}: Failed to create playlist children table"
+                        )
                 elif "track" in data:
                     # Individual track download - check if it's part of an album/playlist
                     children_table = task_info.get("children_table")
                     if children_table:
                         # Track is part of album/playlist - don't store in main table during initialization
-                        logger.info(f"Task {task_id}: Skipping track initialization storage (part of album/playlist, children table: {children_table})")
+                        logger.info(
+                            f"Task {task_id}: Skipping track initialization storage (part of album/playlist, children table: {children_table})"
+                        )
                     else:
                         # Individual track download - store in main table
-                        logger.info(f"Task {task_id}: Storing individual track history (initializing)")
+                        logger.info(
+                            f"Task {task_id}: Storing individual track history (initializing)"
+                        )
                         history_manager.store_track_history(data, task_id, "in_progress")
             except Exception as e:
-                logger.error(f"Failed to store initial history for task {task_id}: {e}", exc_info=True)
+                logger.error(
+                    f"Failed to store initial history for task {task_id}: {e}",
+                    exc_info=True,
+                )
 
     def _handle_downloading(self, task_id, data, task_info):
         """Handle downloading status from deezspot"""
@@ -756,7 +794,9 @@ class ProgressTrackingTask(Task):
             album_obj = track_obj.get("album", {})
             album_name = album_obj.get("title", "")
 
-            logger.info(f"Task {task_id}: Starting download for track '{track_name}' by {artist_name}")
+            logger.info(
+                f"Task {task_id}: Starting download for track '{track_name}' by {artist_name}"
+            )
 
             data["status"] = ProgressState.DOWNLOADING
             data["song"] = track_name
@@ -784,12 +824,16 @@ class ProgressTrackingTask(Task):
             task_info["current_track_num"] = current_track_num
             store_task_info(task_id, task_info)
 
-            overall_progress = min(int(((current_track_num -1) / total_tracks) * 100), 100)
+            overall_progress = min(
+                int(((current_track_num - 1) / total_tracks) * 100), 100
+            )
             data["overall_progress"] = overall_progress
             data["parsed_current_track"] = current_track_num
             data["parsed_total_tracks"] = total_tracks
 
-            logger.info(f"Task {task_id}: Progress on '{item_name}': Processing track {current_track_num}/{total_tracks} - '{track_name}'")
+            logger.info(
+                f"Task {task_id}: Progress on '{item_name}': Processing track {current_track_num}/{total_tracks} - '{track_name}'"
+            )
 
             data["status"] = ProgressState.PROGRESS
             data["song"] = track_name
@@ -802,7 +846,9 @@ class ProgressTrackingTask(Task):
             track_name = track_obj.get("title", "Unknown Track")
             percentage = data.get("percentage", 0)
 
-            logger.debug(f"Task {task_id}: Real-time progress for '{track_name}': {percentage}%")
+            logger.debug(
+                f"Task {task_id}: Real-time progress for '{track_name}': {percentage}%"
+            )
 
             data["song"] = track_name
             artist = data.get("artist", "Unknown")
@@ -838,7 +884,9 @@ class ProgressTrackingTask(Task):
             )
 
             # Log at debug level
-            logger.debug(f"Task {task_id} track progress: {track_name} by {artist}: {percent}%")
+            logger.debug(
+                f"Task {task_id} track progress: {track_name} by {artist}: {percent}%"
+            )
 
     def _handle_skipped(self, task_id, data, task_info):
         """Handle skipped status from deezspot"""
@@ -848,14 +896,22 @@ class ProgressTrackingTask(Task):
             if "track" in data:
                 # Individual track skipped - check if we should use children table
                 children_table = task_info.get("children_table")
-                logger.debug(f"Task {task_id}: Skipped track, children_table = '{children_table}'")
+                logger.debug(
+                    f"Task {task_id}: Skipped track, children_table = '{children_table}'"
+                )
                 if children_table:
                     # Part of album/playlist - store progressively in children table
-                    logger.info(f"Task {task_id}: Storing skipped track in children table '{children_table}' (progressive)")
-                    history_manager.store_track_history(data, task_id, "skipped", children_table)
+                    logger.info(
+                        f"Task {task_id}: Storing skipped track in children table '{children_table}' (progressive)"
+                    )
+                    history_manager.store_track_history(
+                        data, task_id, "skipped", children_table
+                    )
                 else:
                     # Individual track download - store in main table
-                    logger.info(f"Task {task_id}: Storing skipped track in main table (individual download)")
+                    logger.info(
+                        f"Task {task_id}: Storing skipped track in main table (individual download)"
+                    )
                     history_manager.store_track_history(data, task_id, "skipped")
         except Exception as e:
             logger.error(f"Failed to store skipped history for task {task_id}: {e}")
@@ -948,14 +1004,22 @@ class ProgressTrackingTask(Task):
             elif "track" in data:
                 # Individual track failed - check if we should use children table
                 children_table = task_info.get("children_table")
-                logger.debug(f"Task {task_id}: Failed track, children_table = '{children_table}'")
+                logger.debug(
+                    f"Task {task_id}: Failed track, children_table = '{children_table}'"
+                )
                 if children_table:
                     # Part of album/playlist - store progressively in children table
-                    logger.info(f"Task {task_id}: Storing failed track in children table '{children_table}' (progressive)")
-                    history_manager.store_track_history(data, task_id, "failed", children_table)
+                    logger.info(
+                        f"Task {task_id}: Storing failed track in children table '{children_table}' (progressive)"
+                    )
+                    history_manager.store_track_history(
+                        data, task_id, "failed", children_table
+                    )
                 else:
                     # Individual track download - store in main table
-                    logger.info(f"Task {task_id}: Storing failed track in main table (individual download)")
+                    logger.info(
+                        f"Task {task_id}: Storing failed track in main table (individual download)"
+                    )
                     history_manager.store_track_history(data, task_id, "failed")
         except Exception as e:
             logger.error(f"Failed to store error history for task {task_id}: {e}")
@@ -992,17 +1056,28 @@ class ProgressTrackingTask(Task):
             elif "track" in data:
                 # Individual track completion - check if we should use children table
                 children_table = task_info.get("children_table")
-                logger.debug(f"Task {task_id}: Completed track, children_table = '{children_table}'")
+                logger.debug(
+                    f"Task {task_id}: Completed track, children_table = '{children_table}'"
+                )
                 if children_table:
                     # Part of album/playlist - store progressively in children table
-                    logger.info(f"Task {task_id}: Storing completed track in children table '{children_table}' (progressive)")
-                    history_manager.store_track_history(data, task_id, "completed", children_table)
+                    logger.info(
+                        f"Task {task_id}: Storing completed track in children table '{children_table}' (progressive)"
+                    )
+                    history_manager.store_track_history(
+                        data, task_id, "completed", children_table
+                    )
                 else:
                     # Individual track download - store in main table
-                    logger.info(f"Task {task_id}: Storing completed track in main table (individual download)")
+                    logger.info(
+                        f"Task {task_id}: Storing completed track in main table (individual download)"
+                    )
                     history_manager.store_track_history(data, task_id, "completed")
         except Exception as e:
-            logger.error(f"Failed to store completion history for task {task_id}: {e}", exc_info=True)
+            logger.error(
+                f"Failed to store completion history for task {task_id}: {e}",
+                exc_info=True,
+            )
 
         # Extract data (legacy format support)
         content_type = data.get("type", "").lower()
@@ -1177,7 +1252,7 @@ def task_prerun_handler(task_id=None, task=None, *args, **kwargs):
     """Signal handler when a task begins running"""
     try:
         # Skip verbose logging for SSE tasks
-        if task and hasattr(task, 'name') and task.name in ['trigger_sse_update_task']:
+        if task and hasattr(task, "name") and task.name in ["trigger_sse_update_task"]:
             return
 
         task_info = get_task_info(task_id)
@@ -1208,7 +1283,7 @@ def task_postrun_handler(
     """Signal handler when a task finishes"""
     try:
         # Skip verbose logging for SSE tasks
-        if task and hasattr(task, 'name') and task.name in ['trigger_sse_update_task']:
+        if task and hasattr(task, "name") and task.name in ["trigger_sse_update_task"]:
             return
 
         last_status_for_history = get_last_task_status(task_id)
@@ -1223,9 +1298,7 @@ def task_postrun_handler(
             state == states.REVOKED
             and last_status_for_history.get("status") != ProgressState.CANCELLED
         ):
-            logger.info(
-                f"Task {task_id} was REVOKED (likely cancelled)."
-            )
+            logger.info(f"Task {task_id} was REVOKED (likely cancelled).")
             # return # Let status update proceed if necessary
 
         task_info = get_task_info(task_id)
@@ -1235,8 +1308,13 @@ def task_postrun_handler(
 
         # If task was cancelled/revoked, finalize parent history with partial summary
         try:
-            if state == states.REVOKED or current_redis_status == ProgressState.CANCELLED:
-                parent_type = (task_info.get("download_type") or task_info.get("type") or "").lower()
+            if (
+                state == states.REVOKED
+                or current_redis_status == ProgressState.CANCELLED
+            ):
+                parent_type = (
+                    task_info.get("download_type") or task_info.get("type") or ""
+                ).lower()
                 if parent_type in ["album", "playlist"]:
                     # Build detailed summary from the task log
                     summary = build_partial_summary_from_task_log(task_id, parent_type)
@@ -1247,14 +1325,24 @@ def task_postrun_handler(
                     # Try to enrich parent payload with initial callback object (to capture artists, ids, images)
                     try:
                         log_lines = _read_task_log_json_lines(task_id)
-                        initial_parent = _extract_initial_parent_object(log_lines, parent_type)
+                        initial_parent = _extract_initial_parent_object(
+                            log_lines, parent_type
+                        )
                     except Exception:
                         initial_parent = None
 
                     if parent_type == "album":
                         album_payload = {"title": title, "total_tracks": total_tracks}
                         if isinstance(initial_parent, dict):
-                            for k in ["artists", "ids", "images", "release_date", "genres", "album_type", "tracks"]:
+                            for k in [
+                                "artists",
+                                "ids",
+                                "images",
+                                "release_date",
+                                "genres",
+                                "album_type",
+                                "tracks",
+                            ]:
                                 if k in initial_parent:
                                     album_payload[k] = initial_parent.get(k)
                         # Ensure a main history entry exists even on cancellation
@@ -1266,7 +1354,13 @@ def task_postrun_handler(
                     else:
                         playlist_payload = {"title": title}
                         if isinstance(initial_parent, dict):
-                            for k in ["owner", "ids", "images", "tracks", "description"]:
+                            for k in [
+                                "owner",
+                                "ids",
+                                "images",
+                                "tracks",
+                                "description",
+                            ]:
                                 if k in initial_parent:
                                     playlist_payload[k] = initial_parent.get(k)
                         history_manager.store_playlist_history(
@@ -1316,12 +1410,15 @@ def task_postrun_handler(
                     add_single_track_to_playlist_db(
                         playlist_spotify_id=playlist_id,
                         track_item_for_db=track_item_for_db, # Keep as fallback
-                        task_id=task_id # Primary source for metadata
+                        task_id=task_id, # Primary source for metadata
                     )
 
                     # Update the playlist's m3u file after successful track addition
                     try:
-                        from routes.utils.watch.manager import update_playlist_m3u_file
+                        from routes.utils.watch.manager import (
+                            update_playlist_m3u_file,
+                        )
 
                         logger.info(
                             f"Updating m3u file for playlist {playlist_id} after successful track download."
                         )
@@ -1390,9 +1487,6 @@ def task_failure_handler(
     if isinstance(exception, Retry):
         return
 
-    # Define download task names
-    download_task_names = ["download_track", "download_album", "download_playlist"]
-
     # Get task info and status
     task_info = get_task_info(task_id)
     last_status = get_last_task_status(task_id)
@@ -1523,6 +1617,12 @@ def download_track(self, **task_data):
     save_cover = task_data.get("save_cover", config_params.get("save_cover", True))
     convert_to = task_data.get("convertTo", config_params.get("convertTo"))
     bitrate = task_data.get("bitrate", config_params.get("bitrate"))
+    recursive_quality = task_data.get(
+        "recursive_quality", config_params.get("recursive_quality", False)
+    )
+    artist_separator = task_data.get(
+        "artist_separator", config_params.get("artist_separator", "; ")
+    )
 
     # Execute the download - service is now determined from URL
     download_track_func(
@@ -1539,6 +1639,8 @@ def download_track(self, **task_data):
         progress_callback=self.progress_callback,
         convert_to=convert_to,
         bitrate=bitrate,
+        recursive_quality=recursive_quality,
+        artist_separator=artist_separator,
         _is_celery_task_execution=True, # Skip duplicate check inside Celery task (consistency)
     )
 
@@ -1610,6 +1712,12 @@ def download_album(self, **task_data):
     save_cover = task_data.get("save_cover", config_params.get("save_cover", True))
     convert_to = task_data.get("convertTo", config_params.get("convertTo"))
     bitrate = task_data.get("bitrate", config_params.get("bitrate"))
+    recursive_quality = task_data.get(
+        "recursive_quality", config_params.get("recursive_quality", False)
+    )
+    artist_separator = task_data.get(
+        "artist_separator", config_params.get("artist_separator", "; ")
+    )
 
     # Execute the download - service is now determined from URL
     download_album_func(
@@ -1626,6 +1734,8 @@ def download_album(self, **task_data):
         progress_callback=self.progress_callback,
         convert_to=convert_to,
         bitrate=bitrate,
+        recursive_quality=recursive_quality,
+        artist_separator=artist_separator,
         _is_celery_task_execution=True, # Skip duplicate check inside Celery task
     )
 
@@ -1697,6 +1807,12 @@ def download_playlist(self, **task_data):
     save_cover = task_data.get("save_cover", config_params.get("save_cover", True))
     convert_to = task_data.get("convertTo", config_params.get("convertTo"))
     bitrate = task_data.get("bitrate", config_params.get("bitrate"))
+    recursive_quality = task_data.get(
+        "recursive_quality", config_params.get("recursive_quality", False)
+    )
+    artist_separator = task_data.get(
+        "artist_separator", config_params.get("artist_separator", "; ")
+    )
 
     # Get retry parameters
     initial_retry_delay = task_data.get(
|
         progress_callback=self.progress_callback,
         convert_to=convert_to,
         bitrate=bitrate,
+        recursive_quality=recursive_quality,
+        artist_separator=artist_separator,
         _is_celery_task_execution=True, # Skip duplicate check inside Celery task
     )
 
@@ -1868,11 +1986,7 @@ def delayed_delete_task_data(task_id, reason):
     delete_task_data_and_log(task_id, reason)
 
 
-@celery_app.task(
-    name="trigger_sse_update_task",
-    queue="utility_tasks",
-    bind=True
-)
+@celery_app.task(name="trigger_sse_update_task", queue="utility_tasks", bind=True)
 def trigger_sse_update_task(self, task_id: str, reason: str = "status_update"):
     """
     Dedicated Celery task for triggering SSE task summary updates.
@@ -1880,14 +1994,16 @@ def trigger_sse_update_task(self, task_id: str, reason: str = "status_update"):
     """
     try:
         # Send task summary update via Redis pub/sub
-        logger.debug(f"SSE Task: Processing summary update for task {task_id} (reason: {reason})")
+        logger.debug(
+            f"SSE Task: Processing summary update for task {task_id} (reason: {reason})"
+        )
 
         event_data = {
             "task_id": task_id,
             "reason": reason,
             "timestamp": time.time(),
             "change_type": "task_summary",
-            "event_type": "summary_update"
+            "event_type": "summary_update",
         }
 
         # Use Redis pub/sub for cross-process communication
@@ -1896,19 +2012,23 @@ def trigger_sse_update_task(self, task_id: str, reason: str = "status_update"):
 
     except Exception as e:
         # Only log errors, not success cases
-        logger.error(f"SSE Task: Failed to publish summary update for task {task_id}: {e}", exc_info=True)
+        logger.error(
+            f"SSE Task: Failed to publish summary update for task {task_id}: {e}",
+            exc_info=True,
+        )
         # Don't raise exception to avoid task retry - SSE updates are best-effort
 
 
 def _extract_initial_parent_object(log_lines: list, parent_type: str) -> dict | None:
     """Return the first album/playlist object from the log's initializing callback, if present."""
-    key = "album" if parent_type == "album" else ("playlist" if parent_type == "playlist" else None)
+    key = (
+        "album"
+        if parent_type == "album"
+        else ("playlist" if parent_type == "playlist" else None)
+    )
     if not key:
         return None
     for obj in log_lines:
         if key in obj and isinstance(obj[key], dict):
             return obj[key]
     return None
 
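Note: the actual publish call is elided by the hunk above ("Use Redis pub/sub for cross-process communication"). With redis-py it would look roughly like this; the channel name is an assumption for illustration, not taken from the diff:

```python
import json
import time

import redis

r = redis.Redis()  # connection details are illustrative

def publish_sse_event(task_id: str, reason: str) -> None:
    event_data = {
        "task_id": task_id,
        "reason": reason,
        "timestamp": time.time(),
        "change_type": "task_summary",
        "event_type": "summary_update",
    }
    # Channel name "sse_updates" is assumed for this sketch.
    r.publish("sse_updates", json.dumps(event_data))
```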
@@ -1,12 +1,9 @@
 import spotipy
 from spotipy.oauth2 import SpotifyClientCredentials
 from routes.utils.celery_queue_manager import get_config_params
-from routes.utils.credentials import get_credential, _get_global_spotify_api_creds
+from routes.utils.credentials import _get_global_spotify_api_creds
 import logging
 import time
-from typing import Dict, List, Optional, Any
-import json
-from pathlib import Path
+from typing import Dict, Optional, Any
 
 # Import Deezer API and logging
 from deezspot.deezloader.dee_api import API as DeezerAPI
@@ -19,6 +16,7 @@ _spotify_client = None
 _last_client_init = 0
 _client_init_interval = 3600 # Reinitialize client every hour
 
+
 def _get_spotify_client():
     """
     Get or create a Spotify client with global credentials.
@@ -29,9 +27,10 @@ def _get_spotify_client():
     current_time = time.time()
 
     # Reinitialize client if it's been more than an hour or if client doesn't exist
-    if (_spotify_client is None or
-        current_time - _last_client_init > _client_init_interval):
-
+    if (
+        _spotify_client is None
+        or current_time - _last_client_init > _client_init_interval
+    ):
         client_id, client_secret = _get_global_spotify_api_creds()
 
         if not client_id or not client_secret:
@@ -42,8 +41,7 @@ def _get_spotify_client():
         # Create new client
         _spotify_client = spotipy.Spotify(
             client_credentials_manager=SpotifyClientCredentials(
-                client_id=client_id,
-                client_secret=client_secret
+                client_id=client_id, client_secret=client_secret
             )
         )
         _last_client_init = current_time
@@ -51,10 +49,12 @@ def _get_spotify_client():
 
     return _spotify_client
 
 
 def _rate_limit_handler(func):
     """
     Decorator to handle rate limiting with exponential backoff.
     """
+
     def wrapper(*args, **kwargs):
         max_retries = 3
         base_delay = 1
@@ -71,8 +71,10 @@ def _rate_limit_handler(func):
                     continue
             raise e
         return func(*args, **kwargs)
+
     return wrapper
 
+
 @_rate_limit_handler
 def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]:
     """
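Note: the hunks only show the edges of `_rate_limit_handler`, but the shape is a standard exponential-backoff decorator. A reconstruction under the assumption that it retries spotipy's HTTP 429 rate-limit errors:

```python
import time
from functools import wraps

from spotipy.exceptions import SpotifyException

def _rate_limit_handler(func):
    """Decorator to handle rate limiting with exponential backoff (sketch)."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        max_retries = 3
        base_delay = 1
        for attempt in range(max_retries):
            try:
                return func(*args, **kwargs)
            except SpotifyException as e:
                # Retry only on rate limiting, with 1s, 2s, 4s delays.
                if e.http_status == 429 and attempt < max_retries - 1:
                    time.sleep(base_delay * (2 ** attempt))
                    continue
                raise e
        return func(*args, **kwargs)

    return wrapper
```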
@@ -88,21 +90,29 @@ def get_playlist_metadata(playlist_id: str) -> Dict[str, Any]:
 
     try:
         # Get basic playlist info without tracks
-        playlist = client.playlist(playlist_id, fields="id,name,description,owner,images,snapshot_id,public,followers,tracks.total")
+        playlist = client.playlist(
+            playlist_id,
+            fields="id,name,description,owner,images,snapshot_id,public,followers,tracks.total",
+        )
 
         # Add a flag to indicate this is metadata only
-        playlist['_metadata_only'] = True
-        playlist['_tracks_loaded'] = False
+        playlist["_metadata_only"] = True
+        playlist["_tracks_loaded"] = False
 
-        logger.debug(f"Retrieved playlist metadata for {playlist_id}: {playlist.get('name', 'Unknown')}")
+        logger.debug(
+            f"Retrieved playlist metadata for {playlist_id}: {playlist.get('name', 'Unknown')}"
+        )
         return playlist
 
     except Exception as e:
         logger.error(f"Error fetching playlist metadata for {playlist_id}: {e}")
         raise
 
 
 @_rate_limit_handler
-def get_playlist_tracks(playlist_id: str, limit: int = 100, offset: int = 0) -> Dict[str, Any]:
+def get_playlist_tracks(
+    playlist_id: str, limit: int = 100, offset: int = 0
+) -> Dict[str, Any]:
     """
     Get playlist tracks with pagination support to handle large playlists efficiently.
@@ -122,16 +132,19 @@ def get_playlist_tracks(playlist_id: str, limit: int = 100, offset: int = 0) ->
             playlist_id,
             limit=min(limit, 100), # Spotify API max is 100
             offset=offset,
-            fields="items(track(id,name,artists,album,external_urls,preview_url,duration_ms,explicit,popularity)),total,limit,offset"
+            fields="items(track(id,name,artists,album,external_urls,preview_url,duration_ms,explicit,popularity)),total,limit,offset",
         )
 
-        logger.debug(f"Retrieved {len(tracks_data.get('items', []))} tracks for playlist {playlist_id} (offset: {offset})")
+        logger.debug(
+            f"Retrieved {len(tracks_data.get('items', []))} tracks for playlist {playlist_id} (offset: {offset})"
+        )
         return tracks_data
 
     except Exception as e:
         logger.error(f"Error fetching playlist tracks for {playlist_id}: {e}")
         raise
 
 
 @_rate_limit_handler
 def get_playlist_full(playlist_id: str, batch_size: int = 100) -> Dict[str, Any]:
     """
@@ -144,17 +157,15 @@ def get_playlist_full(playlist_id: str, batch_size: int = 100) -> Dict[str, Any]
     Returns:
         Complete playlist data with all tracks
     """
-    client = _get_spotify_client()
-
     try:
         # First get metadata
         playlist = get_playlist_metadata(playlist_id)
 
         # Get total track count
-        total_tracks = playlist.get('tracks', {}).get('total', 0)
+        total_tracks = playlist.get("tracks", {}).get("total", 0)
 
         if total_tracks == 0:
-            playlist['tracks'] = {'items': [], 'total': 0}
+            playlist["tracks"] = {"items": [], "total": 0}
             return playlist
 
         # Fetch all tracks in batches
@@ -163,7 +174,7 @@ def get_playlist_full(playlist_id: str, batch_size: int = 100) -> Dict[str, Any]
 
         while offset < total_tracks:
             batch = get_playlist_tracks(playlist_id, limit=batch_size, offset=offset)
-            batch_items = batch.get('items', [])
+            batch_items = batch.get("items", [])
             all_tracks.extend(batch_items)
 
             offset += len(batch_items)
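Note: the batching loop is classic offset pagination — advance by however many items came back and stop at the reported total. A compact, generic illustration:

```python
def fetch_all(fetch_page, total: int, batch_size: int = 100) -> list:
    """Collect all items using offset pagination (illustrative helper)."""
    items, offset = [], 0
    while offset < total:
        page = fetch_page(limit=batch_size, offset=offset)
        batch = page.get("items", [])
        if not batch:  # defensive stop if the API returns an empty page
            break
        items.extend(batch)
        offset += len(batch)
    return items
```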
@@ -173,22 +184,25 @@ def get_playlist_full(playlist_id: str, batch_size: int = 100) -> Dict[str, Any]
                 time.sleep(0.1)
 
         # Update playlist with complete tracks data
-        playlist['tracks'] = {
-            'items': all_tracks,
-            'total': total_tracks,
-            'limit': batch_size,
-            'offset': 0
+        playlist["tracks"] = {
+            "items": all_tracks,
+            "total": total_tracks,
+            "limit": batch_size,
+            "offset": 0,
         }
-        playlist['_metadata_only'] = False
-        playlist['_tracks_loaded'] = True
+        playlist["_metadata_only"] = False
+        playlist["_tracks_loaded"] = True
 
-        logger.info(f"Retrieved complete playlist {playlist_id} with {total_tracks} tracks")
+        logger.info(
+            f"Retrieved complete playlist {playlist_id} with {total_tracks} tracks"
+        )
         return playlist
 
     except Exception as e:
         logger.error(f"Error fetching complete playlist {playlist_id}: {e}")
         raise
 
 
 def check_playlist_updated(playlist_id: str, last_snapshot_id: str) -> bool:
     """
     Check if playlist has been updated by comparing snapshot_id.
@@ -203,7 +217,7 @@ def check_playlist_updated(playlist_id: str, last_snapshot_id: str) -> bool:
     """
     try:
         metadata = get_playlist_metadata(playlist_id)
-        current_snapshot_id = metadata.get('snapshot_id')
+        current_snapshot_id = metadata.get("snapshot_id")
 
         return current_snapshot_id != last_snapshot_id
 
@@ -211,8 +225,14 @@ def check_playlist_updated(playlist_id: str, last_snapshot_id: str) -> bool:
         logger.error(f"Error checking playlist update status for {playlist_id}: {e}")
         raise
 
 
 @_rate_limit_handler
-def get_spotify_info(spotify_id: str, spotify_type: str, limit: Optional[int] = None, offset: Optional[int] = None) -> Dict[str, Any]:
+def get_spotify_info(
+    spotify_id: str,
+    spotify_type: str,
+    limit: Optional[int] = None,
+    offset: Optional[int] = None,
+) -> Dict[str, Any]:
     """
     Get info from Spotify API using Spotipy directly.
     Optimized to prevent rate limiting by using appropriate endpoints.
@@ -249,9 +269,7 @@ def get_spotify_info(spotify_id: str, spotify_type: str, limit: Optional[int] =
         elif spotify_type == "artist_discography":
             # Get artist's albums with pagination
             albums = client.artist_albums(
-                spotify_id,
-                limit=limit or 20,
-                offset=offset or 0
+                spotify_id, limit=limit or 20, offset=offset or 0
             )
             return albums
 
||||
@@ -265,10 +283,12 @@ def get_spotify_info(spotify_id: str, spotify_type: str, limit: Optional[int] =
|
||||
logger.error(f"Error fetching {spotify_type} {spotify_id}: {e}")
|
||||
raise
|
||||
|
||||
|
||||
# Cache for playlist metadata to reduce API calls
|
||||
_playlist_metadata_cache = {}
|
||||
_playlist_metadata_cache: Dict[str, tuple[Dict[str, Any], float]] = {}
|
||||
_cache_ttl = 300 # 5 minutes cache
|
||||
|
||||
|
||||
def get_cached_playlist_metadata(playlist_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get playlist metadata from cache if available and not expired.
|
||||
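Note: the annotated cache maps playlist id to a `(metadata, stored_at)` tuple, and expiry is a timestamp comparison against `_cache_ttl`. A sketch of the read path under that layout:

```python
import time
from typing import Any, Dict, Optional

_playlist_metadata_cache: Dict[str, tuple[Dict[str, Any], float]] = {}
_cache_ttl = 300  # 5 minutes

def get_cached_playlist_metadata(playlist_id: str) -> Optional[Dict[str, Any]]:
    entry = _playlist_metadata_cache.get(playlist_id)
    if entry is None:
        return None
    metadata, stored_at = entry
    if time.time() - stored_at > _cache_ttl:
        return None  # expired; caller should refetch
    return metadata
```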
@@ -286,6 +306,7 @@ def get_cached_playlist_metadata(playlist_id: str) -> Optional[Dict[str, Any]]:
 
     return None
 
+
 def cache_playlist_metadata(playlist_id: str, metadata: Dict[str, Any]):
     """
     Cache playlist metadata with timestamp.
@@ -296,7 +317,10 @@ def cache_playlist_metadata(playlist_id: str, metadata: Dict[str, Any]):
     """
     _playlist_metadata_cache[playlist_id] = (metadata, time.time())
 
-def get_playlist_info_optimized(playlist_id: str, include_tracks: bool = False) -> Dict[str, Any]:
+
+def get_playlist_info_optimized(
+    playlist_id: str, include_tracks: bool = False
+) -> Dict[str, Any]:
     """
     Optimized playlist info function that uses caching and selective loading.
 
@@ -318,9 +342,9 @@ def get_playlist_info_optimized(playlist_id: str, include_tracks: bool = False)
         # Get complete playlist data
         playlist_data = get_playlist_full(playlist_id)
         # Cache the metadata portion
-        metadata_only = {k: v for k, v in playlist_data.items() if k != 'tracks'}
-        metadata_only['_metadata_only'] = True
-        metadata_only['_tracks_loaded'] = False
+        metadata_only = {k: v for k, v in playlist_data.items() if k != "tracks"}
+        metadata_only["_metadata_only"] = True
+        metadata_only["_tracks_loaded"] = False
         cache_playlist_metadata(playlist_id, metadata_only)
         return playlist_data
     else:
@@ -329,6 +353,7 @@ def get_playlist_info_optimized(playlist_id: str, include_tracks: bool = False)
         cache_playlist_metadata(playlist_id, metadata)
         return metadata
 
 
 # Keep the existing Deezer functions unchanged
 def get_deezer_info(deezer_id, deezer_type, limit=None):
     """

File diff suppressed because it is too large
@@ -24,13 +24,17 @@ def download_playlist(
     progress_callback=None,
     convert_to=None,
     bitrate=None,
+    artist_separator="; ",
+    recursive_quality=True,
     _is_celery_task_execution=False, # Added to skip duplicate check from Celery task
 ):
     if not _is_celery_task_execution:
-        existing_task = get_existing_task_id(url) # Check for duplicates only if not called by Celery task
+        existing_task = get_existing_task_id(
+            url
+        ) # Check for duplicates only if not called by Celery task
         if existing_task:
             raise DuplicateDownloadError(
-                f"Download for this URL is already in progress.",
+                "Download for this URL is already in progress.",
                 existing_task=existing_task,
             )
     try:
@@ -93,7 +97,7 @@ def download_playlist(
                 link_playlist=url, # Spotify URL
                 output_dir="./downloads",
                 quality_download=quality, # Deezer quality
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 make_zip=False,
@@ -106,6 +110,7 @@ def download_playlist(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: playlist.py - Playlist download via Deezer (account: {fallback}) successful for Spotify URL."
@@ -153,7 +158,7 @@ def download_playlist(
                 link_playlist=url, # Spotify URL
                 output_dir="./downloads",
                 quality_download=fall_quality, # Spotify quality
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 make_zip=False,
@@ -167,6 +172,7 @@ def download_playlist(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: playlist.py - Spotify direct download (account: {main} for blob) successful."
@@ -213,7 +219,7 @@ def download_playlist(
                 link_playlist=url,
                 output_dir="./downloads",
                 quality_download=quality,
-                recursive_quality=True,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 make_zip=False,
@@ -227,6 +233,7 @@ def download_playlist(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: playlist.py - Direct Spotify download (account: {main} for blob) successful."
@@ -254,7 +261,7 @@ def download_playlist(
                 link_playlist=url,
                 output_dir="./downloads",
                 quality_download=quality,
-                recursive_quality=False, # Usually False for playlists to get individual track qualities
+                recursive_quality=recursive_quality, # Usually False for playlists to get individual track qualities
                 recursive_download=False,
                 make_zip=False,
                 custom_dir_format=custom_dir_format,
@@ -266,6 +273,7 @@ def download_playlist(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: playlist.py - Direct Deezer download (account: {main}) successful."

@@ -25,6 +25,8 @@ def download_track(
     progress_callback=None,
     convert_to=None,
     bitrate=None,
+    artist_separator="; ",
+    recursive_quality=False,
     _is_celery_task_execution=False, # Added for consistency, not currently used for duplicate check
 ):
     try:
@@ -91,7 +93,7 @@ def download_track(
                 link_track=url, # Spotify URL
                 output_dir="./downloads",
                 quality_download=quality, # Deezer quality
-                recursive_quality=False,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 custom_dir_format=custom_dir_format,
@@ -102,6 +104,7 @@ def download_track(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: track.py - Track download via Deezer (account: {fallback}) successful for Spotify URL."
@@ -147,7 +150,7 @@ def download_track(
                 link_track=url, # Spotify URL
                 output_dir="./downloads",
                 quality_download=fall_quality, # Spotify quality
-                recursive_quality=False,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 real_time_dl=real_time,
@@ -160,6 +163,7 @@ def download_track(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: track.py - Spotify direct download (account: {main} for blob) successful."
@@ -202,7 +206,7 @@ def download_track(
                 link_track=url,
                 output_dir="./downloads",
                 quality_download=quality,
-                recursive_quality=False,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 not_interface=False,
                 real_time_dl=real_time,
@@ -215,6 +219,7 @@ def download_track(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: track.py - Direct Spotify download (account: {main} for blob) successful."
@@ -242,7 +247,7 @@ def download_track(
                 link_track=url,
                 output_dir="./downloads",
                 quality_download=quality,
-                recursive_quality=False,
+                recursive_quality=recursive_quality,
                 recursive_download=False,
                 custom_dir_format=custom_dir_format,
                 custom_track_format=custom_track_format,
@@ -253,6 +258,7 @@ def download_track(
                 max_retries=max_retries,
                 convert_to=convert_to,
                 bitrate=bitrate,
+                artist_separator=artist_separator,
             )
             print(
                 f"DEBUG: track.py - Direct Deezer download (account: {main}) successful."

@@ -183,7 +183,9 @@ def _update_all_playlist_track_tables(cursor: sqlite3.Cursor):
     """Updates all existing playlist track tables to ensure they have the latest schema."""
     try:
         # Get all table names that start with 'playlist_'
-        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'")
+        cursor.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'"
+        )
         playlist_tables = cursor.fetchall()
 
         for table_row in playlist_tables:
@@ -194,7 +196,9 @@ def _update_all_playlist_track_tables(cursor: sqlite3.Cursor):
|
||||
EXPECTED_PLAYLIST_TRACKS_COLUMNS,
|
||||
f"playlist tracks ({table_name})",
|
||||
):
|
||||
logger.info(f"Updated schema for existing playlist track table: {table_name}")
|
||||
logger.info(
|
||||
f"Updated schema for existing playlist track table: {table_name}"
|
||||
)
|
||||
|
||||
except sqlite3.Error as e:
|
||||
logger.error(f"Error updating playlist track tables schema: {e}", exc_info=True)
|
||||
@@ -219,7 +223,9 @@ def update_all_existing_tables_schema():
|
||||
_update_all_playlist_track_tables(cursor)
|
||||
|
||||
conn.commit()
|
||||
logger.info("Successfully updated all existing tables schema in playlists database")
|
||||
logger.info(
|
||||
"Successfully updated all existing tables schema in playlists database"
|
||||
)
|
||||
|
||||
except sqlite3.Error as e:
|
||||
logger.error(f"Error updating existing tables schema: {e}", exc_info=True)
|
||||
@@ -238,7 +244,9 @@ def ensure_playlist_table_schema(playlist_spotify_id: str):
|
||||
f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}';"
|
||||
)
|
||||
if cursor.fetchone() is None:
|
||||
logger.warning(f"Table {table_name} does not exist. Cannot update schema.")
|
||||
logger.warning(
|
||||
f"Table {table_name} does not exist. Cannot update schema."
|
||||
)
|
||||
return False
|
||||
|
||||
# Update schema
|
||||
@@ -252,11 +260,16 @@ def ensure_playlist_table_schema(playlist_spotify_id: str):
|
||||
logger.info(f"Updated schema for playlist track table: {table_name}")
|
||||
return True
|
||||
else:
|
||||
logger.info(f"Schema already up-to-date for playlist track table: {table_name}")
|
||||
logger.info(
|
||||
f"Schema already up-to-date for playlist track table: {table_name}"
|
||||
)
|
||||
return True
|
||||
|
||||
except sqlite3.Error as e:
|
||||
logger.error(f"Error updating schema for playlist {playlist_spotify_id}: {e}", exc_info=True)
|
||||
logger.error(
|
||||
f"Error updating schema for playlist {playlist_spotify_id}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
return False
|
||||
|
||||
|
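All of these helpers share one schema-migration pattern: read the table's current columns via PRAGMA table_info, then ALTER TABLE to add whatever is missing. A self-contained sketch of the pattern under assumed column names (the real expected set lives in EXPECTED_PLAYLIST_TRACKS_COLUMNS):

    import sqlite3

    EXPECTED_COLUMNS = {"snapshot_id": "TEXT", "last_seen": "INTEGER"}

    def ensure_columns(cursor: sqlite3.Cursor, table: str) -> bool:
        # Compare existing columns against the expected set and add what is missing.
        cursor.execute(f"PRAGMA table_info({table})")
        existing = {row[1] for row in cursor.fetchall()}
        changed = False
        for name, col_type in EXPECTED_COLUMNS.items():
            if name not in existing:
                cursor.execute(f"ALTER TABLE {table} ADD COLUMN {name} {col_type}")
                changed = True
        return changed

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute("CREATE TABLE playlist_demo (spotify_track_id TEXT)")
    print(ensure_columns(cur, "playlist_demo"))  # True: columns were added
    print(ensure_columns(cur, "playlist_demo"))  # False: already up to date
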
@@ -455,10 +468,12 @@ def get_playlist_track_ids_from_db(playlist_spotify_id: str):
return track_ids


def get_playlist_tracks_with_snapshot_from_db(playlist_spotify_id: str):
def get_playlist_tracks_with_snapshot_from_db(
playlist_spotify_id: str,
) -> dict[str, dict[str, str]]:
"""Retrieves all tracks with their snapshot_ids from a specific playlist's tracks table in playlists.db."""
table_name = f"playlist_{playlist_spotify_id.replace('-', '_')}"
tracks_data = {}
tracks_data: dict[str, dict[str, str]] = {}
try:
with _get_playlists_db_connection() as conn: # Use playlists connection
cursor = conn.cursor()
@@ -486,7 +501,7 @@ def get_playlist_tracks_with_snapshot_from_db(playlist_spotify_id: str):
for row in rows:
tracks_data[row["spotify_track_id"]] = {
"snapshot_id": row["snapshot_id"],
"title": row["title"]
"title": row["title"],
}
return tracks_data
except sqlite3.Error as e:
@@ -530,7 +545,9 @@ def get_playlist_total_tracks_from_db(playlist_spotify_id: str) -> int:
return 0


def add_tracks_to_playlist_db(playlist_spotify_id: str, tracks_data: list, snapshot_id: str = None):
def add_tracks_to_playlist_db(
playlist_spotify_id: str, tracks_data: list, snapshot_id: str = None
):
"""
Updates existing tracks in the playlist's DB table to mark them as currently present
in Spotify and updates their last_seen timestamp and snapshot_id. Also refreshes metadata.
@@ -574,7 +591,9 @@ def add_tracks_to_playlist_db(playlist_spotify_id: str, tracks_data: list, snaps
track_number = track.get("track_number")
# Log the raw track_number value for debugging
if track_number is None or track_number == 0:
logger.debug(f"Track '{track.get('name', 'Unknown')}' has track_number: {track_number} (raw API value)")
logger.debug(
f"Track '{track.get('name', 'Unknown')}' has track_number: {track_number} (raw API value)"
)

# Prepare tuple for UPDATE statement.
# Order: title, artist_names, album_name, album_artist_names, track_number,
@@ -790,7 +809,12 @@ def remove_specific_tracks_from_playlist_table(
return 0


def add_single_track_to_playlist_db(playlist_spotify_id: str, track_item_for_db: dict, snapshot_id: str = None, task_id: str = None):
def add_single_track_to_playlist_db(
playlist_spotify_id: str,
track_item_for_db: dict,
snapshot_id: str = None,
task_id: str = None,
):
"""
Adds or updates a single track in the specified playlist's tracks table in playlists.db.
Uses deezspot callback data as the source of metadata.
@@ -802,11 +826,15 @@ def add_single_track_to_playlist_db(playlist_spotify_id: str, track_item_for_db:
task_id: Task ID to extract metadata from callback data
"""
if not task_id:
logger.error(f"No task_id provided for playlist {playlist_spotify_id}. Task ID is required to extract metadata from deezspot callback.")
logger.error(
f"No task_id provided for playlist {playlist_spotify_id}. Task ID is required to extract metadata from deezspot callback."
)
return

if not track_item_for_db or not track_item_for_db.get("track", {}).get("id"):
logger.error(f"No track_item_for_db or spotify track ID provided for playlist {playlist_spotify_id}")
logger.error(
f"No track_item_for_db or spotify track ID provided for playlist {playlist_spotify_id}"
)
return

table_name = f"playlist_{playlist_spotify_id.replace('-', '_')}"
@@ -818,7 +846,9 @@ def add_single_track_to_playlist_db(playlist_spotify_id: str, track_item_for_db:

last_status = get_last_task_status(task_id)
if not last_status or "raw_callback" not in last_status:
logger.error(f"No raw_callback found in task status for task {task_id}. Cannot extract metadata.")
logger.error(
f"No raw_callback found in task status for task {task_id}. Cannot extract metadata."
)
return

callback_data = last_status["raw_callback"]
@@ -835,7 +865,9 @@ def add_single_track_to_playlist_db(playlist_spotify_id: str, track_item_for_db:

# Extract artist names from artists array
artists = track_obj.get("artists", [])
artist_names = ", ".join([artist.get("name", "") for artist in artists if artist.get("name")])
artist_names = ", ".join(
[artist.get("name", "") for artist in artists if artist.get("name")]
)
if not artist_names:
artist_names = "N/A"

@@ -845,14 +877,21 @@ def add_single_track_to_playlist_db(playlist_spotify_id: str, track_item_for_db:

# Extract album artist names from album artists array
album_artists = album_obj.get("artists", [])
album_artist_names = ", ".join([artist.get("name", "") for artist in album_artists if artist.get("name")])
album_artist_names = ", ".join(
[artist.get("name", "") for artist in album_artists if artist.get("name")]
)
if not album_artist_names:
album_artist_names = "N/A"

logger.debug(f"Extracted metadata from deezspot callback for '{track_name}': track_number={track_number}")
logger.debug(
f"Extracted metadata from deezspot callback for '{track_name}': track_number={track_number}"
)

except Exception as e:
logger.error(f"Error extracting metadata from task {task_id} callback: {e}", exc_info=True)
logger.error(
f"Error extracting metadata from task {task_id} callback: {e}",
exc_info=True,
)
return

current_time = int(time.time())
@@ -860,9 +899,13 @@ def add_single_track_to_playlist_db(playlist_spotify_id: str, track_item_for_db:
# Get spotify_track_id and added_at from original track_item_for_db
track_id = track_item_for_db["track"]["id"]
added_at = track_item_for_db.get("added_at")
album_id = track_item_for_db.get("track", {}).get("album", {}).get("id") # Only album ID from original data
album_id = (
track_item_for_db.get("track", {}).get("album", {}).get("id")
) # Only album ID from original data

logger.info(f"Adding track '{track_name}' (ID: {track_id}) to playlist {playlist_spotify_id} with track_number: {track_number} (from deezspot callback)")
logger.info(
f"Adding track '{track_name}' (ID: {track_id}) to playlist {playlist_spotify_id} with track_number: {track_number} (from deezspot callback)"
)

track_data_tuple = (
track_id,

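For orientation, the annotated return type above implies a mapping of this shape (IDs and values below are invented), which is what lets the watch manager single out tracks whose stored snapshot_id lags the playlist's current one:

    tracks_data: dict[str, dict[str, str]] = {
        "3n3Ppam7vgaVa1iaRUc9Lp": {"snapshot_id": "snap_old", "title": "Mr. Brightside"},
        "7ouMYWpwJ422jRcDASZB7P": {"snapshot_id": "snap_new", "title": "Knights of Cydonia"},
    }

    current_snapshot_id = "snap_new"
    stale = [tid for tid, info in tracks_data.items() if info["snapshot_id"] != current_snapshot_id]
    print(stale)  # ['3n3Ppam7vgaVa1iaRUc9Lp'], candidates for a targeted re-check
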
@@ -28,7 +28,6 @@ from routes.utils.get_info import (
get_spotify_info,
get_playlist_metadata,
get_playlist_tracks,
check_playlist_updated,
) # To fetch playlist, track, artist, and album details
from routes.utils.celery_queue_manager import download_queue_manager

@@ -38,12 +37,12 @@ STOP_EVENT = threading.Event()

# Format mapping for audio file conversions
AUDIO_FORMAT_EXTENSIONS = {
'mp3': '.mp3',
'flac': '.flac',
'm4a': '.m4a',
'aac': '.m4a',
'ogg': '.ogg',
'wav': '.wav',
"mp3": ".mp3",
"flac": ".flac",
"m4a": ".m4a",
"aac": ".m4a",
"ogg": ".ogg",
"wav": ".wav",
}

DEFAULT_WATCH_CONFIG = {
@@ -128,12 +127,16 @@ def has_playlist_changed(playlist_spotify_id: str, current_snapshot_id: str) ->
return current_snapshot_id != last_snapshot_id

except Exception as e:
logger.error(f"Error checking playlist change status for {playlist_spotify_id}: {e}")
logger.error(
f"Error checking playlist change status for {playlist_spotify_id}: {e}"
)
# On error, assume playlist has changed to be safe
return True


def needs_track_sync(playlist_spotify_id: str, current_snapshot_id: str, api_total_tracks: int) -> tuple[bool, list[str]]:
def needs_track_sync(
playlist_spotify_id: str, current_snapshot_id: str, api_total_tracks: int
) -> tuple[bool, list[str]]:
"""
Check if tracks need to be synchronized by comparing snapshot_ids and total counts.

@@ -184,7 +187,9 @@ def needs_track_sync(playlist_spotify_id: str, current_snapshot_id: str, api_tot
return True, []


def find_tracks_in_playlist(playlist_spotify_id: str, tracks_to_find: list[str], current_snapshot_id: str) -> tuple[list, list]:
def find_tracks_in_playlist(
playlist_spotify_id: str, tracks_to_find: list[str], current_snapshot_id: str
) -> tuple[list, list]:
"""
Progressively fetch playlist tracks until all specified tracks are found or playlist is exhausted.

@@ -209,10 +214,14 @@ def find_tracks_in_playlist(playlist_spotify_id: str, tracks_to_find: list[str],

while not_found_tracks and offset < 10000: # Safety limit
try:
tracks_batch = get_playlist_tracks(playlist_spotify_id, limit=limit, offset=offset)
tracks_batch = get_playlist_tracks(
playlist_spotify_id, limit=limit, offset=offset
)

if not tracks_batch or "items" not in tracks_batch:
logger.warning(f"No tracks returned for playlist {playlist_spotify_id} at offset {offset}")
logger.warning(
f"No tracks returned for playlist {playlist_spotify_id} at offset {offset}"
)
break

batch_items = tracks_batch.get("items", [])
@@ -236,7 +245,9 @@ def find_tracks_in_playlist(playlist_spotify_id: str, tracks_to_find: list[str],
time.sleep(0.1)

except Exception as e:
logger.error(f"Error fetching tracks batch for playlist {playlist_spotify_id} at offset {offset}: {e}")
logger.error(
f"Error fetching tracks batch for playlist {playlist_spotify_id} at offset {offset}: {e}"
)
break

logger.info(
@@ -293,16 +304,22 @@ def check_watched_playlists(specific_playlist_id: str = None):
continue

api_snapshot_id = current_playlist_metadata.get("snapshot_id")
api_total_tracks = current_playlist_metadata.get("tracks", {}).get("total", 0)
api_total_tracks = current_playlist_metadata.get("tracks", {}).get(
"total", 0
)

# Enhanced snapshot_id checking with track-level tracking
if use_snapshot_checking:
# First check if playlist snapshot_id has changed
playlist_changed = has_playlist_changed(playlist_spotify_id, api_snapshot_id)
playlist_changed = has_playlist_changed(
playlist_spotify_id, api_snapshot_id
)

if not playlist_changed:
# Even if playlist snapshot_id hasn't changed, check if individual tracks need sync
needs_sync, tracks_to_find = needs_track_sync(playlist_spotify_id, api_snapshot_id, api_total_tracks)
needs_sync, tracks_to_find = needs_track_sync(
playlist_spotify_id, api_snapshot_id, api_total_tracks
)

if not needs_sync:
logger.info(
@@ -321,18 +338,24 @@ def check_watched_playlists(specific_playlist_id: str = None):
f"Playlist Watch Manager: Playlist '{playlist_name}' snapshot_id unchanged, but {len(tracks_to_find)} tracks need sync. Proceeding with targeted check."
)
# Use targeted track search instead of full fetch
found_tracks, not_found_tracks = find_tracks_in_playlist(playlist_spotify_id, tracks_to_find, api_snapshot_id)
found_tracks, not_found_tracks = find_tracks_in_playlist(
playlist_spotify_id, tracks_to_find, api_snapshot_id
)

# Update found tracks with new snapshot_id
if found_tracks:
add_tracks_to_playlist_db(playlist_spotify_id, found_tracks, api_snapshot_id)
add_tracks_to_playlist_db(
playlist_spotify_id, found_tracks, api_snapshot_id
)

# Mark not found tracks as removed
if not_found_tracks:
logger.info(
f"Playlist Watch Manager: {len(not_found_tracks)} tracks not found in playlist '{playlist_name}'. Marking as removed."
)
mark_tracks_as_not_present_in_spotify(playlist_spotify_id, not_found_tracks)
mark_tracks_as_not_present_in_spotify(
playlist_spotify_id, not_found_tracks
)

# Update the playlist's m3u file after tracks are removed
try:
@@ -347,7 +370,9 @@ def check_watched_playlists(specific_playlist_id: str = None):
)

# Update playlist snapshot and continue to next playlist
update_playlist_snapshot(playlist_spotify_id, api_snapshot_id, api_total_tracks)
update_playlist_snapshot(
playlist_spotify_id, api_snapshot_id, api_total_tracks
)
logger.info(
f"Playlist Watch Manager: Finished targeted sync for playlist '{playlist_name}'. Snapshot ID updated to {api_snapshot_id}."
)
@@ -482,7 +507,9 @@ def check_watched_playlists(specific_playlist_id: str = None):
logger.info(
f"Playlist Watch Manager: Refreshing {len(all_api_track_items)} tracks from API in local DB for playlist '{playlist_name}'."
)
add_tracks_to_playlist_db(playlist_spotify_id, all_api_track_items, api_snapshot_id)
add_tracks_to_playlist_db(
playlist_spotify_id, all_api_track_items, api_snapshot_id
)

removed_db_ids = db_track_ids - current_api_track_ids
if removed_db_ids:
@@ -814,9 +841,14 @@ def start_watch_manager(): # Renamed from start_playlist_watch_manager
# Update all existing tables to ensure they have the latest schema
try:
update_all_existing_tables_schema()
logger.info("Watch Manager: Successfully updated all existing tables schema")
logger.info(
"Watch Manager: Successfully updated all existing tables schema"
)
except Exception as e:
logger.error(f"Watch Manager: Error updating existing tables schema: {e}", exc_info=True)
logger.error(
f"Watch Manager: Error updating existing tables schema: {e}",
exc_info=True,
)

_watch_scheduler_thread = threading.Thread(
target=playlist_watch_scheduler, daemon=True
@@ -855,10 +887,14 @@ def get_playlist_tracks_for_m3u(playlist_spotify_id: str) -> List[Dict[str, Any]
List of track dictionaries with metadata
"""
table_name = f"playlist_{playlist_spotify_id.replace('-', '_')}"
tracks = []
tracks: List[Dict[str, Any]] = []

try:
from routes.utils.watch.db import _get_playlists_db_connection, _ensure_table_schema, EXPECTED_PLAYLIST_TRACKS_COLUMNS
from routes.utils.watch.db import (
_get_playlists_db_connection,
_ensure_table_schema,
EXPECTED_PLAYLIST_TRACKS_COLUMNS,
)

with _get_playlists_db_connection() as conn:
cursor = conn.cursor()
@@ -892,15 +928,18 @@ def get_playlist_tracks_for_m3u(playlist_spotify_id: str) -> List[Dict[str, Any]

rows = cursor.fetchall()
for row in rows:
tracks.append({
tracks.append(
{
"spotify_track_id": row["spotify_track_id"],
"title": row["title"] or "Unknown Track",
"artist_names": row["artist_names"] or "Unknown Artist",
"album_name": row["album_name"] or "Unknown Album",
"album_artist_names": row["album_artist_names"] or "Unknown Artist",
"album_artist_names": row["album_artist_names"]
or "Unknown Artist",
"track_number": row["track_number"] or 0,
"duration_ms": row["duration_ms"] or 0,
})
}
)

return tracks

@@ -912,7 +951,12 @@ def get_playlist_tracks_for_m3u(playlist_spotify_id: str) -> List[Dict[str, Any]
return tracks


def generate_track_file_path(track: Dict[str, Any], custom_dir_format: str, custom_track_format: str, convert_to: str = None) -> str:
def generate_track_file_path(
track: Dict[str, Any],
custom_dir_format: str,
custom_track_format: str,
convert_to: str = None,
) -> str:
"""
Generate the file path for a track based on custom format strings.
This mimics the path generation logic used by the deezspot library.
@@ -937,13 +981,15 @@ def generate_track_file_path(track: Dict[str, Any], custom_dir_format: str, cust

# Use album artist for directory structure, main artist for track name
main_artist = artist_names.split(", ")[0] if artist_names else "Unknown Artist"
album_artist = album_artist_names.split(", ")[0] if album_artist_names else main_artist
album_artist = (
album_artist_names.split(", ")[0] if album_artist_names else main_artist
)

# Clean names for filesystem
def clean_name(name):
# Remove or replace characters that are problematic in filenames
name = re.sub(r'[<>:"/\\|?*]', '_', str(name))
name = re.sub(r'[\x00-\x1f]', '', name) # Remove control characters
name = re.sub(r'[<>:"/\\|?*]', "_", str(name))
name = re.sub(r"[\x00-\x1f]", "", name) # Remove control characters
return name.strip()

clean_album_artist = clean_name(album_artist)
@@ -960,14 +1006,15 @@ def generate_track_file_path(track: Dict[str, Any], custom_dir_format: str, cust
"%ar_album%": clean_album_artist,
"%tracknum%": f"{track_number:02d}" if track_number > 0 else "00",
"%year%": "", # Not available in current DB schema

# Additional placeholders (not available in current DB schema, using defaults)
"%discnum%": "01", # Default to disc 1
"%date%": "", # Not available
"%genre%": "", # Not available
"%isrc%": "", # Not available
"%explicit%": "", # Not available
"%duration%": str(duration_ms // 1000) if duration_ms > 0 else "0", # Convert ms to seconds
"%duration%": str(duration_ms // 1000)
if duration_ms > 0
else "0", # Convert ms to seconds
}

# Apply replacements to directory format
@@ -985,25 +1032,32 @@ def generate_track_file_path(track: Dict[str, Any], custom_dir_format: str, cust
full_path = os.path.normpath(full_path)

# Determine file extension based on convert_to setting or default to mp3
if not any(full_path.lower().endswith(ext) for ext in ['.mp3', '.flac', '.m4a', '.ogg', '.wav']):
if not any(
full_path.lower().endswith(ext)
for ext in [".mp3", ".flac", ".m4a", ".ogg", ".wav"]
):
if convert_to:
extension = AUDIO_FORMAT_EXTENSIONS.get(convert_to.lower(), '.mp3')
extension = AUDIO_FORMAT_EXTENSIONS.get(convert_to.lower(), ".mp3")
full_path += extension
else:
full_path += '.mp3' # Default fallback
full_path += ".mp3" # Default fallback

return full_path

except Exception as e:
logger.error(f"Error generating file path for track {track.get('title', 'Unknown')}: {e}")
logger.error(
f"Error generating file path for track {track.get('title', 'Unknown')}: {e}"
)
# Return a fallback path with appropriate extension
safe_title = re.sub(r'[<>:"/\\|?*\x00-\x1f]', '_', str(track.get('title', 'Unknown Track')))
safe_title = re.sub(
r'[<>:"/\\|?*\x00-\x1f]', "_", str(track.get("title", "Unknown Track"))
)

# Determine extension for fallback
if convert_to:
extension = AUDIO_FORMAT_EXTENSIONS.get(convert_to.lower(), '.mp3')
extension = AUDIO_FORMAT_EXTENSIONS.get(convert_to.lower(), ".mp3")
else:
extension = '.mp3'
extension = ".mp3"

return f"Unknown Artist/Unknown Album/{safe_title}{extension}"

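The path generation above boils down to plain string replacement over the two format strings. Using the default formats that appear later in this diff ("%ar_album%/%album%" and "%tracknum%. %music%") and invented metadata (the %artist% token is an assumption for illustration):

    replacements = {
        "%music%": "One More Time",
        "%artist%": "Daft Punk",
        "%album%": "Discovery",
        "%ar_album%": "Daft Punk",
        "%tracknum%": "01",
    }

    def render(fmt: str) -> str:
        # Replace each %token% with its cleaned metadata value.
        for token, value in replacements.items():
            fmt = fmt.replace(token, value)
        return fmt

    print(render("%ar_album%/%album%/%tracknum%. %music%"))
    # Daft Punk/Discovery/01. One More Time
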
@@ -1019,28 +1073,37 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
# Get playlist metadata
playlist_info = get_watched_playlist(playlist_spotify_id)
if not playlist_info:
logger.warning(f"Playlist {playlist_spotify_id} not found in watched playlists. Cannot update m3u file.")
logger.warning(
f"Playlist {playlist_spotify_id} not found in watched playlists. Cannot update m3u file."
)
return

playlist_name = playlist_info.get("name", "Unknown Playlist")

# Get configuration settings
from routes.utils.celery_config import get_config_params

config = get_config_params()

custom_dir_format = config.get("customDirFormat", "%ar_album%/%album%")
custom_track_format = config.get("customTrackFormat", "%tracknum%. %music%")
convert_to = config.get("convertTo") # Get conversion format setting
output_dir = "./downloads" # This matches the output_dir used in download functions
output_dir = (
"./downloads" # This matches the output_dir used in download functions
)

# Get all tracks for the playlist
tracks = get_playlist_tracks_for_m3u(playlist_spotify_id)

if not tracks:
logger.info(f"No tracks found for playlist '{playlist_name}'. M3U file will be empty or removed.")
logger.info(
f"No tracks found for playlist '{playlist_name}'. M3U file will be empty or removed."
)

# Clean playlist name for filename
safe_playlist_name = re.sub(r'[<>:"/\\|?*\x00-\x1f]', '_', playlist_name).strip()
safe_playlist_name = re.sub(
r'[<>:"/\\|?*\x00-\x1f]', "_", playlist_name
).strip()

# Create m3u file path
playlists_dir = Path(output_dir) / "playlists"
@@ -1052,24 +1115,32 @@ def update_playlist_m3u_file(playlist_spotify_id: str):

for track in tracks:
# Generate file path for this track
track_file_path = generate_track_file_path(track, custom_dir_format, custom_track_format, convert_to)
track_file_path = generate_track_file_path(
track, custom_dir_format, custom_track_format, convert_to
)

# Create relative path from m3u file location to track file
# M3U file is in ./downloads/playlists/
# Track files are in ./downloads/{custom_dir_format}/
relative_path = os.path.join("..", track_file_path)
relative_path = relative_path.replace("\\", "/") # Use forward slashes for m3u compatibility
relative_path = relative_path.replace(
"\\", "/"
) # Use forward slashes for m3u compatibility

# Add EXTINF line with track duration and title
duration_seconds = (track.get("duration_ms", 0) // 1000) if track.get("duration_ms") else -1
duration_seconds = (
(track.get("duration_ms", 0) // 1000)
if track.get("duration_ms")
else -1
)
artist_and_title = f"{track.get('artist_names', 'Unknown Artist')} - {track.get('title', 'Unknown Track')}"

m3u_lines.append(f"#EXTINF:{duration_seconds},{artist_and_title}")
m3u_lines.append(relative_path)

# Write m3u file
with open(m3u_file_path, 'w', encoding='utf-8') as f:
f.write('\n'.join(m3u_lines))
with open(m3u_file_path, "w", encoding="utf-8") as f:
f.write("\n".join(m3u_lines))

logger.info(
f"Updated m3u file for playlist '{playlist_name}' at {m3u_file_path} with {len(tracks)} tracks{f' (format: {convert_to})' if convert_to else ''}."

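The file written above follows the extended M3U convention: an #EXTINF line carrying the duration in seconds and an "Artist - Title" label, followed by a path relative to ./downloads/playlists/. A runnable sketch with invented track data (the #EXTM3U header is the customary first line, though the updater above only emits entries):

    tracks = [
        {"artist_names": "Daft Punk", "title": "One More Time", "duration_ms": 320000,
         "path": "../Daft Punk/Discovery/01. One More Time.mp3"},
    ]

    m3u_lines = ["#EXTM3U"]
    for t in tracks:
        duration = t["duration_ms"] // 1000 if t["duration_ms"] else -1
        m3u_lines.append(f"#EXTINF:{duration},{t['artist_names']} - {t['title']}")
        m3u_lines.append(t["path"])

    print("\n".join(m3u_lines))
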
@@ -1,7 +1,7 @@
{
"name": "spotizerr-ui",
"private": true,
"version": "3.0.5",
"version": "3.0.6",
"type": "module",
"scripts": {
"dev": "vite",

@@ -21,6 +21,7 @@ interface DownloadSettings {
hlsThreads: number;
deezerQuality: "MP3_128" | "MP3_320" | "FLAC";
spotifyQuality: "NORMAL" | "HIGH" | "VERY_HIGH";
recursiveQuality?: boolean; // frontend field (mapped to recursive_quality on save)
}

interface WatchConfig {
@@ -49,8 +50,14 @@ const CONVERSION_FORMATS: Record<string, string[]> = {
};

// --- API Functions ---
const saveDownloadConfig = async (data: Partial<DownloadSettings>) => {
const { data: response } = await authApiClient.client.post("/config", data);
const saveDownloadConfig = async (data: Partial<DownloadSettings> & { recursive_quality?: boolean }) => {
// Map camelCase to snake_case for backend compatibility
const payload: any = { ...data };
if (typeof data.recursiveQuality !== "undefined") {
payload.recursive_quality = data.recursiveQuality;
delete payload.recursiveQuality;
}
const { data: response } = await authApiClient.client.post("/config", payload);
return response;
};

@@ -189,6 +196,10 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
<label htmlFor="fallbackToggle" className="text-content-primary dark:text-content-primary-dark">Download Fallback</label>
<input id="fallbackToggle" type="checkbox" {...register("fallback")} className="h-6 w-6 rounded" />
</div>
<div className="flex items-center justify-between">
<label htmlFor="recursiveQualityToggle" className="text-content-primary dark:text-content-primary-dark">Recursive Quality</label>
<input id="recursiveQualityToggle" type="checkbox" {...register("recursiveQuality")} className="h-6 w-6 rounded" />
</div>

{/* Watch validation info */}
{watchConfig?.enabled && (

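This save function and the FormattingTab one below follow the same pattern: rename the frontend's camelCase field to the backend's snake_case key before POSTing to /config. The same transformation, sketched in Python for consistency with the backend examples above:

    FIELD_MAP = {"recursiveQuality": "recursive_quality", "artistSeparator": "artist_separator"}

    def to_backend_payload(data: dict) -> dict:
        # Rename known camelCase keys; pass everything else through untouched.
        return {FIELD_MAP.get(key, key): value for key, value in data.items()}

    print(to_backend_payload({"recursiveQuality": True, "fallback": False}))
    # {'recursive_quality': True, 'fallback': False}
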
@@ -14,6 +14,7 @@ interface FormattingSettings {
album: string;
playlist: string;
compilation: string;
artistSeparator: string;
}

interface FormattingTabProps {
@@ -23,7 +24,12 @@ interface FormattingTabProps {

// --- API Functions ---
const saveFormattingConfig = async (data: Partial<FormattingSettings>) => {
const { data: response } = await authApiClient.client.post("/config", data);
const payload: any = { ...data };
if (typeof data.artistSeparator !== "undefined") {
payload.artist_separator = data.artistSeparator;
delete payload.artistSeparator;
}
const { data: response } = await authApiClient.client.post("/config", payload);
return response;
};

@@ -160,6 +166,17 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
className="h-6 w-6 rounded"
/>
</div>
<div className="flex items-center justify-between">
<label htmlFor="artistSeparator" className="text-content-primary dark:text-content-primary-dark">Artist Separator</label>
<input
id="artistSeparator"
type="text"
maxLength={8}
placeholder="; "
{...register("artistSeparator")}
className="block w-full p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus"
/>
</div>
<div className="flex items-center justify-between">
<label htmlFor="saveCoverToggle" className="text-content-primary dark:text-content-primary-dark">Save Album Cover</label>
<input id="saveCoverToggle" type="checkbox" {...register("saveCover")} className="h-6 w-6 rounded" />

@@ -57,6 +57,7 @@ export type FlatAppSettings = {
album: string;
playlist: string;
compilation: string;
artistSeparator: string;
};

const defaultSettings: FlatAppSettings = {
@@ -89,6 +90,7 @@ const defaultSettings: FlatAppSettings = {
album: "{artist_name}/{album_name}",
playlist: "Playlists/{playlist_name}",
compilation: "Compilations/{album_name}",
artistSeparator: "; ",
watch: {
enabled: false,
},

@@ -31,6 +31,7 @@ export interface AppSettings {
album: string;
playlist: string;
compilation: string;
artistSeparator: string;
watch: {
enabled: boolean;
// Add other watch properties from the old type if they still exist in the API response

@@ -29,6 +29,7 @@ export interface AppSettings {
album: string;
playlist: string;
compilation: string;
artistSeparator: string;
watch: {
enabled: boolean;
// Add other watch properties from the old type if they still exist in the API response
Block a user