fixup after merge/rebase to dev

che-pj
2025-08-27 21:39:08 +02:00
parent 957928bfa0
commit 7b7e32c923


@@ -1,5 +1,5 @@
 import re
-from typing import List, Dict, Any, Dict, Any
+from typing import List
 from fastapi import APIRouter, Request, Depends, Request, Depends
 from pydantic import BaseModel
 import logging
@@ -33,7 +33,6 @@ class BulkAddLinksRequest(BaseModel):
 @router.post("/bulk-add-spotify-links")
-async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)):
 async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, current_user: User = Depends(require_auth_from_state)):
     added_count = 0
     failed_links = []
@@ -45,7 +44,7 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur
             # but still handle potential errors during info retrieval or unsupported types
             # Extract type and ID from the link directly using regex
             match = re.match(
-                r"https://open\.spotify\.com(?:/intl-[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?",
+                r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?",
                 link,
             )
             if not match:
@@ -108,24 +107,6 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur
             # Add to download queue using the queue manager
             task_id = download_queue_manager.add_task(task_data)
-            if task_id:
-                added_count += 1
-                logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.")
-            # Prepare task data for the queue manager
-            task_data = {
-                "download_type": spotify_type,
-                "url": spotify_url,
-                "name": item_name,
-                "artist": artist_name,
-                "spotify_id": spotify_id,
-                "type": spotify_type,
-                "username": current_user.username,
-                "orig_request": dict(req.query_params),
-            }
-            # Add to download queue using the queue manager
-            task_id = download_queue_manager.add_task(task_data)
             if task_id:
                 added_count += 1
                 logger.debug(f"Added {added_count}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue with task_id: {task_id}.")
@@ -134,11 +115,6 @@ async def bulk_add_spotify_links(request: BulkAddLinksRequest, req: Request, cur
                 failed_links.append(link)
                 continue
-            added_count += 1
-            logger.debug(
-                f"Added {added_count + 1}/{total_links} {spotify_type} '{item_name}' ({spotify_id}) to queue."
-            )
         except Exception as e:
             logger.error(f"Error processing Spotify link {link}: {e}", exc_info=True)
             failed_links.append(link)
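
For reference, a minimal sketch of how the link-parsing regex behaves after this fixup. The example links and IDs below are made up for illustration and are not taken from the commit.

import re

# Pattern as it reads after this commit: an optional two-letter locale segment,
# then the resource type and ID, followed by an optional query string.
SPOTIFY_LINK_RE = re.compile(
    r"https://open\.spotify\.com(?:/[a-z]{2})?/(track|album|playlist|artist)/([a-zA-Z0-9]+)(?:\?.*)?"
)

# Hypothetical example links (not from the commit).
links = [
    "https://open.spotify.com/track/3aBcDeFgHiJkLmNoPqRsTu?si=xyz",
    "https://open.spotify.com/de/album/5uVwXyZaBcDeFgHiJkLmNo",
    "https://open.spotify.com/intl-de/track/3aBcDeFgHiJkLmNoPqRsTu",
]

for link in links:
    match = SPOTIFY_LINK_RE.match(link)
    if match:
        spotify_type, spotify_id = match.groups()
        print(f"{spotify_type}: {spotify_id}")
    else:
        print(f"unmatched: {link}")

The third example shows that the intl-prefixed locale form accepted by the previous pattern is no longer matched by this one; whether that form still needs to be handled depends on the dev branch this was rebased onto.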