@@ -28,6 +28,8 @@ PGID=1000
 # Optional: Sets the default file permissions for newly created files within the container.
 UMASK=0022
 
+# Whether to set up file permissions on startup. Skipping may improve performance on remote/slow filesystems
+SKIP_SET_PERMISSIONS=false
 
 ###
 ### Multi-user settings, disabled by default.
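A quick note on what `UMASK=0022` means in practice: the mask clears mode bits at file-creation time, so a regular file requested with mode 0666 ends up as 0644. A minimal, standalone Python illustration (not part of this change; the path is made up):

```python
import os
import stat

os.umask(0o022)  # same policy as UMASK=0022 above
path = "/tmp/umask_demo.txt"
with open(path, "w"):  # open() requests 0o666; the umask clears group/other write bits
    pass
print(oct(stat.S_IMODE(os.stat(path).st_mode)))  # 0o644 -> rw-r--r--
```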
@@ -2,42 +2,17 @@ name: spotizerr
 
 services:
   spotizerr:
-    image: cooldockerizer93/spotizerr
+    image: cooldockerizer93/spotizerr:beta
     volumes:
       - ./data:/app/data
       - ./downloads:/app/downloads # <-- Change this for your music library dir
       - ./logs:/app/logs # <-- Volume for persistent logs
     ports:
       - 7171:7171
-    build:
-      context: .
-      dockerfile: Dockerfile
     container_name: spotizerr-app
     restart: unless-stopped
-    environment:
-      - PUID=${PUID} # Replace with your desired user ID | Remove both if you want to run as root (not recommended, might result in unreadable files)
-      - PGID=${PGID} # Replace with your desired group ID | The user must have write permissions in the volume mapped to /app/downloads
-      - UMASK=${UMASK} # Optional: Sets the default file permissions for newly created files within the container.
-      - REDIS_HOST=${REDIS_HOST}
-      - REDIS_PORT=${REDIS_PORT}
-      - REDIS_DB=${REDIS_DB}
-      - REDIS_PASSWORD=${REDIS_PASSWORD} # Optional, Redis AUTH password. Leave empty if not using authentication
-      - REDIS_URL=redis://:${REDIS_PASSWORD}@${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}
-      - REDIS_BACKEND=redis://:${REDIS_PASSWORD}@${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}
-      - EXPLICIT_FILTER=${EXPLICIT_FILTER} # Set to true to filter out explicit content
-      - ENABLE_AUTH=${ENABLE_AUTH} # Set to true to enable authentication
-      - JWT_SECRET=${JWT_SECRET} # Set to a random string for production
-      - JWT_EXPIRATION_HOURS=${JWT_EXPIRATION_HOURS} # Set to 24 for 24 hours
-      - DEFAULT_ADMIN_USERNAME=${DEFAULT_ADMIN_USERNAME} # Set to admin
-      - DEFAULT_ADMIN_PASSWORD=${DEFAULT_ADMIN_PASSWORD} # Set to admin123
-      - SSO_ENABLED=${SSO_ENABLED} # Set to true to enable SSO
-      - SSO_BASE_REDIRECT_URI=${SSO_BASE_REDIRECT_URI} # Set to http://127.0.0.1:7171/api/auth/sso/callback
-      - FRONTEND_URL=${FRONTEND_URL} # Frontend URL for SSO redirects
-      - DISABLE_REGISTRATION=${DISABLE_REGISTRATION} # Set to true to disable registration
-      - GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID} # Google SSO client ID
-      - GOOGLE_CLIENT_SECRET=${GOOGLE_CLIENT_SECRET} # Google SSO client secret
-      - GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID} # GitHub SSO client ID
-      - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET} # GitHub SSO client secret
+    env_file:
+      - .env
 
     depends_on:
      - redis
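With the service-level `environment:` block gone, every variable must now come from `.env` via `env_file:`. A hypothetical pre-flight check before `docker compose up` (the script, the `REQUIRED` list, and `load_env` are illustrative, not part of the repo) could look like:

```python
# check_env.py - verifies the .env consumed via env_file: defines expected keys
from pathlib import Path

REQUIRED = ["PUID", "PGID", "REDIS_HOST", "REDIS_PORT", "REDIS_DB"]  # sample subset

def load_env(path: str = ".env") -> dict:
    env = {}
    for line in Path(path).read_text().splitlines():
        line = line.strip()
        if line and not line.startswith("#") and "=" in line:
            key, _, value = line.partition("=")
            env[key.strip()] = value.strip()
    return env

missing = [k for k in REQUIRED if k not in load_env()]
if missing:
    raise SystemExit(f"Missing keys in .env: {missing}")
```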
@@ -46,11 +21,11 @@ services:
     image: redis:alpine
     container_name: spotizerr-redis
     restart: unless-stopped
-    environment:
-      - REDIS_PASSWORD=${REDIS_PASSWORD}
+    env_file:
+      - .env
     volumes:
       - redis-data:/data
-    command: redis-server --requirepass ${REDIS_PASSWORD} --appendonly yes
+    command: sh -c 'redis-server --requirepass "$REDIS_PASSWORD" --appendonly yes'
 
 volumes:
   redis-data:
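The new `sh -c` form quotes `$REDIS_PASSWORD`, so the password stays a single argument even if it contains spaces or shell metacharacters; the old unquoted form would let the shell split it. A quick Python illustration of the tokenization difference (the sample password is made up):

```python
import shlex

password = "p@ss word"  # hypothetical password containing a space

quoted = f'redis-server --requirepass "{password}" --appendonly yes'
print(shlex.split(quoted))
# ['redis-server', '--requirepass', 'p@ss word', '--appendonly', 'yes']

unquoted = f"redis-server --requirepass {password} --appendonly yes"
print(shlex.split(unquoted))
# ['redis-server', '--requirepass', 'p@ss', 'word', '--appendonly', 'yes']
```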
@@ -6,6 +6,28 @@ if [ -n "${UMASK}" ]; then
     umask "${UMASK}"
 fi
 
+# Compose Redis URLs from base variables if not explicitly provided
+if [ -z "${REDIS_URL}" ]; then
+    REDIS_HOST=${REDIS_HOST:-redis}
+    REDIS_PORT=${REDIS_PORT:-6379}
+    REDIS_DB=${REDIS_DB:-0}
+
+    if [ -n "${REDIS_PASSWORD}" ]; then
+        if [ -n "${REDIS_USERNAME}" ]; then
+            AUTH_PART="${REDIS_USERNAME}:${REDIS_PASSWORD}@"
+        else
+            AUTH_PART=":${REDIS_PASSWORD}@"
+        fi
+    else
+        AUTH_PART=""
+    fi
+    export REDIS_URL="redis://${AUTH_PART}${REDIS_HOST}:${REDIS_PORT}/${REDIS_DB}"
+fi
+
+if [ -z "${REDIS_BACKEND}" ]; then
+    export REDIS_BACKEND="${REDIS_URL}"
+fi
+
 # Redis is now in a separate container so we don't need to start it locally
 echo "Using Redis at ${REDIS_URL}"
 
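For reference, the same URL-composition logic expressed in Python — a sketch mirroring the shell above, with identical defaults and precedence (an explicit `REDIS_URL` always wins):

```python
import os

def compose_redis_url() -> str:
    """Mirror of the entrypoint logic above, as a Python sketch."""
    if os.environ.get("REDIS_URL"):
        return os.environ["REDIS_URL"]
    host = os.environ.get("REDIS_HOST", "redis")
    port = os.environ.get("REDIS_PORT", "6379")
    db = os.environ.get("REDIS_DB", "0")
    password = os.environ.get("REDIS_PASSWORD", "")
    username = os.environ.get("REDIS_USERNAME", "")
    if password:
        auth = f"{username}:{password}@" if username else f":{password}@"
    else:
        auth = ""
    return f"redis://{auth}{host}:{port}/{db}"
```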
@@ -50,10 +72,15 @@ else
     echo "Created user: ${USER_NAME} (UID: ${PUID})"
 fi
 
-# Ensure proper permissions for all app directories
+# Ensure proper permissions for all app directories unless skipped via env var
+if [ "${SKIP_SET_PERMISSIONS}" = "true" ] || [ "${SKIP_SET_PERMISSIONS}" = "1" ]; then
+    echo "SKIP_SET_PERMISSIONS is set; skipping permissions for /app/downloads /app/data /app/logs"
+else
+    echo "Setting permissions for /app directories..."
     chown -R "${USER_NAME}:${GROUP_NAME}" /app/downloads /app/data /app/logs || true
-# Ensure Spotipy cache file exists and is writable
+fi
 
+# Ensure Spotipy cache file exists and is writable (fast, local to container)
 touch /app/.cache || true
 chown "${USER_NAME}:${GROUP_NAME}" /app/.cache || true
 
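The shell test accepts exactly `true` or `1`. If the same flag is ever read from Python, a small helper keeps the semantics in one place (hypothetical, not part of the repo; slightly more lenient because it trims and lowercases the value):

```python
import os

def env_flag(name: str) -> bool:
    """True when the variable is set to 'true' or '1' (case-insensitive)."""
    return os.environ.get(name, "").strip().lower() in ("true", "1")

if env_flag("SKIP_SET_PERMISSIONS"):
    print("skipping permission setup")
```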
@@ -873,6 +873,12 @@ async def cancel_task_endpoint(task_id: str, current_user: User = Depends(requir
     if task_info:
         # This is a task ID in the new system
         result = cancel_task(task_id)
+        try:
+            # Push an immediate SSE update so clients reflect cancellation and partial summary
+            await trigger_sse_update(task_id, "cancelled")
+            result["sse_notified"] = True
+        except Exception as e:
+            logger.error(f"SSE notify after cancel failed for {task_id}: {e}")
         return result
 
     # If not found in new system, we need to handle the old system cancellation
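The shape of this change: the cancellation result is authoritative, and the SSE push is strictly best-effort. As a standalone sketch (`cancel` and `notify` are placeholders standing in for `cancel_task` and `trigger_sse_update`):

```python
import logging

logger = logging.getLogger(__name__)

async def cancel_and_notify(task_id: str, cancel, notify) -> dict:
    result = cancel(task_id)  # the authoritative state change
    try:
        await notify(task_id, "cancelled")  # best-effort; a failure here must not mask the cancel
        result["sse_notified"] = True
    except Exception as e:
        logger.error(f"SSE notify after cancel failed for {task_id}: {e}")
    return result
```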
@@ -35,6 +35,172 @@ from routes.utils.history_manager import history_manager
 # Create Redis connection for storing task data that's not part of the Celery result backend
 import redis
 
+
+# --- Helpers to build partial summaries from task logs ---
+def _read_task_log_json_lines(task_id: str) -> list:
+    log_file_path = Path("./logs/tasks") / f"{task_id}.log"
+    if not log_file_path.exists():
+        return []
+    lines = []
+    try:
+        with open(log_file_path, "r") as f:
+            for line in f:
+                line = line.strip()
+                if not line:
+                    continue
+                try:
+                    lines.append(json.loads(line))
+                except Exception:
+                    continue
+    except Exception:
+        return []
+    return lines
+
+
+def _extract_parent_initial_tracks(log_lines: list, parent_type: str) -> dict:
+    """
+    Returns a mapping from a stable track key to the track object from the initial parent callback.
+    For albums: key by ids.spotify or f"{track_number}:{title}" as fallback.
+    For playlists: key by ids.spotify or f"{position}:{title}" as fallback.
+    """
+    track_map: dict[str, dict] = {}
+    if parent_type == "album":
+        for obj in log_lines:
+            album = obj.get("album")
+            if album and isinstance(album, dict) and album.get("tracks"):
+                for t in album.get("tracks", []):
+                    ids = (t or {}).get("ids", {}) or {}
+                    key = ids.get("spotify") or f"{(t or {}).get('track_number', 0)}:{(t or {}).get('title', '')}"
+                    track_map[key] = t
+                break
+    elif parent_type == "playlist":
+        for obj in log_lines:
+            playlist = obj.get("playlist")
+            if playlist and isinstance(playlist, dict) and playlist.get("tracks"):
+                for t in playlist.get("tracks", []):
+                    ids = (t or {}).get("ids", {}) or {}
+                    # TrackPlaylistObject uses position
+                    key = ids.get("spotify") or f"{(t or {}).get('position', 0)}:{(t or {}).get('title', '')}"
+                    track_map[key] = t
+                break
+    return track_map
+
+
+def _extract_completed_and_skipped_from_logs(log_lines: list) -> tuple[set, set, dict, dict]:
+    """
+    Returns (completed_keys, skipped_keys, completed_objects_by_key, skipped_objects_by_key)
+    Keys prefer ids.spotify, falling back to index+title scheme consistent with initial map.
+    """
+    completed_keys: set[str] = set()
+    skipped_keys: set[str] = set()
+    completed_objs: dict[str, dict] = {}
+    skipped_objs: dict[str, dict] = {}
+    for obj in log_lines:
+        track = obj.get("track")
+        if not track:
+            continue
+        status_info = obj.get("status_info", {}) or {}
+        status = status_info.get("status")
+        ids = (track or {}).get("ids", {}) or {}
+        # Fallback keys try track_number:title and position:title
+        fallback_key = f"{(track or {}).get('track_number', 0)}:{(track or {}).get('title', '')}"
+        key = ids.get("spotify") or fallback_key
+        if status == "done":
+            completed_keys.add(key)
+            completed_objs[key] = track
+        elif status == "skipped":
+            skipped_keys.add(key)
+            skipped_objs[key] = track
+    return completed_keys, skipped_keys, completed_objs, skipped_objs
+
+
+def _to_track_object_from_initial(initial_track: dict, parent_type: str) -> dict:
+    """Convert initial album/playlist track entry to a TrackObject-like dict."""
+    # Common fields
+    title = initial_track.get("title", "")
+    disc_number = initial_track.get("disc_number", 1)
+    track_number = initial_track.get("track_number", 0)
+    duration_ms = initial_track.get("duration_ms", 0)
+    explicit = initial_track.get("explicit", False)
+    ids = initial_track.get("ids", {}) or {}
+
+    # Convert artists to ArtistTrackObject[] shape
+    artists_src = initial_track.get("artists", []) or []
+    artists_conv = []
+    for a in artists_src:
+        if isinstance(a, dict):
+            artists_conv.append({
+                "type": "artistTrack",
+                "name": a.get("name", ""),
+                "ids": a.get("ids", {}) or {},
+            })
+
+    # Convert album to AlbumTrackObject-like shape
+    album_src = initial_track.get("album", {}) or {}
+    album_conv = {
+        "type": "albumTrack",
+        "album_type": album_src.get("album_type", ""),
+        "title": album_src.get("title", ""),
+        "release_date": album_src.get("release_date", {}) or {},
+        "total_tracks": album_src.get("total_tracks", 0),
+        "genres": album_src.get("genres", []) or [],
+        "images": album_src.get("images", []) or [],
+        "ids": album_src.get("ids", {}) or {},
+        "artists": [
+            {
+                "type": "artistAlbumTrack",
+                "name": aa.get("name", ""),
+                "ids": aa.get("ids", {}) or {},
+            }
+            for aa in (album_src.get("artists", []) or [])
+            if isinstance(aa, dict)
+        ],
+    }
+
+    return {
+        "type": "track",
+        "title": title,
+        "disc_number": disc_number,
+        "track_number": track_number,
+        "duration_ms": duration_ms,
+        "explicit": explicit,
+        "genres": [],
+        "album": album_conv,
+        "artists": artists_conv,
+        "ids": ids,
+    }
+
+
+def build_partial_summary_from_task_log(task_id: str, parent_type: str) -> dict:
+    """
+    Build a SummaryObject-like dict using the task's log lines.
+    Includes arrays successful_tracks, skipped_tracks, failed_tracks (with reason), and totals.
+    """
+    log_lines = _read_task_log_json_lines(task_id)
+    initial_tracks_map = _extract_parent_initial_tracks(log_lines, parent_type)
+    completed_keys, skipped_keys, completed_objs, skipped_objs = _extract_completed_and_skipped_from_logs(log_lines)
+
+    # Determine failed as initial - completed - skipped
+    initial_keys = set(initial_tracks_map.keys())
+    failed_keys = initial_keys.difference(completed_keys.union(skipped_keys))
+
+    successful_tracks = [completed_objs[k] for k in completed_keys if k in completed_objs]
+    skipped_tracks = [skipped_objs[k] for k in skipped_keys if k in skipped_objs]
+    failed_tracks = [
+        {"track": _to_track_object_from_initial(initial_tracks_map[k], parent_type), "reason": "cancelled"}
+        for k in failed_keys
+        if k in initial_tracks_map
+    ]
+
+    return {
+        "successful_tracks": successful_tracks,
+        "skipped_tracks": skipped_tracks,
+        "failed_tracks": failed_tracks,
+        "total_successful": len(successful_tracks),
+        "total_skipped": len(skipped_tracks),
+        "total_failed": len(failed_tracks),
+    }
+
 
 # Configure logging
 logger = logging.getLogger(__name__)
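To make the key scheme concrete, here is how the helpers above behave on a synthetic two-line task log (the sample data is invented; keys prefer `ids.spotify` and fall back to `track_number:title`):

```python
sample_lines = [
    {"album": {"title": "Demo", "tracks": [
        {"title": "One", "track_number": 1, "ids": {"spotify": "abc"}},
        {"title": "Two", "track_number": 2, "ids": {}},
    ]}},
    {"track": {"title": "One", "track_number": 1, "ids": {"spotify": "abc"}},
     "status_info": {"status": "done"}},
]

initial = _extract_parent_initial_tracks(sample_lines, "album")
done_keys, skipped_keys, done_objs, skipped_objs = _extract_completed_and_skipped_from_logs(sample_lines)
print(sorted(initial))                                    # ['2:Two', 'abc']
print(sorted(initial.keys() - done_keys - skipped_keys))  # ['2:Two'] -> reported as a "cancelled" failed track
```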
@@ -249,6 +415,8 @@ def cancel_task(task_id):
         store_task_status(
             task_id,
             {
+                "status": ProgressState.CANCELLED,
+                "error": "Task cancelled by user",
                 "status_info": {
                     "status": ProgressState.CANCELLED,
                     "error": "Task cancelled by user",
@@ -1065,6 +1233,53 @@ def task_postrun_handler(
         last_status_for_history.get("status") if last_status_for_history else None
     )
 
+    # If task was cancelled/revoked, finalize parent history with partial summary
+    try:
+        if state == states.REVOKED or current_redis_status == ProgressState.CANCELLED:
+            parent_type = (task_info.get("download_type") or task_info.get("type") or "").lower()
+            if parent_type in ["album", "playlist"]:
+                # Build detailed summary from the task log
+                summary = build_partial_summary_from_task_log(task_id, parent_type)
+                status_info = {"status": "done", "summary": summary}
+                title = task_info.get("name", "Unknown")
+                total_tracks = task_info.get("total_tracks", 0)
+
+                # Try to enrich parent payload with initial callback object (to capture artists, ids, images)
+                try:
+                    log_lines = _read_task_log_json_lines(task_id)
+                    initial_parent = _extract_initial_parent_object(log_lines, parent_type)
+                except Exception:
+                    initial_parent = None
+
+                if parent_type == "album":
+                    album_payload = {"title": title, "total_tracks": total_tracks}
+                    if isinstance(initial_parent, dict):
+                        for k in ["artists", "ids", "images", "release_date", "genres", "album_type", "tracks"]:
+                            if k in initial_parent:
+                                album_payload[k] = initial_parent.get(k)
+                    # Ensure a main history entry exists even on cancellation
+                    history_manager.store_album_history(
+                        {"album": album_payload, "status_info": status_info},
+                        task_id,
+                        "failed",
+                    )
+                else:
+                    playlist_payload = {"title": title}
+                    if isinstance(initial_parent, dict):
+                        for k in ["owner", "ids", "images", "tracks", "description"]:
+                            if k in initial_parent:
+                                playlist_payload[k] = initial_parent.get(k)
+                    history_manager.store_playlist_history(
+                        {"playlist": playlist_payload, "status_info": status_info},
+                        task_id,
+                        "failed",
+                    )
+    except Exception as finalize_err:
+        logger.error(
+            f"Failed to finalize partial history for cancelled task {task_id}: {finalize_err}",
+            exc_info=True,
+        )
+
     if state == states.SUCCESS:
         if current_redis_status not in [ProgressState.COMPLETE, "done"]:
             # The final status is now set by the 'done' callback from deezspot.
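For orientation, this lands in Celery's `task_postrun` signal handler, and the new branch fires for revoked tasks or tasks whose stored Redis status is already cancelled. A stripped-down sketch of that shape (simplified; the real handler also reads the status from Redis):

```python
from celery import states
from celery.signals import task_postrun

@task_postrun.connect
def on_task_postrun(task_id=None, state=None, **kwargs):
    if state == states.REVOKED:
        ...  # finalize parent history with the partial summary, as in the diff above
```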
@@ -1685,4 +1900,15 @@ def trigger_sse_update_task(self, task_id: str, reason: str = "status_update"):
     # Don't raise exception to avoid task retry - SSE updates are best-effort
 
 
+def _extract_initial_parent_object(log_lines: list, parent_type: str) -> dict | None:
+    """Return the first album/playlist object from the log's initializing callback, if present."""
+    key = "album" if parent_type == "album" else ("playlist" if parent_type == "playlist" else None)
+    if not key:
+        return None
+    for obj in log_lines:
+        if key in obj and isinstance(obj[key], dict):
+            return obj[key]
+    return None
+
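Behavior sketch for the new helper, on made-up log lines:

```python
lines = [{"status_info": {"status": "initializing"}},
         {"album": {"title": "Demo", "ids": {"spotify": "xyz"}}}]
print(_extract_initial_parent_object(lines, "album"))   # {'title': 'Demo', 'ids': {'spotify': 'xyz'}}
print(_extract_initial_parent_object(lines, "artist"))  # None: only album/playlist are recognized
```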
@@ -492,7 +492,13 @@ class HistoryManager:
         successful_tracks = summary.get("total_successful", 0)
         failed_tracks = summary.get("total_failed", 0)
         skipped_tracks = summary.get("total_skipped", 0)
-        total_tracks = album.get("total_tracks", 0)
+        total_tracks = (summary.get("total_successful", 0) + summary.get("total_skipped", 0) + summary.get("total_failed", 0)) or album.get("total_tracks", 0)
+
+        # Enrich album metadata if missing
+        try:
+            album = self._enrich_album_metadata_from_summary(album, summary)
+        except Exception:
+            pass
 
         # Calculate total duration
         tracks = album.get("tracks", [])
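The `(a + b + c) or fallback` idiom above deliberately treats an all-zero summary (no tracks accounted for) as "no information" and falls back to the album's declared total:

```python
summary = {"total_successful": 0, "total_skipped": 0, "total_failed": 0}
album = {"total_tracks": 12}

total = (summary.get("total_successful", 0)
         + summary.get("total_skipped", 0)
         + summary.get("total_failed", 0)) or album.get("total_tracks", 0)
print(total)  # 12 -> the zero sum is falsy, so the album metadata wins
```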
@@ -561,7 +567,12 @@ class HistoryManager:
                 total_duration
             ))
 
-            # Children table is populated progressively during track processing, not from summary
+            # If we have a summary (e.g., on cancellation), populate children from it, including failed ones
+            try:
+                if summary:
+                    self._populate_album_children_table(children_table, summary, album.get("title", ""))
+            except Exception as e:
+                logger.warning(f"Failed to populate children from summary for album {children_table}: {e}")
 
             logger.info(f"Stored album history for '{album.get('title')}' (task: {task_id}, children: {children_table}, status: {status_to_store})")
             return None
@@ -617,8 +628,20 @@ class HistoryManager:
         failed_tracks = summary.get("total_failed", 0)
         skipped_tracks = summary.get("total_skipped", 0)
 
+        # Improve metadata for the playlist main row using the summary's first successful/skipped/failed track
+        try:
+            if not playlist.get("images"):
+                for arr_key in ("successful_tracks", "skipped_tracks", "failed_tracks"):
+                    arr = summary.get(arr_key, []) or []
+                    candidate = (((arr[0].get("track") or {}) if arr_key == "failed_tracks" else arr[0]).get("album") or {}) if arr and isinstance(arr[0], dict) else {}
+                    if candidate and candidate.get("images"):
+                        playlist.setdefault("images", candidate.get("images", []))
+                        break
+        except Exception:
+            pass
+
         tracks = playlist.get("tracks", [])
-        total_tracks = len(tracks)
+        total_tracks = (summary.get("total_successful", 0) + summary.get("total_skipped", 0) + summary.get("total_failed", 0)) or len(tracks)
         total_duration = self._calculate_total_duration(tracks)
 
         # Derive accurate status
@@ -683,7 +706,12 @@ class HistoryManager:
                 total_duration
             ))
 
-            # Children table is populated progressively during track processing, not from summary
+            # If we have a summary (e.g., on cancellation), populate children from it, including failed ones
+            try:
+                if summary:
+                    self._populate_playlist_children_table(children_table, summary)
+            except Exception as e:
+                logger.warning(f"Failed to populate children from summary for playlist {children_table}: {e}")
 
             logger.info(f"Stored playlist history for '{playlist.get('title')}' (task: {task_id}, children: {children_table}, status: {status_to_store})")
             return None
@@ -697,37 +725,31 @@ class HistoryManager:
         try:
             # Ensure table exists before population
             self._create_children_table(table_name)
-            all_tracks = []
+            all_rows = []
 
             # Add successful tracks
             for track in summary.get("successful_tracks", []):
                 track_data = self._prepare_child_track_data(track, album_title, "completed")
-                all_tracks.append(track_data)
+                all_rows.append(self._map_values_to_row(track_data["values"]))
 
             # Add failed tracks
             for failed_item in summary.get("failed_tracks", []):
                 track = failed_item.get("track", {})
                 track_data = self._prepare_child_track_data(track, album_title, "failed")
                 track_data["metadata"]["failure_reason"] = failed_item.get("reason", "Unknown error")
-                all_tracks.append(track_data)
+                all_rows.append(self._map_values_to_row(track_data["values"]))
 
             # Add skipped tracks
             for track in summary.get("skipped_tracks", []):
                 track_data = self._prepare_child_track_data(track, album_title, "skipped")
-                all_tracks.append(track_data)
+                all_rows.append(self._map_values_to_row(track_data["values"]))
 
-            # Insert all tracks
+            # Upsert all rows
             with self._get_connection() as conn:
-                for track_data in all_tracks:
-                    conn.execute(f"""
-                        INSERT INTO {table_name} (
-                            title, artists, album_title, duration_ms, track_number,
-                            disc_number, explicit, status, external_ids, genres,
-                            isrc, timestamp, position, metadata
-                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-                    """, track_data["values"])
+                for row in all_rows:
+                    self._upsert_child_row(conn, table_name, row)
 
-            logger.info(f"Populated {len(all_tracks)} tracks in children table {table_name}")
+            logger.info(f"Populated {len(all_rows)} tracks in children table {table_name}")
 
         except Exception as e:
             logger.error(f"Failed to populate album children table {table_name}: {e}")
@@ -737,37 +759,30 @@ class HistoryManager:
         try:
             # Ensure table exists before population
             self._create_children_table(table_name)
-            all_tracks = []
+            all_rows = []
 
             # Add successful tracks
             for track in summary.get("successful_tracks", []):
                 track_data = self._prepare_child_track_data(track, "", "completed")
-                all_tracks.append(track_data)
+                all_rows.append(self._map_values_to_row(track_data["values"]))
 
             # Add failed tracks
             for failed_item in summary.get("failed_tracks", []):
                 track = failed_item.get("track", {})
                 track_data = self._prepare_child_track_data(track, "", "failed")
                 track_data["metadata"]["failure_reason"] = failed_item.get("reason", "Unknown error")
-                all_tracks.append(track_data)
+                all_rows.append(self._map_values_to_row(track_data["values"]))
 
             # Add skipped tracks
             for track in summary.get("skipped_tracks", []):
                 track_data = self._prepare_child_track_data(track, "", "skipped")
-                all_tracks.append(track_data)
+                all_rows.append(self._map_values_to_row(track_data["values"]))
 
-            # Insert all tracks
+            # Upsert all rows
             with self._get_connection() as conn:
-                for track_data in all_tracks:
-                    conn.execute(f"""
-                        INSERT INTO {table_name} (
-                            title, artists, album_title, duration_ms, track_number,
-                            disc_number, explicit, status, external_ids, genres,
-                            isrc, timestamp, position, metadata
-                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-                    """, track_data["values"])
+                for row in all_rows:
+                    self._upsert_child_row(conn, table_name, row)
 
-            logger.info(f"Populated {len(all_tracks)} tracks in children table {table_name}")
+            logger.info(f"Populated {len(all_rows)} tracks in children table {table_name}")
 
         except Exception as e:
             logger.error(f"Failed to populate playlist children table {table_name}: {e}")
@@ -1106,6 +1121,213 @@ class HistoryManager:
             logger.error(f"Failed to clear old history: {e}")
             return 0
 
+    # --- New helpers for failed children insertion and metadata enrichment ---
+    def _populate_failed_children_for_album(self, table_name: str, summary: Dict, album_title: str) -> None:
+        try:
+            self._create_children_table(table_name)
+            with self._get_connection() as conn:
+                for failed_item in summary.get("failed_tracks", []):
+                    track = failed_item.get("track", {})
+                    track_data = self._prepare_child_track_data(track, album_title, "failed")
+                    track_data["metadata"]["failure_reason"] = failed_item.get("reason", "cancelled")
+                    conn.execute(f"""
+                        INSERT INTO {table_name} (
+                            title, artists, album_title, duration_ms, track_number,
+                            disc_number, explicit, status, external_ids, genres,
+                            isrc, timestamp, position, metadata
+                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """, track_data["values"])
+        except Exception as e:
+            logger.error(f"Failed to insert failed children for album into {table_name}: {e}")
+
+    def _populate_failed_children_for_playlist(self, table_name: str, summary: Dict) -> None:
+        try:
+            self._create_children_table(table_name)
+            with self._get_connection() as conn:
+                for failed_item in summary.get("failed_tracks", []):
+                    track = failed_item.get("track", {})
+                    track_data = self._prepare_child_track_data(track, "", "failed")
+                    track_data["metadata"]["failure_reason"] = failed_item.get("reason", "cancelled")
+                    conn.execute(f"""
+                        INSERT INTO {table_name} (
+                            title, artists, album_title, duration_ms, track_number,
+                            disc_number, explicit, status, external_ids, genres,
+                            isrc, timestamp, position, metadata
+                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """, track_data["values"])
+        except Exception as e:
+            logger.error(f"Failed to insert failed children for playlist into {table_name}: {e}")
+
+    def _enrich_album_metadata_from_summary(self, album: Dict, summary: Dict) -> Dict:
+        if album.get("images") and album.get("release_date") and album.get("genres"):
+            return album
+        # Prefer successful track album data, then skipped, then failed
+        src_track = None
+        for key in ("successful_tracks", "skipped_tracks", "failed_tracks"):
+            arr = summary.get(key, []) or []
+            if arr:
+                src_track = arr[0] if key != "failed_tracks" else (arr[0].get("track") if isinstance(arr[0], dict) else None)
+                break
+        if isinstance(src_track, dict):
+            album_obj = src_track.get("album", {}) or {}
+            album.setdefault("images", album_obj.get("images", []))
+            album.setdefault("release_date", album_obj.get("release_date", {}))
+            album.setdefault("genres", album_obj.get("genres", []))
+            album.setdefault("album_type", album_obj.get("album_type", album.get("album_type")))
+        return album
+
+    # --- Upsert helpers to avoid duplicate children rows and keep the most complete data ---
+    def _map_values_to_row(self, values: tuple) -> Dict:
+        (
+            title,
+            artists_json,
+            album_title,
+            duration_ms,
+            track_number,
+            disc_number,
+            explicit,
+            status,
+            external_ids_json,
+            genres_json,
+            isrc,
+            timestamp,
+            position,
+            metadata_json,
+        ) = values
+        return {
+            "title": title,
+            "artists": artists_json,
+            "album_title": album_title,
+            "duration_ms": duration_ms,
+            "track_number": track_number,
+            "disc_number": disc_number,
+            "explicit": explicit,
+            "status": status,
+            "external_ids": external_ids_json,
+            "genres": genres_json,
+            "isrc": isrc,
+            "timestamp": timestamp,
+            "position": position,
+            "metadata": metadata_json,
+        }
+
+    def _status_priority(self, status: str) -> int:
+        order = {"completed": 3, "skipped": 2, "failed": 1}
+        return order.get((status or "").lower(), 0)
+
+    def _merge_child_rows(self, existing: Dict, new: Dict) -> Dict:
+        merged = existing.copy()
+        # Prefer non-empty/non-null values; for status, use priority
+        for key in [
+            "artists",
+            "album_title",
+            "duration_ms",
+            "track_number",
+            "disc_number",
+            "explicit",
+            "external_ids",
+            "genres",
+            "isrc",
+            "metadata",
+        ]:
+            old_val = merged.get(key)
+            new_val = new.get(key)
+            # For JSON strings, prefer the longer, non-empty value
+            if (old_val in (None, "", 0)) and new_val not in (None, ""):
+                merged[key] = new_val
+            elif isinstance(new_val, str) and isinstance(old_val, str) and len(new_val) > len(old_val):
+                merged[key] = new_val
+        # Status: keep the highest priority
+        if self._status_priority(new.get("status")) > self._status_priority(existing.get("status")):
+            merged["status"] = new.get("status")
+        # Timestamp: keep the most recent update
+        merged["timestamp"] = max(existing.get("timestamp") or 0, new.get("timestamp") or 0)
+        return merged
+
+    def _find_existing_child_row(self, conn: sqlite3.Connection, table_name: str, new_row: Dict) -> Optional[Dict]:
+        try:
+            cursor = conn.execute(f"SELECT * FROM {table_name} WHERE title = ?", (new_row.get("title", ""),))
+            candidates = [dict(r) for r in cursor.fetchall()]
+            if not candidates:
+                return None
+            # Try match by ISRC
+            isrc = new_row.get("isrc")
+            if isrc:
+                for r in candidates:
+                    if (r.get("isrc") or "") == isrc:
+                        return r
+            # Try match by position (playlist), then track_number (album)
+            pos = new_row.get("position")
+            if pos is not None:
+                for r in candidates:
+                    if r.get("position") == pos:
+                        return r
+            tn = new_row.get("track_number")
+            if tn:
+                for r in candidates:
+                    if r.get("track_number") == tn:
+                        return r
+            # Fallback: first candidate with the same title
+            return candidates[0]
+        except Exception:
+            return None
+
+    def _upsert_child_row(self, conn: sqlite3.Connection, table_name: str, row: Dict) -> None:
+        existing = self._find_existing_child_row(conn, table_name, row)
+        if existing:
+            merged = self._merge_child_rows(existing, row)
+            conn.execute(
+                f"""
+                UPDATE {table_name}
+                SET artists = ?, album_title = ?, duration_ms = ?, track_number = ?,
+                    disc_number = ?, explicit = ?, status = ?, external_ids = ?,
+                    genres = ?, isrc = ?, timestamp = ?, position = ?, metadata = ?
+                WHERE id = ?
+                """,
+                (
+                    merged.get("artists"),
+                    merged.get("album_title"),
+                    merged.get("duration_ms"),
+                    merged.get("track_number"),
+                    merged.get("disc_number"),
+                    merged.get("explicit"),
+                    merged.get("status"),
+                    merged.get("external_ids"),
+                    merged.get("genres"),
+                    merged.get("isrc"),
+                    merged.get("timestamp"),
+                    merged.get("position"),
+                    merged.get("metadata"),
+                    existing.get("id"),
+                ),
+            )
+        else:
+            conn.execute(
+                f"""
+                INSERT INTO {table_name} (
+                    title, artists, album_title, duration_ms, track_number,
+                    disc_number, explicit, status, external_ids, genres,
+                    isrc, timestamp, position, metadata
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    row.get("title"),
+                    row.get("artists"),
+                    row.get("album_title"),
+                    row.get("duration_ms"),
+                    row.get("track_number"),
+                    row.get("disc_number"),
+                    row.get("explicit"),
+                    row.get("status"),
+                    row.get("external_ids"),
+                    row.get("genres"),
+                    row.get("isrc"),
+                    row.get("timestamp"),
+                    row.get("position"),
+                    row.get("metadata"),
+                ),
+            )
+
 
 # Global history manager instance
 history_manager = HistoryManager()
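How the upsert merge resolves conflicts, on invented rows (`hm` stands for any `HistoryManager` instance): the higher-priority status wins (`completed` > `skipped` > `failed`), empty fields are filled in, and the latest timestamp is kept.

```python
existing = {"id": 1, "title": "One", "status": "failed", "isrc": "", "timestamp": 10}
incoming = {"title": "One", "status": "completed", "isrc": "USX9P1234567", "timestamp": 20}

merged = hm._merge_child_rows(existing, incoming)
print(merged["status"], merged["isrc"], merged["timestamp"])  # completed USX9P1234567 20
```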