Merge pull request #134 from Xoconoch/dev

Hotpatch 2.0.1
This commit is contained in:
Xoconoch
2025-05-30 13:18:34 -06:00
committed by GitHub
7 changed files with 220 additions and 256 deletions

View File

@@ -49,13 +49,9 @@ Music downloader which combines the best of two worlds: Spotify's catalog and De
mkdir spotizerr && cd spotizerr
```
2. Copy the `.env` file from this repo and update all variables (e.g. Redis credentials, PUID/PGID, UMASK).
2. Set up a `.env` file following the `.env.example` file from this repo and update all variables (e.g. Redis credentials, PUID/PGID, UMASK).
3. Copy `docker-compose.yml` from this repo.
4. Create required directories:
```bash
mkdir -p data/creds data/config data/watch data/history downloads logs/tasks .cache
```
5. Launch containers:
4. Launch containers:
```bash
docker compose up -d
```
@@ -146,7 +142,7 @@ First create a Spotify credentials file using the 3rd-party `librespot-auth` too
This file has the following format:
```
{"username": "string", "auth_type": 1, "auth_data": "string"}
{"username": "long text", "auth_type": 1, "auth_data": "even longer text"}
```
The important ones are the "username" and "auth_data" parameters; these match the "username" and "credentials" fields respectively when adding/editing Spotify credentials in Spotizerr.
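If you want to double-check the two values before pasting them in, a minimal sketch along these lines works (assuming the file produced by `librespot-auth` is named `credentials.json` and sits in the current directory):
```python
import json

# Hypothetical location: librespot-auth normally writes credentials.json
# to the directory it was run from; adjust the path if yours differs.
with open("credentials.json") as f:
    creds = json.load(f)

# "username"  -> Spotizerr's "username" field
# "auth_data" -> Spotizerr's "credentials" field
print("username:   ", creds["username"])
print("credentials:", creds["auth_data"])
```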
@@ -235,23 +231,13 @@ Copy that value and paste it into the corresponding setting in Spotizerr
- For Deezer: MP3 128k, MP3 320k (sometimes requires Premium; it varies) and FLAC (Premium only)
- **Customizable formatting**:
- Track number padding (01. Track or 1. Track)
- Adjust retry parameters (max attempts, delay, delay increase)
### Environment Variables
Define your variables in the `.env` file in the project root:
```dotenv
REDIS_HOST=redis # Redis host name
REDIS_PORT=6379 # Redis port number
REDIS_DB=0 # Redis DB index
REDIS_PASSWORD=CHANGE_ME # Redis AUTH password
EXPLICIT_FILTER=false # Filter explicit content
PUID=1000 # Container user ID
PGID=1000 # Container group ID
UMASK=0022 # Default file permission mask
```
- **Watching artists/playlists**
- Start watching a Spotify playlist and its tracks will be downloaded automatically as the playlist updates.
- Start watching a Spotify artist and their albums will be downloaded automatically, so you never miss a release!
## Troubleshooting
**Common Issues**:

View File

@@ -8,7 +8,7 @@ services:
- ./logs:/app/logs # <-- Volume for persistent logs
ports:
- 7171:7171
image: cooldockerizer93/spotizerr
image: test
container_name: spotizerr-app
restart: unless-stopped
environment:

View File

@@ -52,7 +52,7 @@ else
# Ensure proper permissions for all app directories
echo "Setting permissions for /app directories..."
chown -R "${USER_NAME}:${GROUP_NAME}" /app/downloads /app/config /app/creds /app/logs /app/cache || true
chown -R "${USER_NAME}:${GROUP_NAME}" /app/downloads /app/data /app/logs || true
# Ensure Spotipy cache file exists and is writable
touch /app/.cache || true
chown "${USER_NAME}:${GROUP_NAME}" /app/.cache || true

View File

@@ -6,9 +6,18 @@ import threading
import time
import os
config_bp = Blueprint('config_bp', __name__)
CONFIG_PATH = Path('./data/config/main.json')
CONFIG_PATH_WATCH = Path('./data/config/watch.json')
# Import the centralized config getters that handle file creation and defaults
from routes.utils.celery_config import get_config_params as get_main_config_params, DEFAULT_MAIN_CONFIG, CONFIG_FILE_PATH as MAIN_CONFIG_FILE_PATH
from routes.utils.watch.manager import get_watch_config as get_watch_manager_config, DEFAULT_WATCH_CONFIG, CONFIG_FILE_PATH as WATCH_CONFIG_FILE_PATH
logger = logging.getLogger(__name__)
config_bp = Blueprint('config', __name__)
# Path to main config file (consistent with celery_config.py)
# CONFIG_PATH = Path('./data/config/main.json') # Defined as MAIN_CONFIG_FILE_PATH from import
# Path to watch config file (consistent with watch/manager.py)
# WATCH_CONFIG_PATH = Path('./data/config/watch.json') # Defined as WATCH_CONFIG_FILE_PATH from import
# Flag for config change notifications
config_changed = False
@@ -23,120 +32,102 @@ NOTIFY_PARAMETERS = [
'deezerQuality'
]
# Helper to get main config (uses the one from celery_config)
def get_config():
try:
if not CONFIG_PATH.exists():
CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
CONFIG_PATH.write_text('{}')
return {}
with open(CONFIG_PATH, 'r') as f:
return json.load(f)
except Exception as e:
logging.error(f"Error reading config: {str(e)}")
return None
"""Retrieves the main configuration, creating it with defaults if necessary."""
return get_main_config_params()
# Helper to save main config
def save_config(config_data):
"""Save config and track changes to important parameters"""
global config_changed, last_config
"""Saves the main configuration data to main.json."""
try:
# Load current config for comparison
current_config = get_config() or {}
MAIN_CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
# Merge the provided data onto the existing/default config so a partial
# POST/PUT never drops keys; get_config_params() has already populated
# defaults if the file was brand new, so here we only overlay the incoming
# values and backfill any defaults that are still missing.
# Check if any notify parameters changed
for param in NOTIFY_PARAMETERS:
if param in config_data:
if param not in current_config or config_data[param] != current_config.get(param):
config_changed = True
logging.info(f"Config parameter '{param}' changed from '{current_config.get(param)}' to '{config_data[param]}'")
# Load current or default config
existing_config = {}
if MAIN_CONFIG_FILE_PATH.exists():
with open(MAIN_CONFIG_FILE_PATH, 'r') as f_read:
existing_config = json.load(f_read)
else: # Should be rare if get_config_params was called
existing_config = DEFAULT_MAIN_CONFIG.copy()
# Update with new data
for key, value in config_data.items():
existing_config[key] = value
# Save last known config
last_config = config_data.copy()
# Ensure all default keys are still there
for default_key, default_value in DEFAULT_MAIN_CONFIG.items():
if default_key not in existing_config:
existing_config[default_key] = default_value
with open(MAIN_CONFIG_FILE_PATH, 'w') as f:
json.dump(existing_config, f, indent=4)
logger.info(f"Main configuration saved to {MAIN_CONFIG_FILE_PATH}")
return True, None
except Exception as e:
logger.error(f"Error saving main configuration: {e}", exc_info=True)
return False, str(e)
# Helper to get watch config (uses the one from watch/manager.py)
def get_watch_config_http(): # Renamed to avoid conflict with the imported get_watch_config
"""Retrieves the watch configuration, creating it with defaults if necessary."""
return get_watch_manager_config()
# Helper to save watch config
def save_watch_config_http(watch_config_data): # Renamed
"""Saves the watch configuration data to watch.json."""
try:
WATCH_CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
# Write the config file
CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
with open(CONFIG_PATH, 'w') as f:
json.dump(config_data, f, indent=2)
# Similar logic to save_config: merge with defaults/existing
existing_config = {}
if WATCH_CONFIG_FILE_PATH.exists():
with open(WATCH_CONFIG_FILE_PATH, 'r') as f_read:
existing_config = json.load(f_read)
else: # Should be rare if get_watch_manager_config was called
existing_config = DEFAULT_WATCH_CONFIG.copy()
for key, value in watch_config_data.items():
existing_config[key] = value
return True
except Exception as e:
logging.error(f"Error saving config: {str(e)}")
return False
for default_key, default_value in DEFAULT_WATCH_CONFIG.items():
if default_key not in existing_config:
existing_config[default_key] = default_value
def get_watch_config():
"""Reads watch.json and returns its content or defaults."""
try:
if not CONFIG_PATH_WATCH.exists():
CONFIG_PATH_WATCH.parent.mkdir(parents=True, exist_ok=True)
# Default watch config
defaults = {
'enabled': False,
'watchedArtistAlbumGroup': ["album", "single"],
'watchPollIntervalSeconds': 3600
}
CONFIG_PATH_WATCH.write_text(json.dumps(defaults, indent=2))
return defaults
with open(CONFIG_PATH_WATCH, 'r') as f:
return json.load(f)
with open(WATCH_CONFIG_FILE_PATH, 'w') as f:
json.dump(existing_config, f, indent=4)
logger.info(f"Watch configuration saved to {WATCH_CONFIG_FILE_PATH}")
return True, None
except Exception as e:
logging.error(f"Error reading watch config: {str(e)}")
# Return defaults on error to prevent crashes
return {
'enabled': False,
'watchedArtistAlbumGroup': ["album", "single"],
'watchPollIntervalSeconds': 3600
}
def save_watch_config(watch_config_data):
"""Saves data to watch.json."""
try:
CONFIG_PATH_WATCH.parent.mkdir(parents=True, exist_ok=True)
with open(CONFIG_PATH_WATCH, 'w') as f:
json.dump(watch_config_data, f, indent=2)
return True
except Exception as e:
logging.error(f"Error saving watch config: {str(e)}")
return False
logger.error(f"Error saving watch configuration: {e}", exc_info=True)
return False, str(e)
@config_bp.route('/config', methods=['GET'])
def handle_config():
config = get_config()
if config is None:
return jsonify({"error": "Could not read config file"}), 500
# Create config/state directory
Path('./data/config/state').mkdir(parents=True, exist_ok=True)
# Set default values for any missing config options
defaults = {
'service': 'spotify', # Default service is Spotify
'fallback': False,
'spotifyQuality': 'NORMAL',
'deezerQuality': 'MP3_128',
'realTime': False,
'customDirFormat': '%ar_album%/%album%',
'customTrackFormat': '%tracknum%. %music%',
'maxConcurrentDownloads': 3,
'maxRetries': 3,
'retryDelaySeconds': 5,
'retry_delay_increase': 5,
'tracknum_padding': True
}
# Populate defaults for any missing keys
for key, default_value in defaults.items():
if key not in config:
config[key] = default_value
# Get explicit filter setting from environment variable
explicit_filter_env = os.environ.get('EXPLICIT_FILTER', 'false').lower()
config['explicitFilter'] = explicit_filter_env in ('true', '1', 'yes', 'on')
return jsonify(config)
"""Handles GET requests for the main configuration."""
try:
config = get_config()
return jsonify(config)
except Exception as e:
logger.error(f"Error in GET /config: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve configuration", "details": str(e)}), 500
@config_bp.route('/config', methods=['POST', 'PUT'])
def update_config():
"""Handles POST/PUT requests to update the main configuration."""
try:
new_config = request.get_json()
if not isinstance(new_config, dict):
@@ -149,75 +140,64 @@ def update_config():
explicit_filter_env = os.environ.get('EXPLICIT_FILTER', 'false').lower()
new_config['explicitFilter'] = explicit_filter_env in ('true', '1', 'yes', 'on')
if not save_config(new_config):
return jsonify({"error": "Failed to save config"}), 500
success, error_msg = save_config(new_config)
if success:
# Return the updated config
updated_config_values = get_config()
if updated_config_values is None:
# This case should ideally not be reached if save_config succeeded
# and get_config handles errors by returning a default or None.
return jsonify({"error": "Failed to retrieve configuration after saving"}), 500
return jsonify(updated_config_values)
else:
return jsonify({"error": "Failed to update configuration", "details": error_msg}), 500
except json.JSONDecodeError:
return jsonify({"error": "Invalid JSON data"}), 400
except Exception as e:
logging.error(f"Error updating config: {str(e)}")
return jsonify({"error": "Failed to update config"}), 500
logger.error(f"Error in POST/PUT /config: {e}", exc_info=True)
return jsonify({"error": "Failed to update configuration", "details": str(e)}), 500
@config_bp.route('/config/check', methods=['GET'])
def check_config_changes():
"""
Check if config has changed since last check
Returns: Status of config changes
"""
global config_changed
# Get current state
has_changed = config_changed
# Reset flag after checking
if has_changed:
config_changed = False
return jsonify({
"changed": has_changed,
"last_config": last_config
})
# This endpoint seems more related to dynamically checking if config changed
# on disk, which might not be necessary if settings are applied on restart
# or by a dedicated manager. For now, just return current config.
try:
config = get_config()
return jsonify({
"message": "Current configuration retrieved.",
"config": config
})
except Exception as e:
logger.error(f"Error in GET /config/check: {e}", exc_info=True)
return jsonify({"error": "Failed to check configuration", "details": str(e)}), 500
@config_bp.route('/config/watch', methods=['GET'])
def handle_watch_config():
watch_config = get_watch_config()
# Ensure defaults are applied if file was corrupted or missing fields
defaults = {
'enabled': False,
'watchedArtistAlbumGroup': ["album", "single"],
'watchPollIntervalSeconds': 3600
}
for key, default_value in defaults.items():
if key not in watch_config:
watch_config[key] = default_value
return jsonify(watch_config)
"""Handles GET requests for the watch configuration."""
try:
watch_config = get_watch_config_http()
return jsonify(watch_config)
except Exception as e:
logger.error(f"Error in GET /config/watch: {e}", exc_info=True)
return jsonify({"error": "Failed to retrieve watch configuration", "details": str(e)}), 500
@config_bp.route('/config/watch', methods=['POST', 'PUT'])
def update_watch_config():
"""Handles POST/PUT requests to update the watch configuration."""
try:
new_watch_config = request.get_json()
if not isinstance(new_watch_config, dict):
return jsonify({"error": "Invalid watch config format"}), 400
if not save_watch_config(new_watch_config):
return jsonify({"error": "Failed to save watch config"}), 500
updated_watch_config_values = get_watch_config()
if updated_watch_config_values is None:
return jsonify({"error": "Failed to retrieve watch configuration after saving"}), 500
return jsonify(updated_watch_config_values)
success, error_msg = save_watch_config_http(new_watch_config)
if success:
return jsonify({"message": "Watch configuration updated successfully"}), 200
else:
return jsonify({"error": "Failed to update watch configuration", "details": error_msg}), 500
except json.JSONDecodeError:
return jsonify({"error": "Invalid JSON data for watch config"}), 400
except Exception as e:
logging.error(f"Error updating watch config: {str(e)}")
return jsonify({"error": "Failed to update watch config"}), 500
logger.error(f"Error in POST/PUT /config/watch: {e}", exc_info=True)
return jsonify({"error": "Failed to update watch configuration", "details": str(e)}), 500
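As a quick way to exercise the rewritten endpoints, a sketch along these lines should work (assuming the blueprint is importable as `routes.config`, the project's dependencies are installed, and it is registered without a URL prefix; the real app may mount it elsewhere):
```python
from flask import Flask

from routes.config import config_bp  # assumed module path for this blueprint

app = Flask(__name__)
app.register_blueprint(config_bp)  # the real app may add a prefix such as /api

with app.test_client() as client:
    # GET returns the merged main config; defaults are filled in on first read.
    print(client.get("/config").get_json())

    # A partial PUT is merged with the existing file and the defaults,
    # so unspecified keys are preserved rather than dropped.
    resp = client.put("/config", json={"maxConcurrentDownloads": 5})
    print(resp.status_code, resp.get_json())

    # The watch config behaves the same way under /config/watch.
    print(client.get("/config/watch").get_json())
```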

View File

@@ -22,86 +22,71 @@ REDIS_BACKEND = os.getenv('REDIS_BACKEND', REDIS_URL)
logger.info(f"Redis configuration: REDIS_URL={REDIS_URL}, REDIS_BACKEND={REDIS_BACKEND}")
# Config path
CONFIG_PATH = './data/config/main.json'
CONFIG_FILE_PATH = Path('./data/config/main.json')
DEFAULT_MAIN_CONFIG = {
'service': 'spotify',
'spotify': '',
'deezer': '',
'fallback': False,
'spotifyQuality': 'NORMAL',
'deezerQuality': 'MP3_128',
'realTime': False,
'customDirFormat': '%ar_album%/%album%',
'customTrackFormat': '%tracknum%. %music%',
'tracknum_padding': True,
'maxConcurrentDownloads': 3,
'maxRetries': 3,
'retryDelaySeconds': 5,
'retry_delay_increase': 5
}
def get_config_params():
"""
Get configuration parameters from the config file.
Creates the file with defaults if it doesn't exist.
Ensures all default keys are present in the loaded config.
Returns:
dict: A dictionary containing configuration parameters
"""
try:
if not Path(CONFIG_PATH).exists():
return {
'service': 'spotify',
'spotify': '',
'deezer': '',
'fallback': False,
'spotifyQuality': 'NORMAL',
'deezerQuality': 'MP3_128',
'realTime': False,
'customDirFormat': '%ar_album%/%album%',
'customTrackFormat': '%tracknum%. %music%',
'tracknum_padding': True,
'maxConcurrentDownloads': 3,
'maxRetries': 3,
'retryDelaySeconds': 5,
'retry_delay_increase': 5
}
# Ensure ./data/config directory exists
CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
if not CONFIG_FILE_PATH.exists():
logger.info(f"{CONFIG_FILE_PATH} not found. Creating with default values.")
with open(CONFIG_FILE_PATH, 'w') as f:
json.dump(DEFAULT_MAIN_CONFIG, f, indent=4)
return DEFAULT_MAIN_CONFIG.copy() # Return a copy of defaults
with open(CONFIG_PATH, 'r') as f:
with open(CONFIG_FILE_PATH, 'r') as f:
config = json.load(f)
# Set defaults for missing values
defaults = {
'service': 'spotify',
'spotify': '',
'deezer': '',
'fallback': False,
'spotifyQuality': 'NORMAL',
'deezerQuality': 'MP3_128',
'realTime': False,
'customDirFormat': '%ar_album%/%album%',
'customTrackFormat': '%tracknum%. %music%',
'tracknum_padding': True,
'maxConcurrentDownloads': 3,
'maxRetries': 3,
'retryDelaySeconds': 5,
'retry_delay_increase': 5
}
for key, value in defaults.items():
# Ensure all default keys are present in the loaded config
updated = False
for key, value in DEFAULT_MAIN_CONFIG.items():
if key not in config:
config[key] = value
updated = True
if updated:
logger.info(f"Configuration at {CONFIG_FILE_PATH} was missing some default keys. Updated with defaults.")
with open(CONFIG_FILE_PATH, 'w') as f:
json.dump(config, f, indent=4)
return config
except Exception as e:
logger.error(f"Error reading config: {e}")
# Return defaults if config read fails
return {
'service': 'spotify',
'spotify': '',
'deezer': '',
'fallback': False,
'spotifyQuality': 'NORMAL',
'deezerQuality': 'MP3_128',
'realTime': False,
'customDirFormat': '%ar_album%/%album%',
'customTrackFormat': '%tracknum%. %music%',
'tracknum_padding': True,
'maxConcurrentDownloads': 3,
'maxRetries': 3,
'retryDelaySeconds': 5,
'retry_delay_increase': 5
}
logger.error(f"Error reading or creating config at {CONFIG_FILE_PATH}: {e}", exc_info=True)
# Return defaults if config read/create fails
return DEFAULT_MAIN_CONFIG.copy()
# Load configuration values we need for Celery
config = get_config_params()
MAX_CONCURRENT_DL = config.get('maxConcurrentDownloads', 3)
MAX_RETRIES = config.get('maxRetries', 3)
RETRY_DELAY = config.get('retryDelaySeconds', 5)
RETRY_DELAY_INCREASE = config.get('retry_delay_increase', 5)
config_params_values = get_config_params() # Renamed to avoid conflict with module name
MAX_CONCURRENT_DL = config_params_values.get('maxConcurrentDownloads', 3)
MAX_RETRIES = config_params_values.get('maxRetries', 3)
RETRY_DELAY = config_params_values.get('retryDelaySeconds', 5)
RETRY_DELAY_INCREASE = config_params_values.get('retry_delay_increase', 5)
# Define task queues
task_queues = {

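To see the default-backfill behaviour of `get_config_params` in isolation, a rough sketch like the following can be run from the project root (assuming the project's dependencies are installed; it writes a partial `./data/config/main.json` first, so only run it against a scratch checkout):
```python
import json
from pathlib import Path

# Write a deliberately incomplete main.json.
cfg_path = Path("./data/config/main.json")
cfg_path.parent.mkdir(parents=True, exist_ok=True)
cfg_path.write_text(json.dumps({"maxRetries": 10}))

# Imported after writing the file so the module-level read sees our partial config.
from routes.utils.celery_config import get_config_params  # assumed import path

cfg = get_config_params()
assert cfg["maxRetries"] == 10            # user-supplied value is kept
assert cfg["spotifyQuality"] == "NORMAL"  # missing keys are backfilled from DEFAULT_MAIN_CONFIG
print(json.dumps(cfg, indent=4))          # main.json on disk now holds the merged config
```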
View File

@@ -11,8 +11,8 @@ DB_DIR = Path('./data/watch')
PLAYLISTS_DB_PATH = DB_DIR / 'playlists.db'
ARTISTS_DB_PATH = DB_DIR / 'artists.db'
# Config path remains the same
CONFIG_PATH = Path('./data/config/watch.json')
# Config path for watch.json is managed in routes.utils.watch.manager now
# CONFIG_PATH = Path('./data/config/watch.json') # Removed
def _get_playlists_db_connection():
DB_DIR.mkdir(parents=True, exist_ok=True)

View File

@@ -20,10 +20,10 @@ from routes.utils.watch.db import (
update_artist_metadata_after_check # Renamed from update_artist_metadata
)
from routes.utils.get_info import get_spotify_info # To fetch playlist, track, artist, and album details
from routes.utils.celery_queue_manager import download_queue_manager, get_config_params
from routes.utils.celery_queue_manager import download_queue_manager
logger = logging.getLogger(__name__)
CONFIG_PATH = Path('./data/config/watch.json')
CONFIG_FILE_PATH = Path('./data/config/watch.json')
STOP_EVENT = threading.Event()
DEFAULT_WATCH_CONFIG = {
@@ -36,24 +36,37 @@ DEFAULT_WATCH_CONFIG = {
}
def get_watch_config():
"""Loads the watch configuration from watch.json."""
"""Loads the watch configuration from watch.json.
Creates the file with defaults if it doesn't exist.
Ensures all default keys are present in the loaded config.
"""
try:
if CONFIG_PATH.exists():
with open(CONFIG_PATH, 'r') as f:
config = json.load(f)
# Ensure all default keys are present
for key, value in DEFAULT_WATCH_CONFIG.items():
config.setdefault(key, value)
return config
else:
# Create a default config if it doesn't exist
with open(CONFIG_PATH, 'w') as f:
# Ensure ./data/config directory exists
CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
if not CONFIG_FILE_PATH.exists():
logger.info(f"{CONFIG_FILE_PATH} not found. Creating with default watch config.")
with open(CONFIG_FILE_PATH, 'w') as f:
json.dump(DEFAULT_WATCH_CONFIG, f, indent=2)
logger.info(f"Created default watch config at {CONFIG_PATH}")
return DEFAULT_WATCH_CONFIG
return DEFAULT_WATCH_CONFIG.copy()
with open(CONFIG_FILE_PATH, 'r') as f:
config = json.load(f)
updated = False
for key, value in DEFAULT_WATCH_CONFIG.items():
if key not in config:
config[key] = value
updated = True
if updated:
logger.info(f"Watch configuration at {CONFIG_FILE_PATH} was missing some default keys. Updated with defaults.")
with open(CONFIG_FILE_PATH, 'w') as f:
json.dump(config, f, indent=2)
return config
except Exception as e:
logger.error(f"Error loading watch config: {e}", exc_info=True)
return DEFAULT_WATCH_CONFIG # Fallback
logger.error(f"Error loading or creating watch config at {CONFIG_FILE_PATH}: {e}", exc_info=True)
return DEFAULT_WATCH_CONFIG.copy() # Fallback
def construct_spotify_url(item_id, item_type="track"):
return f"https://open.spotify.com/{item_type}/{item_id}"
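A rough usage sketch for the two helpers above, assuming the module is importable as `routes.utils.watch.manager`, the project's dependencies are installed, and `./data/config` is writable (the IDs below are illustrative only):
```python
from routes.utils.watch.manager import construct_spotify_url, get_watch_config  # assumed import path

cfg = get_watch_config()  # creates ./data/config/watch.json with defaults on first call
print(cfg.get("watchPollIntervalSeconds"))  # typically 3600 unless overridden

# Build canonical Spotify URLs for watched items.
print(construct_spotify_url("37i9dQZF1DXcBWIGoYBM5M", item_type="playlist"))
print(construct_spotify_url("4tZwfgrHOc3mvqYlEYSvVi"))  # item_type defaults to "track"
```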