Merge pull request #331 from Phlogi/logging-dot-env-and-imports

(fix): logging level from .env configuration
Authored by Spotizerr on 2025-08-26 19:59:06 -06:00, committed by GitHub
4 changed files with 39 additions and 18 deletions

app.py (31 changed lines)

@@ -12,16 +12,20 @@ import sys
import redis
import socket
from urllib.parse import urlparse
from dotenv import load_dotenv
load_dotenv()
# Define a mapping from string log levels to logging constants
LOG_LEVELS = {
"CRITICAL": logging.CRITICAL,
"ERROR": logging.ERROR,
"WARNING": logging.WARNING,
"INFO": logging.INFO,
"DEBUG": logging.DEBUG,
"NOTSET": logging.NOTSET,
}
# Parse log level from environment as early as possible, default to INFO for visibility
log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper()
log_level = getattr(logging, log_level_str, logging.INFO)
# Set up a very basic logging config immediately, so early logs (including import/migration errors) are visible
logging.basicConfig(
level=log_level,
format="%(asctime)s [%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
stream=sys.stderr,
)
# Run DB migrations as early as possible, before importing any routers that may touch DBs
try:
@@ -37,10 +41,6 @@ except Exception as e:
)
sys.exit(1)
# Get log level from environment variable, default to INFO
log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper()
log_level = LOG_LEVELS.get(log_level_str, logging.INFO)
# Apply process umask from environment as early as possible
_umask_value = os.getenv("UMASK")
if _umask_value:
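
For reference, a minimal standalone sketch of the configuration these hunks converge on (names mirror the diff; the .env line in the first comment is illustrative): python-dotenv reads the .env file before any logging call, the LOG_LEVEL string is mapped to a logging constant, and basic console logging is configured immediately so early import and migration errors are visible.

# .env (example): LOG_LEVEL=DEBUG
import logging
import os
import sys

from dotenv import load_dotenv

# Load .env before reading LOG_LEVEL so the value is visible to os.getenv()
load_dotenv()

LOG_LEVELS = {
    "CRITICAL": logging.CRITICAL,
    "ERROR": logging.ERROR,
    "WARNING": logging.WARNING,
    "INFO": logging.INFO,
    "DEBUG": logging.DEBUG,
    "NOTSET": logging.NOTSET,
}

# Default to WARNING when LOG_LEVEL is unset, fall back to INFO when it is unrecognized
log_level_str = os.getenv("LOG_LEVEL", "WARNING").upper()
log_level = LOG_LEVELS.get(log_level_str, logging.INFO)

logging.basicConfig(
    level=log_level,
    format="%(asctime)s [%(levelname)s] %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    stream=sys.stderr,
)
logging.warning("Effective log level: %s", logging.getLevelName(log_level))
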
@@ -139,6 +139,7 @@ def setup_logging():
"uvicorn", # General Uvicorn logger
"uvicorn.access", # Uvicorn access logs
"uvicorn.error", # Uvicorn error logs
"spotizerr",
]:
logger = logging.getLogger(logger_name)
logger.setLevel(log_level)
@@ -146,7 +147,7 @@ def setup_logging():
# if access_log=False is used in uvicorn.run, and to ensure our middleware handles it.
logger.propagate = False if logger_name == "uvicorn.access" else True
logging.info("Logging system initialized")
logger.info("Logging system initialized")
def check_redis_connection():
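
A minimal sketch of the logger-setup loop this hunk touches (the level value is a stand-in for what app.py computes earlier): each named logger, including the newly added "spotizerr" logger, is pinned to the parsed level, and only uvicorn.access has propagation turned off so access lines are not duplicated through the root handlers.

import logging

log_level = logging.INFO  # stand-in for the level parsed from LOG_LEVEL

for logger_name in [
    "uvicorn",         # general Uvicorn logger
    "uvicorn.access",  # Uvicorn access logs
    "uvicorn.error",   # Uvicorn error logs
    "spotizerr",       # application logger added in this change
]:
    logger = logging.getLogger(logger_name)
    logger.setLevel(log_level)
    # Let everything propagate to the root handlers except access logs,
    # which are handled separately (e.g. by middleware).
    logger.propagate = False if logger_name == "uvicorn.access" else True

logger.info("Logging system initialized")  # emitted via the last configured logger
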
@@ -197,6 +198,8 @@ async def lifespan(app: FastAPI):
"""Handle application startup and shutdown"""
# Startup
setup_logging()
effective_level = logging.getLevelName(log_level)
logging.getLogger(__name__).info(f"Logging system fully initialized (lifespan startup). Effective log level: {effective_level}")
# Run migrations before initializing services
try:
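
A hedged sketch of the lifespan startup this hunk extends, assuming a FastAPI application object and a setup_logging() helper like the one in app.py (stubbed here): logging is configured first, then the effective level is reported once so a misread LOG_LEVEL is easy to spot in the startup output.

import logging
from contextlib import asynccontextmanager

from fastapi import FastAPI

log_level = logging.INFO  # stand-in for the level parsed from LOG_LEVEL


def setup_logging() -> None:
    # Stub for the application's setup_logging(); see the hunks above.
    logging.basicConfig(level=log_level)


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Handle application startup and shutdown."""
    setup_logging()
    effective_level = logging.getLevelName(log_level)
    logging.getLogger(__name__).info(
        f"Logging system fully initialized (lifespan startup). Effective log level: {effective_level}"
    )
    yield  # the application serves requests here
    # shutdown cleanup would follow the yield


app = FastAPI(lifespan=lifespan)
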

View File

@@ -82,7 +82,7 @@ class SSEBroadcaster:
# Clean up disconnected clients
for client in disconnected:
self.clients.discard(client)
logger.info(
logger.debug(
f"SSE Broadcaster: Successfully sent to {sent_count} clients, removed {len(disconnected)} disconnected clients"
)
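
The only change in this file demotes a per-broadcast summary from info to debug. A hypothetical sketch of the surrounding loop (the queue-based client handling is an assumption, not taken from the diff) shows why the line fires on every event and is better kept at debug:

import asyncio
import logging

logger = logging.getLogger(__name__)


class SSEBroadcaster:
    def __init__(self) -> None:
        self.clients: set[asyncio.Queue] = set()

    async def broadcast(self, event: str) -> None:
        sent_count = 0
        disconnected = []
        for client in self.clients:
            try:
                client.put_nowait(event)
                sent_count += 1
            except asyncio.QueueFull:
                disconnected.append(client)
        # Clean up disconnected clients
        for client in disconnected:
            self.clients.discard(client)
        # Emitted on every broadcast, so debug keeps normal operation quiet
        logger.debug(
            f"SSE Broadcaster: Successfully sent to {sent_count} clients, "
            f"removed {len(disconnected)} disconnected clients"
        )
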

View File

@@ -5,6 +5,9 @@ import threading
import os
import sys
from dotenv import load_dotenv
load_dotenv()
# Import Celery task utilities
from .celery_config import get_config_params, MAX_CONCURRENT_DL
@@ -70,6 +73,12 @@ class CeleryManager:
logger.debug(f"Generated Celery command: {' '.join(command)}")
return command
def _get_worker_env(self):
# Inherit current environment, but set NO_CONSOLE_LOG=1 for subprocess
env = os.environ.copy()
env["NO_CONSOLE_LOG"] = "1"
return env
def _process_output_reader(self, stream, log_prefix, error=False):
logger.debug(f"Log reader thread started for {log_prefix}")
try:
@@ -138,6 +147,7 @@ class CeleryManager:
text=True,
bufsize=1,
universal_newlines=True,
env=self._get_worker_env(),
)
self.download_log_thread_stdout = threading.Thread(
target=self._process_output_reader,
@@ -161,7 +171,7 @@ class CeleryManager:
queues="utility_tasks,default", # Listen to utility and default
concurrency=5, # Increased concurrency for SSE updates and utility tasks
worker_name_suffix="utw", # Utility Worker
log_level_env=os.getenv("LOG_LEVEL", "ERROR").upper(),
log_level_env=os.getenv("LOG_LEVEL", "WARNING").upper(),
)
logger.info(
@@ -174,6 +184,7 @@ class CeleryManager:
text=True,
bufsize=1,
universal_newlines=True,
env=self._get_worker_env(),
)
self.utility_log_thread_stdout = threading.Thread(
target=self._process_output_reader,
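
A minimal sketch of the worker-environment pattern added in the hunks above, assuming the Celery worker is launched with subprocess.Popen (the command here is a placeholder, not the one CeleryManager builds): the parent environment is copied, NO_CONSOLE_LOG=1 is injected into the copy, and the copy is passed via env= so only the child process sees the flag.

import os
import subprocess


def _get_worker_env() -> dict[str, str]:
    # Inherit the current environment, but set NO_CONSOLE_LOG=1 for the subprocess
    env = os.environ.copy()
    env["NO_CONSOLE_LOG"] = "1"
    return env


# Placeholder command; the real one is generated by CeleryManager (see the diff above)
command = [
    "celery", "-A", "tasks", "worker",
    "--loglevel", os.getenv("LOG_LEVEL", "WARNING").upper(),
]

process = subprocess.Popen(
    command,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    text=True,
    bufsize=1,
    universal_newlines=True,
    env=_get_worker_env(),  # child sees NO_CONSOLE_LOG=1, parent env is untouched
)
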

View File

@@ -285,9 +285,16 @@ def setup_celery_logging(**kwargs):
"""
This handler ensures Celery uses our application logging settings
instead of its own. Prevents duplicate log configurations.
Also disables console logging if NO_CONSOLE_LOG=1 is set in the environment.
"""
# Using the root logger's handlers and level preserves our config
return logging.getLogger()
root_logger = logging.getLogger()
import os
if os.environ.get("NO_CONSOLE_LOG") == "1":
# Remove all StreamHandlers (console handlers) from the root logger
handlers_to_remove = [h for h in root_logger.handlers if isinstance(h, logging.StreamHandler)]
for h in handlers_to_remove:
root_logger.removeHandler(h)
return root_logger
# The initialization of a worker will log the worker configuration
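
A hedged sketch of how a handler like this is typically wired, assuming it is registered on Celery's setup_logging signal (the diff shows only the handler body, not the registration): having a receiver on that signal stops Celery from installing its own logging configuration, and the NO_CONSOLE_LOG check strips console handlers inside worker subprocesses launched with that flag.

import logging
import os

from celery.signals import setup_logging


@setup_logging.connect
def setup_celery_logging(**kwargs):
    """Reuse the application's logging configuration inside Celery workers.

    Also disables console logging if NO_CONSOLE_LOG=1 is set in the environment.
    """
    root_logger = logging.getLogger()
    if os.environ.get("NO_CONSOLE_LOG") == "1":
        # Remove all StreamHandlers (console handlers) from the root logger
        handlers_to_remove = [
            h for h in root_logger.handlers if isinstance(h, logging.StreamHandler)
        ]
        for h in handlers_to_remove:
            root_logger.removeHandler(h)
    return root_logger
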