from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import logging
import threading
import os
import signal
import sys
import atexit
from dotenv import load_dotenv

# Load environment variables from .env.local first, then .env
load_dotenv('.env.local')
load_dotenv()

# Import routers (kept after env loading so route/service modules can read
# the loaded configuration at import time)
from .routes import tms_routes, raw_api_routes, raw_sharing_routes, downloads_routes, custom_paths_routes
from . import worker_downloads  # New file-centric worker
from . import worker as backend_worker
from .services import nas_sharing_service

# Configure logging - output to both terminal and file
LOG_FILE = os.path.join(os.path.dirname(
    os.path.dirname(__file__)), 'backend.log')

# Create formatter
log_formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# Root logger configuration
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)

# Console handler (terminal)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(log_formatter)
root_logger.addHandler(console_handler)

# File handler (backend.log)
file_handler = logging.FileHandler(LOG_FILE, encoding='utf-8')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(log_formatter)
root_logger.addHandler(file_handler)

logger = logging.getLogger(__name__)

# Guards against running shutdown cleanup twice: signal_handler calls
# cleanup_on_exit() and then sys.exit(0), and sys.exit fires the atexit
# hook, which would otherwise invoke cleanup_on_exit() a second time.
_cleanup_done = threading.Event()


def cleanup_on_exit():
    """
    Graceful shutdown handler - cleanup downloads when server stops.

    Called automatically by atexit or signal handlers. Idempotent: only
    the first invocation does any work; later calls return immediately.

    Cancels every in-flight aria2 task, then marks all active download
    records as 'failed' so no stale "in progress" state survives a
    restart. All errors are logged and swallowed — shutdown cleanup must
    never raise.
    """
    if _cleanup_done.is_set():
        return
    _cleanup_done.set()

    logger.debug("🛑 Server shutting down, cleaning up active downloads...")
    try:
        # Imported lazily to avoid circular imports at module load time.
        from .services import downloads_service
        from .services.aria2.download_manager import get_aria2_manager

        # Get all active downloads
        active_downloads = downloads_service.get_active_downloads()
        if not active_downloads:
            logger.debug("No active downloads to clean up")
            return

        logger.debug(f"Found {len(active_downloads)} active downloads")

        # Try to get aria2 manager; proceed without it if unavailable so
        # the database rows still get marked as failed below.
        try:
            manager = get_aria2_manager()
        except Exception as e:
            logger.warning(f"Could not get aria2 manager: {e}")
            manager = None

        # Cancel all aria2 tasks first
        cancelled_count = 0
        for download in active_downloads:
            gid = download.get('aria2_gid')
            if gid and manager:
                try:
                    logger.debug(f"Cancelling aria2 task GID: {gid}")
                    manager.cancel_download(gid)
                    cancelled_count += 1
                except Exception as e:
                    logger.warning(f"Failed to cancel aria2 GID {gid}: {e}")

        if cancelled_count > 0:
            logger.debug(f"Cancelled {cancelled_count} aria2 tasks")

        # Update all active downloads to failed status
        failed_count = 0
        for download in active_downloads:
            try:
                downloads_service.update_download_status(
                    download_id=download['id'],
                    status='failed',
                    error_message='Server was shut down during download'
                )
                failed_count += 1
            except Exception as e:
                logger.error(
                    f"Failed to update download {download['id']}: {e}")

        logger.debug(
            f"✅ Cleanup complete: {failed_count} downloads marked as failed")

    except Exception as e:
        logger.error(f"Error during cleanup: {e}", exc_info=True)


# Register cleanup handlers
atexit.register(cleanup_on_exit)


def signal_handler(sig, frame):
    """Handle SIGINT/SIGTERM gracefully."""
    logger.debug(f"Received signal {sig}, initiating graceful shutdown...")
    cleanup_on_exit()
    # sys.exit runs the atexit hook too; cleanup_on_exit is idempotent,
    # so the second invocation is a no-op.
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

app = FastAPI(
    title="DKI Download API",
    version="2.0.0",
    redirect_slashes=False  # Disable automatic slash redirects
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins in development
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# ==================== REGISTER ROUTERS ====================

# Register all route modules
app.include_router(tms_routes.router)
app.include_router(raw_api_routes.router)
app.include_router(raw_sharing_routes.router)
app.include_router(downloads_routes.router)  # New unified downloads API
app.include_router(custom_paths_routes.router)  # Custom folder paths

# ==================== STARTUP EVENTS ====================


@app.on_event('startup')
def start_background_worker():
    """Start the background workers in daemon threads.

    - backend_worker: Polls pending submissions for automation
    - worker_downloads: Polls pending file downloads (NEW file-centric)
    - nas_sharing_service: Processes sharing links with Selenium
    - aria2_daemon: Fast parallel download engine (if enabled)
    """
    try:
        # Start aria2 daemon if enabled (defaults to on)
        USE_ARIA2 = os.getenv('USE_ARIA2', 'true').lower() == 'true'
        if USE_ARIA2:
            try:
                from .services.aria2 import start_aria2_daemon
                aria2_secret = os.getenv(
                    'ARIA2_RPC_SECRET', 'dkidownload_secret_2025')
                if start_aria2_daemon(secret=aria2_secret):
                    logger.debug("✅ aria2c RPC daemon started on port 6800")
                else:
                    logger.warning(
                        "⚠️ aria2c failed to start, downloads will use requests")
            except Exception as e:
                logger.warning(
                    f"⚠️ aria2c not available: {e}, downloads will use requests")
        else:
            logger.debug(
                "aria2 disabled (USE_ARIA2=false), using requests for downloads")

        # Start submission worker
        t1 = threading.Thread(target=backend_worker.run_loop,
                              name='backend-worker', daemon=True)
        t1.start()

        # Start file download worker (NEW)
        t2 = threading.Thread(
            target=worker_downloads.start_worker,
            name='file-download-worker', daemon=True)
        t2.start()

        # Start sharing link worker
        nas_sharing_service.start_sharing_worker()
        logger.debug("Background workers started")
    except Exception as e:
        # Log but don't prevent app startup
        logger.exception('Failed to start background workers: %s', e)


# ==================== HEALTH CHECK ====================


@app.get("/")
def health_check():
    """Simple health check endpoint."""
    return {
        "status": "ok",
        "app": "DKI Download API",
        "version": "2.0.0"
    }