"""
|
|
NAS Sharing Service - Public API wrapper
|
|
"""
|
|
|
|
import os
|
|
import sys
|
|
import logging
|
|
from typing import Dict, Optional, Any, Callable
|
|
from .nas_sharing_worker import SharingLinkWorker
|
|
|
|
logger = logging.getLogger(__name__)
|
|
_worker: Optional[SharingLinkWorker] = None
|
|
USE_ARIA2 = os.getenv('USE_ARIA2', 'true').lower() == 'true'
|
|
|
|
|
|
def get_worker() -> SharingLinkWorker:
    """Return the global SharingLinkWorker, creating and starting it on first use."""
    global _worker
    if _worker is None:
        _worker = SharingLinkWorker()
        _worker.start()
    return _worker


def process_sharing_link(url: str) -> Dict[str, str]:
    """Submit a sharing URL to the worker and return a pending request handle."""
    worker = get_worker()
    request_id = worker.submit_request(url)
    return {'request_id': request_id, 'status': 'pending'}


def get_sharing_result(request_id: str) -> Optional[Dict]:
    """Return the result for a submitted request, or None if it is not ready yet."""
    return get_worker().get_result(request_id)


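# Example (sketch) of the submit-then-poll pattern for the two functions above.
# The sharing URL is a hypothetical placeholder; `time` would need to be imported.
#
#     pending = process_sharing_link("https://disk.lezhin.com:5001/sharing/EXAMPLE_ID")
#     result = None
#     while result is None:
#         time.sleep(1)
#         result = get_sharing_result(pending["request_id"])

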
def is_otp_required() -> bool:
    """Return True exactly once when the worker is waiting for an OTP."""
    worker = get_worker()
    # Show the modal only once, and only before the OTP has been submitted
    if worker.otp_pending and not worker.otp_modal_shown and not worker.otp_submitted:
        worker.otp_modal_shown = True
        return True
    return False


def submit_otp(code: str) -> Dict[str, str]:
    """Hand an OTP code to the worker."""
    get_worker().otp_code = code
    return {'status': 'ok', 'message': 'OTP received'}


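# Example (sketch) of the caller-side OTP flow, assuming a UI layer polls
# is_otp_required() while a request is pending. `ask_user_for_otp` is a
# hypothetical UI helper, not part of this module.
#
#     if is_otp_required():
#         code = ask_user_for_otp()
#         submit_otp(code)  # the worker reads otp_code on its next check

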
def download_file(
    sharing_id: str,
    file_path: str,
    save_path: str,
    is_folder: bool = False,
    progress_callback=None,
    max_speed: Optional[str] = None,
    validate_link: bool = True
) -> Dict[str, Any]:
    """
    Download a file from a sharing link.

    Args:
        sharing_id: Sharing ID
        file_path: Remote file path
        save_path: Local save path
        is_folder: Whether the file is a folder
        progress_callback: Progress callback function
        max_speed: Optional bandwidth limit (e.g., '100K')
        validate_link: Validate the link before downloading (default: True)

    Returns:
        Dict with status, message, save_path and, where applicable, aria2_gid
    """
    worker = get_worker()

    try:
        # Step 0: Clean up any existing file to force a re-download.
        # aria2 with continue=true will SKIP the download if the file already exists and
        # is complete, so we must delete it to ensure a fresh download.
        # Use the shared cleanup function with delete_dirs=False (Sharing-mode specific).
        try:
            from .nas_api.file_operations import cleanup_duplicates_before_download
            file_name = os.path.basename(save_path)
            dest_dir = os.path.dirname(save_path)

            # Clean up duplicates AND the exact file
            cleanup_duplicates_before_download(
                dest_path=dest_dir,
                file_name_pattern=file_name.replace('.zip', ''),  # Basic pattern from the filename
                exact_filename=file_name,
                delete_dirs=False
            )

            # Safety check: if the file still exists (could not be deleted), append _NEW
            if os.path.exists(save_path):
                logger.warning(
                    f"[download_file] Could not delete existing file {save_path}, appending _NEW")
                name, ext = os.path.splitext(save_path)
                save_path = f"{name}_NEW{ext}"
                logger.debug(f"[download_file] New save path: {save_path}")

        except ImportError:
            logger.warning(
                "Could not import cleanup_duplicates_before_download")
            # Fall back to a simple delete
            if os.path.exists(save_path):
                try:
                    os.remove(save_path)
                except OSError:
                    pass
        except Exception as e:
            logger.warning(f"[download_file] Cleanup failed: {e}")

        # Also delete the .aria2 control file if it exists (for the new path)
        aria2_file = f"{save_path}.aria2"
        if os.path.exists(aria2_file):
            try:
                os.remove(aria2_file)
                logger.debug(
                    f"[download_file] Deleted existing .aria2 file: {aria2_file}")
            except Exception as e:
                logger.warning(
                    f"[download_file] Failed to delete .aria2 file: {e}")

        # Step 1: Extract cookies and build the URL (NEEDS the driver lock - FAST, ~1s)
        with worker.driver_lock:
            # Always ensure the driver is ready and ALIVE (check current_url).
            # Previously this only checked 'if not worker.driver', which missed dead drivers.
            worker._ensure_driver_ready()

            if not worker.driver:
                return {'status': 'error', 'message': 'Worker driver failed to initialize'}

            # Ensure the driver is on the correct sharing page to get valid cookies.
            # If the driver is fresh (data:,) or on another page, cookies will be missing/wrong.
            expected_url = f'https://disk.lezhin.com:5001/sharing/{sharing_id}'
            try:
                if sharing_id not in worker.driver.current_url:
                    logger.debug(
                        f"[download_file] Driver not on sharing page, navigating to: {expected_url}")
                    worker.driver.get(expected_url)
            except Exception as e:
                logger.warning(
                    f"[download_file] Failed to check/navigate URL: {e}")
                # Try to restart the driver if navigation fails
                worker._ensure_driver_ready()
                if worker.driver:
                    worker.driver.get(expected_url)

            # Import here to avoid a circular dependency
            from .nas_sharing_api.selenium_operations import prepare_download_url, validate_download_link

            # Extract cookies and build the download URL (FAST - only needs the driver briefly)
            file_name = os.path.basename(save_path)
            download_url, cookie_string = prepare_download_url(
                driver=worker.driver,
                sharing_id=sharing_id,
                remote_path=file_path,
                file_name=file_name
            )
        # Lock released here - the driver is now free for other requests

        # Step 1.5: Validate the link (OPTIONAL - detects dead links before downloading)
        if validate_link:
            logger.debug(
                f"[download_file] Validating link before download: {file_name}")
            try:
                is_valid, error_msg, content_length = validate_download_link(
                    download_url=download_url,
                    cookie_string=cookie_string,
                    timeout=10
                )

                if not is_valid:
                    # The link is dead → fail immediately, do not download
                    logger.error(
                        f"[download_file] ❌ Link validation failed: {error_msg}")
                    return {
                        'status': 'error',
                        'message': f'Invalid link: {error_msg}',
                        'save_path': None
                    }

                # Link OK → log the size if it is known
                if content_length:
                    logger.debug(
                        f"[download_file] ✅ Link valid, file size: {content_length:,} bytes")
                else:
                    logger.debug(
                        "[download_file] ✅ Link valid (size unknown)")

            except RuntimeError as e:
                # Validation error (network/timeout) → log a warning but continue the download
                logger.warning(
                    f"[download_file] ⚠️ Link validation failed with error: {e}")
                logger.warning(
                    "[download_file] Continuing download anyway...")

        # Step 2: Download with aria2 (NO driver lock needed - SLOW, ~minutes)
        from .nas_sharing_api.selenium_operations import get_aria2_manager

        manager = get_aria2_manager()
        logger.debug(f"[download_file] Starting aria2 download: {file_name}")

        success, error_msg, gid = manager.download_file(
            url=download_url,
            dest_path=save_path,
            cookies=cookie_string,
            referer="https://disk.lezhin.com:5001/",
            progress_callback=progress_callback,
            max_download_limit=max_speed
        )

        if success:
            return {
                'status': 'success',
                'message': 'Download completed successfully',
                'save_path': save_path,
                'aria2_gid': gid  # ✅ Return the GID for cancellation support
            }
        else:
            # ✅ IMPORTANT: return the GID even on failure, because it exists from the
            # moment the aria2 task was created and can be used to cancel or clean up.
            return {
                'status': 'error',
                'message': f'Download failed: {error_msg}',
                'save_path': None,
                'aria2_gid': gid  # ✅ GID allows cancel/cleanup of the failed task
            }

    except Exception as e:
        logger.error(f"[download_file] Exception: {e}", exc_info=True)
        return {'status': 'error', 'message': str(e), 'aria2_gid': None}


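# Example (sketch) of calling download_file() with a bandwidth cap and keeping the
# aria2 GID for later cancellation/cleanup. Paths and the sharing ID are hypothetical
# placeholders; the (downloaded, total) callback signature is inferred from
# download_sharing_files() below.
#
#     outcome = download_file(
#         sharing_id="EXAMPLE_ID",
#         file_path="/shared/example.zip",
#         save_path="/downloads/example.zip",
#         max_speed="500K",
#         progress_callback=lambda downloaded, total: print(downloaded, total),
#     )
#     gid = outcome.get("aria2_gid")  # set when an aria2 task was created, even if it failed

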
def shutdown_worker():
    """Stop the global worker and clear the singleton."""
    global _worker
    if _worker:
        _worker.stop()
        _worker = None


def start_sharing_worker():
    """Initialize and start the sharing link worker on startup."""
    get_worker()  # Creates and starts the worker if it does not exist yet


def get_sharing_worker() -> Optional[SharingLinkWorker]:
    """Get the global sharing worker instance (for external use)."""
    return _worker


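# Example (sketch) of the worker lifecycle around the helpers above, e.g. wired into an
# application's startup/shutdown hooks (the hook mechanism itself is not part of this module).
#
#     start_sharing_worker()          # at startup: create and start the singleton worker
#     worker = get_sharing_worker()   # later: may be None if the worker was never started
#     shutdown_worker()               # at exit: stop the worker and clear the singleton

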
def download_sharing_files(
    worker_instance: SharingLinkWorker,
    sharing_id: str,
    files_info: list,
    dest_path: str,
    job_id: Optional[str] = None,
    progress_callback: Optional[Callable] = None
) -> tuple:
    """
    Download multiple files from a sharing link to a destination directory.

    Args:
        worker_instance: SharingLinkWorker instance
        sharing_id: Sharing ID
        files_info: List of file dicts with 'path', 'name', 'is_folder'
        dest_path: Destination directory
        job_id: Optional job ID for progress updates
        progress_callback: Optional callback(file_index, total_files, file_progress_data)

    Returns:
        (status, results, message) tuple
    """
    try:
        import os

        # Ensure the driver is ready
        if not worker_instance.driver:
            worker_instance._ensure_driver_ready()

        if not worker_instance.driver:
            raise RuntimeError("Worker driver failed to initialize")

        results = []
        success_count = 0
        total_files = len(files_info)

        # Initialize per-file status entries for progress tracking
        files_status = [
            {
                "name": f.get('name', ''),
                "status": "pending",
                "is_folder": f.get('is_folder', False),
                "size": f.get('size_bytes', 0)
            }
            for f in files_info
        ]

        for idx, file_info in enumerate(files_info):
            file_path = file_info.get('path', '')
            file_name = file_info.get('name', os.path.basename(file_path))
            is_folder = file_info.get('is_folder', False)
            file_size = file_info.get('size_bytes', 0)

            # Add .zip for folders
            if is_folder and not file_name.endswith('.zip'):
                file_name = f"{file_name}.zip"

            save_path = os.path.join(dest_path, file_name)

            # Update status to downloading
            files_status[idx]["status"] = "downloading"
            files_status[idx]["progress"] = 0

            print(
                f"[Download] ({idx + 1}/{total_files}) {file_name} → {save_path}")

            # Progress callback for the individual file
            def file_progress_callback(downloaded_bytes: int, total_bytes: int):
                # Update progress for both files and folders
                files_status[idx]["downloaded"] = downloaded_bytes

                if total_bytes > 0:
                    # File with a known size - calculate the percentage
                    progress_pct = (downloaded_bytes / total_bytes) * 100
                    files_status[idx]["progress"] = round(progress_pct, 1)
                    files_status[idx]["total"] = total_bytes
                else:
                    # Folder (no total size) - just track downloaded bytes
                    files_status[idx]["progress"] = None
                    files_status[idx]["total"] = None

                # Call the parent progress callback
                if progress_callback:
                    progress_callback(idx, total_files, {
                        "current_file": file_name,
                        "current_file_index": idx + 1,
                        "total_files": total_files,
                        "current_file_progress": files_status[idx].get("progress"),
                        "current_file_downloaded": downloaded_bytes,
                        "current_file_total": total_bytes if total_bytes > 0 else None,
                        "files_status": files_status
                    })

            # Download (always use aria2)
            try:
                # Step 1: Extract cookies and build the URL (LOCK the driver briefly, ~1s)
                with worker_instance.driver_lock:
                    # Ensure the driver is on the correct sharing page before extracting cookies
                    expected_url = f'https://disk.lezhin.com:5001/sharing/{sharing_id}'
                    if sharing_id not in worker_instance.driver.current_url:
                        logger.debug(
                            f"[download_sharing_files] Driver not on sharing page, navigating to: {expected_url}")
                        worker_instance.driver.get(expected_url)

                    from .nas_sharing_api.selenium_operations import prepare_download_url

                    download_url, cookie_string = prepare_download_url(
                        driver=worker_instance.driver,
                        sharing_id=sharing_id,
                        remote_path=file_path,
                        file_name=file_name
                    )
                # Lock released - the driver is free for other requests

                # Step 2: Download with aria2 (NO lock - allows parallel downloads)
                from .nas_sharing_api.selenium_operations import get_aria2_manager

                manager = get_aria2_manager()
                success, error_msg, gid = manager.download_file(
                    url=download_url,
                    dest_path=save_path,
                    cookies=cookie_string,
                    referer="https://disk.lezhin.com:5001/",
                    progress_callback=file_progress_callback
                )

                if not success:
                    raise RuntimeError(f"aria2 download failed: {error_msg}")

            except Exception as e:
                import traceback
                traceback.print_exc()
                success = False

            # Update status after the download attempt
            if success:
                files_status[idx]["status"] = "completed"
                files_status[idx]["progress"] = 100
                success_count += 1
            else:
                files_status[idx]["status"] = "failed"

            results.append({
                'name': file_name,
                'path': file_path,
                'success': success,
                'destination': save_path if success else None
            })

        if success_count == len(files_info):
            return ("success", results, f"Downloaded {success_count}/{len(files_info)} files")
        elif success_count > 0:
            return ("partial", results, f"Downloaded {success_count}/{len(files_info)} files")
        else:
            return ("failed", results, "All downloads failed")

    except Exception as e:
        import traceback
        traceback.print_exc()
        return ("error", [], str(e))
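

# Minimal smoke-test sketch. Assumptions: the module is run with
# `python -m <package>.<module>` so the relative imports resolve, the worker backend
# (Selenium driver + aria2) is available, and the sharing URL below is a hypothetical
# placeholder.
if __name__ == "__main__":
    import time

    start_sharing_worker()
    try:
        pending = process_sharing_link(
            "https://disk.lezhin.com:5001/sharing/EXAMPLE_ID")
        print(f"Submitted request: {pending['request_id']}")

        result = None
        for _ in range(60):  # poll for up to ~60 seconds
            if is_otp_required():
                print("OTP required - call submit_otp(code) from your UI")
            result = get_sharing_result(pending['request_id'])
            if result is not None:
                break
            time.sleep(1)

        print(f"Result: {result}")
    finally:
        shutdown_worker()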