This commit is contained in:
arthur 2025-12-10 13:41:43 +07:00
commit d6c547b29f
123 changed files with 25789 additions and 0 deletions

64
.gitignore vendored Executable file
View File

@@ -0,0 +1,64 @@
# Logs
logs/
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
# Node / build
node_modules/
dist/
dist-ssr/
*.local
# Environment / secrets
.env
.env.local
credentials.json
credentials.txt
session_api.json
metadata.json
tms_session.json
# Legacy (no longer used - Chrome profile handles cookies)
session.json
session_sharing.json
synology_cookies.json
# Editor / OS
.vscode/
!.vscode/extensions.json
.idea/
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
# Python
.venv/
venv/
__pycache__/
*.pyc
*.pyo
*.pyd
# Chrome profile and runtime artifacts
chrome_profile
chrome_profile_nas
# aria2 session and temp files
aria2/*.aria2
aria2/*.session
aria2/downloads/
aria2/*.log
# Legacy directories (ignored)
raw/
.serena/
.github/
.vscode/

2077
App.tsx Executable file

File diff suppressed because it is too large Load Diff

245
README.md Executable file
View File

@@ -0,0 +1,245 @@
# DKI Download
Ứng dụng web quản lý quyền TMS và tải raw files từ Synology NAS.
## Tech Stack
- **Frontend**: React 19 + TypeScript + TailwindCSS + Vite
- **Backend Python**: FastAPI + Python 3.12 (xử lý download)
- **Backend TypeScript**: Express + Node.js (xử lý TMS permission)
- **Database**: MongoDB (metadata) + Supabase PostgreSQL (queue, history, sessions)
- **Storage**: Synology NAS (FileStation API + Sharing Link với Selenium)
- **Download**: Aria2 (download accelerator)
## Features
### 🔐 Cấp quyền TMS
- Cấp quyền hàng loạt (usernames × GE IDs)
- Sử dụng TMS REST API trực tiếp
- Queue management với drag-and-drop ưu tiên
- Real-time status tracking qua Supabase Realtime
### 📥 Tải Raw Files
- **API Mode**: Tải trực tiếp qua NAS FileStation API
- **Sharing Link Mode**: Tải qua Synology sharing link với Selenium
- Browse NAS directories với file browser
- Concurrent download queue (max 10 parallel)
- Smart file type detection và icons
### ✅ Check Upload
- Kiểm tra trạng thái upload của chapters
- Lọc theo status: pending, found, not found, error
### 🔧 Custom Paths
- Lưu và quản lý custom folder paths cho sharing link
- Quick jump đến folder thường dùng
## Quick Start
### Prerequisites
- Node.js 18+ và npm
- Python 3.12+
- MongoDB access
- Supabase project
### Installation
```powershell
# Clone và cài đặt dependencies
git clone <repository-url>
cd dkiDownload
npm install
# Setup Python backend
python -m venv .venv
.\.venv\Scripts\Activate.ps1
pip install -r backend/requirements.txt
```
### Development
```powershell
# Chạy cả 3 services (frontend + 2 backends)
npm run dev
# Hoặc chạy riêng:
npm run dev:frontend # Frontend (port 5173)
npm run dev:backend-python # Python backend (port 8000)
npm run dev:backend-typescript # TypeScript backend (port 3002)
```
**URLs:**
- Frontend: http://localhost:5173
- Python API: http://localhost:8000
- TypeScript API: http://localhost:3002
## Environment Variables
Tạo file `.env.local`:
```bash
# MongoDB
MONGODB_URI=mongodb+srv://...
MONGODB_DATABASE=schedule
# Supabase
SUPABASE_URL=https://....supabase.co
SUPABASE_SERVICE_ROLE_KEY=eyJ...
# NAS
NAS_BASE_URL=https://disk.lezhin.com:5001/webapi
NAS_DSM_URL=https://disk.lezhin.com:5001
NAS_USERNAME=geupload2
NAS_PASSWORD=***
NAS_DESTINATION_PATH=\\172.16.14.240\raw
# Chrome profile cho Selenium
NAS_CHROME_PROFILE_PATH=chrome_profile_nas
# TMS
TMS_API_URL=https://tms.kiledel.com
TMS_EMAIL=***
TMS_PASSWORD=***
# Optional
DRIVER_ADMIN_TOKEN=***
```
## Project Structure
```
dkiDownload/
├── App.tsx # Main React component
├── index.tsx # React entry point
├── types.ts # TypeScript definitions
├── components/ # React components (41 files)
│ ├── SubmissionForm.tsx # TMS permission form
│ ├── SubmissionHistory.tsx # Permission history
│ ├── RawDownloadForm.tsx # Raw download form
│ ├── DownloadHistory.tsx # Download history
│ ├── CheckPage.tsx # Check upload page
│ └── ...
├── hooks/ # React hooks
│ ├── use-tab-visibility.ts # Tab visibility tracking
│ └── index.ts
├── utils/ # Utilities
│ ├── supabase.ts # Supabase client
│ ├── use-realtime-downloads.ts
│ ├── use-realtime-submissions.ts
│ └── sort-utils.ts
├── src/ # TypeScript backend (Express)
│ ├── server.ts # Express server entry
│ ├── config.ts # Configuration
│ ├── api/
│ │ └── submissions.ts # Submissions API
│ └── services/
│ ├── auth.service.ts # TMS authentication
│ ├── tms-api.service.ts # TMS API calls
│ ├── supabase.service.ts # Supabase operations
│ └── worker.service.ts # Permission worker
├── backend/ # Python backend (FastAPI)
│ ├── main.py # FastAPI app
│ ├── worker.py # TMS permission worker
│ ├── worker_downloads.py # Download worker
│ ├── routes/
│ │ ├── tms_routes.py # TMS endpoints
│ │ ├── raw_api_routes.py # API download endpoints
│ │ ├── raw_sharing_routes.py # Sharing link endpoints
│ │ ├── downloads_routes.py # Download management
│ │ └── custom_paths_routes.py
│ └── services/
│ ├── mongodb_service.py
│ ├── supabase_service.py
│ ├── nas_service.py
│ ├── downloads_service.py
│ ├── nas_api/ # NAS API package
│ ├── nas_sharing_api/ # Selenium sharing package
│ └── aria2/ # Aria2 client
└── aria2/ # Aria2 download tool
└── aria2c.exe
```
## Database Schema (Supabase)
| Table | Mô tả |
| -------------- | ------------------------------------ |
| `sessions` | Auth sessions (TMS, NAS FileStation) |
| `submissions` | TMS permission submissions |
| `downloads` | File download queue & history |
| `custom_paths` | Custom folder paths cho sharing |
| `check_list` | Check upload requests |
| `chapters` | Chapter management |
| `users` | User list for autocomplete |
## Architecture
```
┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐
│ React App │────▶│ Python Backend │────▶│ Synology NAS │
│ (Vite) │ │ (FastAPI:8000) │ │ (FileStation) │
└────────┬────────┘ └────────┬─────────┘ └─────────────────┘
│ │
│ ┌────────▼─────────┐
│ │ Download Worker │
│ │ (Aria2 + API) │
│ └──────────────────┘
│ ┌──────────────────┐ ┌─────────────────┐
└─────────────▶│ TS Backend │────▶│ TMS API │
│ (Express:3002) │ │ (REST API) │
└────────┬─────────┘ └─────────────────┘
┌────────▼─────────┐
│ Permission Worker│
└──────────────────┘
┌───────────────────────┴───────────────────────┐
│ Supabase │
│ (Realtime subscriptions + PostgreSQL) │
└───────────────────────────────────────────────┘
```
## Key API Endpoints
### TMS Permission
- `POST /api/tms/submit` - Submit permission request
- `GET /api/tms/submissions` - Get submission history
- `DELETE /api/tms/submissions/:id` - Delete submission
### Raw Download (API Mode)
- `POST /api/raw-files/list` - List files from NAS
- `POST /api/raw-files/download` - Create download job
### Raw Download (Sharing Link)
- `POST /api/sharing-link/get-from-db` - Get sharing link from MongoDB
- `POST /api/sharing-link/process` - Process sharing link
- `POST /api/sharing-link/download` - Download from sharing link
### Download Management
- `GET /api/downloads` - Get download queue/history
- `DELETE /api/downloads/:id` - Cancel/delete download
### Check Upload
- `POST /api/check/submit` - Submit check request
- `GET /api/check/history` - Get check history
## License
Internal tool for Lezhin Entertainment.

80
aria2/AUTHORS Executable file
View File

@@ -0,0 +1,80 @@
These are people who made lots of contributions:
Tatsuhiro Tsujikawa <tatsuhiro.t at gmail dot com>
Ross Smith II <aria2spam at netebb dot com> (Windows port)
Nils Maier <maierman at web dot Germany>
The aria2 contributor's list extracted from commit logs [1]:
103yiran
Alexander Amanuel
Alexander Egorenkov
Ali MJ Al-Nasrawy
Anthony Bryan
Artur Petrov
Athmane Madjoudj
Ben Chaney
Char
Cristian Rodríguez
Dan Church
Dan Fandrich
David Macek
Emmanuel Engelhart
FH0
Florian Gamböck
Fredrik Fornwall
Hernan Martinez
ITriskTI
Igor Khomyakov
Jarda Snajdr
JimmyZ
Juan Francisco Cantero Hurtado
Kcchouette
Kurt Kartaltepe
Michał Górny
Michał Leśniewski
Mingye Wang
Natanael Copa
Nikita Ofitserov
Nils Maier
ORiON-
ReadmeCritic
Richard Nias
Rosen Penev
Ross Smith II
Ryan Steinmetz
Ryo ONODERA
Sarim Khan
Sergey Zolotarev
Sonny Piers
Sébastien Cabaniols
Tatsuhiro Tsujikawa
Torbjörn Lönnemark
Tse Kit Yam
Vasilij Schneidermann
Zoltan Toth-Czifra
a1346054
amtlib-dot-dll
c3mb0
carsonzhu
diadistis
eric1932
geetam
gilberto dos santos alves
gt
klemens
kwkam
luokar
mozillazg
multisnow
nicolov
nkh0472
oliviercommelarbre
qweaszxcdf
rotor
sleepymac
suzker
tcely
yixinBC
[1] https://gist.github.com/tatsuhiro-t/deaffeb064652104ad11

339
aria2/COPYING Executable file
View File

@@ -0,0 +1,339 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.

1181
aria2/ChangeLog Executable file

File diff suppressed because it is too large Load Diff

137
aria2/LICENSE.OpenSSL Executable file
View File

@@ -0,0 +1,137 @@
Certain source files in this program permit linking with the OpenSSL
library (http://www.openssl.org), which otherwise wouldn't be allowed
under the GPL. For purposes of identifying OpenSSL, most source files
giving this permission limit it to versions of OpenSSL having a license
identical to that listed in this file (LICENSE.OpenSSL). It is not
necessary for the copyright years to match between this file and the
OpenSSL version in question. However, note that because this file is
an extension of the license statements of these source files, this file
may not be changed except with permission from all copyright holders
of source files in this program which reference this file.
LICENSE ISSUES
==============
The OpenSSL toolkit stays under a dual license, i.e. both the conditions of
the OpenSSL License and the original SSLeay license apply to the toolkit.
See below for the actual license texts. Actually both licenses are BSD-style
Open Source licenses. In case of any license issues related to OpenSSL
please contact openssl-core@openssl.org.
OpenSSL License
---------------
/* ====================================================================
* Copyright (c) 1998-2001 The OpenSSL Project. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. All advertising materials mentioning features or use of this
* software must display the following acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
*
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
* endorse or promote products derived from this software without
* prior written permission. For written permission, please contact
* openssl-core@openssl.org.
*
* 5. Products derived from this software may not be called "OpenSSL"
* nor may "OpenSSL" appear in their names without prior written
* permission of the OpenSSL Project.
*
* 6. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
*
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ====================================================================
*
* This product includes cryptographic software written by Eric Young
* (eay@cryptsoft.com). This product includes software written by Tim
* Hudson (tjh@cryptsoft.com).
*
*/
Original SSLeay License
-----------------------
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
* All rights reserved.
*
* This package is an SSL implementation written
* by Eric Young (eay@cryptsoft.com).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
* the following conditions are aheared to. The following conditions
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
* except that the holder is Tim Hudson (tjh@cryptsoft.com).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
* If this package is used in a product, Eric Young should be given attribution
* as the author of the parts of the library used.
* This can be in the form of a textual message at program startup or
* in documentation (online or textual) provided with the package.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
* Eric Young (eay@cryptsoft.com)"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
* "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* The licence and distribution terms for any publically available version or
* derivative of this code cannot be changed. i.e. this code cannot simply be
* copied and put under another distribution licence
* [including the GNU Public Licence.]
*/

7
aria2/NEWS Executable file
View File

@ -0,0 +1,7 @@
aria2 1.37.0
============
Release Note
------------
See https://github.com/aria2/aria2/releases/tag/release-1.37.0

904
aria2/README.html Executable file
View File

@ -0,0 +1,904 @@
<?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="Docutils 0.19: https://docutils.sourceforge.io/" />
<title>aria2 - The ultra fast download utility</title>
<style type="text/css">
/*
:Author: David Goodger (goodger@python.org)
:Id: $Id: html4css1.css 8954 2022-01-20 10:10:25Z milde $
:Copyright: This stylesheet has been placed in the public domain.
Default cascading style sheet for the HTML output of Docutils.
See https://docutils.sourceforge.io/docs/howto/html-stylesheets.html for how to
customize this style sheet.
*/
/* used to remove borders from tables and images */
.borderless, table.borderless td, table.borderless th {
border: 0 }
table.borderless td, table.borderless th {
/* Override padding for "table.docutils td" with "! important".
The right padding separates the table cells. */
padding: 0 0.5em 0 0 ! important }
.first {
/* Override more specific margin styles with "! important". */
margin-top: 0 ! important }
.last, .with-subtitle {
margin-bottom: 0 ! important }
.hidden {
display: none }
.subscript {
vertical-align: sub;
font-size: smaller }
.superscript {
vertical-align: super;
font-size: smaller }
a.toc-backref {
text-decoration: none ;
color: black }
blockquote.epigraph {
margin: 2em 5em ; }
dl.docutils dd {
margin-bottom: 0.5em }
object[type="image/svg+xml"], object[type="application/x-shockwave-flash"] {
overflow: hidden;
}
/* Uncomment (and remove this text!) to get bold-faced definition list terms
dl.docutils dt {
font-weight: bold }
*/
div.abstract {
margin: 2em 5em }
div.abstract p.topic-title {
font-weight: bold ;
text-align: center }
div.admonition, div.attention, div.caution, div.danger, div.error,
div.hint, div.important, div.note, div.tip, div.warning {
margin: 2em ;
border: medium outset ;
padding: 1em }
div.admonition p.admonition-title, div.hint p.admonition-title,
div.important p.admonition-title, div.note p.admonition-title,
div.tip p.admonition-title {
font-weight: bold ;
font-family: sans-serif }
div.attention p.admonition-title, div.caution p.admonition-title,
div.danger p.admonition-title, div.error p.admonition-title,
div.warning p.admonition-title, .code .error {
color: red ;
font-weight: bold ;
font-family: sans-serif }
/* Uncomment (and remove this text!) to get reduced vertical space in
compound paragraphs.
div.compound .compound-first, div.compound .compound-middle {
margin-bottom: 0.5em }
div.compound .compound-last, div.compound .compound-middle {
margin-top: 0.5em }
*/
div.dedication {
margin: 2em 5em ;
text-align: center ;
font-style: italic }
div.dedication p.topic-title {
font-weight: bold ;
font-style: normal }
div.figure {
margin-left: 2em ;
margin-right: 2em }
div.footer, div.header {
clear: both;
font-size: smaller }
div.line-block {
display: block ;
margin-top: 1em ;
margin-bottom: 1em }
div.line-block div.line-block {
margin-top: 0 ;
margin-bottom: 0 ;
margin-left: 1.5em }
div.sidebar {
margin: 0 0 0.5em 1em ;
border: medium outset ;
padding: 1em ;
background-color: #ffffee ;
width: 40% ;
float: right ;
clear: right }
div.sidebar p.rubric {
font-family: sans-serif ;
font-size: medium }
div.system-messages {
margin: 5em }
div.system-messages h1 {
color: red }
div.system-message {
border: medium outset ;
padding: 1em }
div.system-message p.system-message-title {
color: red ;
font-weight: bold }
div.topic {
margin: 2em }
h1.section-subtitle, h2.section-subtitle, h3.section-subtitle,
h4.section-subtitle, h5.section-subtitle, h6.section-subtitle {
margin-top: 0.4em }
h1.title {
text-align: center }
h2.subtitle {
text-align: center }
hr.docutils {
width: 75% }
img.align-left, .figure.align-left, object.align-left, table.align-left {
clear: left ;
float: left ;
margin-right: 1em }
img.align-right, .figure.align-right, object.align-right, table.align-right {
clear: right ;
float: right ;
margin-left: 1em }
img.align-center, .figure.align-center, object.align-center {
display: block;
margin-left: auto;
margin-right: auto;
}
table.align-center {
margin-left: auto;
margin-right: auto;
}
.align-left {
text-align: left }
.align-center {
clear: both ;
text-align: center }
.align-right {
text-align: right }
/* reset inner alignment in figures */
div.align-right {
text-align: inherit }
/* div.align-center * { */
/* text-align: left } */
.align-top {
vertical-align: top }
.align-middle {
vertical-align: middle }
.align-bottom {
vertical-align: bottom }
ol.simple, ul.simple {
margin-bottom: 1em }
ol.arabic {
list-style: decimal }
ol.loweralpha {
list-style: lower-alpha }
ol.upperalpha {
list-style: upper-alpha }
ol.lowerroman {
list-style: lower-roman }
ol.upperroman {
list-style: upper-roman }
p.attribution {
text-align: right ;
margin-left: 50% }
p.caption {
font-style: italic }
p.credits {
font-style: italic ;
font-size: smaller }
p.label {
white-space: nowrap }
p.rubric {
font-weight: bold ;
font-size: larger ;
color: maroon ;
text-align: center }
p.sidebar-title {
font-family: sans-serif ;
font-weight: bold ;
font-size: larger }
p.sidebar-subtitle {
font-family: sans-serif ;
font-weight: bold }
p.topic-title {
font-weight: bold }
pre.address {
margin-bottom: 0 ;
margin-top: 0 ;
font: inherit }
pre.literal-block, pre.doctest-block, pre.math, pre.code {
margin-left: 2em ;
margin-right: 2em }
pre.code .ln { color: grey; } /* line numbers */
pre.code, code { background-color: #eeeeee }
pre.code .comment, code .comment { color: #5C6576 }
pre.code .keyword, code .keyword { color: #3B0D06; font-weight: bold }
pre.code .literal.string, code .literal.string { color: #0C5404 }
pre.code .name.builtin, code .name.builtin { color: #352B84 }
pre.code .deleted, code .deleted { background-color: #DEB0A1}
pre.code .inserted, code .inserted { background-color: #A3D289}
span.classifier {
font-family: sans-serif ;
font-style: oblique }
span.classifier-delimiter {
font-family: sans-serif ;
font-weight: bold }
span.interpreted {
font-family: sans-serif }
span.option {
white-space: nowrap }
span.pre {
white-space: pre }
span.problematic {
color: red }
span.section-subtitle {
/* font-size relative to parent (h1..h6 element) */
font-size: 80% }
table.citation {
border-left: solid 1px gray;
margin-left: 1px }
table.docinfo {
margin: 2em 4em }
table.docutils {
margin-top: 0.5em ;
margin-bottom: 0.5em }
table.footnote {
border-left: solid 1px black;
margin-left: 1px }
table.docutils td, table.docutils th,
table.docinfo td, table.docinfo th {
padding-left: 0.5em ;
padding-right: 0.5em ;
vertical-align: top }
table.docutils th.field-name, table.docinfo th.docinfo-name {
font-weight: bold ;
text-align: left ;
white-space: nowrap ;
padding-left: 0 }
/* "booktabs" style (no vertical lines) */
table.docutils.booktabs {
border: 0px;
border-top: 2px solid;
border-bottom: 2px solid;
border-collapse: collapse;
}
table.docutils.booktabs * {
border: 0px;
}
table.docutils.booktabs th {
border-bottom: thin solid;
text-align: left;
}
h1 tt.docutils, h2 tt.docutils, h3 tt.docutils,
h4 tt.docutils, h5 tt.docutils, h6 tt.docutils {
font-size: 100% }
ul.auto-toc {
list-style-type: none }
</style>
</head>
<body>
<div class="document" id="aria2-the-ultra-fast-download-utility">
<h1 class="title">aria2 - The ultra fast download utility</h1>
<div class="section" id="disclaimer">
<h1>Disclaimer</h1>
<p>This program comes with no warranty.
You must use this program at your own risk.</p>
</div>
<div class="section" id="introduction">
<h1>Introduction</h1>
<p>aria2 is a utility for downloading files. The supported protocols are
HTTP(S), FTP, SFTP, BitTorrent, and Metalink. aria2 can download a
file from multiple sources/protocols and tries to utilize your maximum
download bandwidth. It supports downloading a file from
HTTP(S)/FTP/SFTP and BitTorrent at the same time, while the data
downloaded from HTTP(S)/FTP/SFTP is uploaded to the BitTorrent
swarm. Using Metalink's chunk checksums, aria2 automatically validates
chunks of data while downloading a file like BitTorrent.</p>
<p>The project page is located at <a class="reference external" href="https://aria2.github.io/">https://aria2.github.io/</a>.</p>
<p>See the <a class="reference external" href="https://aria2.github.io/manual/en/html/">aria2 Online Manual</a> (<a class="reference external" href="https://aria2.github.io/manual/ru/html/">Russian translation</a>, <a class="reference external" href="https://aria2.github.io/manual/pt/html/">Portuguese
translation</a>) to learn
how to use aria2.</p>
</div>
<div class="section" id="features">
<h1>Features</h1>
<p>Here is a list of features:</p>
<ul class="simple">
<li>Command-line interface</li>
<li>Download files through HTTP(S)/FTP/SFTP/BitTorrent</li>
<li>Segmented downloading</li>
<li>Metalink version 4 (RFC 5854) support(HTTP/FTP/SFTP/BitTorrent)</li>
<li>Metalink version 3.0 support(HTTP/FTP/SFTP/BitTorrent)</li>
<li>Metalink/HTTP (RFC 6249) support</li>
<li>HTTP/1.1 implementation</li>
<li>HTTP Proxy support</li>
<li>HTTP BASIC authentication support</li>
<li>HTTP Proxy authentication support</li>
<li>Well-known environment variables for proxy: <tt class="docutils literal">http_proxy</tt>,
<tt class="docutils literal">https_proxy</tt>, <tt class="docutils literal">ftp_proxy</tt>, <tt class="docutils literal">all_proxy</tt> and <tt class="docutils literal">no_proxy</tt></li>
<li>HTTP gzip, deflate content encoding support</li>
<li>Verify peer using given trusted CA certificate in HTTPS</li>
<li>Client certificate authentication in HTTPS</li>
<li>Chunked transfer encoding support</li>
<li>Load Cookies from the file using the Firefox3 format, Chromium/Google Chrome
and the Mozilla/Firefox
(1.x/2.x)/Netscape format.</li>
<li>Save Cookies in the Mozilla/Firefox (1.x/2.x)/Netscape format.</li>
<li>Custom HTTP Header support</li>
<li>Persistent Connections support</li>
<li>FTP/SFTP through HTTP Proxy</li>
<li>Download/Upload speed throttling</li>
<li>BitTorrent extensions: Fast extension, DHT, PEX, MSE/PSE,
Multi-Tracker, UDP tracker</li>
<li>BitTorrent <a class="reference external" href="http://getright.com/seedtorrent.html">WEB-Seeding</a>.
aria2 requests chunk more than piece size to reduce the request
overhead. It also supports pipelined requests with piece size.</li>
<li>BitTorrent Local Peer Discovery</li>
<li>Rename/change the directory structure of BitTorrent downloads
completely</li>
<li>JSON-RPC (over HTTP and WebSocket)/XML-RPC interface</li>
<li>Run as a daemon process</li>
<li>Selective download in multi-file torrent/Metalink</li>
<li>Chunk checksum validation in Metalink</li>
<li>Can disable segmented downloading in Metalink</li>
<li>Netrc support</li>
<li>Configuration file support</li>
<li>Download URIs found in a text file or stdin and the destination
directory and output file name can be specified optionally</li>
<li>Parameterized URI support</li>
<li>IPv6 support with Happy Eyeballs</li>
<li>Disk cache to reduce disk activity</li>
</ul>
</div>
<div class="section" id="versioning-and-release-schedule">
<h1>Versioning and release schedule</h1>
<p>We use 3 numbers for the aria2 version: MAJOR.MINOR.PATCH. We will ship
MINOR updates on the 15th of every month. We may skip a release if we have
had no changes since the last release. The feature and documentation
freeze happens 10 days before the release day (the 5th day of the month)
for translation teams. We will raise an issue about the upcoming
release around that day.</p>
<p>We may release PATCH releases between regular releases if we have
security issues.</p>
<p>The MAJOR version will stay at 1 for the time being.</p>
</div>
<div class="section" id="how-to-get-source-code">
<h1>How to get source code</h1>
<p>We maintain the source code at Github:
<a class="reference external" href="https://github.com/aria2/aria2">https://github.com/aria2/aria2</a></p>
<p>To get the latest source code, run the following command:</p>
<pre class="literal-block">
$ git clone https://github.com/aria2/aria2.git
</pre>
<p>This will create an aria2 directory in your current directory and source
files are stored there.</p>
</div>
<div class="section" id="dependency">
<h1>Dependency</h1>
<table border="1" class="docutils">
<colgroup>
<col width="35%" />
<col width="65%" />
</colgroup>
<thead valign="bottom">
<tr><th class="head">features</th>
<th class="head">dependency</th>
</tr>
</thead>
<tbody valign="top">
<tr><td>HTTPS</td>
<td>OSX or GnuTLS or OpenSSL or Windows</td>
</tr>
<tr><td>SFTP</td>
<td>libssh2</td>
</tr>
<tr><td>BitTorrent</td>
<td>None. Optional: libnettle+libgmp or libgcrypt
or OpenSSL (see note)</td>
</tr>
<tr><td>Metalink</td>
<td>libxml2 or Expat.</td>
</tr>
<tr><td>Checksum</td>
<td>None. Optional: OSX or libnettle or libgcrypt
or OpenSSL or Windows (see note)</td>
</tr>
<tr><td>gzip, deflate in HTTP</td>
<td>zlib</td>
</tr>
<tr><td>Async DNS</td>
<td>C-Ares</td>
</tr>
<tr><td>Firefox3/Chromium cookie</td>
<td>libsqlite3</td>
</tr>
<tr><td>XML-RPC</td>
<td>libxml2 or Expat.</td>
</tr>
<tr><td>JSON-RPC over WebSocket</td>
<td>libnettle or libgcrypt or OpenSSL</td>
</tr>
</tbody>
</table>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">libxml2 has precedence over Expat if both libraries are installed.
If you prefer Expat, run configure with <tt class="docutils literal"><span class="pre">--without-libxml2</span></tt>.</p>
</div>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p>On Apple OSX, OS-level SSL/TLS support will be preferred. Hence
neither GnuTLS nor OpenSSL is required on that platform. If you'd
like to disable this behavior, run configure with
<tt class="docutils literal"><span class="pre">--without-appletls</span></tt>.</p>
<p>GnuTLS has precedence over OpenSSL if both libraries are installed.
If you prefer OpenSSL, run configure with <tt class="docutils literal"><span class="pre">--without-gnutls</span></tt>
<tt class="docutils literal"><span class="pre">--with-openssl</span></tt>.</p>
<p class="last">On Windows, there is SSL implementation available that is based on
the native Windows SSL capabilities (Schannel) and it will be
preferred. Hence neither GnuTLS nor OpenSSL is required on that
platform. If you'd like to disable this behavior, run configure
with <tt class="docutils literal"><span class="pre">--without-wintls</span></tt>.</p>
</div>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p>On Apple OSX, the OS-level checksum support will be preferred,
unless aria2 is configured with <tt class="docutils literal"><span class="pre">--without-appletls</span></tt>.</p>
<p>libnettle has precedence over libgcrypt if both libraries are
installed. If you prefer libgcrypt, run configure with
<tt class="docutils literal"><span class="pre">--without-libnettle</span> <span class="pre">--with-libgcrypt</span></tt>. If OpenSSL is selected over
GnuTLS, neither libnettle nor libgcrypt will be used.</p>
<p>If none of the optional dependencies are installed, an internal
implementation that only supports md5 and sha1 will be used.</p>
<p class="last">On Windows, there is SSL implementation available that is based on
the native Windows capabilities and it will be preferred, unless
aria2 is configured with <tt class="docutils literal"><span class="pre">--without-wintls</span></tt>.</p>
</div>
<p>A user can have one of the following configurations for SSL and crypto
libraries:</p>
<ul class="simple">
<li>OpenSSL</li>
<li>GnuTLS + libgcrypt</li>
<li>GnuTLS + libnettle</li>
<li>Apple TLS (OSX only)</li>
<li>Windows TLS (Windows only)</li>
</ul>
<p>You can disable BitTorrent and Metalink support by providing
<tt class="docutils literal"><span class="pre">--disable-bittorrent</span></tt> and <tt class="docutils literal"><span class="pre">--disable-metalink</span></tt> to the configure
script respectively.</p>
<p>To enable async DNS support, you need c-ares.</p>
<ul class="simple">
<li>c-ares: <a class="reference external" href="http://c-ares.haxx.se/">http://c-ares.haxx.se/</a></li>
</ul>
</div>
<div class="section" id="how-to-build">
<h1>How to build</h1>
<p>aria2 is primarily written in C++. Initially, it was written based on
C++98/C++03 standard features. We are now migrating aria2 to the C++11
standard. The current source code requires a C++11 aware compiler. For
well-known compilers, such as g++ and clang, the <tt class="docutils literal"><span class="pre">-std=c++11</span></tt> or
<tt class="docutils literal"><span class="pre">-std=c++0x</span></tt> flag must be supported.</p>
<p>To build aria2 from the source package, you need the following
development packages (package name may vary depending on the
distribution you use):</p>
<ul class="simple">
<li>libgnutls-dev (Required for HTTPS, BitTorrent, Checksum support)</li>
<li>nettle-dev (Required for BitTorrent, Checksum support)</li>
<li>libgmp-dev (Required for BitTorrent)</li>
<li>libssh2-1-dev (Required for SFTP support)</li>
<li>libc-ares-dev (Required for async DNS support)</li>
<li>libxml2-dev (Required for Metalink support)</li>
<li>zlib1g-dev (Required for gzip, deflate decoding support in HTTP)</li>
<li>libsqlite3-dev (Required for Firefox3/Chromium cookie support)</li>
<li>pkg-config (Required to detect installed libraries)</li>
</ul>
<p>You can use libgcrypt-dev instead of nettle-dev and libgmp-dev:</p>
<ul class="simple">
<li>libgpg-error-dev (Required for BitTorrent, Checksum support)</li>
<li>libgcrypt-dev (Required for BitTorrent, Checksum support)</li>
</ul>
<p>You can use libssl-dev instead of
libgnutls-dev, nettle-dev, libgmp-dev, libgpg-error-dev and libgcrypt-dev:</p>
<ul class="simple">
<li>libssl-dev (Required for HTTPS, BitTorrent, Checksum support)</li>
</ul>
<p>You can use libexpat1-dev instead of libxml2-dev:</p>
<ul class="simple">
<li>libexpat1-dev (Required for Metalink support)</li>
</ul>
<p>On Fedora you need the following packages: gcc, gcc-c++, kernel-devel,
libgcrypt-devel, libxml2-devel, openssl-devel, gettext-devel, cppunit</p>
<p>If you downloaded source code from a git repository, you have to install
the following packages to get autoconf macros:</p>
<ul class="simple">
<li>libxml2-dev</li>
<li>libcppunit-dev</li>
<li>autoconf</li>
<li>automake</li>
<li>autotools-dev</li>
<li>autopoint</li>
<li>libtool</li>
</ul>
<p>And run the following command to generate configure script and other files
necessary to build the program:</p>
<pre class="literal-block">
$ autoreconf -i
</pre>
<p>Also, you need <a class="reference external" href="http://sphinx-doc.org/">Sphinx</a> to build the man page.</p>
<p>If you are building aria2 for Mac OS X, take a look at
the makerelease-osx.mk GNU Make makefile.</p>
<p>The quickest way to build aria2 is first to run configure script:</p>
<pre class="literal-block">
$ ./configure
</pre>
<p>To build statically linked aria2, use <tt class="docutils literal">ARIA2_STATIC=yes</tt>
command-line option:</p>
<pre class="literal-block">
$ ./configure ARIA2_STATIC=yes
</pre>
<p>After configuration is done, run <tt class="docutils literal">make</tt> to compile the program:</p>
<pre class="literal-block">
$ make
</pre>
<p>See <a class="reference internal" href="#cross-compiling-windows-binary">Cross-compiling Windows binary</a> to create a Windows binary.
See <a class="reference internal" href="#cross-compiling-android-binary">Cross-compiling Android binary</a> to create an Android binary.</p>
<p>The configure script checks available libraries and enables as many
features as possible except for experimental features not enabled by
default.</p>
<p>Since 1.1.0, aria2 checks the certificate of HTTPS servers by default.
If you build with OpenSSL or the recent version of GnuTLS which has
<tt class="docutils literal">gnutls_certificate_set_x509_system_trust()</tt> function and the
library is properly configured to locate the system-wide CA
certificates store, aria2 will automatically load those certificates
at the startup. If it is not the case, I recommend supplying the path
to the CA bundle file. For example, in Debian the path to CA bundle
file is '/etc/ssl/certs/ca-certificates.crt' (in ca-certificates
package). This may vary depending on your distribution. You can give
it to configure script using <tt class="docutils literal"><span class="pre">--with-ca-bundle</span> option</tt>:</p>
<pre class="literal-block">
$ ./configure --with-ca-bundle='/etc/ssl/certs/ca-certificates.crt'
$ make
</pre>
<p>Without <tt class="docutils literal"><span class="pre">--with-ca-bundle</span></tt> option, you will encounter the error when
accessing HTTPS servers because the certificate cannot be verified
without the CA bundle. In such a case, you can specify the CA bundle file
using aria2's <tt class="docutils literal"><span class="pre">--ca-certificate</span></tt> option. If you don't have the CA bundle
file installed, then the last resort is to disable the certificate
validation using <tt class="docutils literal"><span class="pre">--check-certificate=false</span></tt>.</p>
<p>Using the native OSX (AppleTLS) and/or Windows (WinTLS) implementation
will automatically use the system certificate store, so
<tt class="docutils literal"><span class="pre">--with-ca-bundle</span></tt> is not necessary and will be ignored when using
these implementations.</p>
<p>By default, the bash_completion file named <tt class="docutils literal">aria2c</tt> is installed to
the directory <tt class="docutils literal">$prefix/share/doc/aria2/bash_completion</tt>. To change
the install directory of the file, use <tt class="docutils literal"><span class="pre">--with-bashcompletiondir</span></tt>
option.</p>
<p>After a <tt class="docutils literal">make</tt>, the executable is located at <tt class="docutils literal">src/aria2c</tt>.</p>
<p>aria2 uses CppUnit for automated unit testing. To run the unit test:</p>
<pre class="literal-block">
$ make check
</pre>
</div>
<div class="section" id="cross-compiling-windows-binary">
<h1>Cross-compiling Windows binary</h1>
<p>In this section, we describe how to build a Windows binary using a
mingw-w64 (<a class="reference external" href="http://mingw-w64.org/doku.php">http://mingw-w64.org/doku.php</a>) cross-compiler on Debian
Linux. The MinGW (<a class="reference external" href="http://www.mingw.org/">http://www.mingw.org/</a>) may not be able to build
aria2.</p>
<p>The easiest way to build Windows binary is using Dockerfile.mingw. See
Dockerfile.mingw how to build a binary. If you cannot use Dockerfile,
then continue to read the following paragraphs.</p>
<p>Basically, after compiling and installing depended libraries, you can
do cross-compile just passing appropriate <tt class="docutils literal"><span class="pre">--host</span></tt> option and
specifying <tt class="docutils literal">CPPFLAGS</tt>, <tt class="docutils literal">LDFLAGS</tt>, and <tt class="docutils literal">PKG_CONFIG_LIBDIR</tt>
variables to configure. For convenience and to lower our own
development cost, we provide an easier way to configure the build
settings.</p>
<p><tt class="docutils literal"><span class="pre">mingw-config</span></tt> script is a configure script wrapper for mingw-w64.
We use it to create official Windows build. This script assumes
the following libraries have been built for cross-compile:</p>
<ul class="simple">
<li>c-ares</li>
<li>expat</li>
<li>sqlite3</li>
<li>zlib</li>
<li>libssh2</li>
<li>cppunit</li>
</ul>
<p>Some environment variables can be adjusted to change build settings:</p>
<dl class="docutils">
<dt><tt class="docutils literal">HOST</tt></dt>
<dd>cross-compile to build programs to run on <tt class="docutils literal">HOST</tt>. It defaults to
<tt class="docutils literal"><span class="pre">i686-w64-mingw32</span></tt>. To build a 64bit binary, specify
<tt class="docutils literal"><span class="pre">x86_64-w64-mingw32</span></tt>.</dd>
<dt><tt class="docutils literal">PREFIX</tt></dt>
<dd>Prefix to the directory where dependent libraries are installed. It
defaults to <tt class="docutils literal"><span class="pre">/usr/local/$HOST</span></tt>. <tt class="docutils literal"><span class="pre">-I$PREFIX/include</span></tt> will be
added to <tt class="docutils literal">CPPFLAGS</tt>. <tt class="docutils literal"><span class="pre">-L$PREFIX/lib</span></tt> will be added to
<tt class="docutils literal">LDFLAGS</tt>. <tt class="docutils literal">$PREFIX/lib/pkgconfig</tt> will be set to
<tt class="docutils literal">PKG_CONFIG_LIBDIR</tt>.</dd>
</dl>
<p>For example, to build a 64bit binary do this:</p>
<pre class="literal-block">
$ HOST=x86_64-w64-mingw32 ./mingw-config
</pre>
<p>If you want libaria2 dll with <tt class="docutils literal"><span class="pre">--enable-libaria2</span></tt>, then don't use
<tt class="docutils literal">ARIA2_STATIC=yes</tt> and prepare the DLL version of external
libraries.</p>
</div>
<div class="section" id="cross-compiling-android-binary">
<h1>Cross-compiling Android binary</h1>
<p>In this section, we describe how to build Android binary using Android
NDK cross-compiler on Debian Linux.</p>
<p>At the time of this writing, Android NDK r21e should compile aria2
without errors.</p>
<p><tt class="docutils literal"><span class="pre">android-config</span></tt> script is a configure script wrapper for Android
build. We use it to create an official Android build. This script
assumes the following libraries have been built for cross-compile:</p>
<ul class="simple">
<li>c-ares</li>
<li>openssl</li>
<li>expat</li>
<li>zlib</li>
<li>libssh2</li>
</ul>
<p>When building the above libraries, make sure that disable shared
library and enable only static library. We are going to link those
libraries statically.</p>
<p><tt class="docutils literal"><span class="pre">android-config</span></tt> assumes that <tt class="docutils literal">$ANDROID_HOME</tt> and <tt class="docutils literal">$NDK</tt>
environment variables are defined.</p>
<p>We currently use Android NDK r21e. <tt class="docutils literal">$NDK</tt> should point to the
directory to Android NDK. The build tools will be found under
<tt class="docutils literal"><span class="pre">$NDK/toolchains/llvm/prebuilt/linux-x86_64/bin/</span></tt>.</p>
<p>All the dependent libraries must be installed under
<tt class="docutils literal">$ANDROID_HOME/usr/local</tt>.</p>
<p>After <tt class="docutils literal"><span class="pre">android-config</span></tt>, run <tt class="docutils literal">make</tt> to compile sources.</p>
</div>
<div class="section" id="building-documentation">
<h1>Building documentation</h1>
<p><a class="reference external" href="http://sphinx-doc.org/">Sphinx</a> is used to building the
documentation. aria2 man pages will be build when you run <tt class="docutils literal">make</tt> if
they are not up-to-date. You can also build an HTML version of the aria2
man page by <tt class="docutils literal">make html</tt>. The HTML version manual is also available
<a class="reference external" href="https://aria2.github.io/manual/en/html/">online</a> (<a class="reference external" href="https://aria2.github.io/manual/ru/html/">Russian
translation</a>, <a class="reference external" href="https://aria2.github.io/manual/pt/html/">Portuguese
translation</a>).</p>
</div>
<div class="section" id="bittorrent">
<h1>BitTorrent</h1>
<div class="section" id="about-file-names">
<h2>About file names</h2>
<p>The file name of the downloaded file is determined as follows:</p>
<dl class="docutils">
<dt>single-file mode</dt>
<dd>If &quot;name&quot; key is present in .torrent file, the file name is the value
of &quot;name&quot; key. Otherwise, the file name is the base name of .torrent
file appended by &quot;.file&quot;. For example, .torrent file is
&quot;test.torrent&quot;, then file name is &quot;test.torrent.file&quot;. The
directory to store the downloaded file can be specified by -d
option.</dd>
<dt>multi-file mode</dt>
<dd>The complete directory/file structure mentioned in .torrent file
is created. The directory to store the top directory of
downloaded files can be specified by -d option.</dd>
</dl>
<p>Before download starts, a complete directory structure is created if
needed. By default, aria2 opens at most 100 files mentioned in
.torrent file, and directly writes to and reads from these files.
The number of files to open simultaneously can be controlled by
<tt class="docutils literal"><span class="pre">--bt-max-open-files</span></tt> option.</p>
</div>
<div class="section" id="dht">
<h2>DHT</h2>
<p>aria2 supports mainline compatible DHT. By default, the routing table
for IPv4 DHT is saved to <tt class="docutils literal">$XDG_CACHE_HOME/aria2/dht.dat</tt> and the
routing table for IPv6 DHT is saved to
<tt class="docutils literal">$XDG_CACHE_HOME/aria2/dht6.dat</tt> unless files exist at
<tt class="docutils literal"><span class="pre">$HOME/.aria2/dht.dat</span></tt> or <tt class="docutils literal"><span class="pre">$HOME/.aria2/dht6.dat</span></tt>. aria2 uses the
same port number to listen on for both IPv4 and IPv6 DHT.</p>
</div>
<div class="section" id="udp-tracker">
<h2>UDP tracker</h2>
<p>UDP tracker support is enabled when IPv4 DHT is enabled. The port
number of the UDP tracker is shared with DHT. Use <tt class="docutils literal"><span class="pre">--dht-listen-port</span></tt>
option to change the port number.</p>
</div>
<div class="section" id="other-things-should-be-noted">
<h2>Other things should be noted</h2>
<ul class="simple">
<li><tt class="docutils literal"><span class="pre">-o</span></tt> option is used to change the file name of .torrent file itself,
not a file name of a file in .torrent file. For this purpose, use
<tt class="docutils literal"><span class="pre">--index-out</span></tt> option instead.</li>
<li>The port numbers that aria2 uses by default are 6881-6999 for TCP
and UDP.</li>
<li>aria2 doesn't configure port-forwarding automatically. Please
configure your router or firewall manually.</li>
<li>The maximum number of peers is 55. This limit may be exceeded when
the download rate is low. This download rate can be adjusted using
<tt class="docutils literal"><span class="pre">--bt-request-peer-speed-limit</span></tt> option.</li>
<li>As of release 0.10.0, aria2 stops sending request messages after
selective download completes.</li>
</ul>
</div>
</div>
<div class="section" id="metalink">
<h1>Metalink</h1>
<p>The current implementation supports HTTP(S)/FTP/SFTP/BitTorrent. The
other P2P protocols are ignored. Both Metalink4 (RFC 5854) and
Metalink version 3.0 documents are supported.</p>
<p>For checksum verification, md5, sha-1, sha-224, sha-256, sha-384, and
sha-512 are supported. If multiple hash algorithms are provided, aria2
uses a stronger one. If whole file checksum verification fails, aria2
doesn't retry the download and just exits with a non-zero return code.</p>
<p>The supported user preferences are version, language, location,
protocol, and os.</p>
<p>If chunk checksums are provided in the Metalink file, aria2 automatically
validates chunks of data during download. This behavior can be turned
off by a command-line option.</p>
<p>If a signature is included in a Metalink file, aria2 saves it as a file
after the completion of the download. The file name is download
file name + &quot;.sig&quot;. If the same file already exists, the signature file is
not saved.</p>
<p>In Metalink4, a multi-file torrent could appear in metalink:metaurl
element. Since aria2 cannot download 2 same torrents at the same
time, aria2 groups files in metalink:file element which has the same
BitTorrent metaurl, and downloads them from a single BitTorrent swarm.
This is a basically multi-file torrent download with file selection, so
the adjacent files which are not in Metalink document but share the same
piece with the selected file are also created.</p>
<p>If relative URI is specified in metalink:url or metalink:metaurl
element, aria2 uses the URI of Metalink file as base URI to resolve
the relative URI. If relative URI is found in the Metalink file which is
read from the local disk, aria2 uses the value of <tt class="docutils literal"><span class="pre">--metalink-base-uri</span></tt>
option as base URI. If this option is not specified, the relative URI
will be ignored.</p>
</div>
<div class="section" id="metalink-http">
<h1>Metalink/HTTP</h1>
<p>The current implementation only uses rel=duplicate links. aria2
understands Digest header fields and checks whether they match the
digest value from other sources. If it differs, aria2 drops the connection.
aria2 also uses this digest value to perform checksum verification
after the download is finished. aria2 recognizes geo value. To tell aria2
which location you prefer, you can use <tt class="docutils literal"><span class="pre">--metalink-location</span></tt> option.</p>
</div>
<div class="section" id="netrc">
<h1>netrc</h1>
<p>netrc support is enabled by default for HTTP(S)/FTP/SFTP. To disable
netrc support, specify -n command-line option. Your .netrc file
should have correct permissions(600).</p>
</div>
<div class="section" id="websocket">
<h1>WebSocket</h1>
<p>The WebSocket server embedded in aria2 implements the specification
defined in RFC 6455. The supported protocol version is 13.</p>
</div>
<div class="section" id="libaria2">
<h1>libaria2</h1>
<p>The libaria2 is a C++ library that offers aria2 functionality to the
client code. Currently, libaria2 is not built by default. To enable
libaria2, use <tt class="docutils literal"><span class="pre">--enable-libaria2</span></tt> configure option. By default,
only the shared library is built. To build a static library, use
<tt class="docutils literal"><span class="pre">--enable-static</span></tt> configure option as well. See libaria2
documentation to know how to use API.</p>
</div>
<div class="section" id="references">
<h1>References</h1>
<ul class="simple">
<li><a class="reference external" href="https://aria2.github.io/manual/en/html/">aria2 Online Manual</a></li>
<li><a class="reference external" href="https://aria2.github.io/">https://aria2.github.io/</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc959">RFC 959 FILE TRANSFER PROTOCOL (FTP)</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc1738">RFC 1738 Uniform Resource Locators (URL)</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc2428">RFC 2428 FTP Extensions for IPv6 and NATs</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc2616">RFC 2616 Hypertext Transfer Protocol -- HTTP/1.1</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc3659">RFC 3659 Extensions to FTP</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc3986">RFC 3986 Uniform Resource Identifier (URI): Generic Syntax</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc4038">RFC 4038 Application Aspects of IPv6 Transition</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc5854">RFC 5854 The Metalink Download Description Format</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc6249">RFC 6249 Metalink/HTTP: Mirrors and Hashes</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc6265">RFC 6265 HTTP State Management Mechanism</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc6266">RFC 6266 Use of the Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc6455">RFC 6455 The WebSocket Protocol</a></li>
<li><a class="reference external" href="http://tools.ietf.org/html/rfc6555">RFC 6555 Happy Eyeballs: Success with Dual-Stack Hosts</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0003.html">The BitTorrent Protocol Specification</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0005.html">BitTorrent: DHT Protocol</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0006.html">BitTorrent: Fast Extension</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0007.html">BitTorrent: IPv6 Tracker Extension</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0009.html">BitTorrent: Extension for Peers to Send Metadata Files</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0010.html">BitTorrent: Extension Protocol</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0012.html">BitTorrent: Multitracker Metadata Extension</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0015.html">BitTorrent: UDP Tracker Protocol for BitTorrent</a>
and <a class="reference external" href="http://www.rasterbar.com/products/libtorrent/udp_tracker_protocol.html">BitTorrent udp-tracker protocol specification</a>.</li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0019.html">BitTorrent: WebSeed - HTTP/FTP Seeding (GetRight style)</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0027.html">BitTorrent: Private Torrents</a></li>
<li><a class="reference external" href="http://www.bittorrent.org/beps/bep_0032.html">BitTorrent: BitTorrent DHT Extensions for IPv6</a></li>
<li><a class="reference external" href="http://wiki.vuze.com/w/Message_Stream_Encryption">BitTorrent: Message Stream Encryption</a></li>
<li><a class="reference external" href="https://pdos.csail.mit.edu/~petar/papers/maymounkov-kademlia-lncs.pdf">Kademlia: A Peer-to-peer Information System Based on the XOR Metric</a></li>
</ul>
</div>
</div>
</body>
</html>

52
aria2/README.mingw Executable file
View File

@ -0,0 +1,52 @@
aria2 Windows build
===================
aria2 Windows build is provided in 2 flavors: 32bit version and 64bit
version. The executable was compiled using mingw-w64 cross compiler on
Ubuntu Linux.
The executable is statically linked, so no extra DLLs are
necessary. The linked libraries are:
* gmp 6.3.0
* expat 2.5.0
* sqlite 3.43.1
* zlib 1.3
* c-ares 1.19.1
* libssh2 1.11.0
This build has the following difference from the original release:
* 32bit version only: ``--disable-ipv6`` is enabled by default. (In
other words, IPv6 support is disabled by default).
Known Issues
------------
* TLSv1.3 does not work.
* --file-allocation=falloc uses SetFileValidData function to allocate
disk space without filling zero. But it has security
implications. Refer to
https://msdn.microsoft.com/en-us/library/windows/desktop/aa365544%28v=vs.85%29.aspx
for more details.
* When Ctrl-C is pressed, aria2 shows "Shutdown sequence
commencing... Press Ctrl-C again for emergency shutdown." But
mingw32 build cannot handle second Ctrl-C properly. The second
Ctrl-C just kills aria2 instantly without proper shutdown sequence
and you may lose data. So don't press Ctrl-C twice.
* --daemon option doesn't work.
* 32bit version only: When ``--disable-ipv6=false`` is given,
BitTorrent DHT may not work properly.
* 32bit version only: Most of the IPv6 functionality does not work
even if ``--disable-ipv6=false`` is given.
References
----------
* http://smithii.com/aria2
* http://kemovitra.blogspot.com/2009/12/download-aria2-163.html

BIN
aria2/aria2c.exe Executable file

Binary file not shown.

0
backend/__init__.py Executable file
View File

14
backend/common/__init__.py Executable file
View File

@ -0,0 +1,14 @@
"""
Common utilities package for DKI Download backend.
Contains shared logic used across multiple modules.
"""
# Re-export the shared helpers so callers can import from backend.common
# directly instead of reaching into the submodules.
from .destination_helper import get_download_destination_path
from .file_type_helper import is_folder_path, should_add_zip_extension, get_download_filename

# Public API of this package.
__all__ = [
    'get_download_destination_path',
    'is_folder_path',
    'should_add_zip_extension',
    'get_download_filename',
]

View File

@ -0,0 +1,45 @@
"""
Common utility for generating download destination paths.
Used by both API mode and Sharing Link mode.
"""
import os
from typing import Optional
def get_download_destination_path(
    ge_id: str,
    lang: str,
    base_path: Optional[str] = None
) -> str:
    """
    Build the destination directory path for a download batch.

    The final path component is "<ge_id>_<LANG>"; the language code is
    always upper-cased so folder names stay consistent (e.g. "1000_DE",
    never "1000_de").

    Args:
        ge_id: GE ID, e.g. "11".
        lang: Language code, e.g. "us" or "de" (upper-cased before use).
        base_path: Optional base directory override. When None, falls back
            to nas_service.DESTINATION_PATH.

    Returns:
        The joined destination path, e.g. "<base_path>/11_US".
    """
    if base_path is None:
        # Import here to avoid circular dependency between common/ and services/.
        from ..services import nas_service
        base_path = nas_service.DESTINATION_PATH

    folder_name = f"{ge_id}_{lang.upper()}"
    return os.path.join(base_path, folder_name)

View File

@ -0,0 +1,108 @@
"""
File Type Detection Helper - Unified logic for detecting files vs folders
This module provides a single source of truth for determining whether a path
represents a file or a folder across the entire project.
"""
import os
import re
from typing import Optional
def is_folder_path(file_path: str, file_name: Optional[str] = None) -> bool:
    """
    Decide whether a NAS path points to a folder rather than a file.

    Heuristic:
      * empty path                 -> False (not a folder)
      * trailing '/'               -> folder
      * final segment ends with a dot-extension (.psd, .jpg, ...) -> file
      * otherwise                  -> folder

    Args:
        file_path: Full path on the NAS (e.g. "/Comic_TMS_L/DKI/DE/x/001.psd").
        file_name: Accepted for interface compatibility; currently unused.

    Returns:
        True when the path looks like a folder, False when it looks like a file.
    """
    if not file_path:
        return False

    # An explicit trailing slash always marks a folder.
    if file_path.endswith('/'):
        return True

    # A "file" is anything whose last segment ends in .<ext>, where <ext>
    # contains neither '/' nor another '.', matching names like 001.psd,
    # archive.zip, page.psb, etc.
    looks_like_file = re.search(r'\.[^/.]+$', file_path) is not None
    return not looks_like_file
def should_add_zip_extension(file_path: str, file_name: str) -> bool:
    """
    Decide whether ".zip" must be appended to the download filename.

    The NAS API automatically zips folders on download, so folder
    downloads need a ".zip" suffix on the destination name — unless the
    name already carries one.

    Args:
        file_path: Full path on the NAS.
        file_name: Current file name.

    Returns:
        True when the path is a folder and the name lacks a ".zip" suffix.
    """
    if file_name.endswith('.zip'):
        # Name already zipped — never double the extension.
        return False
    return is_folder_path(file_path)
def get_download_filename(file_path: str, file_name: str) -> str:
    """
    Resolve the final destination filename for a download.

    Folders are delivered as zip archives by the NAS, so a ".zip" suffix
    is appended when needed; plain files keep their original name.

    Args:
        file_path: Full path on the NAS.
        file_name: Original file name.

    Returns:
        The filename to write to disk (with ".zip" appended for folders).
    """
    needs_zip = should_add_zip_extension(file_path, file_name)
    return f"{file_name}.zip" if needs_zip else file_name

143
backend/common/models.py Executable file
View File

@ -0,0 +1,143 @@
"""
Data models for file-centric download system.
Each download record represents ONE file, not a batch.
"""
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
from decimal import Decimal
class FileDownload(BaseModel):
    """Represents a single file download record in the database."""
    id: int
    # batch_id groups the files created together by one request.
    batch_id: str

    # File info
    ge_id: str
    lang: str
    file_name: str
    file_path: str  # Relative path in source

    # Mode & status
    mode: str  # 'api' or 'sharing'
    status: str  # 'pending', 'downloading', 'completed', 'failed', 'cancelled'

    # Paths
    destination_path: Optional[str] = None

    # Progress (sizes in bytes; percent stored as exact Decimal, 2 places)
    file_size: Optional[int] = None
    downloaded_size: int = 0
    progress_percent: Decimal = Decimal('0.00')

    # Timestamps
    created_at: datetime
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None

    # Error handling
    error_message: Optional[str] = None
    retry_count: int = 0

    # Metadata (sharing-mode link id / MongoDB source path, when applicable)
    sharing_id: Optional[str] = None
    mongodb_path: Optional[str] = None

    class Config:
        from_attributes = True  # For Pydantic v2 (orm_mode in v1)


# ==================== REQUEST MODELS ====================

class FileInfo(BaseModel):
    """Single file information for batch download requests."""
    name: str
    path: str  # Relative path in source
    isdir: bool = False  # For API mode
    is_folder: Optional[bool] = None  # For Sharing mode (alias)


class CreateDownloadBatchRequest(BaseModel):
    """
    Request to create a batch of file downloads (API mode).
    Each file will become a separate download record.
    """
    files: list[FileInfo]
    ge_id: str
    lang: str


class CreateSharingDownloadBatchRequest(BaseModel):
    """
    Request to create batch downloads from sharing link.
    """
    sharing_id: str
    files: list[FileInfo]
    # ge_id/lang are optional: when omitted, the server derives a
    # synthetic identity from the sharing link.
    ge_id: Optional[str] = None
    lang: Optional[str] = None


class UpdateDownloadRequest(BaseModel):
    """Request to update a single download (cancel/retry)."""
    action: str  # 'retry' or 'cancel'


# ==================== RESPONSE MODELS ====================

class DownloadBatchResponse(BaseModel):
    """Response after creating a batch of downloads."""
    success: bool
    batch_id: str
    file_count: int
    download_ids: list[int]  # List of created download IDs
    message: str


class DownloadResponse(BaseModel):
    """Response for single download operation."""
    success: bool
    download: Optional[FileDownload] = None
    message: str


class DownloadListResponse(BaseModel):
    """Response for listing downloads."""
    success: bool
    downloads: list[FileDownload]
    count: int


# ==================== BATCH GROUPING ====================

class DownloadBatch(BaseModel):
    """
    Grouped view of downloads for UI display.
    Groups files by batch_id for better UX.
    """
    batch_id: str
    ge_id: str
    lang: str
    mode: str

    # Batch-level stats (aggregated over member files)
    total_files: int
    completed_files: int
    failed_files: int
    total_size: int
    downloaded_size: int

    # Batch status (derived from files)
    status: str  # 'downloading', 'completed', 'partial_failed', 'failed'

    # Timestamps (min/max from files)
    created_at: datetime
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None

    # Duration (if completed)
    duration_seconds: Optional[float] = None

    # Individual files in this batch
    files: list[FileDownload]

212
backend/main.py Executable file
View File

@ -0,0 +1,212 @@
from .routes import tms_routes, raw_api_routes, raw_sharing_routes, downloads_routes, custom_paths_routes
from . import worker_downloads # New file-centric worker
from . import worker as backend_worker
from .services import nas_sharing_service
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import logging
import threading
import os
import signal
import sys
import atexit
from dotenv import load_dotenv
# Load environment variables from .env.local first, then .env
# (python-dotenv does not override keys already set, so .env.local wins).
load_dotenv('.env.local')
load_dotenv()

# Import routers

# Configure logging - output to both terminal and file.
# backend.log is written next to the package directory (project root).
LOG_FILE = os.path.join(os.path.dirname(
    os.path.dirname(__file__)), 'backend.log')

# Create formatter shared by both handlers.
log_formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# Root logger configuration: INFO level, so logger.debug calls elsewhere
# in this module are effectively suppressed.
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)

# Console handler (terminal)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(log_formatter)
root_logger.addHandler(console_handler)

# File handler (backend.log)
file_handler = logging.FileHandler(LOG_FILE, encoding='utf-8')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(log_formatter)
root_logger.addHandler(file_handler)

logger = logging.getLogger(__name__)
def cleanup_on_exit():
    """
    Graceful shutdown handler - cleanup downloads when server stops.
    Called automatically by atexit or signal handlers.

    Cancels every recorded aria2 task of a still-active download, then
    marks all active downloads as failed in the database. Every error is
    logged and swallowed so that cleanup can never block shutdown.
    """
    # NOTE(review): progress messages here use logger.debug, but the root
    # logger is configured at INFO above, so they will not be emitted —
    # consider logger.info for shutdown diagnostics.
    logger.debug("🛑 Server shutting down, cleaning up active downloads...")
    try:
        # Imported lazily so startup import order is unaffected.
        from .services import downloads_service
        from .services.aria2.download_manager import get_aria2_manager

        # Get all active downloads
        active_downloads = downloads_service.get_active_downloads()
        if not active_downloads:
            logger.debug("No active downloads to clean up")
            return
        logger.debug(f"Found {len(active_downloads)} active downloads")

        # Try to get aria2 manager; without it, DB state is still updated.
        try:
            manager = get_aria2_manager()
        except Exception as e:
            logger.warning(f"Could not get aria2 manager: {e}")
            manager = None

        # Cancel all aria2 tasks first (only downloads with a recorded GID).
        cancelled_count = 0
        for download in active_downloads:
            gid = download.get('aria2_gid')
            if gid and manager:
                try:
                    logger.debug(f"Cancelling aria2 task GID: {gid}")
                    manager.cancel_download(gid)
                    cancelled_count += 1
                except Exception as e:
                    logger.warning(f"Failed to cancel aria2 GID {gid}: {e}")
        if cancelled_count > 0:
            logger.debug(f"Cancelled {cancelled_count} aria2 tasks")

        # Update all active downloads to failed status so the UI does not
        # show them as stuck "downloading" after a restart.
        failed_count = 0
        for download in active_downloads:
            try:
                downloads_service.update_download_status(
                    download_id=download['id'],
                    status='failed',
                    error_message='Server was shut down during download'
                )
                failed_count += 1
            except Exception as e:
                logger.error(
                    f"Failed to update download {download['id']}: {e}")
        logger.debug(
            f"✅ Cleanup complete: {failed_count} downloads marked as failed")
    except Exception as e:
        logger.error(f"Error during cleanup: {e}", exc_info=True)
# Register cleanup handlers
atexit.register(cleanup_on_exit)


def signal_handler(sig, frame):
    """Handle SIGINT/SIGTERM gracefully."""
    # NOTE(review): cleanup_on_exit is also registered with atexit, and
    # sys.exit(0) triggers atexit handlers — so on a signal the cleanup
    # runs twice. The second pass finds no active downloads, so this is
    # wasteful but harmless; consider removing one of the two paths.
    logger.debug(f"Received signal {sig}, initiating graceful shutdown...")
    cleanup_on_exit()
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
app = FastAPI(
    title="DKI Download API",
    version="2.0.0",
    redirect_slashes=False  # Disable automatic slash redirects
)

# Configure CORS
# NOTE(review): browsers reject a wildcard origin combined with
# allow_credentials=True; restrict allow_origins before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins in development
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# ==================== REGISTER ROUTERS ====================
# Register all route modules
app.include_router(tms_routes.router)
app.include_router(raw_api_routes.router)
app.include_router(raw_sharing_routes.router)
app.include_router(downloads_routes.router)  # New unified downloads API
app.include_router(custom_paths_routes.router)  # Custom folder paths
# ==================== STARTUP EVENTS ====================
# NOTE(review): @app.on_event is deprecated in recent FastAPI in favor of
# lifespan handlers — presumably kept for compatibility; verify against the
# pinned fastapi version before migrating.
@app.on_event('startup')
def start_background_worker():
    """Start the background workers in daemon threads.

    - backend_worker: Polls pending submissions for automation
    - worker_downloads: Polls pending file downloads (NEW file-centric)
    - nas_sharing_service: Processes sharing links with Selenium
    - aria2_daemon: Fast parallel download engine (if enabled)

    All threads are daemons, so they die with the process. Any startup
    failure is logged but never prevents the API itself from serving.
    """
    try:
        # Start aria2 daemon if enabled (USE_ARIA2 defaults to "true").
        USE_ARIA2 = os.getenv('USE_ARIA2', 'true').lower() == 'true'
        if USE_ARIA2:
            try:
                from .services.aria2 import start_aria2_daemon
                # Shared RPC secret; falls back to a hard-coded default.
                aria2_secret = os.getenv(
                    'ARIA2_RPC_SECRET', 'dkidownload_secret_2025')
                if start_aria2_daemon(secret=aria2_secret):
                    logger.debug("✅ aria2c RPC daemon started on port 6800")
                else:
                    logger.warning(
                        "⚠️ aria2c failed to start, downloads will use requests")
            except Exception as e:
                logger.warning(
                    f"⚠️ aria2c not available: {e}, downloads will use requests")
        else:
            logger.debug(
                "aria2 disabled (USE_ARIA2=false), using requests for downloads")

        # Start submission worker
        t1 = threading.Thread(target=backend_worker.run_loop,
                              name='backend-worker', daemon=True)
        t1.start()

        # Start file download worker (NEW)
        t2 = threading.Thread(
            target=worker_downloads.start_worker, name='file-download-worker', daemon=True)
        t2.start()

        # Start sharing link worker (manages its own thread internally)
        nas_sharing_service.start_sharing_worker()
        logger.debug("Background workers started")
    except Exception as e:
        # Log but don't prevent app startup
        logger.exception('Failed to start background workers: %s', e)
# ==================== HEALTH CHECK ====================
@app.get("/")
def health_check():
    """Simple health check endpoint."""
    payload = {
        "status": "ok",
        "app": "DKI Download API",
        "version": "2.0.0",
    }
    return payload

18
backend/requirements.txt Executable file
View File

@ -0,0 +1,18 @@
fastapi==0.119.1
uvicorn[standard]==0.38.0
pymongo[srv]==4.15.3
python-dotenv==1.0.0
supabase==2.22.3
webdriver-manager==4.0.2
flask==2.0.1
werkzeug==2.0.2
jinja2==3.0.3
itsdangerous==2.0.1
click==8.0.3
markupsafe==2.0.1
pyinstaller==6.14.2
# selenium pinned to 4.20.0 to match the old project; upgrade only if a newer driver API is required
selenium==4.20.0
# aria2 RPC client for fast parallel downloads
aria2p==0.12.1
psutil==6.1.1

1
backend/routes/__init__.py Executable file
View File

@ -0,0 +1 @@
# Routes module

View File

@ -0,0 +1,225 @@
"""
Custom Paths Routes - Manage user-defined folder shortcuts for sharing links
"""
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
import logging
from ..services import supabase_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/custom-paths", tags=["Custom Paths"])
# ==================== REQUEST MODELS ====================
class CustomPathCreate(BaseModel):
    # Payload for creating (or upserting) a custom path shortcut.
    ge_id: str
    lang: str
    custom_path: str


class CustomPathUpdate(BaseModel):
    # Payload for updating only the stored path value.
    custom_path: str
# ==================== ROUTES ====================
@router.get('')
def list_all_custom_paths():
    """
    Return every custom path record, ordered by ge_id ascending.

    Response shape:
        {
          "success": true,
          "custom_paths": [{"ge_id": ..., "lang": ..., "custom_path": ...,
                            "created_at": ..., "updated_at": ...}, ...],
          "total": <number of records>
        }
    """
    try:
        client = supabase_service.get_supabase_client()

        # Fetch all rows, sorted by ge_id ASC.
        response = (
            client.table("custom_paths")
            .select("*")
            .order("ge_id", desc=False)
            .execute()
        )
        rows = response.data or []

        return {
            "success": True,
            "custom_paths": rows,
            "total": len(rows),
        }
    except Exception as e:
        logger.error(f"Error listing custom paths: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.get('/{ge_id}')
def get_custom_path(ge_id: str, lang: Optional[str] = None):
    """
    Get custom path for a specific GE ID.

    Query params:
        lang: Optional language filter (upper-cased before matching).

    Returns:
        {"success": true, "custom_path": "...", "ge_id": "...", "lang": "...",
         "exists": true, "created_at": ..., "updated_at": ...}
        or {"success": true, "custom_path": None, "exists": false} when no
        record matches.
    """
    try:
        client = supabase_service.get_supabase_client()

        # Query with ge_id
        query = client.table("custom_paths").select("*").eq("ge_id", ge_id)

        # Add lang filter if provided
        if lang:
            query = query.eq("lang", lang.upper())

        response = query.execute()
        if not response.data or len(response.data) == 0:
            return {
                "success": True,
                "custom_path": None,
                "exists": False
            }

        # When lang is omitted and several languages exist for this GE ID,
        # only the first row returned by the backend is reported.
        record = response.data[0]  # type: ignore
        # The isinstance guards defend against a non-dict row shape from the
        # client library; in the normal case each row is a plain dict.
        return {
            "success": True,
            "custom_path": record.get("custom_path") if isinstance(record, dict) else None,
            "ge_id": record.get("ge_id") if isinstance(record, dict) else None,
            "lang": record.get("lang") if isinstance(record, dict) else None,
            "exists": True,
            "created_at": record.get("created_at") if isinstance(record, dict) else None,
            "updated_at": record.get("updated_at") if isinstance(record, dict) else None
        }
    except Exception as e:
        logger.error(f"Error getting custom path: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.post('')
def create_or_update_custom_path(payload: CustomPathCreate):
    """
    Create or update the custom path for a (GE ID, lang) pair.

    Uses UPSERT logic: inserts a new row when none exists for the pair,
    otherwise updates the existing row.

    Body:
        {"ge_id": "1000", "lang": "DE", "custom_path": "/folder/subfolder"}

    Returns:
        {"success": true, "message": "...", "is_new": bool, "custom_path": "..."}

    Raises:
        HTTPException 400 when any field is missing/empty, 500 on backend errors.
    """
    # Local import keeps this fix self-contained.
    from datetime import datetime, timezone

    try:
        # Validate input
        if not payload.ge_id or not payload.lang or not payload.custom_path:
            raise HTTPException(
                status_code=400, detail="Thiếu thông tin: ge_id, lang, hoặc custom_path")

        client = supabase_service.get_supabase_client()
        lang = payload.lang.upper()

        # BUGFIX: the existence check must include lang. Checking ge_id only
        # made a save for e.g. (1000, DE) overwrite the row for (1000, US),
        # even though the table stores lang and the GET endpoint filters on it.
        existing = (
            client.table("custom_paths")
            .select("*")
            .eq("ge_id", payload.ge_id)
            .eq("lang", lang)
            .execute()
        )
        is_new = not existing.data or len(existing.data) == 0

        # BUGFIX: send a concrete ISO-8601 timestamp. The previous literal
        # string "NOW()" is not evaluated as SQL by the Supabase REST API
        # and is not a valid Postgres timestamp literal.
        data = {
            "ge_id": payload.ge_id,
            "lang": lang,
            "custom_path": payload.custom_path,
            "updated_at": datetime.now(timezone.utc).isoformat(),
        }

        if is_new:
            # Insert new record
            client.table("custom_paths").insert(data).execute()
            message = "Custom path đã được thêm"
        else:
            # Update the existing record for this exact (ge_id, lang) pair.
            client.table("custom_paths").update(data).eq(
                "ge_id", payload.ge_id).eq("lang", lang).execute()
            message = "Custom path đã được cập nhật"

        logger.debug(
            f"{'Created' if is_new else 'Updated'} custom path for {payload.ge_id} {payload.lang}: {payload.custom_path}")

        return {
            "success": True,
            "message": message,
            "is_new": is_new,
            "custom_path": payload.custom_path
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error saving custom path: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.delete('/{ge_id}')
def delete_custom_path(ge_id: str):
    """
    Delete the custom path stored for a GE ID.

    Returns:
        {"success": true, "message": "Custom path đã được xóa"}

    Raises:
        HTTPException 404 when no record exists, 500 on backend errors.
    """
    try:
        client = supabase_service.get_supabase_client()

        # Verify the record exists before attempting deletion.
        found = client.table("custom_paths").select(
            "*").eq("ge_id", ge_id).execute()
        if not found.data or len(found.data) == 0:
            raise HTTPException(
                status_code=404, detail="Custom path không tồn tại")

        # Remove the record.
        client.table("custom_paths").delete().eq("ge_id", ge_id).execute()
        logger.debug(f"Deleted custom path for {ge_id}")

        return {
            "success": True,
            "message": "Custom path đã được xóa",
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting custom path: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")

View File

@ -0,0 +1,401 @@
"""
Downloads Routes - File-centric RESTful API for download management.
Each endpoint operates on individual file downloads, not batches.
"""
from fastapi import APIRouter, HTTPException, Query
from pydantic import BaseModel
from typing import List, Dict, Optional
import logging
from ..services import downloads_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api", tags=["Downloads"])
# ==================== REQUEST MODELS ====================
class FileInfo(BaseModel):
    """Single file information."""
    name: str
    path: str  # relative path in the source (NAS or sharing link)
    isdir: bool = False
    is_folder: Optional[bool] = None  # Alias for isdir


class CreateBatchRequest(BaseModel):
    """Request to create a batch of file downloads (API mode)."""
    files: List[FileInfo]
    ge_id: str
    lang: str


class CreateSharingBatchRequest(BaseModel):
    """Request to create batch downloads from sharing link."""
    sharing_id: str
    files: List[FileInfo]
    # ge_id/lang optional: when omitted, a synthetic identity is derived
    # from the sharing link by the route handler.
    ge_id: Optional[str] = None
    lang: Optional[str] = None


class UpdateDownloadRequest(BaseModel):
    """Request to update a single download."""
    action: str  # "retry" or "cancel"
# ==================== DOWNLOAD ENDPOINTS ====================
@router.get('/downloads')
def get_all_downloads(
    status: Optional[str] = Query(None, description="Filter by status"),
    mode: Optional[str] = Query(
        None, description="Filter by mode (api/sharing)"),
    limit: int = Query(100, description="Max number of downloads to return")
):
    """
    List file downloads with optional filtering.

    Query params:
        status: pending, downloading, completed, failed, cancelled
        mode:   api, sharing
        limit:  maximum results (default 100)

    Returns individual file records (not batched); the frontend groups
    them by batch_id for display.
    """
    try:
        records = downloads_service.get_all_downloads(
            status=status,
            mode=mode,
            limit=limit,
        )
        return {
            "success": True,
            "downloads": records,
            "count": len(records),
        }
    except Exception as e:
        logger.error(f"Error getting downloads: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.get('/downloads/{download_id}')
def get_download_by_id(download_id: int):
    """Fetch one file download record by its numeric ID (404 when absent)."""
    try:
        record = downloads_service.get_download_by_id(download_id)
        if not record:
            raise HTTPException(
                status_code=404, detail="Download không tồn tại")
        return {"success": True, "download": record}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting download {download_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.get('/batches/{batch_id}')
def get_batch_downloads(batch_id: str):
    """Return every download in one batch together with its summary (404 if empty)."""
    try:
        batch_files = downloads_service.get_downloads_by_batch(batch_id)
        batch_summary = downloads_service.get_batch_summary(batch_id)
        if not batch_files:
            raise HTTPException(status_code=404, detail="Batch không tồn tại")
        return {
            "success": True,
            "batch": batch_summary,
            "downloads": batch_files,
            "count": len(batch_files),
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting batch {batch_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.post('/batches/api')
def create_api_batch(payload: CreateBatchRequest):
    """
    Create a batch of downloads for API mode (direct NAS access).
    Each file becomes a separate download record.

    Looks up the MongoDB source path for (ge_id, lang), computes the
    destination folder, and inserts one record per file via
    downloads_service.create_downloads_batch.

    Raises:
        HTTPException 400 when no files are given, 500 on service errors.
    """
    try:
        # Imported lazily to avoid import cycles at module load time.
        from ..common import get_download_destination_path
        from ..services import mongodb_service

        logger.debug(
            f"Creating API batch: {payload.ge_id}_{payload.lang}, {len(payload.files)} files")

        if not payload.files:
            raise HTTPException(
                status_code=400, detail="Không có file nào được chọn")

        # Get MongoDB path
        mongodb_path = mongodb_service.get_path_from_tms_data(
            payload.ge_id, payload.lang)

        # Calculate destination
        destination_path = get_download_destination_path(
            payload.ge_id, payload.lang)

        # Convert FileInfo to dicts; either isdir or is_folder marks a folder.
        files_data = [
            {
                "name": f.name,
                "path": f.path,
                "isdir": f.isdir or f.is_folder or False
            }
            for f in payload.files
        ]

        # Create batch
        result = downloads_service.create_downloads_batch(
            files=files_data,
            ge_id=payload.ge_id,
            lang=payload.lang,
            mode='api',
            mongodb_path=mongodb_path,
            destination_path=destination_path
        )
        if not result["success"]:
            raise HTTPException(status_code=500, detail=result["message"])

        logger.debug(
            f"Created API batch {result['batch_id']}: {result['file_count']} files")

        return {
            "success": True,
            "batch_id": result["batch_id"],
            "download_ids": result["download_ids"],
            "file_count": result["file_count"],
            "destination_path": destination_path,
            "mongodb_path": mongodb_path,
            "message": f"Đã tạo {result['file_count']} downloads"
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error creating API batch: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.post('/batches/sharing')
def create_sharing_batch(payload: CreateSharingBatchRequest):
    """
    Create a batch of downloads for Sharing mode (sharing link).
    Each file becomes a separate download record.

    When ge_id/lang are supplied, the destination folder and the sharing
    link (from MongoDB) are resolved for that pair; otherwise a synthetic
    identity "SHARING_<sharing_id>" / "LINK" is used with the default
    destination.

    Raises:
        HTTPException 400 when no files are given, 500 on service errors.
    """
    try:
        # Imported lazily to avoid import cycles at module load time.
        from ..common import get_download_destination_path
        from ..services import nas_service

        logger.debug(
            f"Creating Sharing batch: {payload.sharing_id}, {len(payload.files)} files")

        if not payload.files:
            raise HTTPException(
                status_code=400, detail="Không có file nào được chọn")

        # Determine GE ID and destination
        if payload.ge_id and payload.lang:
            from ..services import mongodb_service
            destination_path = get_download_destination_path(
                payload.ge_id, payload.lang)
            # For sharing mode, mongodb_path = sharing link (linkRaw) from MongoDB
            mongodb_path = mongodb_service.get_sharing_link_from_tms_data(
                payload.ge_id, payload.lang)
            ge_id = payload.ge_id
            lang = payload.lang
        else:
            # No identity supplied: fall back to the default destination and
            # a synthetic batch identity derived from the sharing id.
            destination_path = nas_service.DESTINATION_PATH
            mongodb_path = None
            ge_id = f"SHARING_{payload.sharing_id}"
            lang = "LINK"

        # Convert FileInfo to dicts; either isdir or is_folder marks a folder.
        files_data = [
            {
                "name": f.name,
                "path": f.path,
                "isdir": f.isdir or f.is_folder or False
            }
            for f in payload.files
        ]

        # Create batch
        result = downloads_service.create_downloads_batch(
            files=files_data,
            ge_id=ge_id,
            lang=lang,
            mode='sharing',
            sharing_id=payload.sharing_id,
            mongodb_path=mongodb_path,
            destination_path=destination_path
        )
        if not result["success"]:
            raise HTTPException(status_code=500, detail=result["message"])

        logger.debug(
            f"Created Sharing batch {result['batch_id']}: {result['file_count']} files")

        return {
            "success": True,
            "batch_id": result["batch_id"],
            "download_ids": result["download_ids"],
            "file_count": result["file_count"],
            "destination_path": destination_path,
            "sharing_id": payload.sharing_id,
            "message": f"Đã tạo {result['file_count']} downloads"
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error creating Sharing batch: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.patch('/downloads/{download_id}')
def update_download(download_id: int, payload: UpdateDownloadRequest):
    """
    Update a single file download.

    Supported actions:
      - "cancel": cancel a pending/downloading file
      - "retry":  re-queue a failed file
    Any other action yields 400; an unknown download_id yields 404.
    """
    try:
        action = payload.action
        if action not in ("cancel", "retry"):
            raise HTTPException(
                status_code=400,
                detail=f"Action không hợp lệ: {payload.action}. Chỉ chấp nhận 'cancel' hoặc 'retry'"
            )

        if action == "cancel":
            ok = downloads_service.cancel_download(download_id)
            success_message = f"Download {download_id} đã được hủy"
        else:
            ok = downloads_service.retry_download(download_id)
            success_message = f"Download {download_id} đã được đưa vào queue"

        if not ok:
            raise HTTPException(
                status_code=404, detail="Download không tồn tại")
        return {"success": True, "message": success_message}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating download {download_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.delete('/downloads/{download_id}')
def delete_download(download_id: int):
    """
    Delete one download record.

    Only records in a terminal state (completed/failed/cancelled) may be
    deleted; anything still pending or in flight is rejected with 400.
    """
    try:
        record = downloads_service.get_download_by_id(download_id)
        if not record:
            raise HTTPException(
                status_code=404, detail="Download không tồn tại")

        # Refuse to delete active (non-terminal) downloads.
        if record["status"] not in ("completed", "failed", "cancelled"):
            raise HTTPException(
                status_code=400,
                detail="Chỉ có thể xóa downloads đã hoàn thành hoặc thất bại"
            )

        if not downloads_service.delete_download(download_id):
            raise HTTPException(
                status_code=500, detail="Không thể xóa download")
        return {
            "success": True,
            "message": f"Download {download_id} đã được xóa"
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting download {download_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")
@router.delete('/batches/{batch_id}')
def delete_batch(batch_id: str):
    """
    Delete every download belonging to a batch.

    Rejected with 400 while any download in the batch is still pending or
    downloading; the whole batch must be terminal first.
    """
    try:
        downloads = downloads_service.get_downloads_by_batch(batch_id)
        if not downloads:
            raise HTTPException(status_code=404, detail="Batch không tồn tại")

        # Count records that are still active (non-terminal).
        active_count = len(
            [d for d in downloads if d["status"] in ("pending", "downloading")])
        if active_count > 0:
            raise HTTPException(
                status_code=400,
                detail=f"Batch còn {active_count} downloads đang active"
            )

        if not downloads_service.delete_batch(batch_id):
            raise HTTPException(status_code=500, detail="Không thể xóa batch")
        return {
            "success": True,
            "message": f"Batch {batch_id} đã được xóa ({len(downloads)} files)"
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting batch {batch_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {str(e)}")

251
backend/routes/raw_api_routes.py Executable file
View File

@ -0,0 +1,251 @@
"""
Raw Download Routes - API Mode
Handles NAS FileStation API downloads (với OTP authentication).
"""
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import List, Dict, Optional
import logging
from ..services import mongodb_service, nas_service, supabase_service, downloads_service
from ..common import get_download_destination_path
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/raw-files", tags=["Raw API"])
# ==================== REQUEST MODELS ====================
class RawFileListRequest(BaseModel):
    """Request body identifying the project whose raw files should be listed."""
    ge_id: str
    lang: str
class OTPAuthRequest(BaseModel):
    """One-time password used to establish the NAS FileStation session."""
    otp_code: str
class RawFileDownloadRequest(BaseModel):
    """Request body for queueing an API-mode download batch."""
    files: List[Dict]  # List of file objects with name, path, isdir
    ge_id: str
    lang: str
class RawFolderListRequest(BaseModel):
    """Request body for listing one NAS folder during navigation."""
    folder_path: str
    ge_id: str  # For context, though not strictly needed for folder listing
    lang: str
# ==================== FILE LISTING ENDPOINTS ====================
@router.post('/list')
def list_raw_files(payload: RawFileListRequest):
    """
    List raw files for a given GE ID and language.

    Possible statuses in the response body:
      - success:      files/folders found under the MongoDB-resolved path
      - otp_required: NAS session expired, OTP needed
      - error:        no path in MongoDB, NAS failure, missing folder, ...
    """
    try:
        # API mode resolves the NAS path exclusively from MongoDB.
        nas_path = mongodb_service.get_path_from_tms_data(
            payload.ge_id, payload.lang)
        if not nas_path:
            return {
                "status": "error",
                "message": f"Không tìm thấy đường dẫn cho GE ID {payload.ge_id} (Lang: {payload.lang})"
            }

        status, files, message = nas_service.get_files_for_path(nas_path)

        if status == "otp_required":
            return {"status": "otp_required", "message": message}

        if status == "success":
            return {
                "status": "success",
                "files": files,
                "path": nas_path,
                "message": message
            }

        # Error case: enrich "missing folder" messages with the MongoDB path.
        enhanced_message = message
        if message and "Thư mục không tồn tại" in message:
            enhanced_message = f"{message}\n(Đường dẫn từ MongoDB: {nas_path})"
        return {"status": "error", "message": enhanced_message}
    except Exception as e:
        logger.error(f"❌ [list_raw_files] Exception: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.post('/list-folder')
def list_folder_contents(payload: RawFolderListRequest):
    """
    List the contents of one NAS folder.

    Used for navigation when the user double-clicks a folder in the browser.
    """
    try:
        status, files, message = nas_service.get_files_for_path(
            payload.folder_path)

        if status == "success":
            return {
                "status": "success",
                "files": files,
                "path": payload.folder_path,
                "message": message
            }
        if status == "otp_required":
            return {"status": "otp_required", "message": message}
        # Any other status is treated as an error.
        return {"status": "error", "message": message}
    except Exception as e:
        logger.error(f"Error in list_folder_contents: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== AUTHENTICATION ENDPOINTS ====================
@router.post('/auth-otp')
def authenticate_otp(payload: OTPAuthRequest):
    """
    Authenticate against the NAS with an OTP code.

    The NAS session itself is held by nas_service; this endpoint only
    reports success/error with the service's message.
    """
    try:
        status, message = nas_service.authenticate_with_otp(payload.otp_code)
        result_status = "success" if status == "success" else "error"
        return {"status": result_status, "message": message}
    except Exception as e:
        logger.error(f"Error in authenticate_otp: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== DOWNLOAD ENDPOINTS ====================
@router.post('/download')
def download_raw_files(payload: RawFileDownloadRequest):
    """
    Create a download batch in the queue (API/FileStation mode).

    Returns batch_id immediately; a background worker performs the actual
    download and clients poll /download-status/{batch_id}.
    """
    try:
        logger.debug(
            f"Creating download job: {payload.ge_id} {payload.lang}, {len(payload.files)} files")

        if not payload.files:
            return {
                "status": "error",
                "message": "Không có file nào được chọn để tải xuống"
            }
        if not payload.ge_id or not payload.lang:
            return {
                "status": "error",
                "message": "GE ID và Lang là bắt buộc"
            }

        # Calculate destination path using helper
        destination_path = get_download_destination_path(
            payload.ge_id, payload.lang)

        # Derive the source directory from the first file's path by dropping
        # the trailing filename component. (The previous redundant
        # `payload.files and len(payload.files) > 0` check is covered by the
        # emptiness guard above.)
        mongodb_path = None
        first_file_path = payload.files[0].get('path', '')
        if first_file_path:
            mongodb_path = (first_file_path.rsplit('/', 1)[0]
                            if '/' in first_file_path else first_file_path)

        # Use downloads_service (NEW) instead of supabase_service (OLD).
        result = downloads_service.create_downloads_batch(
            files=payload.files,
            ge_id=payload.ge_id,
            lang=payload.lang,
            mode='api',  # FileStation API mode
            mongodb_path=mongodb_path,
            destination_path=destination_path
        )
        if not result['success']:
            return {
                "status": "error",
                "message": result.get('message', 'Không thể tạo batch downloads')
            }

        logger.debug(
            f"Created downloads batch: {result['batch_id']} ({result['file_count']} files)")
        return {
            "status": "pending",
            "message": "Batch đã được tạo và đang chờ xử lý",
            "batch_id": result['batch_id'],
            "download_ids": result['download_ids'],
            "file_count": result['file_count'],
            "mongodb_path": mongodb_path,
            "destination_path": destination_path
        }
    except Exception as e:
        logger.error(f"Error creating download job: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.get('/download-status/{batch_id}')
def get_download_status(batch_id: str):
    """
    Return the summary of one download batch, including per-file statuses.
    """
    try:
        summary = downloads_service.get_batch_summary(batch_id)
        if not summary:
            raise HTTPException(status_code=404, detail="Batch không tồn tại")
        return {"success": True, "batch": summary}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting job status: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== OLD ENDPOINTS REMOVED ====================
# Download management endpoints đã được chuyển sang /api/downloads và /api/batches
# Xem downloads_routes.py để sử dụng API mới

View File

@ -0,0 +1,606 @@
"""
Raw Download Routes - Sharing Link Mode
Handles Synology sharing link downloads (với Selenium + OTP).
"""
from fastapi import APIRouter, HTTPException, Request, Response
from pydantic import BaseModel
from typing import List, Dict, Optional
import logging
import os
import uuid
import time
from collections import defaultdict
from ..services import mongodb_service, nas_service, nas_sharing_service, supabase_service, downloads_service
from ..common import get_download_destination_path
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/sharing-link", tags=["Raw Sharing"])
# ==================== RATE LIMITING ====================
# Track last request time per sharing_id to prevent NAS API rate limit errors.
# Timestamps are stored in milliseconds (see list_sharing_folder, which reads
# and updates this map); defaultdict(float) yields 0.0 for first-time IDs.
_last_request_time: Dict[str, float] = defaultdict(float)
_rate_limit_window_ms = 200  # Minimum 200ms between requests for same sharing_id
# ==================== REQUEST MODELS ====================
class SharingLinkRequest(BaseModel):
    """Sharing link URL to be processed for its file list."""
    url: str
class SharingLinkDownloadRequest(BaseModel):
    """Request body for queueing downloads from a processed sharing link."""
    sharing_id: str
    files: List[Dict]  # List of file objects with name, path, isdir
    ge_id: Optional[str] = None  # Optional: for organizing files by project
    lang: Optional[str] = None  # Optional: for organizing files by project
class SharingLinkFromDbRequest(BaseModel):
    """Identifies the project whose linkRaw should be read from MongoDB."""
    ge_id: str
    lang: str
class SharingOtpSubmit(BaseModel):
    """OTP code entered by the user for the sharing-link login flow."""
    otp_code: str
# ==================== SHARING LINK PROCESSING ====================
def _sharing_record_summary(doc) -> Dict:
    """Compact view of a titles_data document used in error payloads."""
    return {
        "geId": doc.get("geId"),
        "lang": doc.get("lang"),
        "linkRaw": doc.get("linkRaw"),
        "path": doc.get("path")
    }


@router.post('/get-from-db')
def get_sharing_link_from_db(payload: SharingLinkFromDbRequest):
    """
    Query MongoDB titles_data collection to get the linkRaw field.

    Returns the sharing link on success; raises HTTPException with record
    details when the record is missing, duplicated, or its linkRaw is
    empty / not a valid http(s) URL.
    """
    try:
        collection = mongodb_service.get_titles_collection()
        query = {
            "geId": str(payload.ge_id).strip(),
            "lang": str(payload.lang).strip().upper()
        }
        documents = list(collection.find(query))

        # Validation 1: no record at all.
        if len(documents) == 0:
            raise HTTPException(
                status_code=404,
                detail={
                    "error": "Không tìm thấy record",
                    "query": {"geId": payload.ge_id, "lang": payload.lang.upper()}
                }
            )

        # Validation 2: duplicates (should not happen with a unique constraint).
        if len(documents) > 1:
            raise HTTPException(
                status_code=400,
                detail={
                    "error": "Tìm thấy nhiều hơn 1 record",
                    "records": [_sharing_record_summary(doc) for doc in documents]
                }
            )

        document = documents[0]
        link_raw = document.get("linkRaw")

        # Validation 3: linkRaw empty, None, or not a string.
        if not link_raw or not isinstance(link_raw, str) or link_raw.strip() == "":
            raise HTTPException(
                status_code=400,
                detail={
                    "error": "Trường linkRaw trống hoặc null",
                    "record": _sharing_record_summary(document)
                }
            )

        # Validation 4: linkRaw must look like an http(s) URL.
        link_raw_stripped = link_raw.strip()
        if not link_raw_stripped.startswith(("http://", "https://")):
            raise HTTPException(
                status_code=400,
                detail={
                    "error": "linkRaw không phải là liên kết hợp lệ (phải bắt đầu bằng http:// hoặc https://)",
                    "record": _sharing_record_summary(document)
                }
            )

        # Success - return sharing link plus a compact record echo.
        return {
            "success": True,
            "sharing_link": link_raw_stripped,
            "record": {
                "geId": document.get("geId"),
                "lang": document.get("lang"),
                "path": document.get("path")
            }
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error querying MongoDB for sharing link: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.post('/process')
def process_sharing_link(payload: SharingLinkRequest):
    """
    Process a sharing link to extract its file list.

    Returns a request_id the client polls via /result/{request_id}.
    Raises 400 for a URL that is empty or lacks the 'sharing' marker.
    """
    try:
        # Validate URL format
        if not payload.url or 'sharing' not in payload.url:
            raise HTTPException(status_code=400, detail="URL không hợp lệ")

        # Submit to worker queue
        result = nas_sharing_service.process_sharing_link(payload.url)
        return {
            "success": True,
            "request_id": result['request_id'],
            "status": result['status'],
            "message": "Đang xử lý sharing link..."
        }
    except HTTPException:
        # BUG FIX: previously the 400 validation error above fell into the
        # generic handler below and was re-raised as a 500; let it propagate,
        # matching every other endpoint in this file.
        raise
    except Exception as e:
        logger.error(f"Error processing sharing link: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.get('/related-projects')
def get_related_projects(link_raw: str):
    """
    Find all GE projects that share one sharing link (linkRaw).

    Query params:
        link_raw: sharing link URL

    Returns:
        {"success": true,
         "projects": [{"ge_id": ..., "lang": ...}, ...],
         "total": <count>}
    """
    try:
        if not link_raw or not link_raw.strip():
            raise HTTPException(
                status_code=400, detail="link_raw không được rỗng")

        link_raw_normalized = link_raw.strip()

        # Fetch only geId/lang from every titles_data record with this link.
        collection = mongodb_service.get_titles_collection()
        cursor = collection.find(
            {"linkRaw": link_raw_normalized},
            {"geId": 1, "lang": 1, "_id": 0}
        )
        projects = [
            {"ge_id": doc.get("geId"), "lang": doc.get("lang")}
            for doc in cursor
        ]

        logger.debug(
            f"Found {len(projects)} projects with linkRaw: {link_raw_normalized}")
        return {
            "success": True,
            "projects": projects,
            "total": len(projects)
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error querying related projects: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.get('/related-projects-by-ge')
def get_related_projects_by_ge(ge_id: str, lang: str):
    """
    Find all GE projects sharing the same sharing link, resolved via ge_id + lang.

    Query params:
        ge_id: GE ID
        lang: Language code

    Returns:
        {"success": true,
         "projects": [{"ge_id": ..., "lang": ...}, ...],
         "total": <count>}
        (projects is empty when the project has no linkRaw in titles_data)
    """
    try:
        if not ge_id or not ge_id.strip():
            raise HTTPException(
                status_code=400, detail="ge_id không được rỗng")
        if not lang or not lang.strip():
            raise HTTPException(status_code=400, detail="lang không được rỗng")
        # Step 1: Get linkRaw from titles_data
        link_raw = mongodb_service.get_sharing_link_from_tms_data(ge_id, lang)
        if not link_raw:
            # No sharing link recorded — not an error, just no relatives.
            return {
                "success": True,
                "projects": [],
                "total": 0
            }
        # Step 2: Find all projects with same linkRaw (projection: geId/lang only)
        collection = mongodb_service.get_titles_collection()
        documents = list(collection.find(
            {"linkRaw": link_raw},
            {"geId": 1, "lang": 1, "_id": 0}
        ))
        # Format results as plain ge_id/lang pairs
        projects = [
            {
                "ge_id": doc.get("geId"),
                "lang": doc.get("lang")
            }
            for doc in documents
        ]
        logger.debug(
            f"Found {len(projects)} related projects for GE {ge_id} {lang}")
        return {
            "success": True,
            "projects": projects,
            "total": len(projects)
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error querying related projects by GE: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.get('/project-note')
def get_project_note(ge_id: str, lang: str):
    """
    Fetch the note field from the titlelist_data collection for one project.

    Query params:
        ge_id: GE ID
        lang: Language code

    Returns {"success": True, "note": <str or None>}; a missing record and
    an empty note both normalize to note=None.
    """
    try:
        if not ge_id or not ge_id.strip():
            raise HTTPException(
                status_code=400, detail="ge_id không được rỗng")
        if not lang or not lang.strip():
            raise HTTPException(status_code=400, detail="lang không được rỗng")

        db = mongodb_service.get_db_connection()
        document = db['titlelist_data'].find_one(
            {
                "geId": str(ge_id).strip(),
                "lang": str(lang).strip().upper()
            },
            {"note": 1, "_id": 0}
        )
        if not document:
            return {"success": True, "note": None}

        note_content = document.get("note")
        logger.debug(f"Found note for {ge_id} {lang}: {bool(note_content)}")
        return {
            "success": True,
            "note": note_content if note_content else None
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error querying project note: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.get('/result/{request_id}')
def get_sharing_result(request_id: str):
    """
    Poll the outcome of a sharing-link processing request.

    Returns status "pending" | "success" | "error"; on success the payload
    carries sharing_id/path/files/total_files, on error a message.
    """
    try:
        result = nas_sharing_service.get_sharing_result(request_id)
        if not result:
            raise HTTPException(
                status_code=404, detail="Request không tồn tại")
        return result
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting sharing result: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== FOLDER NAVIGATION ====================
@router.post('/list-folder')
def list_sharing_folder(payload: dict):
    """
    List contents of a subfolder in a sharing link.
    Used when the user double-clicks a folder in the sharing-link file list.

    Payload:
        - sharing_id: Sharing ID from the initial /process call
        - folder_path: Path of the folder to list (e.g. "/subfolder_name")

    Returns:
        - status: "success" | "error"
        - files: list of files/folders
        - path: current folder path
    """
    try:
        sharing_id = payload.get('sharing_id')
        folder_path = payload.get('folder_path', '/')
        if not sharing_id:
            raise HTTPException(
                status_code=400, detail="sharing_id là bắt buộc")
        # CRITICAL FIX: Rate limiting to prevent Error 407 from rapid requests.
        # Requests for the same sharing_id closer together than
        # _rate_limit_window_ms are rejected with 429.
        current_time = time.time() * 1000  # Convert to milliseconds
        last_time = _last_request_time[sharing_id]
        time_since_last = current_time - last_time
        if time_since_last < _rate_limit_window_ms:
            wait_time = (_rate_limit_window_ms - time_since_last) / 1000
            logger.warning(
                f"Rate limit hit for {sharing_id}, rejecting request (wait {wait_time:.2f}s)")
            raise HTTPException(
                status_code=429,
                detail=f"Vui lòng chậm lại, đợi {wait_time:.1f}s trước khi thao tác tiếp"
            )
        # Update last request time.
        # NOTE(review): the timestamp is recorded before the worker check and
        # the listing itself — a failed request still consumes the window.
        # Confirm this is the intended rate-limit semantics.
        _last_request_time[sharing_id] = current_time
        # Get sharing worker instance (holds the Selenium driver)
        worker = nas_sharing_service.get_sharing_worker()
        if not worker or not worker.driver:
            raise HTTPException(
                status_code=503, detail="Sharing worker không sẵn sàng")
        # CRITICAL FIX: Lock driver to prevent race conditions.
        # Without lock, multiple users can navigate simultaneously and conflict.
        with worker.driver_lock:
            # List folder using nas_sharing_api package
            from ..services.nas_sharing_api import get_file_list
            logger.debug(
                f"📂 [Navigation] Lấy danh sách subfolder: {folder_path}")
            files = get_file_list(
                driver=worker.driver,
                sharing_id=sharing_id,
                folder_path=folder_path
            )
            logger.debug(
                f"✅ [Navigation] Tìm thấy {len(files)} items trong: {folder_path}")
            return {
                "status": "success",
                "files": files,
                "path": folder_path,
                "message": f"Tìm thấy {len(files)} item(s)"
            }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error listing sharing folder: {e}")
        import traceback
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== DOWNLOAD ENDPOINTS ====================
@router.post('/download')
def download_sharing_files(payload: SharingLinkDownloadRequest):
    """
    Queue downloads of sharing-link files into the NAS raw folder.

    With ge_id/lang the files land in the project folder (same layout as
    API mode); without them they go straight to the NAS root path under a
    synthetic SHARING_<id>/LINK identity.
    """
    try:
        logger.debug(
            f"Creating sharing link download job: {payload.sharing_id}, {len(payload.files)} files")
        if not payload.files:
            return {
                "status": "error",
                "message": "Không có file nào được chọn để tải xuống"
            }

        # Pick destination + DB identity depending on project context.
        if payload.ge_id and payload.lang:
            destination_path = get_download_destination_path(
                payload.ge_id, payload.lang)
            ge_id_for_db = payload.ge_id
            lang_for_db = payload.lang
        else:
            destination_path = nas_service.DESTINATION_PATH
            ge_id_for_db = f"SHARING_{payload.sharing_id}"
            lang_for_db = "LINK"

        result = downloads_service.create_downloads_batch(
            files=payload.files,
            ge_id=ge_id_for_db,
            lang=lang_for_db,
            mode='sharing',
            sharing_id=payload.sharing_id,
            mongodb_path=None,  # sharing links carry no MongoDB path
            destination_path=destination_path
        )
        if not result['success']:
            return {
                "status": "error",
                "message": result.get('message', 'Không thể tạo batch downloads')
            }

        logger.debug(
            f"Created sharing batch: {result['batch_id']} ({result['file_count']} files)")
        return {
            "status": "pending",
            "message": "Batch đã được tạo và đang chờ xử lý",
            "batch_id": result['batch_id'],
            "download_ids": result['download_ids'],
            "file_count": result['file_count'],
            "destination_path": destination_path,
            "sharing_id": payload.sharing_id
        }
    except Exception as e:
        logger.error(f"Error creating sharing download job: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== DOWNLOAD STATUS ====================
@router.get('/download-status/{batch_id}')
def get_sharing_download_status(batch_id: str):
    """
    Return the summary of one sharing-link download batch,
    including per-file statuses.
    """
    try:
        summary = downloads_service.get_batch_summary(batch_id)
        if not summary:
            raise HTTPException(status_code=404, detail="Batch không tồn tại")
        return {"success": True, "batch": summary}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting sharing download job status: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
# ==================== OTP HANDLING ====================
@router.get('/otp-status')
def check_sharing_otp_status():
    """
    Report whether the sharing worker is currently blocked waiting for OTP.

    Returns {"otp_required": bool, "message": str}.
    """
    try:
        otp_required = nas_sharing_service.is_otp_required()
        message = "Vui lòng nhập mã OTP" if otp_required else "Không cần OTP"
        return {"otp_required": otp_required, "message": message}
    except Exception as e:
        logger.error(f"Error checking OTP status: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")
@router.post('/submit-otp')
def submit_sharing_otp(payload: SharingOtpSubmit):
    """
    Submit the OTP code for the sharing-link login flow.

    Returns {"success": True, "message": ...} when accepted; raises 400
    with the worker's message when the OTP is rejected.
    """
    try:
        status, message = nas_sharing_service.submit_otp(payload.otp_code)
        if status != "success":
            raise HTTPException(status_code=400, detail=message)
        return {"success": True, "message": message}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error submitting OTP: {e}")
        raise HTTPException(status_code=500, detail=f"Lỗi hệ thống: {e}")

211
backend/routes/tms_routes.py Executable file
View File

@ -0,0 +1,211 @@
"""
TMS Permission Management Routes
Handles submission creation, listing, deletion, retry, and queue display.
"""
from fastapi import APIRouter, HTTPException, Request
from pydantic import BaseModel
from typing import List
import logging
from ..services import supabase_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api", tags=["TMS"])
# ==================== REQUEST MODELS ====================
class SubmissionCreate(BaseModel):
    """Request body for creating a TMS permission submission."""
    submission_id: str
    usernames: List[str]
    ge_input: str  # newline-separated "GE_ID LANG" lines (see /queue)
# ==================== SUBMISSION ENDPOINTS ====================
@router.post("/submissions")
def create_submission(payload: SubmissionCreate):
try:
created = supabase_service.create_submission_supabase(
payload.submission_id,
payload.usernames,
payload.ge_input
)
return created
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
@router.get("/submissions")
def list_submissions(limit: int = 50):
submissions = supabase_service.get_submissions_supabase(limit=limit)
return {"success": True, "submissions": submissions}
@router.get("/submissions/{submission_id}")
def get_submission(submission_id: str):
sub_list = supabase_service.get_submissions_supabase(limit=100)
sub = next((item for item in sub_list if item.get("submission_id") == submission_id), None)
if not sub:
raise HTTPException(status_code=404, detail="Submission not found")
return {"success": True, "submission": sub}
@router.delete("/submissions/{submission_id}")
def delete_submission(submission_id: str):
ok = supabase_service.delete_submission_supabase(submission_id)
if not ok:
raise HTTPException(status_code=404, detail="Submission not found or could not be deleted")
return {"success": True}
@router.post("/submissions/{submission_id}/retry")
def retry_submission(submission_id: str, payload: dict = None): # type: ignore
"""Retry a submission with only the error GE IDs and error usernames.
If payload.errorGeIds and payload.errorUsernames are provided, create a new submission
with only those GE IDs and usernames that had errors.
Otherwise, reset the original submission to pending (legacy behavior).
"""
if payload and payload.get('errorGeIds') and payload.get('errorUsernames'):
error_ge_ids = payload['errorGeIds']
error_usernames = payload['errorUsernames']
# Create new submission with only error GE IDs and error usernames
new_ge_id_and_lang = '\n'.join(error_ge_ids)
username_str = ','.join(error_usernames)
created = supabase_service.create_retry_submission(username_str, new_ge_id_and_lang)
if not created:
raise HTTPException(status_code=500, detail="Failed to create retry submission")
return {"success": True, "newSubmissionId": created.get("id")}
else:
# Legacy behavior: reset status to pending
ok = supabase_service.update_submission_supabase(submission_id, status="pending")
if not ok:
raise HTTPException(status_code=404, detail="Submission not found or could not be updated")
return {"success": True}
# ==================== DRIVER MANAGEMENT ====================
@router.post('/driver/close')
def close_driver(request: Request):
    """Close the global Selenium WebDriver if it's running.

    Security policy:
      - When DRIVER_ADMIN_TOKEN is set, the X-Admin-Token header must match.
      - Without the token, only localhost (127.0.0.1 / ::1) may call this.

    TMS permission automation moved to the TypeScript backend; this endpoint
    is kept for backward compatibility and performs no actual work.
    """
    import os
    try:
        admin_token = os.environ.get('DRIVER_ADMIN_TOKEN')
        header_token = request.headers.get('x-admin-token')
        client_host = request.client.host if request.client else ''

        if admin_token:
            # Token mode: header must be present and match exactly.
            if not header_token or header_token != admin_token:
                raise HTTPException(status_code=401, detail='Invalid or missing admin token')
        elif client_host not in ('127.0.0.1', '::1', 'localhost'):
            # No token configured: restrict to local callers only.
            raise HTTPException(status_code=403, detail='Driver close is restricted to localhost')

        return {'success': True, 'message': 'Driver management moved to TypeScript backend'}
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
# ==================== USERNAME MANAGEMENT ====================
@router.get('/usernames')
def get_usernames():
    """Return the full saved username list."""
    return {"success": True, "usernames": supabase_service.get_userslist()}
@router.get('/usernames/search')
def search_usernames(q: str = ""):
    """Case-insensitive contains-match over saved usernames (max 20 results)."""
    all_usernames = supabase_service.get_userslist()
    if not q:
        # Empty query: just return the head of the list.
        return {"success": True, "suggestions": all_usernames[:20]}
    needle = q.lower()
    matches = [name for name in all_usernames if needle in name.lower()]
    return {"success": True, "suggestions": matches[:20]}
@router.post('/usernames')
def add_username(payload: dict):
    """Add a username; expects a JSON body of the form {"username": "..."}."""
    candidate = payload.get('username') if isinstance(payload, dict) else None
    if not candidate:
        raise HTTPException(status_code=400, detail='username is required')
    return supabase_service.add_username(candidate)
@router.delete('/usernames')
def delete_username(payload: dict):
    """Delete a username; expects a JSON body of the form {"username": "..."}."""
    target = payload.get('username') if isinstance(payload, dict) else None
    if not target:
        raise HTTPException(status_code=400, detail='username is required')
    return supabase_service.delete_username(target)
# ==================== QUEUE DISPLAY ====================
@router.get('/queue')
def get_queue(limit: int = 100, all: bool = False):
    """Return a flattened list of GE items built from pending submissions in Supabase.
    Each pending submission's `input.ge_input` (newline separated) is split into GE ID and lang
    and turned into an item consumable by the frontend `QueueStatus` component.
    """
    # Backend status -> frontend label; unmapped statuses pass through unchanged.
    status_labels = {
        'pending': 'waiting',
        'processing': 'processing',
        'completed': 'done',   # only reachable when all=true
        'failed': 'error',
    }
    try:
        # Default: only pending/processing so the UI can show the single
        # processing submission. all=true also includes completed/failed
        # (useful when one endpoint should also provide history).
        wanted = ('pending', 'processing', 'completed',
                  'failed') if all else ('pending', 'processing')
        submissions = supabase_service.get_submissions_supabase(limit=1000) or []
        items = []
        for doc in submissions:
            if str(doc.get('status', '')).lower() not in wanted:
                continue
            submission_id = doc.get('submission_id')
            raw_input = doc.get('input')
            if not isinstance(raw_input, dict):
                raw_input = {}
            usernames = raw_input.get('usernames', [])
            if isinstance(usernames, list):
                usernames_str = '\n'.join(usernames)
            else:
                usernames_str = usernames or ''
            raw_status = str(doc.get('status', 'pending')).lower()
            mapped_status = status_labels.get(raw_status, raw_status)
            # Split ge_input into non-empty, stripped lines; one item per line.
            ge_lines = [ln.strip() for ln in str(
                raw_input.get('ge_input', '')).splitlines() if ln.strip()]
            for idx, line in enumerate(ge_lines):
                parts = line.split()  # expect e.g. "1000 de" or "696 us"
                items.append({
                    'key': f"{submission_id}:{idx}",
                    'id': str(parts[0] if parts else line),
                    'lang': str(parts[1]) if len(parts) > 1 else '',
                    'status': mapped_status,
                    'usernames': usernames_str,
                    'submission_id': submission_id
                })
        # respect limit on resulting GE items
        return {'success': True, 'queue': items[:limit]}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

0
backend/services/__init__.py Executable file
View File

View File

@ -0,0 +1,14 @@
"""
aria2 integration package for fast parallel downloads
"""
from .daemon import start_aria2_daemon, stop_aria2_daemon, is_aria2_running
from .download_manager import Aria2DownloadManager, get_aria2_manager
__all__ = [
'start_aria2_daemon',
'stop_aria2_daemon',
'is_aria2_running',
'Aria2DownloadManager',
'get_aria2_manager'
]

210
backend/services/aria2/daemon.py Executable file
View File

@ -0,0 +1,210 @@
"""
aria2c RPC daemon management
Starts/stops the aria2c daemon with optimized settings for NAS downloads
"""
import os
import subprocess
import time
import logging
from pathlib import Path
from typing import Optional
import psutil
logger = logging.getLogger(__name__)
# Global daemon process
_aria2_process: Optional[subprocess.Popen] = None
def get_aria2_executable() -> str:
    """Resolve the bundled aria2c.exe (project_root/aria2/aria2c.exe).

    Returns:
        Absolute-ish path string to the executable.
    Raises:
        FileNotFoundError: when the executable is missing.
    """
    # This module lives four levels below the project root.
    exe_path = Path(__file__).parent.parent.parent.parent / "aria2" / "aria2c.exe"
    if not exe_path.exists():
        raise FileNotFoundError(
            f"aria2c.exe not found at {exe_path}. "
            f"Please ensure aria2 folder exists in project root."
        )
    return str(exe_path)
def is_aria2_running(port: int = 6800) -> bool:
    """
    Check whether something is LISTENing on the given local port
    (assumed to be the aria2c RPC server).
    Args:
        port: RPC port to check (default: 6800)
    Returns:
        True if a listener is bound to the port; False otherwise or on error.
    """
    try:
        for conn in psutil.net_connections():
            # Some connection entries lack laddr / port (platform dependent).
            laddr = getattr(conn, 'laddr', None)
            if not laddr or not hasattr(laddr, 'port'):
                continue
            if laddr.port == port and conn.status == 'LISTEN':
                return True
        return False
    except Exception as exc:
        # net_connections may need elevated privileges on some systems.
        logger.warning(f"Could not check if aria2 is running: {exc}")
        return False
def start_aria2_daemon(
    port: int = 6800,
    secret: str = "dkidownload_secret_2025",
    max_concurrent: int = 10,
    max_connections: Optional[int] = None,
    split: Optional[int] = None,
    min_split_size: str = "1M",
    download_dir: Optional[str] = None
) -> bool:
    """
    Start aria2c RPC daemon with optimized settings
    Args:
        port: RPC port (default: 6800)
        secret: RPC secret token for authentication
        max_concurrent: Max concurrent downloads
        max_connections: Max connections per server (default: from env ARIA2_MAX_CONNECTIONS_PER_FILE or 16)
        split: Number of connections per file (default: same as max_connections)
        min_split_size: Minimum size to split (e.g., "1M")
        download_dir: Temporary download directory
    Returns:
        True if started successfully
    """
    global _aria2_process
    # Load max_connections from environment if not provided
    if max_connections is None:
        max_connections = int(
            os.getenv('ARIA2_MAX_CONNECTIONS_PER_FILE', '16'))
    # Default split to max_connections if not specified
    if split is None:
        split = max_connections
    # Check if already running
    # If another process owns the listener we reuse it and never touch
    # _aria2_process (so stop_aria2_daemon() will not manage that instance).
    if is_aria2_running(port):
        logger.debug(f"aria2c already running on port {port}")
        return True
    try:
        aria2_exe = get_aria2_executable()
        # Default download dir: project_root/aria2/downloads
        if download_dir is None:
            project_root = Path(__file__).parent.parent.parent.parent
            download_dir = str(project_root / "aria2" / "downloads")
        # Create download dir if not exists
        os.makedirs(download_dir, exist_ok=True)
        # aria2c command with optimized settings
        cmd = [
            aria2_exe,
            '--enable-rpc',
            '--rpc-listen-all=false',  # Only localhost for security
            f'--rpc-listen-port={port}',
            f'--rpc-secret={secret}',
            f'--max-concurrent-downloads={max_concurrent}',
            f'--max-connection-per-server={max_connections}',
            f'--split={split}',
            f'--min-split-size={min_split_size}',
            '--continue=true',  # Resume support
            '--auto-file-renaming=false',  # Don't auto-rename
            f'--dir={download_dir}',
            '--log=-',  # Log to stdout
            '--log-level=notice',
            '--console-log-level=warn',
            '--summary-interval=0',  # Disable periodic summary
            '--disable-ipv6=true',  # Faster connection
            '--check-certificate=false',  # NAS uses self-signed cert
        ]
        logger.debug(f"Starting aria2c daemon on port {port}...")
        logger.debug(f"Download directory: {download_dir}")
        # Start process (detached, no window on Windows)
        # CREATE_NO_WINDOW only exists on Windows, hence the os.name guard.
        _aria2_process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            creationflags=subprocess.CREATE_NO_WINDOW if os.name == 'nt' else 0
        )
        # Wait a bit and check if started
        # NOTE(review): a fixed 1s sleep assumes aria2c binds its port within
        # a second — may be flaky on slow hosts; confirm before relying on it.
        time.sleep(1)
        if _aria2_process.poll() is not None:
            # Process died immediately
            stdout, stderr = _aria2_process.communicate()
            logger.error(f"aria2c failed to start: {stderr.decode()}")
            return False
        # Verify it's listening
        if is_aria2_running(port):
            logger.debug(
                f"✅ aria2c daemon started successfully on port {port}")
            return True
        else:
            logger.error("aria2c started but not listening on port")
            return False
    except Exception as e:
        logger.error(f"Failed to start aria2c: {e}", exc_info=True)
        return False
def stop_aria2_daemon() -> bool:
    """
    Stop the aria2c daemon gracefully: SIGTERM first, hard kill after 5s.
    Only stops a process started by this module (tracked in _aria2_process).
    Returns:
        True if stopped successfully (or nothing was running)
    """
    global _aria2_process
    proc = _aria2_process
    if proc is None:
        logger.debug("No aria2c process to stop")
        return True
    try:
        logger.debug("Stopping aria2c daemon...")
        proc.terminate()
        try:
            # Give it up to 5 seconds to exit on its own.
            proc.wait(timeout=5)
            logger.debug("✅ aria2c daemon stopped")
        except subprocess.TimeoutExpired:
            logger.warning("aria2c didn't stop gracefully, forcing...")
            proc.kill()
            proc.wait()
            logger.debug("✅ aria2c daemon force-killed")
        _aria2_process = None
        return True
    except Exception as e:
        logger.error(f"Error stopping aria2c: {e}")
        return False
def restart_aria2_daemon(**kwargs) -> bool:
    """
    Restart aria2c daemon with new settings
    Args:
        **kwargs: Arguments to pass to start_aria2_daemon()
    Returns:
        True if restarted successfully
    """
    # Stop is best-effort (its return value is ignored); the 1s pause lets the
    # OS release the RPC port before the new instance binds it.
    stop_aria2_daemon()
    time.sleep(1)
    return start_aria2_daemon(**kwargs)

View File

@ -0,0 +1,447 @@
"""
Aria2 Download Manager
High-level interface for downloading files via aria2c RPC
Handles progress tracking, error handling, and fallback logic
"""
import os
import time
import logging
import threading
from pathlib import Path
from typing import Optional, Callable, Dict, Any, Tuple
from urllib.parse import urlencode
import aria2p
from .error_parser import parse_aria2_error, format_error_for_user, Aria2ErrorCategory
logger = logging.getLogger(__name__)
# Global singleton instance
_aria2_manager: Optional['Aria2DownloadManager'] = None
class Aria2DownloadManager:
    """
    Manages file downloads via aria2c RPC
    Features:
    - Multi-threaded downloads (16 connections per file)
    - Progress callbacks for UI updates
    - Automatic retry on failure
    - Cookie and header support for authenticated downloads
    """
    def __init__(
        self,
        host: str = 'localhost',
        port: int = 6800,
        secret: str = 'dkidownload_secret_2025'
    ):
        """
        Initialize aria2 RPC client
        Args:
            host: aria2c RPC host
            port: aria2c RPC port
            secret: RPC secret token
        Raises:
            Exception: re-raised if the aria2p client cannot be constructed.
        """
        self.host = host
        self.port = port
        self.secret = secret
        try:
            # Create aria2p client - aria2p adds /jsonrpc automatically
            # Don't include port in host URL, use port parameter instead
            self.client = aria2p.Client(
                host=f"http://{host}",
                port=port,
                secret=secret
            )
            self.api = aria2p.API(self.client)
            # Test connection - try to get stats to verify
            try:
                # The call itself is the probe; its result is not needed.
                self.api.get_stats()
                logger.debug(f"✅ Connected to aria2 RPC on {host}:{port}")
            except Exception as conn_err:
                # More specific error handling
                error_str = str(conn_err)
                if "Expecting value" in error_str or "JSON" in error_str:
                    logger.warning(
                        f"aria2 RPC JSON parsing issue (non-fatal): {conn_err}")
                    logger.debug(
                        f"✅ aria2 RPC client initialized on {host}:{port} (connection assumed)")
                else:
                    logger.debug(
                        f"✅ aria2 RPC client initialized on {host}:{port}")
        except Exception as e:
            logger.error(f"Failed to connect to aria2c RPC: {e}")
            raise
    def download_file(
        self,
        url: str,
        dest_path: str,
        headers: Optional[Dict[str, str]] = None,
        cookies: Optional[str] = None,
        progress_callback: Optional[Callable[[int, int], None]] = None,
        referer: Optional[str] = None,
        method: str = 'GET',
        post_data: Optional[Dict[str, str]] = None,
        max_download_limit: Optional[str] = None
    ) -> Tuple[bool, Optional[str], Optional[str]]:
        """
        Download a file via aria2 (GET ONLY - NO POST SUPPORT)
        IMPORTANT: aria2 ONLY supports GET requests. POST requests will raise ValueError.
        Do NOT pass method='POST' or post_data to this function.
        Args:
            url: Download URL (with query params if needed)
            dest_path: Full local path to save file
            headers: Custom HTTP headers dict
            cookies: Cookie string (format: "name=value; name2=value2")
            progress_callback: Optional callback(downloaded_bytes, total_bytes)
            referer: Referer header value
            method: HTTP method (MUST be 'GET', others will raise error)
            post_data: NOT SUPPORTED - will raise ValueError if provided
            max_download_limit: Optional bandwidth limit (e.g., '100K', '1M')
        Returns:
            Tuple[success: bool, error_message: Optional[str], gid: Optional[str]]
        Raises:
            ValueError: If method is not 'GET' or post_data is provided
        """
        try:
            # Ensure parent directory exists BEFORE aria2 tries to access it
            parent_dir = os.path.dirname(dest_path)
            if parent_dir:
                try:
                    os.makedirs(parent_dir, exist_ok=True)
                    logger.debug(f"Ensured directory exists: {parent_dir}")
                except Exception as mkdir_err:
                    logger.error(
                        f"Failed to create directory {parent_dir}: {mkdir_err}")
                    raise
            # Normalize path for aria2 (convert to absolute path)
            # This prevents aria2 from trying to create directories itself
            abs_dest_path = os.path.abspath(dest_path)
            abs_parent_dir = os.path.dirname(abs_dest_path)
            # Load max connections from environment
            max_connections = os.getenv('ARIA2_MAX_CONNECTIONS_PER_FILE', '16')
            # Reduce connections for bandwidth-limited downloads
            if max_download_limit:
                # With throttled bandwidth (e.g., 100KB/s), use only 1 connection
                # to avoid timeout from too many slow connections
                max_connections = '1'
                logger.debug(
                    f"Reduced connections to 1 for bandwidth-limited download")
            # Build aria2 options - use absolute paths
            options = {
                'dir': abs_parent_dir,
                'out': os.path.basename(abs_dest_path),
                'max-connection-per-server': max_connections,
                'split': max_connections,
                'min-split-size': '1M',
                'continue': 'true',
                'auto-file-renaming': 'false',
                'allow-overwrite': 'true',
                'check-certificate': 'false',  # NAS self-signed cert
            }
            # Apply bandwidth limit if specified (for background downloads)
            if max_download_limit:
                options['max-download-limit'] = max_download_limit
                logger.debug(f"Bandwidth limit: {max_download_limit}")
            # Add headers - aria2 accepts list format
            if headers:
                header_list = []
                for key, value in headers.items():
                    header_list.append(f"{key}: {value}")
                if header_list:
                    options['header'] = '\n'.join(header_list)
            # Add referer
            if referer:
                options['referer'] = referer
            # Add cookies - append to header
            if cookies:
                cookie_header = f"Cookie: {cookies}"
                if 'header' in options:
                    options['header'] = options['header'] + \
                        '\n' + cookie_header
                else:
                    options['header'] = cookie_header
            logger.debug(
                f"Starting aria2 download: {os.path.basename(abs_dest_path)}")
            logger.debug(f"URL: {url[:100]}...")
            logger.debug(f"Dest: {abs_dest_path}")
            logger.debug(f"Aria2 dir: {abs_parent_dir}")
            logger.debug(f"Method: {method}")
            # Validate method - aria2 ONLY supports GET/HEAD
            if method.upper() != 'GET':
                error_msg = (
                    f"❌ aria2 only supports GET method, received: {method}. "
                    f"POST requests are NOT supported. "
                    f"This is a critical error - please check your download implementation."
                )
                logger.error(error_msg)
                raise ValueError(error_msg)
            # Validate no POST data present
            if post_data:
                error_msg = (
                    f"❌ aria2 cannot handle POST data. "
                    f"POST requests with form data are NOT supported by aria2c RPC. "
                    f"This indicates a bug in the calling code."
                )
                logger.error(error_msg)
                raise ValueError(error_msg)
            # Standard GET download with aria2 (16 connections)
            download = self.api.add_uris([url], options=options)
            gid = download.gid
            logger.debug(f"Download started with GID: {gid}")
            # Monitor progress (blocks until the transfer reaches a terminal state)
            success = self._wait_for_completion(
                gid=gid,
                progress_callback=progress_callback
            )
            if success:
                logger.debug(f"✅ Download completed: {abs_dest_path}")
                return True, None, gid
            else:
                # Get error info and parse it
                try:
                    download = self.api.get_download(gid)
                    raw_error = download.error_message or "Unknown error"
                except Exception:
                    # Fixed: was a bare `except:` which would also swallow
                    # SystemExit / KeyboardInterrupt.
                    raw_error = "Download failed"
                # Parse error with structured handler
                parsed_error = parse_aria2_error(raw_error)
                # Log with category
                logger.error(
                    f"❌ Download failed [{parsed_error.category.value}]: {raw_error}")
                logger.debug(f"💡 {parsed_error.user_message}")
                logger.debug(
                    f"📋 Suggested action: {parsed_error.suggested_action}")
                # Return user-friendly error message
                error_msg = format_error_for_user(
                    parsed_error, include_technical=False)
                return False, error_msg, None
        except Exception as e:
            raw_error = str(e)
            # Parse error
            parsed_error = parse_aria2_error(raw_error)
            # Special handling for JSON/RPC errors
            if "Expecting value" in raw_error or "JSON" in raw_error:
                error_msg = (
                    f"aria2 RPC communication error: {raw_error}. "
                    "This might be due to aria2c daemon not running or RPC secret mismatch. "
                    "Check if aria2c process is active and RPC port 6800 is accessible."
                )
            else:
                error_msg = format_error_for_user(
                    parsed_error, include_technical=True)
            logger.error(
                f"❌ Exception [{parsed_error.category.value}]: {raw_error}", exc_info=True)
            return False, error_msg, None
    def _wait_for_completion(
        self,
        gid: str,
        progress_callback: Optional[Callable] = None,
        poll_interval: float = 0.5
    ) -> bool:
        """
        Wait for download to complete and track progress
        Args:
            gid: aria2 download GID
            progress_callback: Optional callback(downloaded_bytes, total_bytes)
            poll_interval: How often to check status (seconds)
        Returns:
            True if download completed successfully
        NOTE(review): this loop has no timeout — if the RPC endpoint keeps
        erroring it polls forever; confirm callers are OK with that.
        """
        last_completed = 0
        last_total = 0
        while True:
            try:
                download = self.api.get_download(gid)
                # Check status
                if download.is_complete:
                    # Final progress callback
                    if progress_callback:
                        progress_callback(
                            download.completed_length, download.total_length)
                    return True
                elif download.has_failed:
                    logger.error(f"Download failed: {download.error_message}")
                    return False
                elif download.is_removed:
                    logger.warning("Download was removed")
                    return False
                # Update progress
                completed = download.completed_length
                total = download.total_length
                # Only call callback if values changed
                if progress_callback and (completed != last_completed or total != last_total):
                    progress_callback(completed, total)
                    last_completed = completed
                    last_total = total
                # Wait before next poll
                time.sleep(poll_interval)
            except Exception as e:
                logger.error(f"Error checking download status: {e}")
                time.sleep(poll_interval)
    def get_status(self, gid: str) -> Dict[str, Any]:
        """
        Get current status of a download
        Args:
            gid: aria2 download GID
        Returns:
            Status dict with keys: gid, status, completed, total, speed, progress
        """
        try:
            download = self.api.get_download(gid)
            progress = 0
            if download.total_length > 0:
                progress = (download.completed_length /
                            download.total_length) * 100
            return {
                'gid': gid,
                'status': download.status,
                'completed': download.completed_length,
                'total': download.total_length,
                'speed': download.download_speed,
                'progress': round(progress, 2),
                'error': download.error_message if download.has_failed else None
            }
        except Exception as e:
            return {
                'gid': gid,
                'status': 'error',
                'error': str(e)
            }
    def cancel_download(self, gid: str) -> bool:
        """
        Cancel an active download
        Args:
            gid: aria2 download GID
        Returns:
            True if cancelled successfully
        """
        try:
            # Remove by GID using force remove
            download = self.api.get_download(gid)
            if download:
                download.remove(force=True)
                logger.debug(f"Cancelled download: {gid}")
                return True
            return False
        except Exception as e:
            logger.error(f"Failed to cancel download {gid}: {e}")
            return False
    def get_global_stats(self) -> Dict[str, Any]:
        """
        Get global download statistics
        Returns:
            Stats dict with download speed, active downloads, etc.
            All values fall back to 0 on error or missing attributes.
        """
        try:
            stats = self.api.get_stats()
            return {
                'download_speed': getattr(stats, 'download_speed', 0),
                'upload_speed': getattr(stats, 'upload_speed', 0),
                'num_active': getattr(stats, 'num_active', 0),
                'num_waiting': getattr(stats, 'num_waiting', 0),
                'num_stopped': getattr(stats, 'num_stopped', 0)
            }
        except Exception as e:
            logger.error(f"Failed to get stats: {e}")
            return {
                'download_speed': 0,
                'upload_speed': 0,
                'num_active': 0,
                'num_waiting': 0,
                'num_stopped': 0
            }
def get_aria2_manager(
    host: str = 'localhost',
    port: int = 6800,
    secret: str = 'dkidownload_secret_2025'
) -> Optional[Aria2DownloadManager]:
    """
    Get or create global aria2 manager instance (singleton)
    Args:
        host: aria2c RPC host
        port: aria2c RPC port
        secret: RPC secret token
    Returns:
        Aria2DownloadManager instance or None if connection failed
    """
    global _aria2_manager
    # Reuse the cached instance; connection args only matter on first call.
    if _aria2_manager is not None:
        return _aria2_manager
    try:
        _aria2_manager = Aria2DownloadManager(
            host=host, port=port, secret=secret)
    except Exception as e:
        logger.error(f"Failed to create aria2 manager: {e}")
        return None
    return _aria2_manager
def reset_aria2_manager():
    """Reset the global manager instance (useful for testing)"""
    # After this, the next get_aria2_manager() call builds a fresh connection.
    global _aria2_manager
    _aria2_manager = None

View File

@ -0,0 +1,196 @@
"""
Aria2 Error Parser - Parse and categorize aria2 errors with actionable solutions.
Based on official aria2 documentation:
https://aria2.github.io/manual/en/html/aria2c.html
"""
from typing import Optional, Dict, Any
from enum import Enum
import re
class Aria2ErrorCategory(Enum):
    """Categories of aria2 errors for better handling.

    Values are lowercase snake_case strings; they are surfaced to API
    consumers via Aria2Error.to_dict()['category'].
    """
    FILE_CONFLICT = "file_conflict"  # File exists, control file missing
    NETWORK = "network"  # Connection, timeout, DNS errors
    HTTP = "http"  # HTTP status codes
    AUTH = "auth"  # Authentication failures
    CORRUPTION = "corruption"  # Checksum, torrent parsing errors
    CONFIG = "config"  # Invalid options
    RESOURCE = "resource"  # Disk full, permission denied
    UNKNOWN = "unknown"  # Fallback when no ERROR_PATTERNS entry matches
class Aria2Error:
    """Parsed aria2 error: category, human-readable message, and suggested fix."""

    def __init__(
        self,
        category: Aria2ErrorCategory,
        original_message: str,
        user_message: str,
        suggested_action: str,
        technical_details: Optional[Dict[str, Any]] = None
    ):
        self.category = category
        self.original_message = original_message
        self.user_message = user_message
        self.suggested_action = suggested_action
        # Always hold a dict so callers can iterate without a None check.
        self.technical_details = {} if not technical_details else technical_details

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (category flattened to its string value)."""
        return dict(
            category=self.category.value,
            original_message=self.original_message,
            user_message=self.user_message,
            suggested_action=self.suggested_action,
            technical_details=self.technical_details,
        )
# Error patterns with solutions
# Each entry: 'pattern' (regex matched case-insensitively against the raw
# aria2 message), 'category' (Aria2ErrorCategory), 'user_message' /
# 'suggested_action' (Vietnamese, user-facing), and optionally
# 'extract_file' (callable pulling a file path out of the message).
# Entries are checked in order; the first match wins.
ERROR_PATTERNS = [
    # File exists but control file missing (--allow-overwrite=false)
    {
        'pattern': r'File .+ exists, but a control file.*does not exist',
        'category': Aria2ErrorCategory.FILE_CONFLICT,
        'user_message': 'Tệp đã tồn tại từ lần tải trước (chưa hoàn thành hoặc đã hoàn thành)',
        'suggested_action': 'Xóa tệp cũ và tệp .aria2 (nếu có) để tải lại từ đầu, hoặc bỏ qua nếu tệp đã hoàn thành',
        'extract_file': lambda msg: (m.group(1) if (m := re.search(r'File (.+?) exists', msg)) else None)
    },
    # Connection timeout
    {
        'pattern': r'Read timed out|Connection timed out',
        'category': Aria2ErrorCategory.NETWORK,
        'user_message': 'Kết nối bị timeout (quá chậm hoặc không phản hồi)',
        'suggested_action': 'Kiểm tra kết nối mạng, giảm số connections (--max-connection-per-server), hoặc tăng timeout'
    },
    # Connection refused
    {
        'pattern': r'Connection refused|Could not connect',
        'category': Aria2ErrorCategory.NETWORK,
        'user_message': 'Không thể kết nối đến server',
        'suggested_action': 'Kiểm tra URL, firewall, hoặc server có thể đang offline'
    },
    # HTTP 404
    {
        'pattern': r'404|Not Found',
        'category': Aria2ErrorCategory.HTTP,
        'user_message': 'Tệp không tồn tại trên server (404)',
        'suggested_action': 'Link download có thể đã hết hạn hoặc tệp đã bị xóa'
    },
    # HTTP 403
    {
        'pattern': r'403|Forbidden',
        'category': Aria2ErrorCategory.HTTP,
        'user_message': 'Không có quyền truy cập (403)',
        'suggested_action': 'Kiểm tra cookies, session, hoặc referer header'
    },
    # HTTP 401
    {
        'pattern': r'401|Unauthorized',
        'category': Aria2ErrorCategory.AUTH,
        'user_message': 'Yêu cầu xác thực (401)',
        'suggested_action': 'Cần đăng nhập lại hoặc refresh token/session'
    },
    # Checksum mismatch
    {
        'pattern': r'Checksum validation failed|piece hash check failed',
        'category': Aria2ErrorCategory.CORRUPTION,
        'user_message': 'Dữ liệu bị lỗi (checksum không khớp)',
        'suggested_action': 'Xóa tệp .aria2 và tải lại từ đầu'
    },
    # Disk full
    {
        'pattern': r'No space left on device|Disk full',
        'category': Aria2ErrorCategory.RESOURCE,
        'user_message': 'Không đủ dung lượng ổ đĩa',
        'suggested_action': 'Giải phóng dung lượng hoặc thay đổi thư mục đích'
    },
    # Permission denied
    {
        'pattern': r'Permission denied',
        'category': Aria2ErrorCategory.RESOURCE,
        'user_message': 'Không có quyền ghi file',
        'suggested_action': 'Kiểm tra quyền truy cập thư mục đích'
    },
    # Too many redirects
    {
        'pattern': r'Too many redirects',
        'category': Aria2ErrorCategory.HTTP,
        'user_message': 'Quá nhiều lần chuyển hướng',
        'suggested_action': 'URL có thể bị lỗi hoặc redirect loop'
    }
]
def parse_aria2_error(error_message: str) -> Aria2Error:
    """
    Parse aria2 error message and return structured error with solution.
    Args:
        error_message: Raw error message from aria2
    Returns:
        Aria2Error object with category and suggested action
        (UNKNOWN category when no pattern matches).
    """
    for entry in ERROR_PATTERNS:
        if not re.search(entry['pattern'], error_message, re.IGNORECASE):
            continue
        details = {}
        # Some patterns can extract the offending file path from the message.
        extractor = entry.get('extract_file')
        if extractor:
            file_path = extractor(error_message)
            if file_path:
                details['file_path'] = file_path
        return Aria2Error(
            category=entry['category'],
            original_message=error_message,
            user_message=entry['user_message'],
            suggested_action=entry['suggested_action'],
            technical_details=details
        )
    # No known pattern matched: generic fallback.
    return Aria2Error(
        category=Aria2ErrorCategory.UNKNOWN,
        original_message=error_message,
        user_message='Lỗi không xác định từ aria2',
        suggested_action='Xem log chi tiết để biết thêm thông tin',
        technical_details={'raw_error': error_message}
    )
def format_error_for_user(error: Aria2Error, include_technical: bool = False) -> str:
    """
    Format error message for user display.
    Args:
        error: Parsed Aria2Error object
        include_technical: Whether to include technical details
    Returns:
        Formatted error string
    """
    parts = [f"{error.user_message}\n",
             f"💡 Giải pháp: {error.suggested_action}"]
    if include_technical:
        parts.append(f"\n🔧 Chi tiết kỹ thuật: {error.original_message}")
        for key, value in (error.technical_details or {}).items():
            parts.append(f"\n - {key}: {value}")
    return ''.join(parts)

View File

@ -0,0 +1,461 @@
"""
Downloads Service - File-centric download management.
Each download record represents ONE file, not a batch of files.
"""
import logging
from typing import List, Dict, Any, Optional, cast
from datetime import datetime
from uuid import uuid4
from .supabase_service import get_supabase_client
logger = logging.getLogger(__name__)
# ==================== BATCH CREATION ====================
def create_downloads_batch(
    files: List[Dict[str, Any]],
    ge_id: str,
    lang: str,
    mode: str,
    sharing_id: Optional[str] = None,
    mongodb_path: Optional[str] = None,
    destination_path: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create one download record per file, grouped under a freshly generated batch_id.
    Args:
        files: List of file info dicts with 'name', 'path', 'isdir'/'is_folder'
        ge_id: GE ID for organizing
        lang: Language code
        mode: 'api' or 'sharing'
        sharing_id: Optional sharing link ID (for sharing mode)
        mongodb_path: MongoDB reference path
            - For API mode: folder_path from titles_data
            - For Sharing mode: linkRaw from titles_data
        destination_path: Optional full destination path for downloads
    Returns:
        Dict with keys: success, batch_id, download_ids, file_count, message.
    """
    try:
        client = get_supabase_client()
        batch_id = str(uuid4())  # groups the files of this request together
        # One record per file, all sharing the same batch_id.
        records = [
            {
                'batch_id': batch_id,
                'ge_id': ge_id,
                'lang': lang,
                'file_name': info.get('name', ''),
                'file_path': info.get('path', ''),
                'mode': mode,
                'status': 'pending',
                'sharing_id': sharing_id,
                'mongodb_path': mongodb_path,
                'destination_path': destination_path,
                'retry_count': 0,
                'downloaded_size': 0,
                'progress_percent': 0.0
            }
            for info in files
        ]
        # Single bulk insert for the whole batch.
        response = client.table('downloads').insert(records).execute()
        created = response.data or []
        download_ids = [cast(int, row.get('id')) for row in created
                        if isinstance(row, dict) and row.get('id')]
        logger.debug(
            f"Created batch {batch_id}: {len(download_ids)} files for {ge_id} {lang}")
        return {
            "success": True,
            "batch_id": batch_id,
            "download_ids": download_ids,
            "file_count": len(download_ids),
            "message": f"Created {len(download_ids)} download records"
        }
    except Exception as e:
        logger.error(f"Error creating downloads batch: {e}")
        return {
            "success": False,
            "batch_id": None,
            "download_ids": [],
            "file_count": 0,
            "message": f"Lỗi tạo downloads: {str(e)}"
        }
# ==================== QUERY FUNCTIONS ====================
def get_all_downloads(
    status: Optional[str] = None,
    mode: Optional[str] = None,
    limit: int = 100
) -> List[Dict[str, Any]]:
    """
    Get all download records with optional filtering.
    Args:
        status: Filter by status (pending, downloading, completed, failed, cancelled)
        mode: Filter by mode (api, sharing)
        limit: Max number of records to return
    Returns:
        List of download records ([] on error).
    """
    try:
        query = get_supabase_client().table('downloads').select('*')
        if status:
            query = query.eq('status', status)
        if mode:
            query = query.eq('mode', mode)
        # Newest first.
        response = query.order('created_at', desc=True).limit(limit).execute()
        return cast(List[Dict[str, Any]], response.data or [])
    except Exception as e:
        logger.error(f"Error getting downloads: {e}")
        return []
def get_download_by_id(download_id: int) -> Optional[Dict[str, Any]]:
    """Get a single download record by ID; None when missing or on error."""
    try:
        client = get_supabase_client()
        response = client.table('downloads').select(
            '*').eq('id', download_id).execute()
        rows = response.data or []
        return cast(Dict[str, Any], rows[0]) if rows else None
    except Exception as e:
        logger.error(f"Error getting download {download_id}: {e}")
        return None
def get_downloads_by_batch(batch_id: str) -> List[Dict[str, Any]]:
    """Get all downloads in a batch, ordered by file name ([] on error)."""
    try:
        query = (get_supabase_client().table('downloads')
                 .select('*')
                 .eq('batch_id', batch_id)
                 .order('file_name'))
        response = query.execute()
        return cast(List[Dict[str, Any]], response.data or [])
    except Exception as e:
        logger.error(f"Error getting batch {batch_id}: {e}")
        return []
def get_active_downloads() -> List[Dict[str, Any]]:
    """Get all downloads that are pending or currently downloading, oldest first."""
    try:
        query = (get_supabase_client().table('downloads')
                 .select('*')
                 .in_('status', ['pending', 'downloading'])
                 .order('created_at', desc=False))
        response = query.execute()
        return cast(List[Dict[str, Any]], response.data or [])
    except Exception as e:
        logger.error(f"Error getting active downloads: {e}")
        return []
# ==================== UPDATE FUNCTIONS ====================
def update_download_status(
    download_id: int,
    status: str,
    error_message: Optional[str] = None,
    progress_percent: Optional[float] = None,
    downloaded_size: Optional[int] = None,
    file_size: Optional[int] = None,
    destination_path: Optional[str] = None,
    aria2_gid: Optional[str] = None
) -> bool:
    """
    Update download status and progress.
    Args:
        download_id: ID of download to update
        status: New status (pending, downloading, completed, failed, cancelled)
        error_message: Optional error message
        progress_percent: Download progress (0-100)
        downloaded_size: Bytes downloaded so far
        file_size: Total file size in bytes
        destination_path: Final destination path
        aria2_gid: Aria2 download GID (for cancellation)
    Returns:
        True if successful, False otherwise
    """
    # datetime.utcnow() is deprecated since Python 3.12; produce an equivalent
    # naive-UTC ISO string so stored timestamps keep the exact same format.
    from datetime import timezone

    def _utc_now_iso() -> str:
        return datetime.now(timezone.utc).replace(tzinfo=None).isoformat()

    try:
        client = get_supabase_client()
        update_data: Dict[str, Any] = {'status': status}
        # Set timestamps based on status
        if status == 'downloading':
            # First time switching to downloading, set started_at
            download = get_download_by_id(download_id)
            if download and not download.get('started_at'):
                update_data['started_at'] = _utc_now_iso()
        elif status in ['completed', 'failed', 'cancelled']:
            # Terminal status, set completed_at
            update_data['completed_at'] = _utc_now_iso()
            # Clear aria2_gid when terminal (may be re-set below if caller
            # explicitly passed aria2_gid — preserved original precedence).
            update_data['aria2_gid'] = None
        # Optional fields
        if error_message is not None:
            update_data['error_message'] = error_message
        if progress_percent is not None:
            update_data['progress_percent'] = progress_percent
        if downloaded_size is not None:
            update_data['downloaded_size'] = downloaded_size
        if file_size is not None:
            update_data['file_size'] = file_size
        if destination_path is not None:
            update_data['destination_path'] = destination_path
        if aria2_gid is not None:
            update_data['aria2_gid'] = aria2_gid
        response = client.table('downloads').update(
            update_data).eq('id', download_id).execute()
        if response.data:
            logger.debug(f"Updated download {download_id} to status {status}")
            return True
        return False
    except Exception as e:
        logger.error(f"Error updating download {download_id}: {e}")
        return False
def cancel_download(download_id: int) -> bool:
    """Cancel a download (set status to cancelled).

    If the record is actively downloading via aria2, the aria2 transfer is
    stopped first (best-effort) before the DB row is updated.
    """
    try:
        download = get_download_by_id(download_id)
        if download is None:
            logger.warning(f"Cannot cancel download {download_id}: not found")
            return False
        if download.get('status') == 'downloading' and download.get('aria2_gid'):
            try:
                from .aria2.download_manager import get_aria2_manager
                manager = get_aria2_manager()
                if manager:
                    gid = download['aria2_gid']
                    logger.debug(f"Cancelling aria2 download GID: {gid}")
                    manager.cancel_download(gid)
            except Exception as e:
                # Continue to update DB even if aria2 cancel fails
                logger.error(f"Failed to cancel aria2 download: {e}")
        return update_download_status(download_id, 'cancelled')
    except Exception as e:
        logger.error(f"Error cancelling download {download_id}: {e}")
        return False
def retry_download(download_id: int) -> bool:
    """
    Retry a failed download.

    Resets the record back to 'pending', clears progress/error fields and
    timestamps, and increments retry_count. Returns True on success.
    """
    try:
        client = get_supabase_client()
        existing = get_download_by_id(download_id)
        if not existing:
            return False
        # Track how many attempts have been made so far.
        retry_count = existing.get('retry_count', 0) + 1
        reset_fields = {
            'status': 'pending',
            'retry_count': retry_count,
            'error_message': None,
            'progress_percent': 0.0,
            'downloaded_size': 0,
            'started_at': None,
            'completed_at': None
        }
        response = (client.table('downloads')
                    .update(reset_fields)
                    .eq('id', download_id)
                    .execute())
        if not response.data:
            return False
        logger.debug(
            f"Retrying download {download_id} (attempt #{retry_count})")
        return True
    except Exception as e:
        logger.error(f"Error retrying download {download_id}: {e}")
        return False
# ==================== DELETE FUNCTIONS ====================
def delete_download(download_id: int) -> bool:
    """Delete a download record (usually for completed/failed downloads)."""
    try:
        table = get_supabase_client().table('downloads')
        response = table.delete().eq('id', download_id).execute()
        if not response.data:
            return False
        logger.debug(f"Deleted download {download_id}")
        return True
    except Exception as e:
        logger.error(f"Error deleting download {download_id}: {e}")
        return False
def delete_batch(batch_id: str) -> bool:
    """Delete every download record belonging to a batch."""
    try:
        table = get_supabase_client().table('downloads')
        response = table.delete().eq('batch_id', batch_id).execute()
        if not response.data:
            return False
        logger.debug(
            f"Deleted batch {batch_id} ({len(response.data)} files)")
        return True
    except Exception as e:
        logger.error(f"Error deleting batch {batch_id}: {e}")
        return False
# ==================== BATCH OPERATIONS ====================
def get_batch_summary(batch_id: str) -> Optional[Dict[str, Any]]:
    """
    Get summary statistics for a download batch.

    Returns a dict with batch_id, ge_id, lang, mode, total_files,
    completed_files, failed_files, total_size, downloaded_size, status
    (derived from the per-file statuses), created_at, started_at,
    completed_at and duration_seconds — or None when the batch is empty
    or an error occurs.
    """
    try:
        rows = get_downloads_by_batch(batch_id)
        if not rows:
            return None
        total_files = len(rows)
        statuses = [r['status'] for r in rows]
        completed_files = statuses.count('completed')
        failed_files = statuses.count('failed')
        # Sizes may be NULL in the DB, so coalesce to 0.
        total_size = sum(r.get('file_size') or 0 for r in rows)
        downloaded_size = sum(r.get('downloaded_size') or 0 for r in rows)
        # Derive an overall batch status from the per-file statuses.
        if completed_files == total_files:
            batch_status = 'completed'
        elif failed_files == total_files:
            batch_status = 'failed'
        elif any(s in ('pending', 'downloading') for s in statuses):
            batch_status = 'downloading'
        else:
            batch_status = 'partial_failed'
        # Timestamps: batch starts at the earliest file start, and is only
        # "completed" once every file carries a completed_at value.
        created_at = min(r['created_at'] for r in rows)
        started_times = [r['started_at'] for r in rows if r.get('started_at')]
        completed_times = [r['completed_at']
                           for r in rows if r.get('completed_at')]
        started_at = min(started_times) if started_times else None
        completed_at = None
        if completed_times and len(completed_times) == total_files:
            completed_at = max(completed_times)
        duration_seconds = None
        if started_at and completed_at:
            # Timestamps are ISO-8601; normalize a trailing 'Z' to an offset.
            begin = datetime.fromisoformat(started_at.replace('Z', '+00:00'))
            finish = datetime.fromisoformat(
                completed_at.replace('Z', '+00:00'))
            duration_seconds = (finish - begin).total_seconds()
        head = rows[0]
        return {
            "batch_id": batch_id,
            "ge_id": head['ge_id'],
            "lang": head['lang'],
            "mode": head['mode'],
            "total_files": total_files,
            "completed_files": completed_files,
            "failed_files": failed_files,
            "total_size": total_size,
            "downloaded_size": downloaded_size,
            "status": batch_status,
            "created_at": created_at,
            "started_at": started_at,
            "completed_at": completed_at,
            "duration_seconds": duration_seconds
        }
    except Exception as e:
        logger.error(f"Error getting batch summary for {batch_id}: {e}")
        return None

View File

@ -0,0 +1,469 @@
"""
Module for MongoDB connection and data access.
Combines logic from the old project's mongodb.py and mongodb_submissions.py
"""
import os
import logging
import re
from datetime import datetime, timedelta
from typing import List, Dict, Optional, Any
from pymongo import MongoClient, ASCENDING, DESCENDING
from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError, DuplicateKeyError
from dotenv import load_dotenv
# Load environment variables from .env.local first, then .env
load_dotenv('.env.local')
load_dotenv() # Fallback to .env
# Use logger from root (configured in main.py)
logger = logging.getLogger(__name__)
# --- MongoDB Config ---
# Fail fast at import time: the module is unusable without a URI.
MONGODB_URI = os.getenv("MONGODB_URI")
if not MONGODB_URI:
    raise ValueError("MONGODB_URI not found in environment variables")
DATABASE_NAME = "schedule"
SUBMISSIONS_COLLECTION = "submissions"
TITLES_COLLECTION = "titles_data"
# --- Connection Caching ---
# Module-level singletons, lazily populated by the getters below and
# reused across calls; close_mongodb_connection() resets the client.
_mongodb_client = None
_submissions_collection = None
_titles_collection = None
# ----------------------
# Connection helpers
# ----------------------
def get_db_connection():
    """Initializes and returns the MongoDB database connection with caching."""
    global _mongodb_client
    if _mongodb_client is not None:
        return _mongodb_client[DATABASE_NAME]
    try:
        logger.debug("Initializing new MongoDB connection...")
        client = MongoClient(
            MONGODB_URI,
            serverSelectionTimeoutMS=5000,
            connectTimeoutMS=10000,
            socketTimeoutMS=10000
        )
        # Ping so a bad URI fails here rather than on first real query.
        client.admin.command('ping')
        logger.debug("MongoDB connection successful.")
        _mongodb_client = client
    except (ConnectionFailure, ServerSelectionTimeoutError) as e:
        logger.error(f"Could not connect to MongoDB: {e}")
        _mongodb_client = None  # Reset on failure
        raise Exception(f"Không thể kết nối MongoDB: {e}")
    except Exception as e:
        logger.error(
            f"An unexpected error occurred during MongoDB initialization: {e}")
        _mongodb_client = None  # Reset on failure
        raise Exception(f"Lỗi khởi tạo MongoDB: {e}")
    return _mongodb_client[DATABASE_NAME]
def get_submissions_collection():
    """Returns the submissions collection, initializing the connection if needed."""
    global _submissions_collection
    if _submissions_collection is None:
        # First access: bind the collection and make sure indexes exist.
        _submissions_collection = get_db_connection()[SUBMISSIONS_COLLECTION]
        _create_submission_indexes()
    return _submissions_collection
def get_titles_collection():
    """Returns the titles collection, initializing the connection if needed."""
    global _titles_collection
    if _titles_collection is None:
        _titles_collection = get_db_connection()[TITLES_COLLECTION]
    return _titles_collection
def close_mongodb_connection():
    """Closes the MongoDB connection if it exists."""
    global _mongodb_client
    if not _mongodb_client:
        return
    _mongodb_client.close()
    _mongodb_client = None
    logger.debug("MongoDB connection closed.")
# -------------------------------
# Indexes and initialization
# -------------------------------
def _create_submission_indexes():
    """Creates necessary indexes for the submissions collection."""
    try:
        collection = get_submissions_collection()
        # (keys, options) pairs; names keep index management idempotent.
        specs = [
            # Unique lookup key.
            ("submission_id", {"unique": True, "name": "idx_submission_id"}),
            # Newest-first listing.
            ([("created_at", DESCENDING)], {"name": "idx_created_at"}),
            # Filtering by status.
            ("status", {"name": "idx_status"}),
            # Queue ordering within a status.
            ([("status", ASCENDING), ("queue_position", ASCENDING)],
             {"name": "idx_queue"}),
            # TTL: purge documents 30 days (2592000 s) after created_at.
            ("created_at", {"expireAfterSeconds": 2592000, "name": "idx_ttl"}),
        ]
        for keys, options in specs:
            collection.create_index(keys, **options)
        logger.debug("Submission indexes created successfully.")
    except Exception as e:
        logger.error(f"Error creating submission indexes: {e}")
# ---------------------------------------------------
# Submissions Logic (adapted from mongodb_submissions.py)
# ---------------------------------------------------
def create_submission(submission_id: str, usernames: List[str], ge_input: str) -> Dict[str, Any]:
    """Creates a new submission with 'pending' status and assigns a queue_position.

    The new entry is placed at the back of the pending queue. Raises on
    duplicate submission_id or any storage failure.
    """
    try:
        collection = get_submissions_collection()
        now = datetime.utcnow()
        # Find the current tail of the pending queue to append after it.
        tail = collection.find_one(
            {"status": "pending"}, sort=[("queue_position", DESCENDING)])
        if tail and tail.get("queue_position") is not None:
            next_position = tail["queue_position"] + 1
        else:
            next_position = 1
        submission_doc = {
            "submission_id": submission_id,
            "timestamp": now,
            "status": "pending",
            "input": {
                "usernames": usernames,
                "ge_input": ge_input
            },
            "results": [],
            "error_message": None,
            "created_at": now,
            "updated_at": now,
            "processing_started_at": None,
            "processing_completed_at": None,
            "queue_position": next_position,
            "retry_count": 0,
            "last_retry_at": None
        }
        inserted = collection.insert_one(submission_doc)
        # Stringify ObjectId so the document is JSON-serializable.
        submission_doc["_id"] = str(inserted.inserted_id)
        logger.debug(
            f"Created submission: {submission_id} at position {next_position}")
        return submission_doc
    except DuplicateKeyError:
        raise Exception(f"Submission ID {submission_id} đã tồn tại")
    except Exception as e:
        logger.error(f"Error creating submission: {e}")
        raise Exception(f"Không thể tạo submission: {e}")
def get_submission_by_id(submission_id: str) -> Optional[Dict[str, Any]]:
    """Fetches a submission by its submission_id (None when absent or on error)."""
    try:
        doc = get_submissions_collection().find_one(
            {"submission_id": submission_id})
        if doc is None:
            return None
        doc["_id"] = str(doc["_id"])  # JSON-friendly id
        return doc
    except Exception as e:
        logger.error(f"Error fetching submission {submission_id}: {e}")
        return None
def get_submissions(limit: int = 50, status: Optional[str] = None) -> List[Dict[str, Any]]:
    """Fetches submissions, optionally filtered by status, newest first."""
    try:
        collection = get_submissions_collection()
        query = {"status": status} if status else {}
        cursor = (collection.find(query)
                  .sort("created_at", DESCENDING)
                  .limit(limit))
        # Copy each doc with its ObjectId stringified.
        return [dict(doc, _id=str(doc["_id"])) for doc in cursor]
    except Exception as e:
        logger.error(f"Error fetching submissions: {e}")
        return []
def get_pending_submissions() -> List[Dict[str, Any]]:
    """Returns pending submissions ordered by queue_position ascending."""
    try:
        cursor = get_submissions_collection().find(
            {"status": "pending"}).sort("queue_position", ASCENDING)
        # Copy each doc with its ObjectId stringified.
        return [dict(doc, _id=str(doc["_id"])) for doc in cursor]
    except Exception as e:
        logger.error(f"Error fetching pending submissions: {e}")
        return []
def get_next_pending_submission() -> Optional[Dict[str, Any]]:
    """Return the next pending submission (lowest queue_position), or None."""
    try:
        doc = get_submissions_collection().find_one(
            {"status": "pending"}, sort=[("queue_position", ASCENDING)])
        if doc is not None:
            doc["_id"] = str(doc["_id"])
        return doc
    except Exception as e:
        logger.error(f"Error fetching next pending submission: {e}")
        return None
def update_submission(
    submission_id: str,
    status: str,
    results: Optional[List[Dict]] = None,
    error_message: Optional[str] = None
) -> bool:
    """Updates the status and results of a submission and manages timestamps/queue position.

    Args:
        submission_id: ID of the submission to update.
        status: New status ('processing', 'completed', 'failed', ...).
        results: Result payload, stored only when status == 'completed'.
        error_message: Failure detail, stored only when status == 'failed'.

    Returns:
        True when a matching submission exists (even if the write was a
        no-op because the values were unchanged), False otherwise.
    """
    try:
        collection = get_submissions_collection()
        update_data = {
            "status": status,
            "updated_at": datetime.utcnow()
        }
        if status == "processing":
            update_data["processing_started_at"] = datetime.utcnow()
        elif status in ["completed", "failed"]:
            update_data["processing_completed_at"] = datetime.utcnow()
            # Terminal states leave the queue.
            update_data["queue_position"] = None
        if status == "completed" and results is not None:
            update_data["results"] = results
        if status == "failed" and error_message is not None:
            update_data["error_message"] = error_message
        result = collection.update_one(
            {"submission_id": submission_id}, {"$set": update_data})
        # BUG FIX: check matched_count, not modified_count. A write that
        # sets the same status twice matches but modifies nothing, and the
        # old check wrongly reported the submission as missing.
        if result.matched_count > 0:
            logger.debug(
                f"Updated submission {submission_id} to status {status}")
            return True
        else:
            logger.warning(f"No submission found with ID: {submission_id}")
            return False
    except Exception as e:
        logger.error(f"Error updating submission {submission_id}: {e}")
        return False
def delete_submission(submission_id: str) -> bool:
    """Deletes a submission by its ID; True when a document was removed."""
    try:
        result = get_submissions_collection().delete_one(
            {"submission_id": submission_id})
        if result.deleted_count == 0:
            logger.warning(f"No submission found with ID: {submission_id}")
            return False
        logger.debug(f"Deleted submission: {submission_id}")
        return True
    except Exception as e:
        logger.error(f"Error deleting submission {submission_id}: {e}")
        return False
def increment_retry_count(submission_id: str) -> bool:
    """Increment retry_count and set last_retry_at/updated_at."""
    try:
        now = datetime.utcnow()
        result = get_submissions_collection().update_one(
            {"submission_id": submission_id},
            {
                "$inc": {"retry_count": 1},
                "$set": {"last_retry_at": now, "updated_at": now}
            }
        )
        return result.modified_count > 0
    except Exception as e:
        logger.error(f"Error increment retry count for {submission_id}: {e}")
        return False
def requeue_stuck_submissions(timeout_minutes: int = 30) -> int:
    """Requeue submissions stuck in processing longer than timeout_minutes back to pending."""
    try:
        cutoff = datetime.utcnow() - timedelta(minutes=timeout_minutes)
        result = get_submissions_collection().update_many(
            {"status": "processing",
             "processing_started_at": {"$lt": cutoff}},
            {"$set": {"status": "pending",
                      "updated_at": datetime.utcnow(),
                      "processing_started_at": None},
             "$inc": {"retry_count": 1}}
        )
        logger.debug(f"Requeued {result.modified_count} stuck submissions")
        return result.modified_count
    except Exception as e:
        logger.error(f"Error requeue stuck submissions: {e}")
        return 0
def cleanup_excess_submissions(max_keep: int = 15) -> int:
    """Keep only the newest `max_keep` completed/failed submissions; delete older ones."""
    try:
        collection = get_submissions_collection()
        finished = {"status": {"$in": ["completed", "failed"]}}
        count = collection.count_documents(finished)
        if count <= max_keep:
            logger.debug(
                f"Current completed/failed count ({count}) <= max_keep ({max_keep}), nothing to cleanup")
            return 0
        # Oldest finished documents beyond the retention window.
        surplus = count - max_keep
        victims = list(collection.find(finished, {"_id": 1})
                       .sort("created_at", ASCENDING).limit(surplus))
        if not victims:
            return 0
        result = collection.delete_many(
            {"_id": {"$in": [v["_id"] for v in victims]}})
        logger.debug(f"Cleaned up {result.deleted_count} excess submissions")
        return result.deleted_count
    except Exception as e:
        logger.error(f"Error cleanup excess submissions: {e}")
        return 0
def cleanup_old_submissions(days: int = 30) -> int:
    """Delete completed/failed submissions older than `days` days."""
    try:
        cutoff = datetime.utcnow() - timedelta(days=days)
        result = get_submissions_collection().delete_many({
            "created_at": {"$lt": cutoff},
            "status": {"$in": ["completed", "failed"]}
        })
        logger.debug(f"Cleaned up {result.deleted_count} old submissions")
        return result.deleted_count
    except Exception as e:
        logger.error(f"Error cleanup old submissions: {e}")
        return 0
def get_statistics() -> Dict[str, int]:
    """Return counts grouped by status and total."""
    try:
        grouped = get_submissions_collection().aggregate(
            [{"$group": {"_id": "$status", "count": {"$sum": 1}}}])
        stats = {"total": 0, "pending": 0,
                 "processing": 0, "completed": 0, "failed": 0}
        for row in grouped:
            status = row.get("_id")
            count = row.get("count", 0)
            # Only well-known statuses get their own bucket, but every
            # document counts toward the total.
            if status in stats:
                stats[status] = count
            stats["total"] += count
        return stats
    except Exception as e:
        logger.error(f"Error getting statistics: {e}")
        return {"total": 0, "pending": 0, "processing": 0, "completed": 0, "failed": 0}
# ---------------------------------------------------
# Titles Logic (from mongodb.py)
# ---------------------------------------------------
# Note: This part is not directly used by the permission page,
# but it's good to have it here for future use.
def get_tms_data(ge_id: str, orig_lang: str) -> Optional[str]:
    """
    Fetches the TMS ID from the titles_data collection.
    Returns the TMS ID as a string or None if not found.
    """
    try:
        query = {
            "geId": str(ge_id).strip(),
            "lang": str(orig_lang).strip().upper(),
        }
        document = get_titles_collection().find_one(query)
        if not document:
            logger.warning(
                f"No document found for geId: {ge_id}, lang: {orig_lang}")
            return None
        # Prefer the project id embedded in the TMS link.
        tms_link = document.get("trTmsLink")
        if isinstance(tms_link, str) and tms_link:
            match = re.search(r'/project/(\d+)', tms_link)
            if match:
                return match.group(1)
        # Fall back to an explicit tmsId field.
        direct_id = document.get("tmsId")
        if direct_id:
            return str(direct_id).strip()
        return None
    except Exception as e:
        logger.error(f"Error querying MongoDB for TMS data: {e}")
        return None
def get_path_from_tms_data(ge_id: str, orig_lang: str) -> Optional[str]:
    """
    Fetches the NAS path from the titles_data collection for raw file downloads.
    Same lookup as get_tms_data, but returns the 'path' field.
    Returns the path as a string or None if not found.
    """
    try:
        query = {
            "geId": str(ge_id).strip(),
            "lang": str(orig_lang).strip().upper(),
        }
        document = get_titles_collection().find_one(query)
        if not document:
            logger.warning(
                f"No document found for geId: {ge_id}, lang: {orig_lang}")
            return None
        path = document.get("path")
        if isinstance(path, str) and path:
            return str(path).strip()
        logger.warning(
            f"No path field found for geId: {ge_id}, lang: {orig_lang}")
        return None
    except Exception as e:
        logger.error(f"Error querying MongoDB for path data: {e}")
        return None
def get_sharing_link_from_tms_data(ge_id: str, orig_lang: str) -> Optional[str]:
    """
    Fetches the sharing link (linkRaw) from the titles_data collection.
    Used for displaying source in sharing mode downloads.
    Returns the linkRaw as a string or None if not found.
    """
    try:
        query = {
            "geId": str(ge_id).strip(),
            "lang": str(orig_lang).strip().upper(),
        }
        document = get_titles_collection().find_one(query)
        if not document:
            logger.warning(
                f"No document found for geId: {ge_id}, lang: {orig_lang}")
            return None
        link_raw = document.get("linkRaw")
        if isinstance(link_raw, str) and link_raw:
            return str(link_raw).strip()
        logger.warning(
            f"No linkRaw field found for geId: {ge_id}, lang: {orig_lang}")
        return None
    except Exception as e:
        logger.error(f"Error querying MongoDB for linkRaw data: {e}")
        return None

View File

@ -0,0 +1,70 @@
"""
NAS API Module - FileStation Operations
"""
from .config import (
BASE_URL,
USERNAME,
PASSWORD,
DESTINATION_PATH,
session,
logger
)
from .exceptions import (
NASAuthenticationError,
NASConnectionError,
NASAPIError
)
from .session import (
save_sid,
load_sid
)
from .auth import (
login_with_otp,
authenticate_with_otp
)
from .file_operations import (
syno_entry_request,
test_session_validity,
list_folder_contents,
list_shares,
get_files_for_path,
download_single_file_aria2,
cleanup_duplicates_before_download,
download_files_to_destination,
download_files_as_single_zip,
)
__all__ = [
# Config
'BASE_URL',
'USERNAME',
'PASSWORD',
'DESTINATION_PATH',
'session',
'logger',
# Exceptions
'NASAuthenticationError',
'NASConnectionError',
'NASAPIError',
# Session
'save_sid',
'load_sid',
# Auth
'login_with_otp',
'authenticate_with_otp',
# File Operations
'syno_entry_request',
'test_session_validity',
'list_folder_contents',
'list_shares',
'get_files_for_path',
'download_single_file_aria2',
'cleanup_duplicates_before_download',
'download_files_to_destination',
'download_files_as_single_zip',
]

View File

@ -0,0 +1,79 @@
"""
NAS API Authentication
Handle OTP login and authentication.
"""
import requests
from typing import Tuple, Optional
from .config import BASE_URL, USERNAME, PASSWORD, session, logger
from .session import save_sid
from .exceptions import NASAuthenticationError, NASConnectionError
def login_with_otp(otp_code: str) -> str:
    """
    Login to NAS with OTP code.

    Args:
        otp_code: One-time password from the user.

    Returns:
        The session ID (SID) on success; the SID is also persisted via save_sid.

    Raises:
        NASAuthenticationError: when the NAS rejects the credentials/OTP.
        NASConnectionError: on network-level failures.
    """
    try:
        auth_url = f"{BASE_URL}/auth.cgi"
        auth_params = {
            "api": "SYNO.API.Auth",
            "version": "6",
            "method": "login",
            "account": USERNAME,
            "passwd": PASSWORD,
            "session": "FileStation",
            "format": "sid",
            "otp_code": otp_code
        }
        resp = session.get(auth_url, params=auth_params,
                           verify=False, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        if data.get("success"):
            sid = data["data"]["sid"]
            save_sid(sid)
            logger.debug("NAS login successful")
            return sid
        else:
            error_msg = data.get("error", {})
            logger.error(f"NAS login failed: {error_msg}")
            raise NASAuthenticationError(
                f"Đăng nhập NAS thất bại: {error_msg}")
    except requests.exceptions.RequestException as e:
        logger.error(f"Network error during NAS login: {e}")
        raise NASConnectionError(f"Lỗi kết nối NAS: {e}")
    except NASAuthenticationError:
        # BUG FIX: without this pass-through, the NASAuthenticationError
        # raised above was swallowed by the generic handler below, which
        # logged a misleading "Unexpected error" and double-wrapped the
        # failure message.
        raise
    except Exception as e:
        logger.error(f"Unexpected error during NAS login: {e}")
        raise NASAuthenticationError(f"Lỗi đăng nhập NAS: {e}")
def authenticate_with_otp(otp_code: str) -> Tuple[str, Optional[str]]:
    """
    Authenticate with OTP and save session.

    Args:
        otp_code: The OTP code from user.

    Returns:
        Tuple[str, Optional[str]]:
            - status: "success" or "error"
            - message: Success message or error description
    """
    try:
        # login_with_otp persists the SID itself; the returned value is
        # not needed here (previously bound to an unused local).
        login_with_otp(otp_code)
        return "success", "Đăng nhập thành công"
    except (NASAuthenticationError, NASConnectionError) as e:
        # Expected failures: surface the message as-is.
        return "error", str(e)
    except Exception as e:
        logger.error(f"Unexpected error during OTP authentication: {e}")
        return "error", f"Lỗi đăng nhập: {e}"

View File

@ -0,0 +1,60 @@
"""
NAS API Configuration
Environment variables and global settings.
"""
import os
import logging
import requests
import urllib3
from typing import cast
from dotenv import load_dotenv
# Load environment variables from .env.local first, then .env
load_dotenv('.env.local')
load_dotenv() # Fallback to .env
# Disable SSL warnings for self-signed certificates
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Use logger from root (configured in main.py)
logger = logging.getLogger(__name__)
# === NAS Configuration from Environment ===
# NO FALLBACK VALUES - Must be explicitly set in .env.local
_base_url = os.getenv("NAS_BASE_URL")
_username = os.getenv("NAS_USERNAME")
_password = os.getenv("NAS_PASSWORD")
_env_dest_path = os.getenv("NAS_DESTINATION_PATH")
# Validate all required environment variables
# Collect every missing name so the error lists them all at once.
missing_vars = []
if not _base_url:
    missing_vars.append("NAS_BASE_URL")
if not _username:
    missing_vars.append("NAS_USERNAME")
if not _password:
    missing_vars.append("NAS_PASSWORD")
if not _env_dest_path:
    missing_vars.append("NAS_DESTINATION_PATH")
if missing_vars:
    # Fail fast at import time: downstream modules cannot work without these.
    error_msg = f"❌ CRITICAL: Missing required environment variables in .env.local: {', '.join(missing_vars)}"
    logger.error(error_msg)
    raise RuntimeError(error_msg)
# After validation, cast to str (guaranteed non-None after error check above)
BASE_URL: str = cast(str, _base_url)
USERNAME: str = cast(str, _username)
PASSWORD: str = cast(str, _password)
# Destination path is normalized to backslash separators (Windows-style).
DESTINATION_PATH: str = cast(str, _env_dest_path).replace("/", "\\")
print(f"✅ [INIT] NAS Configuration loaded successfully")
print(f" - Base URL: {BASE_URL}")
print(f" - Username: {USERNAME}")
print(f" - Destination Path: {repr(DESTINATION_PATH)}")
logger.debug(
    f"NAS Configuration: BASE_URL={BASE_URL}, USERNAME={USERNAME}, DESTINATION_PATH={DESTINATION_PATH}")
# Global session object
# Shared requests.Session reused by every NAS API call in this package.
session = requests.Session()

View File

@ -0,0 +1,19 @@
"""
NAS API Exceptions
Custom exceptions for NAS operations.
"""
class NASAuthenticationError(Exception):
    """Raised when NAS authentication fails (e.g. rejected credentials/OTP login)"""
    pass
class NASConnectionError(Exception):
    """Raised when a network-level NAS connection/request fails"""
    pass
class NASAPIError(Exception):
    """Raised when the NAS API call succeeds at transport level but returns an error"""
    pass

View File

@ -0,0 +1,659 @@
"""
NAS API File Operations
File listing, downloading, and management.
"""
import os
import json
import shutil
import requests
from typing import List, Dict, Optional, Tuple, Union, TYPE_CHECKING
from urllib.parse import urlencode
from .config import BASE_URL, DESTINATION_PATH, session, logger
from .session import load_sid
from .exceptions import NASConnectionError, NASAPIError
if TYPE_CHECKING:
from ..aria2.download_manager import Aria2DownloadManager
# aria2 integration
# Enabled by default; set USE_ARIA2=false in the environment to opt out.
USE_ARIA2 = os.getenv('USE_ARIA2', 'true').lower() == 'true'
# Cached singleton: None (not yet initialized), False (unavailable),
# or an Aria2DownloadManager instance once initialized successfully.
# None, False (unavailable), or Aria2DownloadManager
_aria2_manager: Optional[Union[bool, "Aria2DownloadManager"]] = None
def get_aria2_manager() -> "Aria2DownloadManager":
    """Get or create aria2 manager instance. Raises error if unavailable."""
    global _aria2_manager
    if _aria2_manager is None and USE_ARIA2:
        try:
            from ..aria2.download_manager import get_aria2_manager as _get_manager
            _aria2_manager = _get_manager()
            if not _aria2_manager:
                raise RuntimeError("aria2 manager returned None")
            logger.debug(
                "✅ aria2 manager initialized for NAS API downloads")
        except Exception as e:
            logger.error(f"❌ CRITICAL: aria2 not available: {e}")
            raise RuntimeError(
                f"aria2 is required but not available: {e}") from e
    # Sentinel values mean aria2 was disabled or never came up.
    if _aria2_manager is False or _aria2_manager is None:
        raise RuntimeError("aria2 is required but not initialized")
    return _aria2_manager  # type: ignore
def syno_entry_request(sid: str, calls: List[Dict]) -> Dict:
    """
    Make a SYNO.Entry.Request with multiple API calls.
    Returns the JSON response.
    Raises NASAPIError on API failure.
    """
    try:
        # Compound requests batch several FileStation calls in one round trip.
        params = {
            "api": "SYNO.Entry.Request",
            "version": 1,
            "method": "request",
            "_sid": sid,
            "compound": json.dumps(calls)
        }
        resp = session.post(f"{BASE_URL}/entry.cgi", data=params,
                            verify=False, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        logger.debug(f"NAS API response: {data}")
        return data
    except requests.exceptions.RequestException as e:
        logger.error(f"Network error during NAS API call: {e}")
        raise NASConnectionError(f"Lỗi kết nối NAS API: {e}")
    except Exception as e:
        logger.error(f"Unexpected error during NAS API call: {e}")
        raise NASAPIError(f"Lỗi NAS API: {e}")
def test_session_validity(sid: str) -> bool:
    """
    Test if the current session ID is still valid by attempting a simple API call.
    Returns True if valid, False if expired/invalid.
    """
    try:
        # list_share is cheap and requires a live session.
        data = syno_entry_request(sid, [{
            "api": "SYNO.FileStation.List",
            "method": "list_share",
            "version": 2
        }])
        result = data.get("data", {}).get("result", [])
        if result and result[0].get("success"):
            logger.debug("Session ID is valid")
            return True
        logger.warning("Session ID appears to be invalid")
        return False
    except Exception as e:
        logger.warning(f"Session validation failed: {e}")
        return False
def list_folder_contents(sid: str, folder_path: str) -> Tuple[bool, List[Dict], Optional[str]]:
    """
    List files and folders in the specified path.

    Args:
        sid: Session ID
        folder_path: Path to list (e.g., "/Comic_TMS_L/DKI/JP")

    Returns:
        (success, files, error_message) — error_message is None on success.
    """
    try:
        data = syno_entry_request(sid, [{
            "api": "SYNO.FileStation.List",
            "method": "list",
            "version": 2,
            "folder_path": folder_path,
            "additional": ["real_path", "size", "owner", "time", "perm", "type"]
        }])
        result = data.get("data", {}).get("result", [])
        if not result:
            error_msg = "Invalid API response format"
            logger.error(f"Failed to list folder {folder_path}: {error_msg}")
            return False, [], error_msg
        first = result[0]
        if first.get("success"):
            files = first.get("data", {}).get("files", [])
            logger.debug(
                f"Successfully listed {len(files)} items in {folder_path}")
            return True, files, None
        error_info = first.get("error", {})
        error_code = error_info.get("code") if isinstance(
            error_info, dict) else None
        # Map well-known FileStation error codes to user-facing messages.
        if error_code == 408:
            error_msg = f"Thư mục không tồn tại: {folder_path}"
        elif error_code == 400:
            error_msg = f"Đường dẫn không hợp lệ: {folder_path}"
        elif error_code == 402:
            error_msg = f"Không có quyền truy cập: {folder_path}"
        else:
            error_msg = f"Lỗi NAS (code {error_code}): {error_info}"
        logger.error(
            f"Failed to list folder {folder_path}: {error_msg}")
        return False, [], error_msg
    except Exception as e:
        error_msg = f"Exception during folder listing: {e}"
        logger.error(error_msg)
        return False, [], error_msg
def list_shares(sid: str) -> Tuple[bool, List[str], Optional[str]]:
    """
    List all available root shares.

    Returns:
        (success, share_names, error_message) — error_message is None on success.
    """
    try:
        data = syno_entry_request(sid, [{
            "api": "SYNO.FileStation.List",
            "method": "list_share",
            "version": 2
        }])
        result = data.get("data", {}).get("result", [])
        if not result:
            error_msg = "Invalid API response format"
            logger.error(f"Failed to list shares: {error_msg}")
            return False, [], error_msg
        first = result[0]
        if not first.get("success"):
            error_info = first.get("error", {})
            error_msg = f"NAS API Error: {error_info}"
            logger.error(f"Failed to list shares: {error_msg}")
            return False, [], error_msg
        share_names = [entry["name"]
                       for entry in first.get("data", {}).get("shares", [])]
        logger.debug(f"Successfully listed {len(share_names)} shares")
        return True, share_names, None
    except Exception as e:
        error_msg = f"Exception during shares listing: {e}"
        logger.error(error_msg)
        return False, [], error_msg
def get_files_for_path(folder_path: str) -> Tuple[str, List[Dict], Optional[str]]:
    """
    High-level function to get files for a given path.
    Handles session management automatically.

    Args:
        folder_path: Path to list (e.g., "/Comic_TMS_L/DKI/JP")

    Returns:
        (status, files, message) where status is "success",
        "otp_required", or "error".
    """
    try:
        sid = load_sid()
        # Guard clauses: missing or stale session both require fresh OTP.
        if not sid:
            logger.debug("No session found, OTP required")
            return "otp_required", [], "Cần đăng nhập. Vui lòng nhập mã OTP."
        if not test_session_validity(sid):
            logger.debug("Session expired, OTP required")
            return "otp_required", [], "Phiên đăng nhập đã hết hạn. Vui lòng nhập mã OTP."
        ok, files, error_msg = list_folder_contents(sid, folder_path)
        if ok:
            return "success", files, "Đã tải danh sách file thành công"
        # error_msg already contains the detailed message.
        logger.warning(
            f"Failed to list files despite valid session: {error_msg}")
        return "error", [], error_msg
    except Exception as e:
        logger.error(f"Unexpected error in get_files_for_path: {e}")
        return "error", [], f"Lỗi hệ thống: {e}"
def download_single_file_aria2(
    sid: str,
    remote_path: str,
    local_save_path: str,
    is_dir: bool = False,
    progress_callback=None,
    max_speed: Optional[str] = None
) -> Tuple[bool, Optional[str], Optional[str]]:
    """
    Download a single NAS item through aria2 — no HTTP fallback; raises
    if aria2 is unavailable or the transfer fails.

    Args:
        sid: Session ID
        remote_path: Path on NAS
        local_save_path: Local file path to save to
        is_dir: Whether remote_path is a directory (served as a zip)
        progress_callback: Optional callback(downloaded_bytes, total_bytes)
        max_speed: Optional bandwidth limit (e.g., '100K')

    Returns:
        Tuple[success, error_message, gid]

    Raises:
        RuntimeError: aria2 missing, download failure, or unexpected error.
    """
    # Raises RuntimeError when aria2 is not available
    manager = get_aria2_manager()
    try:
        # Assemble the authenticated FileStation download URL
        query = urlencode({
            "api": "SYNO.FileStation.Download",
            "version": 2,
            "method": "download",
            "path": remote_path,
            "mode": "download",
            "_sid": sid,
        })
        download_url = f"{BASE_URL}/entry.cgi?{query}"
        logger.debug(
            f"[aria2] Downloading: {remote_path} -> {local_save_path}")

        ok, err, gid = manager.download_file(
            url=download_url,
            dest_path=local_save_path,
            progress_callback=progress_callback,
            max_download_limit=max_speed,
        )
        if ok:
            logger.debug(f"[aria2] ✅ Download success: {local_save_path}")
            return True, None, gid

        # No fallback path: surface the aria2 failure immediately
        err = err or "aria2 download failed"
        logger.error(f"[aria2] ❌ FAILED: {err}")
        raise RuntimeError(f"aria2 download failed: {err}")
    except RuntimeError:
        # Propagate aria2 failures unchanged
        raise
    except Exception as e:
        # Any unexpected error is also fatal — no fallback
        logger.error(f"[aria2] ❌ ERROR: {e}")
        raise RuntimeError(f"aria2 unexpected error: {e}") from e
def cleanup_duplicates_before_download(dest_path: str, file_name_pattern: str, exact_filename: str, delete_dirs: bool = True) -> None:
    """
    Remove stale copies of a file from ``dest_path`` before downloading.

    Every entry whose name contains ``file_name_pattern`` is deleted; the
    entry equal to ``exact_filename`` is handled first so the upcoming
    download can land cleanly.

    Args:
        dest_path: Destination directory path
        file_name_pattern: Original file name to search for (e.g., "[식자설정]")
        exact_filename: Exact filename of the file to be downloaded (e.g., "[식자설정].zip")
        delete_dirs: Whether matching directories may be removed
            (True for API mode, False for Sharing mode)
    """
    try:
        if not os.path.exists(dest_path):
            return

        for entry in os.listdir(dest_path):
            if file_name_pattern not in entry:
                continue
            full_path = os.path.join(dest_path, entry)
            try:
                if entry == exact_filename:
                    # Exact-name collision with the file about to download
                    if os.path.isfile(full_path):
                        os.remove(full_path)
                        logger.debug(f"Deleted existing file: {full_path}")
                    elif os.path.isdir(full_path) and delete_dirs:
                        shutil.rmtree(full_path)
                        logger.debug(
                            f"Deleted existing folder (exact match): {full_path}")
                    continue

                # Any other entry containing the pattern is a duplicate
                if os.path.isfile(full_path):
                    os.remove(full_path)
                    logger.debug(f"Cleaned up duplicate file: {full_path}")
                elif os.path.isdir(full_path):
                    if delete_dirs:
                        shutil.rmtree(full_path)
                        logger.debug(
                            f"Cleaned up duplicate folder: {full_path}")
                    else:
                        logger.debug(
                            f"Skipped deleting duplicate folder (Sharing Mode): {full_path}")
            except Exception as e:
                logger.warning(f"Could not delete {full_path}: {e}")
    except Exception as e:
        logger.error(
            f"Error cleaning up duplicates for '{file_name_pattern}': {e}")
def download_files_to_destination(
    files_info: List[Dict],
    ge_id: str,
    lang: str,
    base_destination: Optional[str] = None,
    progress_callback=None
) -> Tuple[str, List[Dict], Optional[str], Optional[str]]:
    """
    Download multiple files from NAS to network destination.
    Simplified version - no complex error handling, just download.
    Args:
        files_info: List of file dicts with keys: name, path, isdir
        ge_id: GE ID for folder naming (not used if base_destination provided)
        lang: Language code (not used if base_destination provided)
        base_destination: Full destination path. If None, will create GEID_LANG folder under DESTINATION_PATH
        progress_callback: Optional callable(file_index, total_files, info_dict)
            fired during each file's download; info_dict carries the current
            file's progress fields plus the whole files_status list.
    Returns:
        Tuple[str, List[Dict], Optional[str], Optional[str]]:
            - status: "success", "partial", or "error"
            - results: List of download results with success/error for each file
            - message: Overall status message
            - destination_path: The actual destination path where files were downloaded
    """
    try:
        # Validate session (persisted NAS SID loaded via load_sid)
        sid = load_sid()
        if not sid:
            return "error", [], "Session không hợp lệ. Vui lòng đăng nhập lại.", None
        # Determine destination path
        if base_destination is None:
            # Create GEID_LANG folder under default DESTINATION_PATH
            # NOTE(review): paths here are joined with "\\" — a Windows
            # destination share is assumed; confirm before porting.
            lang_upper = lang.upper()
            dest_folder = f"{ge_id}_{lang_upper}"
            if DESTINATION_PATH.endswith("\\"):
                dest_path = f"{DESTINATION_PATH}{dest_folder}"
            else:
                dest_path = f"{DESTINATION_PATH}\\{dest_folder}"
        else:
            # Use provided path directly (already includes GEID_LANG)
            dest_path = base_destination
        # Create destination directory
        os.makedirs(dest_path, exist_ok=True)
        logger.debug(f"Destination created: {dest_path}")
        results = []
        successful_downloads = 0
        total_files = len(files_info)
        # Initialize files_status for progress tracking; directories get a
        # ".zip" suffix because the NAS serves them as zip archives.
        files_status = [
            {
                "name": f"{file_info.get('name', 'unknown')}{'.zip' if file_info.get('isdir', False) else ''}",
                "status": "pending",
                "progress": None,
                "downloaded": 0,
                "total": None,
                "is_folder": file_info.get("isdir", False)
            }
            for file_info in files_info
        ]
        for idx, file_info in enumerate(files_info):
            file_name = file_info.get("name", "unknown")
            remote_path = file_info.get("path", "")
            is_dir = file_info.get("isdir", False)
            # Add .zip extension for directories
            local_filename = file_name
            if is_dir:
                local_filename += ".zip"
            # Cleanup duplicates BEFORE download
            cleanup_duplicates_before_download(
                dest_path, file_name, local_filename, delete_dirs=True)
            # Build file path manually
            local_file_path = f"{dest_path}\\{local_filename}"
            # Safety check: If file still exists (could not be deleted), append _NEW
            if os.path.exists(local_file_path):
                logger.warning(
                    f"Could not delete existing file {local_filename}, appending _NEW")
                name, ext = os.path.splitext(local_filename)
                local_filename = f"{name}_NEW{ext}"
                local_file_path = f"{dest_path}\\{local_filename}"
            # Update status to downloading
            files_status[idx]["status"] = "downloading"
            # Create file-specific progress callback.
            # The closure captures idx/local_filename late, but it is only
            # invoked while this iteration's blocking download runs, so
            # late binding is not an issue — presumably the aria2 manager
            # does not retain the callback past the call; TODO confirm.
            def file_progress_callback(downloaded_bytes, total_bytes):
                files_status[idx]["downloaded"] = downloaded_bytes
                files_status[idx]["total"] = total_bytes
                # assumes total_bytes is numeric (0 when unknown), never
                # None — TODO confirm the aria2 callback contract
                if total_bytes > 0:
                    progress_pct = (downloaded_bytes / total_bytes) * 100
                    files_status[idx]["progress"] = round(progress_pct, 1)
                else:
                    files_status[idx]["progress"] = None
                # Call parent progress callback
                if progress_callback:
                    progress_callback(idx, total_files, {
                        "current_file": local_filename,
                        "current_file_index": idx + 1,
                        "total_files": total_files,
                        "current_file_progress": files_status[idx].get("progress"),
                        "current_file_downloaded": downloaded_bytes,
                        "current_file_total": total_bytes if total_bytes > 0 else None,
                        "files_status": files_status
                    })
            # Download via aria2 (required; raises RuntimeError on aria2
            # failure, which the outer except turns into an "error" result)
            success, error_msg, gid = download_single_file_aria2(
                sid, remote_path, local_file_path, is_dir,
                progress_callback=file_progress_callback
            )
            # Update files_status
            if success:
                files_status[idx]["status"] = "completed"
                successful_downloads += 1
            else:
                files_status[idx]["status"] = "failed"
            result = {
                "file_name": file_name,
                "local_path": local_file_path,
                "success": success,
                "error_message": error_msg,
                "is_directory": is_dir
            }
            results.append(result)
        # No cleanup after download anymore
        # Determine overall status (total_files recomputed; same value as above)
        total_files = len(files_info)
        if successful_downloads == total_files:
            status = "success"
            message = f"Đã tải xuống {successful_downloads}/{total_files} file vào {dest_path}"
        elif successful_downloads > 0:
            status = "partial"
            message = f"Đã tải xuống {successful_downloads}/{total_files} file"
        else:
            status = "error"
            message = "Không thể tải xuống file nào"
        return status, results, message, dest_path
    except Exception as e:
        logger.error(f"Error in download_files_to_destination: {e}")
        return "error", [], f"Lỗi: {e}", None
def download_files_as_single_zip(
    files_info: List[Dict],
    dest_path: str,
    sid: Optional[str] = None
) -> Tuple[str, List[Dict], str, Optional[str]]:
    """
    Download multiple files/folders as a single zip file.
    Args:
        files_info: List of file info dicts with 'path', 'name', 'isdir' keys
        dest_path: Local destination path (network share)
        sid: Session ID for authentication
    Returns:
        Tuple of (status, results, message, zip_file_path)
        - status: "success", "error"
        - results: List of file info dicts with download status
        - message: Human-readable status message
        - zip_file_path: Full path to the created zip file
    Logic:
        - If only 1 file/folder: zip that single item (don't double-zip folders)
        - If multiple files: zip all into a single archive named after parent folder
    """
    try:
        if not sid:
            return "error", [], "Missing session ID", None
        if not files_info:
            return "error", [], "No files selected", None
        # Ensure destination directory exists
        os.makedirs(dest_path, exist_ok=True)
        # Determine zip file name
        if len(files_info) == 1:
            # Single file/folder: use its name for zip
            single_item = files_info[0]
            zip_base_name = single_item.get("name", "download")
        else:
            # Multiple files: find common parent folder name
            # Extract parent path from first file
            first_path = files_info[0].get("path", "")
            # Remove leading/trailing slashes and get parent
            clean_path = first_path.strip("/")
            path_parts = clean_path.split("/")
            if len(path_parts) > 1:
                # Use parent folder name
                zip_base_name = path_parts[-2]
            else:
                # Fallback to generic name
                zip_base_name = "download"
        # Sanitize filename: keep alphanumerics, space, dash, underscore;
        # everything else becomes '_'
        zip_base_name = "".join(c if c.isalnum() or c in (
            ' ', '-', '_') else '_' for c in zip_base_name)
        zip_filename = f"{zip_base_name}.zip"
        local_zip_path = os.path.join(dest_path, zip_filename)
        # Prepare path parameter for FileStation API
        # API expects comma-separated paths with proper JSON encoding
        # NOTE(review): paths are quoted by plain interpolation, not
        # json.dumps — a '"' inside a path would break this parameter.
        path_list = [f'"{file_info["path"]}"' for file_info in files_info]
        path_param = f'[{",".join(path_list)}]'
        logger.debug(
            f"Downloading {len(files_info)} items as single zip: {zip_filename}")
        logger.debug(f"Path parameter: {path_param}")
        # Download using FileStation Download API with multi-file mode
        # ('session' is the module-level requests session)
        download_url = f"{BASE_URL}/entry.cgi"
        params = {
            "api": "SYNO.FileStation.Download",
            "version": "2",
            "method": "download",
            "path": path_param,
            "mode": "download",
            "_sid": sid
        }
        response = session.get(download_url, params=params,
                               verify=False, timeout=300, stream=True)
        if response.status_code == 200:
            # Save zip file (streamed in 8 KiB chunks to bound memory)
            with open(local_zip_path, 'wb') as f:
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
            # Verify file was created
            # NOTE(review): only existence + non-zero size are checked; a
            # 200 response carrying an HTML error body would still pass.
            if os.path.exists(local_zip_path) and os.path.getsize(local_zip_path) > 0:
                file_size = os.path.getsize(local_zip_path)
                logger.debug(
                    f"Successfully downloaded: {local_zip_path} ({file_size} bytes)")
                # Create result entries for each file
                results = [
                    {
                        "file_name": file_info.get("name", "unknown"),
                        "local_path": local_zip_path,
                        "success": True,
                        "error_message": None,
                        "is_directory": file_info.get("isdir", False)
                    }
                    for file_info in files_info
                ]
                return "success", results, f"Đã tải xuống {len(files_info)} file vào {local_zip_path}", local_zip_path
            else:
                logger.error(
                    f"Downloaded file is empty or does not exist: {local_zip_path}")
                return "error", [], "File tải xuống bị lỗi (0 bytes)", None
        else:
            error_msg = f"HTTP {response.status_code}: {response.text[:200]}"
            logger.error(f"Download failed: {error_msg}")
            return "error", [], error_msg, None
    except Exception as e:
        logger.error(
            f"Error in download_files_as_single_zip: {e}", exc_info=True)
        return "error", [], f"Lỗi: {str(e)}", None

View File

@ -0,0 +1,46 @@
"""
NAS API Session Management
Handle session ID persistence via Supabase.
"""
from datetime import datetime, timezone
from typing import Optional
from .config import logger
from ..supabase_service import get_supabase_client
SESSION_ID = "nas_filestation"
def save_sid(sid: str) -> None:
    """Persist the NAS session ID to Supabase so later requests can reuse it."""
    try:
        # Both timestamp columns are refreshed on every save
        record = {
            "id": SESSION_ID,
            "data": {"sid": sid},
            "last_login": datetime.now(timezone.utc).isoformat(),
            "updated_at": datetime.now(timezone.utc).isoformat(),
        }
        client = get_supabase_client()
        client.table("sessions").upsert(record).execute()
        logger.debug(f"Session ID saved to Supabase: {sid[:8]}...")
    except Exception as e:
        logger.error(f"Error saving session ID to Supabase: {e}")
def load_sid() -> Optional[str]:
    """Fetch the stored NAS session ID from Supabase; None if absent or malformed."""
    try:
        client = get_supabase_client()
        result = client.table("sessions").select(
            "data").eq("id", SESSION_ID).single().execute()
        row = result.data
        # Defensive unwrap: row -> "data" payload -> non-empty "sid" string
        if row and isinstance(row, dict):
            payload = row.get("data")
            if isinstance(payload, dict):
                sid = payload.get("sid")
                if isinstance(sid, str) and sid:
                    logger.debug(
                        f"Session ID loaded from Supabase: {sid[:8]}...")
                    return sid
    except Exception as e:
        logger.error(f"Error loading session ID from Supabase: {e}")
    return None

59
backend/services/nas_service.py Executable file
View File

@ -0,0 +1,59 @@
"""
NAS Service - Direct re-export from nas_api
"""
# Re-export from nas_api
from .nas_api import (
# Config
BASE_URL,
USERNAME,
PASSWORD,
DESTINATION_PATH,
session,
logger,
# Exceptions
NASAuthenticationError,
NASConnectionError,
NASAPIError,
# Session
save_sid,
load_sid,
# Auth
login_with_otp,
authenticate_with_otp,
# File Operations
syno_entry_request,
test_session_validity,
list_folder_contents,
list_shares,
get_files_for_path,
download_single_file_aria2,
cleanup_duplicates_before_download,
download_files_to_destination,
download_files_as_single_zip,
)
__all__ = [
'BASE_URL',
'USERNAME',
'PASSWORD',
'DESTINATION_PATH',
'session',
'logger',
'NASAuthenticationError',
'NASConnectionError',
'NASAPIError',
'save_sid',
'load_sid',
'login_with_otp',
'authenticate_with_otp',
'syno_entry_request',
'test_session_validity',
'list_folder_contents',
'list_shares',
'get_files_for_path',
'download_single_file_aria2',
'cleanup_duplicates_before_download',
'download_files_to_destination',
'download_files_as_single_zip',
]

View File

@ -0,0 +1,55 @@
"""
NAS Sharing API Package
Handles Synology sharing link operations via Selenium.
Modules:
- session: Session management and credentials
- auth: Login and OTP authentication
- selenium_operations: File listing and download operations
"""
# Session management
from .session import (
get_username_password,
SharingSessionManager,
)
# Authentication
from .auth import (
get_dsm_credentials,
perform_login,
detect_otp_modal,
submit_otp_code,
wait_for_login_success,
is_logged_in,
)
# Selenium operations
from .selenium_operations import (
get_file_list,
encode_path_to_dlink,
prepare_download_url,
get_aria2_manager,
get_initial_path,
extract_sharing_id,
)
__all__ = [
# Session
'get_username_password',
'SharingSessionManager',
# Auth
'get_dsm_credentials',
'perform_login',
'detect_otp_modal',
'submit_otp_code',
'wait_for_login_success',
'is_logged_in',
# Selenium operations
'get_file_list',
'encode_path_to_dlink',
'prepare_download_url',
'get_aria2_manager',
'get_initial_path',
'extract_sharing_id',
]

View File

@ -0,0 +1,373 @@
"""
NAS Sharing Auth Module - Login flow với OTP modal support
EXTRACTED từ download_link.py, adapted cho modal OTP pattern
"""
import os
import time
from typing import Optional, Tuple, TYPE_CHECKING
from selenium import webdriver
from selenium.webdriver.common.by import By
if TYPE_CHECKING:
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def get_dsm_credentials() -> Tuple[str, str]:
    """
    Read DSM credentials from the environment.

    Returns:
        (username, password) tuple

    Raises:
        ValueError: If NAS_USERNAME or NAS_PASSWORD is unset (or empty)
            in .env.local
    """
    username = os.getenv("NAS_USERNAME")
    password = os.getenv("NAS_PASSWORD")
    # Empty strings count as missing, same as unset variables
    if username and password:
        return username, password
    raise ValueError("NAS_USERNAME and NAS_PASSWORD must be set in .env.local")
def perform_login(
    driver: webdriver.Chrome,
    username: Optional[str] = None,
    password: Optional[str] = None,
    otp_callback=None
) -> bool:
    """
    Perform DSM login with OTP modal support.

    Extracted from download_link.py DSMSeleniumLogin.login(); modified to
    obtain the OTP via a callback instead of manual console input.

    Args:
        driver: Selenium WebDriver instance
        username: DSM username (default: from env)
        password: DSM password (default: from env)
        otp_callback: Function() -> Optional[str] to get OTP code from modal.
            Should return None if timeout/cancelled.

    Returns:
        True if login successful
    """
    # Fall back to environment credentials when either value is missing
    if username is None or password is None:
        username, password = get_dsm_credentials()
    try:
        # Get DSM URL from env
        dsm_url = os.getenv("NAS_DSM_URL")
        if not dsm_url:
            raise ValueError("NAS_DSM_URL must be set in .env.local")
        print(f"\n🌐 Đang truy cập: {dsm_url}")
        print(" (Trang có thể mất 30-60s để load...)")
        # Navigate to DSM; a slow page load is tolerated and retried below
        try:
            driver.get(dsm_url)
        except Exception as e:
            # NOTE(review): 'e' is unused and the f-string below has no
            # placeholder — probably intended to include the error detail.
            print(f"⚠️ Timeout khi load trang, nhưng tiếp tục thử...")
        time.sleep(5)
        wait = WebDriverWait(driver, 30)
        # === STEP 1: Enter USERNAME ===
        print("🔍 BƯỚC 1: Đang tìm form username...")
        # NO FALLBACK - throw an error if no selector matches
        username_input = wait.until(
            EC.visibility_of_element_located((By.CSS_SELECTOR,
                "input#login_username, input[type='text'][name='username'], input.syno-ux-textfield[type='text']"))
        )
        print(f"📝 Nhập username: {username}")
        username_input.click()
        time.sleep(0.5)
        username_input.clear()
        time.sleep(0.5)
        username_input.send_keys(username)
        time.sleep(1)
        # Click Next button
        print("🖱️ Tìm nút Next...")
        next_button = _find_button(driver, [
            "div[syno-id='account-panel-next-btn']",  # DSM 7.x
            "div.login-btn[role='button']",
            "button#login-btn",
            "button[type='submit']",
        ])
        if not next_button:
            raise RuntimeError("Không tìm thấy nút Next")
        print("🖱️ Click nút Next...")
        next_button.click()
        time.sleep(3)
        # === STEP 2: Enter PASSWORD ===
        print("\n🔍 BƯỚC 2: Đang tìm form password...")
        # Try the DSM 7.x selector first, then two progressively more
        # generic ones. NOTE(review): the bare 'except:' clauses also
        # swallow KeyboardInterrupt/SystemExit.
        try:
            password_input = wait.until(
                EC.visibility_of_element_located((By.CSS_SELECTOR, "input[syno-id='password']"))
            )
        except:
            try:
                password_input = wait.until(
                    EC.visibility_of_element_located((By.CSS_SELECTOR, "input[type='password'][name='current-password']"))
                )
            except:
                password_input = wait.until(
                    EC.visibility_of_element_located((By.CSS_SELECTOR, "input[type='password']"))
                )
        print("🔑 Nhập password...")
        password_input.click()
        time.sleep(0.5)
        password_input.clear()
        time.sleep(0.5)
        password_input.send_keys(password)
        time.sleep(1)
        # Tick "Stay signed in" checkbox (best effort — missing is fine)
        print("☑️ Tick checkbox 'Stay signed in'...")
        try:
            stay_signed_checkbox = driver.find_element(By.CSS_SELECTOR,
                "div.login-checkbox input[type='checkbox']")
            if not stay_signed_checkbox.is_selected():
                checkbox_label = driver.find_element(By.CSS_SELECTOR,
                    "div.login-checkbox label.box")
                checkbox_label.click()
                print(" ✅ Đã tick 'Stay signed in'")
                time.sleep(0.5)
            else:
                print(" Checkbox đã được tick sẵn")
        except Exception as e:
            print(f" ⚠️ Không tìm thấy checkbox (không sao): {e}")
        time.sleep(0.5)
        # Click Sign In button
        print("🖱️ Tìm nút Sign In...")
        signin_button = _find_button(driver, [
            "div[syno-id='password-panel-next-btn']",  # DSM 7.x password panel
            "div[syno-id='account-panel-next-btn']",
            "div.login-btn[role='button']",
            "button#login-btn",
            "button[type='submit']",
        ])
        if not signin_button:
            raise RuntimeError("Không tìm thấy nút Sign In")
        print("🖱️ Click nút Sign In...")
        signin_button.click()
        time.sleep(3)
        # === STEP 3: Handle OTP if needed ===
        otp_required = detect_otp_modal(driver)
        if otp_required:
            print("\n" + "=" * 70)
            print("🔐 PHÁT HIỆN YÊU CẦU OTP (2-FACTOR AUTHENTICATION)")
            print("=" * 70)
            if otp_callback:
                print("⏳ Đang đợi OTP từ frontend modal...")
                otp_code = otp_callback()
                if not otp_code:
                    print("❌ Không nhận được OTP (timeout hoặc cancelled)")
                    return False
                # Submit OTP (only the first two digits are echoed)
                print(f"✅ Nhận OTP: {otp_code[:2]}***")
                if not submit_otp_code(driver, otp_code):
                    print("❌ Lỗi submit OTP")
                    return False
                time.sleep(3)
            else:
                # OTP required but no way to obtain it -> abort
                print("⚠️ Không có OTP callback, bỏ qua...")
                return False
        else:
            print(" Không phát hiện yêu cầu OTP")
        # === STEP 4: Wait for login success ===
        # NOTE(review): wait_for_login_success raises RuntimeError on
        # timeout; the except below converts that into a False return.
        print("⏳ Đang chờ đăng nhập hoàn tất...")
        return wait_for_login_success(driver, timeout=15)
    except Exception as e:
        print(f"❌ Lỗi trong quá trình đăng nhập: {e}")
        import traceback
        traceback.print_exc()
        return False
def detect_otp_modal(driver: webdriver.Chrome) -> bool:
    """
    Detect if the DSM OTP (2FA) modal is shown.

    Probes three independent markers; any visible one means OTP is
    required. Fix: the original used bare ``except:`` clauses, which also
    swallowed KeyboardInterrupt/SystemExit — narrowed to ``Exception``.

    Args:
        driver: Selenium WebDriver

    Returns:
        True if an OTP element is visible
    """
    probes = [
        # Method 1: title "Enter verification code"
        (By.XPATH,
         "//*[contains(text(), 'Enter verification code') or contains(text(), 'verification code')]"),
        # Method 2: input with name='one-time-code'
        (By.CSS_SELECTOR, "input[name='one-time-code']"),
        # Method 3: OTP button with syno-id
        (By.CSS_SELECTOR, "div[syno-id='otp-panel-next-btn']"),
    ]
    for by, selector in probes:
        try:
            if driver.find_element(by, selector).is_displayed():
                return True
        except Exception:
            # Element absent (NoSuchElementException) or stale — try next probe
            continue
    return False
def submit_otp_code(driver: webdriver.Chrome, otp_code: str) -> bool:
    """
    Submit OTP code to the DSM 2FA form.

    Fix: the fallback branch used a bare ``except:`` that also swallowed
    KeyboardInterrupt/SystemExit — narrowed to ``Exception``.

    Args:
        driver: Selenium WebDriver
        otp_code: 6-digit OTP code

    Returns:
        True if submitted successfully
    """
    try:
        # Find OTP input
        otp_input = driver.find_element(By.CSS_SELECTOR, "input[name='one-time-code']")
        if not otp_input.is_displayed():
            print("❌ OTP input không visible")
            return False
        # Clear and enter OTP
        otp_input.clear()
        otp_input.send_keys(otp_code)
        time.sleep(0.5)
        # Click submit; if the button is missing, press Enter instead
        try:
            otp_button = driver.find_element(By.CSS_SELECTOR, "div[syno-id='otp-panel-next-btn']")
            otp_button.click()
        except Exception:
            # Fallback: press Enter
            otp_input.send_keys("\n")
        print("✅ OTP đã submit")
        return True
    except Exception as e:
        print(f"❌ Lỗi submit OTP: {e}")
        return False
def wait_for_login_success(driver: webdriver.Chrome, timeout: int = 15) -> bool:
    """
    Poll once per second until the post-login "Synology Drive" marker appears.

    Args:
        driver: Selenium WebDriver
        timeout: Max seconds to wait

    Returns:
        True when the "Synology Drive" div is detected (successful login)

    Raises:
        RuntimeError: If the marker never appears within ``timeout`` seconds
    """
    for attempt in range(timeout):
        try:
            # The "Synology Drive" text only appears after a successful login
            marker = driver.find_element(
                By.XPATH, "//div[contains(text(), 'Synology Drive')]")
            if marker is not None:
                print("✅ Đăng nhập thành công!")
                return True
        except Exception as e:
            # Only log the very first miss to avoid spamming the console
            if attempt == 0:
                print(f"🔍 Chưa thấy 'Synology Drive', đang đợi... (Error: {type(e).__name__})")
        time.sleep(1)
    # Timeout: raise instead of returning False so the caller sees details
    print("❌ KHÔNG phát hiện đăng nhập thành công sau timeout")
    raise RuntimeError(
        f"Login verification failed: 'Synology Drive' element not found after {timeout}s. "
        f"Login may have failed or page structure changed."
    )
def is_logged_in(driver: webdriver.Chrome) -> bool:
    """
    Quick check if already logged in to DSM.

    Fix: the original used a bare ``except:`` that also swallowed
    KeyboardInterrupt/SystemExit — narrowed to ``Exception``.

    Args:
        driver: Selenium WebDriver (may be None)

    Returns:
        True if the "Synology Drive" div is found
    """
    if not driver:
        return False
    try:
        elem = driver.find_element(By.XPATH, "//div[contains(text(), 'Synology Drive')]")
        return elem is not None
    except Exception:
        # Element not present -> not logged in
        return False
def _find_button(driver: webdriver.Chrome, selectors: list) -> Optional['WebElement']:
    """
    Helper: Find the first visible button matching any of the selectors.

    Selectors starting with "//" are treated as XPath, anything else as
    CSS. Fix: the original used a bare ``except:`` that also swallowed
    KeyboardInterrupt/SystemExit — narrowed to ``Exception``.

    Args:
        driver: Selenium WebDriver
        selectors: List of CSS/XPath selectors to try, in priority order

    Returns:
        WebElement if a visible match is found, None otherwise
    """
    for selector in selectors:
        try:
            by = By.XPATH if selector.startswith("//") else By.CSS_SELECTOR
            button = driver.find_element(by, selector)
            if button and button.is_displayed():
                return button
        except Exception:
            # No match for this selector — try the next one
            continue
    return None

View File

@ -0,0 +1,496 @@
"""
NAS Sharing API Module - FolderSharing API calls
EXTRACTED từ download_link.py DSMSeleniumLogin methods
"""
import os
import sys
import time
import requests
import urllib3
import logging
from typing import Dict, List, Any, Optional, Union, TYPE_CHECKING
from urllib.parse import urlencode
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
# Disable SSL warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Setup logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
if TYPE_CHECKING:
from ..aria2.download_manager import Aria2DownloadManager
# aria2 integration — feature flag read once at import time; aria2 is
# enabled unless USE_ARIA2 is explicitly set to a non-"true" value.
USE_ARIA2 = os.getenv('USE_ARIA2', 'true').lower() == 'true'
# Module-level singleton cache for get_aria2_manager() below.
# None, False (unavailable), or Aria2DownloadManager
_aria2_manager: Optional[Union[bool, "Aria2DownloadManager"]] = None
def get_aria2_manager() -> "Aria2DownloadManager":
    """
    Get or create aria2 manager instance for sharing downloads.

    Lazily initializes the module-level ``_aria2_manager`` singleton on
    first call (only when USE_ARIA2 is enabled); subsequent calls return
    the cached instance.

    Raises:
        RuntimeError: If aria2 is not available
    """
    global _aria2_manager
    if _aria2_manager is None and USE_ARIA2:
        try:
            # Deferred import so the module loads even without aria2 installed
            from ..aria2.download_manager import get_aria2_manager as _get_manager
            _aria2_manager = _get_manager()
            if not _aria2_manager:
                raise RuntimeError("aria2 manager returned None")
            logger.debug("✅ aria2 manager initialized for Sharing downloads")
        except Exception as e:
            raise RuntimeError(f"aria2 is required but not available: {e}")
    # NOTE(review): nothing in this chunk ever assigns False to
    # _aria2_manager, so the `is False` branch looks unreachable here —
    # confirm whether another module writes that sentinel.
    if _aria2_manager is False or _aria2_manager is None:
        raise RuntimeError("aria2 is required but not initialized")
    return _aria2_manager  # type: ignore
def get_file_list(
    driver: webdriver.Chrome,
    sharing_id: str,
    folder_path: str = "/"
) -> List[Dict[str, Any]]:
    """
    List files/folders behind a sharing link via the FolderSharing.List API.

    Args:
        driver: Selenium WebDriver holding valid session cookies
        sharing_id: Sharing ID (extracted from the sharing URL)
        folder_path: Folder path to list (default: "/")

    Returns:
        List of file/folder dicts with keys:
            - name: file/folder name
            - is_folder: True for folders
            - size: human-readable size string ("" for folders)
            - size_bytes: size in bytes (0 for folders or when unknown)
            - path: full path
            - additional: extra metadata returned by the API

    Raises:
        RuntimeError: when the session expired (error 101) or on any
            other API/network error
    """
    # Build a requests session carrying the cookies from Selenium
    session = requests.Session()
    for cookie in driver.get_cookies():
        session.cookies.set(
            cookie['name'], cookie['value'], domain=cookie['domain'])
    # Endpoint FolderSharing.List API
    url = "https://disk.lezhin.com:5001/sharing/webapi/entry.cgi"
    # Note the extra quoting: several values are sent as JSON-style
    # quoted strings, as the Synology web API expects.
    params = {
        'api': 'SYNO.FolderSharing.List',
        'method': 'list',
        'version': '2',
        'offset': '0',
        'limit': '1000',
        'sort_by': '"name"',
        'sort_direction': '"ASC"',
        'action': '"enum"',
        'additional': '["size","owner","time","perm","type","mount_point_type"]',
        'filetype': '"all"',
        'folder_path': f'"{folder_path}"',
        '_sharing_id': f'"{sharing_id}"'
    }
    # Log with the caller's function name for clearer context
    import inspect
    caller = inspect.stack()[1].function if len(
        inspect.stack()) > 1 else "unknown"
    print(f"\n🔍 [{caller}] Lấy danh sách (FolderSharing API): {folder_path}")
    # NO RETRY - fail fast so problems surface early
    try:
        response = session.post(url, data=params, verify=False, timeout=30)
        result = response.json()
        # Check the response body
        if not result:
            raise RuntimeError("API không trả về dữ liệu")
        # Check the API success flag
        if not result.get("success"):
            error_code = result.get('error', {}).get('code')
            error_detail = result.get('error', {})
            print(f"❌ API lỗi {error_code}: {error_detail}")
            # Error 101: session expired
            if error_code == 101:
                raise RuntimeError("SESSION_EXPIRED")
            # Error 407: rate limit - DO NOT retry, throw immediately
            # (frontend debounces 300ms + backend rate-limits 500ms)
            if error_code == 407:
                raise RuntimeError(f"API_ERROR_407_RATE_LIMIT: {error_detail}")
            # Any other API error
            raise RuntimeError(f"API_ERROR_{error_code}: {error_detail}")
        # Parse file list
        files = result['data']['files']
        print(f"✅ Thành công! Tìm thấy {len(files)} item(s).")
        # Format file list
        formatted = []
        for f in files:
            is_folder = f.get('isdir', False)
            # Size is only reported for files, not folders
            size_bytes = 0
            if not is_folder and f.get('additional') and f['additional'].get('size'):
                size_bytes = f['additional']['size']
            # Human-readable size string (empty for folders)
            size_str = "" if is_folder else _format_size(size_bytes)
            formatted.append({
                'name': f.get('name', ''),
                'is_folder': is_folder,
                'size': size_str,
                'size_bytes': size_bytes,
                'path': f.get('path', ''),
                'additional': f.get('additional', {})
            })
        return formatted
    except RuntimeError:
        # RuntimeError (SESSION_EXPIRED, API_ERROR_xxx) → re-raise as-is
        raise
    except requests.exceptions.Timeout as e:
        # Network timeout - DO NOT retry, throw immediately
        print(f"❌ Timeout khi gọi API: {e}")
        raise RuntimeError(f"API_TIMEOUT: {e}") from e
    except requests.exceptions.RequestException as e:
        # Network/request errors - DO NOT retry, throw immediately
        print(f"❌ Network error khi gọi API: {e}")
        raise RuntimeError(f"API_NETWORK_ERROR: {e}") from e
def encode_path_to_dlink(path: str) -> str:
    """
    Encode a NAS path into the hex "dlink" token used by download URLs.

    Args:
        path: File path, e.g., "/수조(북극여우)/001화_PSD_JPG.zip"

    Returns:
        Hex-encoded string of the path's UTF-8 bytes (lowercase digits)
    """
    # UTF-8 encode, then render each byte as two lowercase hex digits
    return path.encode('utf-8').hex()
def download_file_direct(
    driver: webdriver.Chrome,
    sharing_id: str,
    remote_path: str,
    is_dir: bool = False,
    save_path: Optional[str] = None,
    progress_callback=None
) -> bool:
    """
    DEPRECATED - DO NOT USE

    This function uses requests library instead of aria2.
    All downloads MUST use aria2 for parallel connections.
    Use prepare_download_url() + aria2_manager.download_file() instead.
    Kept for reference only - will be removed in future versions.

    Raises:
        NotImplementedError: Always.
    """
    # Old implementation removed - see git history if needed.
    # Fix: the save_path validation that used to follow this raise was
    # unreachable dead code and has been deleted.
    raise NotImplementedError(
        "download_file_direct() is deprecated. "
        "Use prepare_download_url() + aria2_manager.download_file() instead. "
        "All downloads MUST use aria2."
    )
def prepare_download_url(
    driver: webdriver.Chrome,
    sharing_id: str,
    remote_path: str,
    file_name: str
) -> tuple[str, str]:
    """
    Build the sharing-download URL and cookie header from the Selenium driver.

    This function MUST be called with driver_lock held. The returned URL
    and cookie string are self-contained, so aria2 can perform the
    download without touching the driver again.

    Args:
        driver: Selenium WebDriver with valid cookies
        sharing_id: Sharing ID
        remote_path: File/folder path in NAS
        file_name: Filename for URL path

    Returns:
        (download_url, cookie_string) tuple
    """
    # Serialize the browser session cookies into a single header value
    cookie_string = "; ".join(
        f"{c['name']}={c['value']}" for c in driver.get_cookies()
    )

    # Files and folders share the same hex-encoded dlink scheme
    dlink = encode_path_to_dlink(remote_path)

    # Build the GET request URL (identical shape for files and folders);
    # noCache is a millisecond timestamp used as a cache buster.
    query = urlencode({
        'dlink': f'"{dlink}"',
        'noCache': str(int(time.time() * 1000)),
        '_sharing_id': f'"{sharing_id}"',
        'api': 'SYNO.FolderSharing.Download',
        'version': '2',
        'method': 'download',
        'mode': 'download',
        'stdhtml': 'false',
    })
    download_url = (
        f"https://disk.lezhin.com:5001/fsdownload/webapi/file_download.cgi/"
        f"{file_name}?{query}"
    )
    return download_url, cookie_string
def validate_download_link(
    download_url: str,
    cookie_string: str,
    timeout: int = 10
) -> tuple[bool, Optional[str], Optional[int]]:
    """
    Probe a download link with a HEAD request to detect dead links BEFORE
    downloading.

    Addresses the failure mode: files stay pending too long -> link expires ->
    aria2 downloads a tiny (38B) HTML error page instead of the file.

    Args:
        download_url: URL built by prepare_download_url()
        cookie_string: Cookie string built by prepare_download_url()
        timeout: HEAD request timeout in seconds

    Returns:
        (is_valid, error_message, content_length) tuple:
            - is_valid: True when the link is usable, False when dead
            - error_message: None on success, reason string on failure
            - content_length: file size in bytes when known, otherwise None

    Raises:
        RuntimeError: on network errors or timeout
    """
    import requests
    try:
        headers = {
            'Cookie': cookie_string,
            'Referer': 'https://disk.lezhin.com:5001/',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }
        logger.debug(
            f"[validate_link] Sending HEAD request to validate link...")
        # HEAD is lightweight: we only need the response headers.
        response = requests.head(
            download_url,
            headers=headers,
            timeout=timeout,
            verify=False,  # NAS self-signed cert
            allow_redirects=True
        )

        status = response.status_code
        if status == 200:
            content_length = response.headers.get('Content-Length')
            size_bytes = int(content_length) if content_length else None

            # A 200 with an HTML body is the NAS error page, not the file.
            content_type = response.headers.get('Content-Type', '')
            if 'text/html' in content_type.lower():
                error_msg = f"Link trả về HTML (possibly expired or error page). Content-Type: {content_type}"
                logger.warning(f"[validate_link] ❌ {error_msg}")
                return False, error_msg, None

            size_str = f"{size_bytes:,} bytes" if size_bytes else "unknown size"
            logger.debug(f"[validate_link] ✅ Link valid ({size_str})")
            return True, None, size_bytes

        # Known failure statuses map to fixed messages; anything else is "unexpected".
        known_failures = {
            401: "Session expired (401 Unauthorized)",
            403: "Permission denied (403 Forbidden)",
            404: "File not found (404)",
        }
        if status in known_failures:
            error_msg = known_failures[status]
            logger.warning(f"[validate_link] ❌ {error_msg}")
        else:
            error_msg = f"Unexpected HTTP status: {status}"
            logger.warning(f"[validate_link] ⚠️ {error_msg}")
        return False, error_msg, None

    except requests.exceptions.Timeout as e:
        error_msg = f"Timeout after {timeout}s: {e}"
        logger.error(f"[validate_link] ❌ {error_msg}")
        raise RuntimeError(error_msg) from e
    except requests.exceptions.RequestException as e:
        error_msg = f"Network error: {e}"
        logger.error(f"[validate_link] ❌ {error_msg}")
        raise RuntimeError(error_msg) from e
    except Exception as e:
        error_msg = f"Unexpected error: {e}"
        logger.error(f"[validate_link] ❌ {error_msg}")
        raise RuntimeError(error_msg) from e
def get_initial_path(driver: webdriver.Chrome) -> str:
    """
    Read the current folder path from the navigation (path) bar of the
    sharing page.

    Workflow:
        1. Wait for the path bar to appear (up to 15s)
        2. On timeout -> check for the login dialog
        3. If the login dialog is present -> raise "NEEDS_LOGIN"
        4. If neither is found -> raise a generic error

    Args:
        driver: Selenium WebDriver currently on the sharing page

    Returns:
        Path of the current folder as shown in the navigation bar

    Raises:
        RuntimeError:
            - "NEEDS_LOGIN" when the login dialog is detected
            - Other message when the path bar cannot be found
    """
    if not driver:
        raise RuntimeError("Driver không tồn tại")
    # STEP 1: wait for the folder path bar to appear (max 15s)
    print("🔍 Đang chờ folder path bar xuất hiện (timeout 15s)...")
    try:
        path_btn = WebDriverWait(driver, 15).until(
            EC.presence_of_element_located((By.CSS_SELECTOR,
                                            "li table.x-btn button.x-btn-text[aria-label]"))
        )
        folder_name = path_btn.get_attribute('aria-label')
        if folder_name:
            path = f"/{folder_name}"
            print(f"✅ Phát hiện folder path: {path}")
            return path
        # Empty aria-label -> fall through to the login-dialog check
        print("⚠️ Path bar không có aria-label, kiểm tra login dialog...")
    except TimeoutException:
        # 15s elapsed - path bar never appeared
        print("⚠️ Timeout: Không tìm thấy folder path bar sau 15 giây")
    # STEP 2: no path bar -> check for the login dialog
    print("🔍 Kiểm tra login dialog...")
    try:
        login_dialog = driver.find_element(
            By.CSS_SELECTOR, "div#webfm-access-dialog")
        if login_dialog and login_dialog.is_displayed():
            print("⚠️ Phát hiện login dialog - cần đăng nhập File Station")
            raise RuntimeError("NEEDS_LOGIN")
    except Exception as e:
        # The NEEDS_LOGIN signal raised above lands in this handler too and
        # must be re-raised; any other exception just means "dialog not found".
        if "NEEDS_LOGIN" in str(e):
            raise
        print(f"⚠️ Không tìm thấy login dialog: {e}")
    # STEP 3: neither path bar nor login dialog -> hard error
    current_url = driver.current_url if driver else "unknown"
    raise RuntimeError(
        f"Không tìm thấy folder path bar trên trang sharing sau 15 giây. "
        f"URL: {current_url}"
    )
def extract_sharing_id(url: str) -> Optional[str]:
    """
    Pull the sharing ID out of a Synology sharing-link URL.

    Args:
        url: Sharing link URL (e.g. "https://disk.lezhin.com:5001/sharing/ABC123/...")

    Returns:
        The sharing ID string, or None when the URL has no "/sharing/" segment.
    """
    try:
        _, marker, tail = url.partition('/sharing/')
        if not marker:
            # The "/sharing/" segment is absent -> not a sharing link.
            return None
        # The ID runs until the next path separator or query string.
        return tail.split('/')[0].split('?')[0]
    except Exception as e:
        print(f"❌ Lỗi extract_sharing_id: {e}")
        return None
def _format_size(size_bytes: int) -> str:
"""
Helper: Format bytes to human-readable size
Args:
size_bytes: Size in bytes
Returns:
Formatted string (e.g., "1.5 MB")
"""
if size_bytes == 0:
return ""
elif size_bytes < 1024:
return f"{size_bytes} B"
elif size_bytes < 1024 * 1024:
return f"{size_bytes / 1024:.2f} KB"
elif size_bytes < 1024 * 1024 * 1024:
return f"{size_bytes / (1024 * 1024):.2f} MB"
else:
return f"{size_bytes / (1024 * 1024 * 1024):.2f} GB"

View File

@ -0,0 +1,86 @@
"""
NAS Sharing Session Management
Handles session checking and credential management for Synology DSM sharing links.
Chrome profile handles cookie persistence automatically - no JSON files needed.
"""
import os
import time
from typing import Optional, Tuple
from selenium import webdriver
from selenium.webdriver.common.by import By
def get_username_password() -> Tuple[str, str]:
    """
    Read NAS credentials from environment variables.

    Returns:
        (username, password) tuple

    Raises:
        ValueError: when either variable is missing or empty
    """
    username = os.getenv("NAS_USERNAME")
    password = os.getenv("NAS_PASSWORD")
    if username and password:
        return username, password
    raise ValueError("NAS_USERNAME and NAS_PASSWORD must be set in .env.local")
class SharingSessionManager:
    """
    Manages the DSM session check for sharing links.

    Cookie persistence is delegated entirely to the on-disk Chrome profile,
    so no JSON cookie files are read or written here.
    """

    def __init__(self, driver: webdriver.Chrome):
        """
        Initialize session manager.

        Args:
            driver: Selenium Chrome WebDriver instance
        """
        self.driver = driver

    def ensure_logged_in_page(self) -> None:
        """
        Navigate to the DSM URL so Chrome loads the profile's stored cookies.

        Raises:
            RuntimeError: If driver not initialized
            ValueError: If NAS_DSM_URL not set in environment
        """
        if not self.driver:
            raise RuntimeError("[Session] ❌ Driver not initialized")
        dsm_url = os.getenv("NAS_DSM_URL")
        if not dsm_url:
            raise ValueError("NAS_DSM_URL must be set in .env.local")
        print("[Session] 🌐 Navigating to DSM URL...")
        self.driver.get(dsm_url)
        time.sleep(2)  # Give Chrome a moment to apply cookies from the profile
        print("[Session] ✅ Chrome profile cookies loaded automatically")

    def is_logged_in(self) -> bool:
        """
        Check if already logged in to DSM.

        Looks for a DSM-specific element that only renders when authenticated.

        Returns:
            True if logged in, False otherwise
        """
        if not self.driver:
            return False
        try:
            elem = self.driver.find_element(
                By.XPATH, "//div[contains(text(), 'Synology Drive')]")
            return elem is not None
        except Exception:
            # FIX: was a bare `except:` which also swallowed KeyboardInterrupt
            # and SystemExit. A failed element lookup means "not logged in".
            return False

View File

@ -0,0 +1,411 @@
"""
NAS Sharing Service - Public API wrapper
"""
import os
import sys
import logging
from typing import Dict, Optional, Any, Callable
from .nas_sharing_worker import SharingLinkWorker
logger = logging.getLogger(__name__)
_worker: Optional[SharingLinkWorker] = None
USE_ARIA2 = os.getenv('USE_ARIA2', 'true').lower() == 'true'
def get_worker() -> SharingLinkWorker:
    """Return the process-wide SharingLinkWorker, creating and starting it lazily."""
    global _worker
    if _worker is None:
        worker = SharingLinkWorker()
        worker.start()
        _worker = worker
    return _worker
def process_sharing_link(url: str) -> Dict[str, str]:
    """Queue a sharing link for processing and return its tracking request id."""
    request_id = get_worker().submit_request(url)
    return {'request_id': request_id, 'status': 'pending'}
def get_sharing_result(request_id: str) -> Optional[Dict]:
    """Look up the (possibly still pending) result for a submitted request."""
    worker = get_worker()
    return worker.get_result(request_id)
def is_otp_required() -> bool:
    """
    Report whether the frontend should open the OTP modal.

    Returns True exactly once per pending OTP: the modal-shown flag is set on
    the first positive answer so subsequent polls do not reopen the dialog.
    """
    worker = get_worker()
    should_show = (
        worker.otp_pending
        and not worker.otp_modal_shown
        and not worker.otp_submitted
    )
    if should_show:
        worker.otp_modal_shown = True
    return should_show
def submit_otp(code: str) -> Dict[str, str]:
    """Hand the user-supplied OTP code to the worker thread."""
    worker = get_worker()
    worker.otp_code = code
    return {'status': 'ok', 'message': 'OTP đã nhận'}
def download_file(
    sharing_id: str,
    file_path: str,
    save_path: str,
    is_folder: bool = False,
    progress_callback=None,
    max_speed: Optional[str] = None,
    validate_link: bool = True
) -> Dict[str, Any]:
    """
    Download a file from a sharing link.

    The Selenium driver is only borrowed briefly (cookie + URL extraction);
    the long-running transfer itself is handed to aria2 so the driver stays
    free for other requests.

    Args:
        sharing_id: Sharing ID
        file_path: Remote file path
        save_path: Local save path
        is_folder: Whether file is a folder
        progress_callback: Progress callback function
        max_speed: Optional bandwidth limit (e.g., '100K')
        validate_link: Validate the link before downloading (default: True)

    Returns:
        Dict with status, message, save_path (plus aria2_gid when a transfer
        was actually started)
    """
    worker = get_worker()
    try:
        # Step 0: Clean up existing file to force re-download.
        # aria2 with continue=true will SKIP a file that already exists and is
        # complete, so the old file must be deleted for a fresh download.
        # Uses the shared cleanup function with delete_dirs=False (Sharing mode specific).
        try:
            from .nas_api.file_operations import cleanup_duplicates_before_download
            file_name = os.path.basename(save_path)
            dest_dir = os.path.dirname(save_path)
            # Clean up duplicates AND the exact file
            cleanup_duplicates_before_download(
                dest_path=dest_dir,
                file_name_pattern=file_name.replace(
                    '.zip', ''),  # Basic pattern derived from the filename
                exact_filename=file_name,
                delete_dirs=False
            )
            # Safety check: if the file still exists (could not be deleted), append _NEW
            if os.path.exists(save_path):
                logger.warning(
                    f"[download_file] Could not delete existing file {save_path}, appending _NEW")
                name, ext = os.path.splitext(save_path)
                save_path = f"{name}_NEW{ext}"
                logger.debug(f"[download_file] New save path: {save_path}")
        except ImportError:
            logger.warning(
                "Could not import cleanup_duplicates_before_download")
            # Fallback to a simple best-effort delete
            if os.path.exists(save_path):
                try:
                    os.remove(save_path)
                except:
                    pass
        except Exception as e:
            logger.warning(f"[download_file] Cleanup failed: {e}")
        # Also delete the .aria2 control file if present (for the final path),
        # otherwise aria2 may try to resume against a stale control file.
        aria2_file = f"{save_path}.aria2"
        if os.path.exists(aria2_file):
            try:
                os.remove(aria2_file)
                logger.debug(
                    f"[download_file] Deleted existing .aria2 file: {aria2_file}")
            except Exception as e:
                logger.warning(
                    f"[download_file] Failed to delete .aria2 file: {e}")
        # Step 1: Extract cookies and build URL (NEEDS driver lock - FAST ~1s)
        with worker.driver_lock:
            # Always ensure the driver is ready and ALIVE (probes current_url).
            # Previously only checked 'if not worker.driver', which missed dead drivers.
            worker._ensure_driver_ready()
            if not worker.driver:
                return {'status': 'error', 'message': 'Worker driver failed to initialize'}
            # Ensure the driver is on the correct sharing page so cookies are valid.
            # A fresh driver (data:,) or one on another page yields missing/wrong cookies.
            expected_url = f'https://disk.lezhin.com:5001/sharing/{sharing_id}'
            try:
                if sharing_id not in worker.driver.current_url:
                    logger.debug(
                        f"[download_file] Driver not on sharing page, navigating to: {expected_url}")
                    worker.driver.get(expected_url)
            except Exception as e:
                logger.warning(
                    f"[download_file] Failed to check/navigate URL: {e}")
                # Try to restart the driver if navigation fails
                worker._ensure_driver_ready()
                if worker.driver:
                    worker.driver.get(expected_url)
            # Imported here to avoid a circular dependency
            from .nas_sharing_api.selenium_operations import prepare_download_url, validate_download_link
            # Extract cookies and build the download URL (FAST - driver needed only briefly)
            file_name = os.path.basename(save_path)
            download_url, cookie_string = prepare_download_url(
                driver=worker.driver,
                sharing_id=sharing_id,
                remote_path=file_path,
                file_name=file_name
            )
            # Lock released here - driver now free for other requests!
        # Step 1.5: Validate link (OPTIONAL - detects dead links before downloading)
        if validate_link:
            logger.debug(
                f"[download_file] Validating link before download: {file_name}")
            try:
                is_valid, error_msg, content_length = validate_download_link(
                    download_url=download_url,
                    cookie_string=cookie_string,
                    timeout=10
                )
                if not is_valid:
                    # Link is already dead -> fail immediately, skip the download
                    logger.error(
                        f"[download_file] ❌ Link validation failed: {error_msg}")
                    return {
                        'status': 'error',
                        'message': f'Link không hợp lệ: {error_msg}',
                        'save_path': None
                    }
                # Link OK -> log the size when known
                if content_length:
                    logger.debug(
                        f"[download_file] ✅ Link valid, file size: {content_length:,} bytes")
                else:
                    logger.debug(
                        f"[download_file] ✅ Link valid (size unknown)")
            except RuntimeError as e:
                # Validation infrastructure error (network/timeout) -> warn but
                # still attempt the download.
                logger.warning(
                    f"[download_file] ⚠️ Link validation failed with error: {e}")
                logger.warning(
                    f"[download_file] Continuing download anyway...")
        # Step 2: Download with aria2 (NO driver lock needed - SLOW ~minutes)
        from .nas_sharing_api.selenium_operations import get_aria2_manager
        manager = get_aria2_manager()
        logger.debug(f"[download_file] Starting aria2 download: {file_name}")
        success, error_msg, gid = manager.download_file(
            url=download_url,
            dest_path=save_path,
            cookies=cookie_string,
            referer="https://disk.lezhin.com:5001/",
            progress_callback=progress_callback,
            max_download_limit=max_speed
        )
        if success:
            return {
                'status': 'success',
                'message': 'Đã tải thành công',
                'save_path': save_path,
                'aria2_gid': gid  # Return GID for cancellation support
            }
        else:
            # IMPORTANT: return the GID even on failure - the GID exists from
            # the moment the aria2 task was created, so callers can still
            # cancel or clean up the task.
            return {
                'status': 'error',
                'message': f'Download failed: {error_msg}',
                'save_path': None,
                'aria2_gid': gid
            }
    except Exception as e:
        logger.error(f"[download_file] Exception: {e}", exc_info=True)
        return {'status': 'error', 'message': str(e), 'aria2_gid': None}
def shutdown_worker():
    """Stop the global sharing worker (if any) and clear the singleton reference."""
    global _worker
    if _worker is not None:
        _worker.stop()
        _worker = None
def start_sharing_worker():
    """Initialize and start the sharing link worker on startup."""
    # Touching the singleton lazily creates the worker and starts its thread.
    get_worker()
def get_sharing_worker() -> Optional[SharingLinkWorker]:
    """Return the global sharing worker instance (None when never started) - for external use."""
    return _worker
def download_sharing_files(
    worker_instance: SharingLinkWorker,
    sharing_id: str,
    files_info: list,
    dest_path: str,
    job_id: Optional[str] = None,
    progress_callback: Optional[Callable] = None
) -> tuple:
    """
    Download multiple files from a sharing link to a destination directory.

    Args:
        worker_instance: SharingLinkWorker instance
        sharing_id: Sharing ID
        files_info: List of file dicts with 'path', 'name', 'is_folder'
        dest_path: Destination directory
        job_id: Optional job ID for progress updates
        progress_callback: Optional callback(file_index, total_files, file_progress_data)

    Returns:
        (status, results, message) tuple where status is one of
        "success" / "partial" / "failed" / "error"
    """
    try:
        import os
        # Ensure the driver is ready
        if not worker_instance.driver:
            worker_instance._ensure_driver_ready()
            if not worker_instance.driver:
                raise RuntimeError("Worker driver failed to initialize")
        results = []
        success_count = 0
        total_files = len(files_info)
        # Initialize per-file status entries for progress tracking
        files_status = [
            {
                "name": f.get('name', ''),
                "status": "pending",
                "is_folder": f.get('is_folder', False),
                "size": f.get('size_bytes', 0)
            }
            for f in files_info
        ]
        for idx, file_info in enumerate(files_info):
            file_path = file_info.get('path', '')
            file_name = file_info.get('name', os.path.basename(file_path))
            is_folder = file_info.get('is_folder', False)
            file_size = file_info.get('size_bytes', 0)
            # Folders are streamed as zip archives - reflect that in the name
            if is_folder and not file_name.endswith('.zip'):
                file_name = f"{file_name}.zip"
            save_path = os.path.join(dest_path, file_name)
            # Mark this entry as the one currently downloading
            files_status[idx]["status"] = "downloading"
            files_status[idx]["progress"] = 0
            print(
                f"[Download] ({idx + 1}/{total_files}) {file_name} → {save_path}")

            # Progress callback for the individual file; closes over idx, which
            # is safe because it is only invoked during this loop iteration.
            def file_progress_callback(downloaded_bytes: int, total_bytes: int):
                # Update progress for both files and folders
                files_status[idx]["downloaded"] = downloaded_bytes
                if total_bytes > 0:
                    # File with known size - calculate percentage
                    progress_pct = (downloaded_bytes / total_bytes) * 100
                    files_status[idx]["progress"] = round(progress_pct, 1)
                    files_status[idx]["total"] = total_bytes
                else:
                    # Folder (no total size) - just track downloaded bytes
                    files_status[idx]["progress"] = None
                    files_status[idx]["total"] = None
                # Forward the aggregate snapshot to the parent callback
                if progress_callback:
                    progress_callback(idx, total_files, {
                        "current_file": file_name,
                        "current_file_index": idx + 1,
                        "total_files": total_files,
                        "current_file_progress": files_status[idx].get("progress"),
                        "current_file_downloaded": downloaded_bytes,
                        "current_file_total": total_bytes if total_bytes > 0 else None,
                        "files_status": files_status
                    })

            # Download (always via aria2)
            try:
                # Step 1: Extract cookies and build URL (LOCK driver briefly ~1s)
                with worker_instance.driver_lock:
                    # Ensure the driver is on the correct sharing page before
                    # extracting cookies
                    expected_url = f'https://disk.lezhin.com:5001/sharing/{sharing_id}'
                    if sharing_id not in worker_instance.driver.current_url:
                        logger.debug(
                            f"[download_sharing_files] Driver not on sharing page, navigating to: {expected_url}")
                        worker_instance.driver.get(expected_url)
                    from .nas_sharing_api.selenium_operations import prepare_download_url
                    download_url, cookie_string = prepare_download_url(
                        driver=worker_instance.driver,
                        sharing_id=sharing_id,
                        remote_path=file_path,
                        file_name=file_name
                    )
                    # Lock released - driver free for other requests!
                # Step 2: Download with aria2 (NO lock - allows parallel downloads)
                from .nas_sharing_api.selenium_operations import get_aria2_manager
                manager = get_aria2_manager()
                success, error_msg, gid = manager.download_file(
                    url=download_url,
                    dest_path=save_path,
                    cookies=cookie_string,
                    referer="https://disk.lezhin.com:5001/",
                    progress_callback=file_progress_callback
                )
                if not success:
                    raise RuntimeError(f"aria2 download failed: {error_msg}")
            except Exception as e:
                import traceback
                traceback.print_exc()
                success = False
            # Update status after the download attempt
            if success:
                files_status[idx]["status"] = "completed"
                files_status[idx]["progress"] = 100
                success_count += 1
            else:
                files_status[idx]["status"] = "failed"
            results.append({
                'name': file_name,
                'path': file_path,
                'success': success,
                'destination': save_path if success else None
            })
        if success_count == len(files_info):
            return ("success", results, f"Downloaded {success_count}/{len(files_info)} files")
        elif success_count > 0:
            return ("partial", results, f"Downloaded {success_count}/{len(files_info)} files")
        else:
            return ("failed", results, "All downloads failed")
    except Exception as e:
        import traceback
        traceback.print_exc()
        return ("error", [], str(e))

View File

@ -0,0 +1,374 @@
"""
NAS Sharing Worker - Thread-based queue processor
REFACTORED: Dùng nas_sharing_auth + nas_sharing_api modules
"""
import os
import time
import threading
import queue
import uuid
from typing import Dict, Optional
from functools import wraps
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import NoSuchWindowException, WebDriverException
from webdriver_manager.chrome import ChromeDriverManager
from .nas_sharing_api import (
SharingSessionManager,
perform_login,
extract_sharing_id,
get_initial_path,
get_file_list,
)
def handle_window_closed(func):
    """
    Decorator for SharingLinkWorker methods: when the wrapped call fails with
    a browser crash/close error, quit and clear the driver, then retry the
    call exactly once. Non-driver exceptions propagate unchanged.
    """
    # Substrings of Selenium/urllib3 error messages that indicate a dead browser.
    _DRIVER_ERROR_MARKERS = (
        'window already closed',
        'web view not found',
        'max retries exceeded',
        'connection refused',
    )

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except Exception as e:
            error_msg = str(e).lower()
            # Message markers are checked first so the isinstance test is only
            # reached for unrecognized messages (same order as before).
            is_driver_error = (
                any(marker in error_msg for marker in _DRIVER_ERROR_MARKERS) or
                isinstance(e, (NoSuchWindowException, WebDriverException))
            )
            if not is_driver_error:
                raise
            print(f"[SharingWorker] ⚠️ Driver error: {str(e)[:100]}")
            print(f"[SharingWorker] 🔄 Resetting driver...")
            try:
                if self.driver:
                    self.driver.quit()
            except Exception:
                # FIX: was a bare `except:` that also trapped KeyboardInterrupt.
                pass
            self.driver = None
            # Retry once; the method is expected to recreate the driver lazily.
            return func(self, *args, **kwargs)

    return wrapper
class SharingLinkWorker:
    """
    Worker that processes sharing-link requests from an internal queue.

    A single browser instance is kept alive across requests so the DSM
    session (persisted in the Chrome profile) survives between jobs.
    """

    def __init__(self):
        # Selenium driver shared by all requests (created lazily)
        self.driver: Optional[webdriver.Chrome] = None
        self.session_manager: Optional[SharingSessionManager] = None
        self.request_queue = queue.Queue()
        # request_id -> result dict
        self.results = {}
        self.is_running = False
        self.worker_thread = None
        # Thread safety: lock to prevent concurrent driver access
        self.driver_lock = threading.RLock()
        # OTP handling with modal-shown tracking
        self.otp_pending = False
        self.otp_code: Optional[str] = None
        self.otp_modal_shown = False
        self.otp_submitted = False  # Track OTP submission success

    def start(self):
        """Start the worker thread (no-op when already running)."""
        if self.is_running:
            return
        self.is_running = True
        self.worker_thread = threading.Thread(target=self._worker_loop, daemon=True)
        self.worker_thread.start()
        print("[SharingWorker] Started")

    def stop(self):
        """Stop the worker and clean up the browser."""
        self.is_running = False
        if self.driver:
            try:
                # Close all windows first
                if len(self.driver.window_handles) > 0:
                    self.driver.quit()
                else:
                    # Force kill if no windows are left
                    self.driver.service.process.terminate()
            except Exception as e:
                print(f"[SharingWorker] Warning during cleanup: {e}")
            finally:
                self.driver = None
        print("[SharingWorker] Stopped")

    def submit_request(self, url: str) -> str:
        """Queue a sharing link for processing; returns a request id for polling."""
        request_id = str(uuid.uuid4())
        self.request_queue.put({
            'id': request_id,
            'url': url,
            'timestamp': time.time()
        })
        self.results[request_id] = {
            'status': 'pending',
            'message': 'Đang xử lý sharing link...'
        }
        return request_id

    def get_result(self, request_id: str) -> Optional[Dict]:
        """Get the processing result (None for unknown request ids)."""
        return self.results.get(request_id)

    def _worker_loop(self):
        """Main loop: pull requests off the queue and process them serially."""
        print("[SharingWorker] Worker loop started")
        while self.is_running:
            try:
                try:
                    # Short timeout so the loop notices is_running changes
                    request = self.request_queue.get(timeout=1)
                except queue.Empty:
                    continue
                request_id = request['id']
                url = request['url']
                print(f"[SharingWorker] Processing: {url}")
                result = self._process_sharing_link(url)
                self.results[request_id] = result
                print(f"[SharingWorker] Completed: {result['status']}")
            except Exception as e:
                print(f"[SharingWorker] Error: {e}")
                import traceback
                traceback.print_exc()

    def _ensure_driver_ready(self):
        """Set up the Chrome driver if absent or dead - thread-safe."""
        with self.driver_lock:
            if self.driver:
                try:
                    # current_url probes whether the driver is still alive
                    _ = self.driver.current_url
                    print("[SharingWorker] ✅ Reusing existing driver")
                    return
                except:
                    print("[SharingWorker] ⚠️ Driver dead, creating new...")
                    try:
                        self.driver.quit()
                    except:
                        pass
                    self.driver = None
            # ===== ALL driver-creation code runs INSIDE the lock to avoid races =====
            print("[SharingWorker] 🚀 Creating new Chrome driver...")
            chrome_options = Options()
            # Chrome profile location comes from the environment
            profile_path_env = os.getenv("NAS_CHROME_PROFILE_PATH")
            if not profile_path_env:
                raise ValueError("NAS_CHROME_PROFILE_PATH must be set in .env.local")
            # Resolve absolute path (profile path is relative to the workspace root)
            current_file = os.path.abspath(__file__)
            backend_dir = os.path.dirname(os.path.dirname(current_file))
            workspace_root = os.path.dirname(backend_dir)
            profile_path = os.path.join(workspace_root, profile_path_env)
            os.makedirs(profile_path, exist_ok=True)
            # Chrome options (fix crash issues)
            chrome_options.add_argument(f'user-data-dir={profile_path}')
            chrome_options.add_argument('--disable-gpu')
            chrome_options.add_argument('--start-maximized')
            chrome_options.add_argument('--ignore-certificate-errors')
            # Additional stability options (prevent crashes)
            chrome_options.add_argument('--no-sandbox')
            chrome_options.add_argument('--disable-dev-shm-usage')
            chrome_options.add_argument('--disable-blink-features=AutomationControlled')
            chrome_options.add_argument('--remote-debugging-port=0')  # Let Chrome choose port
            # Disable extensions to avoid conflicts
            chrome_options.add_argument('--disable-extensions')
            # Prevent the "Chrome is being controlled by automated test software" banner
            chrome_options.add_experimental_option("excludeSwitches", ["enable-automation"])
            chrome_options.add_experimental_option('useAutomationExtension', False)
            service = Service(ChromeDriverManager().install())
            try:
                self.driver = webdriver.Chrome(service=service, options=chrome_options)
            except Exception as e:
                print(f"[SharingWorker] ❌ Failed to create Chrome driver: {e}")
                print(f"[SharingWorker] Profile path: {profile_path}")
                print(f"[SharingWorker] Chrome options: {chrome_options.arguments}")
                # Kill any zombie Chrome processes (taskkill: Windows-specific)
                import subprocess
                try:
                    subprocess.run(['taskkill', '/F', '/IM', 'chrome.exe'],
                                   capture_output=True, timeout=5)
                    subprocess.run(['taskkill', '/F', '/IM', 'chromedriver.exe'],
                                   capture_output=True, timeout=5)
                    print(f"[SharingWorker] Killed zombie Chrome processes, retrying...")
                    time.sleep(2)
                    # Retry once after killing zombies
                    self.driver = webdriver.Chrome(service=service, options=chrome_options)
                except Exception as retry_error:
                    print(f"[SharingWorker] ❌ Retry also failed: {retry_error}")
                    raise RuntimeError(
                        f"Cannot create Chrome driver. "
                        f"Try: 1) Close all Chrome windows, 2) Delete chrome_profile_nas folder, 3) Restart"
                    ) from e
            # Bind a session manager to the fresh driver
            self.session_manager = SharingSessionManager(self.driver)
            print(f"[SharingWorker] ✅ Driver created, profile: {profile_path}")

    @handle_window_closed
    def _perform_login(self) -> bool:
        """
        Perform DSM login via the nas_sharing_auth module.
        The OTP is collected from the frontend through the modal flags on
        this worker (otp_pending / otp_code / otp_modal_shown / otp_submitted).
        """
        if not self.driver or not self.session_manager:
            raise RuntimeError("Driver not initialized")
        # Type safety assertions
        assert self.driver is not None
        assert self.session_manager is not None

        # OTP callback handed to nas_sharing_auth.perform_login()
        def otp_callback() -> Optional[str]:
            """Block until the frontend modal delivers an OTP (or 5-minute timeout)."""
            # Raise the pending flag so the frontend shows the modal
            if not self.otp_pending:
                self.otp_pending = True
                self.otp_modal_shown = False
                self.otp_submitted = False
            # Wait for the OTP (max 5 minutes)
            for i in range(300):
                if self.otp_code:
                    code = self.otp_code
                    # DON'T reset flags yet - wait for login completion
                    self.otp_code = None
                    return code
                time.sleep(1)
                if i % 10 == 0:
                    print(f"[SharingWorker] ⏳ Waiting for OTP... ({300-i}s)")
            # Timed out - clear all OTP state
            self.otp_pending = False
            self.otp_modal_shown = False
            self.otp_submitted = False
            return None

        # Call perform_login() from nas_sharing_api
        success = perform_login(
            driver=self.driver,
            otp_callback=otp_callback
        )
        if success:
            print("[SharingWorker] ✅ Login successful!")
            # IMPORTANT: mark the OTP submitted BEFORE resetting the flags
            if self.otp_pending:
                self.otp_submitted = True
                print("[SharingWorker] ✅ OTP đã được xác nhận thành công")
            # Wait so the Chrome profile can persist the cookies (important!)
            print("[SharingWorker] ⏳ Đợi 5s để lưu cookies vào Chrome profile...")
            time.sleep(5)
            # Reset OTP flags after the wait
            self.otp_pending = False
            self.otp_modal_shown = False
            print("[SharingWorker] ✅ Cookies đã được lưu vào Chrome profile")
            return True
        # Login failed - reset flags
        self.otp_pending = False
        self.otp_modal_shown = False
        self.otp_submitted = False
        return False

    @handle_window_closed
    def _process_sharing_link(self, url: str) -> Dict:
        """
        Process a sharing link: navigate to it and extract the file list,
        logging in first when the page demands it.
        """
        from .nas_sharing_api.selenium_operations import extract_sharing_id, get_initial_path, get_file_list
        try:
            sharing_id = extract_sharing_id(url)
            if not sharing_id:
                raise Exception("Cannot extract sharing_id from URL")
            print(f"[SharingWorker] Sharing ID: {sharing_id}")
            with self.driver_lock:
                self._ensure_driver_ready()
                assert self.driver is not None
                # Clear the browser cache to avoid ExtJS conflicts
                try:
                    self.driver.execute_cdp_cmd('Network.clearBrowserCache', {})
                except Exception:
                    pass
                self.driver.get(url)
                try:
                    initial_path = get_initial_path(self.driver)
                except RuntimeError as e:
                    if "NEEDS_LOGIN" in str(e):
                        print("[SharingWorker] Login required")
                        if not self._perform_login():
                            raise Exception("Login failed")
                        # Clear the cache again after login to avoid ExtJS conflicts
                        print("[SharingWorker] Xóa cache trước khi truy cập lại sharing link...")
                        try:
                            self.driver.execute_cdp_cmd('Network.clearBrowserCache', {})
                        except Exception as clear_error:
                            print(f"[SharingWorker] ⚠️ Không thể xóa cache: {clear_error}")
                        self.driver.get(url)
                        initial_path = get_initial_path(self.driver)
                    else:
                        raise
                print(f"[SharingWorker] 📋 Lấy danh sách ROOT folder: {initial_path}")
                files = get_file_list(self.driver, sharing_id, initial_path)
            return {
                'status': 'success',
                'sharing_id': sharing_id,
                'path': initial_path,
                'files': files,
                'total_files': len(files),
                'message': f'Found {len(files)} files'
            }
        except Exception as e:
            print(f"[SharingWorker] Error: {e}")
            import traceback
            traceback.print_exc()
            return {
                'status': 'error',
                'message': str(e)
            }

View File

@ -0,0 +1,362 @@
"""
Supabase service wrapper for managing userslist and submissions.
Uses Supabase for userslist storage and can optionally use for submissions.
"""
import os
import logging
from typing import List, Dict, Any, Optional
from datetime import datetime
from dotenv import load_dotenv
from supabase import create_client
# Load .env.local first, then .env (python-dotenv does not override variables
# that are already set, so .env.local effectively takes precedence)
load_dotenv('.env.local')
load_dotenv()

logger = logging.getLogger(__name__)

# Supabase configuration, read once at import time
SUPABASE_URL = os.getenv("SUPABASE_URL")
SUPABASE_SERVICE_ROLE_KEY = os.getenv("SUPABASE_SERVICE_ROLE_KEY")

# Lazily-created singleton client (see get_supabase_client)
_supabase_client = None
def get_supabase_client():
    """Return the module-level Supabase client, creating it on first use.

    Raises:
        ValueError: when SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY is unset.
    """
    global _supabase_client
    if _supabase_client is not None:
        return _supabase_client
    if not SUPABASE_URL or not SUPABASE_SERVICE_ROLE_KEY:
        raise ValueError(
            "SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY not configured")
    _supabase_client = create_client(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY)
    logger.debug("Supabase client initialized")
    return _supabase_client
# ===========================
# Userslist Management
# ===========================
def get_userslist() -> List[str]:
    """Fetch all usernames from the Supabase userslist table (sorted ascending).

    Returns an empty list on any error.
    """
    try:
        client = get_supabase_client()
        response = client.table("userslist").select(
            "username").order("username", desc=False).execute()
        # Keep only well-formed rows that actually carry a username
        return [
            row["username"]
            for row in response.data
            if isinstance(row, dict) and "username" in row
        ]
    except Exception as e:
        logger.error(f"Error fetching userslist: {e}")
        return []
def add_username(username: str) -> Dict[str, Any]:
    """Add a new username to the Supabase userslist table.

    Returns a dict with success flag, message, and (when the table was
    consulted) the refreshed list of usernames.
    """
    username = username.strip()
    if not username:
        return {"success": False, "message": "Empty username"}
    try:
        client = get_supabase_client()
        # Reject duplicates up front
        existing = client.table("userslist").select(
            "id").eq("username", username).execute()
        if existing.data:
            return {"success": False, "message": "Already exists",
                    "usernames": get_userslist()}
        client.table("userslist").insert({"username": username}).execute()
        return {"success": True, "message": "Added",
                "usernames": get_userslist()}
    except Exception as e:
        logger.error(f"Error adding username: {e}")
        return {"success": False, "message": f"Could not save: {str(e)}"}
def delete_username(username: str) -> Dict[str, Any]:
    """Delete a username from the Supabase userslist table.

    Returns a dict with success flag, message, and (when the table was
    consulted) the refreshed list of usernames.
    """
    username = username.strip()
    try:
        client = get_supabase_client()
        # Verify the row exists before attempting the delete
        existing = client.table("userslist").select(
            "id").eq("username", username).execute()
        if not existing.data:
            return {"success": False, "message": "Not found",
                    "usernames": get_userslist()}
        client.table("userslist").delete().eq("username", username).execute()
        return {"success": True, "message": "Deleted",
                "usernames": get_userslist()}
    except Exception as e:
        logger.error(f"Error deleting username: {e}")
        return {"success": False, "message": f"Could not delete: {str(e)}"}
# ===========================
# Submissions Management (via Supabase)
# ===========================
def create_submission_supabase(submission_id: str, usernames: List[str], ge_input: str) -> Dict[str, Any]:
    """Create a new submission row in Supabase.

    The new row is appended to the back of the pending queue: its
    queue_position is one past the highest pending position (or 1).
    """
    try:
        supabase = get_supabase_client()
        # Find the current tail of the pending queue.
        head = supabase.table("submissions").select("queue_position").eq(
            "status", "pending").order("queue_position", desc=True).limit(1).execute()
        position = 1
        if head.data:
            row = head.data[0]
            if isinstance(row, dict) and isinstance(row.get("queue_position"), int):
                position = row["queue_position"] + 1
        payload = {
            "submission_id": submission_id,
            "status": "pending",
            "input": {
                "usernames": usernames,
                "ge_input": ge_input
            },
            "results": [],
            "queue_position": position,
            "retry_count": 0
        }
        inserted = supabase.table("submissions").insert(payload).execute()
        if inserted.data:
            return {"success": True, "submission": inserted.data[0]}
        return {"success": False, "message": "Failed to create submission"}
    except Exception as e:
        logger.error(f"Error creating submission: {e}")
        return {"success": False, "message": str(e)}
def get_submission_by_id(submission_id: str) -> Optional[Dict[str, Any]]:
    """Fetch one submission row matching the ``id`` column (not submission_id)."""
    try:
        result = get_supabase_client().table("submissions").select(
            "*").eq("id", submission_id).limit(1).execute()
        rows = result.data
        if rows and isinstance(rows[0], dict):
            return rows[0]
        return None
    except Exception as e:
        logger.error(f"Error fetching submission by id: {e}")
        return None
def create_retry_submission(username: str, ge_id_and_lang: str) -> Optional[Dict[str, Any]]:
    """Create a new submission for retry (simpler variant for retrying errors).

    Returns the inserted row dict, or None on failure.
    """
    try:
        import uuid
        supabase = get_supabase_client()
        # Place the retry at the back of the pending queue.
        head = supabase.table("submissions").select("queue_position").eq(
            "status", "pending").order("queue_position", desc=True).limit(1).execute()
        position = 1
        if head.data:
            row = head.data[0]
            if isinstance(row, dict) and isinstance(row.get("queue_position"), int):
                position = row["queue_position"] + 1
        # The username argument may carry several comma-separated names.
        names = [part.strip() for part in username.split(',') if part.strip()]
        payload = {
            "submission_id": str(uuid.uuid4()),
            "status": "pending",
            "input": {
                "usernames": names,
                "ge_input": ge_id_and_lang
            },
            "results": [],
            "queue_position": position,
            "retry_count": 0
        }
        inserted = supabase.table("submissions").insert(payload).execute()
        if inserted.data:
            row = inserted.data[0]
            if isinstance(row, dict):
                return row
        return None
    except Exception as e:
        logger.error(f"Error creating retry submission: {e}")
        return None
def get_submissions_supabase(limit: int = 50, status: Optional[str] = None) -> Any:
    """Fetch submissions from Supabase, newest first, optionally by status."""
    try:
        query = get_supabase_client().table("submissions").select("*")
        if status:
            query = query.eq("status", status)
        return query.order("created_at", desc=True).limit(limit).execute().data
    except Exception as e:
        logger.error(f"Error fetching submissions: {e}")
        return []
def get_pending_submissions_supabase() -> Any:
    """Return pending raw_download submissions, oldest first (FIFO).

    TMS permission submissions are intentionally excluded here: they are
    handled by the TypeScript backend, not this Python worker.
    """
    try:
        query = (
            get_supabase_client()
            .table("submissions")
            .select("*")
            .eq("status", "pending")
            .eq("submission_type", "raw_download")  # Only raw downloads
            .order("created_at", desc=False)  # FIFO order
        )
        return query.execute().data
    except Exception as e:
        logger.error(f"Error fetching pending submissions: {e}")
        return []
def get_processing_submissions_supabase() -> Any:
    """Return all raw_download submissions currently in 'processing' status.

    Used by the worker to detect and reset stuck submissions. TMS permission
    submissions are excluded (handled by the TypeScript backend).
    """
    try:
        query = (
            get_supabase_client()
            .table("submissions")
            .select("*")
            .eq("status", "processing")
            .eq("submission_type", "raw_download")  # Only raw downloads
        )
        return query.execute().data
    except Exception as e:
        logger.error(f"Error fetching processing submissions: {e}")
        return []
def update_submission_supabase(submission_id: str, **kwargs) -> bool:
    """Apply arbitrary column updates to one submission row.

    Returns True when at least one row matched and was updated.
    """
    try:
        result = get_supabase_client().table("submissions").update(
            kwargs).eq("submission_id", submission_id).execute()
        return len(result.data) > 0
    except Exception as e:
        logger.error(f"Error updating submission: {e}")
        return False
def delete_submission_supabase(submission_id: str) -> bool:
    """Delete a submission row; True unless the request itself raised.

    Supabase returns empty data on a successful delete, so "deleted" and
    "not found" are indistinguishable here — both report True.
    """
    try:
        get_supabase_client().table("submissions").delete().eq(
            "submission_id", submission_id).execute()
        return True
    except Exception as e:
        logger.error(f"Error deleting submission: {e}")
        return False
# ===========================
# Raw Downloads History Management
# ===========================
def create_raw_download_history(
    ge_id: str,
    lang: str,
    destination_path: str,
    files: List[Dict[str, Any]],
    status: str = "success",
    total_files: int = 0,
    successful_downloads: int = 0,
    mongodb_path: Optional[str] = None,
    mode: str = "api"
) -> Optional[Dict[str, Any]]:
    """Insert one raw-download history row in Supabase.

    Returns the inserted row dict, or None when the insert failed.
    """
    try:
        payload = {
            "ge_id": ge_id,
            "lang": lang.upper(),  # language codes are stored uppercase
            "destination_path": destination_path,
            "files": files,
            "status": status,
            "total_files": total_files,
            "successful_downloads": successful_downloads,
            "mongodb_path": mongodb_path,
            "mode": mode
        }
        inserted = get_supabase_client().table(
            "raw_download_history").insert(payload).execute()
        if inserted.data:
            row = inserted.data[0]
            if isinstance(row, dict):
                logger.debug(f"Created raw download history: {ge_id}_{lang}")
                return row
        return None
    except Exception as e:
        logger.error(
            f"Error creating raw download history: {e}", exc_info=True)
        return None
def get_raw_download_history(limit: int = 50) -> List[Dict[str, Any]]:
    """Fetch raw download history from Supabase, newest first.

    Non-dict rows are filtered out defensively; any failure yields [].
    """
    try:
        result = get_supabase_client().table("raw_download_history").select(
            "*").order("created_at", desc=True).limit(limit).execute()
        rows = result.data
        if rows and isinstance(rows, list):
            return [row for row in rows if isinstance(row, dict)]
        return []
    except Exception as e:
        logger.error(f"Error fetching raw download history: {e}")
        return []
def delete_raw_download_history(download_id: str) -> bool:
    """Delete one raw-download history row by ID; True unless the call raised."""
    try:
        get_supabase_client().table("raw_download_history").delete().eq(
            "id", download_id).execute()
        return True
    except Exception as e:
        logger.error(f"Error deleting raw download history: {e}")
        return False
# ===========================
# OLD QUEUE FUNCTIONS REMOVED
# ===========================
# All raw_download_queue related functions have been removed.
# Use downloads_service.py instead for file download management.

60
backend/services/usernames.py Executable file
View File

@ -0,0 +1,60 @@
"""
Usernames management service: loads from backend/static/userslist.json and exposes functions to get/add/delete usernames.
"""
import os
import json
from typing import List, Dict
USERS_FILE = os.path.join(os.path.dirname(__file__), '..', 'static', 'userslist.json')
def load_usernames() -> List[str]:
    """Read the username list from USERS_FILE; any failure yields []."""
    try:
        with open(USERS_FILE, 'r', encoding='utf-8') as fh:
            return json.load(fh)
    except Exception:
        # Missing or corrupt file is treated as an empty list.
        return []
def save_usernames(usernames: List[str]) -> bool:
    """Write the username list to USERS_FILE; returns True on success."""
    try:
        with open(USERS_FILE, 'w', encoding='utf-8') as fh:
            json.dump(usernames, fh, ensure_ascii=False, indent=4)
    except Exception:
        return False
    return True
def get_usernames() -> Dict[str, object]:
    """Return {'success': True, 'usernames': [...]} for the stored list."""
    try:
        return {"success": True, "usernames": load_usernames()}
    except Exception as e:
        return {"success": False, "message": str(e)}
def add_username(new_username: str) -> Dict[str, object]:
    """Append a username to the JSON-backed list, keeping it sorted."""
    new_username = new_username.strip()
    if not new_username:
        return {"success": False, "message": "Empty username"}
    names = load_usernames()
    if new_username in names:
        return {"success": False, "message": "Already exists", "usernames": names}
    names.append(new_username)
    names.sort()
    if save_usernames(names):
        return {"success": True, "message": "Added", "usernames": names}
    return {"success": False, "message": "Could not save"}
def delete_username(username: str) -> Dict[str, object]:
    """Remove a username from the JSON-backed list, if present."""
    username = username.strip()
    names = load_usernames()
    if username not in names:
        return {"success": False, "message": "Not found", "usernames": names}
    names.remove(username)
    if save_usernames(names):
        return {"success": True, "message": "Deleted", "usernames": names}
    return {"success": False, "message": "Could not save"}

303
backend/worker.py Executable file
View File

@ -0,0 +1,303 @@
"""
Background worker to process pending submissions.
Behavior:
- Polls pending submissions from Supabase (`backend.services.supabase_service.get_pending_submissions_supabase`) if available.
- For each submission:
1. Validate all GE/lang TMS data upfront; if any error, stop and report error for entire submission.
2. Mark as `processing`, call `automation.process_project` for each GE.
3. Determine overall status: if ANY detail has status='error', mark submission as 'failed'; else 'completed'.
4. Save structured results with url, message, status per detail.
5. Close driver on error; reuse on success.
6. Reset stuck submissions (processing > timeout) back to pending or failed.
This module can be started as a standalone script during development, or imported and started
from a FastAPI startup event.
"""
import time
import logging
import signal
import sys
from datetime import datetime, timedelta
from typing import List, Dict, Any
from .services import supabase_service
# NOTE: TMS permission automation moved to TypeScript backend (backend-tms/)
# This worker only handles raw_download submissions
from .services import mongodb_service
# Use logger from root (configured in main.py)
log = logging.getLogger(__name__)
# Reduce verbosity from httpx (the Supabase client's HTTP layer) so the
# worker's polling does not spam an INFO line for every request.
logging.getLogger('httpx').setLevel(logging.WARNING)
# Graceful-shutdown flag: set by signal_handler, polled by run_loop.
_shutdown_requested = False
def signal_handler(sig, frame):
    """SIGINT/SIGTERM handler: ask the polling loop to exit cleanly."""
    global _shutdown_requested
    _shutdown_requested = True
    log.info(f'Received signal {sig}, initiating graceful shutdown...')


# NOTE: No need to close Chrome driver anymore - TMS automation moved to TypeScript backend
# Wire the handler to both interrupt and terminate signals.
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
def reset_processing_to_pending_on_startup():
    """Reset all 'processing' submissions back to 'pending' on server startup.

    Handles the case where the server was shut down while submissions were
    mid-flight; the worker then picks them up again ordered by created_at.
    """
    try:
        stuck = supabase_service.get_processing_submissions_supabase()
        if not stuck:
            log.info('No stuck processing submissions found on startup')
            return
        log.info(
            f'Found {len(stuck)} submissions stuck in processing state. Resetting to pending...')
        for row in stuck:
            sid = str(row.get('submission_id') or row.get('id'))
            try:
                supabase_service.update_submission_supabase(
                    sid,
                    status='pending',
                    error_message=None  # clear any previous error message
                )
                log.info(
                    f'Reset submission {sid} from processing to pending')
            except Exception as e:
                log.error(f'Error resetting submission {sid}: {e}')
    except Exception as e:
        log.error(f'Error in reset_processing_to_pending_on_startup: {e}')
def parse_ge_input_raw(ge_input: str) -> List[Dict[str, Any]]:
    """Split raw ge_input (newline separated) into dicts {ge_id, langs, tn_mode}.

    Accepts the legacy input format like "1000 de" or "696 us": first token is
    the GE id, the optional second token is the language. ``langs`` is a list
    of (lang_code, final_flag) tuples with final_flag always False here.
    """
    parsed: List[Dict[str, Any]] = []
    for raw_line in str(ge_input).splitlines():
        raw_line = raw_line.strip()
        if not raw_line:
            continue
        tokens = raw_line.split()
        lang_code = tokens[1] if len(tokens) > 1 else ''
        parsed.append({
            'ge_id': tokens[0],
            'langs': [(lang_code, False)],
            'tn_mode': False,
        })
    return parsed
def extract_url_from_message(message: str) -> tuple[str, str]:
    """Split an automation message of the form "https://... -> text".

    Returns (url, clean_message); when no leading http(s) URL is found the
    url placeholder is '#' and the original message is returned untouched.
    """
    if not message:
        return ('#', '')
    head, sep, tail = message.partition(' -> ')
    if sep:
        candidate = head.strip()
        # Only accept the prefix as a URL if it actually looks like one.
        if candidate.startswith(('http://', 'https://')):
            return (candidate, tail.strip())
    return ('#', message)
def validate_ge_inputs(ge_list: List[Dict[str, Any]]) -> tuple[bool, List[str]]:
    """Validate all GE/lang TMS data upfront. Return (is_valid, error_messages).

    Mimics the old project behavior: check ALL entries before processing ANY.
    """
    errors: List[str] = []
    for entry in ge_list:
        ge_id = entry['ge_id']
        for lang_code, _final in entry['langs']:
            if not lang_code:
                continue
            # Codes like "xx_de" carry the real language after the underscore.
            orig_lang = lang_code.split(
                '_')[1] if '_' in lang_code else lang_code
            try:
                tms_id = mongodb_service.get_tms_data(ge_id, orig_lang)
                log.debug(
                    f"Validated TMS data: GE={ge_id}, lang={orig_lang}, tms_id={tms_id}")
            except Exception as e:
                msg = str(e)
                # Normalize the "TMS ID missing" error to include the GE/lang.
                if 'TMS ID chưa được bổ sung' in msg:
                    msg = f"{ge_id} {orig_lang}: TMS ID chưa được bổ sung"
                errors.append(msg)
    return (not errors, errors)
def process_one_submission(sub: Dict[str, Any]):
submission_id = sub.get('submission_id') or sub.get('id')
if not submission_id:
log.error('Submission missing id, skipping')
return
submission_id = str(submission_id)
log.info(f"Processing submission: {submission_id}")
try:
# Mark processing
supabase_service.update_submission_supabase(
submission_id, status='processing')
usernames = []
input_data = sub.get('input') if isinstance(
sub.get('input'), dict) else None
if input_data:
usernames = input_data.get('usernames', []) or []
ge_input = input_data.get('ge_input', '')
else:
# compatibility: older shape
usernames = sub.get('usernames', []) or []
ge_input = sub.get('ge_input', '')
parsed_ge = parse_ge_input_raw(ge_input)
# ===== STEP 1: Validate all GE data BEFORE processing =====
is_valid, validation_errors = validate_ge_inputs(parsed_ge)
if not is_valid:
error_message = "Không thể tiếp tục do có lỗi với dữ liệu đầu vào:\n" + \
"\n".join(validation_errors)
log.error(f"Validation failed: {error_message}")
# Build error results for all GE entries
ge_results = []
for ge in parsed_ge:
ge_id = ge['ge_id']
langs = ge['langs']
ge_id_and_lang = f"{ge_id} {langs[0][0]}" if langs else ge_id
details = []
for username in usernames:
details.append({
'username': username,
'url': '#',
'message': error_message,
'status': 'error',
'errorDetails': None
})
ge_result = {
'geIdAndLang': ge_id_and_lang,
'completionTime': datetime.utcnow().isoformat() + 'Z',
'details': details
}
ge_results.append(ge_result)
# Mark as failed due to validation error
supabase_service.update_submission_supabase(
submission_id, status='failed', error_message=error_message, results=ge_results)
log.info(
f"Submission {submission_id} marked as failed due to validation error")
return
# ===== STEP 2: Process submissions =====
# NOTE: This Python worker only handles raw_download submissions.
# TMS permission submissions are handled by TypeScript backend (backend-tms/)
# If we reach here, it means the submission_type filter failed - this is a bug.
error_message = "ERROR: Python worker only handles raw_download submissions. TMS permission automation has been moved to TypeScript backend (port 4000)."
log.error(
f"Submission {submission_id} should not be processed by Python worker - check submission_type filter")
ge_results = []
for ge in parsed_ge:
ge_id = ge['ge_id']
langs = ge['langs']
ge_id_and_lang = f"{ge_id} {langs[0][0]}" if langs else ge_id
details = []
for username in usernames:
details.append({
'username': username,
'url': '#',
'message': error_message,
'status': 'error',
'errorDetails': None
})
ge_result = {
'geIdAndLang': ge_id_and_lang,
'completionTime': datetime.utcnow().isoformat() + 'Z',
'details': details
}
ge_results.append(ge_result)
supabase_service.update_submission_supabase(
submission_id, status='failed', error_message=error_message, results=ge_results)
log.info(
f"Submission {submission_id} marked as failed - wrong submission_type")
except Exception as e:
err = str(e)
log.exception(f"Error processing submission {submission_id}: {err}")
supabase_service.update_submission_supabase(
submission_id, status='failed', error_message=err)
def run_loop(poll_interval: int = 3):
    """Poll Supabase for pending submissions until shutdown is requested.

    Adaptive backoff: polls every ``poll_interval`` seconds while work
    exists, doubling the wait (capped at 30s) while the queue is empty to
    reduce Supabase traffic and repeated httpx log noise. Any submissions
    stuck in 'processing' are reset to 'pending' on startup.
    """
    MAX_BACKOFF = 30  # seconds
    log.info('Worker started, resetting any stuck submissions...')
    reset_processing_to_pending_on_startup()
    log.info('Polling for pending submissions...')
    wait = poll_interval
    while not _shutdown_requested:
        try:
            batch = supabase_service.get_pending_submissions_supabase()
            if isinstance(batch, list) and batch:
                wait = poll_interval  # work found: back to fast polling
                for item in batch:
                    if _shutdown_requested:
                        log.info('Shutdown requested, stopping processing')
                        break
                    process_one_submission(item)
                # short pause before re-checking
                time.sleep(wait)
            else:
                # Idle: double the delay, capped at MAX_BACKOFF.
                wait = min(MAX_BACKOFF, wait * 2)
                log.debug(
                    f'No pending submissions, backing off to {wait}s')
                time.sleep(wait)
        except Exception as e:
            log.exception(f'Worker loop error: {e}')
            # on error, wait a bit before retrying
            time.sleep(min(MAX_BACKOFF, wait * 2))
    log.info('Worker shutting down gracefully')
# NOTE: No cleanup needed - TMS automation moved to TypeScript backend
# Allow running the worker standalone (e.g. `python -m backend.worker`).
if __name__ == '__main__':
    run_loop()

615
backend/worker_downloads.py Executable file
View File

@ -0,0 +1,615 @@
"""
File-centric download worker.
Processes individual file downloads from the downloads table.
Architecture:
- Each download record = 1 file
- Worker picks up pending files and downloads them one by one
- Progress tracked per-file, not per-batch
- Uses aria2c for fast multi-connection downloads
"""
import time
import logging
import signal
import re
from datetime import datetime
from typing import Optional, Dict, Any, List
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from .services import downloads_service
from .services import nas_service
from .services import mongodb_service
from .common import get_download_filename
logger = logging.getLogger(__name__)
# Worker configuration - Load from environment variables.
# POLL_INTERVAL: seconds between queue polls while work exists (min interval).
POLL_INTERVAL = int(os.getenv('WORKER_POLL_INTERVAL', '3'))
# POLL_INTERVAL_MAX: seconds between polls when the queue is idle (max backoff).
POLL_INTERVAL_MAX = int(os.getenv('WORKER_POLL_INTERVAL_MAX', '30'))
# MAX_CONCURRENT_DOWNLOADS: full-speed slots; applies to sharing-mode files
# only (API-mode downloads are submitted immediately without a slot limit).
MAX_CONCURRENT_DOWNLOADS = int(
    os.getenv('WORKER_MAX_CONCURRENT_DOWNLOADS', '5'))
# BACKGROUND_DOWNLOAD_MAX_SPEED: bandwidth cap for overflow ("background
# queue") sharing downloads that run throttled to keep the link alive.
BACKGROUND_DOWNLOAD_MAX_SPEED = os.getenv(
    'BACKGROUND_DOWNLOAD_MAX_SPEED', '100K')
# Set by signal_handler; polled by the worker loop for graceful shutdown.
_shutdown_requested = False
def natural_sort_key(text: str) -> List:
    """
    Sort key that orders embedded numbers numerically and text
    case-insensitively (file1, file2, file10 — not file1, file10, file2).

    Args:
        text: String to generate sort key for
    Returns:
        List of interleaved lowercase strings and ints for natural sorting
    """
    chunks = re.split('([0-9]+)', text)
    return [int(chunk) if chunk.isdigit() else chunk.lower() for chunk in chunks]
def signal_handler(sig, frame):
    """Handle graceful shutdown on SIGINT/SIGTERM by flagging the loop."""
    global _shutdown_requested
    _shutdown_requested = True
    logger.debug(f"Received signal {sig}, initiating graceful shutdown...")


# Wire the handler to both interrupt and terminate signals.
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
def process_file_download(download: Dict[str, Any], sid: str, max_speed: Optional[str] = None) -> bool:
    """
    Process a single file download (one download record = one file).

    Args:
        download: Download record from database. Reads id, file_name,
            file_path, ge_id, lang, mode; sharing mode also reads sharing_id.
        sid: NAS session ID for authentication
        max_speed: Optional bandwidth limit (e.g., '100K') for throttling
    Returns:
        True if successful, False if failed. The record's status is
        persisted ('downloading' -> 'completed'/'failed') in all paths.
    """
    download_id = download["id"]
    file_name = download["file_name"]
    file_path = download["file_path"]
    ge_id = download["ge_id"]
    lang = download["lang"]
    mode = download["mode"]
    # IMPORTANT: Uppercase lang code for folder naming (1000_DE, not 1000_de)
    lang_upper = lang.upper()
    logger.debug(
        f"[Download {download_id}] Processing: {file_name} (mode: {mode}, GE={ge_id}, lang={lang_upper})")
    try:
        # Update to downloading status
        downloads_service.update_download_status(
            download_id=download_id,
            status='downloading',
            progress_percent=0.0
        )
        # Determine source and destination paths
        if mode == 'api':
            # API mode: Download from NAS FileStation
            source_path = file_path  # Relative path in NAS
            base_dest = nas_service.DESTINATION_PATH
            # Get final filename (adds .zip for folders automatically)
            dest_filename = get_download_filename(file_path, file_name)
            # Use format: base/GE_LANG/filename (e.g., raw/1000_DE/file.zip)
            dest_path = os.path.join(
                base_dest, f"{ge_id}_{lang_upper}", dest_filename)
            # Get MongoDB path
            # NOTE(review): mongodb_path is never read below — presumably the
            # call is kept for its side effect (raising when TMS data is
            # missing, which fails the download via the except branch).
            # Confirm before removing.
            mongodb_path = mongodb_service.get_path_from_tms_data(ge_id, lang)
            # Download using NAS service
            success, final_path, error_msg = _download_api_file(
                download_id=download_id,
                source_path=source_path,
                dest_path=dest_path,
                file_name=file_name,
                sid=sid,
                max_speed=max_speed
            )
        elif mode == 'sharing':
            # Sharing mode: Download from sharing link
            sharing_id = download.get('sharing_id')
            if not sharing_id:
                raise Exception("Missing sharing_id for sharing mode download")
            base_dest = nas_service.DESTINATION_PATH
            # Get final filename (adds .zip for folders automatically)
            dest_filename = get_download_filename(file_path, file_name)
            # Use format: base/GE_LANG/filename (e.g., raw/1000_DE/file.zip)
            dest_path = os.path.join(
                base_dest, f"{ge_id}_{lang_upper}", dest_filename)
            # Download using sharing service
            success, final_path, error_msg = _download_sharing_file(
                download_id=download_id,
                sharing_id=sharing_id,
                file_path=file_path,
                dest_path=dest_path,
                file_name=file_name,
                max_speed=max_speed
            )
            mongodb_path = None  # no MongoDB lookup in sharing mode
        else:
            raise Exception(f"Unknown download mode: {mode}")
        # Update final status
        if success:
            downloads_service.update_download_status(
                download_id=download_id,
                status='completed',
                progress_percent=100.0,
                destination_path=final_path
            )
            logger.debug(f"[Download {download_id}] ✅ Completed: {file_name}")
            return True
        else:
            downloads_service.update_download_status(
                download_id=download_id,
                status='failed',
                error_message=error_msg
            )
            logger.error(f"[Download {download_id}] ❌ Failed: {error_msg}")
            return False
    except Exception as e:
        # Any unexpected error (including the MongoDB lookup above) marks the
        # row failed so it is not left stuck in 'downloading'.
        error_msg = str(e)
        logger.error(
            f"[Download {download_id}] Exception: {error_msg}", exc_info=True)
        downloads_service.update_download_status(
            download_id=download_id,
            status='failed',
            error_message=error_msg
        )
        return False
def _download_api_file(
    download_id: int,
    source_path: str,
    dest_path: str,
    file_name: str,
    sid: str,
    max_speed: Optional[str] = None
) -> tuple[bool, Optional[str], Optional[str]]:
    """
    Download a single file via the NAS FileStation API using aria2.

    Args:
        max_speed: Optional bandwidth limit (e.g., '100K')
    Returns:
        (success, final_path, error_message)
    """
    try:
        def on_progress(done: int, total: int):
            # Push progress to the DB; total may be 0/unknown (folders), in
            # which case only downloaded_size is reported.
            pct = round((done / total) * 100, 2) if total > 0 else None
            downloads_service.update_download_status(
                download_id=download_id,
                status='downloading',
                progress_percent=pct,
                downloaded_size=done,
                file_size=total if total > 0 else None
            )

        # Make sure the destination directory exists before aria2 writes.
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)
        ok, err, gid = nas_service.download_single_file_aria2(
            sid=sid,
            remote_path=source_path,
            local_save_path=dest_path,
            is_dir=False,
            progress_callback=on_progress,
            max_speed=max_speed
        )
        if gid:
            # Persist the aria2 GID so the download can be cancelled later.
            downloads_service.update_download_status(
                download_id=download_id,
                status='downloading',
                aria2_gid=gid
            )
        if ok:
            return True, dest_path, None
        return False, None, err or "Download failed"
    except Exception as e:
        return False, None, str(e)
def _download_sharing_file(
    download_id: int,
    sharing_id: str,
    file_path: str,
    dest_path: str,
    file_name: str,
    max_speed: Optional[str] = None
) -> tuple[bool, Optional[str], Optional[str]]:
    """
    Download a single file from a Synology sharing link using aria2.

    Args:
        max_speed: Optional bandwidth limit (e.g., '100K')
    Returns:
        (success, final_path, error_message)
    """
    try:
        from .services import nas_sharing_service

        def on_progress(done: int, total: int):
            # Push progress to the DB; total may be 0/unknown (folders), in
            # which case only downloaded_size is reported.
            pct = round((done / total) * 100, 2) if total > 0 else None
            downloads_service.update_download_status(
                download_id=download_id,
                status='downloading',
                progress_percent=pct,
                downloaded_size=done,
                file_size=total if total > 0 else None
            )

        # Make sure the destination directory exists before aria2 writes.
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)
        if not nas_sharing_service.get_sharing_worker():
            return False, None, "Sharing worker not available"
        # No file extension -> treat the entry as a folder.
        is_folder = not bool(os.path.splitext(file_path)[1])
        result = nas_sharing_service.download_file(
            sharing_id=sharing_id,
            file_path=file_path,
            save_path=dest_path,
            is_folder=is_folder,
            progress_callback=on_progress,
            max_speed=max_speed
        )
        gid = result.get('aria2_gid')
        if gid:
            # Persist the aria2 GID so the download can be cancelled later
            # (same behavior as API mode).
            downloads_service.update_download_status(
                download_id=download_id,
                status='downloading',
                aria2_gid=gid
            )
        if result['status'] == 'success':
            logger.debug(
                f"[Sharing Download {download_id}] ✅ Downloaded: {file_name}")
            return True, result['save_path'], None
        logger.error(
            f"[Sharing Download {download_id}] ❌ Failed: {result['message']}")
        return False, None, result['message']
    except Exception as e:
        logger.error(
            f"[Sharing Download {download_id}] Exception: {str(e)}", exc_info=True)
        return False, None, str(e)
def recover_orphaned_downloads():
    """
    Reconcile downloads left in 'downloading' state after a crash/restart.

    A record is orphaned when the server died mid-download, the worker was
    killed, or aria2 finished without the DB being updated. Records whose
    aria2 GID is still active/waiting/paused are left alone; everything else
    is marked failed.
    """
    logger.debug("🔍 Checking for orphaned downloads...")
    try:
        stuck = downloads_service.get_all_downloads(
            status='downloading', limit=1000)
        if not stuck:
            logger.debug("No orphaned downloads found")
            return
        logger.debug(f"Found {len(stuck)} downloads in 'downloading' state")
        # aria2 may be unavailable; treat that as "no task is active".
        manager = None
        try:
            from .services.aria2.download_manager import get_aria2_manager
            manager = get_aria2_manager()
        except Exception as e:
            logger.warning(f"Could not get aria2 manager: {e}")
        recovered_count = 0
        failed_count = 0
        for record in stuck:
            gid = record.get('aria2_gid')
            download_id = record['id']
            file_name = record.get('file_name', 'unknown')
            alive = False
            if gid and manager:
                try:
                    info = manager.get_status(gid)
                    alive = info.get('status') in [
                        'active', 'waiting', 'paused']
                except Exception as e:
                    logger.debug(f"Could not get status for GID {gid}: {e}")
            if alive:
                # aria2 is still downloading — leave it for the worker.
                logger.debug(
                    f"♻️ Download {download_id} ({file_name}) is still active in aria2")
                recovered_count += 1
            else:
                logger.warning(
                    f"❌ Download {download_id} ({file_name}) has no active aria2 task")
                downloads_service.update_download_status(
                    download_id=download_id,
                    status='failed',
                    error_message='Download was interrupted (server crash or restart)'
                )
                failed_count += 1
        logger.debug(
            f"✅ Recovery complete: {recovered_count} recovered, {failed_count} marked as failed")
    except Exception as e:
        logger.error(
            f"Error during orphaned downloads recovery: {e}", exc_info=True)
def worker_loop():
"""
Main worker loop.
Continuously polls for pending downloads and processes them.
LOGIC:
- API mode: Tải ngay lập tức, không giới hạn số lượng
- Sharing mode:
* Main queue (first MAX_CONCURRENT_DOWNLOADS): Full speed, ưu tiên slots
* Background queue (rest): Throttled, LUÔN TẢI SONG SONG để keep link alive
"""
logger.debug("🚀 File Download Worker started")
logger.debug(f" - Poll interval: {POLL_INTERVAL}s")
logger.debug(
f" - Max concurrent (Sharing only): {MAX_CONCURRENT_DOWNLOADS}")
logger.debug(
f" - Background speed (Sharing only): {BACKGROUND_DOWNLOAD_MAX_SPEED}")
# Recovery: Check for orphaned downloads from previous crashes
recover_orphaned_downloads()
# Get NAS session - use saved SID
sid = nas_service.load_sid()
if not sid:
logger.error("❌ No NAS session found. Please login via OTP first.")
return
logger.debug(f"✅ Loaded NAS session (SID: {sid[:20]}...)")
# Create thread pool - NO LIMIT (API mode needs unlimited parallelism)
# Sharing mode will self-limit via queue logic
executor = ThreadPoolExecutor(max_workers=50, thread_name_prefix="DL-")
active_futures = {} # Map future -> download_id
# Adaptive polling: start with min interval, increase when idle
current_poll_interval = POLL_INTERVAL
while not _shutdown_requested:
try:
# Get active downloads (pending or downloading)
active_downloads = downloads_service.get_active_downloads()
# Separate API and Sharing downloads
api_downloads = [d for d in active_downloads if d['mode'] == 'api']
sharing_downloads = [
d for d in active_downloads if d['mode'] == 'sharing']
# ========== API MODE: IMMEDIATE DOWNLOAD (NO QUEUE) ==========
api_pending = [
d for d in api_downloads if d['status'] == 'pending']
for download in api_pending:
try:
future = executor.submit(
process_file_download,
download,
sid,
max_speed=None # API mode: Always full speed
)
active_futures[future] = download['id']
logger.debug(
f"🚀 [API] Started download {download['id']}: {download['file_name']}")
except Exception as e:
logger.error(
f"Error submitting API download {download['id']}: {e}")
# ========== SHARING MODE: QUEUE-BASED WITH THROTTLING ==========
# Get pending sharing downloads
sharing_pending = [
d for d in sharing_downloads if d['status'] == 'pending']
if sharing_pending:
# Natural sort by file name within each batch
from collections import defaultdict
batches = defaultdict(list)
for d in sharing_pending:
batch_id = d.get('batch_id', 'default')
batches[batch_id].append(d)
# Sort each batch naturally and flatten
sorted_pending = []
for batch_files in batches.values():
sorted_batch = sorted(
batch_files, key=lambda x: natural_sort_key(x['file_name']))
sorted_pending.extend(sorted_batch)
# Split into main queue (first N) and background queue (rest)
main_queue = sorted_pending[:MAX_CONCURRENT_DOWNLOADS]
background_queue = sorted_pending[MAX_CONCURRENT_DOWNLOADS:]
# Count currently downloading sharing files (for slot calculation)
sharing_downloading_count = len(
[d for d in sharing_downloads if d['status'] == 'downloading'])
available_slots = MAX_CONCURRENT_DOWNLOADS - sharing_downloading_count
# Submit main queue files (full speed, use available slots)
if available_slots > 0:
for download in main_queue[:available_slots]:
try:
future = executor.submit(
process_file_download,
download,
sid,
max_speed=None # Full speed
)
active_futures[future] = download['id']
logger.debug(
f"🚀 [SHARING-MAIN] Started download {download['id']}: {download['file_name']}")
except Exception as e:
logger.error(
f"Error submitting sharing download {download['id']}: {e}")
# Submit background queue files (throttled, ALWAYS to keep links alive)
# Background files KHÔNG CẦN SLOTS - tải song song với tốc độ thấp
for download in background_queue:
# Check if already downloading
if download['status'] == 'pending':
try:
future = executor.submit(
process_file_download,
download,
sid,
max_speed=BACKGROUND_DOWNLOAD_MAX_SPEED # Throttled
)
active_futures[future] = download['id']
logger.debug(
f"🐢 [SHARING-BACKGROUND] Started download {download['id']}: {download['file_name']} (limited to {BACKGROUND_DOWNLOAD_MAX_SPEED})")
except Exception as e:
logger.error(
f"Error submitting background download {download['id']}: {e}")
# Check for completed futures (non-blocking)
done_futures = [f for f in active_futures if f.done()]
for future in done_futures:
download_id = active_futures.pop(future)
try:
success = future.result()
if success:
logger.debug(
f"✅ Thread completed download {download_id}")
else:
logger.warning(
f"⚠️ Thread failed download {download_id}")
except Exception as e:
logger.error(
f"❌ Thread exception for download {download_id}: {e}")
# Log status
if active_downloads:
api_pending_count = len(
[d for d in api_downloads if d['status'] == 'pending'])
api_downloading_count = len(
[d for d in api_downloads if d['status'] == 'downloading'])
sharing_pending_count = len(
[d for d in sharing_downloads if d['status'] == 'pending'])
sharing_downloading_count = len(
[d for d in sharing_downloads if d['status'] == 'downloading'])
logger.debug(
f"📊 API: {len(api_downloads)} (Downloading: {api_downloading_count}, Pending: {api_pending_count}) | "
f"Sharing: {len(sharing_downloads)} (Downloading: {sharing_downloading_count}, Pending: {sharing_pending_count}) | "
f"Threads: {len(active_futures)}"
)
# Adaptive polling: fast when active, slow when idle
if active_downloads or active_futures:
# Has work → use minimum interval
current_poll_interval = POLL_INTERVAL
else:
# Idle → gradually increase interval (up to max)
current_poll_interval = min(
current_poll_interval * 2, POLL_INTERVAL_MAX)
except Exception as e:
logger.error(f"Error in worker loop: {e}", exc_info=True)
# On error, use default interval
current_poll_interval = POLL_INTERVAL
time.sleep(current_poll_interval)
# Cleanup
logger.debug("Shutting down thread pool...")
executor.shutdown(wait=True)
logger.debug("Worker shutdown complete")
def start_worker():
    """Spawn the download worker loop on a daemon thread and return the thread.

    The loop runs inside a guard so that an unexpected exception is logged
    instead of silently killing the thread. The thread is a daemon, so it
    will not block interpreter shutdown.
    """
    import threading

    def _guarded_loop():
        # Any crash in worker_loop is logged with a traceback (exc_info=True).
        try:
            worker_loop()
        except Exception as exc:
            logger.error(f"Worker crashed: {exc}", exc_info=True)

    worker_thread = threading.Thread(
        target=_guarded_loop,
        daemon=True,
        name="DownloadWorker",
    )
    worker_thread.start()
    logger.debug("Download worker thread started")
    return worker_thread
if __name__ == "__main__":
    # Standalone entry point for testing: run the worker loop in the
    # foreground with basic console logging; Ctrl+C stops it cleanly.
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO,
    )
    try:
        worker_loop()
    except KeyboardInterrupt:
        logger.debug("Worker stopped by user")

228
components/CheckHistory.tsx Executable file
View File

@ -0,0 +1,228 @@
import React, { useState } from 'react';
/** Outcome of checking one GE/language/chapter triple on the backend. */
interface CheckResult {
  ge_id: string;
  lang: string;
  chapter: string;
  /** FOUND = already uploaded, NOT_FOUND = missing, ERROR = check itself failed. */
  status: 'FOUND' | 'NOT_FOUND' | 'ERROR';
  /** Human-readable note shown in the "Ghi chú" column. */
  message: string;
  /** Link to the item in TMS, when the backend could resolve one. */
  tms_url?: string;
}
/** One saved check run (a row of the `check_list` table, per CheckPage's subscriptions). */
interface CheckRecord {
  id: string;
  /** Creation timestamp string, rendered via `toLocaleString('vi-VN')`. */
  created_at: string;
  // NOTE(review): original submission payload; its shape is not visible from
  // this file — TODO replace `any` with a concrete type once confirmed.
  input: any;
  status: 'pending' | 'processing' | 'completed' | 'failed';
  /** Per-chapter results; empty until the run completes. */
  results: CheckResult[];
  error?: string;
}
/** Props for CheckHistory. `onDelete` is optional; the delete button only renders when provided. */
interface CheckHistoryProps {
  history: CheckRecord[];
  onDelete?: (id: string) => void;
}
/**
 * Collapsible history list for "Check Upload" runs.
 *
 * Each record shows summary counts (total / found / not-found / error) and
 * expands into a per-chapter result table. A toggle hides FOUND results;
 * records with nothing left to show are skipped entirely while it is on.
 */
const CheckHistory: React.FC<CheckHistoryProps> = ({ history, onDelete }) => {
  // id of the record whose result table is currently expanded (one at a time)
  const [expandedId, setExpandedId] = useState<string | null>(null);
  // when true, results with status FOUND are filtered out of the tables
  const [hideCompleted, setHideCompleted] = useState(false);
  // Format a timestamp with the Vietnamese locale.
  const formatDate = (dateStr: string) => {
    const date = new Date(dateStr);
    return date.toLocaleString('vi-VN');
  };
  // Tailwind text-color class for a record-level status.
  const getStatusColor = (status: string) => {
    switch (status) {
      case 'completed': return 'text-green-400';
      case 'processing': return 'text-yellow-400';
      case 'failed': return 'text-red-400';
      default: return 'text-slate-400';
    }
  };
  // Vietnamese label for a record-level status.
  const getStatusText = (status: string) => {
    switch (status) {
      case 'completed': return 'Hoàn thành';
      case 'processing': return 'Đang xử lý';
      case 'failed': return 'Thất bại';
      default: return 'Chờ xử lý';
    }
  };
  return (
    <div className="w-full mt-8">
      <div className="flex justify-between items-center mb-6">
        <h2 className="text-2xl font-semibold text-white">Lịch sử Check Upload</h2>
        <label className="flex items-center gap-2 text-sm text-slate-400 cursor-pointer">
          {/* NOTE(review): label looks truncated (likely "Ẩn chap đã hoàn thành") — confirm intended text */}
          <span>n chap đã hoàn thành</span>
          <button
            onClick={() => setHideCompleted(!hideCompleted)}
            className={`relative inline-flex h-6 w-11 items-center rounded-full transition-colors ${hideCompleted ? 'bg-indigo-600' : 'bg-slate-600'}`}
          >
            <span
              className={`inline-block h-4 w-4 transform rounded-full bg-white transition-transform ${hideCompleted ? 'translate-x-6' : 'translate-x-1'}`}
            />
          </button>
        </label>
      </div>
      {history.length > 0 ? (
        <div className="space-y-4">
          {history.map((record) => {
            const isExpanded = expandedId === record.id;
            // Summary counts are always computed over the FULL result set,
            // independent of the hide-completed toggle.
            const foundCount = record.results.filter(r => r.status === 'FOUND').length;
            const notFoundCount = record.results.filter(r => r.status === 'NOT_FOUND').length;
            const errorCount = record.results.filter(r => r.status === 'ERROR').length;
            // Filter results based on toggle - hide FOUND (completed) when toggle is on
            let displayResults = hideCompleted
              ? record.results.filter(r => r.status !== 'FOUND')
              : record.results;
            // Sort results by GE ID (ascending)
            // Non-numeric ge_id values fall back to 0 and sort first.
            displayResults = [...displayResults].sort((a, b) => {
              const aId = parseInt(a.ge_id) || 0;
              const bId = parseInt(b.ge_id) || 0;
              return aId - bId;
            });
            // Skip record if no results to show when filter is on
            if (hideCompleted && displayResults.length === 0) {
              return null;
            }
            return (
              <div
                key={record.id}
                className="bg-slate-800/50 border border-slate-700 rounded-lg overflow-hidden"
              >
                {/* Header row: click toggles expansion of the detail table */}
                <div
                  className="p-4 cursor-pointer hover:bg-slate-700/30 transition-colors"
                  onClick={() => setExpandedId(isExpanded ? null : record.id)}
                >
                  <div className="flex justify-between items-start">
                    <div className="flex-1">
                      <div className="flex items-center gap-3 mb-2">
                        <span className="text-sm text-slate-400">{formatDate(record.created_at)}</span>
                        <span className={`text-sm font-medium ${getStatusColor(record.status)}`}>
                          {getStatusText(record.status)}
                        </span>
                      </div>
                      <div className="flex gap-4 text-sm">
                        <span className="text-slate-300">
                          Tổng: <span className="font-medium text-white">{record.results.length}</span>
                        </span>
                        {foundCount > 0 && (
                          <span className="text-green-400">
                            Đã : <span className="font-medium">{foundCount}</span>
                          </span>
                        )}
                        {notFoundCount > 0 && (
                          <span className="text-yellow-400">
                            Chưa : <span className="font-medium">{notFoundCount}</span>
                          </span>
                        )}
                        {errorCount > 0 && (
                          <span className="text-red-400">
                            Lỗi: <span className="font-medium">{errorCount}</span>
                          </span>
                        )}
                      </div>
                    </div>
                    <div className="flex items-center gap-2">
                      {/* Delete button only when a handler was supplied; stops
                          propagation so it does not also toggle expansion. */}
                      {onDelete && (
                        <button
                          onClick={(e) => {
                            e.stopPropagation();
                            onDelete(record.id);
                          }}
                          className="text-slate-500 hover:text-red-400 transition-colors p-1"
                          title="Xoá"
                        >
                          <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                          </svg>
                        </button>
                      )}
                      {/* Chevron rotates 180° when expanded */}
                      <button className="text-slate-400 hover:text-white transition-colors">
                        <svg
                          className={`w-5 h-5 transition-transform ${isExpanded ? 'rotate-180' : ''}`}
                          fill="none"
                          stroke="currentColor"
                          viewBox="0 0 24 24"
                        >
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 9l-7 7-7-7" />
                        </svg>
                      </button>
                    </div>
                  </div>
                </div>
                {/* Detail table, rendered only when expanded and non-empty */}
                {isExpanded && displayResults.length > 0 && (
                  <div className="border-t border-slate-700">
                    <div className="overflow-x-auto">
                      <table className="w-full text-sm">
                        <thead className="bg-slate-700/50">
                          <tr>
                            <th className="py-2 px-4 text-left text-slate-300">GE ID</th>
                            <th className="py-2 px-4 text-left text-slate-300">LANG</th>
                            <th className="py-2 px-4 text-left text-slate-300">CHAP</th>
                            <th className="py-2 px-4 text-left text-slate-300">Trạng thái</th>
                            <th className="py-2 px-4 text-left text-slate-300">Ghi chú</th>
                          </tr>
                        </thead>
                        <tbody>
                          {displayResults.map((result, idx) => (
                            <tr key={idx} className="border-t border-slate-700/50 hover:bg-slate-700/30">
                              <td className="py-2 px-4">
                                {/* GE ID links out to TMS when a URL is available */}
                                {result.tms_url ? (
                                  <a
                                    href={result.tms_url}
                                    target="_blank"
                                    rel="noreferrer"
                                    className="text-blue-400 hover:underline"
                                  >
                                    {result.ge_id}
                                  </a>
                                ) : (
                                  <span className="text-white">{result.ge_id}</span>
                                )}
                              </td>
                              <td className="py-2 px-4 text-slate-300">{result.lang}</td>
                              <td className="py-2 px-4 text-slate-300">{result.chapter}</td>
                              <td className="py-2 px-4">
                                <span
                                  className={`px-2 py-1 rounded text-xs font-bold ${result.status === 'FOUND'
                                    ? 'bg-green-900 text-green-300'
                                    : result.status === 'NOT_FOUND'
                                      ? 'bg-yellow-900 text-yellow-300'
                                      : 'bg-red-900 text-red-300'
                                    }`}
                                >
                                  {result.status === 'FOUND' ? 'ĐÃ CÓ' : result.status === 'NOT_FOUND' ? 'CHƯA CÓ' : 'LỖI'}
                                </span>
                              </td>
                              <td className="py-2 px-4 text-slate-400">{result.message}</td>
                            </tr>
                          ))}
                        </tbody>
                      </table>
                    </div>
                  </div>
                )}
              </div>
            );
          })}
        </div>
      ) : (
        <div className="text-center py-10 px-6 bg-slate-800/50 border border-slate-700 rounded-lg">
          <p className="text-slate-400">Chưa lịch sử check.</p>
        </div>
      )}
    </div>
  );
};
export default CheckHistory;

14
components/CheckIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React, { memo } from 'react';
/** Filled circular check-mark icon; memoized since it only depends on `className`. */
const CheckIcon: React.FC<{ className?: string }> = ({ className }) => {
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      viewBox="0 0 24 24"
      fill="currentColor"
      className={className}
    >
      <path
        fillRule="evenodd"
        clipRule="evenodd"
        d="M2.25 12c0-5.385 4.365-9.75 9.75-9.75s9.75 4.365 9.75 9.75-4.365 9.75-9.75 9.75S2.25 17.385 2.25 12Zm13.36-1.814a.75.75 0 1 0-1.22-.872l-3.236 4.53L9.53 12.22a.75.75 0 0 0-1.06 1.06l2.25 2.25a.75.75 0 0 0 1.14-.094l3.75-5.25Z"
      />
    </svg>
  );
};

export default memo(CheckIcon);

162
components/CheckPage.tsx Executable file
View File

@ -0,0 +1,162 @@
import React, { useState, useEffect } from 'react';
import { supabase } from '../utils/supabase';
import CheckHistory from './CheckHistory';
/**
 * "Check Upload Status" page: submits GE/LANG/CHAP triples to the backend
 * and shows run history, kept fresh via Supabase Realtime subscriptions.
 */
const CheckPage: React.FC = () => {
  // raw textarea input, one "GE_ID LANG CHAP" triple per line
  const [input, setInput] = useState('');
  // true while the most recently submitted run is still processing
  const [loading, setLoading] = useState(false);
  // id of the run we are currently waiting on (drives the filtered subscription)
  const [currentId, setCurrentId] = useState<string | null>(null);
  const [history, setHistory] = useState<any[]>([]);
  // Watch ONLY the in-flight record: clear the loading flag once it reaches
  // a terminal status. Re-subscribes whenever currentId changes.
  useEffect(() => {
    if (!currentId) return;
    const channel = supabase
      .channel('check_list_updates')
      .on(
        'postgres_changes',
        {
          event: 'UPDATE',
          schema: 'public',
          table: 'check_list',
          filter: `id=eq.${currentId}`,
        },
        (payload) => {
          if (payload.new.status === 'completed' || payload.new.status === 'failed') {
            setLoading(false);
          }
        }
      )
      .subscribe();
    return () => {
      supabase.removeChannel(channel);
    };
  }, [currentId]);
  // Fetch history on mount
  useEffect(() => {
    fetchHistory();
  }, []);
  // Load the full run history from the backend.
  const fetchHistory = async () => {
    try {
      const response = await fetch('/api/check/history');
      const data = await response.json();
      if (data.success) {
        setHistory(data.data);
      }
    } catch (error) {
      console.error('Failed to fetch history:', error);
    }
  };
  // Subscribe to check_list changes for auto-refresh
  // (any insert/update/delete on the table triggers a full re-fetch)
  useEffect(() => {
    const channel = supabase
      .channel('check_list_all')
      .on(
        'postgres_changes',
        {
          event: '*',
          schema: 'public',
          table: 'check_list',
        },
        () => {
          fetchHistory();
        }
      )
      .subscribe();
    return () => {
      supabase.removeChannel(channel);
    };
  }, []);
  // Submit the textarea contents; on success, remember the new run id so the
  // filtered subscription above can clear the loading state when it finishes.
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!input.trim()) return;
    setLoading(true);
    setCurrentId(null);
    try {
      const response = await fetch('/api/check/submit', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ check_input: input }),
      });
      const data = await response.json();
      if (data.success) {
        setCurrentId(data.id);
        // Refresh history after submission
        setTimeout(() => fetchHistory(), 1000);
      } else {
        alert('Error: ' + data.error);
        setLoading(false);
      }
    } catch (error) {
      console.error(error);
      setLoading(false);
      alert('Error submitting request');
    }
  };
  // Delete one history record, then re-fetch the list.
  const handleDeleteHistory = async (id: string) => {
    try {
      const response = await fetch(`/api/check/${id}`, {
        method: 'DELETE',
      });
      const data = await response.json();
      if (data.success) {
        fetchHistory();
      } else {
        alert('Lỗi xoá: ' + data.error);
      }
    } catch (error) {
      console.error('Failed to delete:', error);
      alert('Lỗi xoá lịch sử');
    }
  };
  return (
    <div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
      <h1 className="text-2xl font-bold text-white mb-6">Check Upload Status (QC Subset)</h1>
      <form onSubmit={handleSubmit} className="mb-8">
        <div className="mb-4">
          <label className="block text-sm font-medium text-slate-300 mb-2">
            Input (GE ID LANG CHAP)
          </label>
          <textarea
            value={input}
            onChange={(e) => setInput(e.target.value)}
            className="bg-slate-900/50 border border-slate-700 text-slate-100 text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 block w-full p-2.5 h-32 font-mono"
            placeholder="4164 FR 3&#10;419 DE 80"
          />
        </div>
        <div className="flex gap-2">
          <button
            type="submit"
            disabled={loading}
            className="text-white bg-indigo-600 hover:bg-indigo-700 focus:ring-4 focus:outline-none focus:ring-indigo-800 font-medium rounded-lg text-sm px-5 py-2.5 text-center disabled:opacity-50"
          >
            {loading ? 'Checking...' : 'Check'}
          </button>
          <button
            type="button"
            onClick={() => setInput('')}
            disabled={loading || !input.trim()}
            className="text-slate-300 bg-slate-700 hover:bg-slate-600 focus:ring-4 focus:outline-none focus:ring-slate-800 font-medium rounded-lg text-sm px-5 py-2.5 text-center disabled:opacity-50"
          >
            Clear
          </button>
        </div>
      </form>
      <CheckHistory history={history} onDelete={handleDeleteHistory} />
    </div>
  );
};
export default CheckPage;

69
components/ConfirmModal.tsx Executable file
View File

@ -0,0 +1,69 @@
import React from 'react';
/** Props for the generic confirmation dialog. Text defaults are Vietnamese. */
interface ConfirmModalProps {
  isOpen: boolean;
  title?: string;
  /** Body content; may be rich JSX, not just a string. */
  message: React.ReactNode;
  confirmText?: string;
  cancelText?: string;
  /** Tailwind classes for the confirm button (defaults to a red/danger style). */
  confirmButtonClass?: string;
  onConfirm: () => void;
  onCancel: () => void;
  /** Disables both buttons and shows a spinner on the confirm button. */
  isLoading?: boolean;
}
/**
 * Generic confirm/cancel modal with a backdrop overlay.
 * Renders nothing when closed; while `isLoading`, both buttons are disabled
 * and the confirm button shows a spinner.
 */
const ConfirmModal: React.FC<ConfirmModalProps> = ({
  isOpen,
  title = 'Xác nhận',
  message,
  confirmText = 'Xác nhận',
  cancelText = 'Huỷ',
  confirmButtonClass = 'bg-rose-600 hover:bg-rose-700',
  onConfirm,
  onCancel,
  isLoading = false,
}) => {
  if (!isOpen) return null;
  return (
    <div className="fixed inset-0 bg-black/50 backdrop-blur-sm flex items-center justify-center p-4 z-[60]">
      <div className="bg-slate-800 rounded-xl shadow-2xl border border-slate-700 max-w-md w-full">
        {/* Header */}
        <div className="p-6 border-b border-slate-700">
          <h3 className="text-lg font-semibold text-white">{title}</h3>
        </div>
        {/* Content */}
        <div className="p-6">
          <div className="text-slate-300">{message}</div>
        </div>
        {/* Footer */}
        <div className="flex justify-end gap-3 p-6 border-t border-slate-700">
          <button
            onClick={onCancel}
            disabled={isLoading}
            className="px-4 py-2 bg-slate-700 hover:bg-slate-600 text-white rounded-lg font-medium transition-colors disabled:opacity-50"
          >
            {cancelText}
          </button>
          <button
            onClick={onConfirm}
            disabled={isLoading}
            className={`px-4 py-2 text-white rounded-lg font-medium transition-colors disabled:opacity-50 flex items-center gap-2 ${confirmButtonClass}`}
          >
            {/* Spinner shown only while the confirm action is in flight */}
            {isLoading && (
              <svg className="w-4 h-4 animate-spin" fill="none" viewBox="0 0 24 24">
                <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
                <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
              </svg>
            )}
            {confirmText}
          </button>
        </div>
      </div>
    </div>
  );
};
export default ConfirmModal;

View File

@ -0,0 +1,27 @@
import React from 'react';
import { TruncatedPath } from './TruncatedPath';
/** Props for the legacy CopyButtonWithModal wrapper (forwarded to TruncatedPath). */
interface CopyButtonWithModalProps {
  label: string;
  text: string;
  variant?: 'blue' | 'yellow';
}
/**
* Wrapper component for TruncatedPath with label support.
* This component is kept for backward compatibility.
* Internally uses TruncatedPath for consistent behavior.
*/
export const CopyButtonWithModal: React.FC<CopyButtonWithModalProps> = ({
label,
text,
variant = 'blue'
}) => {
return (
<TruncatedPath
path={text}
variant={variant}
label={label}
/>
);
};

View File

@ -0,0 +1,401 @@
import React, { useState, useEffect } from 'react';
/** One custom-path mapping as returned by the `/api/custom-paths` endpoints. */
interface CustomPath {
  ge_id: string;
  lang: string;
  custom_path: string;
  created_at?: string;
  updated_at?: string;
}
/** Props for the custom-path management modal. */
interface CustomPathManagerModalProps {
  isOpen: boolean;
  onClose: () => void;
  onPathUpdated?: () => void; // Callback khi có thay đổi (fired after successful save/delete)
}
/**
 * Modal for browsing, searching, editing, and deleting custom-path mappings.
 *
 * Data flow: the full list is loaded once per open via GET /api/custom-paths,
 * then filtered/paginated entirely client-side (searchTerm + visibleCount).
 * Edits POST the whole record back; renaming a ge_id first DELETEs the old
 * record. Body scrolling is locked while the modal is open.
 *
 * NOTE(review): the search predicate is duplicated in three places (the
 * filter effect, handleSearch, handleLoadMore) — candidate for extraction.
 */
const CustomPathManagerModal: React.FC<CustomPathManagerModalProps> = ({ isOpen, onClose, onPathUpdated }) => {
  const [searchTerm, setSearchTerm] = useState('');
  // full list from the server; filteredPaths is the visible slice of it
  const [allPaths, setAllPaths] = useState<CustomPath[]>([]);
  const [filteredPaths, setFilteredPaths] = useState<CustomPath[]>([]);
  // client-side pagination: how many filtered rows to show (grows by 10)
  const [visibleCount, setVisibleCount] = useState(10);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  // record currently in inline-edit mode (null = view mode everywhere)
  const [editingPath, setEditingPath] = useState<CustomPath | null>(null);
  const [editForm, setEditForm] = useState({ ge_id: '', lang: '', custom_path: '' });
  const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
  // ge_id of the record pending delete confirmation
  const [deleteTarget, setDeleteTarget] = useState<string | null>(null);
  // Lock body scroll and (re)load data whenever the modal opens.
  useEffect(() => {
    if (isOpen) {
      document.body.style.overflow = 'hidden';
      loadPaths();
    } else {
      document.body.style.overflow = 'auto';
    }
    return () => {
      document.body.style.overflow = 'auto';
    };
  }, [isOpen]);
  useEffect(() => {
    // Filter paths when search term changes
    // (case-insensitive substring match on ge_id, lang, or path)
    if (searchTerm.trim() === '') {
      setFilteredPaths(allPaths.slice(0, visibleCount));
    } else {
      const filtered = allPaths
        .filter(path =>
          path.ge_id.toLowerCase().includes(searchTerm.toLowerCase()) ||
          path.lang.toLowerCase().includes(searchTerm.toLowerCase()) ||
          path.custom_path.toLowerCase().includes(searchTerm.toLowerCase())
        )
        .slice(0, visibleCount);
      setFilteredPaths(filtered);
    }
  }, [searchTerm, allPaths, visibleCount]);
  // Fetch the full custom-path list from the backend.
  const loadPaths = async () => {
    setIsLoading(true);
    setError(null);
    try {
      const response = await fetch('/api/custom-paths');
      const data = await response.json();
      if (data.success && Array.isArray(data.custom_paths)) {
        setAllPaths(data.custom_paths);
        setFilteredPaths(data.custom_paths.slice(0, 10));
      } else {
        setError('Không thể tải danh sách custom paths');
      }
    } catch (err) {
      console.error('Error loading custom paths:', err);
      setError('Lỗi khi tải danh sách custom paths');
    } finally {
      setIsLoading(false);
    }
  };
  // Early exit AFTER all hooks so the hook order stays stable across renders.
  if (!isOpen) {
    return null;
  }
  // Explicit search button/Enter handler: resets pagination to the first page.
  const handleSearch = () => {
    setVisibleCount(10);
    const filtered = allPaths
      .filter(path =>
        path.ge_id.toLowerCase().includes(searchTerm.toLowerCase()) ||
        path.lang.toLowerCase().includes(searchTerm.toLowerCase()) ||
        path.custom_path.toLowerCase().includes(searchTerm.toLowerCase())
      )
      .slice(0, 10);
    setFilteredPaths(filtered);
  };
  // Enter in the search box triggers a search instead of submitting a form.
  const handleInputKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter') {
      e.preventDefault();
      handleSearch();
    }
  };
  // "Load more": widen the visible window by 10 and recompute the slice.
  const handleLoadMore = () => {
    const newCount = visibleCount + 10;
    setVisibleCount(newCount);
    if (searchTerm.trim() === '') {
      setFilteredPaths(allPaths.slice(0, newCount));
    } else {
      const filtered = allPaths
        .filter(path =>
          path.ge_id.toLowerCase().includes(searchTerm.toLowerCase()) ||
          path.lang.toLowerCase().includes(searchTerm.toLowerCase()) ||
          path.custom_path.toLowerCase().includes(searchTerm.toLowerCase())
        )
        .slice(0, newCount);
      setFilteredPaths(filtered);
    }
  };
  // Enter inline-edit mode for one record, pre-filling the edit form.
  const handleEdit = (path: CustomPath) => {
    setEditingPath(path);
    setEditForm({
      ge_id: path.ge_id,
      lang: path.lang,
      custom_path: path.custom_path
    });
  };
  const handleCancelEdit = () => {
    setEditingPath(null);
    setEditForm({ ge_id: '', lang: '', custom_path: '' });
  };
  // Persist an edit: if ge_id changed, delete the old record first, then
  // POST the (possibly new) record and reload the list.
  const handleSaveEdit = async () => {
    if (!editForm.ge_id || !editForm.lang || !editForm.custom_path) {
      setError('Vui lòng điền đầy đủ thông tin');
      return;
    }
    setIsLoading(true);
    setError(null);
    try {
      // Delete old record if ge_id changed
      if (editingPath && editingPath.ge_id !== editForm.ge_id) {
        await fetch(`/api/custom-paths/${editingPath.ge_id}`, {
          method: 'DELETE'
        });
      }
      // Create/update new record
      const response = await fetch('/api/custom-paths', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(editForm)
      });
      const data = await response.json();
      if (data.success) {
        await loadPaths();
        handleCancelEdit();
        if (onPathUpdated) onPathUpdated();
      } else {
        setError(data.message || 'Không thể lưu custom path');
      }
    } catch (err) {
      console.error('Error saving custom path:', err);
      setError('Lỗi khi lưu custom path');
    } finally {
      setIsLoading(false);
    }
  };
  // Open the delete-confirmation dialog for one ge_id.
  const handleDeleteClick = (geId: string) => {
    setDeleteTarget(geId);
    setShowDeleteConfirm(true);
  };
  // Confirmed delete: DELETE the record, reload, notify the parent.
  const handleConfirmDelete = async () => {
    if (!deleteTarget) return;
    setIsLoading(true);
    setError(null);
    try {
      const response = await fetch(`/api/custom-paths/${deleteTarget}`, {
        method: 'DELETE'
      });
      const data = await response.json();
      if (data.success) {
        await loadPaths();
        setShowDeleteConfirm(false);
        setDeleteTarget(null);
        if (onPathUpdated) onPathUpdated();
      } else {
        setError(data.message || 'Không thể xóa custom path');
      }
    } catch (err) {
      console.error('Error deleting custom path:', err);
      setError('Lỗi khi xóa custom path');
    } finally {
      setIsLoading(false);
    }
  };
  const handleCancelDelete = () => {
    setShowDeleteConfirm(false);
    setDeleteTarget(null);
  };
  return (
    <div className="fixed inset-0 bg-black/70 backdrop-blur-sm flex items-center justify-center z-50 p-4">
      <div className="bg-slate-800 rounded-2xl shadow-2xl w-full max-w-4xl max-h-[90vh] flex flex-col border border-slate-700">
        {/* Header */}
        <div className="flex justify-between items-center p-6 border-b border-slate-700">
          <h2 className="text-2xl font-bold text-white">Quản Custom Paths</h2>
          <button
            onClick={onClose}
            className="text-slate-400 hover:text-white transition-colors p-2 hover:bg-slate-700 rounded-lg"
          >
            <svg className="w-6 h-6" viewBox="0 0 24 24" fill="currentColor">
              <path fillRule="evenodd" d="M5.47 5.47a.75.75 0 0 1 1.06 0L12 10.94l5.47-5.47a.75.75 0 1 1 1.06 1.06L13.06 12l5.47 5.47a.75.75 0 1 1-1.06 1.06L12 13.06l-5.47 5.47a.75.75 0 0 1-1.06-1.06L10.94 12 5.47 6.53a.75.75 0 0 1 0-1.06Z" clipRule="evenodd" />
            </svg>
          </button>
        </div>
        {/* Search Bar */}
        <div className="p-6 border-b border-slate-700">
          <div className="flex gap-3">
            <input
              type="text"
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              onKeyDown={handleInputKeyDown}
              placeholder="Tìm kiếm GE ID, Lang, hoặc Path..."
              className="flex-1 bg-slate-900/50 border border-slate-600 text-white text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 px-4 py-2.5"
            />
            <button
              onClick={handleSearch}
              className="px-6 py-2.5 bg-indigo-600 hover:bg-indigo-700 text-white font-medium rounded-lg transition-colors"
            >
              Tìm kiếm
            </button>
          </div>
        </div>
        {/* Error Message */}
        {error && (
          <div className="mx-6 mt-4 p-4 bg-red-900/20 border border-red-500/50 rounded-lg">
            <p className="text-red-400 text-sm">{error}</p>
          </div>
        )}
        {/* Content: spinner → empty state → list (view or inline-edit rows) */}
        <div className="flex-1 overflow-y-auto p-6">
          {isLoading && filteredPaths.length === 0 ? (
            <div className="flex justify-center items-center py-12">
              <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-indigo-500"></div>
            </div>
          ) : filteredPaths.length === 0 ? (
            <div className="text-center py-12">
              <p className="text-slate-400">Không tìm thấy custom path nào</p>
            </div>
          ) : (
            <div className="space-y-3">
              {filteredPaths.map((path) => (
                <div
                  key={path.ge_id}
                  className="bg-slate-900/50 border border-slate-700 rounded-lg p-4 hover:border-slate-600 transition-colors"
                >
                  {editingPath?.ge_id === path.ge_id ? (
                    /* Edit Mode */
                    <div className="space-y-3">
                      <div className="grid grid-cols-3 gap-3">
                        <input
                          type="text"
                          value={editForm.ge_id}
                          onChange={(e) => setEditForm({ ...editForm, ge_id: e.target.value })}
                          placeholder="GE ID"
                          className="bg-slate-800 border border-slate-600 text-white text-sm rounded-lg px-3 py-2"
                        />
                        <input
                          type="text"
                          value={editForm.lang}
                          onChange={(e) => setEditForm({ ...editForm, lang: e.target.value.toUpperCase() })}
                          placeholder="Lang"
                          className="bg-slate-800 border border-slate-600 text-white text-sm rounded-lg px-3 py-2"
                        />
                        <input
                          type="text"
                          value={editForm.custom_path}
                          onChange={(e) => setEditForm({ ...editForm, custom_path: e.target.value })}
                          placeholder="Custom Path"
                          className="bg-slate-800 border border-slate-600 text-white text-sm rounded-lg px-3 py-2"
                        />
                      </div>
                      <div className="flex gap-2">
                        <button
                          onClick={handleSaveEdit}
                          disabled={isLoading}
                          className="px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-white text-sm rounded-lg disabled:opacity-50"
                        >
                          Lưu
                        </button>
                        <button
                          onClick={handleCancelEdit}
                          className="px-4 py-2 bg-slate-700 hover:bg-slate-600 text-white text-sm rounded-lg"
                        >
                          Hủy
                        </button>
                      </div>
                    </div>
                  ) : (
                    /* View Mode */
                    <div className="flex items-center justify-between">
                      <div className="flex-1 space-y-1">
                        <div className="flex items-center gap-3">
                          <span className="text-white font-semibold">{path.ge_id}</span>
                          <span className="px-2 py-0.5 bg-indigo-600/20 text-indigo-300 text-xs rounded">
                            {path.lang}
                          </span>
                        </div>
                        <p className="text-slate-300 text-sm font-mono">{path.custom_path}</p>
                      </div>
                      <div className="flex gap-2">
                        <button
                          onClick={() => handleEdit(path)}
                          className="p-2 text-indigo-400 hover:text-indigo-300 hover:bg-slate-800 rounded-lg transition-colors"
                          title="Sửa"
                        >
                          <svg className="w-5 h-5" viewBox="0 0 24 24" fill="currentColor">
                            <path d="M21.731 2.269a2.625 2.625 0 0 0-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 0 0 0-3.712ZM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 0 0-1.32 2.214l-.8 2.685a.75.75 0 0 0 .933.933l2.685-.8a5.25 5.25 0 0 0 2.214-1.32l8.4-8.4Z" />
                            <path d="M5.25 5.25a3 3 0 0 0-3 3v10.5a3 3 0 0 0 3 3h10.5a3 3 0 0 0 3-3V13.5a.75.75 0 0 0-1.5 0v5.25a1.5 1.5 0 0 1-1.5 1.5H5.25a1.5 1.5 0 0 1-1.5-1.5V8.25a1.5 1.5 0 0 1 1.5-1.5h5.25a.75.75 0 0 0 0-1.5H5.25Z" />
                          </svg>
                        </button>
                        <button
                          onClick={() => handleDeleteClick(path.ge_id)}
                          className="p-2 text-red-400 hover:text-red-300 hover:bg-slate-800 rounded-lg transition-colors"
                          title="Xóa"
                        >
                          <svg className="w-5 h-5" viewBox="0 0 24 24" fill="currentColor">
                            <path fillRule="evenodd" d="M16.5 4.478v.227a48.816 48.816 0 0 1 3.878.512.75.75 0 1 1-.256 1.478l-.209-.035-1.005 13.07a3 3 0 0 1-2.991 2.77H8.084a3 3 0 0 1-2.991-2.77L4.087 6.66l-.209.035a.75.75 0 0 1-.256-1.478A48.567 48.567 0 0 1 7.5 4.705v-.227c0-1.564 1.213-2.9 2.816-2.951a52.662 52.662 0 0 1 3.369 0c1.603.051 2.815 1.387 2.815 2.951Zm-6.136-1.452a51.196 51.196 0 0 1 3.273 0C14.39 3.05 15 3.684 15 4.478v.113a49.488 49.488 0 0 0-6 0v-.113c0-.794.609-1.428 1.364-1.452Zm-.355 5.945a.75.75 0 1 0-1.5.058l.347 9a.75.75 0 1 0 1.499-.058l-.346-9Zm5.48.058a.75.75 0 1 0-1.498-.058l-.347 9a.75.75 0 0 0 1.5.058l.345-9Z" clipRule="evenodd" />
                          </svg>
                        </button>
                      </div>
                    </div>
                  )}
                </div>
              ))}
            </div>
          )}
          {/* Load More Button */}
          {filteredPaths.length < allPaths.length && (
            <div className="mt-6 text-center">
              <button
                onClick={handleLoadMore}
                className="px-6 py-2.5 bg-slate-700 hover:bg-slate-600 text-white font-medium rounded-lg transition-colors"
              >
                Tải thêm
              </button>
            </div>
          )}
        </div>
        {/* Footer Info */}
        <div className="p-4 border-t border-slate-700 bg-slate-900/50">
          <p className="text-slate-400 text-sm text-center">
            Tổng số: <span className="text-white font-semibold">{allPaths.length}</span> custom paths
          </p>
        </div>
      </div>
      {/* Delete Confirmation Modal */}
      {showDeleteConfirm && (
        <div className="fixed inset-0 bg-black/70 backdrop-blur-sm flex items-center justify-center z-[60]">
          <div className="bg-slate-800 rounded-xl shadow-2xl p-6 max-w-md w-full mx-4 border border-slate-700">
            <h3 className="text-xl font-bold text-white mb-4">Xác nhận xóa</h3>
            <p className="text-slate-300 mb-6">
              Bạn chắc chắn muốn xóa custom path cho <span className="text-white font-semibold">{deleteTarget}</span>?
            </p>
            <div className="flex gap-3">
              <button
                onClick={handleConfirmDelete}
                disabled={isLoading}
                className="flex-1 px-4 py-2.5 bg-red-600 hover:bg-red-700 text-white font-medium rounded-lg disabled:opacity-50"
              >
                Xóa
              </button>
              <button
                onClick={handleCancelDelete}
                className="flex-1 px-4 py-2.5 bg-slate-700 hover:bg-slate-600 text-white font-medium rounded-lg"
              >
                Hủy
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
};
export default CustomPathManagerModal;

59
components/DownloadHistory.tsx Executable file
View File

@ -0,0 +1,59 @@
import React, { memo } from 'react';
import type { DownloadHistoryEntry, DownloadedFile } from '../types';
import DownloadHistoryItem from './DownloadHistoryItem';
import { naturalSort } from '../utils/naturalSort';
/** Props for the download-history panel; all actions are delegated to the parent. */
interface DownloadHistoryProps {
  history: DownloadHistoryEntry[];
  onDelete: (id: string) => void;
  onRetry: (entry: DownloadHistoryEntry) => void;
  /** Opens an error-details view with the given text. */
  onErrorClick: (details: string) => void;
}
/**
 * Scrollable panel listing past downloads.
 * Files within each entry are natural-sorted; only the 30 newest entries
 * are rendered to keep the list responsive.
 */
const DownloadHistory: React.FC<DownloadHistoryProps> = ({ history, onDelete, onRetry, onErrorClick }) => {
  // Sort files within each history entry by natural sort
  const sortedHistory = React.useMemo(() => {
    return history.map(entry => ({
      ...entry,
      files: naturalSort<DownloadedFile>(
        entry.files,
        // fall back from file_name to name to '' for the sort key
        (file: DownloadedFile) => file.file_name || file.name || ''
      )
    }));
  }, [history]);
  // Chỉ hiển thị 30 records mới nhất để tối ưu hiệu suất
  const displayedHistory = React.useMemo(() => sortedHistory.slice(0, 30), [sortedHistory]);
  return (
    <div className="bg-slate-800/50 backdrop-blur-sm p-6 rounded-2xl shadow-lg border border-slate-700 flex flex-col h-full">
      <div className="flex justify-between items-center mb-4 border-b border-slate-700 pb-3">
        <h2 className="text-xl font-semibold text-white">Lịch sử tải xuống</h2>
        {/* "shown / total" counter */}
        <span className="text-sm text-slate-400">
          {displayedHistory.length} / {history.length} bản ghi
        </span>
      </div>
      <div className="flex-1 overflow-y-auto pr-2 -mr-2">
        {displayedHistory.length > 0 ? (
          <div className="space-y-4">
            {displayedHistory.map((entry) => (
              <DownloadHistoryItem
                key={entry.id}
                entry={entry}
                onDelete={onDelete}
                onRetry={onRetry}
                onErrorClick={onErrorClick}
              />
            ))}
          </div>
        ) : (
          <div className="flex justify-center items-center flex-1">
            <p className="text-slate-400">Chưa lịch sử tải xuống.</p>
          </div>
        )}
      </div>
    </div>
  );
};
export default memo(DownloadHistory);

View File

@ -0,0 +1,219 @@
import React, { memo } from 'react';
import type { DownloadHistoryEntry } from '../types';
import TrashIcon from './TrashIcon';
import RetryIcon from './RetryIcon';
import CheckIcon from './CheckIcon';
import XCircleIcon from './XCircleIcon';
import { CopyButtonWithModal } from './CopyButtonWithModal';
import { TruncatedPath } from './TruncatedPath';
import { FolderPathDisplay } from './FolderPathDisplay';
// Render a byte count as a human-readable suffix like " (1.50 MB)".
// Returns '' for undefined/0; integer bytes (< 1 KB) keep no decimals,
// larger values are shown with two. (Extracted from App.tsx.)
const formatFileSize = (bytes: number | undefined): string => {
  if (!bytes) return '';
  const UNITS = ['B', 'KB', 'MB', 'GB', 'TB'];
  let value = bytes;
  let idx = 0;
  // Scale down by 1024 until under the next unit or out of unit names.
  while (value >= 1024 && idx < UNITS.length - 1) {
    value /= 1024;
    idx += 1;
  }
  const text = idx === 0 ? String(value) : value.toFixed(2);
  return ` (${text} ${UNITS[idx]})`;
};
interface DownloadHistoryItemProps {
entry: DownloadHistoryEntry;
onDelete: (id: string) => void;
onRetry: (entry: DownloadHistoryEntry) => void;
onErrorClick: (details: string) => void;
}
/**
 * One entry row in the download-history list: GE ID/lang header, overall
 * status colouring, source → destination path flow, and per-file status
 * chips. Purely presentational — delete / retry / error-detail actions are
 * delegated upward via the callbacks in props.
 */
const DownloadHistoryItem: React.FC<DownloadHistoryItemProps> = ({ entry, onDelete, onRetry, onErrorClick }) => {
  // Use geIdAndLang from entry (already formatted by converter)
  const geIdAndLang = entry.geIdAndLang || 'N/A';
  // Determine overall status from the success/total counters.
  // NOTE(review): when entry.totalFiles === 0 both counters are 0 and this
  // reports 'success' — confirm that is the intended display for empty entries.
  const getStatus = () => {
    if (entry.successCount === entry.totalFiles) return 'success';
    if (entry.successCount === 0) return 'error';
    return 'partial';
  };
  const status = getStatus();
  // Get status icon and color
  const getStatusDisplay = () => {
    switch (status) {
      case 'success':
        return {
          icon: <CheckIcon className="w-5 h-5" />,
          color: 'text-green-400',
          bg: 'bg-green-900/5 border-green-700/25',
          label: 'Hoàn thành',
        };
      case 'error':
        return {
          icon: <XCircleIcon className="w-5 h-5" />,
          color: 'text-red-400',
          bg: 'bg-red-900/30 border-red-700/25',
          label: 'Thất bại',
        };
      case 'partial':
        return {
          icon: <span className="text-lg"></span>,
          color: 'text-yellow-400',
          bg: 'bg-yellow-900/30 border-yellow-700/25',
          label: 'Một phần',
        };
    }
  };
  const statusDisplay = getStatusDisplay();
  // Build "<name>: <message>" lines for failed files (null when none).
  // NOTE(review): only status === 'error' is collected here, while the chip
  // renderer below also treats 'failed' as a failure state — confirm whether
  // 'failed' files should contribute to the error details too.
  const getErrorDetails = () => {
    const failedFiles = entry.files.filter((f) => f.status === 'error');
    if (failedFiles.length === 0) return null;
    return failedFiles
      .map((f) => `${f.name}: ${f.message}`)
      .join('\n');
  };
  const errorDetails = getErrorDetails();
  return (
    <div className={`border rounded-lg p-4 ${statusDisplay.bg}`}>
      {/* Header: GE ID, Time, Delete Button */}
      <div className="flex items-center justify-between mb-3">
        <div className="flex items-center gap-3 text-sm">
          <span className="font-semibold text-white">{geIdAndLang}</span>
          <span className="text-slate-500"></span>
          <span className="text-slate-400">{entry.timestamp}</span>
        </div>
        {/* Action Buttons */}
        <div className="flex items-center gap-2">
          {errorDetails && (
            <button
              onClick={() => onErrorClick(errorDetails)}
              className="text-red-400 hover:text-red-300 text-sm font-medium"
              title="Xem chi tiết lỗi"
            >
              Chi tiết
            </button>
          )}
          {status !== 'success' && (
            <button
              onClick={() => onRetry(entry)}
              className="p-1.5 hover:bg-slate-700 rounded transition-colors"
              title="Thử lại"
            >
              <RetryIcon className="w-4 h-4 text-slate-400" />
            </button>
          )}
          <button
            onClick={() => onDelete(entry.id)}
            className="p-1.5 hover:bg-red-900/30 rounded transition-colors"
            title="Xóa"
          >
            <TrashIcon className="w-4 h-4 text-red-400" />
          </button>
        </div>
      </div>
      {/* Path Flow: Lezhin Disk → NAS Path */}
      <div className="flex items-start gap-3 mb-3">
        {/* Lezhin Disk Path (Source - only show once) */}
        {entry.mongoDbPath && (
          <div className="flex-1 min-w-0">
            <CopyButtonWithModal
              label="Lezhin Disk"
              text={entry.mongoDbPath}
              variant="blue"
            />
          </div>
        )}
        {/* Arrow Icon */}
        {entry.mongoDbPath && entry.files.length > 0 && (
          <svg viewBox="0 0 24 24" fill="currentColor" className="w-5 h-5 text-slate-500 flex-shrink-0 mt-2">
            <path fillRule="evenodd" d="M16.28 11.47a.75.75 0 0 1 0 1.06l-7.5 7.5a.75.75 0 0 1-1.06-1.06L14.69 12 7.72 5.03a.75.75 0 0 1 1.06-1.06l7.5 7.5Z" clipRule="evenodd" />
          </svg>
        )}
        {/* NAS Path (Folder name only - deduplicated) */}
        <div className="flex-1 min-w-0">
          {(() => {
            // Get unique folder names from all files
            // NOTE(review): files are widened to `any` because `path` is not
            // on the declared file type — consider extending the type instead.
            const folderNames = new Set(
              entry.files
                .map((file: any) => {
                  const filePath = file.path || '';
                  if (!filePath) return '';
                  // Extract folder: D:/.../raw/1000_DE/file.zip → D:/.../raw/1000_DE
                  // Keep original path separators (don't convert \ to /)
                  const cleanPath = filePath.replace(/[/\\]+$/, '');
                  const lastSepIndex = Math.max(
                    cleanPath.lastIndexOf('/'),
                    cleanPath.lastIndexOf('\\')
                  );
                  return lastSepIndex > 0 ? cleanPath.substring(0, lastSepIndex) : cleanPath;
                })
                .filter(Boolean)
            );
            // Display unique folders
            return Array.from(folderNames).map((folderPath, index) => (
              <FolderPathDisplay
                key={index}
                fullPath={folderPath}
                variant="yellow"
                maxChars={60}
              />
            ));
          })()}
        </div>
      </div>
      {/* Files List */}
      {entry.files.length > 0 && (
        <div className="mt-3 pt-3 border-t border-slate-700/50">
          <div className="flex flex-wrap gap-2">
            {entry.files.map((file: any, index) => {
              const isSuccess = file.status === 'success' || file.status === 'completed';
              const isFailed = file.status === 'failed' || file.status === 'error';
              const sizeStr = formatFileSize(file.file_size);
              const is38Bytes = file.file_size === 38; // Detect 38B error file — presumably the NAS error payload size; TODO confirm
              return (
                <div
                  key={index}
                  className={`inline-flex items-center gap-1.5 px-2.5 py-1 rounded-lg text-xs font-medium transition-colors ${is38Bytes
                    ? 'bg-red-900/30 text-red-300 border border-red-700/50' // 38B error
                    : isSuccess
                      ? 'bg-green-900/20 text-green-300 border border-green-700/30'
                      : isFailed
                        ? 'bg-red-900/20 text-red-300 border border-red-700/30'
                        : 'bg-slate-700/20 text-slate-300 border border-slate-700/30'
                    }`}
                  title={isFailed ? (file.error_message || file.message || 'Lỗi không xác định') : undefined}
                >
                  <span className="overflow-hidden break-words">{file.name}{sizeStr}</span>
                  <span className="flex-shrink-0"></span>
                </div>
              );
            })}
          </div>
        </div>
      )}
    </div>
  );
};
export default memo(DownloadHistoryItem);

234
components/DownloadQueueItem.tsx Executable file
View File

@ -0,0 +1,234 @@
import React, { useState, memo } from 'react';
import type { DownloadQueueItem } from '../types';
import TrashIcon from './TrashIcon';
import { TruncatedPath } from './TruncatedPath';
import { FolderPathDisplay } from './FolderPathDisplay';
import { CopyButtonWithModal } from './CopyButtonWithModal';
interface DownloadQueueItemProps {
item: DownloadQueueItem;
queuePosition?: number;
onCancel: (jobId: string) => void;
}
/**
 * One job row in the download queue: header (GE ID/lang, queue position,
 * progress summary), source → destination path flow, and an expandable
 * per-file progress list. Cancellation is delegated upward via onCancel.
 */
const DownloadQueueItem: React.FC<DownloadQueueItemProps> = ({ item, queuePosition, onCancel }) => {
  // Per-file progress list starts expanded.
  const [isExpanded, setIsExpanded] = useState(true);
  const isProcessing = item.status === 'processing';
  const isWaiting = item.status === 'waiting' || item.status === 'pending';
  // Truthy once the backend has reported per-file progress data.
  const hasProgress = item.progressData && item.progressData.files_status;
  // Format GE ID and Lang (uppercase lang)
  const geIdAndLang = item.name.includes(' ')
    ? item.name.split(' ').map((part, idx) => idx === 1 ? part.toUpperCase() : part).join(' ')
    : item.name;
  // Get status color
  const getStatusColor = () => {
    if (isProcessing) return 'text-amber-400';
    if (isWaiting) return 'text-blue-400';
    return 'text-slate-400';
  };
  // Get status background
  const getStatusBg = () => {
    if (isProcessing) return 'bg-amber-900/30 border-amber-700/25';
    if (isWaiting) return 'bg-blue-900/30 border-blue-700/25';
    return 'bg-slate-900/30 border-slate-700/25';
  };
  // Extract paths based on mode
  // NOTE(review): in API mode the fallback parses the source path out of the
  // display name ("<ge> - <path>") — confirm `name` always has that shape.
  const isSharing = item.mode === 'sharing';
  const sourcePath = isSharing ? (item.sharingUrl || '') : (item.mongoDbPath || item.name.split(' - ')[1] || '');
  const sourceLabel = isSharing ? 'Sharing Link' : 'Lezhin Disk';
  // Extract destination path from item data
  const destinationPath = item.destinationPath || '';
  // Format file size (B / KB / MB / GB; '' for zero)
  const formatSize = (bytes: number) => {
    if (bytes === 0) return '';
    if (bytes < 1024) return `${bytes} B`;
    if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
    if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
    return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
  };
  return (
    <div className={`border rounded-lg p-4 ${getStatusBg()}`}>
      {/* Header: GE ID, Queue Position, Cancel Button */}
      <div className="flex items-center justify-between mb-3">
        <div className="flex items-center gap-3 text-sm">
          <span className="font-semibold text-white">{geIdAndLang}</span>
          {queuePosition && (
            <>
              <span className="text-slate-500"></span>
              <div className="flex items-center gap-1.5">
                <div className="h-5 w-5 rounded-full bg-blue-500/20 border-2 border-blue-500 flex items-center justify-center">
                  <span className="text-xs text-blue-400 font-bold">{queuePosition}</span>
                </div>
                <span className={getStatusColor()}>
                  {isProcessing ? 'Đang xử lý' : 'Đang chờ'}
                </span>
              </div>
            </>
          )}
          {!queuePosition && (
            <>
              <span className="text-slate-500"></span>
              <span className={getStatusColor()}>
                {isProcessing ? 'Đang xử lý' : 'Đang chờ'}
              </span>
            </>
          )}
          {/* Progress summary (current/total files) */}
          {hasProgress && item.progressData && (
            <>
              <span className="text-slate-500"></span>
              <span className="text-slate-300 text-xs">
                {item.progressData.current_file_index || 0}/{item.progressData.total_files || 0} files
              </span>
            </>
          )}
        </div>
        <div className="flex items-center gap-2">
          {/* Expand/Collapse button */}
          {hasProgress && (
            <button
              onClick={() => setIsExpanded(!isExpanded)}
              className="p-1.5 hover:bg-slate-700/50 rounded transition-colors"
              title={isExpanded ? "Thu gọn" : "Mở rộng"}
            >
              <svg viewBox="0 0 24 24" fill="currentColor"
                className={`w-4 h-4 text-slate-400 transition-transform ${isExpanded ? 'rotate-180' : ''}`}>
                <path fillRule="evenodd" d="M12.53 16.28a.75.75 0 0 1-1.06 0l-7.5-7.5a.75.75 0 0 1 1.06-1.06L12 14.69l6.97-6.97a.75.75 0 1 1 1.06 1.06l-7.5 7.5Z" clipRule="evenodd" />
              </svg>
            </button>
          )}
          {/* Cancel Button — disabled until the backend has assigned a jobId */}
          <button
            onClick={() => onCancel(item.jobId || item.key)}
            className="p-1.5 hover:bg-red-900/30 rounded transition-colors"
            title="Hủy tải xuống"
            disabled={!item.jobId}
          >
            <TrashIcon className="w-4 h-4 text-red-400" />
          </button>
        </div>
      </div>
      {/* Path Flow: Source → NAS Path */}
      {(sourcePath || destinationPath) && (
        <div className="flex items-start gap-3 mb-3">
          {/* Source Path (Lezhin Disk or Sharing Link) */}
          {sourcePath && (
            <div className="flex-1 min-w-0">
              <CopyButtonWithModal
                label={sourceLabel}
                text={sourcePath}
                variant="blue"
              />
            </div>
          )}
          {/* Arrow Icon */}
          {sourcePath && destinationPath && (
            <svg viewBox="0 0 24 24" fill="currentColor" className="w-5 h-5 text-slate-500 flex-shrink-0 mt-2">
              <path fillRule="evenodd" d="M16.28 11.47a.75.75 0 0 1 0 1.06l-7.5 7.5a.75.75 0 0 1-1.06-1.06L14.69 12 7.72 5.03a.75.75 0 0 1 1.06-1.06l7.5 7.5Z" clipRule="evenodd" />
            </svg>
          )}
          {/* NAS Path (Destination) - Show only folder name */}
          {destinationPath && (
            <div className="flex-1 min-w-0">
              <FolderPathDisplay
                fullPath={destinationPath}
                variant="yellow"
                maxChars={60}
              />
            </div>
          )}
        </div>
      )}
      {/* Files Progress List */}
      {hasProgress && isExpanded && item.progressData?.files_status && (
        <div className="mt-3 space-y-2 border-t border-slate-700/50 pt-3">
          {[...item.progressData.files_status].sort((a, b) => {
            // Natural sort by file name (copy first: Array.sort mutates)
            return a.name.localeCompare(b.name, undefined, { numeric: true, sensitivity: 'base' });
          }).map((file, idx) => (
            <div key={idx} className="bg-slate-900/40 rounded-lg p-3">
              <div className="flex items-center justify-between mb-2">
                <div className="flex items-center gap-2 flex-1 min-w-0">
                  {/* File icon */}
                  <span className="text-base flex-shrink-0">
                    {file.is_folder ? '📁' : '📄'}
                  </span>
                  {/* File name */}
                  <span className="text-sm text-slate-200 truncate" title={file.name}>
                    {file.name}
                  </span>
                  {/* Status badge */}
                  <span className={`text-xs px-2 py-0.5 rounded-full flex-shrink-0 ${file.status === 'completed' ? 'bg-green-900/40 text-green-400' :
                    file.status === 'downloading' ? 'bg-amber-900/40 text-amber-400' :
                      file.status === 'failed' ? 'bg-red-900/40 text-red-400' :
                        'bg-slate-700/40 text-slate-400'
                    }`}>
                    {file.status === 'completed' ? '✓ Hoàn tất' :
                      file.status === 'downloading' ? '↓ Đang tải' :
                        file.status === 'failed' ? '✗ Lỗi' :
                          '⋯ Chờ'}
                  </span>
                </div>
                {/* Size/Progress info */}
                <div className="text-xs text-slate-400 flex-shrink-0 ml-2">
                  {file.status === 'downloading' && file.downloaded !== undefined ? (
                    // Show: downloaded/total for files with known size, or just downloaded for folders
                    file.total !== undefined && file.total > 0 ? (
                      <span className="font-mono">{formatSize(file.downloaded)} / {formatSize(file.total)}</span>
                    ) : (
                      <span className="font-mono">{formatSize(file.downloaded)}</span>
                    )
                  ) : file.status === 'completed' && file.size > 0 ? (
                    <span>{formatSize(file.size)}</span>
                  ) : null}
                </div>
              </div>
              {/* Progress bar (only for files with known size) */}
              {file.status === 'downloading' && !file.is_folder && file.progress !== undefined && (
                <div className="relative h-2 bg-slate-800 rounded-full overflow-hidden">
                  <div
                    className="absolute left-0 top-0 h-full bg-gradient-to-r from-amber-500 to-amber-400 transition-all duration-300"
                    style={{ width: `${file.progress}%` }}
                  >
                    <div className="absolute inset-0 bg-white/20 animate-pulse"></div>
                  </div>
                </div>
              )}
              {/* Folder progress bar (indeterminate - full bar with pulse animation) */}
              {file.status === 'downloading' && file.is_folder && file.downloaded !== undefined && (
                <div className="space-y-1">
                  <div className="relative h-2 bg-slate-800 rounded-full overflow-hidden">
                    {/* Full bar with pulse animation for folders */}
                    <div className="absolute left-0 top-0 h-full w-full bg-gradient-to-r from-amber-500 to-amber-400">
                      <div className="absolute inset-0 bg-white/20 animate-pulse"></div>
                    </div>
                  </div>
                </div>
              )}
            </div>
          ))}
        </div>
      )}
    </div>
  );
};
export default memo(DownloadQueueItem);

View File

@ -0,0 +1,64 @@
import React, { memo } from 'react';
import type { DownloadQueueItem } from '../types';
import DownloadQueueItemComponent from './DownloadQueueItem';
interface DownloadQueueStatusProps {
geId: string; // Deprecated - kept for backward compatibility
queue: DownloadQueueItem[];
onCancelJob: (jobId: string) => void;
downloadingFilesCount?: number; // Số file đang downloading thực tế
}
/**
 * Queue status panel: header counters (files currently downloading, jobs
 * waiting) plus one DownloadQueueItem row per queued job.
 *
 * Note: queue items arrive already sorted by created_at DESC from App.tsx
 * (convertToQueueItems), so no re-sorting happens here. The previous
 * `React.useMemo(() => queue, [queue])` was an identity no-op (it returned
 * the same reference it was given) and has been removed.
 */
const DownloadQueueStatus: React.FC<DownloadQueueStatusProps> = ({ queue, onCancelJob, downloadingFilesCount = 0 }) => {
  const showDetails = queue.length > 0;
  // Jobs not yet running: 'waiting' (accepted by backend) or 'pending' (local).
  const waitingCount = queue.filter(q => q.status === 'waiting' || q.status === 'pending').length;
  return (
    <div className="bg-slate-800/50 backdrop-blur-sm p-6 rounded-2xl shadow-lg border border-slate-700 flex flex-col h-full">
      <div className="flex justify-between items-center mb-4 border-b border-slate-700 pb-3">
        <h2 className="text-xl font-semibold text-white">Hàng đi Trạng thái</h2>
        <div className="flex items-center gap-3 text-sm font-semibold">
          {downloadingFilesCount > 0 && (
            <div className="flex items-center gap-1">
              <div className="h-2 w-2 rounded-full bg-amber-500 animate-pulse"></div>
              <span className="text-amber-400">{downloadingFilesCount} tệp đang tải</span>
            </div>
          )}
          {waitingCount > 0 && (
            <div className="text-slate-400">
              <span className="text-blue-400">{waitingCount}</span> đang chờ
            </div>
          )}
        </div>
      </div>
      {showDetails ? (
        <div className="flex-1 overflow-y-auto pr-2 -mr-2">
          <div className="space-y-3">
            {/* queuePosition falls back to the list index for queued items
                that lack an explicit position from the backend. */}
            {queue.map((item, index) => (
              <DownloadQueueItemComponent
                key={item.key}
                item={item}
                queuePosition={item.queuePosition || (item.status === 'waiting' || item.status === 'pending' ? index + 1 : undefined)}
                onCancel={onCancelJob}
              />
            ))}
          </div>
        </div>
      ) : (
        <div className="flex justify-center items-center flex-1">
          <p className="text-slate-500">Không job nào đang xử .</p>
        </div>
      )}
    </div>
  );
};
export default memo(DownloadQueueStatus);

14
components/DragHandleIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React from 'react';
const DragHandleIcon: React.FC = () => (
<svg width="16" height="16" viewBox="0 0 16 16" fill="currentColor" aria-hidden="true">
<circle cx="5" cy="4" r="1.5" />
<circle cx="11" cy="4" r="1.5" />
<circle cx="5" cy="8" r="1.5" />
<circle cx="11" cy="8" r="1.5" />
<circle cx="5" cy="12" r="1.5" />
<circle cx="11" cy="12" r="1.5" />
</svg>
);
export default DragHandleIcon;

75
components/ErrorDetailModal.tsx Executable file
View File

@ -0,0 +1,75 @@
import React, { useEffect } from 'react';
interface ErrorDetailModalProps {
isOpen: boolean;
onClose: () => void;
errorMessage: string | null;
}
/**
 * Full-screen modal showing error details in a scrollable <pre>.
 * Closes on backdrop click or either close button; an inner stopPropagation
 * keeps clicks inside the dialog from closing it.
 */
const ErrorDetailModal: React.FC<ErrorDetailModalProps> = ({ isOpen, onClose, errorMessage }) => {
  // Lock background scrolling while the modal is open.
  // Fix: restore the body's previous overflow value on close/unmount instead
  // of forcing 'auto', so this modal cannot clobber overflow styling applied
  // by other components (e.g. another modal open underneath).
  useEffect(() => {
    if (!isOpen) return;
    const previousOverflow = document.body.style.overflow;
    document.body.style.overflow = 'hidden';
    return () => {
      document.body.style.overflow = previousOverflow;
    };
  }, [isOpen]);
  if (!isOpen) {
    return null;
  }
  return (
    <div
      className="fixed inset-0 bg-black bg-opacity-70 z-50 flex justify-center items-center p-4 animate-fade-in-fast"
      onClick={onClose}
      role="dialog"
      aria-modal="true"
    >
      <div
        className="bg-slate-800 border border-slate-700 rounded-2xl shadow-2xl w-full max-w-2xl max-h-[90vh] flex flex-col p-6 animate-slide-up"
        onClick={e => e.stopPropagation()}
      >
        <div className="flex justify-between items-center mb-4 pb-4 border-b border-slate-700">
          <h2 className="text-xl font-semibold text-rose-400">Chi tiết Lỗi</h2>
          <button onClick={onClose} className="text-slate-400 hover:text-white transition-colors text-3xl leading-none">&times;</button>
        </div>
        <div className="flex-grow overflow-y-auto pr-2 -mr-2">
          <pre className="bg-slate-900/50 p-4 rounded-lg text-slate-300 whitespace-pre-wrap break-all font-mono text-sm">
            {errorMessage || 'Không có chi tiết lỗi.'}
          </pre>
        </div>
        <div className="mt-6 pt-4 border-t border-slate-700 text-right">
          <button
            onClick={onClose}
            className="text-white bg-slate-600 hover:bg-slate-700 font-medium rounded-lg text-sm px-5 py-2.5"
          >
            Đóng
          </button>
        </div>
      </div>
      <style>{`
        @keyframes fade-in-fast {
          from { opacity: 0; }
          to { opacity: 1; }
        }
        .animate-fade-in-fast {
          animation: fade-in-fast 0.2s ease-out forwards;
        }
        @keyframes slide-up {
          from { opacity: 0; transform: translateY(20px); }
          to { opacity: 1; transform: translateY(0); }
        }
        .animate-slide-up {
          animation: slide-up 0.3s ease-out forwards;
        }
      `}</style>
    </div>
  );
};
export default ErrorDetailModal;

15
components/FileIcon.tsx Executable file
View File

@ -0,0 +1,15 @@
import React from 'react';
/** Generic document icon (slate) used for non-folder rows in the file list. */
const FileIcon: React.FC = () => {
  const svgClass = 'w-5 h-5 text-slate-400 flex-shrink-0';
  return (
    <svg
      viewBox="0 0 24 24"
      xmlns="http://www.w3.org/2000/svg"
      fill="currentColor"
      className={svgClass}
    >
      <path
        fillRule="evenodd"
        clipRule="evenodd"
        d="M5.625 1.5c-1.036 0-1.875.84-1.875 1.875v17.25c0 1.035.84 1.875 1.875 1.875h12.75c1.035 0 1.875-.84 1.875-1.875V12.75A3.75 3.75 0 0 0 16.5 9h-1.875a1.875 1.875 0 0 1-1.875-1.875V5.25A3.75 3.75 0 0 0 9 1.5H5.625ZM7.5 15a.75.75 0 0 1 .75-.75h7.5a.75.75 0 0 1 0 1.5h-7.5A.75.75 0 0 1 7.5 15Zm.75 2.25a.75.75 0 0 0 0 1.5H12a.75.75 0 0 0 0-1.5H8.25Z"
      />
      <path d="M12.971 1.816A5.23 5.23 0 0 1 14.25 5.25v1.875c0 .207.168.375.375.375H16.5a5.23 5.23 0 0 1 3.434 1.279 9.768 9.768 0 0 0-6.963-6.963Z" />
    </svg>
  );
};
export default FileIcon;

14
components/FileImageIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React from 'react';
/** Image-file icon (blue) for picture entries in the file list. */
const FileImageIcon: React.FC = () => {
  const svgClass = 'w-5 h-5 text-blue-400 flex-shrink-0';
  return (
    <svg
      viewBox="0 0 24 24"
      xmlns="http://www.w3.org/2000/svg"
      fill="currentColor"
      className={svgClass}
    >
      <path
        fillRule="evenodd"
        clipRule="evenodd"
        d="M1.5 6a2.25 2.25 0 0 1 2.25-2.25h16.5A2.25 2.25 0 0 1 22.5 6v12a2.25 2.25 0 0 1-2.25 2.25H3.75A2.25 2.25 0 0 1 1.5 18V6ZM3 16.06V18c0 .414.336.75.75.75h16.5A.75.75 0 0 0 21 18v-1.94l-2.69-2.689a1.5 1.5 0 0 0-2.12 0l-.88.879.97.97a.75.75 0 1 1-1.06 1.06l-5.16-5.159a1.5 1.5 0 0 0-2.12 0L3 16.061Zm10.125-7.81a1.125 1.125 0 1 1 2.25 0 1.125 1.125 0 0 1-2.25 0Z"
      />
    </svg>
  );
};
export default FileImageIcon;

34
components/FileItem.tsx Executable file
View File

@ -0,0 +1,34 @@
import React, { memo } from 'react';
import type { FileSystemItem as FileSystemItemType } from '../types';
import FolderIcon from './FolderIcon';
import FileIcon from './FileIcon';
interface FileItemProps {
item: FileSystemItemType;
isSelected: boolean;
onSelect: (id: string) => void;
onDoubleClick?: () => void;
}
/**
 * Single row of the file-browser grid. Click toggles selection via onSelect;
 * double-click (wired only for folders by the parent) navigates into it.
 */
const FileItem: React.FC<FileItemProps> = ({ item, isSelected, onSelect, onDoubleClick }) => {
  // Selected rows get a solid indigo tint; others a subtle hover highlight.
  const selectionClass = isSelected ? 'bg-indigo-600/30' : 'hover:bg-slate-700/30';
  const rowClasses = `grid grid-cols-12 gap-4 px-4 py-3 items-center transition-colors duration-150 cursor-pointer ${selectionClass}`;
  const icon = item.type === 'folder' ? <FolderIcon /> : <FileIcon />;
  return (
    <div
      className={rowClasses}
      onClick={() => onSelect(item.id)}
      onDoubleClick={onDoubleClick}
    >
      <div className="col-span-6 flex items-center gap-3">
        {icon}
        <span className="text-slate-200 text-sm truncate" title={item.name}>{item.name}</span>
      </div>
      <div className="col-span-3 text-right text-slate-400 text-xs font-mono">{item.modified}</div>
      <div className="col-span-2 text-right text-slate-400 text-xs font-mono">{item.size || '—'}</div>
      <div className="col-span-1"></div>
    </div>
  );
};
export default memo(FileItem);

192
components/FileList.tsx Executable file
View File

@ -0,0 +1,192 @@
import React, { memo } from 'react';
import type { FileSystemItem as FileSystemItemType } from '../types';
import FileItem from './FileItem';
import Spinner from './Spinner';
import { PathBar } from './PathBar';
interface FileListProps {
  items: FileSystemItemType[]; // entries of the current directory
  isLoading: boolean; // true while the listing is being fetched
  onDownload: () => void; // download selected items (or all, when none selected)
  selectedIds: string[];
  onSelectItem: (id: string) => void;
  onFolderDoubleClick?: (folder: FileSystemItemType) => void;
  onNavigateBack?: () => void;
  onNavigateForward?: () => void;
  canNavigateBack?: boolean;
  canNavigateForward?: boolean;
  currentPath?: string;
  currentMode?: 'api' | 'sharing' | null; // download mode, forwarded to PathBar
  isDownloadButtonLoading?: boolean;
  hasCustomPath?: boolean;
  isCustomPath?: boolean;
  onSaveCustomPath?: () => void;
  isSavingCustomPath?: boolean;
  initialSearchQuery?: string; // Auto-fill search input from parent
}
/**
 * File & folder browser panel: header with Custom Paths / Download buttons,
 * an optional PathBar (navigation + search box), and the filtered file list.
 * Search is debounced locally; all navigation and selection state lives in
 * the parent.
 */
const FileList: React.FC<FileListProps> = ({
  items,
  isLoading,
  onDownload,
  selectedIds,
  onSelectItem,
  onFolderDoubleClick,
  onNavigateBack,
  onNavigateForward,
  canNavigateBack = false,
  canNavigateForward = false,
  currentPath = '',
  currentMode = null,
  isDownloadButtonLoading = false,
  hasCustomPath = false,
  isCustomPath = false,
  onSaveCustomPath,
  isSavingCustomPath = false,
  initialSearchQuery = ''
}) => {
  // Search state: raw input plus a debounced copy used for filtering.
  const [searchQuery, setSearchQuery] = React.useState(initialSearchQuery);
  const [debouncedQuery, setDebouncedQuery] = React.useState(initialSearchQuery);
  // Update search query when initialSearchQuery changes (from parent)
  React.useEffect(() => {
    setSearchQuery(initialSearchQuery);
    setDebouncedQuery(initialSearchQuery);
  }, [initialSearchQuery]);
  // Debounce search query (300ms)
  React.useEffect(() => {
    const timer = setTimeout(() => {
      setDebouncedQuery(searchQuery);
    }, 300);
    return () => clearTimeout(timer);
  }, [searchQuery]);
  // Filter items based on search query (case-insensitive substring match).
  const filteredItems = React.useMemo(() => {
    if (!debouncedQuery.trim()) {
      return items;
    }
    const query = debouncedQuery.toLowerCase();
    return items.filter(item =>
      item.name.toLowerCase().includes(query)
    );
  }, [items, debouncedQuery]);
  // NOTE(review): the download button is gated on the FILTERED list, while
  // renderContent() below gates on the raw `items` — so an active search that
  // matches nothing disables the button even though the folder has content.
  // Confirm this is intentional.
  const hasFilesOrFolders = filteredItems.length > 0;
  const selectedCount = selectedIds.length;
  const buttonText = selectedCount > 0 ? `Tải xuống ${selectedCount} mục` : 'Tải xuống tất cả';
  const isDownloadDisabled = selectedCount === 0 && !hasFilesOrFolders;
  // Body of the panel: spinner while loading, table when items exist,
  // otherwise an empty-state prompt.
  const renderContent = () => {
    if (isLoading) {
      return (
        <div className="text-center py-10 px-6 flex justify-center items-center gap-2">
          <Spinner />
          <p className="text-slate-400">Đang tải danh sách tệp...</p>
        </div>
      );
    }
    if (items.length > 0) {
      return (
        <>
          <div className="grid grid-cols-12 gap-4 px-4 py-3 bg-slate-900/50 text-xs font-semibold text-slate-400 uppercase tracking-wider border-b border-slate-700 flex-shrink-0">
            <div className="col-span-6">Tên</div>
            <div className="col-span-3 text-right">Ngày sửa đi</div>
            <div className="col-span-2 text-right">Kích thước</div>
            <div className="col-span-1"></div>
          </div>
          <div className="divide-y divide-slate-700/50 flex-1 overflow-y-auto">
            {filteredItems.length > 0 ? (
              filteredItems.map((item) => (
                <FileItem
                  key={item.id}
                  item={item}
                  isSelected={selectedIds.includes(item.id)}
                  onSelect={onSelectItem}
                  onDoubleClick={item.type === 'folder' && onFolderDoubleClick ? () => onFolderDoubleClick(item) : undefined}
                />
              ))
            ) : (
              <div className="text-center py-10 px-6">
                <p className="text-slate-400">Không tìm thấy kết quả cho "{debouncedQuery}"</p>
              </div>
            )}
          </div>
        </>
      )
    }
    return (
      <div className="text-center py-10 px-6 flex-1 flex items-center justify-center">
        <p className="text-slate-400">Không tệp hoặc thư mục nào. Vui lòng thực hiện tìm kiếm.</p>
      </div>
    )
  };
  return (
    <div className="bg-slate-800/50 backdrop-blur-sm rounded-2xl shadow-2xl border border-slate-700 h-full flex flex-col">
      {/* Header */}
      <div className="p-4 flex justify-between items-center border-b border-slate-700 flex-shrink-0">
        <h2 className="text-xl font-semibold text-white">File & Folder</h2>
        <div className="flex gap-3">
          <button
            onClick={() => {
              // TODO: Open CustomPathManagerModal
              // NOTE(review): this reaches the modal through a window-global
              // hook — prefer passing an onOpenCustomPaths prop instead.
              if (typeof (window as any).openCustomPathManager === 'function') {
                (window as any).openCustomPathManager();
              }
            }}
            className="flex justify-center items-center gap-2 text-white bg-slate-700 hover:bg-slate-600 focus:ring-4 focus:outline-none focus:ring-slate-600 font-medium rounded-lg text-sm px-4 py-2 text-center transition-all duration-200"
            title="Quản lý Custom Paths"
          >
            <svg className="w-4 h-4" viewBox="0 0 24 24" fill="currentColor">
              <path fillRule="evenodd" d="M11.078 2.25c-.917 0-1.699.663-1.85 1.567L9.05 4.889c-.02.12-.115.26-.297.348a7.493 7.493 0 0 0-.986.57c-.166.115-.334.126-.45.083L6.3 5.508a1.875 1.875 0 0 0-2.282.819l-.922 1.597a1.875 1.875 0 0 0 .432 2.385l.84.692c.095.078.17.229.154.43a7.598 7.598 0 0 0 0 1.139c.015.2-.059.352-.153.43l-.841.692a1.875 1.875 0 0 0-.432 2.385l.922 1.597a1.875 1.875 0 0 0 2.282.818l1.019-.382c.115-.043.283-.031.45.082.312.214.641.405.985.57.182.088.277.228.297.35l.178 1.071c.151.904.933 1.567 1.85 1.567h1.844c.916 0 1.699-.663 1.85-1.567l.178-1.072c.02-.12.114-.26.297-.349.344-.165.673-.356.985-.57.167-.114.335-.125.45-.082l1.02.382a1.875 1.875 0 0 0 2.28-.819l.923-1.597a1.875 1.875 0 0 0-.432-2.385l-.84-.692c-.095-.078-.17-.229-.154-.43a7.614 7.614 0 0 0 0-1.139c-.016-.2.059-.352.153-.43l.84-.692c.708-.582.891-1.59.433-2.385l-.922-1.597a1.875 1.875 0 0 0-2.282-.818l-1.02.382c-.114.043-.282.031-.449-.083a7.49 7.49 0 0 0-.985-.57c-.183-.087-.277-.227-.297-.348l-.179-1.072a1.875 1.875 0 0 0-1.85-1.567h-1.843ZM12 15.75a3.75 3.75 0 1 0 0-7.5 3.75 3.75 0 0 0 0 7.5Z" clipRule="evenodd" />
            </svg>
            Custom Paths
          </button>
          <button
            onClick={onDownload}
            disabled={isDownloadDisabled || isLoading || isDownloadButtonLoading}
            className="flex justify-center items-center gap-2 text-white bg-indigo-600 hover:bg-indigo-700 focus:ring-4 focus:outline-none focus:ring-indigo-800 font-medium rounded-lg text-sm px-4 py-2 text-center transition-all duration-200 disabled:bg-slate-600 disabled:text-slate-400 disabled:cursor-not-allowed"
          >
            {isDownloadButtonLoading && <Spinner />}
            {buttonText}
          </button>
        </div>
      </div>
      {/* Path Bar — rendered only when navigation callbacks and a path exist */}
      {(onNavigateBack || onNavigateForward) && currentPath && (
        <div className="flex-shrink-0">
          <PathBar
            currentPath={currentPath}
            canGoBack={canNavigateBack}
            canGoForward={canNavigateForward}
            onNavigateBack={onNavigateBack || (() => { })}
            onNavigateForward={onNavigateForward || (() => { })}
            currentMode={currentMode}
            hasCustomPath={hasCustomPath}
            isCustomPath={isCustomPath}
            onSaveCustomPath={onSaveCustomPath}
            isSavingCustomPath={isSavingCustomPath}
            searchQuery={searchQuery}
            onSearchChange={setSearchQuery}
          />
        </div>
      )}
      {/* File List Content */}
      <div className="flex-1 overflow-hidden flex flex-col">
        {renderContent()}
      </div>
    </div>
  );
};
export default memo(FileList);

0
components/FileZipIcon.tsx Executable file
View File

14
components/FolderIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React from 'react';
/** Folder icon (amber) for directory rows in the file list. */
const FolderIcon: React.FC = () => {
  const svgClass = 'w-5 h-5 text-amber-400 flex-shrink-0';
  return (
    <svg
      viewBox="0 0 24 24"
      xmlns="http://www.w3.org/2000/svg"
      fill="currentColor"
      className={svgClass}
    >
      <path d="M19.906 9c.382 0 .749.057 1.094.162V9a3 3 0 0 0-3-3h-3.879a.75.75 0 0 1-.53-.22L11.47 3.66A2.25 2.25 0 0 0 9.879 3H6a3 3 0 0 0-3 3v3.162A3.756 3.756 0 0 1 4.094 9h15.812ZM4.094 10.5a2.25 2.25 0 0 0-2.227 2.568l.857 6A2.25 2.25 0 0 0 4.951 21H19.05a2.25 2.25 0 0 0 2.227-1.932l.857-6a2.25 2.25 0 0 0-2.227-2.568H4.094Z" />
    </svg>
  );
};
export default FolderIcon;

View File

@ -0,0 +1,38 @@
import React from 'react';
import { TruncatedPath } from './TruncatedPath';
interface FolderPathDisplayProps {
fullPath: string;
variant?: 'blue' | 'yellow';
maxChars?: number;
}
/**
* Hiển thị đưng dẫn đến folder (bỏ filename)
* dụ: D:\projects\dkiDownload\raw\1000_DE\003.jpg D:\projects\dkiDownload\raw\1000_DE
*/
export const FolderPathDisplay: React.FC<FolderPathDisplayProps> = ({
fullPath,
variant = 'yellow',
maxChars = 60
}) => {
// If path already ends with folder (no filename), display as-is
// If path has filename, remove it
// Check if last segment has extension (file) or not (folder)
const lastSegment = fullPath.split(/[/\\]/).pop() || '';
const hasExtension = /\.[^.]+$/.test(lastSegment);
const folderPath = hasExtension
? fullPath.replace(/[/\\][^/\\]+$/, '') // Remove filename
: fullPath; // Already a folder path
if (!folderPath) return null;
return (
<TruncatedPath
path={folderPath}
variant={variant}
maxChars={maxChars}
/>
);
};

62
components/GeIdResultItem.tsx Executable file
View File

@ -0,0 +1,62 @@
import React, { memo } from 'react';
import type { GeIdResult } from '../types';
import UserResultItem from './UserResultItem';
interface GeIdResultItemProps {
result: GeIdResult;
onErrorClick: (details: string) => void;
}
/**
 * Result card for one GE ID + language: header with an optional link to the
 * TMS project and the completion time, followed by one UserResultItem per
 * detail row.
 */
const GeIdResultItem: React.FC<GeIdResultItemProps> = ({ result, onErrorClick }) => {
  // completionTime may arrive as a Date instance or a serialised string.
  const completedAt = result.completionTime
    ? (result.completionTime instanceof Date ? result.completionTime : new Date(result.completionTime))
    : null;
  const timeString = completedAt ? completedAt.toLocaleTimeString('vi-VN') : '-';
  // Every detail under the same GE/lang shares one URL, so the first suffices.
  const tmsUrl = result.details?.[0]?.url;
  // "1000 de" -> "1000 DE": everything after the first token is upper-cased.
  const formatGeIdAndLang = () => {
    const [geId, ...langParts] = result.geIdAndLang.split(' ');
    if (langParts.length === 0) {
      return result.geIdAndLang.toUpperCase();
    }
    return `${geId} ${langParts.join(' ').toUpperCase()}`;
  };
  const details = result.details || [];
  return (
    <div className="bg-slate-900/50 border border-slate-700 rounded-lg p-4">
      <div className="flex justify-between items-center mb-3 pb-3 border-b border-slate-700">
        <h4 className="font-mono font-semibold text-indigo-300">
          {formatGeIdAndLang()}
          {tmsUrl && (
            <>
              {' '}
              <span className="text-slate-400">(</span>
              <a
                href={tmsUrl}
                target="_blank"
                rel="noopener noreferrer"
                className="text-cyan-400 hover:text-cyan-300 hover:underline transition-colors"
                title={tmsUrl}
              >
                {tmsUrl}
              </a>
              <span className="text-slate-400">)</span>
            </>
          )}
        </h4>
        <span className="text-xs text-slate-400 font-mono">{timeString}</span>
      </div>
      <div className="space-y-2">
        {details.map((detail, index) => (
          <UserResultItem key={index} detail={detail} onErrorClick={onErrorClick} />
        ))}
      </div>
    </div>
  );
};
export default memo(GeIdResultItem);

147
components/HistoryItem.tsx Executable file
View File

@ -0,0 +1,147 @@
import React, { useState, memo } from 'react';
import type { Submission, GeIdResult } from '../types';
import GeIdResultItem from './GeIdResultItem';
import TrashIcon from './TrashIcon';
import RetryIcon from './RetryIcon';
import ConfirmModal from './ConfirmModal';
interface HistoryItemProps {
submission: Submission;
onErrorClick: (details: string) => void;
onDelete: (id: string) => void;
onRetry: (submission: Submission, errorGeIds: string[], errorUsernames: string[]) => void;
onPaste: (username: string, geIdAndLang: string) => void;
hideNonErrors: boolean;
}
/**
 * One entry in the submission history: timestamp, per-GE-ID results, and
 * actions to paste the inputs back into the form, retry failed GE IDs, or
 * delete the record. Renders nothing when `hideNonErrors` is on and the
 * submission contains no errors.
 */
const HistoryItem: React.FC<HistoryItemProps> = ({ submission, onErrorClick, onDelete, onRetry, onPaste, hideNonErrors }) => {
  // Whether the retry confirmation dialog is visible.
  const [showRetryConfirm, setShowRetryConfirm] = useState(false);
  // Find GE IDs with errors: a result is "failed" if any of its user details errored.
  const errorResults = submission.results?.filter(r => r.details?.some(d => d.status === 'error')) ?? [];
  const errorGeIds = errorResults.map(r => r.geIdAndLang);
  const hasError = errorGeIds.length > 0;
  // Get unique usernames from error results (deduplicated via Set).
  const errorUsernames = [...new Set(
    errorResults.flatMap(r => r.details?.filter(d => d.status === 'error').map(d => d.username) ?? [])
  )];
  // Filter results based on the hideNonErrors toggle.
  const filteredResults = hideNonErrors
    ? submission.results?.filter(r => r.details?.some(d => d.status === 'error'))
    : submission.results;
  // Copy this submission's inputs back into the form and scroll to the top.
  const handlePaste = () => {
    onPaste(submission.username, submission.geIdAndLang);
    window.scrollTo({ top: 0, behavior: 'smooth' });
  };
  // Close the dialog, then re-submit only the failed GE IDs / usernames.
  const handleRetryConfirm = () => {
    setShowRetryConfirm(false);
    onRetry(submission, errorGeIds, errorUsernames);
  };
  // If hideNonErrors is on and there are no errors, don't render at all.
  if (hideNonErrors && !hasError) {
    return null;
  }
  return (
    <>
      <div className="bg-slate-800/50 border border-slate-700 rounded-lg shadow-md animate-fade-in overflow-hidden">
        {/* Header row: timestamp + action buttons */}
        <div className="w-full flex justify-between items-center p-3 bg-slate-800">
          <div className="flex items-center gap-3">
            <span className="text-xs text-slate-400 flex-shrink-0">
              {submission.timestamp.toLocaleString('vi-VN')}
            </span>
          </div>
          <div className="flex items-center gap-2">
            <button
              onClick={handlePaste}
              className="flex items-center gap-1.5 text-xs font-semibold text-cyan-400 bg-cyan-500/10 hover:bg-cyan-500/20 px-2 py-1 rounded-md transition-colors"
              title="Dán username và GE ID & LANG vào form"
            >
              <svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 5H7a2 2 0 00-2 2v12a2 2 0 002 2h10a2 2 0 002-2V7a2 2 0 00-2-2h-2M9 5a2 2 0 002 2h2a2 2 0 002-2M9 5a2 2 0 012-2h2a2 2 0 012 2" />
              </svg>
              Dán
            </button>
            {/* Retry only offered when at least one GE ID failed */}
            {hasError && (
              <button
                onClick={() => setShowRetryConfirm(true)}
                className="flex items-center gap-1.5 text-xs font-semibold text-amber-400 bg-amber-500/10 hover:bg-amber-500/20 px-2 py-1 rounded-md transition-colors"
              >
                <RetryIcon className="w-4 h-4" />
                Retry
              </button>
            )}
            <button
              onClick={() => onDelete(submission.id)}
              className="flex items-center gap-1.5 text-xs font-semibold text-rose-400 bg-rose-500/10 hover:bg-rose-500/20 px-2 py-1 rounded-md transition-colors"
            >
              <TrashIcon className="w-4 h-4" />
              Xoá
            </button>
          </div>
        </div>
        {/* Body: one GeIdResultItem per (possibly filtered) result */}
        <div className="p-4 space-y-4 border-t border-slate-700">
          {filteredResults && filteredResults.length > 0 ? (
            filteredResults.map((result, i) => (
              <GeIdResultItem key={i} result={result} onErrorClick={onErrorClick} />
            ))
          ) : (
            <div className="text-center py-4">
              <p className="text-slate-500 text-sm">
                {hideNonErrors ? 'Không có lỗi trong lần submit này.' : 'Không có dữ liệu kết quả chi tiết cho lần submit này.'}
              </p>
            </div>
          )}
        </div>
        <style>{`
          @keyframes fade-in {
            from { opacity: 0; transform: translateY(-10px); }
            to { opacity: 1; transform: translateY(0); }
          }
          .animate-fade-in {
            animation: fade-in 0.3s ease-out forwards;
          }
        `}</style>
      </div>
      {/* Retry Confirmation Modal */}
      <ConfirmModal
        isOpen={showRetryConfirm}
        title="Xác nhận Retry"
        message={
          <div className="space-y-4">
            <p className="text-slate-300">Bạn muốn retry các GE ID & LANG sau?</p>
            <div className="bg-slate-700/50 rounded-lg p-3 max-h-32 overflow-y-auto">
              <p className="text-xs text-slate-400 mb-2">GE ID & LANG bị lỗi:</p>
              {errorGeIds.map((geId, i) => (
                <div key={i} className="text-sm text-indigo-300 font-mono">{geId}</div>
              ))}
            </div>
            <div className="bg-slate-700/50 rounded-lg p-3 max-h-32 overflow-y-auto">
              <p className="text-xs text-slate-400 mb-2">Usernames:</p>
              {errorUsernames.map((username, i) => (
                <div key={i} className="text-sm text-cyan-300 font-mono">{username}</div>
              ))}
            </div>
            <p className="text-xs text-slate-400">
              Các GE ID & LANG lỗi sẽ đưc tách thành record mới bắt đu cấp quyền ngay.
            </p>
          </div>
        }
        confirmText="Retry"
        cancelText="Huỷ"
        onConfirm={handleRetryConfirm}
        onCancel={() => setShowRetryConfirm(false)}
      />
    </>
  );
};
export default memo(HistoryItem);

40
components/Navigation.tsx Executable file
View File

@ -0,0 +1,40 @@
import React from 'react';
// Identifiers for the app's top-level pages.
type Page = 'permission' | 'rawDownload' | 'check';
// Props for the page-switcher navigation bar.
interface NavigationProps {
  currentPage: Page;                    // tab currently highlighted
  setCurrentPage: (page: Page) => void; // request switching to another page
}
const Navigation: React.FC<NavigationProps> = ({ currentPage, setCurrentPage }) => {
const activeClasses = 'bg-indigo-600 text-white';
const inactiveClasses = 'bg-slate-800 text-slate-300 hover:bg-slate-700';
return (
<nav className="flex justify-center mb-8">
<div className="flex p-1 space-x-1 bg-slate-800/50 border border-slate-700 rounded-lg">
<button
onClick={() => setCurrentPage('permission')}
className={`px-4 py-2 text-sm font-medium rounded-md transition-colors duration-200 ${currentPage === 'permission' ? activeClasses : inactiveClasses}`}
>
Cấp quyền TMS
</button>
<button
onClick={() => setCurrentPage('rawDownload')}
className={`px-4 py-2 text-sm font-medium rounded-md transition-colors duration-200 ${currentPage === 'rawDownload' ? activeClasses : inactiveClasses}`}
>
Tải raw & Setting
</button>
<button
onClick={() => setCurrentPage('check')}
className={`px-4 py-2 text-sm font-medium rounded-md transition-colors duration-200 ${currentPage === 'check' ? activeClasses : inactiveClasses}`}
>
Check Upload
</button>
</div>
</nav>
);
};
export default Navigation;

View File

@ -0,0 +1,167 @@
/**
* NotificationManager Component
* Handles browser notifications for submission completion
* Uses Supabase Realtime to listen for updates and localStorage to filter notifications
*/
import { useEffect } from 'react';
import { supabase } from '../utils/supabase';
// Marker props type: NotificationManager takes no props.
interface NotificationManagerProps {
  // No props needed - component manages its own state
}
/**
 * Subscribes to Supabase Realtime UPDATEs on the `submissions` table and
 * shows a browser notification when a submission initiated from this browser
 * (tracked in localStorage via addMySubmission) reaches a terminal status.
 * Renders nothing.
 *
 * Fix: every access to `Notification` is now guarded by `'Notification' in
 * window`. The original read `Notification.permission` in two `else`
 * branches without a guard, which throws a ReferenceError in browsers that
 * lack the Notification API (e.g. some iOS web views).
 */
const NotificationManager: React.FC<NotificationManagerProps> = () => {
  useEffect(() => {
    console.log('[NotificationManager] Component mounted');
    // Request notification permission on mount (guarded — see header note).
    if ('Notification' in window) {
      if (Notification.permission === 'default') {
        Notification.requestPermission().then(permission => {
          console.log('[NotificationManager] Notification permission:', permission);
        });
      } else {
        console.log('[NotificationManager] Notification permission status:', Notification.permission);
      }
    } else {
      console.log('[NotificationManager] Notification API not available in this browser');
    }
    console.log('[NotificationManager] Setting up Supabase subscription...');
    // Subscribe to Supabase Realtime for submission updates.
    const channel = supabase
      .channel('submissions-notifications')
      .on(
        'postgres_changes',
        {
          event: 'UPDATE',
          schema: 'public',
          table: 'submissions',
        },
        (payload) => {
          console.log('[NotificationManager] Submission updated:', payload);
          handleSubmissionUpdate(payload.new);
        }
      )
      .subscribe((status) => {
        console.log('[NotificationManager] Subscription status:', status);
      });
    console.log('[NotificationManager] Subscription created');
    // Cleanup on unmount.
    return () => {
      console.log('[NotificationManager] Cleaning up subscription');
      supabase.removeChannel(channel);
    };
  }, []);

  // Decide whether an updated submission row warrants a notification, show it,
  // and stop tracking the submission afterwards.
  const handleSubmissionUpdate = (submission: any) => {
    console.log('[NotificationManager] handleSubmissionUpdate called with:', submission);
    // Only terminal states are interesting.
    if (submission.status !== 'completed' && submission.status !== 'failed') {
      console.log('[NotificationManager] Ignoring submission with status:', submission.status);
      return;
    }
    // Only notify for submissions this browser initiated.
    const mySubmissions = getMySubmissions();
    console.log('[NotificationManager] My submissions:', mySubmissions);
    console.log('[NotificationManager] Current submission_id:', submission.submission_id);
    if (!mySubmissions.includes(submission.submission_id)) {
      console.log('[NotificationManager] Not our submission, ignoring');
      return; // Not our submission, ignore
    }
    console.log('[NotificationManager] Showing notification...');
    if ('Notification' in window && Notification.permission === 'granted') {
      // Count errored results to pick the title.
      // NOTE(review): assumes each results entry carries a top-level `status`;
      // other components in this file read per-user status from `details` —
      // confirm the row schema coming from Supabase.
      const results = submission.results || [];
      const errorCount = results.filter((r: any) => r.status === 'error').length;
      const hasErrors = errorCount > 0;
      let title: string;
      if (submission.status === 'failed' || hasErrors) {
        title = `❌ Có ${errorCount} link bị lỗi`;
      } else {
        title = '✅ Cấp quyền TMS hoàn tất';
      }
      // Build body content with username and GE info, each capped at 3 entries.
      const input = submission.input || {};
      const usernameList = input.username_list || [];
      const geInput = input.ge_input || '';
      const usernameDisplay = usernameList.length > 3
        ? `${usernameList.slice(0, 3).join(', ')}...`
        : usernameList.join(', ');
      const geLines = geInput.split('\n').filter((line: string) => line.trim());
      const geDisplay = geLines.length > 3
        ? `${geLines.slice(0, 3).join(', ')}...`
        : geLines.join(', ');
      const body = `${usernameDisplay}\n${geDisplay}`;
      const notification = new Notification(title, {
        body,
        icon: '/push_noti.png',
        tag: submission.submission_id, // same tag replaces a prior notification (dedupe)
      });
      // Focus the window when the notification is clicked.
      notification.onclick = () => {
        window.focus();
        notification.close();
      };
      // Auto close after 10 seconds.
      setTimeout(() => notification.close(), 10000);
      console.log('[NotificationManager] Notification shown');
    } else {
      // Guarded read: Notification may not exist at all (see header note).
      console.log(
        '[NotificationManager] Notification permission not granted:',
        'Notification' in window ? Notification.permission : 'unsupported'
      );
    }
    // Remove from localStorage so the notification is not repeated.
    removeMySubmission(submission.submission_id);
  };

  return null; // This component doesn't render anything
};
// ---- localStorage helpers for tracking submissions started by this browser ----

// Key under which the tracked submission IDs are stored.
const STORAGE_KEY = 'my_submissions';

/** Record a submission ID as "ours" so its completion triggers a notification. */
export const addMySubmission = (submissionId: string) => {
  const ids = getMySubmissions();
  if (ids.includes(submissionId)) return; // already tracked — nothing to do
  localStorage.setItem(STORAGE_KEY, JSON.stringify([...ids, submissionId]));
  console.log('[NotificationManager] Added submission to localStorage:', submissionId);
};

/** Read the tracked submission IDs; returns [] on missing or corrupt data. */
export const getMySubmissions = (): string[] => {
  try {
    const raw = localStorage.getItem(STORAGE_KEY);
    return raw ? JSON.parse(raw) : [];
  } catch {
    return [];
  }
};

/** Stop tracking a submission ID (called after its notification has fired). */
export const removeMySubmission = (submissionId: string) => {
  const remaining = getMySubmissions().filter(id => id !== submissionId);
  localStorage.setItem(STORAGE_KEY, JSON.stringify(remaining));
  console.log('[NotificationManager] Removed submission from localStorage:', submissionId);
};
export default NotificationManager;

122
components/OtpModal.tsx Executable file
View File

@ -0,0 +1,122 @@
import React, { useState } from 'react';
import Spinner from './Spinner';
// Props for the OTP re-authentication modal.
interface OtpModalProps {
  isOpen: boolean;                              // whether the modal is shown
  onClose: () => void;                          // dismiss without submitting
  onSubmit: (otpCode: string) => Promise<void>; // verify the entered OTP; may reject on failure
  isLoading?: boolean;                          // disable inputs and show spinner while verifying
  errorMessage?: string | null;                 // server-side error to display under the input
}
/**
 * Modal prompting the user to re-authenticate with an OTP after the session
 * expired. Submission is delegated to `onSubmit`; on success only the input
 * is cleared here — closing the modal is the parent's responsibility.
 */
const OtpModal: React.FC<OtpModalProps> = ({
  isOpen,
  onClose,
  onSubmit,
  isLoading = false,
  errorMessage
}) => {
  // Current value of the OTP input field.
  const [otpCode, setOtpCode] = useState('');
  // Client-side validation error (empty input); server errors arrive via props.
  const [localError, setLocalError] = useState<string | null>(null);
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!otpCode.trim()) {
      setLocalError('Vui lòng nhập mã OTP');
      return;
    }
    setLocalError(null);
    try {
      await onSubmit(otpCode);
      // On success, clear the input; the parent decides when to close the modal.
      setOtpCode('');
    } catch (error) {
      // Error will be handled by parent component (surfaced via errorMessage).
    }
  };
  // Reset local state before delegating the close to the parent.
  const handleClose = () => {
    setOtpCode('');
    setLocalError(null);
    onClose();
  };
  if (!isOpen) return null;
  return (
    <div className="fixed inset-0 bg-black/50 backdrop-blur-sm flex items-center justify-center z-50">
      <div className="bg-slate-800 rounded-2xl shadow-2xl border border-slate-700 p-8 w-full max-w-md mx-4">
        {/* Lock icon + heading */}
        <div className="text-center mb-6">
          <div className="w-16 h-16 bg-indigo-600/20 rounded-full flex items-center justify-center mx-auto mb-4">
            <svg className="w-8 h-8 text-indigo-400" viewBox="0 0 24 24" fill="currentColor">
              <path fillRule="evenodd" d="M12 1.5a5.25 5.25 0 0 0-5.25 5.25v3a3 3 0 0 0-3 3v6.75a3 3 0 0 0 3 3h10.5a3 3 0 0 0 3-3v-6.75a3 3 0 0 0-3-3v-3c0-2.9-2.35-5.25-5.25-5.25Zm3.75 8.25v-3a3.75 3.75 0 1 0-7.5 0v3h7.5Z" clipRule="evenodd" />
            </svg>
          </div>
          <h3 className="text-xl font-semibold text-white mb-2">
            Xác thực OTP
          </h3>
          <p className="text-slate-400 text-sm">
            Phiên đăng nhập đã hết hạn. Vui lòng nhập OTP đ tiếp tục.
          </p>
        </div>
        <form onSubmit={handleSubmit}>
          <div className="mb-6">
            <label htmlFor="otpCode" className="block mb-2 text-sm font-medium text-slate-300">
              OTP
            </label>
            <input
              type="text"
              id="otpCode"
              value={otpCode}
              onChange={(e) => setOtpCode(e.target.value)}
              className="bg-slate-900/50 border border-slate-700 text-slate-100 text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 block w-full p-3 transition-colors duration-200"
              placeholder="Nhập mã OTP 6 chữ số"
              disabled={isLoading}
              maxLength={6}
              autoFocus
            />
          </div>
          {/* Either local validation error or server-provided error */}
          {(localError || errorMessage) && (
            <div className="mb-4 p-3 bg-red-900/20 border border-red-800 rounded-lg">
              <p className="text-red-400 text-sm text-center">
                {localError || errorMessage}
              </p>
            </div>
          )}
          <div className="flex gap-3">
            <button
              type="button"
              onClick={handleClose}
              disabled={isLoading}
              className="flex-1 text-slate-300 bg-slate-700 hover:bg-slate-600 focus:ring-4 focus:outline-none focus:ring-slate-800 font-medium rounded-lg text-sm px-5 py-3 text-center transition-all duration-200 disabled:bg-slate-800 disabled:text-slate-500 disabled:cursor-not-allowed"
            >
              Hủy
            </button>
            <button
              type="submit"
              disabled={isLoading || !otpCode.trim()}
              className="flex-1 flex justify-center items-center text-white bg-indigo-600 hover:bg-indigo-700 focus:ring-4 focus:outline-none focus:ring-indigo-800 font-medium rounded-lg text-sm px-5 py-3 text-center transition-all duration-200 disabled:bg-indigo-900 disabled:text-slate-400 disabled:cursor-not-allowed"
            >
              {isLoading ? (
                <>
                  <Spinner />
                  <span>Đang xác thực...</span>
                </>
              ) : (
                'Xác nhận'
              )}
            </button>
          </div>
        </form>
      </div>
    </div>
  );
};
export default OtpModal;

128
components/PathBar.tsx Executable file
View File

@ -0,0 +1,128 @@
import React from 'react';
// Props for the PathBar toolbar.
interface PathBarProps {
  currentPath: string;            // path currently displayed
  canGoBack: boolean;             // enable the Back button
  canGoForward: boolean;          // enable the Forward button
  onNavigateBack: () => void;
  onNavigateForward: () => void;
  currentMode?: 'api' | 'sharing' | null; // download mode; save button only shows in 'sharing'
  hasCustomPath?: boolean;        // a custom path is already saved (button shows "Update")
  isCustomPath?: boolean;         // current path equals the saved custom path (amber highlight)
  onSaveCustomPath?: () => void;  // save/update the current path as custom path
  isSavingCustomPath?: boolean;   // show spinner on the save button
  searchQuery?: string;
  onSearchChange?: (query: string) => void; // when provided, the search input is rendered
}
/**
 * PathBar shows the current path with Back/Forward navigation buttons, an
 * optional search box, and — in Sharing mode only — a button to save the
 * current path as a custom path.
 */
export function PathBar({
  currentPath,
  canGoBack,
  canGoForward,
  onNavigateBack,
  onNavigateForward,
  currentMode = null,
  hasCustomPath = false,
  isCustomPath = false,
  onSaveCustomPath,
  isSavingCustomPath = false,
  searchQuery = '',
  onSearchChange,
}: PathBarProps) {
  return (
    <div className="flex items-center gap-3 px-4 py-2 bg-slate-900/50 border-b border-slate-700">
      {/* Navigation Buttons */}
      <div className="flex items-center gap-1">
        <button
          onClick={onNavigateBack}
          disabled={!canGoBack}
          className={`p-1.5 rounded transition-colors ${canGoBack
            ? 'hover:bg-slate-700 text-slate-300 hover:text-white'
            : 'text-slate-600 cursor-not-allowed'
            }`}
          title="Quay lại"
        >
          <svg className="w-5 h-5" viewBox="0 0 24 24" fill="currentColor">
            <path fillRule="evenodd" d="M7.72 12.53a.75.75 0 0 1 0-1.06l7.5-7.5a.75.75 0 1 1 1.06 1.06L9.31 12l6.97 6.97a.75.75 0 1 1-1.06 1.06l-7.5-7.5Z" clipRule="evenodd" />
          </svg>
        </button>
        <button
          onClick={onNavigateForward}
          disabled={!canGoForward}
          className={`p-1.5 rounded transition-colors ${canGoForward
            ? 'hover:bg-slate-700 text-slate-300 hover:text-white'
            : 'text-slate-600 cursor-not-allowed'
            }`}
          title="Tiến tới"
        >
          <svg className="w-5 h-5" viewBox="0 0 24 24" fill="currentColor">
            <path fillRule="evenodd" d="M16.28 11.47a.75.75 0 0 1 0 1.06l-7.5 7.5a.75.75 0 0 1-1.06-1.06L14.69 12 7.72 5.03a.75.75 0 0 1 1.06-1.06l7.5 7.5Z" clipRule="evenodd" />
          </svg>
        </button>
      </div>
      {/* Current Path - highlighted amber when it is the saved custom path */}
      <div className="flex-1 min-w-0">
        <div
          className={`font-mono text-sm truncate transition-colors ${isCustomPath
            ? 'text-amber-400 font-semibold'
            : 'text-slate-300'
            }`}
          title={`${currentPath || '/'}${isCustomPath ? ' (Custom Path)' : ''}`}
        >
          {currentPath || '/'}
        </div>
      </div>
      {/* Search Input — rendered only when a change handler is supplied */}
      {onSearchChange && (
        <div className="w-auto">
          <input
            type="text"
            value={searchQuery}
            onChange={(e) => onSearchChange(e.target.value)}
            placeholder="Tìm kiếm..."
            className="w-32 px-3 py-1.5 text-sm bg-slate-800 border border-slate-600 rounded-lg text-slate-200 placeholder-slate-500 focus:outline-none focus:border-indigo-500 focus:ring-1 focus:ring-indigo-500 transition-colors"
          />
        </div>
      )}
      {/* Save Custom Path Button - only shown in Sharing mode */}
      {onSaveCustomPath && currentMode === 'sharing' && (
        <button
          onClick={onSaveCustomPath}
          disabled={isSavingCustomPath}
          className={`px-3 py-1.5 text-xs font-medium rounded-lg border transition-all duration-200 flex items-center gap-1.5 ${hasCustomPath
            ? 'bg-indigo-600/20 text-indigo-300 border border-indigo-500/50 hover:bg-indigo-600/30 hover:border-indigo-500/70'
            : 'bg-emerald-600/20 text-emerald-300 border border-emerald-500/50 hover:bg-emerald-600/30 hover:border-emerald-500/70'
            } disabled:opacity-50 disabled:cursor-not-allowed`}
          title={hasCustomPath ? 'Cập nhật custom path' : 'Lưu path hiện tại làm custom path'}
        >
          {isSavingCustomPath ? (
            <>
              <svg className="animate-spin h-3 w-3" fill="none" viewBox="0 0 24 24">
                <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
                <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
              </svg>
              <span>Đang lưu...</span>
            </>
          ) : (
            <>
              {/* Refresh icon when updating an existing custom path, plus icon when adding */}
              <svg className="w-3.5 h-3.5" viewBox="0 0 24 24" fill="currentColor">
                {hasCustomPath ? (
                  <path fillRule="evenodd" d="M4.755 10.059a7.5 7.5 0 0 1 12.548-3.364l1.903 1.903h-3.183a.75.75 0 1 0 0 1.5h4.992a.75.75 0 0 0 .75-.75V4.356a.75.75 0 0 0-1.5 0v3.18l-1.9-1.9A9 9 0 0 0 3.306 9.67a.75.75 0 1 0 1.45.388Zm15.408 3.352a.75.75 0 0 0-.919.53 7.5 7.5 0 0 1-12.548 3.364l-1.902-1.903h3.183a.75.75 0 0 0 0-1.5H2.984a.75.75 0 0 0-.75.75v4.992a.75.75 0 0 0 1.5 0v-3.18l1.9 1.9a9 9 0 0 0 15.059-4.035.75.75 0 0 0-.53-.918Z" clipRule="evenodd" />
                ) : (
                  <path fillRule="evenodd" d="M12 3.75a.75.75 0 0 1 .75.75v6.75h6.75a.75.75 0 0 1 0 1.5h-6.75v6.75a.75.75 0 0 1-1.5 0v-6.75H4.5a.75.75 0 0 1 0-1.5h6.75V4.5a.75.75 0 0 1 .75-.75Z" clipRule="evenodd" />
                )}
              </svg>
              <span>{hasCustomPath ? 'Update' : 'Add'}</span>
            </>
          )}
        </button>
      )}
    </div>
  );
}

318
components/ProjectInfoModal.tsx Executable file
View File

@ -0,0 +1,318 @@
import React, { useState, useEffect } from 'react';
import type { ProjectDetails, Subset } from '../types';
import { getSortedKeys, sortEntriesByKey } from '../utils/sort-utils';
import ConfirmModal from './ConfirmModal';
// Language code to TMS locale mapping (same as backend).
// Both KR and KO resolve to ko_KR.
const LANG_TO_LOCALE: Record<string, string> = {
  US: 'en_US',
  FR: 'fr_FR',
  ES: 'es_ES',
  DE: 'de_DE',
  TW: 'zh_TW',
  JP: 'ja_JP',
  TH: 'th_TH',
  KR: 'ko_KR',
  KO: 'ko_KR',
};

/**
 * Build the TMS project URL; appends `?l=<locale>` when the language code
 * maps to a known locale, otherwise returns the bare project URL.
 */
function getTmsUrl(projectId: number, lang?: string): string {
  const base = `https://tms.kiledel.com/project/${projectId}`;
  if (!lang) return base;
  const locale = LANG_TO_LOCALE[lang.toUpperCase()];
  return locale ? `${base}?l=${locale}` : base;
}
// Props for the TMS project information modal.
interface ProjectInfoModalProps {
  isOpen: boolean;                       // whether the modal is shown
  onClose: () => void;
  projectDetails: ProjectDetails | null; // null until details are loaded
  isLoading: boolean;                    // show spinner while fetching details
  // Remove a member from the project; when omitted, delete buttons are hidden.
  onDeleteMember?: (projectId: number, email: string, lang?: string) => Promise<void>;
}
/**
 * Modal showing a TMS project's members and subsets. Member emails are
 * resolved to TMS usernames via /api/user/resolve-emails; only members with
 * a resolved username (different from their email) are listed. Supports
 * removing members via the optional onDeleteMember callback.
 */
const ProjectInfoModal: React.FC<ProjectInfoModalProps> = ({
  isOpen,
  onClose,
  projectDetails,
  isLoading,
  onDeleteMember,
}) => {
  // Index of the subset tab currently selected.
  const [activeSubsetIndex, setActiveSubsetIndex] = useState(0);
  // Email whose delete request is in flight (drives the per-row spinner).
  const [deletingEmail, setDeletingEmail] = useState<string | null>(null);
  // Email pending delete confirmation (opens ConfirmModal when non-null).
  const [confirmDeleteEmail, setConfirmDeleteEmail] = useState<string | null>(null);
  // email → resolved TMS username map.
  const [emailToUsername, setEmailToUsername] = useState<Record<string, string>>({});
  const [isLoadingUsernames, setIsLoadingUsernames] = useState(false);
  // Fetch usernames when project details change.
  useEffect(() => {
    if (!projectDetails || !isOpen) return;
    const emails = Object.keys(projectDetails.members);
    if (emails.length === 0) return;
    const fetchUsernames = async () => {
      setIsLoadingUsernames(true);
      try {
        console.log('Fetching usernames for emails:', emails);
        // NOTE(review): response.ok is not checked before .json() — a non-2xx
        // response falls through to the catch only if the body isn't JSON.
        const response = await fetch('/api/user/resolve-emails', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ emails }),
        });
        const data = await response.json();
        console.log('Resolve emails response:', data);
        if (data.success && data.data) {
          setEmailToUsername(data.data);
        }
      } catch (error) {
        console.error('Failed to fetch usernames:', error);
      } finally {
        setIsLoadingUsernames(false);
      }
    };
    fetchUsernames();
  }, [projectDetails?.projectId, isOpen]); // Only re-fetch when project changes
  // Run the confirmed delete; spinner state is keyed by the member's email.
  const handleDeleteMember = async (email: string) => {
    if (!projectDetails || !onDeleteMember) return;
    setDeletingEmail(email);
    setConfirmDeleteEmail(null);
    try {
      await onDeleteMember(projectDetails.projectId, email, projectDetails.lang);
    } finally {
      setDeletingEmail(null);
    }
  };
  if (!isOpen) return null;
  return (
    <div className="fixed inset-0 bg-black/50 backdrop-blur-sm flex items-center justify-center p-4 z-50">
      <div className="bg-slate-800 rounded-2xl shadow-2xl border border-slate-700 max-w-5xl w-full max-h-[90vh] overflow-hidden flex flex-col">
        {/* Header: title with a link to the TMS project page */}
        <div className="flex items-center justify-between p-6 border-b border-slate-700">
          <h2 className="text-2xl font-bold text-white">
            Thông tin Project TMS
            {projectDetails && (
              <>
                {' '}
                <span className="text-slate-400">(</span>
                <a
                  href={getTmsUrl(projectDetails.projectId, projectDetails.lang)}
                  target="_blank"
                  rel="noopener noreferrer"
                  className="text-cyan-400 hover:text-cyan-300 hover:underline transition-colors text-lg font-normal"
                >
                  {getTmsUrl(projectDetails.projectId, projectDetails.lang)}
                </a>
                <span className="text-slate-400">)</span>
              </>
            )}
          </h2>
          <button
            onClick={onClose}
            className="text-slate-400 hover:text-white transition-colors p-2 hover:bg-slate-700 rounded-lg"
          >
            <svg className="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
            </svg>
          </button>
        </div>
        {/* Content: loading spinner / details / empty state */}
        <div className="flex-1 overflow-y-auto p-6 space-y-6">
          {isLoading ? (
            <div className="text-center py-12">
              <div className="inline-block animate-spin rounded-full h-12 w-12 border-b-2 border-indigo-500"></div>
              <p className="text-slate-400 mt-4">Đang tải thông tin...</p>
            </div>
          ) : projectDetails ? (
            <>
              {/* Project Members Section - Only show users with both username and email */}
              <div>
                <h3 className="text-lg font-semibold text-white mb-4 flex items-center gap-2">
                  Project Members
                  {isLoadingUsernames && (
                    <span className="text-xs text-slate-400 animate-pulse">(đang tải usernames...)</span>
                  )}
                </h3>
                {(() => {
                  // Filter to only show users with valid TMS username, then sort by username.
                  const membersWithUsername = Object.keys(projectDetails.members)
                    .filter(email => {
                      const username = emailToUsername[email];
                      return username && username !== email;
                    })
                    .sort((a, b) => {
                      const usernameA = emailToUsername[a] || '';
                      const usernameB = emailToUsername[b] || '';
                      return usernameA.localeCompare(usernameB);
                    });
                  if (membersWithUsername.length === 0) {
                    return <p className="text-slate-400">Không member TMS nào.</p>;
                  }
                  return (
                    <div className="max-h-80 overflow-y-auto pr-2">
                      <div className="grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 gap-3">
                        {membersWithUsername.map((email) => {
                          const username = emailToUsername[email];
                          return (
                            <div
                              key={email}
                              className="bg-slate-700/50 text-slate-200 px-4 py-3 rounded-lg text-sm flex items-center justify-between gap-2 group"
                              title={email}
                            >
                              <div className="truncate flex-1">
                                <div className="font-medium text-indigo-300 truncate">{username}</div>
                                <div className="truncate text-xs text-slate-400">{email}</div>
                              </div>
                              {onDeleteMember && (
                                <button
                                  onClick={() => setConfirmDeleteEmail(email)}
                                  disabled={deletingEmail === email}
                                  className="text-slate-400 hover:text-rose-400 transition-colors p-1 rounded hover:bg-slate-600 flex-shrink-0"
                                  title="Xóa khỏi Project"
                                >
                                  {deletingEmail === email ? (
                                    <svg className="w-4 h-4 animate-spin" fill="none" viewBox="0 0 24 24">
                                      <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
                                      <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
                                    </svg>
                                  ) : (
                                    <svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                                      <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
                                    </svg>
                                  )}
                                </button>
                              )}
                            </div>
                          );
                        })}
                      </div>
                    </div>
                  );
                })()}
              </div>
              {/* Subsets Section */}
              <div>
                <h3 className="text-lg font-semibold text-white mb-4">Subsets</h3>
                {projectDetails.subsets.length === 0 ? (
                  <p className="text-slate-400">Không subset nào.</p>
                ) : (
                  <>
                    {/* Subset Tabs */}
                    <div className="flex gap-2 mb-4 overflow-x-auto pb-2">
                      {projectDetails.subsets.map((subset, index) => (
                        <button
                          key={subset.id}
                          onClick={() => setActiveSubsetIndex(index)}
                          className={`px-4 py-2 rounded-lg font-medium whitespace-nowrap transition-colors ${activeSubsetIndex === index
                            ? 'bg-indigo-600 text-white'
                            : 'bg-slate-700 text-slate-300 hover:bg-slate-600'
                            }`}
                        >
                          {subset.title}
                        </button>
                      ))}
                    </div>
                    {/* Active Subset Content */}
                    {projectDetails.subsets[activeSubsetIndex] && (
                      <div className="bg-slate-700/30 rounded-lg overflow-hidden max-h-80 overflow-y-auto">
                        <table className="w-full">
                          <thead className="sticky top-0 bg-slate-700">
                            <tr>
                              <th className="text-left p-4 text-slate-300 font-semibold">Member</th>
                              <th className="text-left p-4 text-slate-300 font-semibold">Access</th>
                            </tr>
                          </thead>
                          <tbody>
                            {(() => {
                              // Filter subset members to only those with valid TMS username, then sort by username.
                              const subsetMembers = Object.entries(projectDetails.subsets[activeSubsetIndex].members)
                                .filter(([email]) => {
                                  const username = emailToUsername[email];
                                  return username && username !== email;
                                })
                                .sort((a, b) => {
                                  const usernameA = emailToUsername[a[0]] || '';
                                  const usernameB = emailToUsername[b[0]] || '';
                                  return usernameA.localeCompare(usernameB);
                                });
                              if (subsetMembers.length === 0) {
                                return (
                                  <tr>
                                    <td colSpan={2} className="p-4 text-center text-slate-400">
                                      Không member TMS nào trong subset này.
                                    </td>
                                  </tr>
                                );
                              }
                              return subsetMembers.map(
                                ([email, role]) => {
                                  const username = emailToUsername[email];
                                  return (
                                    <tr key={email} className="border-t border-slate-700/50">
                                      <td className="p-4 text-slate-200" title={email}>{username}</td>
                                      <td className="p-4">
                                        {/* NOTE(review): roles other than 'RW' all render with the blue style */}
                                        <span
                                          className={`px-3 py-1 rounded-full text-xs font-medium ${role === 'RW'
                                            ? 'bg-green-500/20 text-green-400'
                                            : 'bg-blue-500/20 text-blue-400'
                                            }`}
                                        >
                                          {role}
                                        </span>
                                      </td>
                                    </tr>
                                  );
                                }
                              );
                            })()}
                          </tbody>
                        </table>
                      </div>
                    )}
                  </>
                )}
              </div>
            </>
          ) : (
            <p className="text-slate-400 text-center py-12">Không dữ liệu.</p>
          )}
        </div>
        {/* Footer */}
        <div className="flex justify-end p-6 border-t border-slate-700">
          <button
            onClick={onClose}
            className="px-6 py-2.5 bg-slate-700 hover:bg-slate-600 text-white rounded-lg font-medium transition-colors"
          >
            Đóng
          </button>
        </div>
      </div>
      {/* Confirm Delete Modal */}
      <ConfirmModal
        isOpen={!!confirmDeleteEmail}
        title="Xác nhận xoá"
        message={`Bạn có chắc muốn xoá "${confirmDeleteEmail}" khỏi Project Members?`}
        confirmText="Xoá"
        cancelText="Huỷ"
        onConfirm={() => confirmDeleteEmail && handleDeleteMember(confirmDeleteEmail)}
        onCancel={() => setConfirmDeleteEmail(null)}
        isLoading={!!deletingEmail}
      />
    </div>
  );
};
export default ProjectInfoModal;

View File

@ -0,0 +1,151 @@
import React, { useState, useEffect, useRef } from 'react';
import type { GeIdItem } from './QueueStatus';
import DragHandleIcon from './DragHandleIcon';
// Props for the queue management (reorder/delete) modal.
interface QueueManagementModalProps {
  isOpen: boolean;
  onClose: () => void;
  queueItems: GeIdItem[];                          // full queue; only 'waiting' items are editable here
  onReorder: (reorderedItems: GeIdItem[]) => void; // commit a new ordering of the waiting items
  onDelete: (key: string) => void;                 // remove one waiting item by its key
}
/**
 * Modal listing the 'waiting' queue items with HTML5 drag-and-drop reordering
 * and per-item delete. Reorders are committed to the parent via onReorder on
 * drop. Body scrolling is locked while the modal is open.
 */
const QueueManagementModal: React.FC<QueueManagementModalProps> = ({
  isOpen,
  onClose,
  queueItems,
  onReorder,
  onDelete,
}) => {
  // Local working copy of the waiting items (mutated during drag-and-drop).
  const [localItems, setLocalItems] = useState<GeIdItem[]>([]);
  // Index of the item being dragged.
  const dragItem = useRef<number | null>(null);
  // Index of the item currently hovered over.
  const dragOverItem = useRef<number | null>(null);
  useEffect(() => {
    if (isOpen) {
      // Lock background scroll and snapshot only the editable (waiting) items.
      document.body.style.overflow = 'hidden';
      setLocalItems(queueItems.filter(item => item.status === 'waiting'));
    } else {
      document.body.style.overflow = 'auto';
    }
    return () => {
      document.body.style.overflow = 'auto';
    };
  }, [isOpen, queueItems]);
  if (!isOpen) {
    return null;
  }
  const handleDragStart = (e: React.DragEvent<HTMLDivElement>, position: number) => {
    dragItem.current = position;
    e.dataTransfer.effectAllowed = 'move';
    // Add a delay to allow the ghost image to be created before styling
    setTimeout(() => {
      e.currentTarget.classList.add('dragging');
    }, 0)
  };
  // Track which row the dragged item is currently over.
  const handleDragEnter = (e: React.DragEvent<HTMLDivElement>, position: number) => {
    dragOverItem.current = position;
  };
  // Move the dragged item to the hovered position and commit to the parent.
  const handleDrop = (e: React.DragEvent<HTMLDivElement>) => {
    if (dragItem.current === null || dragOverItem.current === null) return;
    const newItems = [...localItems];
    const dragItemContent = newItems[dragItem.current];
    newItems.splice(dragItem.current, 1);
    newItems.splice(dragOverItem.current, 0, dragItemContent);
    dragItem.current = null;
    dragOverItem.current = null;
    setLocalItems(newItems);
    onReorder(newItems); // Update parent state
  };
  // Undo the drag styling applied in handleDragStart.
  const handleDragEnd = (e: React.DragEvent<HTMLDivElement>) => {
    e.currentTarget.classList.remove('dragging');
  }
  return (
    <div
      className="fixed inset-0 bg-black bg-opacity-70 z-50 flex justify-center items-center p-4 animate-fade-in-fast"
      onClick={onClose}
      role="dialog"
      aria-modal="true"
    >
      {/* Inner panel stops click propagation so backdrop clicks close the modal */}
      <div
        className="bg-slate-800 border border-slate-700 rounded-2xl shadow-2xl w-full max-w-2xl max-h-[90vh] flex flex-col p-6 animate-slide-up"
        onClick={e => e.stopPropagation()}
      >
        <div className="flex justify-between items-center mb-4 pb-4 border-b border-slate-700">
          <h2 className="text-xl font-semibold text-white">Quản Hàng đi</h2>
          <button onClick={onClose} className="text-slate-400 hover:text-white transition-colors text-2xl leading-none">&times;</button>
        </div>
        <div className="flex-grow overflow-y-auto pr-2 -mr-2">
          {localItems.length > 0 ? (
            <div className="space-y-2">
              {localItems.map((item, index) => (
                <div
                  key={item.key}
                  className="flex items-start justify-between p-3 bg-slate-700/50 rounded-lg group transition-shadow"
                  draggable
                  onDragStart={(e) => handleDragStart(e, index)}
                  onDragEnter={(e) => handleDragEnter(e, index)}
                  onDragEnd={handleDragEnd}
                  onDragOver={(e) => e.preventDefault()}
                  onDrop={handleDrop}
                >
                  <div className="flex items-start flex-1 min-w-0">
                    <div className="cursor-move text-slate-500 group-hover:text-slate-300 mr-4 pt-1 flex-shrink-0">
                      <DragHandleIcon />
                    </div>
                    <div className="flex-1 min-w-0">
                      <div className="font-mono text-sm text-slate-200 font-semibold truncate" title={`${item.id} (${item.lang})`}>
                        {item.id} ({item.lang})
                      </div>
                      <pre className="text-slate-400 text-xs font-mono mt-1 whitespace-pre-wrap break-words">{item.usernames}</pre>
                    </div>
                  </div>
                  <button
                    onClick={() => onDelete(item.key)}
                    className="text-xs font-medium text-rose-500 hover:text-rose-400 transition-colors opacity-0 group-hover:opacity-100 ml-4 flex-shrink-0"
                  >
                    Xoá
                  </button>
                </div>
              ))}
            </div>
          ) : (
            <div className="flex justify-center items-center h-full">
              <p className="text-slate-500">Không submit nào đang chờ.</p>
            </div>
          )}
        </div>
      </div>
      <style>{`
        .dragging {
          opacity: 0.5;
          box-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.2), 0 4px 6px -4px rgb(0 0 0 / 0.2);
        }
        @keyframes fade-in-fast {
          from { opacity: 0; }
          to { opacity: 1; }
        }
        .animate-fade-in-fast {
          animation: fade-in-fast 0.2s ease-out forwards;
        }
        @keyframes slide-up {
          from { opacity: 0; transform: translateY(20px); }
          to { opacity: 1; transform: translateY(0); }
        }
        .animate-slide-up {
          animation: slide-up 0.3s ease-out forwards;
        }
      `}</style>
    </div>
  );
};
export default QueueManagementModal;

97
components/QueueStatus.tsx Executable file
View File

@ -0,0 +1,97 @@
import React, { memo } from 'react';
/** One GE ID entry in the permission queue, as rendered by QueueStatus. */
export type GeIdItem = {
  key: string;
  id: string;
  lang: string;
  status: 'waiting' | 'processing' | 'done' | 'error';
  usernames?: string;
};
interface QueueStatusProps {
  currentSubmission: { username: string; geIdAndLang: string } | null;
  queueItems: GeIdItem[];
  pendingSubmissionsCount?: number;
  onOpenQueueModal: () => void;
}
/** Tailwind badge classes per queue status. */
const STATUS_BADGE_CLASSES: Record<GeIdItem['status'], string> = {
  processing: 'bg-amber-500/20 text-amber-300 border-amber-500/30',
  done: 'bg-green-500/20 text-green-300 border-green-500/30',
  error: 'bg-rose-500/20 text-rose-300 border-rose-500/30',
  waiting: 'bg-slate-700/50 text-slate-400 border-slate-600',
};
/**
 * Returns the Tailwind classes for a status badge.
 * Any value not in the map falls back to the neutral "waiting" look,
 * matching the original switch's default branch.
 */
const getStatusStyles = (status: GeIdItem['status']): string =>
  STATUS_BADGE_CLASSES[status] ?? STATUS_BADGE_CLASSES.waiting;
/**
 * Read-only status panel for the permission queue.
 *
 * Shows the pending-submission count, the distinct usernames involved in
 * the currently processing submission, and one colored badge per GE ID.
 * Renders a placeholder message when no queue items are present.
 */
const QueueStatus: React.FC<QueueStatusProps> = ({ currentSubmission, queueItems, pendingSubmissionsCount = 0, onOpenQueueModal }) => {
  // NOTE(review): onOpenQueueModal is destructured but never referenced in
  // this component — confirm whether the panel was meant to open the modal.
  // derive usernames from queueItems when available; fallback to currentSubmission
  const derivedUsernames = (() => {
    if (queueItems && queueItems.length > 0) {
      // Each item may carry newline-separated usernames: flatten, trim,
      // drop blanks, then dedupe with a Set (first-seen order preserved).
      const all = queueItems.flatMap(i => (i.usernames || '').split('\n').map(s => s.trim()).filter(Boolean));
      return Array.from(new Set(all));
    }
    if (currentSubmission) return currentSubmission.username.split('\n').map(s => s.trim()).filter(Boolean);
    return [];
  })();
  const userCount = derivedUsernames.length;
  const geIdCount = queueItems.length;
  // Show details only when there are queue items available (currently processing submission)
  const showDetails = geIdCount > 0;
  return (
    <div className="mt-8 bg-slate-800/50 backdrop-blur-sm p-6 rounded-2xl shadow-lg border border-slate-700 min-h-[10rem]">
      <div className="flex justify-between items-center mb-4 border-b border-slate-700 pb-3">
        <h2 className="text-xl font-semibold text-white">Hàng đi Trạng thái</h2>
        <span className="text-sm font-semibold tabular-nums rounded-md px-2 py-1 text-slate-400">
          Submit đang chờ: {pendingSubmissionsCount}
        </span>
      </div>
      {showDetails ? (
        <div className="grid grid-cols-1 md:grid-cols-2 gap-6 font-mono text-sm animate-fade-in">
          <div>
            <h3 className="text-slate-400 mb-2 font-sans font-semibold">Username ({userCount})</h3>
            <pre className="bg-slate-900/50 p-3 rounded-lg whitespace-pre-wrap break-all h-48 overflow-y-auto">{derivedUsernames.join('\n')}</pre>
          </div>
          <div>
            <h3 className="text-slate-400 mb-2 font-sans font-semibold">GE ID & Lang ({geIdCount})</h3>
            <div className="bg-slate-900/50 p-3 rounded-lg h-48 overflow-y-auto flex flex-wrap gap-2 content-start">
              {/* Disabled buttons act as colored status chips only. */}
              {queueItems.map(item => (
                <button
                  key={item.key}
                  className={`px-2 py-1 text-xs rounded-md border transition-all duration-300 ${getStatusStyles(item.status)}`}
                  disabled
                >
                  {item.id} {item.lang.toUpperCase()}
                </button>
              ))}
            </div>
          </div>
        </div>
      ) : (
        <div className="flex justify-center items-center h-full min-h-[12rem]">
          <p className="text-slate-500">Không submit nào đang đưc xử .</p>
        </div>
      )}
      <style>{`
        @keyframes fade-in {
          from { opacity: 0; transform: translateY(10px); }
          to { opacity: 1; transform: translateY(0); }
        }
        .animate-fade-in {
          animation: fade-in 0.5s ease-out forwards;
        }
      `}</style>
    </div>
  );
};
// Memoized so the panel only re-renders when its props actually change.
export default memo(QueueStatus);

149
components/RawDownloadForm.tsx Executable file
View File

@ -0,0 +1,149 @@
import React from 'react';
import Spinner from './Spinner';
/** Props for {@link RawDownloadForm}. */
interface RawDownloadFormProps {
  // Raw "GE ID + lang" input value, e.g. "1000 de".
  geIdAndLang: string;
  setGeIdAndLang: (value: string) => void;
  // True while the settings lookup runs; disables both actions.
  isLoading: boolean;
  handleSubmit: (e: React.FormEvent) => void;
  handleRawDownload: (e: React.FormEvent) => void;
  // True while the raw download runs; disables both actions.
  isRawDownloading: boolean;
  // Other projects sharing the same raw files, rendered as chips.
  relatedProjects?: Array<{ ge_id: string, lang: string }>;
  // Comma-separated note string; split into tags for display.
  projectNote?: string | null;
  // Currently selected GE ID / lang, used to highlight the matching chip
  // (lang is compared case-insensitively).
  currentGeId?: string;
  currentLang?: string;
}
/**
 * Raw-file download form: one text input with two actions ("Tải setting"
 * submits the form, "Tải raw" triggers the raw download), plus read-only
 * sections listing related projects and note tags.
 */
const RawDownloadForm: React.FC<RawDownloadFormProps> = ({
  geIdAndLang,
  setGeIdAndLang,
  isLoading,
  handleSubmit,
  handleRawDownload,
  isRawDownloading,
  relatedProjects = [],
  projectNote = null,
  currentGeId = '',
  currentLang = '',
}) => {
  // Parse note into tags (split by comma)
  const noteTags = projectNote
    ? projectNote.split(',').map(tag => tag.trim()).filter(Boolean)
    : [];
  return (
    <div className="space-y-6">
      {/* Input and Action Buttons */}
      <form onSubmit={handleSubmit}>
        <div className="flex gap-3">
          <input
            id="rawGeIdAndLang"
            type="text"
            value={geIdAndLang}
            onChange={(e) => setGeIdAndLang(e.target.value)}
            onFocus={(e) => e.target.select()}
            className="flex-1 min-w-0 bg-slate-900/50 border border-slate-700 text-slate-100 text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 p-2.5 transition-colors duration-200"
            placeholder="Nhập GE ID và Lang (ví dụ: 1000 de)"
            disabled={isLoading || isRawDownloading}
          />
          <button
            type="submit"
            disabled={isLoading || isRawDownloading}
            className="flex justify-center items-center text-white bg-indigo-600 hover:bg-indigo-700 focus:ring-4 focus:outline-none focus:ring-indigo-800 font-medium rounded-lg text-sm px-5 py-2.5 transition-all duration-200 disabled:bg-indigo-900 disabled:text-slate-400 disabled:cursor-not-allowed whitespace-nowrap w-[130px]"
          >
            {isLoading ? (
              <>
                <Spinner />
                <span>Đang tìm...</span>
              </>
            ) : (
              'Tải setting'
            )}
          </button>
          <button
            type="button"
            onClick={handleRawDownload}
            disabled={isLoading || isRawDownloading}
            className="flex justify-center items-center text-white bg-emerald-600 hover:bg-emerald-700 focus:ring-4 focus:outline-none focus:ring-emerald-800 font-medium rounded-lg text-sm px-5 py-2.5 transition-all duration-200 disabled:bg-emerald-900 disabled:text-slate-400 disabled:cursor-not-allowed whitespace-nowrap w-[130px]"
          >
            {isRawDownloading ? (
              <>
                <Spinner />
                <span>Đang tải...</span>
              </>
            ) : (
              'Tải raw'
            )}
          </button>
        </div>
      </form>
      {/* Related Projects Section */}
      <div className="bg-slate-800/30 border border-slate-700/50 rounded-xl p-4">
        <div className="flex items-center gap-2 mb-3">
          <svg className="w-4 h-4 text-indigo-400" viewBox="0 0 24 24" fill="currentColor">
            <path fillRule="evenodd" d="M4.848 2.771A49.144 49.144 0 0 1 12 2.25c2.43 0 4.817.178 7.152.52 1.978.292 3.348 2.024 3.348 3.97v6.02c0 1.946-1.37 3.678-3.348 3.97a48.901 48.901 0 0 1-3.476.383.39.39 0 0 0-.297.17l-2.755 4.133a.75.75 0 0 1-1.248 0l-2.755-4.133a.39.39 0 0 0-.297-.17 48.9 48.9 0 0 1-3.476-.384c-1.978-.29-3.348-2.024-3.348-3.97V6.741c0-1.946 1.37-3.68 3.348-3.97Z" clipRule="evenodd" />
          </svg>
          <h3 className="text-sm font-semibold text-slate-300">Tựa cùng raw</h3>
        </div>
        <div className="flex flex-wrap gap-2">
          {relatedProjects.length > 0 ? (
            relatedProjects.map((project, idx) => {
              // Highlight the chip that matches the current GE ID + lang.
              const isCurrent = project.ge_id === currentGeId && project.lang.toUpperCase() === currentLang.toUpperCase();
              return (
                <div
                  key={idx}
                  className={`inline-flex items-center gap-2 px-3 py-1.5 rounded-lg transition-all duration-200 text-sm font-medium ${isCurrent
                    ? 'bg-indigo-600/30 border border-indigo-500/60'
                    : 'bg-slate-700/50 border border-slate-600/50 hover:bg-slate-600/60 hover:border-slate-500/60'
                    }`}
                >
                  <span className={isCurrent ? 'text-indigo-200 font-bold' : 'text-slate-200 font-semibold'}>
                    {project.ge_id}
                  </span>
                  <span className={`uppercase text-xs tracking-wider ${isCurrent ? 'text-indigo-300' : 'text-slate-400'}`}>
                    {project.lang}
                  </span>
                </div>
              );
            })
          ) : (
            <span className="text-sm text-slate-500 italic py-1">
              Chưa dữ liệu
            </span>
          )}
        </div>
      </div>
      {/* Note Section */}
      <div className="bg-slate-800/30 border border-slate-700/50 rounded-xl p-4">
        <div className="flex items-center gap-2 mb-3">
          <svg className="w-4 h-4 text-amber-400" viewBox="0 0 24 24" fill="currentColor">
            <path fillRule="evenodd" d="M4.848 2.771A49.144 49.144 0 0 1 12 2.25c2.43 0 4.817.178 7.152.52 1.978.292 3.348 2.024 3.348 3.97v6.02c0 1.946-1.37 3.678-3.348 3.97a48.901 48.901 0 0 1-3.476.383.39.39 0 0 0-.297.17l-2.755 4.133a.75.75 0 0 1-1.248 0l-2.755-4.133a.39.39 0 0 0-.297-.17 48.9 48.9 0 0 1-3.476-.384c-1.978-.29-3.348-2.024-3.348-3.97V6.741c0-1.946 1.37-3.68 3.348-3.97Z" clipRule="evenodd" />
          </svg>
          <h3 className="text-sm font-semibold text-slate-300">Note</h3>
        </div>
        <div className="flex flex-wrap gap-2">
          {noteTags.length > 0 ? (
            noteTags.map((tag, idx) => (
              <span
                key={idx}
                className="inline-flex items-center px-3 py-1.5 bg-amber-500/10 border border-amber-500/30 rounded-lg text-amber-200 text-sm font-medium hover:bg-amber-500/20 hover:border-amber-500/50 transition-all duration-200"
              >
                {tag}
              </span>
            ))
          ) : (
            <span className="text-sm text-slate-500 italic py-1">
              Chưa note
            </span>
          )}
        </div>
      </div>
    </div>
  );
};
export default RawDownloadForm;

14
components/RetryIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React from 'react';
const RetryIcon: React.FC<{ className?: string }> = ({ className }) => (
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
className={className}
>
<path fillRule="evenodd" d="M4.755 10.059a7.5 7.5 0 0 1 12.548-3.364l1.903 1.903h-3.183a.75.75 0 1 0 0 1.5h4.992a.75.75 0 0 0 .75-.75V4.356a.75.75 0 0 0-1.5 0v3.18l-1.9-1.9A9 9 0 0 0 3.306 9.67a.75.75 0 1 0 1.45.388Zm15.408 3.352a.75.75 0 0 0-.919.53 7.5 7.5 0 0 1-12.548 3.364l-1.902-1.903h3.183a.75.75 0 0 0 0-1.5H2.984a.75.75 0 0 0-.75.75v4.992a.75.75 0 0 0 1.5 0v-3.18l1.9 1.9a9 9 0 0 0 15.059-4.035.75.75 0 0 0-.53-.918Z" clipRule="evenodd" />
</svg>
);
export default RetryIcon;

10
components/Spinner.tsx Executable file
View File

@ -0,0 +1,10 @@
import React from 'react';
/**
 * Small white loading spinner used inside buttons while an action runs.
 * The rotation comes from Tailwind's `animate-spin`; the faint circle is
 * the track and the brighter quarter-arc is the moving indicator.
 */
const Spinner: React.FC = () => {
  const track = (
    <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
  );
  const indicator = (
    <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
  );
  return (
    <svg className="animate-spin -ml-1 mr-3 h-5 w-5 text-white" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
      {track}
      {indicator}
    </svg>
  );
};
export default Spinner;

126
components/SubmissionForm.tsx Executable file
View File

@ -0,0 +1,126 @@
import React, { useRef, useEffect, useState, useCallback } from 'react';
import UsernameAutocomplete from './UsernameAutocomplete';
/** Props for {@link SubmissionForm}. */
interface SubmissionFormProps {
  username: string;
  setUsername: (value: string) => void;
  geIdAndLang: string;
  setGeIdAndLang: (value: string) => void;
  handleSubmit: (e: React.FormEvent) => void;
  // Opens the user-management modal.
  onManageUserClick: () => void;
  // Optional: shows TMS project info; button is hidden when absent.
  onViewInfoClick?: () => void;
}
/**
 * Main permission-grant form: a username autocomplete textarea and a
 * GE ID & Lang textarea side by side, plus action buttons. The username
 * textarea reports its height via onHeightChange and that height is
 * mirrored onto the GE ID textarea to keep the two boxes aligned.
 */
const SubmissionForm: React.FC<SubmissionFormProps> = ({
  username,
  setUsername,
  geIdAndLang,
  setGeIdAndLang,
  handleSubmit,
  onManageUserClick,
  onViewInfoClick,
}) => {
  const geIdAndLangRef = useRef<HTMLTextAreaElement>(null);
  // Height in px reported by UsernameAutocomplete; applied as inline style below.
  const [usernameHeight, setUsernameHeight] = useState<number | null>(null);
  // Memoize callbacks to prevent unnecessary re-renders
  const handleUsernameChange = useCallback((value: string) => {
    setUsername(value);
  }, [setUsername]);
  const handleUserSelect = useCallback((user: string) => {
    setUsername(user);
  }, [setUsername]);
  const handleHeightChange = useCallback((h: number) => {
    setUsernameHeight(h);
  }, []);
  const handleGeIdAndLangChange = useCallback((e: React.ChangeEvent<HTMLTextAreaElement>) => {
    setGeIdAndLang(e.target.value);
  }, [setGeIdAndLang]);
  // NOTE(review): this observer watches the GE ID textarea itself and writes
  // the element's own offsetHeight back into style.height — effectively a
  // no-op that pins the height after a manual resize. If the intent was to
  // mirror the username textarea instead, the observed element looks wrong;
  // confirm against the onHeightChange path above.
  useEffect(() => {
    const geIdAndLangEl = geIdAndLangRef.current;
    if (!geIdAndLangEl) return;
    const observer = new ResizeObserver(entries => {
      window.requestAnimationFrame(() => {
        if (!entries.length) return;
        const newHeightPx = `${(entries[0].target as HTMLElement).offsetHeight}px`;
        if (geIdAndLangEl.style.height !== newHeightPx) {
          geIdAndLangEl.style.height = newHeightPx;
        }
      });
    });
    observer.observe(geIdAndLangEl);
    return () => observer.disconnect();
  }, []);
  return (
    <form onSubmit={handleSubmit}>
      <div className="grid grid-cols-1 md:grid-cols-2 gap-6 mb-6">
        <div>
          <UsernameAutocomplete
            value={username}
            onChange={handleUsernameChange}
            onUserSelect={handleUserSelect}
            disabled={false}
            placeholder="DKI_공통_PTrinh(L)
DKI_공통_Amii(DM)"
            rows={5}
            onHeightChange={handleHeightChange}
          />
        </div>
        <div>
          <textarea
            ref={geIdAndLangRef}
            id="geIdAndLang"
            value={geIdAndLang}
            onChange={handleGeIdAndLangChange}
            className="bg-slate-900/50 border border-slate-700 text-slate-100 text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 block w-full p-2.5 transition-colors duration-200 resize-y min-h-32"
            placeholder="1000 de
696 us f"
            rows={5}
            style={usernameHeight ? { height: `${usernameHeight}px` } : undefined}
          />
        </div>
      </div>
      <div className="flex justify-between items-center">
        <div className="flex items-center gap-4">
          <button
            type="submit"
            className="flex justify-center items-center text-white bg-indigo-600 hover:bg-indigo-700 focus:ring-4 focus:outline-none focus:ring-indigo-800 font-medium rounded-lg text-sm px-5 py-3 text-center transition-all duration-200"
          >
            Cấp quyền
          </button>
          <button
            type="button"
            onClick={onManageUserClick}
            className="text-white bg-slate-600 hover:bg-slate-700 focus:ring-4 focus:outline-none focus:ring-slate-800 font-medium rounded-lg text-sm px-5 py-3 text-center transition-all duration-200"
          >
            Quản người dùng
          </button>
          {/* "View info" button only rendered when a handler is supplied;
              disabled until the GE ID field has content. */}
          {onViewInfoClick && (
            <button
              type="button"
              disabled={!geIdAndLang.trim()}
              onClick={onViewInfoClick}
              className="text-white bg-emerald-600 hover:bg-emerald-700 focus:ring-4 focus:outline-none focus:ring-emerald-800 font-medium rounded-lg text-sm px-5 py-3 text-center transition-all duration-200 disabled:bg-emerald-900 disabled:text-slate-500 disabled:cursor-not-allowed"
              title={!geIdAndLang.trim() ? 'Vui lòng nhập GE ID & Lang' : 'Xem thông tin project TMS'}
            >
              Xem thông tin
            </button>
          )}
        </div>
      </div>
    </form>
  );
};
export default SubmissionForm;

View File

@ -0,0 +1,57 @@
import React, { useState, memo } from 'react';
import type { Submission } from '../types';
import HistoryItem from './HistoryItem';
/** Props for {@link SubmissionHistory}. */
interface SubmissionHistoryProps {
  submissions: Submission[];
  // Opens the error-details view with the given details text.
  onErrorClick: (details: string) => void;
  // Deletes a history entry by id.
  onDelete: (id: string) => void;
  // Re-submits the failed GE IDs / usernames of a past submission.
  onRetry: (submission: Submission, errorGeIds: string[], errorUsernames: string[]) => void;
  // Copies a past submission's inputs back into the form.
  onPaste: (username: string, geIdAndLang: string) => void;
}
/**
 * Past-submission list with an error-only toggle. The hideNonErrors flag is
 * forwarded to each HistoryItem (presumably the filtering happens there —
 * confirm in HistoryItem).
 */
const SubmissionHistory: React.FC<SubmissionHistoryProps> = ({ submissions, onErrorClick, onDelete, onRetry, onPaste }) => {
  const [hideNonErrors, setHideNonErrors] = useState(false);
  return (
    <div className="w-full">
      <div className="flex justify-between items-center mb-6">
        <h2 className="text-2xl font-semibold text-white">Lịch sử submit</h2>
        {/* Pill-style toggle switch for the error-only filter. */}
        <label className="flex items-center gap-2 text-sm text-slate-400 cursor-pointer">
          <span>Chỉ hiện lỗi</span>
          <button
            onClick={() => setHideNonErrors(!hideNonErrors)}
            className={`relative inline-flex h-6 w-11 items-center rounded-full transition-colors ${hideNonErrors ? 'bg-indigo-600' : 'bg-slate-600'
              }`}
          >
            <span
              className={`inline-block h-4 w-4 transform rounded-full bg-white transition-transform ${hideNonErrors ? 'translate-x-6' : 'translate-x-1'
                }`}
            />
          </button>
        </label>
      </div>
      {submissions.length > 0 ? (
        <div className="space-y-4">
          {submissions.map((sub) => (
            <HistoryItem
              key={sub.id}
              submission={sub}
              onErrorClick={onErrorClick}
              onDelete={onDelete}
              onRetry={onRetry}
              onPaste={onPaste}
              hideNonErrors={hideNonErrors}
            />
          ))}
        </div>
      ) : (
        <div className="text-center py-10 px-6 bg-slate-800/50 border border-slate-700 rounded-lg">
          <p className="text-slate-400">Chưa lần submit nào.</p>
        </div>
      )}
    </div>
  );
};
// Memoized: only re-renders when the submissions list or handlers change.
export default memo(SubmissionHistory);

0
components/Toast.tsx Executable file
View File

0
components/ToastContainer.tsx Executable file
View File

14
components/TrashIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React from 'react';
const TrashIcon: React.FC<{ className?: string }> = ({ className }) => (
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
className={className}
>
<path fillRule="evenodd" d="M16.5 4.478v.227a48.816 48.816 0 0 1 3.878.512.75.75 0 1 1-.256 1.478l-.209-.035-1.005 13.07a3 3 0 0 1-2.991 2.77H8.084a3 3 0 0 1-2.991-2.77L4.087 6.66l-.209.035a.75.75 0 0 1-.256-1.478A48.567 48.567 0 0 1 7.5 4.705v-.227c0-1.564 1.213-2.9 2.816-2.951a52.662 52.662 0 0 1 3.369 0c1.603.051 2.815 1.387 2.815 2.951Zm-6.136-1.452a51.196 51.196 0 0 1 3.273 0C14.39 3.05 15 3.684 15 4.478v.113a49.488 49.488 0 0 0-6 0v-.113c0-.794.609-1.428 1.364-1.452Zm-.355 5.945a.75.75 0 1 0-1.5.058l.347 9a.75.75 0 1 0 1.499-.058l-.346-9Zm5.48.058a.75.75 0 1 0-1.498-.058l-.347 9a.75.75 0 0 0 1.5.058l.345-9Z" clipRule="evenodd" />
</svg>
);
export default TrashIcon;

72
components/TruncatedPath.tsx Executable file
View File

@ -0,0 +1,72 @@
import React, { useState } from 'react';
/** Props for {@link TruncatedPath}. */
interface TruncatedPathProps {
  path: string;
  // Color scheme of the field; defaults to 'yellow'.
  variant?: 'blue' | 'yellow';
  // NOTE(review): maxChars is accepted (default 50) but never used below —
  // confirm whether character-based truncation was intended.
  maxChars?: number;
  label?: string; // Optional label for display context
}
/**
 * Read-only input that displays a path clipped on the left, so the tail
 * (filename) stays visible: an effect scrolls the input to its end whenever
 * the path changes. Clicking the field copies the full path to the clipboard
 * and flashes a green "copied" style for one second.
 */
export const TruncatedPath: React.FC<TruncatedPathProps> = ({
  path,
  variant = 'yellow',
  maxChars = 50,
  label
}) => {
  const [isCopied, setIsCopied] = useState(false);
  const inputRef = React.useRef<HTMLInputElement>(null);
  // Scroll to end on mount to show filename
  React.useEffect(() => {
    if (inputRef.current) {
      inputRef.current.scrollLeft = inputRef.current.scrollWidth;
    }
  }, [path]);
  // Copy the full path; uses the async Clipboard API when available and
  // falls back to a hidden textarea + execCommand otherwise.
  const handleCopy = async () => {
    try {
      if (navigator.clipboard && navigator.clipboard.writeText) {
        await navigator.clipboard.writeText(path);
        setIsCopied(true);
        setTimeout(() => setIsCopied(false), 1000);
      } else {
        // Fallback for older browsers
        const textArea = document.createElement('textarea');
        textArea.value = path;
        textArea.style.position = 'fixed';
        textArea.style.left = '-999999px';
        document.body.appendChild(textArea);
        textArea.select();
        document.execCommand('copy');
        document.body.removeChild(textArea);
        setIsCopied(true);
        setTimeout(() => setIsCopied(false), 1000);
      }
    } catch (err) {
      console.error('Failed to copy:', err);
    }
  };
  // Per-variant colors; the green "copied" styling temporarily overrides both.
  const inputClasses = variant === 'blue'
    ? `border-blue-700/30 ${isCopied ? 'text-green-400 bg-green-900/20 border-green-600/50' : 'text-blue-400/80 bg-slate-900/50'}`
    : `border-yellow-700/30 ${isCopied ? 'text-green-400 bg-green-900/20 border-green-600/50' : 'text-yellow-400/80 bg-slate-900/50'}`;
  const titleText = label ? `${label}: Click để copy` : `Click để copy: ${path}`;
  return (
    <input
      ref={inputRef}
      type="text"
      value={path}
      readOnly
      onClick={handleCopy}
      title={titleText}
      className={`w-full px-3 py-2 text-xs border rounded-lg focus:outline-none focus:ring-2 transition-all duration-300 cursor-pointer ${variant === 'blue' ? 'focus:ring-blue-500/30 hover:border-blue-600/50' : 'focus:ring-yellow-500/30 hover:border-yellow-600/50'
        } ${inputClasses}`}
    />
  );
};

View File

@ -0,0 +1,174 @@
import React, { useState, useEffect } from 'react';
import { sortByProperty } from '../utils/sort-utils';
// Default prefix for TMS username search
const TMS_USERNAME_PREFIX = 'DKI_';
/** A TMS user as returned by the /api/user/search endpoint. */
interface TmsUser {
  email: string;
  name: string;
}
/** Props for {@link UserManagementModal}. */
interface UserManagementModalProps {
  isOpen: boolean;
  onClose: () => void;
  // NOTE(review): onAddUser is accepted but never referenced in this
  // component — the rows below are display-only. Confirm intended use.
  onAddUser: (username: string) => void;
}
/**
 * Modal that lists TMS users matching the DKI_ prefix and lets the operator
 * filter them locally by name or email. The full list is fetched once each
 * time the modal opens; filtering happens client-side.
 */
const UserManagementModal: React.FC<UserManagementModalProps> = ({ isOpen, onClose, onAddUser }) => {
  const [searchTerm, setSearchTerm] = useState('');
  // Full fetched list; filteredUsers is the subset currently shown.
  const [allUsers, setAllUsers] = useState<TmsUser[]>([]);
  const [filteredUsers, setFilteredUsers] = useState<TmsUser[]>([]);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  // Load all users when modal opens
  useEffect(() => {
    if (isOpen) {
      // Lock page scroll while the modal is up.
      document.body.style.overflow = 'hidden';
      setSearchTerm('');
      setError(null);
      loadAllUsers();
    } else {
      // NOTE(review): restores overflow to 'auto' unconditionally, which
      // would clobber any prior non-default value — confirm acceptable.
      document.body.style.overflow = 'auto';
    }
    return () => {
      document.body.style.overflow = 'auto';
    };
  }, [isOpen]);
  // Filter users when search term changes
  useEffect(() => {
    if (!searchTerm.trim()) {
      setFilteredUsers(sortByProperty(allUsers, u => u.name));
    } else {
      // Case-insensitive substring match on either name or email.
      const term = searchTerm.toLowerCase();
      setFilteredUsers(
        sortByProperty(
          allUsers.filter(u => u.name.toLowerCase().includes(term) || u.email.toLowerCase().includes(term)),
          u => u.name
        )
      );
    }
  }, [searchTerm, allUsers]);
  // Fetches the DKI_ user list from the backend and stores it sorted by name.
  // Failures (network or unexpected payload) surface via the error banner.
  const loadAllUsers = async () => {
    setIsLoading(true);
    setError(null);
    try {
      // Search with DKI_ prefix to get DKI users only
      const response = await fetch('/api/user/search', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ query: TMS_USERNAME_PREFIX }),
      });
      const data = await response.json();
      if (data.success && Array.isArray(data.data)) {
        const sortedUsers = sortByProperty(data.data as TmsUser[], u => u.name);
        setAllUsers(sortedUsers);
        setFilteredUsers(sortedUsers);
      } else {
        setError('Không thể tải danh sách người dùng');
      }
    } catch (err) {
      console.error('Error loading users:', err);
      setError('Lỗi khi tải danh sách người dùng');
    } finally {
      setIsLoading(false);
    }
  };
  if (!isOpen) {
    return null;
  }
  return (
    <div
      className="fixed inset-0 bg-black bg-opacity-70 z-50 flex justify-center items-center p-4 animate-fade-in-fast"
      onClick={onClose}
      role="dialog"
      aria-modal="true"
    >
      {/* stopPropagation keeps clicks inside the panel from closing the modal */}
      <div
        className="bg-slate-800 border border-slate-700 rounded-2xl shadow-2xl w-full max-w-2xl h-[600px] flex flex-col p-6 animate-slide-up"
        onClick={e => e.stopPropagation()}
      >
        <div className="flex justify-between items-center mb-4 pb-4 border-b border-slate-700">
          <h2 className="text-xl font-semibold text-white">Tìm kiếm người dùng TMS</h2>
          <button onClick={onClose} className="text-slate-400 hover:text-white transition-colors text-2xl">&times;</button>
        </div>
        <div className="mb-4">
          <input
            type="text"
            placeholder="Lọc theo tên hoặc email..."
            value={searchTerm}
            onChange={e => setSearchTerm(e.target.value)}
            className="bg-slate-900/50 border border-slate-700 text-slate-100 text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 block w-full p-2.5"
            autoFocus
          />
        </div>
        {error && (
          <div className="mb-4 p-3 bg-rose-900/20 border border-rose-700 rounded-lg text-sm text-rose-300">
            {error}
          </div>
        )}
        <div className="flex-grow overflow-y-auto pr-2 -mr-2">
          {isLoading ? (
            <div className="flex justify-center items-center h-32 text-slate-400">
              <span>Đang tải danh sách...</span>
            </div>
          ) : filteredUsers.length === 0 ? (
            <div className="flex justify-center items-center h-32 text-slate-400">
              <span>{searchTerm ? 'Không tìm thấy người dùng phù hợp' : 'Không có người dùng nào'}</span>
            </div>
          ) : (
            <>
              <div className="grid grid-cols-2 gap-2 text-sm font-semibold text-slate-400 px-3 py-2 border-b border-slate-700">
                <span>Tên</span>
                <span>Email</span>
              </div>
              <div className="divide-y divide-slate-700/50">
                {filteredUsers.map((user) => (
                  <div
                    key={user.email}
                    className="grid grid-cols-2 gap-2 items-center p-3"
                  >
                    <span className="text-slate-200 font-medium">{user.name}</span>
                    <span className="text-slate-400 text-sm truncate">{user.email}</span>
                  </div>
                ))}
              </div>
            </>
          )}
        </div>
        {/* Footer - fixed at bottom */}
        <div className="mt-4 pt-4 border-t border-slate-700 text-center text-sm text-slate-500">
          {searchTerm ? `Tìm thấy ${filteredUsers.length} / ${allUsers.length} người dùng` : `Tổng cộng ${allUsers.length} người dùng`}
        </div>
      </div>
      <style>{`
        @keyframes fade-in-fast {
          from { opacity: 0; }
          to { opacity: 1; }
        }
        .animate-fade-in-fast {
          animation: fade-in-fast 0.2s ease-out forwards;
        }
        @keyframes slide-up {
          from { opacity: 0; transform: translateY(20px); }
          to { opacity: 1; transform: translateY(0); }
        }
        .animate-slide-up {
          animation: slide-up 0.3s ease-out forwards;
        }
      `}</style>
    </div>
  );
};
export default UserManagementModal;

57
components/UserResultItem.tsx Executable file
View File

@ -0,0 +1,57 @@
import React, { memo } from 'react';
import type { ResultDetail } from '../types';
import CheckIcon from './CheckIcon';
import XCircleIcon from './XCircleIcon';
/** Props for {@link UserResultItem}. */
interface UserResultItemProps {
  detail: ResultDetail;
  // Opens the error-details view with the stored error text.
  onErrorClick: (details: string) => void;
}
/**
 * One row of a submission result: username on the left, status icon +
 * message on the right. Colors: red for errors, amber for "already has
 * permission" successes, green for plain successes. When error details
 * exist the status becomes a clickable button.
 */
const UserResultItem: React.FC<UserResultItemProps> = ({ detail, onErrorClick }) => {
  const isSuccess = detail.status === 'success';
  const hasErrorDetails = !isSuccess && detail.errorDetails;
  // Check if message contains "Đã có quyền" - should be yellow/warning
  const isAlreadyHasPermission = isSuccess && detail.message?.includes('Đã có quyền');
  // Determine status color: error=red, already has=yellow, success=green
  const statusColor = !isSuccess
    ? 'text-rose-400'
    : isAlreadyHasPermission
      ? 'text-amber-400'
      : 'text-green-400';
  const Icon = isSuccess ? CheckIcon : XCircleIcon;
  // Renders the icon + message; as a button when clickable error details exist.
  const renderStatus = (className?: string) => {
    const commonClasses = `flex items-center gap-2 ${statusColor}`;
    if (hasErrorDetails) {
      return (
        <button
          onClick={() => onErrorClick(detail.errorDetails!)}
          className={`${commonClasses} hover:underline ${className || ''}`}
        >
          <Icon className="w-4 h-4 flex-shrink-0" />
          <span className="text-xs">{detail.message}</span>
        </button>
      )
    }
    return (
      <div className={`${commonClasses} ${className || ''}`}>
        <Icon className="w-4 h-4 flex-shrink-0" />
        <span className="text-xs">{detail.message}</span>
      </div>
    );
  }
  return (
    <div className="grid grid-cols-1 sm:grid-cols-10 gap-x-4 gap-y-1 items-center p-2 rounded-md hover:bg-slate-700/30 font-mono text-sm">
      <div className="truncate text-slate-300 sm:col-span-4" title={detail.username}>
        {detail.username}
      </div>
      {renderStatus('sm:col-span-6')}
    </div>
  );
};
// Memoized: rows only re-render when their detail or handler changes.
export default memo(UserResultItem);

View File

@ -0,0 +1,342 @@
import React, { useState, useRef, useEffect, useLayoutEffect } from 'react';
import ReactDOM from 'react-dom';
import { sortByProperty } from '../utils/sort-utils';
// Default prefix for TMS username search
const TMS_USERNAME_PREFIX = 'DKI_';
// Simple cache for user search results
const userCache = new Map<string, TmsUser[]>();
interface TmsUser {
email: string;
name: string;
}
interface UsernameAutocompleteProps {
value: string;
onChange: (value: string) => void;
onUserSelect: (username: string) => void;
disabled?: boolean;
placeholder?: string;
rows?: number;
onHeightChange?: (heightPx: number) => void;
}
const UsernameAutocomplete: React.FC<UsernameAutocompleteProps> = ({
value,
onChange,
onUserSelect,
disabled = false,
placeholder = '',
rows = 5,
onHeightChange,
}) => {
// Use internal state for instant typing, sync to parent via onChange
const [internalValue, setInternalValue] = useState(value);
const [suggestions, setSuggestions] = useState<TmsUser[]>([]);
const [showSuggestions, setShowSuggestions] = useState(false);
const [selectedIndex, setSelectedIndex] = useState(-1);
const [isLoading, setIsLoading] = useState(false);
const [justSelected, setJustSelected] = useState(false);
const textareaRef = useRef<HTMLTextAreaElement>(null);
const suggestionsRef = useRef<HTMLDivElement | null>(null);
const [portalStyle, setPortalStyle] = useState<React.CSSProperties | null>(null);
const debounceTimerRef = useRef<number>(0);
// Sync external value changes to internal state
useEffect(() => {
setInternalValue(value);
}, [value]);
// report textarea height to parent so they can keep other inputs in sync
const onHeightChangeRef = useRef(onHeightChange);
onHeightChangeRef.current = onHeightChange;
useEffect(() => {
const ta = textareaRef.current;
if (!ta) return;
const obs = new ResizeObserver(entries => {
if (!entries || !entries.length) return;
const h = (entries[0].target as HTMLElement).offsetHeight;
if (typeof onHeightChangeRef.current === 'function') onHeightChangeRef.current(h);
});
obs.observe(ta);
// report initial size
if (typeof onHeightChangeRef.current === 'function') onHeightChangeRef.current(ta.offsetHeight);
return () => obs.disconnect();
}, []);
// Get current line content (word being typed)
const getCurrentWord = () => {
if (!textareaRef.current) return '';
const textarea = textareaRef.current;
const text = textarea.value;
const cursorPos = textarea.selectionStart;
const beforeCursor = text.substring(0, cursorPos);
const lines = beforeCursor.split('\n');
const currentLine = lines[lines.length - 1];
return currentLine;
};
// Get all existing usernames from textarea (excluding current line being typed)
const getExistingUsernames = (): Set<string> => {
if (!textareaRef.current) return new Set();
const textarea = textareaRef.current;
const text = textarea.value;
const cursorPos = textarea.selectionStart;
const beforeCursor = text.substring(0, cursorPos);
const afterCursor = text.substring(cursorPos);
const linesBefore = beforeCursor.split('\n');
const currentLineIndex = linesBefore.length - 1;
// Get all lines except the current one being typed
const allLines = text.split('\n');
const existingNames = new Set<string>();
allLines.forEach((line, index) => {
const trimmed = line.trim().toLowerCase();
if (trimmed && index !== currentLineIndex) {
existingNames.add(trimmed);
}
});
return existingNames;
};
// Fetch suggestions from TMS API (only DKI_ users) with cache
const fetchSuggestions = async (query: string) => {
// Don't fetch if just selected a suggestion
if (justSelected) return;
if (query.length < 1) {
setSuggestions([]);
setShowSuggestions(false);
return;
}
const cacheKey = query.toLowerCase();
// Check cache first
if (userCache.has(cacheKey)) {
const cached = userCache.get(cacheKey)!;
const existingUsernames = getExistingUsernames();
const filteredUsers = cached.filter(u => !existingUsernames.has(u.name.toLowerCase()));
setSuggestions(filteredUsers);
setShowSuggestions(filteredUsers.length > 0);
setSelectedIndex(filteredUsers.length > 0 ? 0 : -1);
return;
}
setIsLoading(true);
try {
// Search with original query
const response = await fetch('/api/user/search', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ query }),
});
const data = await response.json();
if (data.success && Array.isArray(data.data)) {
// Filter: only DKI_ users, then sort by name
const usersData = data.data as TmsUser[];
const dkiUsers = sortByProperty(
usersData.filter((user) => user.name.toUpperCase().includes(TMS_USERNAME_PREFIX)),
(user) => user.name
);
// Cache the result
userCache.set(cacheKey, dkiUsers);
// Filter out existing usernames for display
const existingUsernames = getExistingUsernames();
const filteredUsers = dkiUsers.filter(u => !existingUsernames.has(u.name.toLowerCase()));
setSuggestions(filteredUsers);
setShowSuggestions(filteredUsers.length > 0);
setSelectedIndex(filteredUsers.length > 0 ? 0 : -1);
}
} catch (error) {
console.error('Error fetching suggestions:', error);
setSuggestions([]);
} finally {
setIsLoading(false);
}
};
// Handle input change - update internal state immediately, debounce parent update
const handleInputChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
const newValue = e.target.value;
setInternalValue(newValue); // Instant update for smooth typing
// Clear previous timer
window.clearTimeout(debounceTimerRef.current);
// Debounce: update parent and fetch suggestions after 100ms
debounceTimerRef.current = window.setTimeout(() => {
onChange(newValue); // Sync to parent
const currentWord = getCurrentWord();
fetchSuggestions(currentWord);
}, 100);
};
// Cleanup debounce timer on unmount
useEffect(() => {
return () => window.clearTimeout(debounceTimerRef.current);
}, []);
// Handle keyboard navigation
const handleKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (!showSuggestions || suggestions.length === 0) {
if (e.key === 'ArrowDown' && e.ctrlKey) {
e.preventDefault();
setShowSuggestions(true);
}
return;
}
switch (e.key) {
case 'ArrowDown':
e.preventDefault();
setSelectedIndex((prev) => (prev + 1) % suggestions.length);
break;
case 'ArrowUp':
e.preventDefault();
setSelectedIndex((prev) => (prev - 1 + suggestions.length) % suggestions.length);
break;
case 'Enter':
if (selectedIndex >= 0) {
e.preventDefault();
selectSuggestion(suggestions[selectedIndex]);
}
break;
case 'Escape':
e.preventDefault();
setShowSuggestions(false);
setSelectedIndex(-1);
break;
default:
break;
}
};
// Accept a suggestion: swap the line the caret is on for the user's name,
// sync the new text to the parent, then restore focus with the caret placed
// right after the inserted name.
const selectSuggestion = (user: TmsUser) => {
  const textarea = textareaRef.current;
  if (!textarea) return;
  const caret = textarea.selectionStart;
  const head = internalValue.substring(0, caret);
  const tail = internalValue.substring(caret);
  // Replace only the last (current) line of the text before the caret.
  const headLines = head.split('\n');
  headLines[headLines.length - 1] = user.name;
  const replacedHead = headLines.join('\n');
  const nextText = replacedHead + tail;
  setInternalValue(nextText);
  onChange(nextText);
  // Close the popup and raise a guard so it doesn't instantly re-open.
  setShowSuggestions(false);
  setSelectedIndex(-1);
  setSuggestions([]);
  setJustSelected(true);
  // Defer focus/caret placement until after React commits the new value.
  setTimeout(() => {
    textarea.focus();
    textarea.setSelectionRange(replacedHead.length, replacedHead.length);
    // Lift the guard shortly after selection settles.
    setTimeout(() => setJustSelected(false), 200);
  }, 0);
};
// Dismiss the popup when the user clicks anywhere outside both the
// suggestion list and the textarea itself.
useEffect(() => {
  const handleClickOutside = (e: MouseEvent) => {
    const target = e.target as Node;
    const outsidePopup = suggestionsRef.current && !suggestionsRef.current.contains(target);
    const outsideTextarea = textareaRef.current && !textareaRef.current.contains(target);
    if (outsidePopup && outsideTextarea) {
      setShowSuggestions(false);
    }
  };
  document.addEventListener('mousedown', handleClickOutside);
  return () => document.removeEventListener('mousedown', handleClickOutside);
}, []);
// Anchor the portal dropdown to the textarea: absolute position in document
// coordinates (viewport rect + scroll offset), matching the textarea's width.
const updatePortalPosition = () => {
  const anchor = textareaRef.current;
  if (!anchor) return;
  const { bottom, left, width } = anchor.getBoundingClientRect();
  setPortalStyle({
    position: 'absolute',
    top: `${bottom + window.scrollY}px`,
    left: `${left + window.scrollX}px`,
    width: `${width}px`,
    zIndex: 99999,
  });
};
// Re-anchor the dropdown synchronously (before paint) whenever it opens or
// its contents change, so it never flashes at a stale position.
useLayoutEffect(() => {
if (showSuggestions) updatePortalPosition();
}, [showSuggestions, suggestions]);
// Keep the dropdown glued to the textarea while the page scrolls (capture
// phase also catches scrolls inside nested containers) or the window resizes.
useEffect(() => {
  const reposition = () => {
    if (showSuggestions) updatePortalPosition();
  };
  window.addEventListener('scroll', reposition, true);
  window.addEventListener('resize', reposition);
  return () => {
    window.removeEventListener('scroll', reposition, true);
    window.removeEventListener('resize', reposition);
  };
}, [showSuggestions]);
// Render the textarea plus, while open, a suggestion dropdown rendered into a
// portal on document.body (positioned via portalStyle) so it cannot be
// clipped by overflow-hidden ancestors.
return (
<div className="relative">
<textarea
ref={textareaRef}
value={internalValue}
onChange={handleInputChange}
onKeyDown={handleKeyDown}
className="bg-slate-900/50 border border-slate-700 text-slate-100 text-sm rounded-lg focus:ring-indigo-500 focus:border-indigo-500 block w-full p-2.5 transition-colors duration-200 resize-y min-h-32"
placeholder={placeholder}
disabled={disabled}
rows={rows}
/>
{/* Suggestion dropdown — portaled to <body>; portalStyle anchors it under the textarea. */}
{showSuggestions && suggestions.length > 0 && portalStyle && ReactDOM.createPortal(
<div
ref={suggestionsRef}
style={portalStyle}
>
{/* NOTE(review): inline maxHeight (240px) overrides the max-h-48 utility (192px) — confirm which limit is intended. */}
<div className="bg-slate-800 border border-slate-700 rounded-lg shadow-lg max-h-48 overflow-y-auto" style={{ maxHeight: '240px' }}>
{suggestions.map((user, index) => (
<div
key={user.email}
onClick={() => selectSuggestion(user)}
className={`px-3 py-2 cursor-pointer transition-colors ${index === selectedIndex
? 'bg-indigo-600 text-white'
: 'text-slate-200 hover:bg-slate-700'
}`}
>
<div className="font-medium">{user.name}</div>
<div className="text-xs text-slate-400">{user.email}</div>
</div>
))}
</div>
</div>, document.body
)}
{/* Lightweight loading hint shown while suggestions are being fetched. */}
{isLoading && (
<div className="absolute right-2 top-2 text-xs text-slate-400">
<span className="animate-pulse">...</span>
</div>
)}
</div>
);
};
export default UsernameAutocomplete;

14
components/XCircleIcon.tsx Executable file
View File

@ -0,0 +1,14 @@
import React, { memo } from 'react';
/**
 * Filled circle containing an "X" glyph, drawn as an inline SVG.
 * Color is inherited via `currentColor`; size and color are controlled
 * entirely through the optional `className` prop.
 * Memoized: props are a single optional string, so re-renders are cheap to skip.
 */
const XCircleIcon: React.FC<{ className?: string }> = ({ className }) => (
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
className={className}
>
<path fillRule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25Zm-1.72 6.97a.75.75 0 1 0-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 1 0 1.06 1.06L12 13.06l1.72 1.72a.75.75 0 1 0 1.06-1.06L13.06 12l1.72-1.72a.75.75 0 1 0-1.06-1.06L12 10.94l-1.72-1.72Z" clipRule="evenodd" />
</svg>
);
export default memo(XCircleIcon);

2
hooks/index.ts Executable file
View File

@ -0,0 +1,2 @@
// Custom hooks for App
export { useTabVisibility } from './use-tab-visibility';

22
hooks/use-tab-visibility.ts Executable file
View File

@ -0,0 +1,22 @@
import { useEffect, useRef } from 'react';
/**
 * Tracks whether the browser tab is currently visible.
 *
 * Returns a ref holding the current visibility flag (a ref rather than state,
 * so reading it inside polling loops never triggers re-renders).
 *
 * Used to pause polling while the tab is hidden, reducing server/CPU load.
 */
export function useTabVisibility() {
const isVisibleRef = useRef(!document.hidden);
useEffect(() => {
const handleVisibilityChange = () => {
isVisibleRef.current = !document.hidden;
console.log('[TabVisibility]', document.hidden ? 'Tab hidden - pausing polls' : 'Tab visible - resuming polls');
};
document.addEventListener('visibilitychange', handleVisibilityChange);
return () => document.removeEventListener('visibilitychange', handleVisibilityChange);
}, []);
return isVisibleRef;
}

27
index.html Executable file
View File

@ -0,0 +1,27 @@
<!DOCTYPE html>
<html lang="vi">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
<title>Cấp quyền TMS</title>
<!-- NOTE(review): cdn.tailwindcss.com is a dev/prototyping build; swap to a
     compiled Tailwind CSS bundle for production deployments. -->
<script src="https://cdn.tailwindcss.com"></script>
<!-- Import map pins React 19 from a CDN; keep these versions in sync with
     react/react-dom in package.json to avoid duplicate React instances. -->
<script type="importmap">
{
"imports": {
"react-dom/": "https://aistudiocdn.com/react-dom@^19.2.0/",
"react/": "https://aistudiocdn.com/react@^19.2.0/",
"react": "https://aistudiocdn.com/react@^19.2.0"
}
}
</script>
<link rel="stylesheet" href="/index.css">
</head>
<body class="bg-slate-900 text-slate-100">
<div id="root"></div>
<script type="module" src="/index.tsx"></script>
</body>
</html>

17
index.tsx Executable file
View File

@ -0,0 +1,17 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
import App from './App';
// global visual overrides (must be imported before mounting App)
import './src/styles/overrides.css';

// Application entry point: mount <App /> under #root in StrictMode.
const container = document.getElementById('root');
if (!container) {
  throw new Error("Could not find root element to mount to");
}
ReactDOM.createRoot(container).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>
);

4175
package-lock.json generated Executable file

File diff suppressed because it is too large Load Diff

42
package.json Executable file
View File

@ -0,0 +1,42 @@
{
"name": "tms-request_and_download",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:dev:backend-python\" \"npm:dev:backend-typescript\" \"npm:dev:frontend:delayed\" --names python,typescript,frontend --prefix name --kill-others",
"dev:backend-python": "export PYTHONIOENCODING=utf-8 && ./.venv/bin/python -m uvicorn backend.main:app --reload --port 8000 --log-level info",
"dev:backend-typescript": "tsx watch src/server.ts",
"dev:frontend": "vite",
"dev:frontend:delayed": "node -e \"setTimeout(() => {}, 5000)\" && vite",
"build": "npm run build:backend && npm run build:frontend",
"build:backend": "tsc --project src/tsconfig.json",
"build:frontend": "vite build",
"start:backend": "node dist/server.js",
"preview": "vite preview"
},
"dependencies": {
"@supabase/supabase-js": "^2.86.0",
"axios": "^1.6.2",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"mongodb": "^6.3.0",
"react": "^19.2.1",
"react-dom": "^19.2.1"
},
"devDependencies": {
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/node": "^22.14.0",
"@types/react": "^18.2.0",
"@types/react-dom": "^18.2.0",
"@vitejs/plugin-basic-ssl": "^2.1.0",
"@vitejs/plugin-react": "^5.0.0",
"concurrently": "^8.2.2",
"ts-node-dev": "^2.0.0",
"tsx": "^4.20.6",
"typescript": "~5.8.2",
"vite": "^6.2.0"
}
}

BIN
public/favicon.ico Executable file

Binary file not shown.

After

Width:  |  Height:  |  Size: 149 KiB

BIN
public/push_noti.png Executable file

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

287
src/README.md Executable file
View File

@ -0,0 +1,287 @@
# TMS Permission Backend - Setup Guide
## 📁 Cấu trúc Project (Monorepo)
Project sử dụng **single package.json** ở root để quản lý tất cả dependencies:
```
dkiDownload/
├── package.json # ← Single source of truth
├── tsconfig.json # ← TypeScript config chung
├── .env.example # ← Environment variables template
├── .env # ← Your actual env vars (gitignored)
├── src/ # Backend TypeScript
│ ├── server.ts # Express server chính
│ ├── types/
│ │ └── index.ts # TypeScript definitions
│ ├── utils/
│ │ └── logger.ts # Logger utility
│ ├── services/
│ │ ├── auth.service.ts # TMS authentication + token cache
│ │ ├── mongodb.service.ts # MongoDB connection + TMS ID lookup
│ │ ├── tms-api.service.ts # TMS API operations + search filter
│ │ ├── supabase.service.ts # Submissions CRUD + autocomplete
│ │ └── worker.service.ts # Background queue polling
│ └── controllers/
│ └── permission.controller.ts # Core automated permission logic
├── components/ # Frontend React components
├── App.tsx # Frontend main app
└── index.tsx # Frontend entry point
```
## 🚀 Setup & Chạy (Simplified!)
### 1. Cài đặt dependencies (1 lần duy nhất!)
```bash
npm install
```
### 2. Tạo file .env ở root
```bash
cp .env.example .env
```
**Nội dung file .env:**
```env
# TMS API Configuration
TMS_API_BASE_URL=https://api.tms.example.com
TMS_USERNAME=your_username
TMS_PASSWORD=your_password
# MongoDB Configuration
MONGODB_URI=mongodb://localhost:27017/tms_database
MONGODB_DB_NAME=tms_database
# Supabase Configuration
SUPABASE_URL=https://your-project.supabase.co
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key
# Server Configuration
TMS_PORT=4000
# Worker Configuration (optional)
WORKER_POLL_INTERVAL=3000
```
### 3. Khởi động MongoDB
Đảm bảo MongoDB đang chạy với collection `titles_data`:
```bash
# Windows
mongod --dbpath "C:\data\db"
# hoặc dùng MongoDB service
net start MongoDB
```
### 4. Chạy Full Stack
**Option A: Chạy cả Backend + Frontend cùng lúc (Recommended)**
```bash
npm run dev
```
Backend: **http://localhost:4000**
Frontend: **http://localhost:3000**
**Option B: Chạy riêng từng service**
```bash
# Terminal 1: Backend Python (FastAPI)
npm run dev:backend-python
# Terminal 2: Backend TypeScript (Express)
npm run dev:backend-typescript
# Terminal 3: Frontend only
npm run dev:frontend
```
**Production:**
```bash
npm run build # Build cả backend + frontend
npm run start:backend # Start backend
npm run preview # Preview frontend build
```
### 5. Kiểm tra backend
Mở trình duyệt và truy cập:
```
http://localhost:4000/health
```
Nếu thành công, bạn sẽ thấy:
```json
{
"success": true,
"data": {
"status": "healthy",
"timestamp": "2025-11-26T...",
"service": "TMS Permission Backend"
}
}
```
## 📡 API Endpoints
| Method | Endpoint | Mô tả |
| ------ | ---------------------------- | ---------------------------------- |
| POST | `/api/submit` | Tạo submission mới |
| GET | `/api/submissions/:id` | Lấy status + results |
| GET | `/api/submissions?limit=30` | Lịch sử submissions |
| DELETE | `/api/submissions/:id` | Xóa submission |
| POST | `/api/submissions/:id/retry` | Retry submission failed |
| GET | `/api/usernames` | Danh sách usernames (autocomplete) |
| POST | `/api/user/search` | Tìm user TMS (có filter project) |
| GET | `/health` | Health check |
## 🎯 Features Automated Logic
### 1. Username Suffix Detection
- `(DM)` hoặc `(DM/QC)` → Grant **RW** permission
- `(L)` → Grant **R** permission
- Username khác → Skip (không xử lý)
### 2. Fixed Member
- **Luôn luôn** thêm `DKI_공통_Tool(DM/QC)` với **RW** permission đầu tiên
- Sau đó mới xử lý các usernames khác
### 3. Final Flag Support
- Nếu input có keyword `final` (ví dụ: `1000 de final`)
- Sẽ grant permission cho subset **FINAL** thay vì **ALL**
### 4. Search User với Project Filter
- Frontend có thể gọi `/api/user/search` với `excludeProjectId`
- Backend sẽ tự động loại bỏ users đã có trong project đó
## 🧪 Testing Workflow
### Test 1: Create Submission
```bash
curl -X POST http://localhost:4000/api/submit \
-H "Content-Type: application/json" \
-d '{
"submission_type": "tms_permission",
"username_list": ["user1(DM)", "user2(L)"],
"ge_input": "1000 de\n1001 us final"
}'
```
### Test 2: Check Status
```bash
curl http://localhost:4000/api/submissions/tms_1732xxxxx_xxxxx
```
### Test 3: Search User
```bash
curl -X POST http://localhost:4000/api/user/search \
-H "Content-Type: application/json" \
-d '{
"query": "John",
"excludeProjectId": "12345"
}'
```
## 🔧 Frontend Integration
Frontend đã được cập nhật để gọi API qua Vite proxy:
**vite.config.ts:**
```typescript
proxy: {
'/api': {
target: 'http://127.0.0.1:4000', // ← Đã update từ 8000
changeOrigin: true,
secure: false,
}
}
```
**App.tsx:**
```typescript
// ✅ Updated - Dùng relative path
const response = await fetch('/api/submit', { ... });
// ❌ Old - Hardcode localhost:4000
// const response = await fetch('http://localhost:4000/api/submit', { ... });
```
## 🏃 Quick Start
```bash
# 1. Install dependencies (1 lần duy nhất)
npm install
# 2. Setup environment
cp .env.example .env
# Edit .env với credentials thật
# 3. Start MongoDB
net start MongoDB
# 4. Run full stack
npm run dev
# 5. Open browser
# Frontend: http://localhost:3000
# Backend: http://localhost:4000
```
## 📝 Logs
Backend sẽ log ra console:
- `[INFO]` - Thông tin general
- `[DEBUG]` - Chi tiết debug
- `[WARN]` - Cảnh báo
- `[ERROR]` - Lỗi
Example:
```
[2025-11-26T10:30:45.123Z] [INFO] 🚀 TMS Backend server running on port 4000
[2025-11-26T10:31:20.456Z] [INFO] ✅ Submission created {"submissionId":"tms_...","usernames":2,"ges":2}
[2025-11-26T10:31:23.789Z] [INFO] 📝 Processing submission tms_...
```
## 🧹 Next Steps
1. ✅ Backend src/ hoàn thành
2. ✅ Frontend đã update API calls
3. ✅ Vite proxy đã config
4. ⏳ **TODO:** Test toàn bộ flow
5. ⏳ **TODO:** Delete folder `backend-tms/` sau khi test thành công
## 🐛 Troubleshooting
### Backend không start
- Kiểm tra MongoDB đang chạy
- Kiểm tra file `.env` đã tạo và đúng format
- Kiểm tra port 4000 chưa bị chiếm
### API trả về lỗi 401 (Unauthorized)
- Kiểm tra TMS credentials trong `.env`
- Token có thể đã expired, restart backend để refresh
### Worker không process queue
- Kiểm tra Supabase connection
- Kiểm tra MongoDB connection
- Xem logs để debug chi tiết

87
src/api/submissions.ts Executable file
View File

@ -0,0 +1,87 @@
/**
 * TMS Permission API Client
 * Updated for src/ backend (TypeScript)
 *
 * Thin fetch wrappers over the Express backend's /api/* endpoints. Every
 * endpoint responds with `{ success: boolean, data?: T, error?: string }`;
 * each helper unwraps `data` and throws on HTTP or API-level failure.
 */
/** A permission submission record as returned by the backend. */
export type Submission = {
submission_id: string;
timestamp: string;
status: string;
input: { username_list: string[]; ge_input: string };
// NOTE(review): per-item result objects — schema not visible in this file;
// consider typing more precisely than any[] once the backend shape is confirmed.
results: any[];
created_at?: string;
updated_at?: string;
};
/**
 * List completed/failed submissions from the backend.
 * @param limit maximum number of rows to return (default 50)
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function fetchSubmissions(limit = 50) {
  const response = await fetch(`/api/submissions?limit=${limit}`);
  if (!response.ok) throw new Error(`Fetch submissions failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data as Submission[];
}
/**
 * Create a new TMS-permission submission.
 * @param payload submission type, username list, and raw GE input text
 * @returns the created submission record from the backend
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function createSubmission(payload: {
  submission_type: 'tms_permission';
  username_list: string[];
  ge_input: string;
}) {
  const response = await fetch('/api/submit', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });
  if (!response.ok) throw new Error(`Create submission failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data;
}
/**
 * Fetch a single submission (status + results) by id.
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function getSubmission(submission_id: string) {
  const url = `/api/submissions/${encodeURIComponent(submission_id)}`;
  const response = await fetch(url);
  if (!response.ok) throw new Error(`Get submission failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data;
}
/**
 * Delete a submission by id.
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function deleteSubmission(submission_id: string) {
  const url = `/api/submissions/${encodeURIComponent(submission_id)}`;
  const response = await fetch(url, { method: 'DELETE' });
  if (!response.ok) throw new Error(`Delete failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data;
}
/**
 * Re-queue a failed submission for processing.
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function retrySubmission(submission_id: string) {
  const url = `/api/submissions/${encodeURIComponent(submission_id)}/retry`;
  const response = await fetch(url, { method: 'POST' });
  if (!response.ok) throw new Error(`Retry failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data;
}
/**
 * Search TMS users by free-text query.
 * @param query name fragment to search for
 * @param excludeProjectId when set, the backend filters out users already in that project
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function searchUser(query: string, excludeProjectId?: string) {
  const response = await fetch('/api/user/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query, excludeProjectId }),
  });
  if (!response.ok) throw new Error(`Search user failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data;
}
/**
 * List all known usernames (used for autocomplete).
 * @throws Error on HTTP failure or an API-level `success: false` response
 */
export async function getUsernames() {
  const response = await fetch('/api/usernames');
  if (!response.ok) throw new Error(`Get usernames failed: ${response.statusText}`);
  const body = await response.json();
  if (!body.success) throw new Error(body.error || 'API returned error');
  return body.data as string[];
}

Some files were not shown because too many files have changed in this diff Show More