Compare commits
365 Commits
52  .github/workflows/release.yml (vendored, deleted)
@@ -1,52 +0,0 @@
name: Build and Release

permissions:
  contents: write

on:
  push:
    tags:
      - "v*"

jobs:
  build:
    runs-on: windows-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: "22"
          cache: "npm"

      - name: Install dependencies
        run: npm ci

      - name: Apply tag version
        shell: pwsh
        run: |
          $version = "${{ github.ref_name }}".TrimStart('v')
          node scripts/set_version_node.mjs $version

      - name: Build app
        run: npm run build

      - name: Build Windows artifacts
        run: npm run release:win

      - name: Pack portable zip
        shell: pwsh
        run: |
          Compress-Archive -Path "release\win-unpacked\*" -DestinationPath "Real-Debrid-Downloader-win64.zip" -Force

      - name: Publish GitHub Release
        uses: softprops/action-gh-release@v2
        with:
          files: |
            Real-Debrid-Downloader-win64.zip
            release/*.exe
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
14  .gitignore (vendored)
@@ -17,9 +17,23 @@ rd_download_manifest.json
_update_staging/
apply_update.cmd

.claude/
.github/
docs/plans/
CHANGELOG.md

node_modules/
.vite/
coverage/
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Forgejo deployment runtime files
deploy/forgejo/.env
deploy/forgejo/forgejo/
deploy/forgejo/postgres/
deploy/forgejo/caddy/data/
deploy/forgejo/caddy/config/
deploy/forgejo/caddy/logs/
deploy/forgejo/backups/
23  CLAUDE.md (new file)
@@ -0,0 +1,23 @@
## Release + Update Source (Important)

- The primary platform is `https://git.24-music.de`
- Default repo: `Administrator/real-debrid-downloader`
- No longer release primarily via Codeberg/GitHub

## Releasing

1. Set the token:
   - PowerShell: `$env:GITEA_TOKEN="<token>"`
2. Run the release:
   - `npm run release:gitea -- <version> [notes]`

The script:
- bumps `package.json`
- builds the Windows artifacts
- pushes `main` + tag
- creates the release on `git.24-music.de`
- uploads the assets

## Auto-Update

- The updater currently uses `git.24-music.de` as its default source
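For reference, creating the release and uploading an asset on a Gitea/Forgejo instance goes through the standard REST API. The sketch below is illustrative only: the endpoints are the documented Gitea API, but `BASE`, `OWNER`, `REPO`, and the helper names are placeholders and may not match what `scripts/release_gitea.mjs` actually does.

```js
// Minimal sketch of "create release + upload asset" against the Gitea API
// (Node 18+, global fetch/FormData/Blob). All identifiers here are assumptions.
import { readFile } from "node:fs/promises";
import { basename } from "node:path";

const BASE = "https://git.24-music.de";
const OWNER = "Administrator";
const REPO = "real-debrid-downloader";
const TOKEN = process.env.GITEA_TOKEN;

async function createRelease(version, notes) {
  const res = await fetch(`${BASE}/api/v1/repos/${OWNER}/${REPO}/releases`, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `token ${TOKEN}` },
    body: JSON.stringify({ tag_name: `v${version}`, name: `v${version}`, body: notes })
  });
  if (!res.ok) throw new Error(`createRelease failed: HTTP ${res.status}`);
  return (await res.json()).id;
}

async function uploadAsset(releaseId, filePath) {
  const form = new FormData();
  form.append("attachment", new Blob([await readFile(filePath)]), basename(filePath));
  const res = await fetch(
    `${BASE}/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets`,
    { method: "POST", headers: { Authorization: `token ${TOKEN}` }, body: form }
  );
  if (!res.ok) throw new Error(`uploadAsset failed: HTTP ${res.status}`);
}
```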
218  README.md
@@ -1,83 +1,203 @@
# Real-Debrid Download Manager (Node/Electron)
# Multi Debrid Downloader

Desktop-App auf **Node.js + Electron + React + TypeScript** mit JDownloader-Style Workflow, optimiert fuer Real-Debrid.
Desktop downloader with fast queue management, automatic extraction, and robust error handling.

## Highlights





- Modernes, dunkles UI mit Header-Steuerung (Start, Pause, Stop, Speed, ETA)
- Tabs: **Linksammler**, **Downloads**, **Settings**
- Paketbasierte Queue mit Datei-Status, Progress, Speed, Retries
- Paket-Abbruch waehrend laufender Downloads inklusive sicherem Archiv-Cleanup
- `.dlc` Import (Dateidialog und Drag-and-Drop)
- Session-Persistenz (robustes JSON-State-Management)
- Auto-Resume beim Start (optional)
- Reconnect-Basislogik (429/503, Wartefenster, resumable priorisiert)
- Integritaetscheck (SFV/CRC32/MD5/SHA1) nach Download
- Auto-Retry bei Integritaetsfehlern
- Cleanup-Trigger fuer fertige Tasks:
  - Nie
  - Sofort
  - Beim App-Start
  - Sobald Paket fertig ist
## Why this tool?

## Voraussetzungen
- Familiar download-manager workflow: collect links, start, pause, resume, and finish cleanly.
- Multiple debrid providers in one app, including automatic fallback.
- Built for stability with large queues: session persistence, reconnect handling, resume support, and integrity verification.

- Node.js 20+ (empfohlen 22+)
- Windows 10/11 (fuer Release-Build)
- Optional: 7-Zip/UnRAR fuer RAR/7Z Entpacken
## Core features

### Queue and download engine

- Package-based queue with file status, progress, ETA, speed, and retry counters.
- Start, pause, stop, and cancel for both single items and full packages.
- Multi-select via Ctrl+Click for batch operations on packages and items.
- Duplicate handling when adding links: keep, skip, or overwrite.
- Session recovery after restart, including optional auto-resume.
- Circuit breaker with escalating backoff cooldowns to handle provider outages gracefully.
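The escalating-backoff idea behind that circuit breaker can be pictured with a small sketch. This is an illustrative model only (class and field names are made up here), not the app's actual implementation in `src/main`:

```js
// Illustrative circuit breaker: consecutive provider failures open the breaker
// for an escalating cooldown; a success closes it again.
class ProviderCircuitBreaker {
  constructor(cooldownsMs = [30_000, 60_000, 300_000, 900_000]) {
    this.cooldownsMs = cooldownsMs; // escalating cooldown steps
    this.failureCount = 0;
    this.openUntil = 0;             // timestamp until which requests are blocked
  }

  canRequest(now = Date.now()) {
    return now >= this.openUntil;
  }

  recordSuccess() {
    this.failureCount = 0;
    this.openUntil = 0;
  }

  recordFailure(now = Date.now()) {
    const step = Math.min(this.failureCount, this.cooldownsMs.length - 1);
    this.openUntil = now + this.cooldownsMs[step];
    this.failureCount += 1;
  }
}
```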
### Debrid and link handling

- Supported providers: `realdebrid`, `megadebrid`, `bestdebrid`, `alldebrid`.
- Configurable provider order: primary + secondary + tertiary.
- Optional automatic fallback to alternative providers on failures.
- `.dlc` import via file picker and drag-and-drop.
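The provider-order and fallback behavior amounts to trying each configured provider in sequence until one can unrestrict the link. A minimal sketch under that assumption (the function names are placeholders, not the actual DebridService API):

```js
// Try providers in configured order; fall back to the next one on failure.
// unrestrictWith() stands in for the per-provider API call.
async function unrestrictWithFallback(link, providers, unrestrictWith) {
  const errors = [];
  for (const provider of providers) { // e.g. ["realdebrid", "alldebrid", "megadebrid"]
    try {
      return await unrestrictWith(provider, link);
    } catch (error) {
      errors.push(`${provider}: ${String(error)}`);
    }
  }
  throw new Error(`All providers failed for ${link}: ${errors.join(" | ")}`);
}
```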
### Extraction, cleanup, and quality

- JVM-based extraction backend using SevenZipJBinding + Zip4j (supports RAR, 7z, ZIP, and more).
- Automatic fallback to legacy UnRAR/7z CLI tools when JVM is unavailable.
- Auto-extract with separate target directory and conflict strategies.
- Hybrid extraction: simultaneous downloading and extracting with smart I/O priority throttling.
- Nested extraction: archives within archives are automatically extracted (one level deep).
- Pre-extraction disk space validation to prevent incomplete extracts.
- Right-click "Extract now" on any package with at least one completed item.
- Post-download integrity checks (`CRC32`, `MD5`, `SHA1`) with auto-retry on failures.
- Completed-item cleanup policy: `never`, `immediate`, `on_start`, `package_done`.
- Optional removal of link artifacts and sample files after extraction.
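For the integrity checks listed above, MD5 and SHA1 can be computed with Node's built-in `crypto` module while streaming the finished file; CRC32 needs a separate implementation or library. A minimal sketch, not the app's actual verifier:

```js
import { createHash } from "node:crypto";
import { createReadStream } from "node:fs";

// Stream a downloaded file once and compute MD5 + SHA1 digests.
function hashFile(filePath) {
  return new Promise((resolve, reject) => {
    const md5 = createHash("md5");
    const sha1 = createHash("sha1");
    createReadStream(filePath)
      .on("data", (chunk) => { md5.update(chunk); sha1.update(chunk); })
      .on("error", reject)
      .on("end", () => resolve({ md5: md5.digest("hex"), sha1: sha1.digest("hex") }));
  });
}

// Hypothetical usage: compare against an expected checksum, retry on mismatch.
// const { sha1 } = await hashFile("C:/Downloads/pkg/file.part1.rar");
// if (expectedSha1 && sha1 !== expectedSha1) retryDownload(item);
```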
### Auto-rename

- Automatic renaming of extracted files based on series/episode patterns.
- Multi-episode token parsing for batch renames.
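A typical way to detect series/episode tokens is a regex over names like `Show.S08E01.mkv`, with a second capture for multi-episode spans such as `S01E01-E03`. The pattern below only illustrates the idea; it is not the app's actual rule set:

```js
// Match "S08E01" style tokens, optionally spanning multiple episodes ("S01E01-E03").
const EPISODE_TOKEN = /S(\d{1,2})E(\d{1,3})(?:-E?(\d{1,3}))?/i;

function parseEpisodeToken(fileName) {
  const m = EPISODE_TOKEN.exec(fileName);
  if (!m) return null;
  return {
    season: Number(m[1]),
    firstEpisode: Number(m[2]),
    lastEpisode: m[3] ? Number(m[3]) : Number(m[2])
  };
}

// parseEpisodeToken("scnb-rrw7-S08E01.mkv") -> { season: 8, firstEpisode: 1, lastEpisode: 1 }
```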
### UI and progress

- Visual progress bars with percentage overlay for packages and individual items.
- Real-time bandwidth chart showing current download speeds.
- Persistent download counters: all-time totals and per-session statistics.
- Download history for completed packages.
- Vertical sidebar with organized settings tabs.
- Hoster display showing both the original source and the debrid provider used.

### Convenience and automation

- Clipboard watcher for automatic link detection.
- Minimize-to-tray with tray menu controls.
- Speed limits globally or per download.
- Bandwidth schedules for time-based speed profiles.
- Built-in auto-updater via `git.24-music.de` Releases.
- Long path support (>260 characters) on Windows.
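A clipboard watcher like the one listed above can be done by polling Electron's `clipboard` module from the main process and matching hoster URLs. A rough sketch under those assumptions (interval and regex are illustrative, not the app's real values):

```js
import { clipboard } from "electron";

// Poll the system clipboard and report newly seen http(s) links.
const LINK_PATTERN = /https?:\/\/\S+/g;
let lastClipboardText = "";

function startClipboardWatcher(onLinks, intervalMs = 1000) {
  return setInterval(() => {
    const text = clipboard.readText();
    if (!text || text === lastClipboardText) return;
    lastClipboardText = text;
    const links = text.match(LINK_PATTERN) || [];
    if (links.length > 0) onLinks(links); // e.g. hand them to the link collector tab
  }, intervalMs);
}
```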
## Installation

### Option A: prebuilt releases (recommended)

1. Download a release from the `git.24-music.de` Releases page.
2. Run the installer or portable build.
3. Add your debrid tokens in Settings.

Releases: `https://git.24-music.de/Administrator/real-debrid-downloader/releases`

### Option B: build from source

Requirements:

- Node.js `20+` (recommended `22+`)
- npm
- Windows `10/11` (for packaging and regular desktop use)
- Java Runtime `8+` (for SevenZipJBinding sidecar backend)
- Optional fallback: 7-Zip/UnRAR if you force legacy extraction mode

```bash
npm install
```

## Entwicklung

```bash
npm run dev
```

## Build
## NPM scripts

| Command | Description |
| --- | --- |
| `npm run dev` | Starts main process, renderer, and Electron in dev mode |
| `npm run build` | Builds main and renderer bundles |
| `npm run start` | Starts the app locally in production mode |
| `npm test` | Runs Vitest unit tests |
| `npm run self-check` | Runs integrated end-to-end self-checks |
| `npm run release:win` | Creates Windows installer and portable build |
| `npm run release:gitea -- <version> [notes]` | One-command version bump + build + tag + release upload to `git.24-music.de` |
| `npm run release:codeberg -- <version> [notes]` | Legacy path for old Codeberg workflow |

### One-command git.24-music release

```bash
npm run build
npm run release:gitea -- 1.6.31 "- Maintenance update"
```

Danach liegen die Artefakte in:
This command will:

- `build/main`
- `build/renderer`
1. Bump `package.json` version.
2. Build setup/portable artifacts (`npm run release:win`).
3. Commit and push `main` to your `git.24-music.de` remote.
4. Create and push tag `v<version>`.
5. Create/update the Gitea release and upload required assets.

## Start (Production lokal)
Required once before release:

```bash
npm run start
git remote add gitea https://git.24-music.de/<user>/<repo>.git
```

## Tests
PowerShell token setup:

```bash
npm test
npm run self-check
```
```powershell
$env:GITEA_TOKEN="<dein-token>"
```

- `npm test`: Unit-Tests fuer Parser/Cleanup/Integrity
- `npm run self-check`: End-to-End-Checks mit lokalem Mock-Server (Queue, Pause/Resume, Reconnect, Paket-Cancel)
## Typical workflow

## Projektstruktur
1. Add provider tokens in Settings.
2. Paste/import links or `.dlc` containers.
3. Optionally set package names, target folders, extraction, and cleanup rules.
4. Start the queue and monitor progress in the Downloads tab.
5. Review integrity results and summary after completion.

- `src/main`: Electron Main Process + Download/Queue Logik
- `src/preload`: sichere IPC Bridge
- `src/renderer`: React UI
- `src/shared`: gemeinsame Typen und IPC-Channel
- `tests`: Unit- und Self-Check Tests
## Project structure

## Hinweise
- `src/main` - Electron main process, queue/download/provider logic
- `src/preload` - secure IPC bridge between main and renderer
- `src/renderer` - React UI
- `src/shared` - shared types and IPC contracts
- `tests` - unit tests and self-check tests
- `resources/extractor-jvm` - SevenZipJBinding + Zip4j sidecar JAR and native libraries

## Data and logs

The app stores runtime files in Electron's `userData` directory, including:

- Runtime-Dateien liegen im Electron `userData` Verzeichnis:
- `rd_downloader_config.json`
- `rd_session_state.json`
- `rd_downloader.log`

- Die bisherige Python-Datei bleibt vorerst als Legacy-Referenz im Repo, die aktive App ist jetzt Node/Electron.
## Troubleshooting

- Download does not start: verify token and selected provider in Settings.
- Extraction fails: check archive passwords and native extractor installation (7-Zip/WinRAR). Optional JVM extractor can be forced with `RD_EXTRACT_BACKEND=jvm`.
- Very slow downloads: check active speed limit and bandwidth schedules.
- Unexpected interruptions: enable reconnect and fallback providers.
- Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically.

## Changelog

Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases).

### v1.6.61 (2026-03-05)

- Fixed leftover empty package folders in `Downloader Unfertig` after successful extraction.
- Resume marker files (`.rd_extract_progress*.json`) are now treated as ignorable for empty-folder cleanup.
- Deferred post-processing now clears resume markers before running empty-directory removal.

### v1.6.60 (2026-03-05)

- Added package-scoped password cache for extraction: once the first archive in a package is solved, following archives in the same package reuse that password first.
- Kept fallback behavior intact (`""` and other candidates are still tested), but moved empty-password probing behind the learned password to reduce per-archive delays.
- Added cache invalidation on real `wrong_password` failures so stale passwords are automatically discarded.
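As an illustration of the v1.6.60 behavior, a package-scoped password cache can be modeled roughly like this; the map key, function names, and the `wrong_password` tag follow the changelog wording but are otherwise assumptions, not the actual extractor code:

```js
// Remember the first password that worked per package and try it first for the
// remaining archives; drop it again when it genuinely stops working.
const packagePasswords = new Map(); // packageId -> learned password

function candidatePasswords(packageId, configured) {
  const learned = packagePasswords.get(packageId);
  // learned password first, then the empty password, then configured candidates
  return [...(learned ? [learned] : []), "", ...configured];
}

function onExtractSuccess(packageId, usedPassword) {
  if (usedPassword) packagePasswords.set(packageId, usedPassword);
}

function onExtractFailure(packageId, reason, triedPassword) {
  // invalidate only on a real wrong_password result for the cached entry
  if (reason === "wrong_password" && packagePasswords.get(packageId) === triedPassword) {
    packagePasswords.delete(packageId);
  }
}
```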
### v1.6.59 (2026-03-05)

- Switched default extraction backend to native tools (`legacy`) for more stable archive-to-archive flow.
- Prioritized 7-Zip as primary native extractor, with WinRAR/UnRAR as fallback.
- JVM extractor remains available as opt-in via `RD_EXTRACT_BACKEND=jvm`.

### v1.6.58 (2026-03-05)

- Fixed extraction progress oscillation (`1% -> 100% -> 1%` loops) during password retries.
- Kept strict archive completion logic, but normalized in-progress archive percent to avoid false visual done states before real completion.

### v1.6.57 (2026-03-05)

- Fixed extraction flow so archives are marked done only on real completion, not on temporary `100%` progress spikes.
- Improved password handling: after the first successful archive, the discovered password is prioritized for subsequent archives.
- Fixed progress parsing for password retries (reset/restart handling), reducing visible and real gaps between archive extractions.

## License

MIT - see `LICENSE`.

Binary file not shown.
Before: Size 121 KiB | After: Size 279 KiB
@@ -1,16 +0,0 @@
param(
    [string]$Version = ""
)

python -m pip install --upgrade pip
pip install -r requirements.txt
pip install pyinstaller pillow

if ($Version -ne "") {
    python scripts/set_version.py $Version
}

python scripts/prepare_icon.py
pyinstaller --noconfirm --windowed --onedir --name "Real-Debrid-Downloader" --icon "assets/app_icon.ico" real_debrid_downloader_gui.py

Write-Host "Build fertig: dist/Real-Debrid-Downloader/Real-Debrid-Downloader.exe"
@@ -25,11 +25,11 @@ AppPublisher=Sucukdeluxe
DefaultDirName={autopf}\{#MyAppName}
DefaultGroupName={#MyAppName}
OutputDir={#MyOutputDir}
OutputBaseFilename=Real-Debrid-Downloader-Setup-{#MyAppVersion}
OutputBaseFilename=Real-Debrid-Downloader Setup {#MyAppVersion}
Compression=lzma
SolidCompression=yes
WizardStyle=modern
PrivilegesRequired=admin
PrivilegesRequired=lowest
ArchitecturesInstallIn64BitMode=x64compatible
UninstallDisplayIcon={app}\{#MyAppExeName}
SetupIconFile={#MyIconFile}
@@ -39,8 +39,8 @@ Name: "german"; MessagesFile: "compiler:Languages\German.isl"
Name: "english"; MessagesFile: "compiler:Default.isl"

[Files]
Source: "{#MySourceDir}\\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
Source: "{#MyIconFile}"; DestDir: "{app}"; DestName: "app_icon.ico"; Flags: ignoreversion
Source: "{#MySourceDir}\\*"; DestDir: "{app}"; Flags: recursesubdirs createallsubdirs
Source: "{#MyIconFile}"; DestDir: "{app}"; DestName: "app_icon.ico"

[Icons]
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app_icon.ico"
161  package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "real-debrid-downloader",
|
||||
"version": "1.1.15",
|
||||
"version": "1.5.66",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "real-debrid-downloader",
|
||||
"version": "1.1.15",
|
||||
"version": "1.5.66",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"adm-zip": "^0.5.16",
|
||||
@ -25,6 +25,7 @@
|
||||
"cross-env": "^7.0.3",
|
||||
"electron": "^31.7.7",
|
||||
"electron-builder": "^25.1.8",
|
||||
"rcedit": "^5.0.2",
|
||||
"tsup": "^8.3.6",
|
||||
"tsx": "^4.19.2",
|
||||
"typescript": "^5.7.3",
|
||||
@ -64,7 +65,6 @@
|
||||
"integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@babel/code-frame": "^7.29.0",
|
||||
"@babel/generator": "^7.29.0",
|
||||
@ -2043,7 +2043,6 @@
|
||||
"integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/prop-types": "*",
|
||||
"csstype": "^3.2.2"
|
||||
@ -2305,7 +2304,6 @@
|
||||
"integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"fast-deep-equal": "^3.1.1",
|
||||
"fast-json-stable-stringify": "^2.0.0",
|
||||
@ -2479,6 +2477,7 @@
|
||||
"integrity": "sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"archiver-utils": "^2.1.0",
|
||||
"async": "^3.2.4",
|
||||
@ -2498,6 +2497,7 @@
|
||||
"integrity": "sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"glob": "^7.1.4",
|
||||
"graceful-fs": "^4.2.0",
|
||||
@ -2520,6 +2520,7 @@
|
||||
"integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"core-util-is": "~1.0.0",
|
||||
"inherits": "~2.0.3",
|
||||
@ -2535,7 +2536,8 @@
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
||||
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/archiver-utils/node_modules/string_decoder": {
|
||||
"version": "1.1.1",
|
||||
@ -2543,6 +2545,7 @@
|
||||
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.1.0"
|
||||
}
|
||||
@ -2762,7 +2765,6 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"baseline-browser-mapping": "^2.9.0",
|
||||
"caniuse-lite": "^1.0.30001759",
|
||||
@ -3344,6 +3346,7 @@
|
||||
"integrity": "sha512-D3uMHtGc/fcO1Gt1/L7i1e33VOvD4A9hfQLP+6ewd+BvG/gQ84Yh4oftEhAdjSMgBgwGL+jsppT7JYNpo6MHHg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"buffer-crc32": "^0.2.13",
|
||||
"crc32-stream": "^4.0.2",
|
||||
@ -3517,6 +3520,7 @@
|
||||
"integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"crc32": "bin/crc32.njs"
|
||||
},
|
||||
@ -3530,6 +3534,7 @@
|
||||
"integrity": "sha512-NT7w2JVU7DFroFdYkeq8cywxrgjPHWkdX1wjpRQXPX5Asews3tA+Ght6lddQO5Mkumffp3X7GEqku3epj2toIw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"crc-32": "^1.2.0",
|
||||
"readable-stream": "^3.4.0"
|
||||
@ -3572,6 +3577,54 @@
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/cross-spawn-windows-exe": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn-windows-exe/-/cross-spawn-windows-exe-1.2.0.tgz",
|
||||
"integrity": "sha512-mkLtJJcYbDCxEG7Js6eUnUNndWjyUZwJ3H7bErmmtOYU/Zb99DyUkpamuIZE0b3bhmJyZ7D90uS6f+CGxRRjOw==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "individual",
|
||||
"url": "https://github.com/sponsors/malept"
|
||||
},
|
||||
{
|
||||
"type": "tidelift",
|
||||
"url": "https://tidelift.com/subscription/pkg/npm-cross-spawn-windows-exe?utm_medium=referral&utm_source=npm_fund"
|
||||
}
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@malept/cross-spawn-promise": "^1.1.0",
|
||||
"is-wsl": "^2.2.0",
|
||||
"which": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/cross-spawn-windows-exe/node_modules/@malept/cross-spawn-promise": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@malept/cross-spawn-promise/-/cross-spawn-promise-1.1.1.tgz",
|
||||
"integrity": "sha512-RTBGWL5FWQcg9orDOCcp4LvItNzUPcyEU9bwaeJX0rJ1IQxzucC48Y0/sQLp/g6t99IQgAlGIaesJS+gTn7tVQ==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "individual",
|
||||
"url": "https://github.com/sponsors/malept"
|
||||
},
|
||||
{
|
||||
"type": "tidelift",
|
||||
"url": "https://tidelift.com/subscription/pkg/npm-.malept-cross-spawn-promise?utm_medium=referral&utm_source=npm_fund"
|
||||
}
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"cross-spawn": "^7.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/csstype": {
|
||||
"version": "3.2.3",
|
||||
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz",
|
||||
@ -3780,7 +3833,6 @@
|
||||
"integrity": "sha512-NoXo6Liy2heSklTI5OIZbCgXC1RzrDQsZkeEwXhdOro3FT1VBOvbubvscdPnjVuQ4AMwwv61oaH96AbiYg9EnQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"app-builder-lib": "25.1.8",
|
||||
"builder-util": "25.1.7",
|
||||
@ -3976,6 +4028,7 @@
|
||||
"integrity": "sha512-2ntkJ+9+0GFP6nAISiMabKt6eqBB0kX1QqHNWFWAXgi0VULKGisM46luRFpIBiU3u/TDmhZMM8tzvo2Abn3ayg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"app-builder-lib": "25.1.8",
|
||||
"archiver": "^5.3.1",
|
||||
@ -3989,6 +4042,7 @@
|
||||
"integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^6.0.1",
|
||||
@ -4004,6 +4058,7 @@
|
||||
"integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
@ -4017,6 +4072,7 @@
|
||||
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 10.0.0"
|
||||
}
|
||||
@ -4253,7 +4309,6 @@
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"esbuild": "bin/esbuild"
|
||||
},
|
||||
@ -4539,7 +4594,8 @@
|
||||
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
|
||||
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/fs-extra": {
|
||||
"version": "8.1.0",
|
||||
@ -5146,6 +5202,22 @@
|
||||
"is-ci": "bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/is-docker": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz",
|
||||
"integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"is-docker": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/is-fullwidth-code-point": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
||||
@ -5186,12 +5258,26 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/is-wsl": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz",
|
||||
"integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-docker": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/isarray": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
|
||||
"integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/isbinaryfile": {
|
||||
"version": "5.0.7",
|
||||
@ -5376,6 +5462,7 @@
|
||||
"integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"readable-stream": "^2.0.5"
|
||||
},
|
||||
@ -5389,6 +5476,7 @@
|
||||
"integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"core-util-is": "~1.0.0",
|
||||
"inherits": "~2.0.3",
|
||||
@ -5404,7 +5492,8 @@
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
||||
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/lazystream/node_modules/string_decoder": {
|
||||
"version": "1.1.1",
|
||||
@ -5412,6 +5501,7 @@
|
||||
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.1.0"
|
||||
}
|
||||
@ -5458,35 +5548,40 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/lodash.difference": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz",
|
||||
"integrity": "sha512-dS2j+W26TQ7taQBGN8Lbbq04ssV3emRw4NY58WErlTO29pIqS0HmoT5aJ9+TUQ1N3G+JOZSji4eugsWwGp9yPA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/lodash.flatten": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz",
|
||||
"integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/lodash.isplainobject": {
|
||||
"version": "4.0.6",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
|
||||
"integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/lodash.union": {
|
||||
"version": "4.6.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz",
|
||||
"integrity": "sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/log-symbols": {
|
||||
"version": "4.1.0",
|
||||
@ -6050,6 +6145,7 @@
|
||||
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
@ -6310,7 +6406,6 @@
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
@ -6375,7 +6470,6 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"nanoid": "^3.3.11",
|
||||
"picocolors": "^1.1.1",
|
||||
@ -6433,7 +6527,8 @@
|
||||
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
|
||||
"integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/progress": {
|
||||
"version": "2.0.3",
|
||||
@ -6507,12 +6602,24 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/rcedit": {
|
||||
"version": "5.0.2",
|
||||
"resolved": "https://registry.npmjs.org/rcedit/-/rcedit-5.0.2.tgz",
|
||||
"integrity": "sha512-dgysxaeXZ4snLpPjn8aVtHvZDCx+aRcvZbaWBgl1poU6OPustMvOkj9a9ZqASQ6i5Y5szJ13LSvglEOwrmgUxA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cross-spawn-windows-exe": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 22.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/react": {
|
||||
"version": "18.3.1",
|
||||
"resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
|
||||
"integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"loose-envify": "^1.1.0"
|
||||
},
|
||||
@ -6577,6 +6684,7 @@
|
||||
"integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"minimatch": "^5.1.0"
|
||||
}
|
||||
@ -6586,7 +6694,8 @@
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/readdir-glob/node_modules/brace-expansion": {
|
||||
"version": "2.0.2",
|
||||
@ -6594,6 +6703,7 @@
|
||||
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0"
|
||||
}
|
||||
@ -6604,6 +6714,7 @@
|
||||
"integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"brace-expansion": "^2.0.1"
|
||||
},
|
||||
@ -7284,6 +7395,7 @@
|
||||
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"bl": "^4.0.3",
|
||||
"end-of-stream": "^1.4.1",
|
||||
@ -7568,7 +7680,6 @@
|
||||
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"esbuild": "~0.27.0",
|
||||
"get-tsconfig": "^4.7.5"
|
||||
@ -7751,7 +7862,6 @@
|
||||
"integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"esbuild": "^0.25.0",
|
||||
"fdir": "^6.4.4",
|
||||
@ -9361,7 +9471,6 @@
|
||||
"integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"esbuild": "^0.21.3",
|
||||
"postcss": "^8.4.43",
|
||||
@ -9619,6 +9728,7 @@
|
||||
"integrity": "sha512-9qv4rlDiopXg4E69k+vMHjNN63YFMe9sZMrdlvKnCjlCRWeCBswPPMPUfx+ipsAWq1LXHe70RcbaHdJJpS6hyQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"archiver-utils": "^3.0.4",
|
||||
"compress-commons": "^4.1.2",
|
||||
@ -9634,6 +9744,7 @@
|
||||
"integrity": "sha512-KVgf4XQVrTjhyWmx6cte4RxonPLR9onExufI1jhvw/MQ4BB6IsZD5gT8Lq+u/+pRkWna/6JoHpiQioaqFP5Rzw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"glob": "^7.2.3",
|
||||
"graceful-fs": "^4.2.0",
|
||||
|
||||
18  package.json
@@ -1,7 +1,7 @@
{
  "name": "real-debrid-downloader",
  "version": "1.1.15",
  "description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
  "version": "1.6.66",
  "description": "Desktop downloader",
  "main": "build/main/main/main.js",
  "author": "Sucukdeluxe",
  "license": "MIT",
@@ -16,7 +16,9 @@
    "start": "cross-env NODE_ENV=production electron .",
    "test": "vitest run",
    "self-check": "tsx tests/self-check.ts",
    "release:win": "npm run build && electron-builder --publish never --win nsis portable"
    "release:win": "npm run build && electron-builder --publish never --win nsis portable",
    "release:gitea": "node scripts/release_gitea.mjs",
    "release:forgejo": "node scripts/release_gitea.mjs"
  },
  "dependencies": {
    "adm-zip": "^0.5.16",
@@ -25,8 +27,8 @@
    "uuid": "^11.1.0"
  },
  "devDependencies": {
    "@types/node": "^24.0.13",
    "@types/adm-zip": "^0.5.7",
    "@types/node": "^24.0.13",
    "@types/react": "^18.3.12",
    "@types/react-dom": "^18.3.1",
    "@types/uuid": "^10.0.0",
@@ -35,6 +37,7 @@
    "cross-env": "^7.0.3",
    "electron": "^31.7.7",
    "electron-builder": "^25.1.8",
    "rcedit": "^5.0.2",
    "tsup": "^8.3.6",
    "tsx": "^4.19.2",
    "typescript": "^5.7.3",
@@ -52,8 +55,12 @@
    "files": [
      "build/main/**/*",
      "build/renderer/**/*",
      "resources/extractor-jvm/**/*",
      "package.json"
    ],
    "asarUnpack": [
      "resources/extractor-jvm/**/*"
    ],
    "win": {
      "target": [
        "nsis",
@@ -67,6 +74,7 @@
      "perMachine": false,
      "allowToChangeInstallationDirectory": true,
      "createDesktopShortcut": true
    }
  },
  "afterPack": "scripts/afterPack.cjs"
}
}
File diff suppressed because it is too large.
@@ -1,5 +0,0 @@
requests>=2.31.0
pyzipper>=0.3.6
send2trash>=1.8.2
keyring>=25.6.0
tkinterdnd2>=0.4.2
22  resources/extractor-jvm/README.md (new file)
@@ -0,0 +1,22 @@
# JVM extractor runtime

This directory contains the Java sidecar runtime used by `src/main/extractor.ts`.

## Included backends

- `sevenzipjbinding` for the primary extraction path (RAR/7z/ZIP and others)
- `zip4j` for ZIP multipart handling (JD-style split ZIP behavior)

## Layout

- `classes/` compiled `JBindExtractorMain` classes
- `lib/` runtime jars required by the sidecar
- `src/` Java source for the sidecar

## Rebuild notes

The checked-in classes are Java 8 compatible and built from:

`resources/extractor-jvm/src/com/sucukdeluxe/extractor/JBindExtractorMain.java`

If you need to rebuild, compile against the jars in `lib/` with a Java 8-compatible compiler.
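The main process presumably launches this sidecar as a child process; the exact CLI contract of `JBindExtractorMain` is not documented here, so the argument shape below is a placeholder. A rough sketch of how such a launch can look from Node, using only the class and jar names listed above:

```js
import { spawn } from "node:child_process";
import { delimiter, join } from "node:path";

// Build a classpath from the compiled classes plus the bundled jars and start
// the sidecar. The "<archive> <targetDir>" argument order is hypothetical.
function launchExtractorSidecar(runtimeDir, archivePath, targetDir) {
  const classpath = [
    join(runtimeDir, "classes"),
    join(runtimeDir, "lib", "sevenzipjbinding.jar"),
    join(runtimeDir, "lib", "sevenzipjbinding-all-platforms.jar"),
    join(runtimeDir, "lib", "zip4j.jar")
  ].join(delimiter);

  return spawn("java", [
    "-cp", classpath,
    "com.sucukdeluxe.extractor.JBindExtractorMain",
    archivePath,
    targetDir
  ], { stdio: ["ignore", "pipe", "pipe"] });
}
```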
12  resources/extractor-jvm/THIRD_PARTY_NOTICES.txt (new file)
@@ -0,0 +1,12 @@
Bundled JVM extractor dependencies:

1) sevenzipjbinding (16.02-2.01)
   - Maven artifact: net.sf.sevenzipjbinding:sevenzipjbinding
   - Maven artifact: net.sf.sevenzipjbinding:sevenzipjbinding-all-platforms
   - Upstream: https://sevenzipjbind.sourceforge.net/

2) zip4j (2.11.5)
   - Maven artifact: net.lingala.zip4j:zip4j
   - Upstream: https://github.com/srikanth-lingala/zip4j

Please review upstream licenses and notices before redistribution.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN  resources/extractor-jvm/lib/sevenzipjbinding-all-platforms.jar (new file)
Binary file not shown.
BIN  resources/extractor-jvm/lib/sevenzipjbinding.jar (new file)
Binary file not shown.
BIN  resources/extractor-jvm/lib/zip4j.jar (new file)
Binary file not shown.
File diff suppressed because it is too large.
18  scripts/afterPack.cjs (new file)
@@ -0,0 +1,18 @@
const path = require("path");
const { rcedit } = require("rcedit");

module.exports = async function afterPack(context) {
  const productFilename = context.packager?.appInfo?.productFilename;
  if (!productFilename) {
    console.warn(" • rcedit: skipped — productFilename not available");
    return;
  }
  const exePath = path.join(context.appOutDir, `${productFilename}.exe`);
  const iconPath = path.resolve(__dirname, "..", "assets", "app_icon.ico");
  console.log(` • rcedit: patching icon → ${exePath}`);
  try {
    await rcedit(exePath, { icon: iconPath });
  } catch (error) {
    console.warn(` • rcedit: failed — ${String(error)}`);
  }
};
@@ -1,5 +1,6 @@
import { DebridService } from "../src/main/debrid";
import { defaultSettings } from "../src/main/constants";
import { MegaWebFallback } from "../src/main/mega-web-fallback";

const links = [
  "https://rapidgator.net/file/837ef967aede4935e3e0374c4e663b40/GTHDERTPIIP7P401.part1.rar.html",
@@ -10,7 +11,8 @@ const links = [
const settings = {
  ...defaultSettings(),
  token: process.env.RD_TOKEN || "",
  megaToken: process.env.MEGA_TOKEN || "",
  megaLogin: process.env.MEGA_LOGIN || "",
  megaPassword: process.env.MEGA_PASSWORD || "",
  bestToken: process.env.BEST_TOKEN || "",
  allDebridToken: process.env.ALLDEBRID_TOKEN || "",
  providerPrimary: "alldebrid" as const,
@@ -19,13 +21,20 @@ const settings = {
  autoProviderFallback: true
};

if (!settings.token && !settings.megaToken && !settings.bestToken && !settings.allDebridToken) {
  console.error("No provider tokens set. Use RD_TOKEN/MEGA_TOKEN/BEST_TOKEN/ALLDEBRID_TOKEN.");
if (!settings.token && !(settings.megaLogin && settings.megaPassword) && !settings.bestToken && !settings.allDebridToken) {
  console.error("No provider credentials set. Use RD_TOKEN or MEGA_LOGIN+MEGA_PASSWORD or BEST_TOKEN or ALLDEBRID_TOKEN.");
  process.exit(1);
}

async function main(): Promise<void> {
  const service = new DebridService(settings);
  const megaWeb = new MegaWebFallback(() => ({
    login: settings.megaLogin,
    password: settings.megaPassword
  }));
  try {
    const service = new DebridService(settings, {
      megaWebUnrestrict: (link) => megaWeb.unrestrict(link)
    });
    for (const link of links) {
      try {
        const result = await service.unrestrictLink(link);
@@ -34,6 +43,9 @@ async function main(): Promise<void> {
        console.log(`[FAIL] ${String(error)}`);
      }
    }
  } finally {
    megaWeb.dispose();
  }
}

void main();
main().catch(e => { console.error(e); process.exit(1); });
148  scripts/mega_web_generate_download_test.mjs (new file)
@@ -0,0 +1,148 @@
|
||||
const LOGIN = process.env.MEGA_LOGIN || "";
|
||||
const PASSWORD = process.env.MEGA_PASSWORD || "";
|
||||
|
||||
const LINKS = [
|
||||
"https://rapidgator.net/file/90b5397dfc3e1a0e561db7d6b89d5604/scnb-rrw7-S08E01.part1.rar.html",
|
||||
"https://rapidgator.net/file/8ddf856dc833310c5cae9db82caf9682/scnb-rrw7-S08E01.part2.rar.html",
|
||||
"https://rapidgator.net/file/440eed67d266476866332ae224c3fad5/scnb-rrw7-S08E01.part3.rar.html"
|
||||
];
|
||||
|
||||
if (!LOGIN || !PASSWORD) {
|
||||
throw new Error("Set MEGA_LOGIN and MEGA_PASSWORD env vars");
|
||||
}
|
||||
|
||||
function sleep(ms) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
function cookieFrom(headers) {
|
||||
const cookies = headers.getSetCookie();
|
||||
return cookies.map((x) => x.split(";")[0].trim()).filter(Boolean).join("; ");
|
||||
}
|
||||
|
||||
function parseDebridCodes(html) {
|
||||
const re = /processDebrid\((\d+),'([^']+)',0\)/g;
|
||||
const out = [];
|
||||
let m;
|
||||
while ((m = re.exec(html)) !== null) {
|
||||
out.push({ id: Number(m[1]), code: m[2] });
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
async function resolveCode(cookie, code) {
|
||||
for (let attempt = 1; attempt <= 50; attempt += 1) {
|
||||
const res = await fetch("https://www.mega-debrid.eu/index.php?ajax=debrid&json", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: cookie,
|
||||
Referer: "https://www.mega-debrid.eu/index.php?page=debrideur&lang=de"
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
code,
|
||||
autodl: "0"
|
||||
})
|
||||
});
|
||||
const text = (await res.text()).trim();
|
||||
if (text === "reload") {
|
||||
if (attempt % 5 === 0) {
|
||||
console.log(` [retry] code=${code} attempt=${attempt}/50 (waiting for server)`);
|
||||
}
|
||||
await sleep(800);
|
||||
continue;
|
||||
}
|
||||
if (text === "false") {
|
||||
return { ok: false, reason: "false" };
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(text);
|
||||
if (parsed?.link) {
|
||||
return { ok: true, link: String(parsed.link), text: String(parsed.text || "") };
|
||||
}
|
||||
return { ok: false, reason: text };
|
||||
} catch {
|
||||
return { ok: false, reason: text };
|
||||
}
|
||||
}
|
||||
return { ok: false, reason: "timeout" };
|
||||
}
|
||||
|
||||
async function probeDownload(url) {
|
||||
const res = await fetch(url, {
|
||||
method: "GET",
|
||||
headers: {
|
||||
Range: "bytes=0-4095",
|
||||
"User-Agent": "Mozilla/5.0"
|
||||
},
|
||||
redirect: "manual"
|
||||
});
|
||||
return {
|
||||
status: res.status,
|
||||
location: res.headers.get("location") || "",
|
||||
contentType: res.headers.get("content-type") || "",
|
||||
contentLength: res.headers.get("content-length") || ""
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const loginRes = await fetch("https://www.mega-debrid.eu/index.php?form=login", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0"
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
login: LOGIN,
|
||||
password: PASSWORD,
|
||||
remember: "on"
|
||||
}),
|
||||
redirect: "manual"
|
||||
});
|
||||
|
||||
if (loginRes.status >= 400) {
|
||||
throw new Error(`Login failed with HTTP ${loginRes.status}`);
|
||||
}
|
||||
const cookie = cookieFrom(loginRes.headers);
|
||||
if (!cookie) {
|
||||
throw new Error("Login returned no session cookie");
|
||||
}
|
||||
console.log("login", loginRes.status, loginRes.headers.get("location") || "");
|
||||
|
||||
const debridRes = await fetch("https://www.mega-debrid.eu/index.php?form=debrid", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: cookie,
|
||||
Referer: "https://www.mega-debrid.eu/index.php?page=debrideur&lang=de"
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
links: LINKS.join("\n"),
|
||||
password: "",
|
||||
showLinks: "1"
|
||||
})
|
||||
});
|
||||
|
||||
const html = await debridRes.text();
|
||||
const codes = parseDebridCodes(html);
|
||||
console.log("codes", codes.length);
|
||||
if (codes.length === 0) {
|
||||
throw new Error("No processDebrid codes found");
|
||||
}
|
||||
|
||||
for (let i = 0; i < Math.min(3, codes.length); i += 1) {
|
||||
const c = codes[i];
|
||||
const resolved = await resolveCode(cookie, c.code);
|
||||
if (!resolved.ok) {
|
||||
console.log(`[FAIL] code ${c.code}: ${resolved.reason}`);
|
||||
continue;
|
||||
}
|
||||
console.log(`[OK] code ${c.code} -> ${resolved.link}`);
|
||||
const probe = await probeDownload(resolved.link);
|
||||
console.log(` probe status=${probe.status} type=${probe.contentType} len=${probe.contentLength} loc=${probe.location}`);
|
||||
}
|
||||
}
|
||||
|
||||
await main().catch((e) => { console.error(e); process.exit(1); });
|
||||
@@ -1,29 +0,0 @@
from pathlib import Path


def main() -> int:
    project_root = Path(__file__).resolve().parents[1]
    png_path = project_root / "assets" / "app_icon.png"
    ico_path = project_root / "assets" / "app_icon.ico"

    if not png_path.exists():
        print(f"Icon PNG not found: {png_path}")
        return 1

    try:
        from PIL import Image
    except ImportError:
        print("Pillow missing. Install with: pip install pillow")
        return 1

    with Image.open(png_path) as image:
        image = image.convert("RGBA")
        sizes = [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)]
        image.save(ico_path, format="ICO", sizes=sizes)

    print(f"Wrote icon: {ico_path}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
@ -5,12 +5,14 @@ const RAPIDGATOR_LINKS = [
|
||||
];
|
||||
|
||||
const rdToken = process.env.RD_TOKEN || "";
|
||||
const megaToken = process.env.MEGA_TOKEN || "";
|
||||
const megaLogin = process.env.MEGA_LOGIN || "";
|
||||
const megaPassword = process.env.MEGA_PASSWORD || "";
|
||||
const bestToken = process.env.BEST_TOKEN || "";
|
||||
const allDebridToken = process.env.ALLDEBRID_TOKEN || "";
|
||||
let megaCookie = "";
|
||||
|
||||
if (!rdToken && !megaToken && !bestToken && !allDebridToken) {
|
||||
console.error("No provider token configured. Set RD_TOKEN and/or MEGA_TOKEN and/or BEST_TOKEN and/or ALLDEBRID_TOKEN.");
|
||||
if (!rdToken && !(megaLogin && megaPassword) && !bestToken && !allDebridToken) {
|
||||
console.error("No provider credentials configured. Set RD_TOKEN and/or MEGA_LOGIN+MEGA_PASSWORD and/or BEST_TOKEN and/or ALLDEBRID_TOKEN.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@ -64,34 +66,84 @@ async function callRealDebrid(link) {
|
||||
};
|
||||
}
|
||||
|
||||
// megaCookie is intentionally cached at module scope so that multiple
|
||||
// callMegaDebrid() invocations reuse the same session cookie.
|
||||
async function callMegaDebrid(link) {
|
||||
const response = await fetch(`https://www.mega-debrid.eu/api.php?action=getLink&token=${encodeURIComponent(megaToken)}`, {
|
||||
if (!megaCookie) {
|
||||
const loginRes = await fetch("https://www.mega-debrid.eu/index.php?form=login", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "RD-Node-Downloader/1.1.12"
|
||||
"User-Agent": "Mozilla/5.0"
|
||||
},
|
||||
body: new URLSearchParams({ link })
|
||||
body: new URLSearchParams({ login: megaLogin, password: megaPassword, remember: "on" }),
|
||||
redirect: "manual"
|
||||
});
|
||||
const text = await response.text();
|
||||
const payload = asRecord(safeJson(text));
|
||||
if (!response.ok) {
|
||||
return { ok: false, error: parseResponseError(response.status, text, payload) };
|
||||
if (loginRes.status >= 400) {
|
||||
return { ok: false, error: `Mega-Web login failed with HTTP ${loginRes.status}` };
|
||||
}
|
||||
const code = pickString(payload, ["response_code"]);
|
||||
if (code && code.toLowerCase() !== "ok") {
|
||||
return { ok: false, error: pickString(payload, ["response_text"]) || code };
|
||||
megaCookie = loginRes.headers.getSetCookie()
|
||||
.map((chunk) => chunk.split(";")[0].trim())
|
||||
.filter(Boolean)
|
||||
.join("; ");
|
||||
if (!megaCookie) {
|
||||
return { ok: false, error: "Mega-Web login returned no session cookie" };
|
||||
}
|
||||
const direct = pickString(payload, ["debridLink", "download", "link"]);
|
||||
}
|
||||
|
||||
const debridRes = await fetch("https://www.mega-debrid.eu/index.php?form=debrid", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: megaCookie,
|
||||
Referer: "https://www.mega-debrid.eu/index.php?page=debrideur&lang=de"
|
||||
},
|
||||
body: new URLSearchParams({ links: link, password: "", showLinks: "1" })
|
||||
});
|
||||
const html = await debridRes.text();
|
||||
const code = html.match(/processDebrid\(\d+,'([^']+)',0\)/i)?.[1] || "";
|
||||
if (!code) {
|
||||
return { ok: false, error: "Mega-Web returned no processDebrid code" };
|
||||
}
|
||||
|
||||
for (let attempt = 1; attempt <= 40; attempt += 1) {
|
||||
const ajaxRes = await fetch("https://www.mega-debrid.eu/index.php?ajax=debrid&json", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: megaCookie,
|
||||
Referer: "https://www.mega-debrid.eu/index.php?page=debrideur&lang=de"
|
||||
},
|
||||
body: new URLSearchParams({ code, autodl: "0" })
|
||||
});
|
||||
const txt = (await ajaxRes.text()).trim();
|
||||
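// The ajax endpoint replies with the literal string "reload" while the link is still being
// generated and "false" on a hard failure; anything else is treated as JSON below.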
if (txt === "reload") {
|
||||
await new Promise((resolve) => setTimeout(resolve, 650));
|
||||
continue;
|
||||
}
|
||||
if (txt === "false") {
|
||||
return { ok: false, error: "Mega-Web returned false" };
|
||||
}
|
||||
const payload = safeJson(txt);
|
||||
const direct = String(payload?.link || "");
|
||||
if (!direct) {
|
||||
return { ok: false, error: "Mega-Debrid returned no debridLink" };
|
||||
const msg = String(payload?.text || txt || "Mega-Web no link");
|
||||
if (/hoster does not respond correctly|could not be done for this moment/i.test(msg)) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1200));
|
||||
continue;
|
||||
}
|
||||
return { ok: false, error: msg };
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
direct,
|
||||
fileName: pickString(payload, ["filename", "fileName"])
|
||||
fileName: pickString(asRecord(payload), ["filename"]) || ""
|
||||
};
|
||||
}
|
||||
return { ok: false, error: "Mega-Web timeout while generating link" };
|
||||
}
|
||||
|
||||
async function callBestDebrid(link) {
|
||||
const encoded = encodeURIComponent(link);
|
||||
@ -196,7 +248,7 @@ async function main() {
|
||||
if (rdToken) {
|
||||
providers.push({ name: "Real-Debrid", run: callRealDebrid });
|
||||
}
|
||||
if (megaToken) {
|
||||
if (megaLogin && megaPassword) {
|
||||
providers.push({ name: "Mega-Debrid", run: callMegaDebrid });
|
||||
}
|
||||
if (bestToken) {
|
||||
@ -242,4 +294,4 @@ async function main() {
|
||||
}
|
||||
}
|
||||
|
||||
await main();
|
||||
await main().catch((e) => { console.error(e); process.exit(1); });
|
||||
|
||||
356
scripts/release_gitea.mjs
Normal file
@ -0,0 +1,356 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { spawnSync } from "node:child_process";
|
||||
|
||||
const NPM_RELEASE_WIN = process.platform === "win32"
|
||||
? {
|
||||
command: process.env.ComSpec || "cmd.exe",
|
||||
args: ["/d", "/s", "/c", "npm run release:win"]
|
||||
}
|
||||
: {
|
||||
command: "npm",
|
||||
args: ["run", "release:win"]
|
||||
};
|
||||
|
||||
function run(command, args, options = {}) {
|
||||
const result = spawnSync(command, args, {
|
||||
cwd: process.cwd(),
|
||||
encoding: "utf8",
|
||||
stdio: options.capture ? ["pipe", "pipe", "pipe"] : "inherit"
|
||||
});
|
||||
if (result.status !== 0) {
|
||||
const stderr = result.stderr ? String(result.stderr).trim() : "";
|
||||
const stdout = result.stdout ? String(result.stdout).trim() : "";
|
||||
const details = [stderr, stdout].filter(Boolean).join("\n");
|
||||
throw new Error(`Command failed: ${command} ${args.join(" ")}${details ? `\n${details}` : ""}`);
|
||||
}
|
||||
return options.capture ? String(result.stdout || "") : "";
|
||||
}
|
||||
|
||||
function runCapture(command, args) {
|
||||
const result = spawnSync(command, args, {
|
||||
cwd: process.cwd(),
|
||||
encoding: "utf8",
|
||||
stdio: ["pipe", "pipe", "pipe"]
|
||||
});
|
||||
if (result.status !== 0) {
|
||||
const stderr = String(result.stderr || "").trim();
|
||||
throw new Error(stderr || `Command failed: ${command} ${args.join(" ")}`);
|
||||
}
|
||||
return String(result.stdout || "").trim();
|
||||
}
|
||||
|
||||
function runWithInput(command, args, input) {
|
||||
const result = spawnSync(command, args, {
|
||||
cwd: process.cwd(),
|
||||
encoding: "utf8",
|
||||
input,
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
timeout: 10000
|
||||
});
|
||||
if (result.status !== 0) {
|
||||
const stderr = String(result.stderr || "").trim();
|
||||
throw new Error(stderr || `Command failed: ${command} ${args.join(" ")}`);
|
||||
}
|
||||
return String(result.stdout || "");
|
||||
}
|
||||
|
||||
function parseArgs(argv) {
|
||||
const args = argv.slice(2);
|
||||
if (args.includes("--help") || args.includes("-h")) {
|
||||
return { help: true };
|
||||
}
|
||||
|
||||
const dryRun = args.includes("--dry-run");
|
||||
const cleaned = args.filter((arg) => arg !== "--dry-run");
|
||||
const version = cleaned[0] || "";
|
||||
const notes = cleaned.slice(1).join(" ").trim();
|
||||
return { help: false, dryRun, version, notes };
|
||||
}
|
||||
|
||||
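// Accepts https, scp-style ssh and ssh:// remotes; with an illustrative host, all of
//   https://git.example.com/owner/repo.git
//   git@git.example.com:owner/repo.git
//   ssh://git@git.example.com:2222/owner/repo.git
// resolve to { host: "git.example.com", owner: "owner", repo: "repo" }.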
function parseRemoteUrl(url) {
|
||||
const raw = String(url || "").trim();
|
||||
const httpsMatch = raw.match(/^https?:\/\/([^/]+)\/([^/]+)\/([^/]+?)(?:\.git)?$/i);
|
||||
if (httpsMatch) {
|
||||
return { host: httpsMatch[1], owner: httpsMatch[2], repo: httpsMatch[3] };
|
||||
}
|
||||
const sshMatch = raw.match(/^git@([^:]+):([^/]+)\/([^/]+?)(?:\.git)?$/i);
|
||||
if (sshMatch) {
|
||||
return { host: sshMatch[1], owner: sshMatch[2], repo: sshMatch[3] };
|
||||
}
|
||||
const sshAltMatch = raw.match(/^ssh:\/\/git@([^/:]+)(?::\d+)?\/([^/]+)\/([^/]+?)(?:\.git)?$/i);
|
||||
if (sshAltMatch) {
|
||||
return { host: sshAltMatch[1], owner: sshAltMatch[2], repo: sshAltMatch[3] };
|
||||
}
|
||||
throw new Error(`Cannot parse remote URL: ${raw}`);
|
||||
}
|
||||
|
||||
function normalizeBaseUrl(url) {
|
||||
const raw = String(url || "").trim().replace(/\/+$/, "");
|
||||
if (!raw) {
|
||||
return "";
|
||||
}
|
||||
if (!/^https?:\/\//i.test(raw)) {
|
||||
throw new Error("GITEA_BASE_URL must start with http:// or https://");
|
||||
}
|
||||
return raw;
|
||||
}
|
||||
|
||||
function getGiteaRepo() {
|
||||
const forcedRemote = String(process.env.GITEA_REMOTE || process.env.FORGEJO_REMOTE || "").trim();
|
||||
const remotes = forcedRemote
|
||||
? [forcedRemote]
|
||||
: ["gitea", "forgejo", "origin", "github-new", "codeberg"];
|
||||
|
||||
const preferredBase = normalizeBaseUrl(process.env.GITEA_BASE_URL || process.env.FORGEJO_BASE_URL || "https://git.24-music.de");
|
||||
|
||||
const preferredProtocol = preferredBase ? new URL(preferredBase).protocol : "https:";
|
||||
|
||||
for (const remote of remotes) {
|
||||
try {
|
||||
const remoteUrl = runCapture("git", ["remote", "get-url", remote]);
|
||||
const parsed = parseRemoteUrl(remoteUrl);
|
||||
const remoteBase = `https://${parsed.host}`.toLowerCase();
|
||||
if (preferredBase && remoteBase !== preferredBase.toLowerCase().replace(/^http:/, "https:")) {
|
||||
continue;
|
||||
}
|
||||
return { remote, ...parsed, baseUrl: `${preferredProtocol}//${parsed.host}` };
|
||||
} catch {
|
||||
// try next remote
|
||||
}
|
||||
}
|
||||
|
||||
if (preferredBase) {
|
||||
throw new Error(
|
||||
`No remote found for ${preferredBase}. Add one with: git remote add gitea ${preferredBase}/<owner>/<repo>.git`
|
||||
);
|
||||
}
|
||||
|
||||
throw new Error("No suitable remote found. Set GITEA_REMOTE or GITEA_BASE_URL.");
|
||||
}
|
||||
|
||||
function getAuthHeader(host) {
|
||||
const explicitToken = String(process.env.GITEA_TOKEN || process.env.FORGEJO_TOKEN || "").trim();
|
||||
if (explicitToken) {
|
||||
return `token ${explicitToken}`;
|
||||
}
|
||||
|
||||
const credentialText = runWithInput("git", ["credential", "fill"], `protocol=https\nhost=${host}\n\n`);
|
||||
const map = new Map();
|
||||
for (const line of credentialText.split(/\r?\n/)) {
|
||||
if (!line.includes("=")) {
|
||||
continue;
|
||||
}
|
||||
const [key, value] = line.split("=", 2);
|
||||
map.set(key, value);
|
||||
}
|
||||
const username = map.get("username") || "";
|
||||
const password = map.get("password") || "";
|
||||
if (!username || !password) {
|
||||
throw new Error(
|
||||
`Missing credentials for ${host}. Set GITEA_TOKEN or store credentials for this host in git credential helper.`
|
||||
);
|
||||
}
|
||||
const token = Buffer.from(`${username}:${password}`, "utf8").toString("base64");
|
||||
return `Basic ${token}`;
|
||||
}
|
||||
|
||||
async function apiRequest(method, url, authHeader, body, contentType = "application/json") {
|
||||
const headers = {
|
||||
Accept: "application/json",
|
||||
Authorization: authHeader
|
||||
};
|
||||
if (body !== undefined) {
|
||||
headers["Content-Type"] = contentType;
|
||||
}
|
||||
const response = await fetch(url, {
|
||||
method,
|
||||
headers,
|
||||
body
|
||||
});
|
||||
const text = await response.text();
|
||||
let parsed;
|
||||
try {
|
||||
parsed = text ? JSON.parse(text) : null;
|
||||
} catch {
|
||||
parsed = text;
|
||||
}
|
||||
return { ok: response.ok, status: response.status, body: parsed };
|
||||
}
|
||||
|
||||
function ensureVersionString(version) {
|
||||
const trimmed = String(version || "").trim();
|
||||
if (!/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/.test(trimmed)) {
|
||||
throw new Error("Invalid version format. Expected e.g. 1.4.42");
|
||||
}
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
function updatePackageVersion(rootDir, version) {
|
||||
const packagePath = path.join(rootDir, "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packagePath, "utf8"));
|
||||
if (String(packageJson.version || "") === version) {
|
||||
process.stdout.write(`package.json is already at version ${version}, skipping update.\n`);
|
||||
return;
|
||||
}
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(packagePath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
||||
}
|
||||
|
||||
function patchLatestYml(releaseDir, version) {
|
||||
const ymlPath = path.join(releaseDir, "latest.yml");
|
||||
let content = fs.readFileSync(ymlPath, "utf8");
|
||||
const setupName = `Real-Debrid-Downloader Setup ${version}.exe`;
|
||||
const dashedName = `Real-Debrid-Downloader-Setup-${version}.exe`;
|
||||
if (content.includes(dashedName)) {
|
||||
content = content.split(dashedName).join(setupName);
|
||||
fs.writeFileSync(ymlPath, content, "utf8");
|
||||
process.stdout.write(`Patched latest.yml: replaced "${dashedName}" with "${setupName}"\n`);
|
||||
}
|
||||
}
|
||||
|
||||
function ensureAssetsExist(rootDir, version) {
|
||||
const releaseDir = path.join(rootDir, "release");
|
||||
const files = [
|
||||
`Real-Debrid-Downloader Setup ${version}.exe`,
|
||||
`Real-Debrid-Downloader ${version}.exe`,
|
||||
"latest.yml",
|
||||
`Real-Debrid-Downloader Setup ${version}.exe.blockmap`
|
||||
];
|
||||
for (const fileName of files) {
|
||||
const fullPath = path.join(releaseDir, fileName);
|
||||
if (!fs.existsSync(fullPath)) {
|
||||
throw new Error(`Missing release artifact: ${fullPath}`);
|
||||
}
|
||||
}
|
||||
patchLatestYml(releaseDir, version);
|
||||
return { releaseDir, files };
|
||||
}
|
||||
|
||||
function ensureNoTrackedChanges() {
|
||||
const output = runCapture("git", ["status", "--porcelain"]);
|
||||
const lines = output.split(/\r?\n/).filter(Boolean);
|
||||
const tracked = lines.filter((line) => !line.startsWith("?? "));
|
||||
if (tracked.length > 0) {
|
||||
throw new Error(`Working tree has tracked changes:\n${tracked.join("\n")}`);
|
||||
}
|
||||
}
|
||||
|
||||
function ensureTagMissing(tag) {
|
||||
const result = spawnSync("git", ["rev-parse", "--verify", `refs/tags/${tag}`], {
|
||||
cwd: process.cwd(),
|
||||
stdio: "ignore"
|
||||
});
|
||||
if (result.status === 0) {
|
||||
throw new Error(`Tag already exists: ${tag}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function createOrGetRelease(baseApi, tag, authHeader, notes) {
|
||||
const byTag = await apiRequest("GET", `${baseApi}/releases/tags/${encodeURIComponent(tag)}`, authHeader);
|
||||
if (byTag.ok) {
|
||||
return byTag.body;
|
||||
}
|
||||
const payload = {
|
||||
tag_name: tag,
|
||||
target_commitish: "main",
|
||||
name: tag,
|
||||
body: notes || `Release ${tag}`,
|
||||
draft: false,
|
||||
prerelease: false
|
||||
};
|
||||
const created = await apiRequest("POST", `${baseApi}/releases`, authHeader, JSON.stringify(payload));
|
||||
if (!created.ok) {
|
||||
throw new Error(`Failed to create release (${created.status}): ${JSON.stringify(created.body)}`);
|
||||
}
|
||||
return created.body;
|
||||
}
|
||||
|
||||
async function uploadReleaseAssets(baseApi, releaseId, authHeader, releaseDir, files) {
|
||||
for (const fileName of files) {
|
||||
const filePath = path.join(releaseDir, fileName);
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const uploadUrl = `${baseApi}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`;
|
||||
|
||||
// Stream large files instead of loading them entirely into memory
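// Node's built-in fetch (undici) requires duplex: "half" whenever the request body is a stream.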
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const response = await fetch(uploadUrl, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: authHeader,
|
||||
"Content-Type": "application/octet-stream",
|
||||
"Content-Length": String(fileSize)
|
||||
},
|
||||
body: fileStream,
|
||||
duplex: "half"
|
||||
});
|
||||
|
||||
const text = await response.text();
|
||||
let parsed;
|
||||
try {
|
||||
parsed = text ? JSON.parse(text) : null;
|
||||
} catch {
|
||||
parsed = text;
|
||||
}
|
||||
|
||||
if (response.ok) {
|
||||
process.stdout.write(`Uploaded: ${fileName}\n`);
|
||||
continue;
|
||||
}
|
||||
if (response.status === 409 || response.status === 422) {
|
||||
process.stdout.write(`Skipped existing asset: ${fileName}\n`);
|
||||
continue;
|
||||
}
|
||||
throw new Error(`Asset upload failed for ${fileName} (${response.status}): ${JSON.stringify(parsed)}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const rootDir = process.cwd();
|
||||
const args = parseArgs(process.argv);
|
||||
if (args.help) {
|
||||
process.stdout.write("Usage: npm run release:gitea -- <version> [release notes] [--dry-run]\n");
|
||||
process.stdout.write("Env: GITEA_BASE_URL, GITEA_REMOTE, GITEA_TOKEN\n");
|
||||
process.stdout.write("Compatibility envs still supported: FORGEJO_BASE_URL, FORGEJO_REMOTE, FORGEJO_TOKEN\n");
|
||||
process.stdout.write("Example: npm run release:gitea -- 1.6.31 \"- Bugfixes\"\n");
|
||||
return;
|
||||
}
|
||||
|
||||
const version = ensureVersionString(args.version);
|
||||
const tag = `v${version}`;
|
||||
const releaseNotes = args.notes || `- Release ${tag}`;
|
||||
const repo = getGiteaRepo();
|
||||
|
||||
ensureNoTrackedChanges();
|
||||
ensureTagMissing(tag);
|
||||
|
||||
if (args.dryRun) {
|
||||
process.stdout.write(`Dry run: would release ${tag}. No changes made.\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
updatePackageVersion(rootDir, version);
|
||||
|
||||
process.stdout.write(`Building release artifacts for ${tag}...\n`);
|
||||
run(NPM_RELEASE_WIN.command, NPM_RELEASE_WIN.args);
|
||||
const assets = ensureAssetsExist(rootDir, version);
|
||||
|
||||
run("git", ["add", "package.json"]);
|
||||
run("git", ["commit", "-m", `Release ${tag}`]);
|
||||
run("git", ["push", repo.remote, "main"]);
|
||||
run("git", ["tag", tag]);
|
||||
run("git", ["push", repo.remote, tag]);
|
||||
|
||||
const authHeader = getAuthHeader(repo.host);
|
||||
const baseApi = `${repo.baseUrl}/api/v1/repos/${repo.owner}/${repo.repo}`;
|
||||
const release = await createOrGetRelease(baseApi, tag, authHeader, releaseNotes);
|
||||
await uploadReleaseAssets(baseApi, release.id, authHeader, assets.releaseDir, assets.files);
|
||||
|
||||
process.stdout.write(`Release published: ${release.html_url || `${repo.baseUrl}/${repo.owner}/${repo.repo}/releases/tag/${tag}`}\n`);
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
process.stderr.write(`${String(error?.message || error)}\n`);
|
||||
process.exit(1);
|
||||
});
|
||||
@ -1,305 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from tkinter import messagebox
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
if str(ROOT) not in sys.path:
|
||||
sys.path.insert(0, str(ROOT))
|
||||
|
||||
import real_debrid_downloader_gui as appmod
|
||||
|
||||
|
||||
def assert_true(condition: bool, message: str) -> None:
|
||||
if not condition:
|
||||
raise AssertionError(message)
|
||||
|
||||
|
||||
def run() -> None:
|
||||
temp_root = Path(tempfile.mkdtemp(prefix="rd_self_check_"))
|
||||
|
||||
original_config = appmod.CONFIG_FILE
|
||||
original_manifest = appmod.MANIFEST_FILE
|
||||
appmod.CONFIG_FILE = temp_root / "rd_downloader_config.json"
|
||||
appmod.MANIFEST_FILE = temp_root / "rd_download_manifest.json"
|
||||
|
||||
message_calls: list[tuple[str, str, str]] = []
|
||||
original_showerror = messagebox.showerror
|
||||
original_showwarning = messagebox.showwarning
|
||||
original_showinfo = messagebox.showinfo
|
||||
original_askyesno = messagebox.askyesno
|
||||
|
||||
def fake_message(kind: str):
|
||||
def _inner(title: str, text: str):
|
||||
message_calls.append((kind, str(title), str(text)))
|
||||
return None
|
||||
|
||||
return _inner
|
||||
|
||||
messagebox.showerror = fake_message("error")
|
||||
messagebox.showwarning = fake_message("warning")
|
||||
messagebox.showinfo = fake_message("info")
|
||||
|
||||
app = appmod.DownloaderApp()
|
||||
app.withdraw()
|
||||
|
||||
try:
|
||||
app.token_var.set("demo-token")
|
||||
app.output_dir_var.set(str(temp_root / "downloads"))
|
||||
app.links_text.delete("1.0", "end")
|
||||
app.links_text.insert("1.0", "not_a_link")
|
||||
app.start_downloads()
|
||||
assert_true(
|
||||
any("Ungültige Links" in text for kind, _, text in message_calls if kind == "error"),
|
||||
"Link-Validierung hat ungültige Eingabe nicht blockiert",
|
||||
)
|
||||
|
||||
app.cleanup_mode_var.set("delete")
|
||||
app.extract_conflict_mode_var.set("rename")
|
||||
app.remove_link_files_after_extract_var.set(True)
|
||||
app.remove_samples_var.set(True)
|
||||
app.remember_token_var.set(True)
|
||||
app.token_var.set("token-123")
|
||||
|
||||
original_can_secure = app._can_store_token_securely
|
||||
original_store_keyring = app._store_token_in_keyring
|
||||
app._can_store_token_securely = lambda: True
|
||||
app._store_token_in_keyring = lambda token: False
|
||||
app._save_config()
|
||||
|
||||
config_data = json.loads(appmod.CONFIG_FILE.read_text(encoding="utf-8"))
|
||||
assert_true(config_data.get("token") == "token-123", "Token-Fallback in Config bei Keyring-Fehler fehlt")
|
||||
|
||||
app.cleanup_mode_var.set("none")
|
||||
app.extract_conflict_mode_var.set("overwrite")
|
||||
app.remove_link_files_after_extract_var.set(False)
|
||||
app.remove_samples_var.set(False)
|
||||
app.token_var.set("")
|
||||
app._load_config()
|
||||
assert_true(app.cleanup_mode_var.get() == "delete", "cleanup_mode wurde nicht aus Config geladen")
|
||||
assert_true(app.extract_conflict_mode_var.get() == "rename", "extract_conflict_mode wurde nicht geladen")
|
||||
assert_true(app.remove_link_files_after_extract_var.get() is True, "remove_link_files_after_extract fehlt")
|
||||
assert_true(app.remove_samples_var.get() is True, "remove_samples_after_extract fehlt")
|
||||
|
||||
app._can_store_token_securely = original_can_secure
|
||||
app._store_token_in_keyring = original_store_keyring
|
||||
|
||||
class DummyWorker:
|
||||
@staticmethod
|
||||
def is_alive() -> bool:
|
||||
return True
|
||||
|
||||
app.worker_thread = DummyWorker()
|
||||
app.pause_event.clear()
|
||||
app.toggle_pause_downloads()
|
||||
assert_true(app.pause_event.is_set(), "Pause wurde nicht aktiviert")
|
||||
app.toggle_pause_downloads()
|
||||
assert_true(not app.pause_event.is_set(), "Resume wurde nicht aktiviert")
|
||||
|
||||
app.pause_event.set()
|
||||
started = time.monotonic()
|
||||
|
||||
def _unpause() -> None:
|
||||
time.sleep(0.25)
|
||||
app.pause_event.clear()
|
||||
|
||||
threading.Thread(target=_unpause, daemon=True).start()
|
||||
app._wait_if_paused()
|
||||
waited = time.monotonic() - started
|
||||
assert_true(waited >= 0.2, "Pause-Wait hat nicht geblockt")
|
||||
|
||||
messagebox.askyesno = lambda *args, **kwargs: True
|
||||
cancel_package_dir = temp_root / "cancel_pkg"
|
||||
cancel_package_dir.mkdir(parents=True, exist_ok=True)
|
||||
(cancel_package_dir / "release.part1.rar").write_bytes(b"x")
|
||||
(cancel_package_dir / "keep_movie.mkv").write_bytes(b"x")
|
||||
|
||||
cancel_row = "package-cancel"
|
||||
child_row = "package-cancel-link-1"
|
||||
app.table.insert("", "end", iid=cancel_row, text="cancelpkg", values=("-", "Wartet", "0/1", "0 B/s", "0"), open=True)
|
||||
app.table.insert(cancel_row, "end", iid=child_row, text="https://example.com/cancel", values=("-", "Wartet", "0%", "0 B/s", "0"))
|
||||
app.links_text.delete("1.0", "end")
|
||||
app.links_text.insert("1.0", "https://example.com/cancel\n")
|
||||
app.package_contexts = [
|
||||
{
|
||||
"package_row_id": cancel_row,
|
||||
"row_map": {1: child_row},
|
||||
"job": {
|
||||
"name": "cancelpkg",
|
||||
"links": ["https://example.com/cancel"],
|
||||
"package_dir": cancel_package_dir,
|
||||
"extract_target_dir": None,
|
||||
"completed_indices": [],
|
||||
},
|
||||
}
|
||||
]
|
||||
app.worker_thread = DummyWorker()
|
||||
app.table.selection_set(child_row)
|
||||
app._remove_selected_progress_rows()
|
||||
assert_true(app._is_package_cancelled(cancel_row), "Paket-Abbruch wurde nicht markiert")
|
||||
assert_true(not app.table.exists(cancel_row), "Paketzeile wurde nicht entfernt")
|
||||
remaining_links = app.links_text.get("1.0", "end").strip()
|
||||
assert_true(not remaining_links, "Link wurde bei Paketentfernung nicht aus Liste entfernt")
|
||||
|
||||
removed_cancel_files = app._cleanup_cancelled_package_artifacts(cancel_package_dir)
|
||||
assert_true(removed_cancel_files >= 1, "Archiv-Cleanup bei Paketabbruch hat nichts gelöscht")
|
||||
assert_true(not (cancel_package_dir / "release.part1.rar").exists(), "RAR-Teil wurde nicht entfernt")
|
||||
assert_true((cancel_package_dir / "keep_movie.mkv").exists(), "Nicht-Archivdatei wurde fälschlich gelöscht")
|
||||
|
||||
status_events: list[tuple[float, str]] = []
|
||||
extract_times: dict[str, float] = {}
|
||||
download_starts: dict[str, float] = {}
|
||||
|
||||
original_queue_status = app._queue_status
|
||||
original_download_single = app._download_single_link
|
||||
original_extract_archive = app._extract_archive
|
||||
|
||||
def fake_queue_status(message: str) -> None:
|
||||
status_events.append((time.monotonic(), message))
|
||||
original_queue_status(message)
|
||||
|
||||
def fake_download_single(
|
||||
token: str,
|
||||
package_dir: Path,
|
||||
index: int,
|
||||
link: str,
|
||||
package_row_id: str | None = None,
|
||||
) -> appmod.DownloadResult:
|
||||
package_name = package_dir.name
|
||||
download_starts.setdefault(package_name, time.monotonic())
|
||||
archive_path = package_dir / f"{package_name}_{index}.zip"
|
||||
archive_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with zipfile.ZipFile(archive_path, "w") as archive:
|
||||
archive.writestr("movie.mkv", b"movie-data")
|
||||
archive.writestr(f"Samples/{package_name}-sample.mkv", b"sample-data")
|
||||
archive.writestr("download_links.txt", "https://example.com/file")
|
||||
time.sleep(0.18)
|
||||
return appmod.DownloadResult(path=archive_path, bytes_written=archive_path.stat().st_size)
|
||||
|
||||
def fake_extract_archive(archive_path: Path, extract_target_dir: Path, conflict_mode: str):
|
||||
package_name = archive_path.parent.name
|
||||
if package_name == "pkg1":
|
||||
extract_times["pkg1_start"] = time.monotonic()
|
||||
time.sleep(0.8)
|
||||
else:
|
||||
time.sleep(0.25)
|
||||
with zipfile.ZipFile(archive_path) as archive:
|
||||
archive.extractall(extract_target_dir)
|
||||
if package_name == "pkg1":
|
||||
extract_times["pkg1_end"] = time.monotonic()
|
||||
return None
|
||||
|
||||
app._queue_status = fake_queue_status
|
||||
app._download_single_link = fake_download_single
|
||||
app._extract_archive = fake_extract_archive
|
||||
|
||||
app.table.delete(*app.table.get_children())
|
||||
app.package_contexts = []
|
||||
|
||||
package_specs: list[tuple[str, Path, Path]] = []
|
||||
for idx in (1, 2):
|
||||
package_name = f"pkg{idx}"
|
||||
package_dir = temp_root / package_name
|
||||
extract_dir = temp_root / f"extract_{package_name}"
|
||||
package_dir.mkdir(parents=True, exist_ok=True)
|
||||
extract_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
package_row_id = f"package-{idx}"
|
||||
app.table.insert("", "end", iid=package_row_id, text=package_name, values=("-", "Wartet", "0/1", "0 B/s", "0"), open=True)
|
||||
row_id = f"{package_row_id}-link-1"
|
||||
app.table.insert(package_row_id, "end", iid=row_id, text="https://example.com/file", values=("-", "Wartet", "0%", "0 B/s", "0"))
|
||||
|
||||
app.package_contexts.append(
|
||||
{
|
||||
"package_row_id": package_row_id,
|
||||
"row_map": {1: row_id},
|
||||
"job": {
|
||||
"name": package_name,
|
||||
"links": ["https://example.com/file"],
|
||||
"package_dir": package_dir,
|
||||
"extract_target_dir": extract_dir,
|
||||
"completed_indices": [],
|
||||
},
|
||||
}
|
||||
)
|
||||
package_specs.append((package_name, package_dir, extract_dir))
|
||||
|
||||
app.run_started_at = time.monotonic()
|
||||
app.total_downloaded_bytes = 0
|
||||
app.stop_event.clear()
|
||||
app.pause_event.clear()
|
||||
app._set_manifest_for_run(
|
||||
[
|
||||
{"name": name, "links": ["https://example.com/file"]}
|
||||
for name, _package_dir, _extract_dir in package_specs
|
||||
],
|
||||
temp_root / "downloads",
|
||||
"self-check-signature",
|
||||
resume_map={},
|
||||
)
|
||||
|
||||
app._download_queue_worker(
|
||||
token="demo-token",
|
||||
max_parallel=1,
|
||||
hybrid_extract=True,
|
||||
cleanup_mode="none",
|
||||
extract_conflict_mode="overwrite",
|
||||
overall_total_links=2,
|
||||
remove_link_files_after_extract=True,
|
||||
remove_samples_after_extract=True,
|
||||
)
|
||||
app._process_ui_queue()
|
||||
|
||||
pkg1_extract_dir = temp_root / "extract_pkg1"
|
||||
pkg2_extract_dir = temp_root / "extract_pkg2"
|
||||
assert_true((pkg1_extract_dir / "movie.mkv").exists(), "Entpacken pkg1 fehlgeschlagen")
|
||||
assert_true((pkg2_extract_dir / "movie.mkv").exists(), "Entpacken pkg2 fehlgeschlagen")
|
||||
assert_true(not (pkg1_extract_dir / "download_links.txt").exists(), "Link-Artefakte wurden nicht entfernt")
|
||||
assert_true(not (pkg2_extract_dir / "download_links.txt").exists(), "Link-Artefakte pkg2 wurden nicht entfernt")
|
||||
assert_true(not (pkg1_extract_dir / "Samples").exists(), "Sample-Ordner pkg1 wurde nicht entfernt")
|
||||
assert_true(not (pkg2_extract_dir / "Samples").exists(), "Sample-Ordner pkg2 wurde nicht entfernt")
|
||||
|
||||
assert_true("pkg1_start" in extract_times and "pkg1_end" in extract_times, "Entpack-Zeiten für pkg1 fehlen")
|
||||
assert_true("pkg2" in download_starts, "Downloadstart für pkg2 fehlt")
|
||||
assert_true(
|
||||
download_starts["pkg2"] < extract_times["pkg1_end"],
|
||||
"Paket 2 startete nicht parallel zum Entpacken von Paket 1",
|
||||
)
|
||||
|
||||
manifest_data = json.loads(appmod.MANIFEST_FILE.read_text(encoding="utf-8"))
|
||||
assert_true(bool(manifest_data.get("finished")), "Manifest wurde nach Lauf nicht abgeschlossen")
|
||||
|
||||
with app.path_lock:
|
||||
app.reserved_target_keys.add("dummy-key")
|
||||
app.ui_queue.put(("controls", False))
|
||||
app._process_ui_queue()
|
||||
with app.path_lock:
|
||||
assert_true(len(app.reserved_target_keys) == 0, "reserved_target_keys wurden nicht bereinigt")
|
||||
|
||||
app._queue_status = original_queue_status
|
||||
app._download_single_link = original_download_single
|
||||
app._extract_archive = original_extract_archive
|
||||
|
||||
assert_true(any("Entpacken läuft parallel" in text for _, text in status_events), "Kein Parallel-Entpacken-Status geloggt")
|
||||
print("Self-check erfolgreich")
|
||||
finally:
|
||||
try:
|
||||
app.destroy()
|
||||
except Exception:
|
||||
pass
|
||||
messagebox.showerror = original_showerror
|
||||
messagebox.showwarning = original_showwarning
|
||||
messagebox.showinfo = original_showinfo
|
||||
messagebox.askyesno = original_askyesno
|
||||
appmod.CONFIG_FILE = original_config
|
||||
appmod.MANIFEST_FILE = original_manifest
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run()
|
||||
@ -1,35 +0,0 @@
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def main() -> int:
|
||||
if len(sys.argv) < 2:
|
||||
print("Usage: python scripts/set_version.py <version>")
|
||||
return 1
|
||||
|
||||
version = sys.argv[1].strip().lstrip("v")
|
||||
if not re.fullmatch(r"\d+(?:\.\d+){1,3}", version):
|
||||
print(f"Invalid version: {version}")
|
||||
return 1
|
||||
|
||||
target = Path(__file__).resolve().parents[1] / "real_debrid_downloader_gui.py"
|
||||
content = target.read_text(encoding="utf-8")
|
||||
updated, count = re.subn(
|
||||
r'^APP_VERSION\s*=\s*"[^"]+"\s*$',
|
||||
f'APP_VERSION = "{version}"',
|
||||
content,
|
||||
count=1,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
if count != 1:
|
||||
print("APP_VERSION marker not found")
|
||||
return 1
|
||||
|
||||
target.write_text(updated, encoding="utf-8")
|
||||
print(f"Set APP_VERSION to {version}")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
@ -1,24 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
const version = process.argv[2];
|
||||
if (!version) {
|
||||
console.error("Usage: node scripts/set_version_node.mjs <version>");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const root = process.cwd();
|
||||
|
||||
const packageJsonPath = path.join(root, "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
||||
|
||||
const constantsPath = path.join(root, "src", "main", "constants.ts");
|
||||
const constants = fs.readFileSync(constantsPath, "utf8").replace(
|
||||
/APP_VERSION = "[^"]+"/,
|
||||
`APP_VERSION = "${version}"`
|
||||
);
|
||||
fs.writeFileSync(constantsPath, constants, "utf8");
|
||||
|
||||
console.log(`Set version to ${version}`);
|
||||
@ -1,46 +1,133 @@
|
||||
import path from "node:path";
|
||||
import { app } from "electron";
|
||||
import { AddLinksPayload, AppSettings, ParsedPackageInput, UiSnapshot, UpdateCheckResult } from "../shared/types";
|
||||
import {
|
||||
AddLinksPayload,
|
||||
AppSettings,
|
||||
DuplicatePolicy,
|
||||
HistoryEntry,
|
||||
PackagePriority,
|
||||
ParsedPackageInput,
|
||||
SessionStats,
|
||||
StartConflictEntry,
|
||||
StartConflictResolutionResult,
|
||||
UiSnapshot,
|
||||
UpdateCheckResult,
|
||||
UpdateInstallProgress,
|
||||
UpdateInstallResult
|
||||
} from "../shared/types";
|
||||
import { importDlcContainers } from "./container";
|
||||
import { APP_VERSION, defaultSettings } from "./constants";
|
||||
import { APP_VERSION } from "./constants";
|
||||
import { DownloadManager } from "./download-manager";
|
||||
import { parseCollectorInput } from "./link-parser";
|
||||
import { configureLogger, logger } from "./logger";
|
||||
import { createStoragePaths, emptySession, loadSession, loadSettings, saveSettings } from "./storage";
|
||||
import { checkGitHubUpdate } from "./update";
|
||||
import { configureLogger, getLogFilePath, logger } from "./logger";
|
||||
import { initSessionLog, getSessionLogPath, shutdownSessionLog } from "./session-log";
|
||||
import { MegaWebFallback } from "./mega-web-fallback";
|
||||
import { addHistoryEntry, cancelPendingAsyncSaves, clearHistory, createStoragePaths, loadHistory, loadSession, loadSettings, normalizeLoadedSession, normalizeLoadedSessionTransientFields, normalizeSettings, removeHistoryEntry, saveSession, saveSettings } from "./storage";
|
||||
import { abortActiveUpdateDownload, checkGitHubUpdate, installLatestUpdate } from "./update";
|
||||
import { startDebugServer, stopDebugServer } from "./debug-server";
|
||||
|
||||
function sanitizeSettingsPatch(partial: Partial<AppSettings>): Partial<AppSettings> {
|
||||
const entries = Object.entries(partial || {}).filter(([, value]) => value !== undefined);
|
||||
return Object.fromEntries(entries) as Partial<AppSettings>;
|
||||
}
|
||||
|
||||
function settingsFingerprint(settings: AppSettings): string {
|
||||
return JSON.stringify(normalizeSettings(settings));
|
||||
}
|
||||
|
||||
export class AppController {
|
||||
private settings: AppSettings;
|
||||
|
||||
private manager: DownloadManager;
|
||||
|
||||
private megaWebFallback: MegaWebFallback;
|
||||
|
||||
private lastUpdateCheck: UpdateCheckResult | null = null;
|
||||
|
||||
private lastUpdateCheckAt = 0;
|
||||
|
||||
private storagePaths = createStoragePaths(path.join(app.getPath("userData"), "runtime"));
|
||||
|
||||
private onStateHandler: ((snapshot: UiSnapshot) => void) | null = null;
|
||||
|
||||
private autoResumePending = false;
|
||||
|
||||
public constructor() {
|
||||
configureLogger(this.storagePaths.baseDir);
|
||||
initSessionLog(this.storagePaths.baseDir);
|
||||
this.settings = loadSettings(this.storagePaths);
|
||||
const session = loadSession(this.storagePaths);
|
||||
this.manager = new DownloadManager(this.settings, session, this.storagePaths);
|
||||
this.megaWebFallback = new MegaWebFallback(() => ({
|
||||
login: this.settings.megaLogin,
|
||||
password: this.settings.megaPassword
|
||||
}));
|
||||
this.manager = new DownloadManager(this.settings, session, this.storagePaths, {
|
||||
megaWebUnrestrict: (link: string, signal?: AbortSignal) => this.megaWebFallback.unrestrict(link, signal),
|
||||
invalidateMegaSession: () => this.megaWebFallback.invalidateSession(),
|
||||
onHistoryEntry: (entry: HistoryEntry) => {
|
||||
addHistoryEntry(this.storagePaths, entry);
|
||||
}
|
||||
});
|
||||
this.manager.on("state", (snapshot: UiSnapshot) => {
|
||||
this.onState?.(snapshot);
|
||||
this.onStateHandler?.(snapshot);
|
||||
});
|
||||
logger.info(`App gestartet v${APP_VERSION}`);
|
||||
logger.info(`Log-Datei: ${getLogFilePath()}`);
|
||||
startDebugServer(this.manager, this.storagePaths.baseDir);
|
||||
|
||||
if (this.settings.autoResumeOnStart) {
|
||||
const snapshot = this.manager.getSnapshot();
|
||||
const hasPending = Object.values(snapshot.session.items).some((item) => item.status === "queued" || item.status === "reconnect_wait");
|
||||
if (hasPending && this.hasAnyProviderToken(this.settings)) {
|
||||
this.manager.start();
|
||||
logger.info("Auto-Resume beim Start aktiviert");
|
||||
if (hasPending) {
|
||||
void this.manager.getStartConflicts().then((conflicts) => {
|
||||
const hasConflicts = conflicts.length > 0;
|
||||
if (this.hasAnyProviderToken(this.settings) && !hasConflicts) {
|
||||
// If the onState handler is already set (renderer connected), start immediately.
|
||||
// Otherwise mark as pending so the onState setter triggers the start.
|
||||
if (this.onStateHandler) {
|
||||
logger.info("Auto-Resume beim Start aktiviert (nach Konflikt-Check)");
|
||||
void this.manager.start().catch((err) => logger.warn(`Auto-Resume Start Fehler: ${String(err)}`));
|
||||
} else {
|
||||
this.autoResumePending = true;
|
||||
logger.info("Auto-Resume beim Start vorgemerkt");
|
||||
}
|
||||
} else if (hasConflicts) {
|
||||
logger.info("Auto-Resume übersprungen: Start-Konflikte erkannt");
|
||||
}
|
||||
}).catch((err) => logger.warn(`getStartConflicts Fehler (constructor): ${String(err)}`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private hasAnyProviderToken(settings: AppSettings): boolean {
|
||||
return Boolean(settings.token.trim() || settings.megaToken.trim() || settings.bestToken.trim() || settings.allDebridToken.trim());
|
||||
return Boolean(
|
||||
settings.token.trim()
|
||||
|| (settings.megaLogin.trim() && settings.megaPassword.trim())
|
||||
|| settings.bestToken.trim()
|
||||
|| settings.allDebridToken.trim()
|
||||
|| (settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim())
|
||||
|| settings.oneFichierApiKey.trim()
|
||||
);
|
||||
}
|
||||
|
||||
public onState: ((snapshot: UiSnapshot) => void) | null = null;
|
||||
public get onState(): ((snapshot: UiSnapshot) => void) | null {
|
||||
return this.onStateHandler;
|
||||
}
|
||||
|
||||
public set onState(handler: ((snapshot: UiSnapshot) => void) | null) {
|
||||
this.onStateHandler = handler;
|
||||
if (handler) {
|
||||
handler(this.manager.getSnapshot());
|
||||
if (this.autoResumePending) {
|
||||
this.autoResumePending = false;
|
||||
void this.manager.start().catch((err) => logger.warn(`Auto-Resume Start Fehler: ${String(err)}`));
|
||||
logger.info("Auto-Resume beim Start aktiviert");
|
||||
} else {
|
||||
// Trigger pending extractions without starting the session
|
||||
this.manager.triggerIdleExtractions();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public getSnapshot(): UiSnapshot {
|
||||
return this.manager.getSnapshot();
|
||||
@ -55,18 +142,51 @@ export class AppController {
|
||||
}
|
||||
|
||||
public updateSettings(partial: Partial<AppSettings>): AppSettings {
|
||||
this.settings = {
|
||||
...defaultSettings(),
|
||||
const sanitizedPatch = sanitizeSettingsPatch(partial);
|
||||
const nextSettings = normalizeSettings({
|
||||
...this.settings,
|
||||
...partial
|
||||
};
|
||||
...sanitizedPatch
|
||||
});
|
||||
|
||||
if (settingsFingerprint(nextSettings) === settingsFingerprint(this.settings)) {
|
||||
return this.settings;
|
||||
}
|
||||
|
||||
// Preserve the live totalDownloadedAllTime from the download manager
|
||||
const liveSettings = this.manager.getSettings();
|
||||
nextSettings.totalDownloadedAllTime = Math.max(nextSettings.totalDownloadedAllTime || 0, liveSettings.totalDownloadedAllTime || 0);
|
||||
this.settings = nextSettings;
|
||||
saveSettings(this.storagePaths, this.settings);
|
||||
this.manager.setSettings(this.settings);
|
||||
return this.settings;
|
||||
}
|
||||
|
||||
public async checkUpdates(): Promise<UpdateCheckResult> {
|
||||
return checkGitHubUpdate(this.settings.updateRepo);
|
||||
const result = await checkGitHubUpdate(this.settings.updateRepo);
|
||||
if (!result.error) {
|
||||
this.lastUpdateCheck = result;
|
||||
this.lastUpdateCheckAt = Date.now();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public async installUpdate(onProgress?: (progress: UpdateInstallProgress) => void): Promise<UpdateInstallResult> {
|
||||
// Stop active downloads before installing. Extractions may continue briefly
|
||||
// until prepareForShutdown() is called during app quit.
|
||||
if (this.manager.isSessionRunning()) {
|
||||
this.manager.stop();
|
||||
}
|
||||
|
||||
const cacheAgeMs = Date.now() - this.lastUpdateCheckAt;
|
||||
const cached = this.lastUpdateCheck && !this.lastUpdateCheck.error && cacheAgeMs <= 10 * 60 * 1000
|
||||
? this.lastUpdateCheck
|
||||
: undefined;
|
||||
const result = await installLatestUpdate(this.settings.updateRepo, cached, onProgress);
|
||||
if (result.started) {
|
||||
this.lastUpdateCheck = null;
|
||||
this.lastUpdateCheckAt = 0;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public addLinks(payload: AddLinksPayload): { addedPackages: number; addedLinks: number; invalidCount: number } {
|
||||
@ -82,18 +202,35 @@ export class AppController {
|
||||
const packages = await importDlcContainers(filePaths);
|
||||
const merged: ParsedPackageInput[] = packages.map((pkg) => ({
|
||||
name: pkg.name,
|
||||
links: pkg.links
|
||||
links: pkg.links,
|
||||
...(pkg.fileNames ? { fileNames: pkg.fileNames } : {})
|
||||
}));
|
||||
const result = this.manager.addPackages(merged);
|
||||
return result;
|
||||
}
|
||||
|
||||
public async getStartConflicts(): Promise<StartConflictEntry[]> {
|
||||
return this.manager.getStartConflicts();
|
||||
}
|
||||
|
||||
public async resolveStartConflict(packageId: string, policy: DuplicatePolicy): Promise<StartConflictResolutionResult> {
|
||||
return this.manager.resolveStartConflict(packageId, policy);
|
||||
}
|
||||
|
||||
public clearAll(): void {
|
||||
this.manager.clearAll();
|
||||
}
|
||||
|
||||
public start(): void {
|
||||
this.manager.start();
|
||||
public async start(): Promise<void> {
|
||||
await this.manager.start();
|
||||
}
|
||||
|
||||
public async startPackages(packageIds: string[]): Promise<void> {
|
||||
await this.manager.startPackages(packageIds);
|
||||
}
|
||||
|
||||
public async startItems(itemIds: string[]): Promise<void> {
|
||||
await this.manager.startItems(itemIds);
|
||||
}
|
||||
|
||||
public stop(): void {
|
||||
@ -104,12 +241,144 @@ export class AppController {
|
||||
return this.manager.togglePause();
|
||||
}
|
||||
|
||||
public retryExtraction(packageId: string): void {
|
||||
this.manager.retryExtraction(packageId);
|
||||
}
|
||||
|
||||
public extractNow(packageId: string): void {
|
||||
this.manager.extractNow(packageId);
|
||||
}
|
||||
|
||||
public resetPackage(packageId: string): void {
|
||||
this.manager.resetPackage(packageId);
|
||||
}
|
||||
|
||||
public cancelPackage(packageId: string): void {
|
||||
this.manager.cancelPackage(packageId);
|
||||
}
|
||||
|
||||
public shutdown(): void {
|
||||
public renamePackage(packageId: string, newName: string): void {
|
||||
this.manager.renamePackage(packageId, newName);
|
||||
}
|
||||
|
||||
public reorderPackages(packageIds: string[]): void {
|
||||
this.manager.reorderPackages(packageIds);
|
||||
}
|
||||
|
||||
public removeItem(itemId: string): void {
|
||||
this.manager.removeItem(itemId);
|
||||
}
|
||||
|
||||
public togglePackage(packageId: string): void {
|
||||
this.manager.togglePackage(packageId);
|
||||
}
|
||||
|
||||
public exportQueue(): string {
|
||||
return this.manager.exportQueue();
|
||||
}
|
||||
|
||||
public importQueue(json: string): { addedPackages: number; addedLinks: number } {
|
||||
return this.manager.importQueue(json);
|
||||
}
|
||||
|
||||
public getSessionStats(): SessionStats {
|
||||
return this.manager.getSessionStats();
|
||||
}
|
||||
|
||||
public exportBackup(): string {
|
||||
const settings = { ...this.settings };
|
||||
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword", "oneFichierApiKey"];
|
||||
for (const key of SENSITIVE_KEYS) {
|
||||
const val = settings[key];
|
||||
if (typeof val === "string" && val.length > 0) {
|
||||
(settings as Record<string, unknown>)[key] = `***${val.slice(-4)}`;
|
||||
}
|
||||
}
|
||||
const session = this.manager.getSession();
|
||||
return JSON.stringify({ version: 1, settings, session }, null, 2);
|
||||
}
|
||||
|
||||
public importBackup(json: string): { restored: boolean; message: string } {
|
||||
let parsed: Record<string, unknown>;
|
||||
try {
|
||||
parsed = JSON.parse(json) as Record<string, unknown>;
|
||||
} catch {
|
||||
return { restored: false, message: "Ungültiges JSON" };
|
||||
}
|
||||
if (!parsed || typeof parsed !== "object" || !parsed.settings || !parsed.session) {
|
||||
return { restored: false, message: "Kein gültiges Backup (settings/session fehlen)" };
|
||||
}
|
||||
const importedSettings = parsed.settings as AppSettings;
|
||||
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword", "oneFichierApiKey"];
|
||||
for (const key of SENSITIVE_KEYS) {
|
||||
const val = (importedSettings as Record<string, unknown>)[key];
|
||||
if (typeof val === "string" && val.startsWith("***")) {
|
||||
(importedSettings as Record<string, unknown>)[key] = (this.settings as Record<string, unknown>)[key];
|
||||
}
|
||||
}
|
||||
const restoredSettings = normalizeSettings(importedSettings);
|
||||
this.settings = restoredSettings;
|
||||
saveSettings(this.storagePaths, this.settings);
|
||||
this.manager.setSettings(this.settings);
|
||||
// Full stop including extraction abort — the old session is being replaced,
|
||||
// so no extraction tasks from it should keep running.
|
||||
this.manager.stop();
|
||||
this.manager.abortAllPostProcessing();
|
||||
// Cancel any deferred persist timer and queued async writes so the old
|
||||
// in-memory session does not overwrite the restored session file on disk.
|
||||
this.manager.clearPersistTimer();
|
||||
cancelPendingAsyncSaves();
|
||||
const restoredSession = normalizeLoadedSessionTransientFields(
|
||||
normalizeLoadedSession(parsed.session)
|
||||
);
|
||||
saveSession(this.storagePaths, restoredSession);
|
||||
// Prevent prepareForShutdown from overwriting the restored session file
|
||||
// with the old in-memory session when the app quits after backup restore.
|
||||
this.manager.skipShutdownPersist = true;
|
||||
// Block all persistence (including persistSoon from any IPC operations
|
||||
// the user might trigger before restarting) to protect the restored backup.
|
||||
this.manager.blockAllPersistence = true;
|
||||
return { restored: true, message: "Backup wiederhergestellt. Bitte App neustarten." };
|
||||
}
|
||||
|
||||
public getSessionLogPath(): string | null {
|
||||
return getSessionLogPath();
|
||||
}
|
||||
|
||||
public shutdown(): void {
|
||||
stopDebugServer();
|
||||
abortActiveUpdateDownload();
|
||||
this.manager.prepareForShutdown();
|
||||
this.megaWebFallback.dispose();
|
||||
shutdownSessionLog();
|
||||
logger.info("App beendet");
|
||||
}
|
||||
|
||||
public getHistory(): HistoryEntry[] {
|
||||
return loadHistory(this.storagePaths);
|
||||
}
|
||||
|
||||
public clearHistory(): void {
|
||||
clearHistory(this.storagePaths);
|
||||
}
|
||||
|
||||
public setPackagePriority(packageId: string, priority: PackagePriority): void {
|
||||
this.manager.setPackagePriority(packageId, priority);
|
||||
}
|
||||
|
||||
public skipItems(itemIds: string[]): void {
|
||||
this.manager.skipItems(itemIds);
|
||||
}
|
||||
|
||||
public resetItems(itemIds: string[]): void {
|
||||
this.manager.resetItems(itemIds);
|
||||
}
|
||||
|
||||
public removeHistoryEntry(entryId: string): void {
|
||||
removeHistoryEntry(this.storagePaths, entryId);
|
||||
}
|
||||
|
||||
public addToHistory(entry: HistoryEntry): void {
|
||||
addHistoryEntry(this.storagePaths, entry);
|
||||
}
|
||||
}
|
||||
|
||||
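For context on the new onState getter/setter pair above: the renderer only starts receiving snapshots, and any deferred auto-resume only fires, once the main process assigns a handler. A minimal wiring sketch, with the IPC channel name and module paths as assumed placeholders, might look like this:

// Sketch, as it might appear in the main-process bootstrap (channel name and paths assumed).
import { BrowserWindow } from "electron";
import { AppController } from "./controller"; // assumed module path
import { UiSnapshot } from "../shared/types";

const controller = new AppController();

export function attachRenderer(win: BrowserWindow): void {
  controller.onState = (snapshot: UiSnapshot) => {
    // The setter immediately replays the current snapshot and, if auto-resume was
    // deferred in the constructor, starts the pending session (see code above).
    win.webContents.send("ui-state", snapshot);
  };
}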
66
src/main/backup-crypto.ts
Normal file
@ -0,0 +1,66 @@
import crypto from "node:crypto";

export const SENSITIVE_KEYS = [
  "token",
  "megaLogin",
  "megaPassword",
  "bestToken",
  "allDebridToken",
  "archivePasswordList"
] as const;

export type SensitiveKey = (typeof SENSITIVE_KEYS)[number];

export interface EncryptedCredentials {
  salt: string;
  iv: string;
  tag: string;
  data: string;
}

const PBKDF2_ITERATIONS = 100_000;
const KEY_LENGTH = 32; // 256 bit
const IV_LENGTH = 12; // 96 bit for GCM
const SALT_LENGTH = 16;

function deriveKey(username: string, salt: Buffer): Buffer {
  return crypto.pbkdf2Sync(username, salt, PBKDF2_ITERATIONS, KEY_LENGTH, "sha256");
}

export function encryptCredentials(
  fields: Record<string, string>,
  username: string
): EncryptedCredentials {
  const salt = crypto.randomBytes(SALT_LENGTH);
  const iv = crypto.randomBytes(IV_LENGTH);
  const key = deriveKey(username, salt);

  const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
  const plaintext = JSON.stringify(fields);
  const encrypted = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  const tag = cipher.getAuthTag();

  return {
    salt: salt.toString("hex"),
    iv: iv.toString("hex"),
    tag: tag.toString("hex"),
    data: encrypted.toString("hex")
  };
}

export function decryptCredentials(
  encrypted: EncryptedCredentials,
  username: string
): Record<string, string> {
  const salt = Buffer.from(encrypted.salt, "hex");
  const iv = Buffer.from(encrypted.iv, "hex");
  const tag = Buffer.from(encrypted.tag, "hex");
  const data = Buffer.from(encrypted.data, "hex");
  const key = deriveKey(username, salt);

  const decipher = crypto.createDecipheriv("aes-256-gcm", key, iv);
  decipher.setAuthTag(tag);
  const decrypted = Buffer.concat([decipher.update(data), decipher.final()]);

  return JSON.parse(decrypted.toString("utf8")) as Record<string, string>;
}
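A possible round trip with these helpers, using obviously made-up values, would be the sketch below. Since the key is derived from the supplied username rather than a user-chosen passphrase, this is best read as obfuscation of exported credentials rather than strong protection.

// Round-trip sketch with illustrative values; import path assumed relative to src/main.
import { encryptCredentials, decryptCredentials } from "./backup-crypto";

const username = "alice";
const blob = encryptCredentials({ token: "rd-token", megaLogin: "user@example.com" }, username);
// blob carries hex-encoded salt, iv, GCM auth tag and ciphertext.
const restored = decryptCredentials(blob, username);
console.log(restored.token); // "rd-token"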
@ -1,17 +1,23 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { ARCHIVE_TEMP_EXTENSIONS, LINK_ARTIFACT_EXTENSIONS, RAR_SPLIT_RE, SAMPLE_DIR_NAMES, SAMPLE_TOKEN_RE, SAMPLE_VIDEO_EXTENSIONS } from "./constants";
|
||||
import { ARCHIVE_TEMP_EXTENSIONS, LINK_ARTIFACT_EXTENSIONS, MAX_LINK_ARTIFACT_BYTES, RAR_SPLIT_RE, SAMPLE_DIR_NAMES, SAMPLE_TOKEN_RE, SAMPLE_VIDEO_EXTENSIONS } from "./constants";
|
||||
|
||||
async function yieldToLoop(): Promise<void> {
|
||||
await new Promise<void>((resolve) => {
|
||||
setTimeout(resolve, 0);
|
||||
});
|
||||
}
|
||||
|
||||
export function isArchiveOrTempFile(filePath: string): boolean {
|
||||
const lower = filePath.toLowerCase();
|
||||
const ext = path.extname(lower);
|
||||
const lowerName = path.basename(filePath).toLowerCase();
|
||||
const ext = path.extname(lowerName);
|
||||
if (ARCHIVE_TEMP_EXTENSIONS.has(ext)) {
|
||||
return true;
|
||||
}
|
||||
if (lower.includes(".part") && lower.endsWith(".rar")) {
|
||||
if (lowerName.includes(".part") && lowerName.endsWith(".rar")) {
|
||||
return true;
|
||||
}
|
||||
return RAR_SPLIT_RE.test(lower);
|
||||
return RAR_SPLIT_RE.test(lowerName);
|
||||
}
|
||||
|
||||
export function cleanupCancelledPackageArtifacts(packageDir: string): number {
|
||||
@ -22,9 +28,11 @@ export function cleanupCancelledPackageArtifacts(packageDir: string): number {
|
||||
const stack = [packageDir];
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop() as string;
|
||||
for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
|
||||
let entries: fs.Dirent[] = [];
|
||||
try { entries = fs.readdirSync(current, { withFileTypes: true }); } catch { continue; }
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
if (entry.isDirectory() && !entry.isSymbolicLink()) {
|
||||
stack.push(full);
|
||||
} else if (entry.isFile() && isArchiveOrTempFile(full)) {
|
||||
try {
|
||||
@ -39,17 +47,62 @@ export function cleanupCancelledPackageArtifacts(packageDir: string): number {
|
||||
return removed;
|
||||
}
|
||||
|
||||
export function removeDownloadLinkArtifacts(extractDir: string): number {
|
||||
if (!fs.existsSync(extractDir)) {
|
||||
export async function cleanupCancelledPackageArtifactsAsync(packageDir: string): Promise<number> {
|
||||
try {
|
||||
await fs.promises.access(packageDir, fs.constants.F_OK);
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let removed = 0;
|
||||
let touched = 0;
|
||||
const stack = [packageDir];
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop() as string;
|
||||
let entries: fs.Dirent[] = [];
|
||||
try {
|
||||
entries = await fs.promises.readdir(current, { withFileTypes: true });
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory() && !entry.isSymbolicLink()) {
|
||||
stack.push(full);
|
||||
} else if (entry.isFile() && isArchiveOrTempFile(full)) {
|
||||
try {
|
||||
await fs.promises.rm(full, { force: true });
|
||||
removed += 1;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
touched += 1;
|
||||
if (touched % 80 === 0) {
|
||||
await yieldToLoop();
|
||||
}
|
||||
}
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
|
||||
export async function removeDownloadLinkArtifacts(extractDir: string): Promise<number> {
|
||||
try {
|
||||
await fs.promises.access(extractDir);
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
let removed = 0;
|
||||
const stack = [extractDir];
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop() as string;
|
||||
for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
|
||||
let entries: fs.Dirent[] = [];
|
||||
try { entries = await fs.promises.readdir(current, { withFileTypes: true }); } catch { continue; }
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
if (entry.isDirectory() && !entry.isSymbolicLink()) {
|
||||
stack.push(full);
|
||||
continue;
|
||||
}
|
||||
@ -63,8 +116,11 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
|
||||
if (!shouldDelete && [".txt", ".html", ".htm", ".nfo"].includes(ext)) {
|
||||
if (/[._\- ](links?|downloads?|urls?|dlc)([._\- ]|$)/i.test(name)) {
|
||||
try {
|
||||
const text = fs.readFileSync(full, "utf8");
|
||||
const stat = await fs.promises.stat(full);
|
||||
if (stat.size <= MAX_LINK_ARTIFACT_BYTES) {
|
||||
const text = await fs.promises.readFile(full, "utf8");
|
||||
shouldDelete = /https?:\/\//i.test(text);
|
||||
}
|
||||
} catch {
|
||||
shouldDelete = false;
|
||||
}
|
||||
@ -73,7 +129,7 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
|
||||
|
||||
if (shouldDelete) {
|
||||
try {
|
||||
fs.rmSync(full, { force: true });
|
||||
await fs.promises.rm(full, { force: true });
|
||||
removed += 1;
|
||||
} catch {
|
||||
// ignore
|
||||
@ -84,38 +140,77 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
|
||||
return removed;
|
||||
}
|
||||
|
||||
export function removeSampleArtifacts(extractDir: string): { files: number; dirs: number } {
|
||||
if (!fs.existsSync(extractDir)) {
|
||||
export async function removeSampleArtifacts(extractDir: string): Promise<{ files: number; dirs: number }> {
|
||||
try {
|
||||
await fs.promises.access(extractDir);
|
||||
} catch {
|
||||
return { files: 0, dirs: 0 };
|
||||
}
|
||||
|
||||
let removedFiles = 0;
|
||||
let removedDirs = 0;
|
||||
const allDirs: string[] = [];
|
||||
const sampleDirs: string[] = [];
|
||||
const stack = [extractDir];
|
||||
|
||||
const countFilesRecursive = async (rootDir: string): Promise<number> => {
|
||||
let count = 0;
|
||||
const dirs = [rootDir];
|
||||
while (dirs.length > 0) {
|
||||
const current = dirs.pop() as string;
|
||||
let entries: fs.Dirent[] = [];
|
||||
try {
|
||||
entries = await fs.promises.readdir(current, { withFileTypes: true });
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
try {
|
||||
const stat = await fs.promises.lstat(full);
|
||||
if (stat.isSymbolicLink()) {
|
||||
continue;
|
||||
}
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
dirs.push(full);
|
||||
} else if (entry.isFile()) {
|
||||
count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
return count;
|
||||
};
|
||||
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop() as string;
|
||||
allDirs.push(current);
|
||||
for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
|
||||
let entries: fs.Dirent[] = [];
|
||||
try { entries = await fs.promises.readdir(current, { withFileTypes: true }); } catch { continue; }
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory() || entry.isSymbolicLink()) {
|
||||
const base = entry.name.toLowerCase();
|
||||
if (SAMPLE_DIR_NAMES.has(base)) {
|
||||
sampleDirs.push(full);
|
||||
continue;
|
||||
}
|
||||
if (entry.isDirectory()) {
|
||||
stack.push(full);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parent = path.basename(path.dirname(full)).toLowerCase();
|
||||
const stem = path.parse(entry.name).name.toLowerCase();
|
||||
const ext = path.extname(entry.name).toLowerCase();
|
||||
const inSampleDir = SAMPLE_DIR_NAMES.has(parent);
|
||||
const isSampleVideo = SAMPLE_VIDEO_EXTENSIONS.has(ext) && SAMPLE_TOKEN_RE.test(stem);
|
||||
|
||||
if (inSampleDir || isSampleVideo) {
|
||||
if (isSampleVideo) {
|
||||
try {
|
||||
fs.rmSync(full, { force: true });
|
||||
await fs.promises.rm(full, { force: true });
|
||||
removedFiles += 1;
|
||||
} catch {
|
||||
// ignore
|
||||
@@ -124,17 +219,18 @@ export function removeSampleArtifacts(extractDir: string): { files: number; dirs
|
||||
}
|
||||
}
|
||||
|
||||
allDirs.sort((a, b) => b.length - a.length);
|
||||
for (const dir of allDirs) {
|
||||
if (dir === extractDir) {
|
||||
continue;
|
||||
}
|
||||
const base = path.basename(dir).toLowerCase();
|
||||
if (!SAMPLE_DIR_NAMES.has(base)) {
|
||||
continue;
|
||||
}
|
||||
sampleDirs.sort((a, b) => b.length - a.length);
|
||||
for (const dir of sampleDirs) {
|
||||
try {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
const stat = await fs.promises.lstat(dir);
|
||||
if (stat.isSymbolicLink()) {
|
||||
await fs.promises.rm(dir, { force: true });
|
||||
removedDirs += 1;
|
||||
continue;
|
||||
}
|
||||
const filesInDir = await countFilesRecursive(dir);
|
||||
await fs.promises.rm(dir, { recursive: true, force: true });
|
||||
removedFiles += filesInDir;
|
||||
removedDirs += 1;
|
||||
} catch {
|
||||
// ignore
|
||||
|
||||
@@ -1,36 +1,54 @@
|
||||
import path from "node:path";
|
||||
import os from "node:os";
|
||||
import { AppSettings } from "../shared/types";
|
||||
import packageJson from "../../package.json";
|
||||
|
||||
export const APP_NAME = "Debrid Download Manager";
|
||||
export const APP_VERSION = "1.1.15";
|
||||
export const APP_NAME = "Multi Debrid Downloader";
|
||||
export const APP_VERSION: string = packageJson.version;
|
||||
export const API_BASE_URL = "https://api.real-debrid.com/rest/1.0";
|
||||
|
||||
export const DCRYPT_UPLOAD_URL = "https://dcrypt.it/decrypt/upload";
|
||||
export const DLC_SERVICE_URL = "http://service.jdownloader.org/dlcrypt/service.php?srcType=dlc&destType=pylo&data={KEY}";
|
||||
export const DCRYPT_PASTE_URL = "https://dcrypt.it/decrypt/paste";
|
||||
export const DLC_SERVICE_URL = "https://service.jdownloader.org/dlcrypt/service.php?srcType=dlc&destType=pylo&data={KEY}";
|
||||
export const DLC_AES_KEY = Buffer.from("cb99b5cbc24db398", "utf8");
|
||||
export const DLC_AES_IV = Buffer.from("9bc24cb995cb8db3", "utf8");
|
||||
|
||||
export const REQUEST_RETRIES = 3;
|
||||
export const CHUNK_SIZE = 512 * 1024;
|
||||
|
||||
export const WRITE_BUFFER_SIZE = 512 * 1024; // 512 KB write buffer (JDownloader: 500 KB)
|
||||
export const WRITE_FLUSH_TIMEOUT_MS = 2000; // 2s flush timeout
|
||||
export const ALLOCATION_UNIT_SIZE = 4096; // 4 KB NTFS alignment
|
||||
export const STREAM_HIGH_WATER_MARK = 512 * 1024; // 512 KB stream buffer — lower than before (2 MB) so backpressure triggers sooner when disk is slow
|
||||
export const DISK_BUSY_THRESHOLD_MS = 300; // Show "Warte auf Festplatte" if writableLength > 0 for this long
|
||||
|
||||
export const SAMPLE_DIR_NAMES = new Set(["sample", "samples"]);
|
||||
export const SAMPLE_VIDEO_EXTENSIONS = new Set([".mkv", ".mp4", ".avi", ".mov", ".wmv", ".m4v", ".ts", ".m2ts", ".webm"]);
|
||||
export const LINK_ARTIFACT_EXTENSIONS = new Set([".url", ".webloc", ".dlc", ".rsdf", ".ccf"]);
|
||||
export const SAMPLE_TOKEN_RE = /(^|[._\-\s])sample([._\-\s]|$)/i;
|
||||
|
||||
export const ARCHIVE_TEMP_EXTENSIONS = new Set([".rar", ".zip", ".7z", ".tmp", ".part"]);
|
||||
export const RAR_SPLIT_RE = /\.r\d{2}$/i;
|
||||
export const ARCHIVE_TEMP_EXTENSIONS = new Set([".rar", ".zip", ".7z", ".tmp", ".part", ".tar", ".gz", ".bz2", ".xz", ".rev"]);
|
||||
export const RAR_SPLIT_RE = /\.r\d{2,3}$/i;
|
||||
|
||||
export const DEFAULT_UPDATE_REPO = "Sucukdeluxe/real-debrid-downloader";
|
||||
export const MAX_MANIFEST_FILE_BYTES = 5 * 1024 * 1024;
|
||||
export const MAX_LINK_ARTIFACT_BYTES = 256 * 1024;
|
||||
export const SPEED_WINDOW_SECONDS = 1;
|
||||
export const CLIPBOARD_POLL_INTERVAL_MS = 2000;
|
||||
|
||||
export const DEFAULT_UPDATE_REPO = "Administrator/real-debrid-downloader";
|
||||
|
||||
export function defaultSettings(): AppSettings {
|
||||
const baseDir = path.join(os.homedir(), "Downloads", "RealDebrid");
|
||||
return {
|
||||
token: "",
|
||||
megaToken: "",
|
||||
megaLogin: "",
|
||||
megaPassword: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "",
|
||||
ddownloadLogin: "",
|
||||
ddownloadPassword: "",
|
||||
oneFichierApiKey: "",
|
||||
archivePasswordList: "",
|
||||
rememberToken: true,
|
||||
providerPrimary: "realdebrid",
|
||||
providerSecondary: "megadebrid",
|
||||
@@ -39,7 +57,10 @@ export function defaultSettings(): AppSettings {
|
||||
outputDir: baseDir,
|
||||
packageName: "",
|
||||
autoExtract: true,
|
||||
autoRename4sf4sj: false,
|
||||
extractDir: path.join(baseDir, "_entpackt"),
|
||||
collectMkvToLibrary: false,
|
||||
mkvLibraryDir: path.join(baseDir, "_mkv"),
|
||||
createExtractSubfolder: true,
|
||||
hybridExtract: true,
|
||||
cleanupMode: "none",
|
||||
@@ -52,10 +73,23 @@ export function defaultSettings(): AppSettings {
|
||||
reconnectWaitSeconds: 45,
|
||||
completedCleanupPolicy: "never",
|
||||
maxParallel: 4,
|
||||
maxParallelExtract: 2,
|
||||
retryLimit: 0,
|
||||
speedLimitEnabled: false,
|
||||
speedLimitKbps: 0,
|
||||
speedLimitMode: "global",
|
||||
updateRepo: DEFAULT_UPDATE_REPO,
|
||||
autoUpdateCheck: true
|
||||
autoUpdateCheck: true,
|
||||
clipboardWatch: false,
|
||||
minimizeToTray: false,
|
||||
theme: "dark" as const,
|
||||
collapseNewPackages: true,
|
||||
autoSkipExtracted: false,
|
||||
confirmDeleteSelection: true,
|
||||
totalDownloadedAllTime: 0,
|
||||
bandwidthSchedules: [],
|
||||
columnOrder: ["name", "size", "progress", "hoster", "account", "prio", "status", "speed"],
|
||||
extractCpuPriority: "high",
|
||||
autoExtractWhenStopped: true
|
||||
};
|
||||
}
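// Sketch (assumption, not part of this diff): this revision adds several new settings
// keys (clipboardWatch, minimizeToTray, theme, bandwidthSchedules, ...), so whatever
// loads a previously saved settings file will want to overlay it on defaultSettings()
// so every key is guaranteed to exist. loadSettingsSketch and the file path are
// hypothetical; assumes `import fs from "node:fs"`.
function loadSettingsSketch(settingsPath: string): AppSettings {
  let stored: Partial<AppSettings> = {};
  try {
    stored = JSON.parse(fs.readFileSync(settingsPath, "utf8")) as Partial<AppSettings>;
  } catch {
    // missing or unreadable file: fall back to pure defaults
  }
  return { ...defaultSettings(), ...stored };
}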
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import crypto from "node:crypto";
|
||||
import { DCRYPT_UPLOAD_URL, DLC_AES_IV, DLC_AES_KEY, DLC_SERVICE_URL } from "./constants";
|
||||
import { DCRYPT_PASTE_URL, DCRYPT_UPLOAD_URL, DLC_AES_IV, DLC_AES_KEY, DLC_SERVICE_URL } from "./constants";
|
||||
import { compactErrorText, inferPackageNameFromLinks, isHttpLink, sanitizeFilename, uniquePreserveOrder } from "./utils";
|
||||
import { ParsedPackageInput } from "../shared/types";
|
||||
|
||||
const MAX_DLC_FILE_BYTES = 8 * 1024 * 1024;
|
||||
|
||||
function isContainerSizeValidationError(error: unknown): boolean {
|
||||
const text = compactErrorText(error);
|
||||
return /zu groß/i.test(text) || /DLC-Datei ungültig oder zu groß/i.test(text);
|
||||
}
|
||||
|
||||
function decodeDcryptPayload(responseText: string): unknown {
|
||||
let text = String(responseText || "").trim();
|
||||
const m = text.match(/<textarea[^>]*>([\s\S]*?)<\/textarea>/i);
|
||||
@@ -62,6 +69,14 @@ function decryptRcPayload(base64Rc: string): Buffer {
|
||||
return Buffer.concat([decipher.update(rcBytes), decipher.final()]);
|
||||
}
|
||||
|
||||
function readDlcFileWithLimit(filePath: string): Buffer {
|
||||
const stat = fs.statSync(filePath);
|
||||
if (stat.size <= 0 || stat.size > MAX_DLC_FILE_BYTES) {
|
||||
throw new Error(`DLC-Datei ungültig oder zu groß (${Math.floor(stat.size)} B)`);
|
||||
}
|
||||
return fs.readFileSync(filePath);
|
||||
}
|
||||
|
||||
function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] {
|
||||
const packages: ParsedPackageInput[] = [];
|
||||
const packageRegex = /<package\s+[^>]*name="([^"]*)"[^>]*>([\s\S]*?)<\/package>/gi;
|
||||
@@ -79,6 +94,36 @@ function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] {
|
||||
}
|
||||
|
||||
const links: string[] = [];
|
||||
const fileNames: string[] = [];
|
||||
const fileRegex = /<file>([\s\S]*?)<\/file>/gi;
|
||||
for (let fm = fileRegex.exec(packageBody); fm; fm = fileRegex.exec(packageBody)) {
|
||||
const fileBody = fm[1] || "";
|
||||
const urlMatch = fileBody.match(/<url>(.*?)<\/url>/i);
|
||||
if (!urlMatch) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const url = Buffer.from((urlMatch[1] || "").trim(), "base64").toString("utf8").trim();
|
||||
if (!isHttpLink(url)) {
|
||||
continue;
|
||||
}
|
||||
let fileName = "";
|
||||
const fnMatch = fileBody.match(/<filename>(.*?)<\/filename>/i);
|
||||
if (fnMatch?.[1]) {
|
||||
try {
|
||||
fileName = Buffer.from(fnMatch[1].trim(), "base64").toString("utf8").trim();
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
links.push(url);
|
||||
fileNames.push(sanitizeFilename(fileName));
|
||||
} catch {
|
||||
// skip broken entries
|
||||
}
|
||||
}
|
||||
|
||||
if (links.length === 0) {
|
||||
const urlRegex = /<url>(.*?)<\/url>/gi;
|
||||
for (let um = urlRegex.exec(packageBody); um; um = urlRegex.exec(packageBody)) {
|
||||
try {
|
||||
@@ -90,13 +135,19 @@ function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] {
|
||||
// skip broken entries
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const uniqueLinks = uniquePreserveOrder(links);
|
||||
const hasFileNames = fileNames.some((fn) => fn.length > 0);
|
||||
if (uniqueLinks.length > 0) {
|
||||
packages.push({
|
||||
const pkg: ParsedPackageInput = {
|
||||
name: sanitizeFilename(packageName || inferPackageNameFromLinks(uniqueLinks) || `Paket-${packages.length + 1}`),
|
||||
links: uniqueLinks
|
||||
});
|
||||
};
|
||||
if (hasFileNames) {
|
||||
pkg.fileNames = fileNames;
|
||||
}
|
||||
packages.push(pkg);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -104,7 +155,7 @@ function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] {
|
||||
}
|
||||
|
||||
async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]> {
|
||||
const content = fs.readFileSync(filePath, "ascii").trim();
|
||||
const content = readDlcFileWithLimit(filePath).toString("ascii").trim();
|
||||
if (content.length < 89) {
|
||||
return [];
|
||||
}
|
||||
@@ -113,7 +164,7 @@ async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]>
|
||||
const dlcData = content.slice(0, -88);
|
||||
|
||||
const rcUrl = DLC_SERVICE_URL.replace("{KEY}", encodeURIComponent(dlcKey));
|
||||
const rcResponse = await fetch(rcUrl, { method: "GET" });
|
||||
const rcResponse = await fetch(rcUrl, { method: "GET", signal: AbortSignal.timeout(30000) });
|
||||
if (!rcResponse.ok) {
|
||||
return [];
|
||||
}
|
||||
@@ -129,59 +180,140 @@ async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]>
|
||||
decipher.setAutoPadding(false);
|
||||
let decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
|
||||
|
||||
if (decrypted.length === 0) {
|
||||
return [];
|
||||
}
|
||||
const pad = decrypted[decrypted.length - 1];
|
||||
if (pad > 0 && pad <= 16) {
|
||||
if (pad > 0 && pad <= 16 && pad <= decrypted.length) {
|
||||
let validPad = true;
|
||||
for (let index = 1; index <= pad; index += 1) {
|
||||
if (decrypted[decrypted.length - index] !== pad) {
|
||||
validPad = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (validPad) {
|
||||
decrypted = decrypted.subarray(0, decrypted.length - pad);
|
||||
}
|
||||
}
|
||||
|
||||
const xmlData = Buffer.from(decrypted.toString("utf8"), "base64").toString("utf8");
|
||||
return parsePackagesFromDlcXml(xmlData);
|
||||
}
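// As read from the hunks above, decryptDlcLocal() splits the container into the last
// 88 characters (the key sent to the jdownloader service) and the remaining payload,
// decrypts the returned "rc" blob with the static DLC_AES_KEY/DLC_AES_IV, uses that
// result as key and IV for the payload, strips the PKCS#7 padding by hand and finally
// base64-decodes the plaintext into the DLC XML. The inline padding removal could also
// live in a small helper; a sketch (not part of this diff):
function stripPkcs7PaddingSketch(data: Buffer): Buffer {
  if (data.length === 0) {
    return data;
  }
  const pad = data[data.length - 1];
  if (pad === 0 || pad > 16 || pad > data.length) {
    return data; // last byte is not a plausible padding length
  }
  for (let index = 1; index <= pad; index += 1) {
    if (data[data.length - index] !== pad) {
      return data; // padding bytes are inconsistent, keep the buffer untouched
    }
  }
  return data.subarray(0, data.length - pad);
}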
|
||||
|
||||
async function decryptDlcViaDcrypt(filePath: string): Promise<ParsedPackageInput[]> {
|
||||
const fileName = path.basename(filePath);
|
||||
const blob = new Blob([fs.readFileSync(filePath)]);
|
||||
function extractLinksFromResponse(text: string): string[] {
|
||||
const payload = decodeDcryptPayload(text);
|
||||
let links = extractUrlsRecursive(payload);
|
||||
if (links.length === 0) {
|
||||
links = extractUrlsRecursive(text);
|
||||
}
|
||||
return uniquePreserveOrder(links.filter((l) => isHttpLink(l)));
|
||||
}
|
||||
|
||||
async function tryDcryptUpload(fileContent: Buffer, fileName: string): Promise<string[] | null> {
|
||||
const blob = new Blob([new Uint8Array(fileContent)]);
|
||||
const form = new FormData();
|
||||
form.set("dlcfile", blob, fileName);
|
||||
|
||||
const response = await fetch(DCRYPT_UPLOAD_URL, {
|
||||
method: "POST",
|
||||
body: form
|
||||
body: form,
|
||||
signal: AbortSignal.timeout(30000)
|
||||
});
|
||||
if (response.status === 413) {
|
||||
return null;
|
||||
}
|
||||
const text = await response.text();
|
||||
if (!response.ok) {
|
||||
throw new Error(compactErrorText(text));
|
||||
}
|
||||
const payload = decodeDcryptPayload(text);
|
||||
let packages = extractPackagesFromPayload(payload);
|
||||
if (packages.length === 1) {
|
||||
const regrouped = groupLinksByName(packages[0].links);
|
||||
if (regrouped.length > 1) {
|
||||
packages = regrouped;
|
||||
return extractLinksFromResponse(text);
|
||||
}
|
||||
|
||||
async function tryDcryptPaste(fileContent: Buffer): Promise<string[] | null> {
|
||||
const form = new FormData();
|
||||
form.set("content", fileContent.toString("ascii"));
|
||||
|
||||
const response = await fetch(DCRYPT_PASTE_URL, {
|
||||
method: "POST",
|
||||
body: form,
|
||||
signal: AbortSignal.timeout(30000)
|
||||
});
|
||||
if (response.status === 413) {
|
||||
return null;
|
||||
}
|
||||
if (packages.length === 0) {
|
||||
packages = groupLinksByName(extractUrlsRecursive(text));
|
||||
const text = await response.text();
|
||||
if (!response.ok) {
|
||||
throw new Error(compactErrorText(text));
|
||||
}
|
||||
return packages;
|
||||
return extractLinksFromResponse(text);
|
||||
}
|
||||
|
||||
async function decryptDlcViaDcrypt(filePath: string): Promise<ParsedPackageInput[]> {
|
||||
const fileContent = readDlcFileWithLimit(filePath);
|
||||
const fileName = path.basename(filePath);
|
||||
const packageName = sanitizeFilename(path.basename(filePath, ".dlc")) || "Paket";
|
||||
|
||||
let links = await tryDcryptUpload(fileContent, fileName);
|
||||
if (links === null) {
|
||||
links = await tryDcryptPaste(fileContent);
|
||||
}
|
||||
if (links === null) {
|
||||
throw new Error("DLC-Datei zu groß für dcrypt.it");
|
||||
}
|
||||
if (links.length === 0) {
|
||||
return [];
|
||||
}
|
||||
return [{ name: packageName, links }];
|
||||
}
|
||||
|
||||
export async function importDlcContainers(filePaths: string[]): Promise<ParsedPackageInput[]> {
|
||||
const out: ParsedPackageInput[] = [];
|
||||
const failures: string[] = [];
|
||||
let sawDlc = false;
|
||||
for (const filePath of filePaths) {
|
||||
if (path.extname(filePath).toLowerCase() !== ".dlc") {
|
||||
continue;
|
||||
}
|
||||
sawDlc = true;
|
||||
let packages: ParsedPackageInput[] = [];
|
||||
let fileFailed = false;
|
||||
let fileFailureReasons: string[] = [];
|
||||
try {
|
||||
packages = await decryptDlcLocal(filePath);
|
||||
} catch {
|
||||
} catch (error) {
|
||||
if (isContainerSizeValidationError(error)) {
|
||||
failures.push(`${path.basename(filePath)}: ${compactErrorText(error)}`);
|
||||
continue;
|
||||
}
|
||||
fileFailed = true;
|
||||
fileFailureReasons.push(`lokal: ${compactErrorText(error)}`);
|
||||
packages = [];
|
||||
}
|
||||
if (packages.length === 0) {
|
||||
try {
|
||||
packages = await decryptDlcViaDcrypt(filePath);
|
||||
} catch (error) {
|
||||
if (isContainerSizeValidationError(error)) {
|
||||
failures.push(`${path.basename(filePath)}: ${compactErrorText(error)}`);
|
||||
continue;
|
||||
}
|
||||
fileFailed = true;
|
||||
fileFailureReasons.push(`dcrypt: ${compactErrorText(error)}`);
|
||||
packages = [];
|
||||
}
|
||||
}
|
||||
if (packages.length === 0 && fileFailed) {
|
||||
failures.push(`${path.basename(filePath)}: ${fileFailureReasons.join("; ")}`);
|
||||
}
|
||||
out.push(...packages);
|
||||
}
|
||||
|
||||
if (out.length === 0 && sawDlc && failures.length > 0) {
|
||||
const details = failures.slice(0, 2).join(" | ");
|
||||
const suffix = failures.length > 2 ? ` (+${failures.length - 2} weitere)` : "";
|
||||
throw new Error(`DLC konnte nicht importiert werden: ${details}${suffix}`);
|
||||
}
|
||||
|
||||
return out;
|
||||
}
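// Usage sketch (assumption, not part of this diff): importDlcContainers() accepts
// absolute paths, silently skips non-.dlc files and only throws when at least one
// container was seen and none yielded any links. importDroppedFilesSketch is a
// hypothetical caller for illustration.
async function importDroppedFilesSketch(paths: string[]): Promise<void> {
  try {
    const packages = await importDlcContainers(paths);
    for (const pkg of packages) {
      console.log(`${pkg.name}: ${pkg.links.length} links`);
    }
  } catch (error) {
    // reached only when every processed .dlc failed
    console.error(String(error));
  }
}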
|
||||
|
||||
1269  src/main/debrid.ts (File diff suppressed because it is too large)
279  src/main/debug-server.ts (Normal file)
@@ -0,0 +1,279 @@
|
||||
import http from "node:http";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { logger, getLogFilePath } from "./logger";
|
||||
import type { DownloadManager } from "./download-manager";
|
||||
|
||||
const DEFAULT_PORT = 9868;
|
||||
const MAX_LOG_LINES = 10000;
|
||||
|
||||
let server: http.Server | null = null;
|
||||
let manager: DownloadManager | null = null;
|
||||
let authToken = "";
|
||||
|
||||
function loadToken(baseDir: string): string {
|
||||
const tokenPath = path.join(baseDir, "debug_token.txt");
|
||||
try {
|
||||
return fs.readFileSync(tokenPath, "utf8").trim();
|
||||
} catch {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
function getPort(baseDir: string): number {
|
||||
const portPath = path.join(baseDir, "debug_port.txt");
|
||||
try {
|
||||
const n = Number(fs.readFileSync(portPath, "utf8").trim());
|
||||
if (Number.isFinite(n) && n >= 1024 && n <= 65535) {
|
||||
return n;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
return DEFAULT_PORT;
|
||||
}
|
||||
|
||||
function checkAuth(req: http.IncomingMessage): boolean {
|
||||
if (!authToken) {
|
||||
return false;
|
||||
}
|
||||
const header = req.headers.authorization || "";
|
||||
if (header === `Bearer ${authToken}`) {
|
||||
return true;
|
||||
}
|
||||
const url = new URL(req.url || "/", "http://localhost");
|
||||
return url.searchParams.get("token") === authToken;
|
||||
}
|
||||
|
||||
function jsonResponse(res: http.ServerResponse, status: number, data: unknown): void {
|
||||
const body = JSON.stringify(data, null, 2);
|
||||
res.writeHead(status, {
|
||||
"Content-Type": "application/json; charset=utf-8",
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Cache-Control": "no-cache"
|
||||
});
|
||||
res.end(body);
|
||||
}
|
||||
|
||||
function readLogTail(lines: number): string[] {
|
||||
const logPath = getLogFilePath();
|
||||
try {
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
const allLines = content.split("\n").filter((l) => l.trim().length > 0);
|
||||
return allLines.slice(-Math.min(lines, MAX_LOG_LINES));
|
||||
} catch {
|
||||
return ["(Log-Datei nicht lesbar)"];
|
||||
}
|
||||
}
|
||||
|
||||
function handleRequest(req: http.IncomingMessage, res: http.ServerResponse): void {
|
||||
if (req.method === "OPTIONS") {
|
||||
res.writeHead(204, {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Headers": "Authorization"
|
||||
});
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!checkAuth(req)) {
|
||||
jsonResponse(res, 401, { error: "Unauthorized" });
|
||||
return;
|
||||
}
|
||||
|
||||
const url = new URL(req.url || "/", "http://localhost");
|
||||
const pathname = url.pathname;
|
||||
|
||||
if (pathname === "/health") {
|
||||
jsonResponse(res, 200, {
|
||||
status: "ok",
|
||||
uptime: Math.floor(process.uptime()),
|
||||
memoryMB: Math.round(process.memoryUsage().rss / 1024 / 1024)
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathname === "/log") {
|
||||
const count = Math.min(Number(url.searchParams.get("lines") || "100"), MAX_LOG_LINES);
|
||||
const grep = url.searchParams.get("grep") || "";
|
||||
let lines = readLogTail(count);
|
||||
if (grep) {
|
||||
const pattern = grep.toLowerCase();
|
||||
lines = lines.filter((l) => l.toLowerCase().includes(pattern));
|
||||
}
|
||||
jsonResponse(res, 200, { lines, count: lines.length });
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathname === "/status") {
|
||||
if (!manager) {
|
||||
jsonResponse(res, 503, { error: "Manager not initialized" });
|
||||
return;
|
||||
}
|
||||
const snapshot = manager.getSnapshot();
|
||||
const items = Object.values(snapshot.session.items);
|
||||
const packages = Object.values(snapshot.session.packages);
|
||||
|
||||
const byStatus: Record<string, number> = {};
|
||||
for (const item of items) {
|
||||
byStatus[item.status] = (byStatus[item.status] || 0) + 1;
|
||||
}
|
||||
|
||||
const activeItems = items
|
||||
.filter((i) => i.status === "downloading" || i.status === "validating")
|
||||
.map((i) => ({
|
||||
id: i.id,
|
||||
fileName: i.fileName,
|
||||
status: i.status,
|
||||
fullStatus: i.fullStatus,
|
||||
provider: i.provider,
|
||||
progress: i.progressPercent,
|
||||
speedMBs: +(i.speedBps / 1024 / 1024).toFixed(2),
|
||||
downloadedMB: +(i.downloadedBytes / 1024 / 1024).toFixed(1),
|
||||
totalMB: i.totalBytes ? +(i.totalBytes / 1024 / 1024).toFixed(1) : null,
|
||||
retries: i.retries,
|
||||
lastError: i.lastError
|
||||
}));
|
||||
|
||||
const failedItems = items
|
||||
.filter((i) => i.status === "failed")
|
||||
.map((i) => ({
|
||||
fileName: i.fileName,
|
||||
lastError: i.lastError,
|
||||
retries: i.retries,
|
||||
provider: i.provider
|
||||
}));
|
||||
|
||||
jsonResponse(res, 200, {
|
||||
running: snapshot.session.running,
|
||||
paused: snapshot.session.paused,
|
||||
speed: snapshot.speedText,
|
||||
eta: snapshot.etaText,
|
||||
itemCounts: byStatus,
|
||||
totalItems: items.length,
|
||||
packages: packages.map((p) => ({
|
||||
name: p.name,
|
||||
status: p.status,
|
||||
items: p.itemIds.length
|
||||
})),
|
||||
activeItems,
|
||||
failedItems: failedItems.length > 0 ? failedItems : undefined
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathname === "/items") {
|
||||
if (!manager) {
|
||||
jsonResponse(res, 503, { error: "Manager not initialized" });
|
||||
return;
|
||||
}
|
||||
const snapshot = manager.getSnapshot();
|
||||
const filter = url.searchParams.get("status");
|
||||
const pkg = url.searchParams.get("package");
|
||||
let items = Object.values(snapshot.session.items);
|
||||
if (filter) {
|
||||
items = items.filter((i) => i.status === filter);
|
||||
}
|
||||
if (pkg) {
|
||||
const pkgLower = pkg.toLowerCase();
|
||||
const matchedPkg = Object.values(snapshot.session.packages)
|
||||
.find((p) => p.name.toLowerCase().includes(pkgLower));
|
||||
if (matchedPkg) {
|
||||
const ids = new Set(matchedPkg.itemIds);
|
||||
items = items.filter((i) => ids.has(i.id));
|
||||
}
|
||||
}
|
||||
jsonResponse(res, 200, {
|
||||
count: items.length,
|
||||
items: items.map((i) => ({
|
||||
fileName: i.fileName,
|
||||
status: i.status,
|
||||
fullStatus: i.fullStatus,
|
||||
provider: i.provider,
|
||||
progress: i.progressPercent,
|
||||
speedMBs: +(i.speedBps / 1024 / 1024).toFixed(2),
|
||||
downloadedMB: +(i.downloadedBytes / 1024 / 1024).toFixed(1),
|
||||
totalMB: i.totalBytes ? +(i.totalBytes / 1024 / 1024).toFixed(1) : null,
|
||||
retries: i.retries,
|
||||
lastError: i.lastError
|
||||
}))
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathname === "/session") {
|
||||
if (!manager) {
|
||||
jsonResponse(res, 503, { error: "Manager not initialized" });
|
||||
return;
|
||||
}
|
||||
const snapshot = manager.getSnapshot();
|
||||
const pkg = url.searchParams.get("package");
|
||||
if (pkg) {
|
||||
const pkgLower = pkg.toLowerCase();
|
||||
const matchedPkg = Object.values(snapshot.session.packages)
|
||||
.find((p) => p.name.toLowerCase().includes(pkgLower));
|
||||
if (matchedPkg) {
|
||||
const ids = new Set(matchedPkg.itemIds);
|
||||
const pkgItems = Object.values(snapshot.session.items)
|
||||
.filter((i) => ids.has(i.id));
|
||||
jsonResponse(res, 200, {
|
||||
package: matchedPkg,
|
||||
items: pkgItems
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
jsonResponse(res, 200, {
|
||||
running: snapshot.session.running,
|
||||
paused: snapshot.session.paused,
|
||||
packageCount: Object.keys(snapshot.session.packages).length,
|
||||
itemCount: Object.keys(snapshot.session.items).length,
|
||||
packages: Object.values(snapshot.session.packages).map((p) => ({
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
status: p.status,
|
||||
items: p.itemIds.length
|
||||
}))
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
jsonResponse(res, 404, {
|
||||
error: "Not found",
|
||||
endpoints: [
|
||||
"GET /health",
|
||||
"GET /log?lines=100&grep=keyword",
|
||||
"GET /status",
|
||||
"GET /items?status=downloading&package=Bloodline",
|
||||
"GET /session?package=Criminal"
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
export function startDebugServer(mgr: DownloadManager, baseDir: string): void {
|
||||
authToken = loadToken(baseDir);
|
||||
if (!authToken) {
|
||||
logger.info("Debug-Server: Kein Token in debug_token.txt, Server wird nicht gestartet");
|
||||
return;
|
||||
}
|
||||
|
||||
manager = mgr;
|
||||
const port = getPort(baseDir);
|
||||
|
||||
server = http.createServer(handleRequest);
|
||||
server.listen(port, "127.0.0.1", () => {
|
||||
logger.info(`Debug-Server gestartet auf Port ${port}`);
|
||||
});
|
||||
server.on("error", (err) => {
|
||||
logger.warn(`Debug-Server Fehler: ${String(err)}`);
|
||||
server = null;
|
||||
});
|
||||
}
|
||||
|
||||
export function stopDebugServer(): void {
|
||||
if (server) {
|
||||
server.close();
|
||||
server = null;
|
||||
logger.info("Debug-Server gestoppt");
|
||||
}
|
||||
}
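// Usage sketch (assumption, not part of this diff): the server only starts when a
// non-empty debug_token.txt exists in the app's base directory, binds to 127.0.0.1
// and reads its port from debug_port.txt (default 9868). A minimal client:
async function queryDebugStatusSketch(token: string, port = 9868): Promise<void> {
  const res = await fetch(`http://127.0.0.1:${port}/status`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) {
    throw new Error(`Debug-Server antwortete mit ${res.status}`);
  }
  console.log(await res.json());
  // the token is also accepted as a query parameter, e.g. /log?lines=200&token=...
}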
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -2,6 +2,18 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import crypto from "node:crypto";
|
||||
import { ParsedHashEntry } from "../shared/types";
|
||||
import { MAX_MANIFEST_FILE_BYTES } from "./constants";
|
||||
|
||||
const manifestCache = new Map<string, { at: number; entries: Map<string, ParsedHashEntry> }>();
|
||||
const MANIFEST_CACHE_TTL_MS = 15000;
|
||||
|
||||
function normalizeManifestKey(value: string): string {
|
||||
return String(value || "")
|
||||
.replace(/\\/g, "/")
|
||||
.replace(/^\.\//, "")
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
}
|
||||
|
||||
export function parseHashLine(line: string): ParsedHashEntry | null {
|
||||
const text = String(line || "").trim();
|
||||
@@ -29,6 +41,12 @@ export function parseHashLine(line: string): ParsedHashEntry | null {
|
||||
}
|
||||
|
||||
export function readHashManifest(packageDir: string): Map<string, ParsedHashEntry> {
|
||||
const cacheKey = path.resolve(packageDir);
|
||||
const cached = manifestCache.get(cacheKey);
|
||||
if (cached && Date.now() - cached.at <= MANIFEST_CACHE_TTL_MS) {
|
||||
return new Map(cached.entries);
|
||||
}
|
||||
|
||||
const map = new Map<string, ParsedHashEntry>();
|
||||
const patterns: Array<[string, "crc32" | "md5" | "sha1"]> = [
|
||||
[".sfv", "crc32"],
|
||||
@@ -40,7 +58,17 @@ export function readHashManifest(packageDir: string): Map<string, ParsedHashEntr
|
||||
return map;
|
||||
}
|
||||
|
||||
for (const entry of fs.readdirSync(packageDir, { withFileTypes: true })) {
|
||||
const manifestFiles = fs.readdirSync(packageDir, { withFileTypes: true })
|
||||
.filter((entry) => {
|
||||
if (!entry.isFile()) {
|
||||
return false;
|
||||
}
|
||||
const ext = path.extname(entry.name).toLowerCase();
|
||||
return patterns.some(([pattern]) => pattern === ext);
|
||||
})
|
||||
.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true, sensitivity: "base" }));
|
||||
|
||||
for (const entry of manifestFiles) {
|
||||
if (!entry.isFile()) {
|
||||
continue;
|
||||
}
|
||||
@@ -50,7 +78,16 @@ export function readHashManifest(packageDir: string): Map<string, ParsedHashEntr
|
||||
continue;
|
||||
}
|
||||
const filePath = path.join(packageDir, entry.name);
|
||||
const lines = fs.readFileSync(filePath, "utf8").split(/\r?\n/);
|
||||
let lines: string[];
|
||||
try {
|
||||
const stat = fs.statSync(filePath);
|
||||
if (stat.size > MAX_MANIFEST_FILE_BYTES) {
|
||||
continue;
|
||||
}
|
||||
lines = fs.readFileSync(filePath, "utf8").split(/\r?\n/);
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
for (const line of lines) {
|
||||
const parsed = parseHashLine(line);
|
||||
if (!parsed) {
|
||||
@@ -60,20 +97,28 @@ export function readHashManifest(packageDir: string): Map<string, ParsedHashEntr
|
||||
...parsed,
|
||||
algorithm: hit[1]
|
||||
};
|
||||
map.set(parsed.fileName.toLowerCase(), normalized);
|
||||
const key = normalizeManifestKey(parsed.fileName);
|
||||
if (map.has(key)) {
|
||||
continue;
|
||||
}
|
||||
map.set(key, normalized);
|
||||
}
|
||||
}
|
||||
manifestCache.set(cacheKey, { at: Date.now(), entries: new Map(map) });
|
||||
return map;
|
||||
}
|
||||
|
||||
const crcTable = new Int32Array(256);
|
||||
for (let i = 0; i < 256; i++) {
|
||||
let c = i;
|
||||
for (let j = 0; j < 8; j++) c = c & 1 ? (0xedb88320 ^ (c >>> 1)) : (c >>> 1);
|
||||
crcTable[i] = c;
|
||||
}
|
||||
|
||||
function crc32Buffer(data: Buffer, seed = 0): number {
|
||||
let crc = seed ^ -1;
|
||||
for (let i = 0; i < data.length; i += 1) {
|
||||
let c = (crc ^ data[i]) & 0xff;
|
||||
for (let j = 0; j < 8; j += 1) {
|
||||
c = (c & 1) ? (0xedb88320 ^ (c >>> 1)) : (c >>> 1);
|
||||
}
|
||||
crc = (crc >>> 8) ^ c;
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
crc = (crc >>> 8) ^ crcTable[(crc ^ data[i]) & 0xff];
|
||||
}
|
||||
return crc ^ -1;
|
||||
}
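// Sanity check (not part of this diff): the table-driven routine above implements the
// standard reflected CRC-32 (polynomial 0xEDB88320), so it must reproduce the
// well-known test vector crc32("123456789") = 0xCBF43926.
const crcSelfTest = (crc32Buffer(Buffer.from("123456789", "ascii")) >>> 0).toString(16);
// crcSelfTest === "cbf43926"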
|
||||
@@ -81,20 +126,21 @@ function crc32Buffer(data: Buffer, seed = 0): number {
|
||||
async function hashFile(filePath: string, algorithm: "crc32" | "md5" | "sha1"): Promise<string> {
|
||||
if (algorithm === "crc32") {
|
||||
const stream = fs.createReadStream(filePath, { highWaterMark: 1024 * 1024 });
|
||||
return await new Promise<string>((resolve, reject) => {
|
||||
let crc = 0;
|
||||
stream.on("data", (chunk: Buffer) => {
|
||||
crc = crc32Buffer(chunk, crc);
|
||||
});
|
||||
stream.on("error", reject);
|
||||
stream.on("end", () => resolve(((crc >>> 0).toString(16)).padStart(8, "0").toLowerCase()));
|
||||
});
|
||||
for await (const chunk of stream) {
|
||||
crc = crc32Buffer(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk), crc);
|
||||
await new Promise(r => setImmediate(r));
|
||||
}
|
||||
return (crc >>> 0).toString(16).padStart(8, "0").toLowerCase();
|
||||
}
|
||||
|
||||
const hash = crypto.createHash(algorithm);
|
||||
const data = fs.readFileSync(filePath);
|
||||
hash.update(data);
|
||||
return hash.digest("hex").toLowerCase();
|
||||
const stream = fs.createReadStream(filePath, { highWaterMark: 1024 * 1024 });
|
||||
return await new Promise<string>((resolve, reject) => {
|
||||
stream.on("data", (chunk: string | Buffer) => hash.update(typeof chunk === "string" ? Buffer.from(chunk) : chunk));
|
||||
stream.on("error", reject);
|
||||
stream.on("end", () => resolve(hash.digest("hex").toLowerCase()));
|
||||
});
|
||||
}
|
||||
|
||||
export async function validateFileAgainstManifest(filePath: string, packageDir: string): Promise<{ ok: boolean; message: string }> {
|
||||
@@ -102,8 +148,9 @@ export async function validateFileAgainstManifest(filePath: string, packageDir:
|
||||
if (manifest.size === 0) {
|
||||
return { ok: true, message: "Kein Hash verfügbar" };
|
||||
}
|
||||
const key = path.basename(filePath).toLowerCase();
|
||||
const entry = manifest.get(key);
|
||||
const keyByBaseName = normalizeManifestKey(path.basename(filePath));
|
||||
const keyByRelativePath = normalizeManifestKey(path.relative(packageDir, filePath));
|
||||
const entry = manifest.get(keyByRelativePath) || manifest.get(keyByBaseName);
|
||||
if (!entry) {
|
||||
return { ok: true, message: "Kein Hash für Datei" };
|
||||
}
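// Usage sketch (assumption, not part of this diff): with the two lookup keys a file is
// matched either by its path relative to the package directory or by its bare name, so
// manifest entries like "subdir/archive.part1.rar" now resolve as well. Hypothetical caller:
async function checkDownloadedFileSketch(packageDir: string, filePath: string): Promise<void> {
  const result = await validateFileAgainstManifest(filePath, packageDir);
  if (!result.ok) {
    console.warn(`Hash mismatch for ${path.basename(filePath)}: ${result.message}`);
  }
}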
|
||||
|
||||
@@ -6,7 +6,9 @@ export function mergePackageInputs(packages: ParsedPackageInput[]): ParsedPackag
|
||||
for (const pkg of packages) {
|
||||
const name = sanitizeFilename(pkg.name || inferPackageNameFromLinks(pkg.links));
|
||||
const list = grouped.get(name) ?? [];
|
||||
list.push(...pkg.links);
|
||||
for (const link of pkg.links) {
|
||||
list.push(link);
|
||||
}
|
||||
grouped.set(name, list);
|
||||
}
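// Note (not part of this diff): the explicit loop replaces list.push(...pkg.links)
// because spreading a very large array into a single call can exceed the engine's
// argument limit and throw a RangeError, while pushing element by element has no such cap.
//   const huge: string[] = new Array(200_000).fill("https://example.com/file");
//   list.push(...huge); // may throw "RangeError: Maximum call stack size exceeded"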
|
||||
return Array.from(grouped.entries()).map(([name, links]) => ({
|
||||
|
||||
@@ -2,19 +2,216 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
let logFilePath = path.resolve(process.cwd(), "rd_downloader.log");
|
||||
let fallbackLogFilePath: string | null = null;
|
||||
const LOG_FLUSH_INTERVAL_MS = 120;
|
||||
const LOG_BUFFER_LIMIT_CHARS = 1_000_000;
|
||||
const LOG_MAX_FILE_BYTES = 10 * 1024 * 1024;
|
||||
const rotateCheckAtByFile = new Map<string, number>();
|
||||
|
||||
type LogListener = (line: string) => void;
|
||||
let logListener: LogListener | null = null;
|
||||
|
||||
let pendingLines: string[] = [];
|
||||
let pendingChars = 0;
|
||||
let flushTimer: NodeJS.Timeout | null = null;
|
||||
let flushInFlight = false;
|
||||
let exitHookAttached = false;
|
||||
|
||||
export function setLogListener(listener: LogListener | null): void {
|
||||
logListener = listener;
|
||||
}
|
||||
|
||||
export function configureLogger(baseDir: string): void {
|
||||
logFilePath = path.join(baseDir, "rd_downloader.log");
|
||||
const cwdLogPath = path.resolve(process.cwd(), "rd_downloader.log");
|
||||
fallbackLogFilePath = cwdLogPath === logFilePath ? null : cwdLogPath;
|
||||
}
|
||||
|
||||
function appendLine(filePath: string, line: string): { ok: boolean; errorText: string } {
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fs.appendFileSync(filePath, line, "utf8");
|
||||
return { ok: true, errorText: "" };
|
||||
} catch (error) {
|
||||
return { ok: false, errorText: String(error) };
|
||||
}
|
||||
}
|
||||
|
||||
async function appendChunk(filePath: string, chunk: string): Promise<{ ok: boolean; errorText: string }> {
|
||||
try {
|
||||
await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
|
||||
await fs.promises.appendFile(filePath, chunk, "utf8");
|
||||
return { ok: true, errorText: "" };
|
||||
} catch (error) {
|
||||
return { ok: false, errorText: String(error) };
|
||||
}
|
||||
}
|
||||
|
||||
function writeStderr(text: string): void {
|
||||
try {
|
||||
process.stderr.write(text);
|
||||
} catch {
|
||||
// ignore stderr failures
|
||||
}
|
||||
}
|
||||
|
||||
function flushSyncPending(): void {
|
||||
if (pendingLines.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const chunk = pendingLines.join("");
|
||||
pendingLines = [];
|
||||
pendingChars = 0;
|
||||
|
||||
rotateIfNeeded(logFilePath);
|
||||
const primary = appendLine(logFilePath, chunk);
|
||||
if (fallbackLogFilePath) {
|
||||
rotateIfNeeded(fallbackLogFilePath);
|
||||
const fallback = appendLine(fallbackLogFilePath, chunk);
|
||||
if (!primary.ok && !fallback.ok) {
|
||||
writeStderr(`LOGGER write failed (primary+fallback): ${primary.errorText} | ${fallback.errorText}\n`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (!primary.ok) {
|
||||
writeStderr(`LOGGER write failed: ${primary.errorText}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
function scheduleFlush(immediate = false): void {
|
||||
if (flushInFlight) {
|
||||
return;
|
||||
}
|
||||
if (immediate) {
|
||||
if (flushTimer) {
|
||||
clearTimeout(flushTimer);
|
||||
flushTimer = null;
|
||||
}
|
||||
void flushAsync();
|
||||
return;
|
||||
}
|
||||
if (flushTimer) {
|
||||
return;
|
||||
}
|
||||
flushTimer = setTimeout(() => {
|
||||
flushTimer = null;
|
||||
void flushAsync();
|
||||
}, LOG_FLUSH_INTERVAL_MS);
|
||||
}
|
||||
|
||||
function rotateIfNeeded(filePath: string): void {
|
||||
try {
|
||||
const now = Date.now();
|
||||
const lastRotateCheckAt = rotateCheckAtByFile.get(filePath) || 0;
|
||||
if (now - lastRotateCheckAt < 60_000) {
|
||||
return;
|
||||
}
|
||||
rotateCheckAtByFile.set(filePath, now);
|
||||
const stat = fs.statSync(filePath);
|
||||
if (stat.size < LOG_MAX_FILE_BYTES) {
|
||||
return;
|
||||
}
|
||||
const backup = `${filePath}.old`;
|
||||
try {
|
||||
fs.rmSync(backup, { force: true });
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
fs.renameSync(filePath, backup);
|
||||
} catch {
|
||||
// ignore - file may not exist yet
|
||||
}
|
||||
}
|
||||
|
||||
async function rotateIfNeededAsync(filePath: string): Promise<void> {
|
||||
try {
|
||||
const now = Date.now();
|
||||
const lastRotateCheckAt = rotateCheckAtByFile.get(filePath) || 0;
|
||||
if (now - lastRotateCheckAt < 60_000) {
|
||||
return;
|
||||
}
|
||||
rotateCheckAtByFile.set(filePath, now);
|
||||
const stat = await fs.promises.stat(filePath);
|
||||
if (stat.size < LOG_MAX_FILE_BYTES) {
|
||||
return;
|
||||
}
|
||||
const backup = `${filePath}.old`;
|
||||
await fs.promises.rm(backup, { force: true }).catch(() => {});
|
||||
await fs.promises.rename(filePath, backup);
|
||||
} catch {
|
||||
// ignore - file may not exist yet
|
||||
}
|
||||
}
|
||||
|
||||
async function flushAsync(): Promise<void> {
|
||||
if (flushInFlight || pendingLines.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
flushInFlight = true;
|
||||
const linesSnapshot = pendingLines.slice();
|
||||
const chunk = linesSnapshot.join("");
|
||||
|
||||
try {
|
||||
await rotateIfNeededAsync(logFilePath);
|
||||
const primary = await appendChunk(logFilePath, chunk);
|
||||
let wroteAny = primary.ok;
|
||||
if (fallbackLogFilePath) {
|
||||
await rotateIfNeededAsync(fallbackLogFilePath);
|
||||
const fallback = await appendChunk(fallbackLogFilePath, chunk);
|
||||
wroteAny = wroteAny || fallback.ok;
|
||||
if (!primary.ok && !fallback.ok) {
|
||||
writeStderr(`LOGGER write failed (primary+fallback): ${primary.errorText} | ${fallback.errorText}\n`);
|
||||
}
|
||||
} else if (!primary.ok) {
|
||||
writeStderr(`LOGGER write failed: ${primary.errorText}\n`);
|
||||
}
|
||||
if (wroteAny) {
|
||||
pendingLines = pendingLines.slice(linesSnapshot.length);
|
||||
pendingChars = Math.max(0, pendingChars - chunk.length);
|
||||
}
|
||||
} finally {
|
||||
flushInFlight = false;
|
||||
if (pendingLines.length > 0) {
|
||||
scheduleFlush();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function ensureExitHook(): void {
|
||||
if (exitHookAttached) {
|
||||
return;
|
||||
}
|
||||
exitHookAttached = true;
|
||||
process.once("beforeExit", flushSyncPending);
|
||||
process.once("exit", flushSyncPending);
|
||||
}
|
||||
|
||||
function write(level: "INFO" | "WARN" | "ERROR", message: string): void {
|
||||
ensureExitHook();
|
||||
const line = `${new Date().toISOString()} [${level}] ${message}\n`;
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(logFilePath), { recursive: true });
|
||||
fs.appendFileSync(logFilePath, line, "utf8");
|
||||
} catch {
|
||||
// ignore logging failures
|
||||
pendingLines.push(line);
|
||||
pendingChars += line.length;
|
||||
|
||||
if (logListener) {
|
||||
try { logListener(line); } catch { /* ignore */ }
|
||||
}
|
||||
|
||||
while (pendingChars > LOG_BUFFER_LIMIT_CHARS && pendingLines.length > 1) {
|
||||
const removed = pendingLines.shift();
|
||||
if (!removed) {
|
||||
break;
|
||||
}
|
||||
pendingChars = Math.max(0, pendingChars - removed.length);
|
||||
}
|
||||
|
||||
if (level === "ERROR") {
|
||||
scheduleFlush(true);
|
||||
return;
|
||||
}
|
||||
scheduleFlush();
|
||||
}
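// Usage sketch (assumption, not part of this diff): callers treat the logger as
// fire-and-forget. Lines land in the in-memory buffer and are flushed by the timer;
// ERROR lines force an immediate flush via scheduleFlush(true).
//   configureLogger(app.getPath("userData")); // once at startup in the main process
//   logger.info("Download gestartet");
//   logger.error("Download fehlgeschlagen");  // flushed right away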
|
||||
|
||||
export const logger = {
|
||||
|
||||
438  src/main/main.ts
@@ -1,13 +1,60 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { app, BrowserWindow, dialog, ipcMain, IpcMainInvokeEvent } from "electron";
|
||||
import { AddLinksPayload, AppSettings } from "../shared/types";
|
||||
import { app, BrowserWindow, clipboard, dialog, ipcMain, IpcMainInvokeEvent, Menu, shell, Tray } from "electron";
|
||||
import { AddLinksPayload, AppSettings, UpdateInstallProgress } from "../shared/types";
|
||||
import { AppController } from "./app-controller";
|
||||
import { IPC_CHANNELS } from "../shared/ipc";
|
||||
import { logger } from "./logger";
|
||||
import { getLogFilePath, logger } from "./logger";
|
||||
import { APP_NAME } from "./constants";
|
||||
import { extractHttpLinksFromText } from "./utils";
|
||||
import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor";
|
||||
|
||||
/* ── IPC validation helpers ────────────────────────────────────── */
|
||||
function validateString(value: unknown, name: string): string {
|
||||
if (typeof value !== "string") {
|
||||
throw new Error(`${name} muss ein String sein`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function validatePlainObject(value: unknown, name: string): Record<string, unknown> {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
throw new Error(`${name} muss ein Objekt sein`);
|
||||
}
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
const IMPORT_QUEUE_MAX_BYTES = 10 * 1024 * 1024;
|
||||
const RENAME_PACKAGE_MAX_CHARS = 240;
|
||||
function validateStringArray(value: unknown, name: string): string[] {
|
||||
if (!Array.isArray(value) || !value.every(v => typeof v === "string")) {
|
||||
throw new Error(`${name} muss ein String-Array sein`);
|
||||
}
|
||||
return value as string[];
|
||||
}
|
||||
|
||||
/* ── Single Instance Lock ───────────────────────────────────────── */
|
||||
const gotLock = app.requestSingleInstanceLock();
|
||||
if (!gotLock) {
|
||||
app.exit(0);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
/* ── Unhandled error protection ─────────────────────────────────── */
|
||||
process.on("uncaughtException", (error) => {
|
||||
logger.error(`Uncaught Exception: ${String(error?.stack || error)}`);
|
||||
});
|
||||
process.on("unhandledRejection", (reason) => {
|
||||
logger.error(`Unhandled Rejection: ${String(reason)}`);
|
||||
});
|
||||
|
||||
let mainWindow: BrowserWindow | null = null;
|
||||
let tray: Tray | null = null;
|
||||
let clipboardTimer: ReturnType<typeof setInterval> | null = null;
|
||||
let updateQuitTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
let lastClipboardText = "";
|
||||
const controller = new AppController();
|
||||
const CLIPBOARD_MAX_TEXT_CHARS = 50_000;
|
||||
|
||||
function isDevMode(): boolean {
|
||||
return process.env.NODE_ENV === "development";
|
||||
@@ -20,7 +67,8 @@ function createWindow(): BrowserWindow {
|
||||
minWidth: 1120,
|
||||
minHeight: 760,
|
||||
backgroundColor: "#070b14",
|
||||
title: `${APP_NAME} v${controller.getVersion()}`,
|
||||
title: `${APP_NAME} - v${controller.getVersion()}`,
|
||||
icon: path.join(app.getAppPath(), "assets", "app_icon.ico"),
|
||||
webPreferences: {
|
||||
contextIsolation: true,
|
||||
nodeIntegration: false,
|
||||
@@ -28,6 +76,22 @@ function createWindow(): BrowserWindow {
|
||||
}
|
||||
});
|
||||
|
||||
if (!isDevMode()) {
|
||||
window.webContents.session.webRequest.onHeadersReceived((details, callback) => {
|
||||
callback({
|
||||
responseHeaders: {
|
||||
...details.responseHeaders,
|
||||
"Content-Security-Policy": [
|
||||
"default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; connect-src 'self' https://api.real-debrid.com https://codeberg.org https://bestdebrid.com https://api.alldebrid.com https://www.mega-debrid.eu https://git.24-music.de https://ddownload.com https://ddl.to"
|
||||
]
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
window.setMenuBarVisibility(false);
|
||||
window.setAutoHideMenuBar(true);
|
||||
|
||||
if (isDevMode()) {
|
||||
void window.loadURL("http://localhost:5173");
|
||||
} else {
|
||||
@@ -37,18 +101,296 @@ function createWindow(): BrowserWindow {
|
||||
return window;
|
||||
}
|
||||
|
||||
function bindMainWindowLifecycle(window: BrowserWindow): void {
|
||||
window.on("close", (event) => {
|
||||
const settings = controller.getSettings();
|
||||
if (settings.minimizeToTray && tray) {
|
||||
event.preventDefault();
|
||||
window.hide();
|
||||
}
|
||||
});
|
||||
|
||||
window.on("closed", () => {
|
||||
if (mainWindow === window) {
|
||||
mainWindow = null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createTray(): void {
|
||||
if (tray) {
|
||||
return;
|
||||
}
|
||||
const iconPath = path.join(app.getAppPath(), "assets", "app_icon.ico");
|
||||
try {
|
||||
tray = new Tray(iconPath);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
tray.setToolTip(APP_NAME);
|
||||
const contextMenu = Menu.buildFromTemplate([
|
||||
{ label: "Anzeigen", click: () => { mainWindow?.show(); mainWindow?.focus(); } },
|
||||
{ type: "separator" },
|
||||
{ label: "Start", click: () => { void controller.start().catch((err) => logger.warn(`Tray Start Fehler: ${String(err)}`)); } },
|
||||
{ label: "Stop", click: () => { controller.stop(); } },
|
||||
{ type: "separator" },
|
||||
{ label: "Beenden", click: () => { app.quit(); } }
|
||||
]);
|
||||
tray.setContextMenu(contextMenu);
|
||||
tray.on("double-click", () => {
|
||||
mainWindow?.show();
|
||||
mainWindow?.focus();
|
||||
});
|
||||
}
|
||||
|
||||
function destroyTray(): void {
|
||||
if (tray) {
|
||||
tray.destroy();
|
||||
tray = null;
|
||||
}
|
||||
}
|
||||
|
||||
function extractLinksFromText(text: string): string[] {
|
||||
return extractHttpLinksFromText(text);
|
||||
}
|
||||
|
||||
function normalizeClipboardText(text: string): string {
|
||||
const truncateUnicodeSafe = (value: string, maxChars: number): string => {
|
||||
if (value.length <= maxChars) {
|
||||
return value;
|
||||
}
|
||||
const points = Array.from(value);
|
||||
if (points.length <= maxChars) {
|
||||
return value;
|
||||
}
|
||||
return points.slice(0, maxChars).join("");
|
||||
};
|
||||
|
||||
const normalized = String(text || "");
|
||||
if (normalized.length <= CLIPBOARD_MAX_TEXT_CHARS) {
|
||||
return normalized;
|
||||
}
|
||||
const truncated = truncateUnicodeSafe(normalized, CLIPBOARD_MAX_TEXT_CHARS);
|
||||
const lastBreak = Math.max(
|
||||
truncated.lastIndexOf("\n"),
|
||||
truncated.lastIndexOf("\r"),
|
||||
truncated.lastIndexOf("\t"),
|
||||
truncated.lastIndexOf(" ")
|
||||
);
|
||||
if (lastBreak >= Math.floor(CLIPBOARD_MAX_TEXT_CHARS * 0.7)) {
|
||||
return truncated.slice(0, lastBreak);
|
||||
}
|
||||
return truncated;
|
||||
}
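// Example (not part of this diff): truncation counts code points rather than UTF-16
// units, so clipboard text ending in an emoji is cut at a character boundary, and the
// cut is then moved back to the last whitespace when that keeps at least 70% of the
// limit, so a URL sitting on the boundary is dropped instead of arriving half-truncated.
//   normalizeClipboardText("https://a.example/f1\nhttps://b.example/f2\n".repeat(2000))
//   // -> cut at the last line break before the 50_000-character limit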
|
||||
|
||||
function startClipboardWatcher(): void {
|
||||
if (clipboardTimer) {
|
||||
return;
|
||||
}
|
||||
lastClipboardText = normalizeClipboardText(clipboard.readText());
|
||||
clipboardTimer = setInterval(() => {
|
||||
let text: string;
|
||||
try {
|
||||
text = normalizeClipboardText(clipboard.readText());
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
if (text === lastClipboardText || !text.trim()) {
|
||||
return;
|
||||
}
|
||||
lastClipboardText = text;
|
||||
const links = extractLinksFromText(text);
|
||||
if (links.length > 0 && mainWindow && !mainWindow.isDestroyed()) {
|
||||
mainWindow.webContents.send(IPC_CHANNELS.CLIPBOARD_DETECTED, links);
|
||||
}
|
||||
}, 2000);
|
||||
}
|
||||
|
||||
function stopClipboardWatcher(): void {
|
||||
if (clipboardTimer) {
|
||||
clearInterval(clipboardTimer);
|
||||
clipboardTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
function updateClipboardWatcher(): void {
|
||||
const settings = controller.getSettings();
|
||||
if (settings.clipboardWatch) {
|
||||
startClipboardWatcher();
|
||||
} else {
|
||||
stopClipboardWatcher();
|
||||
}
|
||||
}
|
||||
|
||||
function updateTray(): void {
|
||||
const settings = controller.getSettings();
|
||||
if (settings.minimizeToTray) {
|
||||
createTray();
|
||||
} else {
|
||||
destroyTray();
|
||||
}
|
||||
}
|
||||
|
||||
function registerIpcHandlers(): void {
|
||||
ipcMain.handle(IPC_CHANNELS.GET_SNAPSHOT, () => controller.getSnapshot());
|
||||
ipcMain.handle(IPC_CHANNELS.GET_VERSION, () => controller.getVersion());
|
||||
ipcMain.handle(IPC_CHANNELS.CHECK_UPDATES, async () => controller.checkUpdates());
|
||||
ipcMain.handle(IPC_CHANNELS.UPDATE_SETTINGS, (_event: IpcMainInvokeEvent, partial: Partial<AppSettings>) => controller.updateSettings(partial ?? {}));
|
||||
ipcMain.handle(IPC_CHANNELS.ADD_LINKS, (_event: IpcMainInvokeEvent, payload: AddLinksPayload) => controller.addLinks(payload));
|
||||
ipcMain.handle(IPC_CHANNELS.ADD_CONTAINERS, async (_event: IpcMainInvokeEvent, filePaths: string[]) => controller.addContainers(filePaths ?? []));
|
||||
ipcMain.handle(IPC_CHANNELS.INSTALL_UPDATE, async () => {
|
||||
const result = await controller.installUpdate((progress: UpdateInstallProgress) => {
|
||||
if (!mainWindow || mainWindow.isDestroyed()) {
|
||||
return;
|
||||
}
|
||||
mainWindow.webContents.send(IPC_CHANNELS.UPDATE_INSTALL_PROGRESS, progress);
|
||||
});
|
||||
if (result.started) {
|
||||
updateQuitTimer = setTimeout(() => {
|
||||
app.quit();
|
||||
}, 2500);
|
||||
}
|
||||
return result;
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.OPEN_EXTERNAL, async (_event: IpcMainInvokeEvent, rawUrl: string) => {
|
||||
try {
|
||||
const parsed = new URL(String(rawUrl || "").trim());
|
||||
if (parsed.protocol !== "https:" && parsed.protocol !== "http:") {
|
||||
return false;
|
||||
}
|
||||
await shell.openExternal(parsed.toString());
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.UPDATE_SETTINGS, (_event: IpcMainInvokeEvent, partial: Partial<AppSettings>) => {
|
||||
const validated = validatePlainObject(partial ?? {}, "partial");
|
||||
const result = controller.updateSettings(validated as Partial<AppSettings>);
|
||||
updateClipboardWatcher();
|
||||
updateTray();
|
||||
return result;
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.ADD_LINKS, (_event: IpcMainInvokeEvent, payload: AddLinksPayload) => {
|
||||
validatePlainObject(payload ?? {}, "payload");
|
||||
validateString(payload?.rawText, "rawText");
|
||||
if (payload.packageName !== undefined) {
|
||||
validateString(payload.packageName, "packageName");
|
||||
}
|
||||
if (payload.duplicatePolicy !== undefined && payload.duplicatePolicy !== "keep" && payload.duplicatePolicy !== "skip" && payload.duplicatePolicy !== "overwrite") {
|
||||
throw new Error("duplicatePolicy muss 'keep', 'skip' oder 'overwrite' sein");
|
||||
}
|
||||
return controller.addLinks(payload);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.ADD_CONTAINERS, async (_event: IpcMainInvokeEvent, filePaths: string[]) => {
|
||||
const validPaths = validateStringArray(filePaths ?? [], "filePaths");
|
||||
const safePaths = validPaths.filter((p) => path.isAbsolute(p));
|
||||
return controller.addContainers(safePaths);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.GET_START_CONFLICTS, () => controller.getStartConflicts());
|
||||
ipcMain.handle(IPC_CHANNELS.RESOLVE_START_CONFLICT, (_event: IpcMainInvokeEvent, packageId: string, policy: "keep" | "skip" | "overwrite") => {
|
||||
validateString(packageId, "packageId");
|
||||
validateString(policy, "policy");
|
||||
if (policy !== "keep" && policy !== "skip" && policy !== "overwrite") {
|
||||
throw new Error("policy muss 'keep', 'skip' oder 'overwrite' sein");
|
||||
}
|
||||
return controller.resolveStartConflict(packageId, policy);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.CLEAR_ALL, () => controller.clearAll());
|
||||
ipcMain.handle(IPC_CHANNELS.START, () => controller.start());
|
||||
ipcMain.handle(IPC_CHANNELS.START_PACKAGES, (_event: IpcMainInvokeEvent, packageIds: string[]) => {
|
||||
validateStringArray(packageIds ?? [], "packageIds");
|
||||
return controller.startPackages(packageIds ?? []);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.START_ITEMS, (_event: IpcMainInvokeEvent, itemIds: string[]) => {
|
||||
validateStringArray(itemIds ?? [], "itemIds");
|
||||
return controller.startItems(itemIds ?? []);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.STOP, () => controller.stop());
|
||||
ipcMain.handle(IPC_CHANNELS.TOGGLE_PAUSE, () => controller.togglePause());
|
||||
ipcMain.handle(IPC_CHANNELS.CANCEL_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => controller.cancelPackage(packageId));
|
||||
ipcMain.handle(IPC_CHANNELS.CANCEL_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
return controller.cancelPackage(packageId);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.RENAME_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string, newName: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
validateString(newName, "newName");
|
||||
if (newName.length > RENAME_PACKAGE_MAX_CHARS) {
|
||||
throw new Error(`newName zu lang (max ${RENAME_PACKAGE_MAX_CHARS} Zeichen)`);
|
||||
}
|
||||
return controller.renamePackage(packageId, newName);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.REORDER_PACKAGES, (_event: IpcMainInvokeEvent, packageIds: string[]) => {
|
||||
validateStringArray(packageIds, "packageIds");
|
||||
return controller.reorderPackages(packageIds);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.REMOVE_ITEM, (_event: IpcMainInvokeEvent, itemId: string) => {
|
||||
validateString(itemId, "itemId");
|
||||
return controller.removeItem(itemId);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.TOGGLE_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
return controller.togglePackage(packageId);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.RETRY_EXTRACTION, (_event: IpcMainInvokeEvent, packageId: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
return controller.retryExtraction(packageId);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.EXTRACT_NOW, (_event: IpcMainInvokeEvent, packageId: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
return controller.extractNow(packageId);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.RESET_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
return controller.resetPackage(packageId);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.SET_PACKAGE_PRIORITY, (_event: IpcMainInvokeEvent, packageId: string, priority: string) => {
|
||||
validateString(packageId, "packageId");
|
||||
validateString(priority, "priority");
|
||||
if (priority !== "high" && priority !== "normal" && priority !== "low") {
|
||||
throw new Error("priority muss 'high', 'normal' oder 'low' sein");
|
||||
}
|
||||
return controller.setPackagePriority(packageId, priority);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.SKIP_ITEMS, (_event: IpcMainInvokeEvent, itemIds: string[]) => {
|
||||
validateStringArray(itemIds ?? [], "itemIds");
|
||||
return controller.skipItems(itemIds ?? []);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.RESET_ITEMS, (_event: IpcMainInvokeEvent, itemIds: string[]) => {
|
||||
validateStringArray(itemIds ?? [], "itemIds");
|
||||
return controller.resetItems(itemIds ?? []);
|
||||
});
|
||||
  ipcMain.handle(IPC_CHANNELS.GET_HISTORY, () => controller.getHistory());

  ipcMain.handle(IPC_CHANNELS.CLEAR_HISTORY, () => controller.clearHistory());

  ipcMain.handle(IPC_CHANNELS.REMOVE_HISTORY_ENTRY, (_event: IpcMainInvokeEvent, entryId: string) => {
    validateString(entryId, "entryId");
    return controller.removeHistoryEntry(entryId);
  });

  ipcMain.handle(IPC_CHANNELS.EXPORT_QUEUE, async () => {
    const options = {
      defaultPath: `rd-queue-export.json`,
      filters: [{ name: "Queue Export", extensions: ["json"] }]
    };
    const result = mainWindow ? await dialog.showSaveDialog(mainWindow, options) : await dialog.showSaveDialog(options);
    if (result.canceled || !result.filePath) {
      return { saved: false };
    }
    const json = controller.exportQueue();
    await fs.promises.writeFile(result.filePath, json, "utf8");
    return { saved: true };
  });
ipcMain.handle(IPC_CHANNELS.IMPORT_QUEUE, (_event: IpcMainInvokeEvent, json: string) => {
|
||||
validateString(json, "json");
|
||||
const bytes = Buffer.byteLength(json, "utf8");
|
||||
if (bytes > IMPORT_QUEUE_MAX_BYTES) {
|
||||
throw new Error(`Queue-Import zu groß (max ${IMPORT_QUEUE_MAX_BYTES} Bytes)`);
|
||||
}
|
||||
return controller.importQueue(json);
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.TOGGLE_CLIPBOARD, () => {
|
||||
const settings = controller.getSettings();
|
||||
const next = !settings.clipboardWatch;
|
||||
controller.updateSettings({ clipboardWatch: next });
|
||||
updateClipboardWatcher();
|
||||
return next;
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.PICK_FOLDER, async () => {
|
||||
const options = {
|
||||
properties: ["openDirectory", "createDirectory"] as Array<"openDirectory" | "createDirectory">
|
||||
@@ -67,6 +409,64 @@ function registerIpcHandlers(): void {
|
||||
const result = mainWindow ? await dialog.showOpenDialog(mainWindow, options) : await dialog.showOpenDialog(options);
|
||||
return result.canceled ? [] : result.filePaths;
|
||||
});
|
||||
ipcMain.handle(IPC_CHANNELS.GET_SESSION_STATS, () => controller.getSessionStats());
|
||||
|
||||
ipcMain.handle(IPC_CHANNELS.RESTART, () => {
|
||||
app.relaunch();
|
||||
app.quit();
|
||||
});
|
||||
|
||||
ipcMain.handle(IPC_CHANNELS.QUIT, () => {
|
||||
app.quit();
|
||||
});
|
||||
|
||||
ipcMain.handle(IPC_CHANNELS.EXPORT_BACKUP, async () => {
|
||||
const options = {
|
||||
defaultPath: `mdd-backup-${new Date().toISOString().slice(0, 10)}.json`,
|
||||
filters: [{ name: "Backup", extensions: ["json"] }]
|
||||
};
|
||||
const result = mainWindow ? await dialog.showSaveDialog(mainWindow, options) : await dialog.showSaveDialog(options);
|
||||
if (result.canceled || !result.filePath) {
|
||||
return { saved: false };
|
||||
}
|
||||
const json = controller.exportBackup();
|
||||
await fs.promises.writeFile(result.filePath, json, "utf8");
|
||||
return { saved: true };
|
||||
});
|
||||
|
||||
ipcMain.handle(IPC_CHANNELS.OPEN_LOG, async () => {
|
||||
const logPath = getLogFilePath();
|
||||
await shell.openPath(logPath);
|
||||
});
|
||||
|
||||
ipcMain.handle(IPC_CHANNELS.OPEN_SESSION_LOG, async () => {
|
||||
const logPath = controller.getSessionLogPath();
|
||||
if (logPath) {
|
||||
await shell.openPath(logPath);
|
||||
}
|
||||
});
|
||||
|
||||
ipcMain.handle(IPC_CHANNELS.IMPORT_BACKUP, async () => {
|
||||
const options = {
|
||||
properties: ["openFile"] as Array<"openFile">,
|
||||
filters: [
|
||||
{ name: "Backup", extensions: ["json"] },
|
||||
{ name: "Alle Dateien", extensions: ["*"] }
|
||||
]
|
||||
};
|
||||
const result = mainWindow ? await dialog.showOpenDialog(mainWindow, options) : await dialog.showOpenDialog(options);
|
||||
if (result.canceled || result.filePaths.length === 0) {
|
||||
return { restored: false, message: "Abgebrochen" };
|
||||
}
|
||||
const filePath = result.filePaths[0];
|
||||
const stat = await fs.promises.stat(filePath);
|
||||
const BACKUP_MAX_BYTES = 50 * 1024 * 1024;
|
||||
if (stat.size > BACKUP_MAX_BYTES) {
|
||||
return { restored: false, message: `Backup-Datei zu groß (max 50 MB, Datei hat ${(stat.size / 1024 / 1024).toFixed(1)} MB)` };
|
||||
}
|
||||
const json = await fs.promises.readFile(filePath, "utf8");
|
||||
return controller.importBackup(json);
|
||||
});
|
||||
|
||||
controller.onState = (snapshot) => {
|
||||
if (!mainWindow || mainWindow.isDestroyed()) {
|
||||
@@ -76,15 +476,33 @@ function registerIpcHandlers(): void {
|
||||
};
|
||||
}
|
||||
|
||||
app.on("second-instance", () => {
|
||||
if (mainWindow) {
|
||||
if (mainWindow.isMinimized()) {
|
||||
mainWindow.restore();
|
||||
}
|
||||
mainWindow.show();
|
||||
mainWindow.focus();
|
||||
}
|
||||
});
|
||||
|
||||
app.whenReady().then(() => {
|
||||
cleanupStaleSubstDrives();
|
||||
registerIpcHandlers();
|
||||
mainWindow = createWindow();
|
||||
bindMainWindowLifecycle(mainWindow);
|
||||
updateClipboardWatcher();
|
||||
updateTray();
|
||||
|
||||
app.on("activate", () => {
|
||||
if (BrowserWindow.getAllWindows().length === 0) {
|
||||
mainWindow = createWindow();
|
||||
bindMainWindowLifecycle(mainWindow);
|
||||
}
|
||||
});
|
||||
}).catch((error) => {
|
||||
console.error("App startup failed:", error);
|
||||
app.quit();
|
||||
});
|
||||
|
||||
app.on("window-all-closed", () => {
|
||||
@ -94,6 +512,10 @@ app.on("window-all-closed", () => {
|
||||
});
|
||||
|
||||
app.on("before-quit", () => {
|
||||
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
|
||||
stopClipboardWatcher();
|
||||
destroyTray();
|
||||
shutdownDaemon();
|
||||
try {
|
||||
controller.shutdown();
|
||||
} catch (error) {
|
||||
|
||||
424
src/main/mega-web-fallback.ts
Normal file
@@ -0,0 +1,424 @@
|
||||
import { UnrestrictedLink } from "./realdebrid";
|
||||
import { compactErrorText, filenameFromUrl, sleep } from "./utils";
|
||||
|
||||
type MegaCredentials = {
|
||||
login: string;
|
||||
password: string;
|
||||
};
|
||||
|
||||
type CodeEntry = {
|
||||
code: string;
|
||||
linkHint: string;
|
||||
};
|
||||
|
||||
const LOGIN_URL = "https://www.mega-debrid.eu/index.php?form=login";
|
||||
const DEBRID_URL = "https://www.mega-debrid.eu/index.php?form=debrid";
|
||||
const DEBRID_AJAX_URL = "https://www.mega-debrid.eu/index.php?ajax=debrid&json";
|
||||
const DEBRID_REFERER = "https://www.mega-debrid.eu/index.php?page=debrideur&lang=de";
|
||||
|
||||
function normalizeLink(link: string): string {
|
||||
return link.trim().toLowerCase();
|
||||
}
|
||||
|
||||
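// Prefer Headers.getSetCookie() where the runtime exposes it (newer Node/undici);
// otherwise fall back to splitting the folded "set-cookie" header on commas that
// start a new cookie pair.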
function parseSetCookieFromHeaders(headers: Headers): string {
|
||||
const getSetCookie = (headers as unknown as { getSetCookie?: () => string[] }).getSetCookie;
|
||||
if (typeof getSetCookie === "function") {
|
||||
const values = getSetCookie.call(headers)
|
||||
.map((entry) => entry.split(";")[0].trim())
|
||||
.filter(Boolean);
|
||||
if (values.length > 0) {
|
||||
return values.join("; ");
|
||||
}
|
||||
}
|
||||
|
||||
const raw = headers.get("set-cookie") || "";
|
||||
if (!raw) {
|
||||
return "";
|
||||
}
|
||||
return raw
|
||||
.split(/,(?=[^;=]+?=)/g)
|
||||
.map((chunk) => chunk.split(";")[0].trim())
|
||||
.filter(Boolean)
|
||||
.join("; ");
|
||||
}
|
||||
|
||||
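// Error substrings (matched case-insensitively) that mark a hoster link as
// permanently dead; such links fail immediately instead of being retried.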
const PERMANENT_HOSTER_ERRORS = [
|
||||
"hosternotavailable",
|
||||
"filenotfound",
|
||||
"file_unavailable",
|
||||
"file not found",
|
||||
"link is dead",
|
||||
"file has been removed",
|
||||
"file has been deleted",
|
||||
"file was deleted",
|
||||
"file was removed",
|
||||
"not available",
|
||||
"file is no longer available"
|
||||
];
|
||||
|
||||
function parsePageErrors(html: string): string[] {
|
||||
const errors: string[] = [];
|
||||
const errorRegex = /class=["'][^"']*\berror\b[^"']*["'][^>]*>([^<]+)</gi;
|
||||
let m: RegExpExecArray | null;
|
||||
while ((m = errorRegex.exec(html)) !== null) {
|
||||
const text = m[1].replace(/^Fehler:\s*/i, "").trim();
|
||||
if (text) {
|
||||
errors.push(text);
|
||||
}
|
||||
}
|
||||
return errors;
|
||||
}
|
||||
|
||||
function isPermanentHosterError(errors: string[]): string | null {
|
||||
for (const err of errors) {
|
||||
const lower = err.toLowerCase();
|
||||
for (const pattern of PERMANENT_HOSTER_ERRORS) {
|
||||
if (lower.includes(pattern)) {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function parseCodes(html: string): CodeEntry[] {
|
||||
const entries: CodeEntry[] = [];
|
||||
const cardRegex = /<div[^>]*class=['"][^'"]*acp-box[^'"]*['"][^>]*>[\s\S]*?<\/div>/gi;
|
||||
let cardMatch: RegExpExecArray | null;
|
||||
while ((cardMatch = cardRegex.exec(html)) !== null) {
|
||||
const block = cardMatch[0];
|
||||
const linkTitle = (block.match(/<h3>\s*Link:\s*([^<]+)<\/h3>/i)?.[1] || "").trim();
|
||||
const code = block.match(/processDebrid\(\d+,'([^']+)',0\)/i)?.[1] || "";
|
||||
if (!code) {
|
||||
continue;
|
||||
}
|
||||
entries.push({ code, linkHint: normalizeLink(linkTitle) });
|
||||
}
|
||||
|
||||
if (entries.length === 0) {
|
||||
const fallbackRegex = /processDebrid\(\d+,'([^']+)',0\)/gi;
|
||||
let m: RegExpExecArray | null;
|
||||
while ((m = fallbackRegex.exec(html)) !== null) {
|
||||
entries.push({ code: m[1], linkHint: "" });
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
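// Prefer the card whose link hint contains the requested link; otherwise fall back
// to the first parsed debrid code.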
function pickCode(entries: CodeEntry[], link: string): string {
|
||||
if (entries.length === 0) {
|
||||
return "";
|
||||
}
|
||||
const target = normalizeLink(link);
|
||||
const match = entries.find((entry) => entry.linkHint && entry.linkHint.includes(target));
|
||||
return (match?.code || entries[0].code || "").trim();
|
||||
}
|
||||
|
||||
function parseDebridJson(text: string): { link: string; text: string } | null {
|
||||
try {
|
||||
const parsed = JSON.parse(text) as { link?: string; text?: string };
|
||||
return {
|
||||
link: String(parsed.link || ""),
|
||||
text: String(parsed.text || "")
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function abortError(): Error {
|
||||
return new Error("aborted:mega-web");
|
||||
}
|
||||
|
||||
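// Combine the caller's signal with a hard timeout. Assumes AbortSignal.timeout and
// AbortSignal.any are available in the target runtime (recent Node/Electron versions).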
function withTimeoutSignal(signal: AbortSignal | undefined, timeoutMs: number): AbortSignal {
|
||||
const timeoutSignal = AbortSignal.timeout(timeoutMs);
|
||||
if (!signal) {
|
||||
return timeoutSignal;
|
||||
}
|
||||
return AbortSignal.any([signal, timeoutSignal]);
|
||||
}
|
||||
|
||||
function throwIfAborted(signal?: AbortSignal): void {
|
||||
if (signal?.aborted) {
|
||||
throw abortError();
|
||||
}
|
||||
}
|
||||
|
||||
async function sleepWithSignal(ms: number, signal?: AbortSignal): Promise<void> {
|
||||
if (!signal) {
|
||||
await sleep(ms);
|
||||
return;
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw abortError();
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
let timer: NodeJS.Timeout | null = setTimeout(() => {
|
||||
timer = null;
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
resolve();
|
||||
}, Math.max(0, ms));
|
||||
|
||||
const onAbort = (): void => {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
timer = null;
|
||||
}
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
reject(abortError());
|
||||
};
|
||||
|
||||
signal.addEventListener("abort", onAbort, { once: true });
|
||||
});
|
||||
}
|
||||
|
||||
async function raceWithAbort<T>(promise: Promise<T>, signal?: AbortSignal): Promise<T> {
|
||||
if (!signal) {
|
||||
return promise;
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw abortError();
|
||||
}
|
||||
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
let settled = false;
|
||||
|
||||
const onAbort = (): void => {
|
||||
if (settled) {
|
||||
return;
|
||||
}
|
||||
settled = true;
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
reject(abortError());
|
||||
};
|
||||
|
||||
signal.addEventListener("abort", onAbort, { once: true });
|
||||
|
||||
promise.then((value) => {
|
||||
if (settled) {
|
||||
return;
|
||||
}
|
||||
settled = true;
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
resolve(value);
|
||||
}, (error) => {
|
||||
if (settled) {
|
||||
return;
|
||||
}
|
||||
settled = true;
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export class MegaWebFallback {
|
||||
private queue: Promise<unknown> = Promise.resolve();
|
||||
|
||||
private getCredentials: () => MegaCredentials;
|
||||
|
||||
private cookie = "";
|
||||
|
||||
private cookieSetAt = 0;
|
||||
|
||||
public constructor(getCredentials: () => MegaCredentials) {
|
||||
this.getCredentials = getCredentials;
|
||||
}
|
||||
|
||||
public async unrestrict(link: string, signal?: AbortSignal): Promise<UnrestrictedLink | null> {
|
||||
const overallSignal = withTimeoutSignal(signal, 180000);
|
||||
return this.runExclusive(async () => {
|
||||
throwIfAborted(overallSignal);
|
||||
const creds = this.getCredentials();
|
||||
if (!creds.login.trim() || !creds.password.trim()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!this.cookie || Date.now() - this.cookieSetAt > 20 * 60 * 1000) {
|
||||
await this.login(creds.login, creds.password, overallSignal);
|
||||
}
|
||||
|
||||
const generated = await this.generate(link, overallSignal);
|
||||
if (!generated) {
|
||||
this.cookie = "";
|
||||
await this.login(creds.login, creds.password, overallSignal);
|
||||
const retry = await this.generate(link, overallSignal);
|
||||
if (!retry) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
directUrl: retry.directUrl,
|
||||
fileName: retry.fileName || filenameFromUrl(link),
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
directUrl: generated.directUrl,
|
||||
fileName: generated.fileName || filenameFromUrl(link),
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
};
|
||||
}, overallSignal);
|
||||
}
|
||||
|
||||
public invalidateSession(): void {
|
||||
this.cookie = "";
|
||||
this.cookieSetAt = 0;
|
||||
}
|
||||
|
||||
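// Serialize all web-fallback work through a single promise chain so only one
// login/debrid flow runs at a time; jobs that waited longer than
// QUEUE_WAIT_TIMEOUT_MS fail fast instead of running against a stale session.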
private async runExclusive<T>(job: () => Promise<T>, signal?: AbortSignal): Promise<T> {
|
||||
const queuedAt = Date.now();
|
||||
const QUEUE_WAIT_TIMEOUT_MS = 90000;
|
||||
const guardedJob = async (): Promise<T> => {
|
||||
throwIfAborted(signal);
|
||||
const waited = Date.now() - queuedAt;
|
||||
if (waited > QUEUE_WAIT_TIMEOUT_MS) {
|
||||
throw new Error(`Mega-Web Queue-Timeout (${Math.floor(waited / 1000)}s gewartet)`);
|
||||
}
|
||||
return job();
|
||||
};
|
||||
const run = this.queue.then(guardedJob, guardedJob);
|
||||
this.queue = run.then(() => undefined, () => undefined);
|
||||
return raceWithAbort(run, signal);
|
||||
}
|
||||
|
||||
private async login(login: string, password: string, signal?: AbortSignal): Promise<void> {
|
||||
throwIfAborted(signal);
|
||||
const response = await fetch(LOGIN_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0"
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
login,
|
||||
password,
|
||||
remember: "on"
|
||||
}),
|
||||
redirect: "manual",
|
||||
signal: withTimeoutSignal(signal, 30000)
|
||||
});
|
||||
|
||||
const cookie = parseSetCookieFromHeaders(response.headers);
|
||||
if (!cookie) {
|
||||
throw new Error("Mega-Web Login liefert kein Session-Cookie");
|
||||
}
|
||||
|
||||
const verify = await fetch(DEBRID_REFERER, {
|
||||
method: "GET",
|
||||
headers: {
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: cookie,
|
||||
Referer: DEBRID_REFERER
|
||||
},
|
||||
signal: withTimeoutSignal(signal, 30000)
|
||||
});
|
||||
const verifyHtml = await verify.text();
|
||||
const hasDebridForm = /id=["']debridForm["']/i.test(verifyHtml) || /name=["']links["']/i.test(verifyHtml);
|
||||
if (!hasDebridForm) {
|
||||
throw new Error("Mega-Web Login ungültig oder Session blockiert");
|
||||
}
|
||||
|
||||
this.cookie = cookie;
|
||||
this.cookieSetAt = Date.now();
|
||||
}
|
||||
|
||||
private async generate(link: string, signal?: AbortSignal): Promise<{ directUrl: string; fileName: string } | null> {
|
||||
throwIfAborted(signal);
|
||||
const page = await fetch(DEBRID_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: this.cookie,
|
||||
Referer: DEBRID_REFERER
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
links: link,
|
||||
password: "",
|
||||
showLinks: "1"
|
||||
}),
|
||||
signal: withTimeoutSignal(signal, 30000)
|
||||
});
|
||||
|
||||
const html = await page.text();
|
||||
|
||||
// Check for permanent hoster errors before looking for debrid codes
|
||||
const pageErrors = parsePageErrors(html);
|
||||
const permanentError = isPermanentHosterError(pageErrors);
|
||||
if (permanentError) {
|
||||
throw new Error(`Mega-Web: Link permanent ungültig (${permanentError})`);
|
||||
}
|
||||
|
||||
const code = pickCode(parseCodes(html), link);
|
||||
if (!code) {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (let attempt = 1; attempt <= 60; attempt += 1) {
|
||||
throwIfAborted(signal);
|
||||
const res = await fetch(DEBRID_AJAX_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
Cookie: this.cookie,
|
||||
Referer: DEBRID_REFERER
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
code,
|
||||
autodl: "0"
|
||||
}),
|
||||
signal: withTimeoutSignal(signal, 15000)
|
||||
});
|
||||
|
||||
const text = (await res.text()).trim();
|
||||
if (text === "reload") {
|
||||
await sleepWithSignal(650, signal);
|
||||
continue;
|
||||
}
|
||||
if (text === "false") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsed = parseDebridJson(text);
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!parsed.link) {
|
||||
if (/hoster does not respond correctly|could not be done for this moment/i.test(parsed.text || "")) {
|
||||
await sleepWithSignal(1200, signal);
|
||||
continue;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
const fromText = parsed.text
|
||||
.replace(/<[^>]*>/g, " ")
|
||||
.replace(/\s+/g, " ")
|
||||
.trim();
|
||||
|
||||
const nameMatch = fromText.match(/([\w .\-\[\]\(\)]+\.(?:rar|r\d{2}|zip|7z|mkv|mp4|avi|mp3|flac))/i);
|
||||
const fileName = (nameMatch?.[1] || filenameFromUrl(link)).trim();
|
||||
return {
|
||||
directUrl: parsed.link,
|
||||
fileName
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
this.cookie = "";
|
||||
}
|
||||
}
|
||||
|
||||
export function compactMegaWebError(error: unknown): string {
|
||||
return compactErrorText(error);
|
||||
}
|
||||
@@ -1,11 +1,14 @@
|
||||
import { API_BASE_URL, REQUEST_RETRIES } from "./constants";
|
||||
import { API_BASE_URL, APP_VERSION, REQUEST_RETRIES } from "./constants";
|
||||
import { compactErrorText, sleep } from "./utils";
|
||||
|
||||
const DEBRID_USER_AGENT = `RD-Node-Downloader/${APP_VERSION}`;
|
||||
|
||||
export interface UnrestrictedLink {
|
||||
fileName: string;
|
||||
directUrl: string;
|
||||
fileSize: number | null;
|
||||
retriesUsed: number;
|
||||
skipTlsVerify?: boolean;
|
||||
}
|
||||
|
||||
function shouldRetryStatus(status: number): boolean {
|
||||
@@ -16,7 +19,103 @@ function retryDelay(attempt: number): number {
|
||||
return Math.min(5000, 400 * 2 ** attempt);
|
||||
}
|
||||
|
||||
function parseErrorBody(status: number, body: string): string {
|
||||
function parseRetryAfterMs(value: string | null): number {
|
||||
const text = String(value || "").trim();
|
||||
if (!text) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const asSeconds = Number(text);
|
||||
if (Number.isFinite(asSeconds) && asSeconds >= 0) {
|
||||
return Math.min(120000, Math.floor(asSeconds * 1000));
|
||||
}
|
||||
|
||||
const asDate = Date.parse(text);
|
||||
if (Number.isFinite(asDate)) {
|
||||
return Math.min(120000, Math.max(0, asDate - Date.now()));
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
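// For HTTP 429 honour the Retry-After header (delta-seconds or HTTP-date, capped
// at 120 s); for all other retryable statuses fall back to exponential backoff.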
function retryDelayForResponse(response: Response, attempt: number): number {
|
||||
if (response.status !== 429) {
|
||||
return retryDelay(attempt);
|
||||
}
|
||||
const fromHeader = parseRetryAfterMs(response.headers.get("retry-after"));
|
||||
return fromHeader > 0 ? fromHeader : retryDelay(attempt);
|
||||
}
|
||||
|
||||
function readHttpStatusFromErrorText(text: string): number {
|
||||
const match = String(text || "").match(/HTTP\s+(\d{3})/i);
|
||||
return match ? Number(match[1]) : 0;
|
||||
}
|
||||
|
||||
function isRetryableErrorText(text: string): boolean {
|
||||
const status = readHttpStatusFromErrorText(text);
|
||||
if (status === 429 || status >= 500) {
|
||||
return true;
|
||||
}
|
||||
const lower = String(text || "").toLowerCase();
|
||||
return lower.includes("timeout")
|
||||
|| lower.includes("network")
|
||||
|| lower.includes("fetch failed")
|
||||
|| lower.includes("aborted")
|
||||
|| lower.includes("econnreset")
|
||||
|| lower.includes("enotfound")
|
||||
|| lower.includes("etimedout")
|
||||
|| lower.includes("html statt json");
|
||||
}
|
||||
|
||||
function withTimeoutSignal(signal: AbortSignal | undefined, timeoutMs: number): AbortSignal {
|
||||
if (!signal) {
|
||||
return AbortSignal.timeout(timeoutMs);
|
||||
}
|
||||
return AbortSignal.any([signal, AbortSignal.timeout(timeoutMs)]);
|
||||
}
|
||||
|
||||
async function sleepWithSignal(ms: number, signal?: AbortSignal): Promise<void> {
|
||||
if (!signal) {
|
||||
await sleep(ms);
|
||||
return;
|
||||
}
|
||||
// Check before entering the Promise constructor to avoid a race where the timer
|
||||
// resolves before the aborted check runs (especially when ms=0).
|
||||
if (signal.aborted) {
|
||||
throw new Error("aborted");
|
||||
}
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
let timer: NodeJS.Timeout | null = setTimeout(() => {
|
||||
timer = null;
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
resolve();
|
||||
}, Math.max(0, ms));
|
||||
|
||||
const onAbort = (): void => {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
timer = null;
|
||||
}
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
reject(new Error("aborted"));
|
||||
};
|
||||
|
||||
signal.addEventListener("abort", onAbort, { once: true });
|
||||
});
|
||||
}
|
||||
|
||||
function looksLikeHtmlResponse(contentType: string, body: string): boolean {
|
||||
const type = String(contentType || "").toLowerCase();
|
||||
if (type.includes("text/html") || type.includes("application/xhtml+xml")) {
|
||||
return true;
|
||||
}
|
||||
return /^\s*<(!doctype\s+html|html\b)/i.test(String(body || ""));
|
||||
}
|
||||
|
||||
function parseErrorBody(status: number, body: string, contentType: string): string {
|
||||
if (looksLikeHtmlResponse(contentType, body)) {
|
||||
return `Real-Debrid lieferte HTML statt JSON (HTTP ${status})`;
|
||||
}
|
||||
const clean = compactErrorText(body);
|
||||
return clean || `HTTP ${status}`;
|
||||
}
|
||||
@@ -28,7 +127,7 @@ export class RealDebridClient {
|
||||
this.token = token;
|
||||
}
|
||||
|
||||
public async unrestrictLink(link: string): Promise<UnrestrictedLink> {
|
||||
public async unrestrictLink(link: string, signal?: AbortSignal): Promise<UnrestrictedLink> {
|
||||
let lastError = "";
|
||||
for (let attempt = 1; attempt <= REQUEST_RETRIES; attempt += 1) {
|
||||
try {
|
||||
@@ -38,26 +137,46 @@ export class RealDebridClient {
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"User-Agent": "RD-Node-Downloader/1.1.12"
|
||||
"User-Agent": DEBRID_USER_AGENT
|
||||
},
|
||||
body
|
||||
body,
|
||||
signal: withTimeoutSignal(signal, 30000)
|
||||
});
|
||||
|
||||
const text = await response.text();
|
||||
const contentType = String(response.headers.get("content-type") || "");
|
||||
if (!response.ok) {
|
||||
const parsed = parseErrorBody(response.status, text);
|
||||
const parsed = parseErrorBody(response.status, text, contentType);
|
||||
if (shouldRetryStatus(response.status) && attempt < REQUEST_RETRIES) {
|
||||
await sleep(retryDelay(attempt));
|
||||
await sleepWithSignal(retryDelayForResponse(response, attempt), signal);
|
||||
continue;
|
||||
}
|
||||
throw new Error(parsed);
|
||||
}
|
||||
|
||||
const payload = JSON.parse(text) as Record<string, unknown>;
|
||||
if (looksLikeHtmlResponse(contentType, text)) {
|
||||
throw new Error("Real-Debrid lieferte HTML statt JSON");
|
||||
}
|
||||
|
||||
let payload: Record<string, unknown>;
|
||||
try {
|
||||
payload = JSON.parse(text) as Record<string, unknown>;
|
||||
} catch {
|
||||
throw new Error("Ungültige JSON-Antwort von Real-Debrid");
|
||||
}
|
||||
const directUrl = String(payload.download || payload.link || "").trim();
|
||||
if (!directUrl) {
|
||||
throw new Error("Unrestrict ohne Download-URL");
|
||||
}
|
||||
try {
|
||||
const parsedUrl = new URL(directUrl);
|
||||
if (parsedUrl.protocol !== "https:" && parsedUrl.protocol !== "http:") {
|
||||
throw new Error(`Ungültiges Download-URL-Protokoll (${parsedUrl.protocol})`);
|
||||
}
|
||||
} catch (urlError) {
|
||||
if (urlError instanceof Error && urlError.message.includes("Protokoll")) throw urlError;
|
||||
throw new Error("Real-Debrid Antwort enthält keine gültige Download-URL");
|
||||
}
|
||||
|
||||
const fileName = String(payload.filename || "download.bin").trim() || "download.bin";
|
||||
const fileSizeRaw = Number(payload.filesize ?? NaN);
|
||||
@@ -69,13 +188,16 @@ export class RealDebridClient {
|
||||
};
|
||||
} catch (error) {
|
||||
lastError = compactErrorText(error);
|
||||
if (attempt >= REQUEST_RETRIES) {
|
||||
if (signal?.aborted || (/aborted/i.test(lastError) && !/timeout/i.test(lastError))) {
|
||||
break;
|
||||
}
|
||||
await sleep(retryDelay(attempt));
|
||||
if (attempt >= REQUEST_RETRIES || !isRetryableErrorText(lastError)) {
|
||||
break;
|
||||
}
|
||||
await sleepWithSignal(retryDelay(attempt), signal);
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(lastError || "Unrestrict fehlgeschlagen");
|
||||
throw new Error(String(lastError || "Unrestrict fehlgeschlagen").replace(/^Error:\s*/i, ""));
|
||||
}
|
||||
}
|
||||
|
||||
128
src/main/session-log.ts
Normal file
@@ -0,0 +1,128 @@
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { setLogListener } from "./logger";
|
||||
|
||||
const SESSION_LOG_FLUSH_INTERVAL_MS = 200;
|
||||
|
||||
let sessionLogPath: string | null = null;
|
||||
let sessionLogsDir: string | null = null;
|
||||
let pendingLines: string[] = [];
|
||||
let flushTimer: NodeJS.Timeout | null = null;
|
||||
|
||||
function formatTimestamp(): string {
|
||||
const now = new Date();
|
||||
const y = now.getFullYear();
|
||||
const mo = String(now.getMonth() + 1).padStart(2, "0");
|
||||
const d = String(now.getDate()).padStart(2, "0");
|
||||
const h = String(now.getHours()).padStart(2, "0");
|
||||
const mi = String(now.getMinutes()).padStart(2, "0");
|
||||
const s = String(now.getSeconds()).padStart(2, "0");
|
||||
return `${y}-${mo}-${d}_${h}-${mi}-${s}`;
|
||||
}
|
||||
|
||||
function flushPending(): void {
|
||||
if (pendingLines.length === 0 || !sessionLogPath) {
|
||||
return;
|
||||
}
|
||||
const chunk = pendingLines.join("");
|
||||
pendingLines = [];
|
||||
try {
|
||||
fs.appendFileSync(sessionLogPath, chunk, "utf8");
|
||||
} catch {
|
||||
// ignore write errors
|
||||
}
|
||||
}
|
||||
|
||||
function scheduleFlush(): void {
|
||||
if (flushTimer) {
|
||||
return;
|
||||
}
|
||||
flushTimer = setTimeout(() => {
|
||||
flushTimer = null;
|
||||
flushPending();
|
||||
}, SESSION_LOG_FLUSH_INTERVAL_MS);
|
||||
}
|
||||
|
||||
function appendToSessionLog(line: string): void {
|
||||
if (!sessionLogPath) {
|
||||
return;
|
||||
}
|
||||
pendingLines.push(line);
|
||||
scheduleFlush();
|
||||
}
|
||||
|
||||
async function cleanupOldSessionLogs(dir: string, maxAgeDays: number): Promise<void> {
|
||||
try {
|
||||
const files = await fs.promises.readdir(dir);
|
||||
const cutoff = Date.now() - maxAgeDays * 24 * 60 * 60 * 1000;
|
||||
for (const file of files) {
|
||||
if (!file.startsWith("session_") || !file.endsWith(".txt")) {
|
||||
continue;
|
||||
}
|
||||
const filePath = path.join(dir, file);
|
||||
try {
|
||||
const stat = await fs.promises.stat(filePath);
|
||||
if (stat.mtimeMs < cutoff) {
|
||||
await fs.promises.unlink(filePath);
|
||||
}
|
||||
} catch {
|
||||
// ignore - file may be locked
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// ignore - dir may not exist
|
||||
}
|
||||
}
|
||||
|
||||
export function initSessionLog(baseDir: string): void {
|
||||
sessionLogsDir = path.join(baseDir, "session-logs");
|
||||
try {
|
||||
fs.mkdirSync(sessionLogsDir, { recursive: true });
|
||||
} catch {
|
||||
sessionLogsDir = null;
|
||||
return;
|
||||
}
|
||||
|
||||
const timestamp = formatTimestamp();
|
||||
sessionLogPath = path.join(sessionLogsDir, `session_${timestamp}.txt`);
|
||||
|
||||
const isoTimestamp = new Date().toISOString();
|
||||
try {
|
||||
fs.writeFileSync(sessionLogPath, `=== Session gestartet: ${isoTimestamp} ===\n`, "utf8");
|
||||
} catch {
|
||||
sessionLogPath = null;
|
||||
return;
|
||||
}
|
||||
|
||||
setLogListener((line) => appendToSessionLog(line));
|
||||
|
||||
void cleanupOldSessionLogs(sessionLogsDir, 7);
|
||||
}
|
||||
|
||||
export function getSessionLogPath(): string | null {
|
||||
return sessionLogPath;
|
||||
}
|
||||
|
||||
export function shutdownSessionLog(): void {
|
||||
if (!sessionLogPath) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Flush any pending lines
|
||||
if (flushTimer) {
|
||||
clearTimeout(flushTimer);
|
||||
flushTimer = null;
|
||||
}
|
||||
flushPending();
|
||||
|
||||
// Write closing line
|
||||
const isoTimestamp = new Date().toISOString();
|
||||
try {
|
||||
fs.appendFileSync(sessionLogPath, `=== Session beendet: ${isoTimestamp} ===\n`, "utf8");
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
setLogListener(null);
|
||||
sessionLogPath = null;
|
||||
}
|
||||
@@ -1,22 +1,228 @@
|
||||
import fs from "node:fs";
|
||||
import fsp from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { AppSettings, SessionState } from "../shared/types";
|
||||
import { AppSettings, BandwidthScheduleEntry, DebridProvider, DownloadItem, DownloadStatus, HistoryEntry, PackageEntry, PackagePriority, SessionState } from "../shared/types";
|
||||
import { defaultSettings } from "./constants";
|
||||
import { logger } from "./logger";
|
||||
|
||||
const VALID_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid"]);
|
||||
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
|
||||
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
|
||||
const VALID_CLEANUP_MODES = new Set(["none", "trash", "delete"]);
|
||||
const VALID_CONFLICT_MODES = new Set(["overwrite", "skip", "rename", "ask"]);
|
||||
const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]);
|
||||
const VALID_SPEED_MODES = new Set(["global", "per_download"]);
|
||||
const VALID_THEMES = new Set(["dark", "light"]);
|
||||
const VALID_EXTRACT_CPU_PRIORITIES = new Set(["high", "middle", "low"]);
|
||||
const VALID_PACKAGE_PRIORITIES = new Set<string>(["high", "normal", "low"]);
|
||||
const VALID_DOWNLOAD_STATUSES = new Set<DownloadStatus>([
|
||||
"queued", "validating", "downloading", "paused", "reconnect_wait", "extracting", "integrity_check", "completed", "failed", "cancelled"
|
||||
]);
|
||||
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
|
||||
const VALID_ONLINE_STATUSES = new Set(["online", "offline", "checking"]);
|
||||
|
||||
function asText(value: unknown): string {
|
||||
return String(value ?? "").trim();
|
||||
}
|
||||
|
||||
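// Clamp a persisted numeric value into [min, max] (floored to an integer),
// falling back when the value is not a finite number.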
function clampNumber(value: unknown, fallback: number, min: number, max: number): number {
|
||||
const num = Number(value);
|
||||
if (!Number.isFinite(num)) {
|
||||
return fallback;
|
||||
}
|
||||
return Math.max(min, Math.min(max, Math.floor(num)));
|
||||
}
|
||||
|
||||
function createScheduleId(index: number): string {
|
||||
return `sched-${Date.now().toString(36)}-${index.toString(36)}-${Math.random().toString(36).slice(2, 8)}`;
|
||||
}
|
||||
|
||||
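// Coerce persisted bandwidth schedules into a valid shape: clamp hours to 0-23,
// clamp the limit to 0-500000 kbps and regenerate missing ids.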
function normalizeBandwidthSchedules(raw: unknown): BandwidthScheduleEntry[] {
|
||||
if (!Array.isArray(raw)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const normalized: BandwidthScheduleEntry[] = [];
|
||||
for (let index = 0; index < raw.length; index += 1) {
|
||||
const entry = raw[index];
|
||||
if (!entry || typeof entry !== "object") {
|
||||
continue;
|
||||
}
|
||||
const value = entry as Partial<BandwidthScheduleEntry>;
|
||||
const rawId = typeof value.id === "string" ? value.id.trim() : "";
|
||||
normalized.push({
|
||||
id: rawId || createScheduleId(index),
|
||||
startHour: clampNumber(value.startHour, 0, 0, 23),
|
||||
endHour: clampNumber(value.endHour, 8, 0, 23),
|
||||
speedLimitKbps: clampNumber(value.speedLimitKbps, 0, 0, 500000),
|
||||
enabled: value.enabled === undefined ? true : Boolean(value.enabled)
|
||||
});
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function normalizeAbsoluteDir(value: unknown, fallback: string): string {
|
||||
const text = asText(value);
|
||||
if (!text || !path.isAbsolute(text)) {
|
||||
return path.resolve(fallback);
|
||||
}
|
||||
return path.resolve(text);
|
||||
}
|
||||
|
||||
const DEFAULT_COLUMN_ORDER = ["name", "size", "progress", "hoster", "account", "prio", "status", "speed"];
|
||||
const ALL_VALID_COLUMNS = new Set([...DEFAULT_COLUMN_ORDER, "added"]);
|
||||
|
||||
function normalizeColumnOrder(raw: unknown): string[] {
|
||||
if (!Array.isArray(raw) || raw.length === 0) {
|
||||
return [...DEFAULT_COLUMN_ORDER];
|
||||
}
|
||||
const valid = ALL_VALID_COLUMNS;
|
||||
const seen = new Set<string>();
|
||||
const result: string[] = [];
|
||||
for (const col of raw) {
|
||||
if (typeof col === "string" && valid.has(col) && !seen.has(col)) {
|
||||
seen.add(col);
|
||||
result.push(col);
|
||||
}
|
||||
}
|
||||
// "name" is mandatory — ensure it's always present
|
||||
if (!seen.has("name")) {
|
||||
result.unshift("name");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const DEPRECATED_UPDATE_REPOS = new Set([
|
||||
"sucukdeluxe/real-debrid-downloader"
|
||||
]);
|
||||
|
||||
function migrateUpdateRepo(raw: string, fallback: string): string {
|
||||
const trimmed = raw.trim();
|
||||
if (!trimmed || DEPRECATED_UPDATE_REPOS.has(trimmed.toLowerCase())) {
|
||||
return fallback;
|
||||
}
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
export function normalizeSettings(settings: AppSettings): AppSettings {
|
||||
const defaults = defaultSettings();
|
||||
const normalized: AppSettings = {
|
||||
token: asText(settings.token),
|
||||
megaLogin: asText(settings.megaLogin),
|
||||
megaPassword: asText(settings.megaPassword),
|
||||
bestToken: asText(settings.bestToken),
|
||||
allDebridToken: asText(settings.allDebridToken),
|
||||
ddownloadLogin: asText(settings.ddownloadLogin),
|
||||
ddownloadPassword: asText(settings.ddownloadPassword),
|
||||
oneFichierApiKey: asText(settings.oneFichierApiKey),
|
||||
archivePasswordList: String(settings.archivePasswordList ?? "").replace(/\r\n|\r/g, "\n"),
|
||||
rememberToken: Boolean(settings.rememberToken),
|
||||
providerPrimary: settings.providerPrimary,
|
||||
providerSecondary: settings.providerSecondary,
|
||||
providerTertiary: settings.providerTertiary,
|
||||
autoProviderFallback: Boolean(settings.autoProviderFallback),
|
||||
outputDir: normalizeAbsoluteDir(settings.outputDir, defaults.outputDir),
|
||||
packageName: asText(settings.packageName),
|
||||
autoExtract: Boolean(settings.autoExtract),
|
||||
autoRename4sf4sj: Boolean(settings.autoRename4sf4sj),
|
||||
extractDir: normalizeAbsoluteDir(settings.extractDir, defaults.extractDir),
|
||||
collectMkvToLibrary: Boolean(settings.collectMkvToLibrary),
|
||||
mkvLibraryDir: normalizeAbsoluteDir(settings.mkvLibraryDir, defaults.mkvLibraryDir),
|
||||
createExtractSubfolder: Boolean(settings.createExtractSubfolder),
|
||||
hybridExtract: Boolean(settings.hybridExtract),
|
||||
cleanupMode: settings.cleanupMode,
|
||||
extractConflictMode: settings.extractConflictMode,
|
||||
removeLinkFilesAfterExtract: Boolean(settings.removeLinkFilesAfterExtract),
|
||||
removeSamplesAfterExtract: Boolean(settings.removeSamplesAfterExtract),
|
||||
enableIntegrityCheck: Boolean(settings.enableIntegrityCheck),
|
||||
autoResumeOnStart: Boolean(settings.autoResumeOnStart),
|
||||
autoReconnect: Boolean(settings.autoReconnect),
|
||||
maxParallel: clampNumber(settings.maxParallel, defaults.maxParallel, 1, 50),
|
||||
maxParallelExtract: clampNumber(settings.maxParallelExtract, defaults.maxParallelExtract, 1, 8),
|
||||
retryLimit: clampNumber(settings.retryLimit, defaults.retryLimit, 0, 99),
|
||||
reconnectWaitSeconds: clampNumber(settings.reconnectWaitSeconds, defaults.reconnectWaitSeconds, 10, 600),
|
||||
completedCleanupPolicy: settings.completedCleanupPolicy,
|
||||
speedLimitEnabled: Boolean(settings.speedLimitEnabled),
|
||||
speedLimitKbps: clampNumber(settings.speedLimitKbps, defaults.speedLimitKbps, 0, 500000),
|
||||
speedLimitMode: settings.speedLimitMode,
|
||||
autoUpdateCheck: Boolean(settings.autoUpdateCheck),
|
||||
updateRepo: migrateUpdateRepo(asText(settings.updateRepo), defaults.updateRepo),
|
||||
clipboardWatch: Boolean(settings.clipboardWatch),
|
||||
minimizeToTray: Boolean(settings.minimizeToTray),
|
||||
collapseNewPackages: settings.collapseNewPackages !== undefined ? Boolean(settings.collapseNewPackages) : defaults.collapseNewPackages,
|
||||
autoSkipExtracted: settings.autoSkipExtracted !== undefined ? Boolean(settings.autoSkipExtracted) : defaults.autoSkipExtracted,
|
||||
confirmDeleteSelection: settings.confirmDeleteSelection !== undefined ? Boolean(settings.confirmDeleteSelection) : defaults.confirmDeleteSelection,
|
||||
totalDownloadedAllTime: typeof settings.totalDownloadedAllTime === "number" && settings.totalDownloadedAllTime >= 0 ? settings.totalDownloadedAllTime : defaults.totalDownloadedAllTime,
|
||||
theme: VALID_THEMES.has(settings.theme) ? settings.theme : defaults.theme,
|
||||
bandwidthSchedules: normalizeBandwidthSchedules(settings.bandwidthSchedules),
|
||||
columnOrder: normalizeColumnOrder(settings.columnOrder),
|
||||
extractCpuPriority: settings.extractCpuPriority,
|
||||
autoExtractWhenStopped: settings.autoExtractWhenStopped !== undefined ? Boolean(settings.autoExtractWhenStopped) : defaults.autoExtractWhenStopped
|
||||
};
|
||||
|
||||
if (!VALID_PRIMARY_PROVIDERS.has(normalized.providerPrimary)) {
|
||||
normalized.providerPrimary = defaults.providerPrimary;
|
||||
}
|
||||
if (!VALID_FALLBACK_PROVIDERS.has(normalized.providerSecondary)) {
|
||||
normalized.providerSecondary = "none";
|
||||
}
|
||||
if (!VALID_FALLBACK_PROVIDERS.has(normalized.providerTertiary)) {
|
||||
normalized.providerTertiary = "none";
|
||||
}
|
||||
if (normalized.providerSecondary === normalized.providerPrimary) {
|
||||
normalized.providerSecondary = "none";
|
||||
}
|
||||
if (normalized.providerTertiary === normalized.providerPrimary || normalized.providerTertiary === normalized.providerSecondary) {
|
||||
normalized.providerTertiary = "none";
|
||||
}
|
||||
if (!VALID_CLEANUP_MODES.has(normalized.cleanupMode)) {
|
||||
normalized.cleanupMode = defaults.cleanupMode;
|
||||
}
|
||||
if (!VALID_CONFLICT_MODES.has(normalized.extractConflictMode)) {
|
||||
normalized.extractConflictMode = defaults.extractConflictMode;
|
||||
}
|
||||
if (!VALID_FINISHED_POLICIES.has(normalized.completedCleanupPolicy)) {
|
||||
normalized.completedCleanupPolicy = defaults.completedCleanupPolicy;
|
||||
}
|
||||
if (!VALID_SPEED_MODES.has(normalized.speedLimitMode)) {
|
||||
normalized.speedLimitMode = defaults.speedLimitMode;
|
||||
}
|
||||
if (!VALID_EXTRACT_CPU_PRIORITIES.has(normalized.extractCpuPriority)) {
|
||||
normalized.extractCpuPriority = defaults.extractCpuPriority;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function sanitizeCredentialPersistence(settings: AppSettings): AppSettings {
|
||||
if (settings.rememberToken) {
|
||||
return settings;
|
||||
}
|
||||
return {
|
||||
...settings,
|
||||
token: "",
|
||||
megaLogin: "",
|
||||
megaPassword: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "",
|
||||
ddownloadLogin: "",
|
||||
ddownloadPassword: "",
|
||||
oneFichierApiKey: ""
|
||||
};
|
||||
}
|
||||
|
||||
export interface StoragePaths {
|
||||
baseDir: string;
|
||||
configFile: string;
|
||||
sessionFile: string;
|
||||
historyFile: string;
|
||||
}
|
||||
|
||||
export function createStoragePaths(baseDir: string): StoragePaths {
|
||||
return {
|
||||
baseDir,
|
||||
configFile: path.join(baseDir, "rd_downloader_config.json"),
|
||||
sessionFile: path.join(baseDir, "rd_session_state.json")
|
||||
sessionFile: path.join(baseDir, "rd_session_state.json"),
|
||||
historyFile: path.join(baseDir, "rd_history.json")
|
||||
};
|
||||
}
|
||||
|
||||
@@ -24,43 +230,306 @@ function ensureBaseDir(baseDir: string): void {
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
}
|
||||
|
||||
function asRecord(value: unknown): Record<string, unknown> | null {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
return null;
|
||||
}
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function readSettingsFile(filePath: string): AppSettings | null {
|
||||
try {
|
||||
const parsed = JSON.parse(fs.readFileSync(filePath, "utf8")) as AppSettings;
|
||||
const merged = normalizeSettings({
|
||||
...defaultSettings(),
|
||||
...parsed
|
||||
});
|
||||
return sanitizeCredentialPersistence(merged);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
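// Rebuild a SessionState from untrusted JSON: unknown statuses fall back to "queued",
// items whose package no longer exists are dropped, and packageOrder is re-synced
// with the surviving packages.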
export function normalizeLoadedSession(raw: unknown): SessionState {
|
||||
const fallback = emptySession();
|
||||
const parsed = asRecord(raw);
|
||||
if (!parsed) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
const itemsById: Record<string, DownloadItem> = {};
|
||||
const rawItems = asRecord(parsed.items) ?? {};
|
||||
for (const [entryId, rawItem] of Object.entries(rawItems)) {
|
||||
const item = asRecord(rawItem);
|
||||
if (!item) {
|
||||
continue;
|
||||
}
|
||||
const id = asText(item.id) || entryId;
|
||||
const packageId = asText(item.packageId);
|
||||
const url = asText(item.url);
|
||||
if (!id || !packageId || !url) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const statusRaw = asText(item.status) as DownloadStatus;
|
||||
const status: DownloadStatus = VALID_DOWNLOAD_STATUSES.has(statusRaw) ? statusRaw : "queued";
|
||||
const providerRaw = asText(item.provider) as DebridProvider;
|
||||
|
||||
const onlineStatusRaw = asText(item.onlineStatus);
|
||||
|
||||
itemsById[id] = {
|
||||
id,
|
||||
packageId,
|
||||
url,
|
||||
provider: VALID_ITEM_PROVIDERS.has(providerRaw) ? providerRaw : null,
|
||||
status,
|
||||
retries: clampNumber(item.retries, 0, 0, 1_000_000),
|
||||
speedBps: clampNumber(item.speedBps, 0, 0, 10_000_000_000),
|
||||
downloadedBytes: clampNumber(item.downloadedBytes, 0, 0, 10_000_000_000_000),
|
||||
totalBytes: item.totalBytes == null ? null : clampNumber(item.totalBytes, 0, 0, 10_000_000_000_000),
|
||||
progressPercent: clampNumber(item.progressPercent, 0, 0, 100),
|
||||
fileName: asText(item.fileName) || "download.bin",
|
||||
targetPath: asText(item.targetPath),
|
||||
resumable: item.resumable === undefined ? true : Boolean(item.resumable),
|
||||
attempts: clampNumber(item.attempts, 0, 0, 10_000),
|
||||
lastError: asText(item.lastError),
|
||||
fullStatus: asText(item.fullStatus),
|
||||
onlineStatus: VALID_ONLINE_STATUSES.has(onlineStatusRaw) ? onlineStatusRaw as "online" | "offline" | "checking" : undefined,
|
||||
createdAt: clampNumber(item.createdAt, now, 0, Number.MAX_SAFE_INTEGER),
|
||||
updatedAt: clampNumber(item.updatedAt, now, 0, Number.MAX_SAFE_INTEGER)
|
||||
};
|
||||
}
|
||||
|
||||
const packagesById: Record<string, PackageEntry> = {};
|
||||
const rawPackages = asRecord(parsed.packages) ?? {};
|
||||
for (const [entryId, rawPkg] of Object.entries(rawPackages)) {
|
||||
const pkg = asRecord(rawPkg);
|
||||
if (!pkg) {
|
||||
continue;
|
||||
}
|
||||
const id = asText(pkg.id) || entryId;
|
||||
if (!id) {
|
||||
continue;
|
||||
}
|
||||
const statusRaw = asText(pkg.status) as DownloadStatus;
|
||||
const status: DownloadStatus = VALID_DOWNLOAD_STATUSES.has(statusRaw) ? statusRaw : "queued";
|
||||
const rawItemIds = Array.isArray(pkg.itemIds) ? pkg.itemIds : [];
|
||||
packagesById[id] = {
|
||||
id,
|
||||
name: asText(pkg.name) || "Paket",
|
||||
outputDir: asText(pkg.outputDir),
|
||||
extractDir: asText(pkg.extractDir),
|
||||
status,
|
||||
itemIds: rawItemIds
|
||||
.map((value) => asText(value))
|
||||
.filter((value) => value.length > 0),
|
||||
cancelled: Boolean(pkg.cancelled),
|
||||
enabled: pkg.enabled === undefined ? true : Boolean(pkg.enabled),
|
||||
priority: VALID_PACKAGE_PRIORITIES.has(asText(pkg.priority)) ? asText(pkg.priority) as PackagePriority : "normal",
|
||||
createdAt: clampNumber(pkg.createdAt, now, 0, Number.MAX_SAFE_INTEGER),
|
||||
updatedAt: clampNumber(pkg.updatedAt, now, 0, Number.MAX_SAFE_INTEGER)
|
||||
};
|
||||
}
|
||||
|
||||
for (const [itemId, item] of Object.entries(itemsById)) {
|
||||
if (!packagesById[item.packageId]) {
|
||||
delete itemsById[itemId];
|
||||
}
|
||||
}
|
||||
|
||||
for (const pkg of Object.values(packagesById)) {
|
||||
pkg.itemIds = pkg.itemIds.filter((itemId) => {
|
||||
const item = itemsById[itemId];
|
||||
return Boolean(item) && item.packageId === pkg.id;
|
||||
});
|
||||
}
|
||||
|
||||
const rawOrder = Array.isArray(parsed.packageOrder) ? parsed.packageOrder : [];
|
||||
const seenOrder = new Set<string>();
|
||||
const packageOrder = rawOrder
|
||||
.map((entry) => asText(entry))
|
||||
.filter((id) => {
|
||||
if (!(id in packagesById) || seenOrder.has(id)) {
|
||||
return false;
|
||||
}
|
||||
seenOrder.add(id);
|
||||
return true;
|
||||
});
|
||||
for (const packageId of Object.keys(packagesById)) {
|
||||
if (!seenOrder.has(packageId)) {
|
||||
seenOrder.add(packageId);
|
||||
packageOrder.push(packageId);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...fallback,
|
||||
version: clampNumber(parsed.version, fallback.version, 1, 10),
|
||||
packageOrder,
|
||||
packages: packagesById,
|
||||
items: itemsById,
|
||||
runStartedAt: clampNumber(parsed.runStartedAt, 0, 0, Number.MAX_SAFE_INTEGER),
|
||||
totalDownloadedBytes: clampNumber(parsed.totalDownloadedBytes, 0, 0, Number.MAX_SAFE_INTEGER),
|
||||
summaryText: asText(parsed.summaryText),
|
||||
reconnectUntil: clampNumber(parsed.reconnectUntil, 0, 0, Number.MAX_SAFE_INTEGER),
|
||||
reconnectReason: asText(parsed.reconnectReason),
|
||||
paused: Boolean(parsed.paused),
|
||||
running: Boolean(parsed.running),
|
||||
updatedAt: clampNumber(parsed.updatedAt, now, 0, Number.MAX_SAFE_INTEGER)
|
||||
};
|
||||
}
|
||||
|
||||
export function loadSettings(paths: StoragePaths): AppSettings {
|
||||
ensureBaseDir(paths.baseDir);
|
||||
if (!fs.existsSync(paths.configFile)) {
|
||||
return defaultSettings();
|
||||
}
|
||||
const loaded = readSettingsFile(paths.configFile);
|
||||
if (loaded) {
|
||||
return loaded;
|
||||
}
|
||||
|
||||
const backupFile = `${paths.configFile}.bak`;
|
||||
const backupLoaded = fs.existsSync(backupFile) ? readSettingsFile(backupFile) : null;
|
||||
if (backupLoaded) {
|
||||
logger.warn("Konfiguration defekt, Backup-Datei wird verwendet");
|
||||
try {
|
||||
const parsed = JSON.parse(fs.readFileSync(paths.configFile, "utf8")) as Partial<AppSettings>;
|
||||
const merged: AppSettings = {
|
||||
...defaultSettings(),
|
||||
...parsed
|
||||
};
|
||||
if (!VALID_PROVIDERS.has(merged.providerPrimary)) {
|
||||
merged.providerPrimary = "realdebrid";
|
||||
const payload = JSON.stringify(backupLoaded, null, 2);
|
||||
const tempPath = `${paths.configFile}.tmp`;
|
||||
fs.writeFileSync(tempPath, payload, "utf8");
|
||||
syncRenameWithExdevFallback(tempPath, paths.configFile);
|
||||
} catch {
|
||||
// ignore restore write failure
|
||||
}
|
||||
if (!VALID_PROVIDERS.has(merged.providerSecondary)) {
|
||||
merged.providerSecondary = "megadebrid";
|
||||
return backupLoaded;
|
||||
}
|
||||
if (!VALID_PROVIDERS.has(merged.providerTertiary)) {
|
||||
merged.providerTertiary = "bestdebrid";
|
||||
}
|
||||
merged.autoProviderFallback = Boolean(merged.autoProviderFallback);
|
||||
merged.maxParallel = Math.max(1, Math.min(50, Number(merged.maxParallel) || 4));
|
||||
merged.speedLimitKbps = Math.max(0, Math.min(500000, Number(merged.speedLimitKbps) || 0));
|
||||
merged.reconnectWaitSeconds = Math.max(10, Math.min(600, Number(merged.reconnectWaitSeconds) || 45));
|
||||
return merged;
|
||||
} catch (error) {
|
||||
logger.error(`Konfiguration konnte nicht geladen werden: ${String(error)}`);
|
||||
|
||||
logger.error("Konfiguration konnte nicht geladen werden (auch Backup fehlgeschlagen)");
|
||||
return defaultSettings();
|
||||
}
|
||||
|
||||
function syncRenameWithExdevFallback(tempPath: string, targetPath: string): void {
|
||||
try {
|
||||
fs.renameSync(tempPath, targetPath);
|
||||
} catch (renameError: unknown) {
|
||||
if (renameError && typeof renameError === "object" && "code" in renameError && (renameError as NodeJS.ErrnoException).code === "EXDEV") {
|
||||
fs.copyFileSync(tempPath, targetPath);
|
||||
try { fs.rmSync(tempPath, { force: true }); } catch {}
|
||||
} else {
|
||||
throw renameError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function sessionTempPath(sessionFile: string, kind: "sync" | "async"): string {
|
||||
return `${sessionFile}.${kind}.tmp`;
|
||||
}
|
||||
|
||||
function sessionBackupPath(sessionFile: string): string {
|
||||
return `${sessionFile}.bak`;
|
||||
}
|
||||
|
||||
export function normalizeLoadedSessionTransientFields(session: SessionState): SessionState {
|
||||
// Reset transient fields that may be stale from a previous crash
|
||||
const ACTIVE_STATUSES = new Set(["downloading", "validating", "extracting", "integrity_check", "paused", "reconnect_wait"]);
|
||||
for (const item of Object.values(session.items)) {
|
||||
if (ACTIVE_STATUSES.has(item.status)) {
|
||||
item.status = "queued";
|
||||
item.lastError = "";
|
||||
}
|
||||
// Always clear stale speed values
|
||||
item.speedBps = 0;
|
||||
}
|
||||
|
||||
// Reset package-level active statuses to queued (mirrors item reset above)
|
||||
const ACTIVE_PKG_STATUSES = new Set(["downloading", "validating", "extracting", "integrity_check", "paused", "reconnect_wait"]);
|
||||
for (const pkg of Object.values(session.packages)) {
|
||||
if (ACTIVE_PKG_STATUSES.has(pkg.status)) {
|
||||
pkg.status = "queued";
|
||||
}
|
||||
pkg.postProcessLabel = undefined;
|
||||
}
|
||||
|
||||
// Clear stale session-level running/paused flags
|
||||
session.running = false;
|
||||
session.paused = false;
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
function readSessionFile(filePath: string): SessionState | null {
|
||||
try {
|
||||
const parsed = JSON.parse(fs.readFileSync(filePath, "utf8")) as unknown;
|
||||
return normalizeLoadedSessionTransientFields(normalizeLoadedSession(parsed));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function saveSettings(paths: StoragePaths, settings: AppSettings): void {
|
||||
ensureBaseDir(paths.baseDir);
|
||||
const payload = JSON.stringify(settings, null, 2);
|
||||
// Create a backup of the existing config before overwriting
|
||||
if (fs.existsSync(paths.configFile)) {
|
||||
try {
|
||||
fs.copyFileSync(paths.configFile, `${paths.configFile}.bak`);
|
||||
} catch {
|
||||
// Best-effort backup; proceed even if it fails
|
||||
}
|
||||
}
|
||||
const persisted = sanitizeCredentialPersistence(normalizeSettings(settings));
|
||||
const payload = JSON.stringify(persisted, null, 2);
|
||||
const tempPath = `${paths.configFile}.tmp`;
|
||||
try {
|
||||
fs.writeFileSync(tempPath, payload, "utf8");
|
||||
fs.renameSync(tempPath, paths.configFile);
|
||||
syncRenameWithExdevFallback(tempPath, paths.configFile);
|
||||
} catch (error) {
|
||||
try { fs.rmSync(tempPath, { force: true }); } catch { /* ignore */ }
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
let asyncSettingsSaveRunning = false;
|
||||
let asyncSettingsSaveQueued: { paths: StoragePaths; settings: AppSettings } | null = null;
|
||||
|
||||
async function writeSettingsPayload(paths: StoragePaths, payload: string): Promise<void> {
|
||||
await fs.promises.mkdir(paths.baseDir, { recursive: true });
|
||||
await fsp.copyFile(paths.configFile, `${paths.configFile}.bak`).catch(() => {});
|
||||
const tempPath = `${paths.configFile}.settings.tmp`;
|
||||
await fsp.writeFile(tempPath, payload, "utf8");
|
||||
try {
|
||||
await fsp.rename(tempPath, paths.configFile);
|
||||
} catch (renameError: unknown) {
|
||||
if (renameError && typeof renameError === "object" && "code" in renameError && (renameError as NodeJS.ErrnoException).code === "EXDEV") {
|
||||
await fsp.copyFile(tempPath, paths.configFile);
|
||||
await fsp.rm(tempPath, { force: true }).catch(() => {});
|
||||
} else {
|
||||
await fsp.rm(tempPath, { force: true }).catch(() => {});
|
||||
throw renameError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveSettingsAsync(paths: StoragePaths, settings: AppSettings): Promise<void> {
|
||||
const persisted = sanitizeCredentialPersistence(normalizeSettings(settings));
|
||||
const payload = JSON.stringify(persisted, null, 2);
|
||||
if (asyncSettingsSaveRunning) {
|
||||
asyncSettingsSaveQueued = { paths, settings };
|
||||
return;
|
||||
}
|
||||
asyncSettingsSaveRunning = true;
|
||||
try {
|
||||
await writeSettingsPayload(paths, payload);
|
||||
} catch (error) {
|
||||
logger.error(`Async Settings-Save fehlgeschlagen: ${String(error)}`);
|
||||
} finally {
|
||||
asyncSettingsSaveRunning = false;
|
||||
if (asyncSettingsSaveQueued) {
|
||||
const queued = asyncSettingsSaveQueued;
|
||||
asyncSettingsSaveQueued = null;
|
||||
void saveSettingsAsync(queued.paths, queued.settings);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function emptySession(): SessionState {
|
||||
@@ -85,25 +554,196 @@ export function loadSession(paths: StoragePaths): SessionState {
|
||||
if (!fs.existsSync(paths.sessionFile)) {
|
||||
return emptySession();
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as Partial<SessionState>;
|
||||
return {
|
||||
...emptySession(),
|
||||
...parsed,
|
||||
packages: parsed.packages ?? {},
|
||||
items: parsed.items ?? {},
|
||||
packageOrder: parsed.packageOrder ?? []
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`Session konnte nicht geladen werden: ${String(error)}`);
|
||||
return emptySession();
|
||||
|
||||
const primary = readSessionFile(paths.sessionFile);
|
||||
if (primary) {
|
||||
return primary;
|
||||
}
|
||||
|
||||
const backupFile = sessionBackupPath(paths.sessionFile);
|
||||
const backup = fs.existsSync(backupFile) ? readSessionFile(backupFile) : null;
|
||||
if (backup) {
|
||||
logger.warn("Session defekt, Backup-Datei wird verwendet");
|
||||
try {
|
||||
const payload = JSON.stringify({ ...backup, updatedAt: Date.now() });
|
||||
const tempPath = sessionTempPath(paths.sessionFile, "sync");
|
||||
fs.writeFileSync(tempPath, payload, "utf8");
|
||||
syncRenameWithExdevFallback(tempPath, paths.sessionFile);
|
||||
} catch {
|
||||
// ignore restore write failure
|
||||
}
|
||||
return backup;
|
||||
}
|
||||
|
||||
logger.error("Session konnte nicht geladen werden (auch Backup fehlgeschlagen)");
|
||||
return emptySession();
|
||||
}
|
||||
|
||||
export function saveSession(paths: StoragePaths, session: SessionState): void {
|
||||
syncSaveGeneration += 1;
|
||||
ensureBaseDir(paths.baseDir);
|
||||
const payload = JSON.stringify({ ...session, updatedAt: Date.now() }, null, 2);
|
||||
const tempPath = `${paths.sessionFile}.tmp`;
|
||||
fs.writeFileSync(tempPath, payload, "utf8");
|
||||
fs.renameSync(tempPath, paths.sessionFile);
|
||||
if (fs.existsSync(paths.sessionFile)) {
|
||||
try {
|
||||
fs.copyFileSync(paths.sessionFile, sessionBackupPath(paths.sessionFile));
|
||||
} catch {
|
||||
// Best-effort backup; proceed even if it fails
|
||||
}
|
||||
}
|
||||
const payload = JSON.stringify({ ...session, updatedAt: Date.now() });
|
||||
const tempPath = sessionTempPath(paths.sessionFile, "sync");
|
||||
try {
|
||||
fs.writeFileSync(tempPath, payload, "utf8");
|
||||
syncRenameWithExdevFallback(tempPath, paths.sessionFile);
|
||||
} catch (error) {
|
||||
try { fs.rmSync(tempPath, { force: true }); } catch { /* ignore */ }
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
let asyncSaveRunning = false;
|
||||
let asyncSaveQueued: { paths: StoragePaths; payload: string } | null = null;
|
||||
let syncSaveGeneration = 0;
|
||||
|
||||
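// The generation counter guards against stale async writes: if a synchronous save
// happened after this async save was queued, the temp file is discarded instead of
// overwriting the newer session file.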
async function writeSessionPayload(paths: StoragePaths, payload: string, generation: number): Promise<void> {
|
||||
await fs.promises.mkdir(paths.baseDir, { recursive: true });
|
||||
await fsp.copyFile(paths.sessionFile, sessionBackupPath(paths.sessionFile)).catch(() => {});
|
||||
const tempPath = sessionTempPath(paths.sessionFile, "async");
|
||||
await fsp.writeFile(tempPath, payload, "utf8");
|
||||
// If a synchronous save occurred after this async save started, discard the stale write
|
||||
if (generation < syncSaveGeneration) {
|
||||
await fsp.rm(tempPath, { force: true }).catch(() => {});
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await fsp.rename(tempPath, paths.sessionFile);
|
||||
} catch (renameError: unknown) {
|
||||
if (renameError && typeof renameError === "object" && "code" in renameError && (renameError as NodeJS.ErrnoException).code === "EXDEV") {
|
||||
if (generation < syncSaveGeneration) {
|
||||
await fsp.rm(tempPath, { force: true }).catch(() => {});
|
||||
return;
|
||||
}
|
||||
await fsp.copyFile(tempPath, paths.sessionFile);
|
||||
await fsp.rm(tempPath, { force: true }).catch(() => {});
|
||||
} else {
|
||||
await fsp.rm(tempPath, { force: true }).catch(() => {});
|
||||
throw renameError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function saveSessionPayloadAsync(paths: StoragePaths, payload: string): Promise<void> {
|
||||
if (asyncSaveRunning) {
|
||||
asyncSaveQueued = { paths, payload };
|
||||
return;
|
||||
}
|
||||
asyncSaveRunning = true;
|
||||
const gen = syncSaveGeneration;
|
||||
try {
|
||||
await writeSessionPayload(paths, payload, gen);
|
||||
} catch (error) {
|
||||
logger.error(`Async Session-Save fehlgeschlagen: ${String(error)}`);
|
||||
} finally {
|
||||
asyncSaveRunning = false;
|
||||
if (asyncSaveQueued) {
|
||||
const queued = asyncSaveQueued;
|
||||
asyncSaveQueued = null;
|
||||
void saveSessionPayloadAsync(queued.paths, queued.payload);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function cancelPendingAsyncSaves(): void {
|
||||
asyncSaveQueued = null;
|
||||
asyncSettingsSaveQueued = null;
|
||||
syncSaveGeneration += 1;
|
||||
}
|
||||
|
||||
export async function saveSessionAsync(paths: StoragePaths, session: SessionState): Promise<void> {
|
||||
const payload = JSON.stringify({ ...session, updatedAt: Date.now() });
|
||||
await saveSessionPayloadAsync(paths, payload);
|
||||
}
|
||||
|
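The asyncSaveRunning / asyncSaveQueued pair gives single-flight semantics: while one write is in progress, later calls only replace the queued payload, so a burst of saves collapses into at most one trailing write. Illustrative usage, not taken from the sources:

// Illustrative only: intermediate snapshots are skipped, the last one wins.
function exampleBurstOfSaves(paths: StoragePaths, snapshots: SessionState[]): void {
  for (const snapshot of snapshots) {
    // While a write is in flight, this only overwrites asyncSaveQueued; once the
    // in-flight write finishes, exactly one more write runs with the most recent payload.
    void saveSessionAsync(paths, snapshot);
  }
}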
const MAX_HISTORY_ENTRIES = 500;

function normalizeHistoryEntry(raw: unknown, index: number): HistoryEntry | null {
  const entry = asRecord(raw);
  if (!entry) return null;

  const id = asText(entry.id) || `hist-${Date.now().toString(36)}-${index}`;
  const name = asText(entry.name) || "Unbenannt";
  const providerRaw = asText(entry.provider);

  return {
    id,
    name,
    totalBytes: clampNumber(entry.totalBytes, 0, 0, Number.MAX_SAFE_INTEGER),
    downloadedBytes: clampNumber(entry.downloadedBytes, 0, 0, Number.MAX_SAFE_INTEGER),
    fileCount: clampNumber(entry.fileCount, 0, 0, 100000),
    provider: VALID_ITEM_PROVIDERS.has(providerRaw as DebridProvider) ? providerRaw as DebridProvider : null,
    completedAt: clampNumber(entry.completedAt, Date.now(), 0, Number.MAX_SAFE_INTEGER),
    durationSeconds: clampNumber(entry.durationSeconds, 0, 0, Number.MAX_SAFE_INTEGER),
    status: entry.status === "deleted" ? "deleted" : "completed",
    outputDir: asText(entry.outputDir),
    urls: Array.isArray(entry.urls) ? (entry.urls as unknown[]).map(String).filter(Boolean) : undefined
  };
}

export function loadHistory(paths: StoragePaths): HistoryEntry[] {
  ensureBaseDir(paths.baseDir);
  if (!fs.existsSync(paths.historyFile)) {
    return [];
  }

  try {
    const raw = JSON.parse(fs.readFileSync(paths.historyFile, "utf8")) as unknown;
    if (!Array.isArray(raw)) return [];

    const entries: HistoryEntry[] = [];
    for (let i = 0; i < raw.length && entries.length < MAX_HISTORY_ENTRIES; i++) {
      const normalized = normalizeHistoryEntry(raw[i], i);
      if (normalized) entries.push(normalized);
    }
    return entries;
  } catch {
    return [];
  }
}

export function saveHistory(paths: StoragePaths, entries: HistoryEntry[]): void {
  ensureBaseDir(paths.baseDir);
  const trimmed = entries.slice(0, MAX_HISTORY_ENTRIES);
  const payload = JSON.stringify(trimmed, null, 2);
  const tempPath = `${paths.historyFile}.tmp`;
  try {
    fs.writeFileSync(tempPath, payload, "utf8");
    syncRenameWithExdevFallback(tempPath, paths.historyFile);
  } catch (error) {
    try { fs.rmSync(tempPath, { force: true }); } catch { /* ignore */ }
    throw error;
  }
}

export function addHistoryEntry(paths: StoragePaths, entry: HistoryEntry): HistoryEntry[] {
  const existing = loadHistory(paths);
  const updated = [entry, ...existing].slice(0, MAX_HISTORY_ENTRIES);
  saveHistory(paths, updated);
  return updated;
}

export function removeHistoryEntry(paths: StoragePaths, entryId: string): HistoryEntry[] {
  const existing = loadHistory(paths);
  const updated = existing.filter(e => e.id !== entryId);
  saveHistory(paths, updated);
  return updated;
}

export function clearHistory(paths: StoragePaths): void {
  ensureBaseDir(paths.baseDir);
  if (fs.existsSync(paths.historyFile)) {
    try {
      fs.unlinkSync(paths.historyFile);
    } catch {
      // ignore
    }
  }
}

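For reference, addHistoryEntry prepends and caps the list, so the newest entry always ends up first. A small illustrative call; the paths value and the entry fields are invented for the example:

// Illustrative only.
declare const paths: StoragePaths; // assumed to be resolved elsewhere
const entry: HistoryEntry = {
  id: "hist-demo-1",
  name: "Example.Package.S01E01",
  totalBytes: 1_500_000_000,
  downloadedBytes: 1_500_000_000,
  fileCount: 3,
  provider: "realdebrid",
  completedAt: Date.now(),
  durationSeconds: 420,
  status: "completed",
  outputDir: "/downloads/Example.Package.S01E01"
};
const updated = addHistoryEntry(paths, entry);
// updated[0] === entry, and updated.length never exceeds MAX_HISTORY_ENTRIES (500)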
src/main/update.ts (1154 changed lines): file diff suppressed because it is too large.
@ -1,20 +1,55 @@
import path from "node:path";
import { ParsedPackageInput } from "../shared/types";

function safeDecodeURIComponent(value: string): string {
  try {
    return decodeURIComponent(value);
  } catch {
    return value;
  }
}

const WINDOWS_RESERVED_BASENAMES = new Set([
  "con", "prn", "aux", "nul",
  "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8", "com9",
  "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7", "lpt8", "lpt9"
]);

export function compactErrorText(message: unknown, maxLen = 220): string {
  const raw = String(message ?? "").replace(/<[^>]+>/g, " ").replace(/\s+/g, " ").trim();
  if (!raw) {
    return "Unbekannter Fehler";
  }
  if (raw.length <= maxLen) {
  const safeMaxLen = Number.isFinite(maxLen) ? Math.max(4, Math.floor(maxLen)) : 220;
  if (raw.length <= safeMaxLen) {
    return raw;
  }
  return `${raw.slice(0, maxLen - 3)}...`;
  return `${raw.slice(0, safeMaxLen - 3)}...`;
}

export function sanitizeFilename(name: string): string {
  const cleaned = String(name || "").trim().replace(/[\\/:*?"<>|]/g, " ").replace(/\s+/g, " ").trim();
  return cleaned || "Paket";
  const cleaned = String(name || "")
    .replace(/\0/g, "")
    .replace(/[\\/:*?"<>|]/g, " ")
    .replace(/\s+/g, " ")
    .trim();

  let normalized = cleaned
    .replace(/^[.\s]+/g, "")
    .replace(/[.\s]+$/g, "")
    .trim();

  if (!normalized || normalized === "." || normalized === ".." || /^\.+$/.test(normalized)) {
    return "Paket";
  }

  const parsed = path.parse(normalized);
  const reservedBase = (parsed.name.split(".")[0] || parsed.name).toLowerCase();
  if (WINDOWS_RESERVED_BASENAMES.has(reservedBase)) {
    normalized = `${parsed.name.replace(/^([^.]*)/, "$1_")}${parsed.ext}`;
  }

  return normalized || "Paket";
}

export function isHttpLink(value: string): boolean {
@ -30,6 +65,60 @@ export function isHttpLink(value: string): boolean {
  }
}

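A few illustrative calls against the newer sanitizeFilename shown above; the expected results follow from its replace/trim steps and are not taken from the test suite:

// Illustrative only.
sanitizeFilename("  My/Show: S01?  "); // -> "My Show S01"  (forbidden characters become spaces)
sanitizeFilename("...hidden...");      // -> "hidden"       (leading/trailing dots stripped)
sanitizeFilename("con.txt");           // -> "con_.txt"     (Windows reserved basename gets a suffix)
sanitizeFilename("   ");               // -> "Paket"        (fallback when nothing usable remains)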
export function extractHttpLinksFromText(text: string): string[] {
  const matches = String(text || "").match(/https?:\/\/[^\s<>"']+/gi) ?? [];
  const seen = new Set<string>();
  const links: string[] = [];

  for (const match of matches) {
    let candidate = String(match || "").trim();
    let openParen = 0;
    let closeParen = 0;
    let openBracket = 0;
    let closeBracket = 0;
    for (const char of candidate) {
      if (char === "(") {
        openParen += 1;
      } else if (char === ")") {
        closeParen += 1;
      } else if (char === "[") {
        openBracket += 1;
      } else if (char === "]") {
        closeBracket += 1;
      }
    }
    while (candidate.length > 0) {
      const lastChar = candidate[candidate.length - 1];
      if (![")", "]", ",", ".", "!", "?", ";", ":"].includes(lastChar)) {
        break;
      }
      if (lastChar === ")") {
        if (closeParen <= openParen) {
          break;
        }
      }
      if (lastChar === "]") {
        if (closeBracket <= openBracket) {
          break;
        }
      }
      if (lastChar === ")") {
        closeParen = Math.max(0, closeParen - 1);
      } else if (lastChar === "]") {
        closeBracket = Math.max(0, closeBracket - 1);
      }
      candidate = candidate.slice(0, -1);
    }
    if (!candidate || !isHttpLink(candidate) || seen.has(candidate)) {
      continue;
    }
    seen.add(candidate);
    links.push(candidate);
  }

  return links;
}

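The paren/bracket counting above only strips a trailing ")" or "]" when it is unbalanced, so URLs that legitimately contain parentheses survive. Illustrative; the expected output is derived from the trimming loop, not from the sources:

// Illustrative only.
extractHttpLinksFromText("See https://example.com/file.rar, or (https://example.com/a(1).zip).");
// -> ["https://example.com/file.rar", "https://example.com/a(1).zip"]
//    trailing "," and "." are stripped, the unbalanced closing ")" is dropped,
//    but the balanced "(1)" inside the second URL is kept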
export function humanSize(bytes: number): string {
  const value = Number(bytes);
  if (!Number.isFinite(value) || value < 0) {
@ -51,6 +140,9 @@ export function humanSize(bytes: number): string {
export function filenameFromUrl(url: string): string {
  try {
    const parsed = new URL(url);
    if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
      return "download.bin";
    }
    const queryName = parsed.searchParams.get("filename")
      || parsed.searchParams.get("file")
      || parsed.searchParams.get("name")
@ -58,19 +150,25 @@ export function filenameFromUrl(url: string): string {
      || parsed.searchParams.get("title")
      || "";
    const rawName = queryName || path.basename(parsed.pathname || "");
    const decoded = decodeURIComponent(rawName || "").trim();
    const decoded = safeDecodeURIComponent(rawName || "").trim();
    const normalized = decoded
      .replace(/\.(rar|zip|7z|tar|gz|bz2|xz|iso|part\d+\.rar|r\d{2})\.html$/i, ".$1")
      .replace(/\.(rar|zip|7z|tar|gz|bz2|xz|iso|part\d+\.rar|r\d{2,3})\.html$/i, ".$1")
      .replace(/\.(mp4|mkv|avi|mp3|flac|srt)\.html$/i, ".$1");
    if (/^[a-f0-9]{24,}$/i.test(normalized)) {
      return "download.bin";
    }
    return sanitizeFilename(normalized || "download.bin");
  } catch {
    return "download.bin";
  }
}

export function looksLikeOpaqueFilename(name: string): boolean {
  const cleaned = sanitizeFilename(name || "").toLowerCase();
  if (!cleaned || cleaned === "download.bin") {
    return true;
  }
  const parsed = path.parse(cleaned);
  return /^[a-f0-9]{24,}$/i.test(parsed.name || cleaned);
}

export function inferPackageNameFromLinks(links: string[]): string {
  if (links.length === 0) {
    return "Paket";
@ -101,14 +199,17 @@ export function uniquePreserveOrder(items: string[]): string[] {
export function parsePackagesFromLinksText(rawText: string, defaultPackageName: string): ParsedPackageInput[] {
  const lines = String(rawText || "").split(/\r?\n/);
  const packages: ParsedPackageInput[] = [];
  let currentName = sanitizeFilename(defaultPackageName || "Paket");
  let currentName = String(defaultPackageName || "").trim();
  let currentLinks: string[] = [];

  const flush = (): void => {
    const links = uniquePreserveOrder(currentLinks.filter((line) => isHttpLink(line)));
    if (links.length > 0) {
      const normalizedCurrentName = String(currentName || "").trim();
      packages.push({
        name: sanitizeFilename(currentName || inferPackageNameFromLinks(links)),
        name: normalizedCurrentName
          ? sanitizeFilename(normalizedCurrentName)
          : inferPackageNameFromLinks(links),
        links
      });
    }
@ -123,7 +224,7 @@ export function parsePackagesFromLinksText(rawText: string, defaultPackageName:
    const marker = text.match(/^#\s*package\s*:\s*(.+)$/i);
    if (marker) {
      flush();
      currentName = sanitizeFilename(marker[1]);
      currentName = String(marker[1] || "").trim();
      continue;
    }
    currentLinks.push(text);
@ -137,6 +238,9 @@ export function parsePackagesFromLinksText(rawText: string, defaultPackageName:
  }

export function ensureDirPath(baseDir: string, packageName: string): string {
  if (!path.isAbsolute(baseDir)) {
    throw new Error("baseDir muss ein absoluter Pfad sein");
  }
  return path.join(baseDir, sanitizeFilename(packageName));
}

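The "# package:" marker handling above splits pasted text into named packages. An illustrative input; the names and URLs are invented for the example:

// Illustrative only.
const text = [
  "# package: Show.S01E01.720p",
  "https://example.com/a.rar",
  "https://example.com/b.rar",
  "# package: Show.S01E02.720p",
  "https://example.com/c.rar"
].join("\n");
const parsed = parsePackagesFromLinksText(text, "Fallback-Name");
// -> two ParsedPackageInput entries, "Show.S01E01.720p" with two links and
//    "Show.S01E02.720p" with one link; the default name is only used when
//    no marker precedes a block of links.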
@ -1,5 +1,17 @@
|
||||
import { contextBridge, ipcRenderer } from "electron";
|
||||
import { AddLinksPayload, AppSettings, UiSnapshot, UpdateCheckResult } from "../shared/types";
|
||||
import {
|
||||
AddLinksPayload,
|
||||
AppSettings,
|
||||
DuplicatePolicy,
|
||||
HistoryEntry,
|
||||
PackagePriority,
|
||||
SessionStats,
|
||||
StartConflictEntry,
|
||||
StartConflictResolutionResult,
|
||||
UiSnapshot,
|
||||
UpdateCheckResult,
|
||||
UpdateInstallProgress
|
||||
} from "../shared/types";
|
||||
import { IPC_CHANNELS } from "../shared/ipc";
|
||||
import { ElectronApi } from "../shared/preload-api";
|
||||
|
||||
@ -7,24 +19,68 @@ const api: ElectronApi = {
|
||||
getSnapshot: (): Promise<UiSnapshot> => ipcRenderer.invoke(IPC_CHANNELS.GET_SNAPSHOT),
|
||||
getVersion: (): Promise<string> => ipcRenderer.invoke(IPC_CHANNELS.GET_VERSION),
|
||||
checkUpdates: (): Promise<UpdateCheckResult> => ipcRenderer.invoke(IPC_CHANNELS.CHECK_UPDATES),
|
||||
installUpdate: () => ipcRenderer.invoke(IPC_CHANNELS.INSTALL_UPDATE),
|
||||
openExternal: (url: string): Promise<boolean> => ipcRenderer.invoke(IPC_CHANNELS.OPEN_EXTERNAL, url),
|
||||
updateSettings: (settings: Partial<AppSettings>): Promise<AppSettings> => ipcRenderer.invoke(IPC_CHANNELS.UPDATE_SETTINGS, settings),
|
||||
addLinks: (payload: AddLinksPayload): Promise<{ addedPackages: number; addedLinks: number; invalidCount: number }> =>
|
||||
ipcRenderer.invoke(IPC_CHANNELS.ADD_LINKS, payload),
|
||||
addContainers: (filePaths: string[]): Promise<{ addedPackages: number; addedLinks: number }> =>
|
||||
ipcRenderer.invoke(IPC_CHANNELS.ADD_CONTAINERS, filePaths),
|
||||
getStartConflicts: (): Promise<StartConflictEntry[]> => ipcRenderer.invoke(IPC_CHANNELS.GET_START_CONFLICTS),
|
||||
resolveStartConflict: (packageId: string, policy: DuplicatePolicy): Promise<StartConflictResolutionResult> =>
|
||||
ipcRenderer.invoke(IPC_CHANNELS.RESOLVE_START_CONFLICT, packageId, policy),
|
||||
clearAll: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.CLEAR_ALL),
|
||||
start: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.START),
|
||||
startPackages: (packageIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.START_PACKAGES, packageIds),
|
||||
stop: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.STOP),
|
||||
togglePause: (): Promise<boolean> => ipcRenderer.invoke(IPC_CHANNELS.TOGGLE_PAUSE),
|
||||
cancelPackage: (packageId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.CANCEL_PACKAGE, packageId),
|
||||
renamePackage: (packageId: string, newName: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RENAME_PACKAGE, packageId, newName),
|
||||
reorderPackages: (packageIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.REORDER_PACKAGES, packageIds),
|
||||
removeItem: (itemId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.REMOVE_ITEM, itemId),
|
||||
togglePackage: (packageId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.TOGGLE_PACKAGE, packageId),
|
||||
exportQueue: (): Promise<{ saved: boolean }> => ipcRenderer.invoke(IPC_CHANNELS.EXPORT_QUEUE),
|
||||
importQueue: (json: string): Promise<{ addedPackages: number; addedLinks: number }> => ipcRenderer.invoke(IPC_CHANNELS.IMPORT_QUEUE, json),
|
||||
toggleClipboard: (): Promise<boolean> => ipcRenderer.invoke(IPC_CHANNELS.TOGGLE_CLIPBOARD),
|
||||
pickFolder: (): Promise<string | null> => ipcRenderer.invoke(IPC_CHANNELS.PICK_FOLDER),
|
||||
pickContainers: (): Promise<string[]> => ipcRenderer.invoke(IPC_CHANNELS.PICK_CONTAINERS),
|
||||
getSessionStats: (): Promise<SessionStats> => ipcRenderer.invoke(IPC_CHANNELS.GET_SESSION_STATS),
|
||||
restart: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RESTART),
|
||||
quit: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.QUIT),
|
||||
exportBackup: (): Promise<{ saved: boolean }> => ipcRenderer.invoke(IPC_CHANNELS.EXPORT_BACKUP),
|
||||
importBackup: (): Promise<{ restored: boolean; message: string }> => ipcRenderer.invoke(IPC_CHANNELS.IMPORT_BACKUP),
|
||||
openLog: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.OPEN_LOG),
|
||||
openSessionLog: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.OPEN_SESSION_LOG),
|
||||
retryExtraction: (packageId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RETRY_EXTRACTION, packageId),
|
||||
extractNow: (packageId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.EXTRACT_NOW, packageId),
|
||||
resetPackage: (packageId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RESET_PACKAGE, packageId),
|
||||
getHistory: (): Promise<HistoryEntry[]> => ipcRenderer.invoke(IPC_CHANNELS.GET_HISTORY),
|
||||
clearHistory: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.CLEAR_HISTORY),
|
||||
removeHistoryEntry: (entryId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.REMOVE_HISTORY_ENTRY, entryId),
|
||||
setPackagePriority: (packageId: string, priority: PackagePriority): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SET_PACKAGE_PRIORITY, packageId, priority),
|
||||
skipItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SKIP_ITEMS, itemIds),
|
||||
resetItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RESET_ITEMS, itemIds),
|
||||
startItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.START_ITEMS, itemIds),
|
||||
onStateUpdate: (callback: (snapshot: UiSnapshot) => void): (() => void) => {
|
||||
const listener = (_event: unknown, snapshot: UiSnapshot): void => callback(snapshot);
|
||||
ipcRenderer.on(IPC_CHANNELS.STATE_UPDATE, listener);
|
||||
return () => {
|
||||
ipcRenderer.removeListener(IPC_CHANNELS.STATE_UPDATE, listener);
|
||||
};
|
||||
},
|
||||
onClipboardDetected: (callback: (links: string[]) => void): (() => void) => {
|
||||
const listener = (_event: unknown, links: string[]): void => callback(links);
|
||||
ipcRenderer.on(IPC_CHANNELS.CLIPBOARD_DETECTED, listener);
|
||||
return () => {
|
||||
ipcRenderer.removeListener(IPC_CHANNELS.CLIPBOARD_DETECTED, listener);
|
||||
};
|
||||
},
|
||||
onUpdateInstallProgress: (callback: (progress: UpdateInstallProgress) => void): (() => void) => {
|
||||
const listener = (_event: unknown, progress: UpdateInstallProgress): void => callback(progress);
|
||||
ipcRenderer.on(IPC_CHANNELS.UPDATE_INSTALL_PROGRESS, listener);
|
||||
return () => {
|
||||
ipcRenderer.removeListener(IPC_CHANNELS.UPDATE_INSTALL_PROGRESS, listener);
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
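Renderer code consumes this api object through the context bridge. An illustrative subscription, assuming an ElectronApi instance is exposed to the renderer via contextBridge.exposeInMainWorld elsewhere in preload.ts (the exposure name is not shown in this hunk):

// Illustrative only; how the api reaches the renderer is an assumption here.
declare const electronApi: ElectronApi;

const unsubscribe = electronApi.onStateUpdate((snapshot) => {
  console.log(`speed: ${snapshot.speedText}, eta: ${snapshot.etaText}`);
});
// Later, e.g. when the view unmounts, drop the listener again:
unsubscribe();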
src/renderer/App.tsx (3698 changed lines): file diff suppressed because it is too large.
@ -3,7 +3,7 @@
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>Debrid Download Manager</title>
  <title>Multi Debrid Downloader</title>
</head>
<body>
  <div id="root"></div>

src/renderer/package-order.ts (new file, 25 lines):
@ -0,0 +1,25 @@
import type { PackageEntry } from "../shared/types";

export function reorderPackageOrderByDrop(order: string[], draggedPackageId: string, targetPackageId: string): string[] {
  const fromIndex = order.indexOf(draggedPackageId);
  const toIndex = order.indexOf(targetPackageId);
  if (fromIndex < 0 || toIndex < 0 || fromIndex === toIndex) {
    return order;
  }
  const next = [...order];
  const [dragged] = next.splice(fromIndex, 1);
  const insertIndex = Math.max(0, Math.min(next.length, toIndex));
  next.splice(insertIndex, 0, dragged);
  return next;
}

export function sortPackageOrderByName(order: string[], packages: Record<string, PackageEntry>, descending: boolean): string[] {
  const sorted = [...order];
  sorted.sort((a, b) => {
    const nameA = (packages[a]?.name ?? "").toLowerCase();
    const nameB = (packages[b]?.name ?? "").toLowerCase();
    const cmp = nameA.localeCompare(nameB, undefined, { numeric: true, sensitivity: "base" });
    return descending ? -cmp : cmp;
  });
  return sorted;
}
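sortPackageOrderByName uses numeric collation, so names containing numbers sort naturally. Illustrative; the partial PackageEntry casts are a shortcut for the example only:

// Illustrative only.
const pkgs = {
  a: { id: "a", name: "Episode 10" } as PackageEntry, // cast: only the name matters here
  b: { id: "b", name: "Episode 2" } as PackageEntry
};
sortPackageOrderByName(["a", "b"], pkgs, false);
// -> ["b", "a"]: numeric collation puts "Episode 2" before "Episode 10"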
File diff suppressed because it is too large.
@ -2,15 +2,46 @@ export const IPC_CHANNELS = {
|
||||
GET_SNAPSHOT: "app:get-snapshot",
|
||||
GET_VERSION: "app:get-version",
|
||||
CHECK_UPDATES: "app:check-updates",
|
||||
INSTALL_UPDATE: "app:install-update",
|
||||
UPDATE_INSTALL_PROGRESS: "app:update-install-progress",
|
||||
OPEN_EXTERNAL: "app:open-external",
|
||||
UPDATE_SETTINGS: "app:update-settings",
|
||||
ADD_LINKS: "queue:add-links",
|
||||
ADD_CONTAINERS: "queue:add-containers",
|
||||
GET_START_CONFLICTS: "queue:get-start-conflicts",
|
||||
RESOLVE_START_CONFLICT: "queue:resolve-start-conflict",
|
||||
CLEAR_ALL: "queue:clear-all",
|
||||
START: "queue:start",
|
||||
START_PACKAGES: "queue:start-packages",
|
||||
STOP: "queue:stop",
|
||||
TOGGLE_PAUSE: "queue:toggle-pause",
|
||||
CANCEL_PACKAGE: "queue:cancel-package",
|
||||
RENAME_PACKAGE: "queue:rename-package",
|
||||
REORDER_PACKAGES: "queue:reorder-packages",
|
||||
REMOVE_ITEM: "queue:remove-item",
|
||||
TOGGLE_PACKAGE: "queue:toggle-package",
|
||||
EXPORT_QUEUE: "queue:export",
|
||||
IMPORT_QUEUE: "queue:import",
|
||||
PICK_FOLDER: "dialog:pick-folder",
|
||||
PICK_CONTAINERS: "dialog:pick-containers",
|
||||
STATE_UPDATE: "state:update"
|
||||
STATE_UPDATE: "state:update",
|
||||
CLIPBOARD_DETECTED: "clipboard:detected",
|
||||
TOGGLE_CLIPBOARD: "clipboard:toggle",
|
||||
GET_SESSION_STATS: "stats:get-session-stats",
|
||||
RESTART: "app:restart",
|
||||
QUIT: "app:quit",
|
||||
EXPORT_BACKUP: "app:export-backup",
|
||||
IMPORT_BACKUP: "app:import-backup",
|
||||
OPEN_LOG: "app:open-log",
|
||||
OPEN_SESSION_LOG: "app:open-session-log",
|
||||
RETRY_EXTRACTION: "queue:retry-extraction",
|
||||
EXTRACT_NOW: "queue:extract-now",
|
||||
RESET_PACKAGE: "queue:reset-package",
|
||||
GET_HISTORY: "history:get",
|
||||
CLEAR_HISTORY: "history:clear",
|
||||
REMOVE_HISTORY_ENTRY: "history:remove-entry",
|
||||
SET_PACKAGE_PRIORITY: "queue:set-package-priority",
|
||||
SKIP_ITEMS: "queue:skip-items",
|
||||
RESET_ITEMS: "queue:reset-items",
|
||||
START_ITEMS: "queue:start-items"
|
||||
} as const;
|
||||
|
||||
@ -1,18 +1,62 @@
|
||||
import type { AddLinksPayload, AppSettings, UiSnapshot, UpdateCheckResult } from "./types";
|
||||
import type {
|
||||
AddLinksPayload,
|
||||
AppSettings,
|
||||
DuplicatePolicy,
|
||||
HistoryEntry,
|
||||
PackagePriority,
|
||||
SessionStats,
|
||||
StartConflictEntry,
|
||||
StartConflictResolutionResult,
|
||||
UiSnapshot,
|
||||
UpdateCheckResult,
|
||||
UpdateInstallProgress,
|
||||
UpdateInstallResult
|
||||
} from "./types";
|
||||
|
||||
export interface ElectronApi {
|
||||
getSnapshot: () => Promise<UiSnapshot>;
|
||||
getVersion: () => Promise<string>;
|
||||
checkUpdates: () => Promise<UpdateCheckResult>;
|
||||
installUpdate: () => Promise<UpdateInstallResult>;
|
||||
openExternal: (url: string) => Promise<boolean>;
|
||||
updateSettings: (settings: Partial<AppSettings>) => Promise<AppSettings>;
|
||||
addLinks: (payload: AddLinksPayload) => Promise<{ addedPackages: number; addedLinks: number; invalidCount: number }>;
|
||||
addContainers: (filePaths: string[]) => Promise<{ addedPackages: number; addedLinks: number }>;
|
||||
getStartConflicts: () => Promise<StartConflictEntry[]>;
|
||||
resolveStartConflict: (packageId: string, policy: DuplicatePolicy) => Promise<StartConflictResolutionResult>;
|
||||
clearAll: () => Promise<void>;
|
||||
start: () => Promise<void>;
|
||||
startPackages: (packageIds: string[]) => Promise<void>;
|
||||
stop: () => Promise<void>;
|
||||
togglePause: () => Promise<boolean>;
|
||||
cancelPackage: (packageId: string) => Promise<void>;
|
||||
renamePackage: (packageId: string, newName: string) => Promise<void>;
|
||||
reorderPackages: (packageIds: string[]) => Promise<void>;
|
||||
removeItem: (itemId: string) => Promise<void>;
|
||||
togglePackage: (packageId: string) => Promise<void>;
|
||||
exportQueue: () => Promise<{ saved: boolean }>;
|
||||
importQueue: (json: string) => Promise<{ addedPackages: number; addedLinks: number }>;
|
||||
toggleClipboard: () => Promise<boolean>;
|
||||
pickFolder: () => Promise<string | null>;
|
||||
pickContainers: () => Promise<string[]>;
|
||||
getSessionStats: () => Promise<SessionStats>;
|
||||
restart: () => Promise<void>;
|
||||
quit: () => Promise<void>;
|
||||
exportBackup: () => Promise<{ saved: boolean }>;
|
||||
importBackup: () => Promise<{ restored: boolean; message: string }>;
|
||||
openLog: () => Promise<void>;
|
||||
openSessionLog: () => Promise<void>;
|
||||
retryExtraction: (packageId: string) => Promise<void>;
|
||||
extractNow: (packageId: string) => Promise<void>;
|
||||
resetPackage: (packageId: string) => Promise<void>;
|
||||
getHistory: () => Promise<HistoryEntry[]>;
|
||||
clearHistory: () => Promise<void>;
|
||||
removeHistoryEntry: (entryId: string) => Promise<void>;
|
||||
setPackagePriority: (packageId: string, priority: PackagePriority) => Promise<void>;
|
||||
skipItems: (itemIds: string[]) => Promise<void>;
|
||||
resetItems: (itemIds: string[]) => Promise<void>;
|
||||
startItems: (itemIds: string[]) => Promise<void>;
|
||||
onStateUpdate: (callback: (snapshot: UiSnapshot) => void) => () => void;
|
||||
onClipboardDetected: (callback: (links: string[]) => void) => () => void;
|
||||
onUpdateInstallProgress: (callback: (progress: UpdateInstallProgress) => void) => () => void;
|
||||
}
|
||||
|
||||
@ -14,22 +14,50 @@ export type CleanupMode = "none" | "trash" | "delete";
|
||||
export type ConflictMode = "overwrite" | "skip" | "rename" | "ask";
|
||||
export type SpeedMode = "global" | "per_download";
|
||||
export type FinishedCleanupPolicy = "never" | "immediate" | "on_start" | "package_done";
|
||||
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid";
|
||||
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid" | "ddownload" | "onefichier";
|
||||
export type DebridFallbackProvider = DebridProvider | "none";
|
||||
export type AppTheme = "dark" | "light";
|
||||
export type PackagePriority = "high" | "normal" | "low";
|
||||
export type ExtractCpuPriority = "high" | "middle" | "low";
|
||||
|
||||
export interface BandwidthScheduleEntry {
|
||||
id: string;
|
||||
startHour: number;
|
||||
endHour: number;
|
||||
speedLimitKbps: number;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
export interface DownloadStats {
|
||||
totalDownloaded: number;
|
||||
totalDownloadedAllTime: number;
|
||||
totalFiles: number;
|
||||
totalPackages: number;
|
||||
sessionStartedAt: number;
|
||||
}
|
||||
|
||||
export interface AppSettings {
|
||||
token: string;
|
||||
megaToken: string;
|
||||
megaLogin: string;
|
||||
megaPassword: string;
|
||||
bestToken: string;
|
||||
allDebridToken: string;
|
||||
ddownloadLogin: string;
|
||||
ddownloadPassword: string;
|
||||
oneFichierApiKey: string;
|
||||
archivePasswordList: string;
|
||||
rememberToken: boolean;
|
||||
providerPrimary: DebridProvider;
|
||||
providerSecondary: DebridProvider;
|
||||
providerTertiary: DebridProvider;
|
||||
providerSecondary: DebridFallbackProvider;
|
||||
providerTertiary: DebridFallbackProvider;
|
||||
autoProviderFallback: boolean;
|
||||
outputDir: string;
|
||||
packageName: string;
|
||||
autoExtract: boolean;
|
||||
autoRename4sf4sj: boolean;
|
||||
extractDir: string;
|
||||
collectMkvToLibrary: boolean;
|
||||
mkvLibraryDir: string;
|
||||
createExtractSubfolder: boolean;
|
||||
hybridExtract: boolean;
|
||||
cleanupMode: CleanupMode;
|
||||
@ -42,11 +70,24 @@ export interface AppSettings {
|
||||
reconnectWaitSeconds: number;
|
||||
completedCleanupPolicy: FinishedCleanupPolicy;
|
||||
maxParallel: number;
|
||||
maxParallelExtract: number;
|
||||
retryLimit: number;
|
||||
speedLimitEnabled: boolean;
|
||||
speedLimitKbps: number;
|
||||
speedLimitMode: SpeedMode;
|
||||
updateRepo: string;
|
||||
autoUpdateCheck: boolean;
|
||||
clipboardWatch: boolean;
|
||||
minimizeToTray: boolean;
|
||||
theme: AppTheme;
|
||||
collapseNewPackages: boolean;
|
||||
autoSkipExtracted: boolean;
|
||||
confirmDeleteSelection: boolean;
|
||||
totalDownloadedAllTime: number;
|
||||
bandwidthSchedules: BandwidthScheduleEntry[];
|
||||
columnOrder: string[];
|
||||
extractCpuPriority: ExtractCpuPriority;
|
||||
autoExtractWhenStopped: boolean;
|
||||
}
|
||||
|
||||
export interface DownloadItem {
|
||||
@ -68,6 +109,7 @@ export interface DownloadItem {
|
||||
fullStatus: string;
|
||||
createdAt: number;
|
||||
updatedAt: number;
|
||||
onlineStatus?: "online" | "offline" | "checking";
|
||||
}
|
||||
|
||||
export interface PackageEntry {
|
||||
@ -78,6 +120,9 @@ export interface PackageEntry {
|
||||
status: DownloadStatus;
|
||||
itemIds: string[];
|
||||
cancelled: boolean;
|
||||
enabled: boolean;
|
||||
priority: PackagePriority;
|
||||
postProcessLabel?: string;
|
||||
createdAt: number;
|
||||
updatedAt: number;
|
||||
}
|
||||
@ -110,6 +155,7 @@ export interface DownloadSummary {
|
||||
export interface ParsedPackageInput {
|
||||
name: string;
|
||||
links: string[];
|
||||
fileNames?: string[];
|
||||
}
|
||||
|
||||
export interface ContainerImportResult {
|
||||
@ -121,33 +167,123 @@ export interface UiSnapshot {
|
||||
settings: AppSettings;
|
||||
session: SessionState;
|
||||
summary: DownloadSummary | null;
|
||||
stats: DownloadStats;
|
||||
speedText: string;
|
||||
etaText: string;
|
||||
canStart: boolean;
|
||||
canStop: boolean;
|
||||
canPause: boolean;
|
||||
clipboardActive: boolean;
|
||||
reconnectSeconds: number;
|
||||
packageSpeedBps: Record<string, number>;
|
||||
}
|
||||
|
||||
export interface AddLinksPayload {
|
||||
rawText: string;
|
||||
packageName?: string;
|
||||
duplicatePolicy?: DuplicatePolicy;
|
||||
}
|
||||
|
||||
export interface AddContainerPayload {
|
||||
filePaths: string[];
|
||||
}
|
||||
|
||||
export type DuplicatePolicy = "keep" | "skip" | "overwrite";
|
||||
|
||||
export interface QueueAddResult {
|
||||
addedPackages: number;
|
||||
addedLinks: number;
|
||||
skippedExistingPackages: string[];
|
||||
overwrittenPackages: string[];
|
||||
}
|
||||
|
||||
export interface ContainerConflictResult {
|
||||
conflicts: string[];
|
||||
packageCount: number;
|
||||
linkCount: number;
|
||||
}
|
||||
|
||||
export interface StartConflictEntry {
|
||||
packageId: string;
|
||||
packageName: string;
|
||||
extractDir: string;
|
||||
}
|
||||
|
||||
export interface StartConflictResolutionResult {
|
||||
skipped: boolean;
|
||||
overwritten: boolean;
|
||||
}
|
||||
|
||||
export interface UpdateCheckResult {
|
||||
updateAvailable: boolean;
|
||||
currentVersion: string;
|
||||
latestVersion: string;
|
||||
latestTag: string;
|
||||
releaseUrl: string;
|
||||
setupAssetUrl?: string;
|
||||
setupAssetName?: string;
|
||||
setupAssetDigest?: string;
|
||||
releaseNotes?: string;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface UpdateInstallResult {
|
||||
started: boolean;
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface UpdateInstallProgress {
|
||||
stage: "starting" | "downloading" | "verifying" | "launching" | "done" | "error";
|
||||
percent: number | null;
|
||||
downloadedBytes: number;
|
||||
totalBytes: number | null;
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface ParsedHashEntry {
|
||||
fileName: string;
|
||||
algorithm: "crc32" | "md5" | "sha1";
|
||||
digest: string;
|
||||
}
|
||||
|
||||
export interface BandwidthSample {
|
||||
timestamp: number;
|
||||
speedBps: number;
|
||||
}
|
||||
|
||||
export interface BandwidthStats {
|
||||
samples: BandwidthSample[];
|
||||
currentSpeedBps: number;
|
||||
averageSpeedBps: number;
|
||||
maxSpeedBps: number;
|
||||
totalBytesSession: number;
|
||||
sessionDurationSeconds: number;
|
||||
}
|
||||
|
||||
export interface SessionStats {
|
||||
bandwidth: BandwidthStats;
|
||||
totalDownloads: number;
|
||||
completedDownloads: number;
|
||||
failedDownloads: number;
|
||||
activeDownloads: number;
|
||||
queuedDownloads: number;
|
||||
}
|
||||
|
||||
export interface HistoryEntry {
|
||||
id: string;
|
||||
name: string;
|
||||
totalBytes: number;
|
||||
downloadedBytes: number;
|
||||
fileCount: number;
|
||||
provider: DebridProvider | null;
|
||||
completedAt: number;
|
||||
durationSeconds: number;
|
||||
status: "completed" | "deleted";
|
||||
outputDir: string;
|
||||
urls?: string[];
|
||||
}
|
||||
|
||||
export interface HistoryState {
|
||||
entries: HistoryEntry[];
|
||||
maxEntries: number;
|
||||
}
|
||||
|
||||
tests/app-order.test.ts (new file, 49 lines):
@ -0,0 +1,49 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { reorderPackageOrderByDrop, sortPackageOrderByName } from "../src/renderer/package-order";
|
||||
|
||||
describe("reorderPackageOrderByDrop", () => {
|
||||
it("moves adjacent package down by one on drop", () => {
|
||||
const next = reorderPackageOrderByDrop(["a", "b", "c"], "b", "c");
|
||||
expect(next).toEqual(["a", "c", "b"]);
|
||||
});
|
||||
|
||||
it("moves package after lower drop target", () => {
|
||||
const next = reorderPackageOrderByDrop(["a", "b", "c", "d"], "a", "c");
|
||||
expect(next).toEqual(["b", "c", "a", "d"]);
|
||||
});
|
||||
|
||||
it("returns original order when ids are invalid", () => {
|
||||
const order = ["a", "b", "c"];
|
||||
expect(reorderPackageOrderByDrop(order, "x", "b")).toEqual(order);
|
||||
expect(reorderPackageOrderByDrop(order, "a", "x")).toEqual(order);
|
||||
expect(reorderPackageOrderByDrop(order, "a", "a")).toEqual(order);
|
||||
});
|
||||
});
|
||||
|
||||
describe("sortPackageOrderByName", () => {
|
||||
it("sorts package IDs alphabetically ascending", () => {
|
||||
const sorted = sortPackageOrderByName(
|
||||
["pkg3", "pkg1", "pkg2"],
|
||||
{
|
||||
pkg1: { id: "pkg1", name: "Alpha", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, priority: "normal", createdAt: 0, updatedAt: 0 },
|
||||
pkg2: { id: "pkg2", name: "beta", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, priority: "normal", createdAt: 0, updatedAt: 0 },
|
||||
pkg3: { id: "pkg3", name: "Gamma", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, priority: "normal", createdAt: 0, updatedAt: 0 }
|
||||
},
|
||||
false
|
||||
);
|
||||
expect(sorted).toEqual(["pkg1", "pkg2", "pkg3"]);
|
||||
});
|
||||
|
||||
it("sorts package IDs alphabetically descending", () => {
|
||||
const sorted = sortPackageOrderByName(
|
||||
["pkg1", "pkg2", "pkg3"],
|
||||
{
|
||||
pkg1: { id: "pkg1", name: "Alpha", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, priority: "normal", createdAt: 0, updatedAt: 0 },
|
||||
pkg2: { id: "pkg2", name: "beta", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, priority: "normal", createdAt: 0, updatedAt: 0 },
|
||||
pkg3: { id: "pkg3", name: "Gamma", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, priority: "normal", createdAt: 0, updatedAt: 0 }
|
||||
},
|
||||
true
|
||||
);
|
||||
expect(sorted).toEqual(["pkg3", "pkg2", "pkg1"]);
|
||||
});
|
||||
});
|
||||
tests/auto-rename.test.ts (new file, 694 lines):
@ -0,0 +1,694 @@
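The tests in this file pin down the behaviour of extractEpisodeToken without showing the matcher itself; a regex consistent with those cases might look roughly like the following sketch (hypothetical: the actual SCENE_EPISODE_RE in download-manager.ts may differ).

// Hypothetical sketch only, derived from the test expectations below.
const EPISODE_TOKEN_SKETCH = /s(\d{1,2})e(\d{1,3})(?:e(\d{1,3}))?/i;

function extractEpisodeTokenSketch(name: string): string | null {
  const match = EPISODE_TOKEN_SKETCH.exec(name);
  if (!match) return null;
  const pad = (value: string): string => value.padStart(2, "0");
  const base = `S${pad(match[1])}E${pad(match[2])}`;
  // Double episodes such as s01e01e02 keep their trailing episode group.
  return match[3] ? `${base}E${pad(match[3])}` : base;
}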
|
||||
import { describe, it, expect } from "vitest";
|
||||
import {
|
||||
extractEpisodeToken,
|
||||
applyEpisodeTokenToFolderName,
|
||||
sourceHasRpToken,
|
||||
ensureRepackToken,
|
||||
buildAutoRenameBaseName,
|
||||
buildAutoRenameBaseNameFromFolders,
|
||||
buildAutoRenameBaseNameFromFoldersWithOptions
|
||||
} from "../src/main/download-manager";
|
||||
|
||||
describe("extractEpisodeToken", () => {
|
||||
it("extracts S01E01 from standard scene format", () => {
|
||||
expect(extractEpisodeToken("show.name.s01e01.720p")).toBe("S01E01");
|
||||
});
|
||||
|
||||
it("extracts episode with dot separators", () => {
|
||||
expect(extractEpisodeToken("Show.S02E15.1080p")).toBe("S02E15");
|
||||
});
|
||||
|
||||
it("extracts episode with dash separators", () => {
|
||||
expect(extractEpisodeToken("show-s3e5-720p")).toBe("S03E05");
|
||||
});
|
||||
|
||||
it("extracts episode with underscore separators", () => {
|
||||
expect(extractEpisodeToken("show_s10e100_hdtv")).toBe("S10E100");
|
||||
});
|
||||
|
||||
it("extracts episode with space separators", () => {
|
||||
expect(extractEpisodeToken("Show Name s1e2 720p")).toBe("S01E02");
|
||||
});
|
||||
|
||||
it("pads single-digit season and episode to 2 digits", () => {
|
||||
expect(extractEpisodeToken("show.s1e3.720p")).toBe("S01E03");
|
||||
});
|
||||
|
||||
it("handles 3-digit episode numbers", () => {
|
||||
expect(extractEpisodeToken("show.s01e123")).toBe("S01E123");
|
||||
});
|
||||
|
||||
it("returns null for no episode token", () => {
|
||||
expect(extractEpisodeToken("some.random.file.720p")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for season-only pattern (no episode)", () => {
|
||||
expect(extractEpisodeToken("show.s01.720p")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for empty string", () => {
|
||||
expect(extractEpisodeToken("")).toBeNull();
|
||||
});
|
||||
|
||||
it("is case-insensitive", () => {
|
||||
expect(extractEpisodeToken("Show.S05E10.1080p")).toBe("S05E10");
|
||||
expect(extractEpisodeToken("show.s05e10.1080p")).toBe("S05E10");
|
||||
});
|
||||
|
||||
it("extracts from episode token at start of string", () => {
|
||||
expect(extractEpisodeToken("s01e01.720p")).toBe("S01E01");
|
||||
});
|
||||
|
||||
it("extracts from episode token at end of string", () => {
|
||||
expect(extractEpisodeToken("show.s02e03")).toBe("S02E03");
|
||||
});
|
||||
|
||||
it("extracts double episode token s01e01e02", () => {
|
||||
expect(extractEpisodeToken("tvr-mammon-s01e01e02-720p")).toBe("S01E01E02");
|
||||
});
|
||||
|
||||
it("extracts double episode with dot separators", () => {
|
||||
expect(extractEpisodeToken("Show.S01E03E04.720p")).toBe("S01E03E04");
|
||||
});
|
||||
|
||||
it("extracts double episode at end of string", () => {
|
||||
expect(extractEpisodeToken("show.s02e05e06")).toBe("S02E05E06");
|
||||
});
|
||||
|
||||
it("extracts double episode with single-digit numbers", () => {
|
||||
expect(extractEpisodeToken("show-s1e1e2-720p")).toBe("S01E01E02");
|
||||
});
|
||||
});
|
||||
|
||||
describe("applyEpisodeTokenToFolderName", () => {
|
||||
it("replaces existing episode token in folder name", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.S01E01.720p-4sf", "S02E05")).toBe("Show.S02E05.720p-4sf");
|
||||
});
|
||||
|
||||
it("replaces season-only token when no episode in folder", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.S01.720p-4sf", "S01E03")).toBe("Show.S01E03.720p-4sf");
|
||||
});
|
||||
|
||||
it("inserts before -4sf suffix when no season/episode in folder", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.720p-4sf", "S01E05")).toBe("Show.720p.S01E05-4sf");
|
||||
});
|
||||
|
||||
it("inserts before -4sj suffix", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.720p-4sj", "S01E05")).toBe("Show.720p.S01E05-4sj");
|
||||
});
|
||||
|
||||
it("appends episode token when no recognized pattern", () => {
|
||||
expect(applyEpisodeTokenToFolderName("SomeFolder", "S01E01")).toBe("SomeFolder.S01E01");
|
||||
});
|
||||
|
||||
it("returns episode token when folder name is empty", () => {
|
||||
expect(applyEpisodeTokenToFolderName("", "S01E01")).toBe("S01E01");
|
||||
});
|
||||
|
||||
it("handles folder with existing multi-digit episode", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.S01E99.720p-4sf", "S01E05")).toBe("Show.S01E05.720p-4sf");
|
||||
});
|
||||
|
||||
it("is case-insensitive for -4SF/-4SJ suffix", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.720p-4SF", "S01E01")).toBe("Show.720p.S01E01-4SF");
|
||||
});
|
||||
|
||||
it("applies double episode token to season-only folder", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Mammon.S01.German.1080P.Bluray.x264-SMAHD", "S01E01E02"))
|
||||
.toBe("Mammon.S01E01E02.German.1080P.Bluray.x264-SMAHD");
|
||||
});
|
||||
|
||||
it("replaces existing double episode in folder with new token", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.S01E01E02.720p-4sf", "S01E03E04"))
|
||||
.toBe("Show.S01E03E04.720p-4sf");
|
||||
});
|
||||
|
||||
it("replaces existing single episode in folder with double episode token", () => {
|
||||
expect(applyEpisodeTokenToFolderName("Show.S01E01.720p-4sf", "S01E01E02"))
|
||||
.toBe("Show.S01E01E02.720p-4sf");
|
||||
});
|
||||
});
|
||||
|
||||
describe("sourceHasRpToken", () => {
|
||||
it("detects .rp. in filename", () => {
|
||||
expect(sourceHasRpToken("show.s01e01.rp.720p")).toBe(true);
|
||||
});
|
||||
|
||||
it("detects -rp- in filename", () => {
|
||||
expect(sourceHasRpToken("show-s01e01-rp-720p")).toBe(true);
|
||||
});
|
||||
|
||||
it("detects _rp_ in filename", () => {
|
||||
expect(sourceHasRpToken("show_s01e01_rp_720p")).toBe(true);
|
||||
});
|
||||
|
||||
it("detects rp at end of string", () => {
|
||||
expect(sourceHasRpToken("show.s01e01.rp")).toBe(true);
|
||||
});
|
||||
|
||||
it("does not match rp inside a word", () => {
|
||||
expect(sourceHasRpToken("enterprise.s01e01")).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false for empty string", () => {
|
||||
expect(sourceHasRpToken("")).toBe(false);
|
||||
});
|
||||
|
||||
it("is case-insensitive", () => {
|
||||
expect(sourceHasRpToken("show.RP.720p")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ensureRepackToken", () => {
|
||||
it("inserts REPACK before quality token", () => {
|
||||
expect(ensureRepackToken("Show.S01E01.1080p-4sf")).toBe("Show.S01E01.REPACK.1080p-4sf");
|
||||
});
|
||||
|
||||
it("inserts REPACK before 720p", () => {
|
||||
expect(ensureRepackToken("Show.S01E01.720p-4sf")).toBe("Show.S01E01.REPACK.720p-4sf");
|
||||
});
|
||||
|
||||
it("inserts REPACK before 2160p", () => {
|
||||
expect(ensureRepackToken("Show.S01E01.2160p-4sf")).toBe("Show.S01E01.REPACK.2160p-4sf");
|
||||
});
|
||||
|
||||
it("inserts REPACK before -4sf when no quality token", () => {
|
||||
expect(ensureRepackToken("Show.S01E01-4sf")).toBe("Show.S01E01.REPACK-4sf");
|
||||
});
|
||||
|
||||
it("inserts REPACK before -4sj when no quality token", () => {
|
||||
expect(ensureRepackToken("Show.S01E01-4sj")).toBe("Show.S01E01.REPACK-4sj");
|
||||
});
|
||||
|
||||
it("appends REPACK when no recognized insertion point", () => {
|
||||
expect(ensureRepackToken("Show.S01E01")).toBe("Show.S01E01.REPACK");
|
||||
});
|
||||
|
||||
it("does not double-add REPACK if already present", () => {
|
||||
expect(ensureRepackToken("Show.S01E01.REPACK.1080p-4sf")).toBe("Show.S01E01.REPACK.1080p-4sf");
|
||||
});
|
||||
|
||||
it("does not double-add repack (case-insensitive)", () => {
|
||||
expect(ensureRepackToken("Show.s01e01.repack.720p-4sf")).toBe("Show.s01e01.repack.720p-4sf");
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildAutoRenameBaseName", () => {
|
||||
it("renames with episode token from source file", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.720p-4sf", "show.s01e05.720p.mkv");
|
||||
expect(result).toBe("Show.S01E05.720p-4sf");
|
||||
});
|
||||
|
||||
it("works with -4sj suffix", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.720p-4sj", "show.s01e03.720p.mkv");
|
||||
expect(result).toBe("Show.S01E03.720p-4sj");
|
||||
});
|
||||
|
||||
it("renames generic scene folder with group suffix", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.720p-GROUP", "show.s01e05.720p.mkv");
|
||||
expect(result).toBe("Show.S01.720p-GROUP");
|
||||
});
|
||||
|
||||
it("returns null when source has no episode token", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.720p-4sf", "random.file.720p.mkv");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("adds REPACK when source has rp token", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.720p-4sf", "show.s01e05.rp.720p.mkv");
|
||||
expect(result).toBe("Show.S01E05.REPACK.720p-4sf");
|
||||
});
|
||||
|
||||
it("handles folder with existing episode that gets replaced", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01E01.720p-4sf", "show.s01e10.720p.mkv");
|
||||
expect(result).toBe("Show.S01E10.720p-4sf");
|
||||
});
|
||||
|
||||
it("inserts episode before -4sf when folder has no season/episode", () => {
|
||||
const result = buildAutoRenameBaseName("Show.720p-4sf", "show.s01e05.720p.mkv");
|
||||
expect(result).toBe("Show.720p.S01E05-4sf");
|
||||
});
|
||||
|
||||
it("handles case-insensitive 4SF suffix", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.720p-4SF", "show.s01e02.720p.mkv");
|
||||
expect(result).toBe("Show.S01E02.720p-4SF");
|
||||
});
|
||||
|
||||
it("handles rp + no quality token in folder", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01-4sf", "show.s01e05.rp.mkv");
|
||||
expect(result).toBe("Show.S01E05.REPACK-4sf");
|
||||
});
|
||||
|
||||
it("returns null for empty folder name", () => {
|
||||
const result = buildAutoRenameBaseName("", "show.s01e01.mkv");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for empty source file name", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01-4sf", "");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
// Edge cases
|
||||
it("handles 2160p quality token", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.2160p-4sf", "show.s01e01.rp.2160p.mkv");
|
||||
expect(result).toBe("Show.S01E01.REPACK.2160p-4sf");
|
||||
});
|
||||
|
||||
it("handles 480p quality token", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01.480p-4sf", "show.s01e07.480p.mkv");
|
||||
expect(result).toBe("Show.S01E07.480p-4sf");
|
||||
});
|
||||
|
||||
it("does not trigger on folders ending with similar but wrong suffix", () => {
|
||||
expect(buildAutoRenameBaseName("Show.S01-4sfx", "show.s01e01.mkv")).toBeNull();
|
||||
expect(buildAutoRenameBaseName("Show.S01-x4sf", "show.s01e01.mkv")).toBeNull();
|
||||
});
|
||||
|
||||
it("handles high season and episode numbers", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S99.720p-4sf", "show.s99e999.720p.mkv");
|
||||
// SCENE_EPISODE_RE allows up to 3-digit episodes and 2-digit seasons
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).toContain("S99E999");
|
||||
});
|
||||
|
||||
// Real-world scene release patterns
|
||||
it("real-world: German series with dots", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Der.Bergdoktor.S18.German.720p.WEB.x264-4SJ",
|
||||
"der.bergdoktor.s18e01.german.720p.web.x264"
|
||||
);
|
||||
expect(result).toBe("Der.Bergdoktor.S18E01.German.720p.WEB.x264-4SJ");
|
||||
});
|
||||
|
||||
it("real-world: English series with rp token", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"The.Last.of.Us.S02.1080p.WEB-4SF",
|
||||
"the.last.of.us.s02e03.rp.1080p.web"
|
||||
);
|
||||
expect(result).toBe("The.Last.of.Us.S02E03.REPACK.1080p.WEB-4SF");
|
||||
});
|
||||
|
||||
it("real-world: multiple dots in name", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Grey.s.Anatomy.S21.German.DL.720p.WEB.x264-4SJ",
|
||||
"grey.s.anatomy.s21e08.german.dl.720p.web.x264"
|
||||
);
|
||||
expect(result).toBe("Grey.s.Anatomy.S21E08.German.DL.720p.WEB.x264-4SJ");
|
||||
});
|
||||
|
||||
it("real-world: 4K content", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Severance.S02.2160p.ATVP.WEB-DL.DDP5.1.DV.H.265-4SF",
|
||||
"severance.s02e07.2160p.atvp.web-dl.ddp5.1.dv.h.265"
|
||||
);
|
||||
expect(result).toBe("Severance.S02E07.2160p.ATVP.WEB-DL.DDP5.1.DV.H.265-4SF");
|
||||
});
|
||||
|
||||
it("real-world: Britannia release keeps folder base name", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Britannia.S02.GERMAN.720p.WEBRiP.x264-LAW",
|
||||
"law-britannia.s02e01.720p.webrip"
|
||||
);
|
||||
expect(result).toBe("Britannia.S02.GERMAN.720p.WEBRiP.x264-LAW");
|
||||
});
|
||||
|
||||
it("real-world: Britannia repack injects REPACK", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Britannia.S02.GERMAN.720p.WEBRiP.x264-LAW",
|
||||
"law-britannia.s02e09.720p.webrip.repack"
|
||||
);
|
||||
expect(result).toBe("Britannia.S02.GERMAN.REPACK.720p.WEBRiP.x264-LAW");
|
||||
});
|
||||
|
||||
it("adds REPACK when folder name carries RP hint", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Banshee.S02E01.German.RP.720p.BluRay.x264-RIPLEY",
|
||||
"r-banshee.s02e01-720p"
|
||||
);
|
||||
expect(result).toBe("Banshee.S02E01.German.REPACK.720p.BluRay.x264-RIPLEY");
|
||||
});
|
||||
|
||||
it("real-world: folder already has wrong episode", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Cobra.Kai.S06E01.720p.NF.WEB-DL.DDP5.1.x264-4SF",
|
||||
"cobra.kai.s06e14.720p.nf.web-dl.ddp5.1.x264"
|
||||
);
|
||||
expect(result).toBe("Cobra.Kai.S06E14.720p.NF.WEB-DL.DDP5.1.x264-4SF");
|
||||
});
|
||||
|
||||
// Bug-hunting edge cases
|
||||
it("source filename extension is not included in episode detection", () => {
|
||||
// The sourceFileName passed to buildAutoRenameBaseName is the basename without extension
|
||||
// so .mkv should not interfere, but let's verify with an actual extension
|
||||
const result = buildAutoRenameBaseName("Show.S01-4sf", "show.s01e01.mkv");
|
||||
// "mkv" should not be treated as part of the filename match
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).toContain("S01E01");
|
||||
});
|
||||
|
||||
it("does not match episode-like patterns in codec strings", () => {
|
||||
// h.265 has digits but should not be confused with episode tokens
|
||||
const token = extractEpisodeToken("show.s01e01.h.265");
|
||||
expect(token).toBe("S01E01");
|
||||
});
|
||||
|
||||
it("handles folder with dash separators throughout", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Show-Name-S01-720p-4sf",
|
||||
"show-name-s01e05-720p"
|
||||
);
|
||||
expect(result).toBe("Show-Name-S01E05-720p-4sf");
|
||||
});
|
||||
|
||||
it("does not duplicate episode when folder already has the same episode", () => {
|
||||
const result = buildAutoRenameBaseName(
|
||||
"Show.S01E05.720p-4sf",
|
||||
"show.s01e05.720p"
|
||||
);
|
||||
// Must NOT produce "Show.S01E05.720p.S01E05-4sf" (double episode bug)
|
||||
expect(result).toBe("Show.S01E05.720p-4sf");
|
||||
});
|
||||
|
||||
it("handles folder with only -4sf suffix (edge case)", () => {
|
||||
const result = buildAutoRenameBaseName("-4sf", "show.s01e01.mkv");
|
||||
// Extreme edge case - sanitizeFilename trims leading dots
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).toContain("S01E01");
|
||||
expect(result!).toContain("-4sf");
|
||||
expect(result!).not.toContain(".S01E01.S01E01"); // no duplication
|
||||
});
|
||||
|
||||
it("sanitizes special characters from result", () => {
|
||||
// sanitizeFilename should strip dangerous chars
|
||||
const result = buildAutoRenameBaseName("Show:Name.S01-4sf", "show.s01e01.mkv");
|
||||
// The colon should be sanitized away
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).not.toContain(":");
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildAutoRenameBaseNameFromFolders", () => {
|
||||
it("uses parent folder when current folder is not a scene template", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Episode 01",
|
||||
"Banshee.S02.German.720p.BluRay.x264-RIPLEY"
|
||||
],
|
||||
"r-banshee.s02e01-720p"
|
||||
);
|
||||
expect(result).toBe("Banshee.S02.German.720p.BluRay.x264-RIPLEY");
|
||||
});
|
||||
|
||||
it("uses nested scene subfolder directly", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Banshee.S02E01.German.720p.BluRay.x264-RIPLEY",
|
||||
"Banshee.S02.German.720p.BluRay.x264-RIPLEY"
|
||||
],
|
||||
"r-banshee.s02e01-720p"
|
||||
);
|
||||
expect(result).toBe("Banshee.S02E01.German.720p.BluRay.x264-RIPLEY");
|
||||
});
|
||||
|
||||
it("injects REPACK when parent folder carries repack hint", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Banshee.S02E01.German.720p.BluRay.x264-RIPLEY",
|
||||
"Banshee.S02.German.RP.720p.BluRay.x264-RIPLEY"
|
||||
],
|
||||
"r-banshee.s02e01-720p"
|
||||
);
|
||||
expect(result).toBe("Banshee.S02E01.German.REPACK.720p.BluRay.x264-RIPLEY");
|
||||
});
|
||||
|
||||
it("uses nested Arrow episode folder with title", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Arrow.S04E01.Green.Arrow.German.DL.720p.BluRay.x264-RSG",
|
||||
"Arrow.S04.German.DL.720p.BluRay.x264-RSG"
|
||||
],
|
||||
"rsg-arrow-s04e01-720p"
|
||||
);
|
||||
expect(result).toBe("Arrow.S04E01.Green.Arrow.German.DL.720p.BluRay.x264-RSG");
|
||||
});
|
||||
|
||||
it("adds REPACK for Arrow when source contains rp token", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Arrow.S04E01.Green.Arrow.German.DL.720p.BluRay.x264-RSG",
|
||||
"Arrow.S04.German.DL.720p.BluRay.x264-RSG"
|
||||
],
|
||||
"rsg-arrow-s04e01.rp.720p"
|
||||
);
|
||||
expect(result).toBe("Arrow.S04E01.Green.Arrow.German.DL.REPACK.720p.BluRay.x264-RSG");
|
||||
});
|
||||
|
||||
it("converts Teil token to episode using parent season", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Last.Impact.Der.Einschlag.Teil1.GERMAN.DL.720p.WEB.H264-SunDry",
|
||||
"Last.Impact.Der.Einschlag.S01.GERMAN.DL.720p.WEB.H264-SunDry"
|
||||
],
|
||||
"sundry-last.impact.der.einschlag.teil1.720p.web.h264"
|
||||
);
|
||||
expect(result).toBe("Last.Impact.Der.Einschlag.S01E01.GERMAN.DL.720p.WEB.H264-SunDry");
|
||||
});
|
||||
|
||||
it("converts Teil token to episode with REPACK", () => {
|
||||
const result = buildAutoRenameBaseNameFromFolders(
|
||||
[
|
||||
"Last.Impact.Der.Einschlag.Teil1.GERMAN.DL.720p.WEB.H264-SunDry",
|
||||
"Last.Impact.Der.Einschlag.S01.GERMAN.DL.720p.WEB.H264-SunDry"
|
||||
],
|
||||
"sundry-last.impact.der.einschlag.teil1.rp.720p.web.h264"
|
||||
);
|
||||
expect(result).toBe("Last.Impact.Der.Einschlag.S01E01.GERMAN.DL.REPACK.720p.WEB.H264-SunDry");
|
||||
});
|
||||
|
||||
it("forces episode insertion for flat season folder when many files share directory", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Arrow.S08.GERMAN.DUBBED.DL.720p.BluRay.x264-TMSF"
|
||||
],
|
||||
"tmsf-arrow-s08e03-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Arrow.S08E03.GERMAN.DUBBED.DL.720p.BluRay.x264-TMSF");
|
||||
});
|
||||
|
||||
it("forces episode insertion plus REPACK for flat season folder", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Gotham.S05.GERMAN.DUBBED.720p.BLURAY.x264-ZZGtv"
|
||||
],
|
||||
"zzgtv-gotham-s05e02.rp",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Gotham.S05E02.GERMAN.DUBBED.REPACK.720p.BLURAY.x264-ZZGtv");
|
||||
});
|
||||
|
||||
it("uses nested episode title folder for Gotham TvR style", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Gotham.S04E01.Pax.Penguina.GERMAN.5.1.DL.AC3.720p.BDRiP.x264-TvR",
|
||||
"Gotham.S04.GERMAN.5.1.DL.AC3.720p.BDRiP.x264-TvR"
|
||||
],
|
||||
"tvr-gotham-s04e01-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Gotham.S04E01.Pax.Penguina.GERMAN.5.1.DL.AC3.720p.BDRiP.x264-TvR");
|
||||
});
|
||||
|
||||
it("uses nested title folder for Britannia TV4A style", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Britannia.S01E01.Die.Landung.German.DL.720p.BluRay.x264-TV4A",
|
||||
"Britannia.S01.German.DL.720p.BluRay.x264-TV4A"
|
||||
],
|
||||
"tv4a-britannia.s01e01-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Britannia.S01E01.Die.Landung.German.DL.720p.BluRay.x264-TV4A");
|
||||
});
|
||||
|
||||
it("handles odd source token style 101 by using nested Agent X folder", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Agent.X.S01E01.Pilot.German.DD51.Dubbed.DL.720p.iTunesHD.x264-TVS",
|
||||
"Agent.X.S01.German.DD51.Dubbed.DL.720p.iTunesHD.x264-TVS"
|
||||
],
|
||||
"tvs-agent-x-dd51-ded-dl-7p-ithd-x264-101",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Agent.X.S01E01.Pilot.German.DD51.Dubbed.DL.720p.iTunesHD.x264-TVS");
|
||||
});
|
||||
|
||||
it("maps compact code 301 to S03E01 for nested Legion folder", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Legion.S03E01.Kapitel.20.German.DD51.Dubbed.DL.720p.AmazonHD.AVC-TVS",
|
||||
"Legion.S03.German.DD51.Dubbed.DL.720p.AmazonHD.AVC-TVS"
|
||||
],
|
||||
"tvs-legion-dd51-ded-dl-7p-azhd-avc-301",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Legion.S03E01.Kapitel.20.German.DD51.Dubbed.DL.720p.AmazonHD.AVC-TVS");
|
||||
});
|
||||
|
||||
it("maps compact code 211 in flat season folder", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Lethal.Weapon.S02.German.DD51.Dubbed.DL.720p.AmazonHD.x264-TVS"
|
||||
],
|
||||
"tvs-lethal-weapon-dd51-ded-dl-7p-azhd-x264-211",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Lethal.Weapon.S02E11.German.DD51.Dubbed.DL.720p.AmazonHD.x264-TVS");
|
||||
});
|
||||
|
||||
it("maps episode-only token e01 via season folder hint and keeps REPACK", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Cheat.der.Betrug.S01.GERMAN.720p.WEB.h264-TMSF"
|
||||
],
|
||||
"tmsf-cheatderbetrug-e01-720p-repack",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Cheat.der.Betrug.S01E01.GERMAN.REPACK.720p.WEB.h264-TMSF");
|
||||
});
|
||||
|
||||
it("maps episode-only token e02 via season folder hint", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Cheat.der.Betrug.S01.GERMAN.720p.WEB.h264-TMSF"
|
||||
],
|
||||
"tmsf-cheatderbetrug-e02-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Cheat.der.Betrug.S01E02.GERMAN.720p.WEB.h264-TMSF");
|
||||
});
|
||||
|
||||
it("keeps renaming for odd source order like 4sf-bs-720p-s01e05", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Cheat.der.Betrug.S01.GERMAN.720p.WEB.h264-TMSF"
|
||||
],
|
||||
"4sf-bs-720p-s01e05",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Cheat.der.Betrug.S01E05.GERMAN.720p.WEB.h264-TMSF");
|
||||
});
|
||||
|
||||
it("accepts lowercase scene group suffixes", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Cheat.der.Betrug.S01.GERMAN.720p.WEB.h264-tmsf"
|
||||
],
|
||||
"tmsf-cheatderbetrug-e01-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Cheat.der.Betrug.S01E01.GERMAN.720p.WEB.h264-tmsf");
|
||||
});
|
||||
|
||||
it("renames double episode file into season folder (Mammon style)", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Mammon.S01.German.1080P.Bluray.x264-SMAHD"
|
||||
],
|
||||
"tvr-mammon-s01e01e02-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Mammon.S01E01E02.German.1080P.Bluray.x264-SMAHD");
|
||||
});
|
||||
|
||||
it("renames second double episode file correctly", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Mammon.S01.German.1080P.Bluray.x264-SMAHD"
|
||||
],
|
||||
"tvr-mammon-s01e03e04-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Mammon.S01E03E04.German.1080P.Bluray.x264-SMAHD");
|
||||
});
|
||||
|
||||
it("renames third double episode file correctly", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Mammon.S01.German.1080P.Bluray.x264-SMAHD"
|
||||
],
|
||||
"tvr-mammon-s01e05e06-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Mammon.S01E05E06.German.1080P.Bluray.x264-SMAHD");
|
||||
});
|
||||
|
||||
// Last-resort fallback: folder has season but no scene group suffix (user-renamed packages)
|
||||
it("renames when folder has season but no scene group suffix (Mystery Road case)", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
["Mystery Road S02"],
|
||||
"myst.road.de.dl.hdtv.7p-s02e05",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Mystery Road S02E05");
|
||||
});
|
||||
|
||||
it("renames with season-only folder and custom name without dots", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
["Meine Serie S03"],
|
||||
"meine-serie-s03e10-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Meine Serie S03E10");
|
||||
});
|
||||
|
||||
it("prefers scene-group folder over season-only fallback", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
[
|
||||
"Mystery Road S02",
|
||||
"Mystery.Road.S02.GERMAN.DL.AC3.720p.HDTV.x264-hrs"
|
||||
],
|
||||
"myst.road.de.dl.hdtv.7p-s02e05",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
// Should use the scene-group folder (hrs), not the custom one
|
||||
expect(result).toBe("Mystery.Road.S02E05.GERMAN.DL.AC3.720p.HDTV.x264-hrs");
|
||||
});
|
||||
|
||||
it("does not use season-only fallback when forceEpisodeForSeasonFolder is false", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
["Mystery Road S02"],
|
||||
"myst.road.de.dl.hdtv.7p-s02e05",
|
||||
{ forceEpisodeForSeasonFolder: false }
|
||||
);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("renames Riviera S02 with single-digit episode s02e2", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
["Riviera.S02.GERMAN.DUBBED.DL.720p.WebHD.x264-TVP"],
|
||||
"tvp-riviera-s02e2-720p",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Riviera.S02E02.GERMAN.DUBBED.DL.720p.WebHD.x264-TVP");
|
||||
});
|
||||
|
||||
it("renames Room 104 abbreviated source r104.de.dl.web.7p-s04e02", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
["Room.104.S04.GERMAN.DL.720p.WEBRiP.x264-LAW"],
|
||||
"r104.de.dl.web.7p-s04e02",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Room.104.S04E02.GERMAN.DL.720p.WEBRiP.x264-LAW");
|
||||
});
|
||||
|
||||
it("renames Room 104 wayne source with episode", () => {
|
||||
const result = buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
["Room.104.S04.GERMAN.DL.720p.WEBRiP.x264-LAW"],
|
||||
"room.104.s04e01.german.dl.720p.web.h264-wayne",
|
||||
{ forceEpisodeForSeasonFolder: true }
|
||||
);
|
||||
expect(result).toBe("Room.104.S04E01.GERMAN.DL.720p.WEBRiP.x264-LAW");
|
||||
});
|
||||
});
|
||||
@@ -25,16 +25,85 @@ describe("cleanup", () => {
|
||||
expect(fs.existsSync(path.join(dir, "movie.mkv"))).toBe(true);
|
||||
});
|
||||
|
||||
it("removes sample artifacts and link files", () => {
|
||||
it("removes sample artifacts and link files", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-"));
|
||||
tempDirs.push(dir);
|
||||
fs.mkdirSync(path.join(dir, "Samples"), { recursive: true });
|
||||
fs.writeFileSync(path.join(dir, "Samples", "demo-sample.mkv"), "x");
|
||||
fs.writeFileSync(path.join(dir, "download_links.txt"), "https://example.com/a\n");
|
||||
|
||||
const links = removeDownloadLinkArtifacts(dir);
|
||||
const samples = removeSampleArtifacts(dir);
|
||||
const links = await removeDownloadLinkArtifacts(dir);
|
||||
const samples = await removeSampleArtifacts(dir);
|
||||
expect(links).toBeGreaterThan(0);
|
||||
expect(samples.files + samples.dirs).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("cleans up archive files in nested directories", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-"));
|
||||
tempDirs.push(dir);
|
||||
|
||||
// Create nested directory structure with archive files
|
||||
const sub1 = path.join(dir, "season1");
|
||||
const sub2 = path.join(dir, "season1", "extras");
|
||||
fs.mkdirSync(sub2, { recursive: true });
|
||||
|
||||
fs.writeFileSync(path.join(sub1, "episode.part1.rar"), "x");
|
||||
fs.writeFileSync(path.join(sub1, "episode.part2.rar"), "x");
|
||||
fs.writeFileSync(path.join(sub2, "bonus.zip"), "x");
|
||||
fs.writeFileSync(path.join(sub2, "bonus.7z"), "x");
|
||||
// Non-archive files should be kept
|
||||
fs.writeFileSync(path.join(sub1, "video.mkv"), "real content");
|
||||
fs.writeFileSync(path.join(sub2, "subtitle.srt"), "subtitle content");
|
||||
|
||||
const removed = cleanupCancelledPackageArtifacts(dir);
|
||||
expect(removed).toBe(4); // 2 rar parts + zip + 7z
|
||||
expect(fs.existsSync(path.join(sub1, "episode.part1.rar"))).toBe(false);
|
||||
expect(fs.existsSync(path.join(sub1, "episode.part2.rar"))).toBe(false);
|
||||
expect(fs.existsSync(path.join(sub2, "bonus.zip"))).toBe(false);
|
||||
expect(fs.existsSync(path.join(sub2, "bonus.7z"))).toBe(false);
|
||||
// Non-archives kept
|
||||
expect(fs.existsSync(path.join(sub1, "video.mkv"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(sub2, "subtitle.srt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("detects link artifacts by URL content in text files", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-"));
|
||||
tempDirs.push(dir);
|
||||
|
||||
// File with link-like name containing URLs should be removed
|
||||
fs.writeFileSync(path.join(dir, "download_links.txt"), "https://rapidgator.net/file/abc123\nhttps://uploaded.net/file/def456\n");
|
||||
// File with link-like name but no URLs should be kept
|
||||
fs.writeFileSync(path.join(dir, "my_downloads.txt"), "Just some random text without URLs");
|
||||
// Regular text file that doesn't match the link pattern should be kept
|
||||
fs.writeFileSync(path.join(dir, "readme.txt"), "https://example.com");
|
||||
// .url files should always be removed
|
||||
fs.writeFileSync(path.join(dir, "bookmark.url"), "[InternetShortcut]\nURL=https://example.com");
|
||||
// .dlc files should always be removed
|
||||
fs.writeFileSync(path.join(dir, "container.dlc"), "encrypted-data");
|
||||
|
||||
const removed = await removeDownloadLinkArtifacts(dir);
|
||||
expect(removed).toBeGreaterThanOrEqual(3); // download_links.txt + bookmark.url + container.dlc
|
||||
expect(fs.existsSync(path.join(dir, "download_links.txt"))).toBe(false);
|
||||
expect(fs.existsSync(path.join(dir, "bookmark.url"))).toBe(false);
|
||||
expect(fs.existsSync(path.join(dir, "container.dlc"))).toBe(false);
|
||||
// Non-matching files should be kept
|
||||
expect(fs.existsSync(path.join(dir, "readme.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("does not recurse into sample symlink or junction targets", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-"));
|
||||
const external = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-ext-"));
|
||||
tempDirs.push(dir, external);
|
||||
|
||||
const outsideFile = path.join(external, "outside-sample.mkv");
|
||||
fs.writeFileSync(outsideFile, "keep", "utf8");
|
||||
|
||||
const linkedSampleDir = path.join(dir, "sample");
|
||||
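// On Windows a junction is used because directory symlinks may require elevated privileges; other platforms use a plain directory symlink.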
const linkType: fs.symlink.Type = process.platform === "win32" ? "junction" : "dir";
|
||||
fs.symlinkSync(external, linkedSampleDir, linkType);
|
||||
|
||||
const result = await removeSampleArtifacts(dir);
|
||||
expect(result.files).toBe(0);
|
||||
expect(fs.existsSync(outsideFile)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
210
tests/container.test.ts
Normal file
@@ -0,0 +1,210 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { importDlcContainers } from "../src/main/container";
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
const originalFetch = globalThis.fetch;
|
||||
|
||||
afterEach(() => {
|
||||
globalThis.fetch = originalFetch;
|
||||
vi.restoreAllMocks();
|
||||
for (const dir of tempDirs.splice(0)) {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
describe("container", () => {
|
||||
it("skips oversized DLC files without throwing and blocking other files", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const oversizedFilePath = path.join(dir, "oversized.dlc");
|
||||
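// One byte over 8 MiB, presumably the importer's per-file size limit for DLC containers.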
fs.writeFileSync(oversizedFilePath, Buffer.alloc((8 * 1024 * 1024) + 1, 1));
|
||||
|
||||
// Create a valid mockup DLC that would be skipped if an error was thrown
|
||||
const validFilePath = path.join(dir, "valid.dlc");
|
||||
// Just needs to be short enough to pass file limits but fail parsing, triggering dcrypt fallback
|
||||
fs.writeFileSync(validFilePath, Buffer.from("Valid but not real DLC content..."));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
return new Response("http://example.com/file1.rar\nhttp://example.com/file2.rar", { status: 200 });
|
||||
}
|
||||
return new Response("", { status: 404 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
const result = await importDlcContainers([oversizedFilePath, validFilePath]);
|
||||
|
||||
// Expect the oversized file to be skipped silently and the valid one to be parsed into a single package named after the DLC file
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("valid");
|
||||
expect(result[0].links).toEqual(["http://example.com/file1.rar", "http://example.com/file2.rar"]);
|
||||
expect(fetchSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("skips non-dlc files completely", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-non-"));
|
||||
tempDirs.push(dir);
|
||||
const txtPath = path.join(dir, "links.txt");
|
||||
fs.writeFileSync(txtPath, "http://link.com/1");
|
||||
|
||||
const result = await importDlcContainers([txtPath]);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("falls back to dcrypt if local decryption returns empty", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const filePath = path.join(dir, "fallback.dlc");
|
||||
|
||||
// A file large enough to trigger local decryption attempt (needs > 89 bytes to pass the slice check)
|
||||
fs.writeFileSync(filePath, Buffer.alloc(100, 1).toString("base64"));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("service.jdownloader.org")) {
|
||||
// Mock local RC service failure (returning 404)
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
// Mock dcrypt fallback success
|
||||
return new Response("http://fallback.com/1", { status: 200 });
|
||||
}
|
||||
return new Response("", { status: 404 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
const result = await importDlcContainers([filePath]);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("fallback");
|
||||
expect(result[0].links).toEqual(["http://fallback.com/1"]);
|
||||
// Should have tried both!
|
||||
expect(fetchSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it("falls back to dcrypt when local decryption throws invalid padding", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const filePath = path.join(dir, "invalid-local.dlc");
|
||||
fs.writeFileSync(filePath, "X".repeat(120));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("service.jdownloader.org")) {
|
||||
return new Response(`<rc>${Buffer.alloc(16).toString("base64")}</rc>`, { status: 200 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
return new Response("http://example.com/fallback1", { status: 200 });
|
||||
}
|
||||
return new Response("", { status: 404 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
const result = await importDlcContainers([filePath]);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("invalid-local");
|
||||
expect(result[0].links).toEqual(["http://example.com/fallback1"]);
|
||||
expect(fetchSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it("falls back to paste endpoint when upload returns 413", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const filePath = path.join(dir, "big-dlc.dlc");
|
||||
fs.writeFileSync(filePath, Buffer.alloc(100, 1).toString("base64"));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("service.jdownloader.org")) {
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
return new Response("Request Entity Too Large", { status: 413 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/paste")) {
|
||||
return new Response("http://paste-fallback.com/file1.rar\nhttp://paste-fallback.com/file2.rar", { status: 200 });
|
||||
}
|
||||
return new Response("", { status: 404 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
const result = await importDlcContainers([filePath]);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe("big-dlc");
|
||||
expect(result[0].links).toEqual(["http://paste-fallback.com/file1.rar", "http://paste-fallback.com/file2.rar"]);
|
||||
// local RC + upload + paste = 3 calls
|
||||
expect(fetchSpy).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it("throws when both dcrypt endpoints return 413", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const filePath = path.join(dir, "huge.dlc");
|
||||
fs.writeFileSync(filePath, Buffer.alloc(100, 1).toString("base64"));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("service.jdownloader.org")) {
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
return new Response("Request Entity Too Large", { status: 413 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/paste")) {
|
||||
return new Response("Request Entity Too Large", { status: 413 });
|
||||
}
|
||||
return new Response("", { status: 500 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
await expect(importDlcContainers([filePath])).rejects.toThrow(/zu groß für dcrypt/i);
|
||||
});
|
||||
|
||||
it("throws when upload returns 413 and paste returns 500", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const filePath = path.join(dir, "doomed.dlc");
|
||||
fs.writeFileSync(filePath, Buffer.from("not a valid dlc payload at all"));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("service.jdownloader.org")) {
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
return new Response("Request Entity Too Large", { status: 413 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/paste")) {
|
||||
return new Response("paste failure", { status: 500 });
|
||||
}
|
||||
return new Response("", { status: 500 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
await expect(importDlcContainers([filePath])).rejects.toThrow(/DLC konnte nicht importiert werden/i);
|
||||
});
|
||||
|
||||
it("throws clear error when all dlc imports fail", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
|
||||
tempDirs.push(dir);
|
||||
const filePath = path.join(dir, "broken.dlc");
|
||||
fs.writeFileSync(filePath, Buffer.from("not a valid dlc payload at all"));
|
||||
|
||||
const fetchSpy = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("service.jdownloader.org")) {
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
if (urlStr.includes("dcrypt.it/decrypt/upload")) {
|
||||
return new Response("upstream failure", { status: 500 });
|
||||
}
|
||||
return new Response("", { status: 500 });
|
||||
});
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
await expect(importDlcContainers([filePath])).rejects.toThrow(/DLC konnte nicht importiert werden/i);
|
||||
});
|
||||
});
|
||||
@@ -1,19 +1,21 @@
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { defaultSettings } from "../src/main/constants";
|
||||
import { DebridService } from "../src/main/debrid";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { defaultSettings, REQUEST_RETRIES } from "../src/main/constants";
|
||||
import { DebridService, extractRapidgatorFilenameFromHtml, filenameFromRapidgatorUrlPath, normalizeResolvedFilename } from "../src/main/debrid";
|
||||
|
||||
const originalFetch = globalThis.fetch;
|
||||
|
||||
afterEach(() => {
|
||||
globalThis.fetch = originalFetch;
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe("debrid service", () => {
|
||||
it("falls back to Mega-Debrid when Real-Debrid fails", async () => {
|
||||
it("falls back to Mega web when Real-Debrid fails", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "rd-token",
|
||||
megaToken: "mega-token",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
bestToken: "",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "megadebrid" as const,
|
||||
@@ -29,26 +31,29 @@ describe("debrid service", () => {
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
if (url.includes("mega-debrid.eu/api.php?action=getLink")) {
|
||||
return new Response(JSON.stringify({ response_code: "ok", debridLink: "https://mega.example/file.bin" }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const megaWeb = vi.fn(async () => ({
|
||||
fileName: "file.bin",
|
||||
directUrl: "https://mega-web.example/file.bin",
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
}));
|
||||
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
const result = await service.unrestrictLink("https://rapidgator.net/file/example.part1.rar.html");
|
||||
expect(result.provider).toBe("megadebrid");
|
||||
expect(result.directUrl).toBe("https://mega.example/file.bin");
|
||||
expect(result.directUrl).toBe("https://mega-web.example/file.bin");
|
||||
expect(megaWeb).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("does not fallback when auto fallback is disabled", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "rd-token",
|
||||
megaToken: "mega-token",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "megadebrid" as const,
|
||||
providerTertiary: "bestdebrid" as const,
|
||||
@@ -60,21 +65,25 @@ describe("debrid service", () => {
|
||||
if (url.includes("api.real-debrid.com/rest/1.0/unrestrict/link")) {
|
||||
return new Response("traffic exhausted", { status: 429 });
|
||||
}
|
||||
return new Response(JSON.stringify({ response_code: "ok", debridLink: "https://mega.example/file.bin" }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const megaWeb = vi.fn(async () => ({
|
||||
fileName: "unused.bin",
|
||||
directUrl: "https://unused",
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
}));
|
||||
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/example.part2.rar.html")).rejects.toThrow();
|
||||
expect(megaWeb).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it("supports BestDebrid auth query fallback", async () => {
|
||||
it("uses BestDebrid auth header without token query fallback", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
megaToken: "",
|
||||
bestToken: "best-token",
|
||||
providerPrimary: "bestdebrid" as const,
|
||||
providerSecondary: "realdebrid" as const,
|
||||
@@ -82,15 +91,11 @@ describe("debrid service", () => {
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
const calledUrls: string[] = [];
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
calledUrls.push(url);
|
||||
if (url.includes("/api/v1/generateLink?link=")) {
|
||||
return new Response(JSON.stringify({ message: "Bad token, expired, or invalid" }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
if (url.includes("/api/v1/generateLink?auth=")) {
|
||||
return new Response(JSON.stringify({ download: "https://best.example/file.bin", filename: "file.bin", filesize: 2048 }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
@@ -103,13 +108,108 @@ describe("debrid service", () => {
|
||||
const result = await service.unrestrictLink("https://rapidgator.net/file/example.part3.rar.html");
|
||||
expect(result.provider).toBe("bestdebrid");
|
||||
expect(result.fileSize).toBe(2048);
|
||||
expect(calledUrls.some((url) => url.includes("auth="))).toBe(false);
|
||||
});
|
||||
|
||||
it("sends Bearer auth header to BestDebrid", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
bestToken: "best-token",
|
||||
providerPrimary: "bestdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
let authHeader = "";
|
||||
globalThis.fetch = (async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("/api/v1/generateLink?link=")) {
|
||||
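// HeadersInit can be a Headers instance, an array of tuples, or a plain record, so all three shapes are checked.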
const headers = init?.headers;
|
||||
if (headers instanceof Headers) {
|
||||
authHeader = headers.get("Authorization") || "";
|
||||
} else if (Array.isArray(headers)) {
|
||||
const tuple = headers.find(([key]) => key.toLowerCase() === "authorization");
|
||||
authHeader = tuple?.[1] || "";
|
||||
} else {
|
||||
authHeader = String((headers as Record<string, unknown> | undefined)?.Authorization || "");
|
||||
}
|
||||
return new Response(JSON.stringify({ download: "https://best.example/file.bin", filename: "file.bin", filesize: 42 }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const result = await service.unrestrictLink("https://hoster.example/file/abc");
|
||||
expect(result.provider).toBe("bestdebrid");
|
||||
expect(authHeader).toBe("Bearer best-token");
|
||||
});
|
||||
|
||||
it("does not retry BestDebrid auth failures (401)", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
bestToken: "best-token",
|
||||
providerPrimary: "bestdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
let calls = 0;
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("/api/v1/generateLink?link=")) {
|
||||
calls += 1;
|
||||
return new Response(JSON.stringify({ message: "Unauthorized" }), {
|
||||
status: 401,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
await expect(service.unrestrictLink("https://hoster.example/file/no-retry")).rejects.toThrow();
|
||||
expect(calls).toBe(1);
|
||||
});
|
||||
|
||||
it("does not retry AllDebrid auth failures (403)", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
allDebridToken: "ad-token",
|
||||
providerPrimary: "alldebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
let calls = 0;
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.alldebrid.com/v4/link/unlock")) {
|
||||
calls += 1;
|
||||
return new Response(JSON.stringify({ status: "error", error: { message: "forbidden" } }), {
|
||||
status: 403,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
await expect(service.unrestrictLink("https://hoster.example/file/no-retry-ad")).rejects.toThrow();
|
||||
expect(calls).toBe(1);
|
||||
});
|
||||
|
||||
it("supports AllDebrid unlock", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
megaToken: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "ad-token",
|
||||
providerPrimary: "alldebrid" as const,
|
||||
@@ -142,4 +242,551 @@ describe("debrid service", () => {
|
||||
expect(result.directUrl).toBe("https://alldebrid.example/file.bin");
|
||||
expect(result.fileSize).toBe(4096);
|
||||
});
|
||||
|
||||
it("treats MegaDebrid as not configured when web fallback callback is unavailable", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "megadebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: false
|
||||
};
|
||||
|
||||
const service = new DebridService(settings);
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/missing-mega-web")).rejects.toThrow(/nicht konfiguriert/i);
|
||||
});
|
||||
|
||||
it("uses Mega web path exclusively", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "megadebrid" as const,
|
||||
providerSecondary: "megadebrid" as const,
|
||||
providerTertiary: "megadebrid" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
const fetchSpy = vi.fn(async () => new Response("not-found", { status: 404 }));
|
||||
globalThis.fetch = fetchSpy as unknown as typeof fetch;
|
||||
|
||||
const megaWeb = vi.fn(async () => ({
|
||||
fileName: "from-web.rar",
|
||||
directUrl: "https://www11.unrestrict.link/download/file/abc/from-web.rar",
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
}));
|
||||
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
const result = await service.unrestrictLink("https://rapidgator.net/file/abc/from-web.rar.html");
|
||||
expect(result.provider).toBe("megadebrid");
|
||||
expect(result.directUrl).toContain("unrestrict.link/download/file/");
|
||||
expect(megaWeb).toHaveBeenCalledTimes(1);
|
||||
expect(fetchSpy).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it("aborts Mega web unrestrict when caller signal is cancelled", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "megadebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: false
|
||||
};
|
||||
|
||||
const megaWeb = vi.fn((_link: string, signal?: AbortSignal): Promise<never> => new Promise((_, reject) => {
|
||||
const onAbort = (): void => reject(new Error("aborted:mega-web-test"));
|
||||
if (signal?.aborted) {
|
||||
onAbort();
|
||||
return;
|
||||
}
|
||||
signal?.addEventListener("abort", onAbort, { once: true });
|
||||
}));
|
||||
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
const controller = new AbortController();
|
||||
const abortTimer = setTimeout(() => {
|
||||
controller.abort("test");
|
||||
}, 200);
|
||||
|
||||
try {
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/abort-mega-web", controller.signal)).rejects.toThrow(/aborted/i);
|
||||
expect(megaWeb).toHaveBeenCalledTimes(1);
|
||||
expect(megaWeb.mock.calls[0]?.[1]).toBe(controller.signal);
|
||||
} finally {
|
||||
clearTimeout(abortTimer);
|
||||
}
|
||||
});
|
||||
|
||||
it("respects provider selection and does not append hidden providers", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "ad-token",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "megadebrid" as const,
|
||||
providerSecondary: "megadebrid" as const,
|
||||
providerTertiary: "megadebrid" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
let allDebridCalls = 0;
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.alldebrid.com/v4/link/unlock")) {
|
||||
allDebridCalls += 1;
|
||||
return new Response(JSON.stringify({ status: "success", data: { link: "https://alldebrid.example/file.bin" } }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const megaWeb = vi.fn(async () => null);
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/example.part5.rar.html")).rejects.toThrow();
|
||||
expect(allDebridCalls).toBe(0);
|
||||
});
|
||||
|
||||
it("does not use secondary provider when fallback is disabled and primary is missing", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "megadebrid" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: false
|
||||
};
|
||||
|
||||
const megaWeb = vi.fn(async () => ({
|
||||
fileName: "should-not-run.bin",
|
||||
directUrl: "https://unused",
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
}));
|
||||
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/example.part5.rar.html")).rejects.toThrow(/nicht konfiguriert/i);
|
||||
expect(megaWeb).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it("allows disabling secondary and tertiary providers", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "rd-token",
|
||||
megaLogin: "user",
|
||||
megaPassword: "pass",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.real-debrid.com/rest/1.0/unrestrict/link")) {
|
||||
return new Response(JSON.stringify({ error: "traffic_limit" }), {
|
||||
status: 403,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const megaWeb = vi.fn(async () => ({
|
||||
fileName: "unused.bin",
|
||||
directUrl: "https://unused",
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
}));
|
||||
|
||||
const service = new DebridService(settings, { megaWebUnrestrict: megaWeb });
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/example.part6.rar.html")).rejects.toThrow();
|
||||
expect(megaWeb).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it("resolves rapidgator filename from page when provider returns hash", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "rd-token",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.real-debrid.com/rest/1.0/unrestrict/link")) {
|
||||
return new Response(JSON.stringify({
|
||||
download: "https://cdn.example/file.bin",
|
||||
filename: "6f09df2984fe01378537c7cd8d7fa7ce",
|
||||
filesize: 2048
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
if (url.includes("rapidgator.net/file/6f09df2984fe01378537c7cd8d7fa7ce")) {
|
||||
return new Response("<html><head><title>download file Banshee.S04E01.German.DL.720p.part01.rar - Rapidgator</title></head></html>", {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/html" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const result = await service.unrestrictLink("https://rapidgator.net/file/6f09df2984fe01378537c7cd8d7fa7ce");
|
||||
expect(result.provider).toBe("realdebrid");
|
||||
expect(result.fileName).toBe("Banshee.S04E01.German.DL.720p.part01.rar");
|
||||
});
|
||||
|
||||
it("resolves filenames for rg.to links", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
allDebridToken: ""
|
||||
};
|
||||
|
||||
const link = "https://rg.to/file/685cec6dcc1837dc725755fc9c726dd9";
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url === link) {
|
||||
return new Response("<html><head><title>Download file Bulletproof.S01E01.German.DL.DD20.Synced.720p.AmazonHD.h264-GDR.part01.rar</title></head></html>", {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/html" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const resolved = await service.resolveFilenames([link]);
|
||||
expect(resolved.get(link)).toBe("Bulletproof.S01E01.German.DL.DD20.Synced.720p.AmazonHD.h264-GDR.part01.rar");
|
||||
});
|
||||
|
||||
it("does not unrestrict non-rapidgator links during filename scan", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "rd-token",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true,
|
||||
allDebridToken: ""
|
||||
};
|
||||
|
||||
const linkFromPage = "https://rapidgator.net/file/11111111111111111111111111111111";
|
||||
const linkFromProvider = "https://hoster.example/file/22222222222222222222222222222222";
|
||||
let unrestrictCalls = 0;
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
|
||||
if (url === linkFromPage) {
|
||||
return new Response("<html><head><title>Download file from-page.part1.rar</title></head></html>", {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/html" }
|
||||
});
|
||||
}
|
||||
|
||||
if (url.includes("api.real-debrid.com/rest/1.0/unrestrict/link")) {
|
||||
unrestrictCalls += 1;
|
||||
const body = init?.body;
|
||||
const bodyText = body instanceof URLSearchParams ? body.toString() : String(body || "");
|
||||
const linkValue = new URLSearchParams(bodyText).get("link") || "";
|
||||
if (linkValue === linkFromProvider) {
|
||||
return new Response(JSON.stringify({
|
||||
download: "https://cdn.example/from-provider",
|
||||
filename: "from-provider.part2.rar",
|
||||
filesize: 1024
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const events: Array<{ link: string; fileName: string }> = [];
|
||||
const resolved = await service.resolveFilenames([linkFromPage, linkFromProvider], (link, fileName) => {
|
||||
events.push({ link, fileName });
|
||||
});
|
||||
|
||||
expect(resolved.get(linkFromPage)).toBe("from-page.part1.rar");
|
||||
expect(resolved.has(linkFromProvider)).toBe(false);
|
||||
expect(unrestrictCalls).toBe(0);
|
||||
expect(events).toEqual(expect.arrayContaining([
|
||||
{ link: linkFromPage, fileName: "from-page.part1.rar" }
|
||||
]));
|
||||
});
|
||||
|
||||
it("does not unrestrict rapidgator links during filename scan after page lookup miss", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "rd-token",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
allDebridToken: ""
|
||||
};
|
||||
|
||||
const link = "https://rapidgator.net/file/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
|
||||
let unrestrictCalls = 0;
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.real-debrid.com/rest/1.0/unrestrict/link")) {
|
||||
unrestrictCalls += 1;
|
||||
return new Response(JSON.stringify({ error: "should-not-be-called" }), {
|
||||
status: 500,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
if (url === link) {
|
||||
return new Response("not found", { status: 404 });
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const resolved = await service.resolveFilenames([link]);
|
||||
expect(resolved.size).toBe(0);
|
||||
expect(unrestrictCalls).toBe(0);
|
||||
});
|
||||
|
||||
it("maps AllDebrid filename infos by index when response link is missing", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
token: "",
|
||||
bestToken: "",
|
||||
allDebridToken: "ad-token",
|
||||
providerPrimary: "realdebrid" as const,
|
||||
providerSecondary: "none" as const,
|
||||
providerTertiary: "none" as const,
|
||||
autoProviderFallback: true
|
||||
};
|
||||
|
||||
const linkA = "https://rapidgator.net/file/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
|
||||
const linkB = "https://rapidgator.net/file/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.alldebrid.com/v4/link/infos")) {
|
||||
return new Response(JSON.stringify({
|
||||
status: "success",
|
||||
data: {
|
||||
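// The infos deliberately omit the original link so the service has to map results back by array index.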
infos: [
|
||||
{ filename: "wrong-a.mkv" },
|
||||
{ filename: "wrong-b.mkv" }
|
||||
]
|
||||
}
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
if (url === linkA || url === linkB) {
|
||||
return new Response("no title", { status: 404 });
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const resolved = await service.resolveFilenames([linkA, linkB]);
|
||||
expect(resolved.get(linkA)).toBe("wrong-a.mkv");
|
||||
expect(resolved.get(linkB)).toBe("wrong-b.mkv");
|
||||
expect(resolved.size).toBe(2);
|
||||
});
|
||||
|
||||
it("retries AllDebrid filename infos after transient server error", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
allDebridToken: "ad-token"
|
||||
};
|
||||
|
||||
const link = "https://rapidgator.net/file/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
|
||||
let infoCalls = 0;
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.alldebrid.com/v4/link/infos")) {
|
||||
infoCalls += 1;
|
||||
if (infoCalls === 1) {
|
||||
return new Response("temporary error", { status: 500 });
|
||||
}
|
||||
return new Response(JSON.stringify({
|
||||
status: "success",
|
||||
data: {
|
||||
infos: [
|
||||
{ link, filename: "resolved-from-infos.mkv" }
|
||||
]
|
||||
}
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const resolved = await service.resolveFilenames([link]);
|
||||
expect(resolved.get(link)).toBe("resolved-from-infos.mkv");
|
||||
expect(infoCalls).toBe(2);
|
||||
});
|
||||
|
||||
it("retries AllDebrid filename infos when HTML challenge is returned", async () => {
|
||||
const settings = {
|
||||
...defaultSettings(),
|
||||
allDebridToken: "ad-token"
|
||||
};
|
||||
|
||||
const link = "https://rapidgator.net/file/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
|
||||
let infoCalls = 0;
|
||||
let pageCalls = 0;
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("api.alldebrid.com/v4/link/infos")) {
|
||||
infoCalls += 1;
|
||||
return new Response("<html><title>cf challenge</title></html>", {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/html" }
|
||||
});
|
||||
}
|
||||
if (url === link) {
|
||||
pageCalls += 1;
|
||||
}
|
||||
return new Response("not-found", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const service = new DebridService(settings);
|
||||
const resolved = await service.resolveFilenames([link]);
|
||||
expect(resolved.size).toBe(0);
|
||||
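// Infos calls that return an HTML challenge are retried up to REQUEST_RETRIES times before the resolver falls back to the page lookup.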
expect(infoCalls).toBe(REQUEST_RETRIES);
|
||||
expect(pageCalls).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeResolvedFilename", () => {
|
||||
it("strips HTML entities", () => {
|
||||
expect(normalizeResolvedFilename("Show.S01E01.German.DL.720p.part01.rar")).toBe("Show.S01E01.German.DL.720p.part01.rar");
|
||||
expect(normalizeResolvedFilename("File&Name.part1.rar")).toBe("File&Name.part1.rar");
|
||||
expect(normalizeResolvedFilename("File"Name".part1.rar")).toBe('File"Name".part1.rar');
|
||||
});
|
||||
|
||||
it("strips HTML tags and collapses whitespace", () => {
|
||||
// Tags are replaced by spaces, then multiple spaces collapsed
|
||||
const result = normalizeResolvedFilename("<b>Show.S01E01</b>.part01.rar");
|
||||
expect(result).toBe("Show.S01E01 .part01.rar");
|
||||
|
||||
// Entity decoding happens before tag removal, so &lt;...&gt; becomes <...> and is then stripped
|
||||
const entityTagResult = normalizeResolvedFilename("File&lt;Tag&gt;.part1.rar");
|
||||
expect(entityTagResult).toBe("File .part1.rar");
|
||||
});
|
||||
|
||||
it("strips 'download file' prefix", () => {
|
||||
expect(normalizeResolvedFilename("Download file Show.S01E01.part01.rar")).toBe("Show.S01E01.part01.rar");
|
||||
expect(normalizeResolvedFilename("download file Movie.2024.mkv")).toBe("Movie.2024.mkv");
|
||||
});
|
||||
|
||||
it("strips Rapidgator suffix", () => {
|
||||
expect(normalizeResolvedFilename("Show.S01E01.part01.rar - Rapidgator")).toBe("Show.S01E01.part01.rar");
|
||||
expect(normalizeResolvedFilename("Movie.mkv | Rapidgator.net")).toBe("Movie.mkv");
|
||||
});
|
||||
|
||||
it("returns empty for opaque or non-filename values", () => {
|
||||
expect(normalizeResolvedFilename("")).toBe("");
|
||||
expect(normalizeResolvedFilename("just some text")).toBe("");
|
||||
expect(normalizeResolvedFilename("e51f6809bb6ca615601f5ac5db433737")).toBe("");
|
||||
expect(normalizeResolvedFilename("download.bin")).toBe("");
|
||||
});
|
||||
|
||||
it("handles combined transforms", () => {
|
||||
// "Download file" prefix stripped, & decoded to &, "- Rapidgator" suffix stripped
|
||||
expect(normalizeResolvedFilename("Download file Show.S01E01.part01.rar - Rapidgator"))
|
||||
.toBe("Show.S01E01.part01.rar");
|
||||
});
|
||||
});
|
||||
|
||||
describe("filenameFromRapidgatorUrlPath", () => {
|
||||
it("extracts filename from standard rapidgator URL", () => {
|
||||
expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/abc123/Show.S01E01.part01.rar.html"))
|
||||
.toBe("Show.S01E01.part01.rar");
|
||||
});
|
||||
|
||||
it("extracts filename without .html suffix", () => {
|
||||
expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/abc123/Movie.2024.mkv"))
|
||||
.toBe("Movie.2024.mkv");
|
||||
});
|
||||
|
||||
it("returns empty for hash-only URL paths", () => {
|
||||
expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/e51f6809bb6ca615601f5ac5db433737"))
|
||||
.toBe("");
|
||||
});
|
||||
|
||||
it("returns empty for invalid URLs", () => {
|
||||
expect(filenameFromRapidgatorUrlPath("not-a-url")).toBe("");
|
||||
expect(filenameFromRapidgatorUrlPath("")).toBe("");
|
||||
});
|
||||
|
||||
it("handles URL-encoded path segments", () => {
|
||||
expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/id/Show%20Name.S01E01.part01.rar.html"))
|
||||
.toBe("Show Name.S01E01.part01.rar");
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractRapidgatorFilenameFromHtml", () => {
|
||||
it("extracts filename from title tag", () => {
|
||||
const html = "<html><head><title>Download file Show.S01E01.German.DL.720p.part01.rar - Rapidgator</title></head></html>";
|
||||
expect(extractRapidgatorFilenameFromHtml(html)).toBe("Show.S01E01.German.DL.720p.part01.rar");
|
||||
});
|
||||
|
||||
it("extracts filename from og:title meta tag", () => {
|
||||
const html = '<html><head><meta property="og:title" content="Movie.2024.German.DL.1080p.mkv"></head></html>';
|
||||
expect(extractRapidgatorFilenameFromHtml(html)).toBe("Movie.2024.German.DL.1080p.mkv");
|
||||
});
|
||||
|
||||
it("extracts filename from reversed og:title attribute order", () => {
|
||||
const html = '<html><head><meta content="Movie.2024.German.DL.1080p.mkv" property="og:title"></head></html>';
|
||||
expect(extractRapidgatorFilenameFromHtml(html)).toBe("Movie.2024.German.DL.1080p.mkv");
|
||||
});
|
||||
|
||||
it("returns empty for HTML without recognizable filenames", () => {
|
||||
const html = "<html><head><title>Rapidgator: Fast, Pair and Unlimited</title></head><body>No file here</body></html>";
|
||||
expect(extractRapidgatorFilenameFromHtml(html)).toBe("");
|
||||
});
|
||||
|
||||
it("returns empty for empty HTML", () => {
|
||||
expect(extractRapidgatorFilenameFromHtml("")).toBe("");
|
||||
});
|
||||
|
||||
it("ignores broad body text that is not a labeled filename", () => {
|
||||
const html = "<html><body>Please download file now from mirror.mkv</body></html>";
|
||||
expect(extractRapidgatorFilenameFromHtml(html)).toBe("");
|
||||
});
|
||||
|
||||
it("extracts from File name label in page body", () => {
|
||||
const html = '<html><body>File name: <b>Show.S02E03.720p.part01.rar</b></body></html>';
|
||||
expect(extractRapidgatorFilenameFromHtml(html)).toBe("Show.S02E03.720p.part01.rar");
|
||||
});
|
||||
});
|
||||
|
||||
4506
tests/download-manager.test.ts
Normal file
File diff suppressed because it is too large
204
tests/extractor-jvm.test.ts
Normal file
@@ -0,0 +1,204 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import AdmZip from "adm-zip";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { extractPackageArchives } from "../src/main/extractor";
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
const originalBackend = process.env.RD_EXTRACT_BACKEND;
|
||||
|
||||
function hasJavaRuntime(): boolean {
|
||||
const result = spawnSync("java", ["-version"], { stdio: "ignore" });
|
||||
return result.status === 0;
|
||||
}
|
||||
|
||||
function hasJvmExtractorRuntime(): boolean {
|
||||
const root = path.join(process.cwd(), "resources", "extractor-jvm");
|
||||
const classesMain = path.join(root, "classes", "com", "sucukdeluxe", "extractor", "JBindExtractorMain.class");
|
||||
const requiredLibs = [
|
||||
path.join(root, "lib", "sevenzipjbinding.jar"),
|
||||
path.join(root, "lib", "sevenzipjbinding-all-platforms.jar"),
|
||||
path.join(root, "lib", "zip4j.jar")
|
||||
];
|
||||
return fs.existsSync(classesMain) && requiredLibs.every((libPath) => fs.existsSync(libPath));
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
for (const dir of tempDirs.splice(0)) {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
if (originalBackend === undefined) {
|
||||
delete process.env.RD_EXTRACT_BACKEND;
|
||||
} else {
|
||||
process.env.RD_EXTRACT_BACKEND = originalBackend;
|
||||
}
|
||||
});
|
||||
|
||||
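// The whole suite is skipped when no Java runtime is available or the bundled JVM extractor classes and libraries are missing.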
describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm backend", () => {
|
||||
it("extracts zip archives through SevenZipJBinding backend", async () => {
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
const targetDir = path.join(root, "out");
|
||||
fs.mkdirSync(packageDir, { recursive: true });
|
||||
|
||||
const zipPath = path.join(packageDir, "release.zip");
|
||||
const zip = new AdmZip();
|
||||
zip.addFile("episode.txt", Buffer.from("ok"));
|
||||
zip.writeZip(zipPath);
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false
|
||||
});
|
||||
|
||||
expect(result.extracted).toBe(1);
|
||||
expect(result.failed).toBe(0);
|
||||
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("emits progress callbacks with archiveName and percent", async () => {
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
const targetDir = path.join(root, "out");
|
||||
fs.mkdirSync(packageDir, { recursive: true });
|
||||
|
||||
// Create a ZIP with some content to trigger progress
|
||||
const zipPath = path.join(packageDir, "progress-test.zip");
|
||||
const zip = new AdmZip();
|
||||
zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
|
||||
zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
|
||||
zip.writeZip(zipPath);
|
||||
|
||||
const progressUpdates: Array<{
|
||||
archiveName: string;
|
||||
percent: number;
|
||||
phase: string;
|
||||
archivePercent?: number;
|
||||
}> = [];
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
onProgress: (update) => {
|
||||
progressUpdates.push({
|
||||
archiveName: update.archiveName,
|
||||
percent: update.percent,
|
||||
phase: update.phase,
|
||||
archivePercent: update.archivePercent,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.extracted).toBe(1);
|
||||
expect(result.failed).toBe(0);
|
||||
|
||||
// Should have at least preparing, extracting, and done phases
|
||||
const phases = new Set(progressUpdates.map((u) => u.phase));
|
||||
expect(phases.has("preparing")).toBe(true);
|
||||
expect(phases.has("extracting")).toBe(true);
|
||||
|
||||
// Extracting phase should include the archive name
|
||||
const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
|
||||
expect(extracting.length).toBeGreaterThan(0);
|
||||
|
||||
// Should end at 100%
|
||||
const lastExtracting = extracting[extracting.length - 1];
|
||||
expect(lastExtracting.archivePercent).toBe(100);
|
||||
|
||||
// Files should exist
|
||||
expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("extracts multiple archives sequentially with progress for each", async () => {
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
const targetDir = path.join(root, "out");
|
||||
fs.mkdirSync(packageDir, { recursive: true });
|
||||
|
||||
// Create two separate ZIP archives
|
||||
const zip1 = new AdmZip();
|
||||
zip1.addFile("episode01.txt", Buffer.from("ep1 content"));
|
||||
zip1.writeZip(path.join(packageDir, "archive1.zip"));
|
||||
|
||||
const zip2 = new AdmZip();
|
||||
zip2.addFile("episode02.txt", Buffer.from("ep2 content"));
|
||||
zip2.writeZip(path.join(packageDir, "archive2.zip"));
|
||||
|
||||
const archiveNames = new Set<string>();
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
onProgress: (update) => {
|
||||
if (update.phase === "extracting" && update.archiveName) {
|
||||
archiveNames.add(update.archiveName);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.extracted).toBe(2);
|
||||
expect(result.failed).toBe(0);
|
||||
// Both archive names should have appeared in progress
|
||||
expect(archiveNames.has("archive1.zip")).toBe(true);
|
||||
expect(archiveNames.has("archive2.zip")).toBe(true);
|
||||
// Both files extracted
|
||||
expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("respects ask/skip conflict mode in jvm backend", async () => {
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
const targetDir = path.join(root, "out");
|
||||
fs.mkdirSync(packageDir, { recursive: true });
|
||||
fs.mkdirSync(targetDir, { recursive: true });
|
||||
|
||||
const zipPath = path.join(packageDir, "conflict.zip");
|
||||
const zip = new AdmZip();
|
||||
zip.addFile("same.txt", Buffer.from("new"));
|
||||
zip.writeZip(zipPath);
|
||||
|
||||
const existingPath = path.join(targetDir, "same.txt");
|
||||
fs.writeFileSync(existingPath, "old", "utf8");
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "ask",
|
||||
removeLinks: false,
|
||||
removeSamples: false
|
||||
});
|
||||
|
||||
expect(result.extracted).toBe(1);
|
||||
expect(result.failed).toBe(0);
|
||||
expect(fs.readFileSync(existingPath, "utf8")).toBe("old");
|
||||
});
|
||||
});
|
||||
1089
tests/extractor.test.ts
Normal file
1089
tests/extractor.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
@ -2,7 +2,7 @@ import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { parseHashLine, validateFileAgainstManifest } from "../src/main/integrity";
|
||||
import { parseHashLine, readHashManifest, validateFileAgainstManifest } from "../src/main/integrity";
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
|
||||
@ -29,4 +29,56 @@ describe("integrity", () => {
|
||||
const result = await validateFileAgainstManifest(filePath, dir);
|
||||
expect(result.ok).toBe(true);
|
||||
});
|
||||
|
||||
it("skips manifest files larger than 5MB", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-int-"));
|
||||
tempDirs.push(dir);
|
||||
|
||||
// Create a .md5 manifest that exceeds the 5MB limit
|
||||
const largeContent = "d41d8cd98f00b204e9800998ecf8427e sample.bin\n".repeat(200000);
|
||||
const manifestPath = path.join(dir, "hashes.md5");
|
||||
fs.writeFileSync(manifestPath, largeContent, "utf8");
|
||||
|
||||
// Verify the file is actually > 5MB
|
||||
const stat = fs.statSync(manifestPath);
|
||||
expect(stat.size).toBeGreaterThan(5 * 1024 * 1024);
|
||||
|
||||
// readHashManifest should skip the oversized file
|
||||
const manifest = readHashManifest(dir);
|
||||
expect(manifest.size).toBe(0);
|
||||
});
|
||||
|
||||
it("does not parse SHA256 (64-char hex) as valid hash", () => {
|
||||
// SHA256 is 64 chars - parseHashLine only supports 32 (MD5) and 40 (SHA1)
|
||||
const sha256Line = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 emptyfile.bin";
|
||||
const result = parseHashLine(sha256Line);
|
||||
// 64-char hex should not match the MD5 (32) or SHA1 (40) pattern
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("parses SHA1 hash lines correctly", () => {
|
||||
const sha1Line = "da39a3ee5e6b4b0d3255bfef95601890afd80709 emptyfile.bin";
|
||||
const result = parseHashLine(sha1Line);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.algorithm).toBe("sha1");
|
||||
expect(result?.digest).toBe("da39a3ee5e6b4b0d3255bfef95601890afd80709");
|
||||
expect(result?.fileName).toBe("emptyfile.bin");
|
||||
});
|
||||
|
||||
it("ignores comment lines in hash manifests", () => {
|
||||
expect(parseHashLine("; This is a comment")).toBeNull();
|
||||
expect(parseHashLine("")).toBeNull();
|
||||
expect(parseHashLine(" ")).toBeNull();
|
||||
});
|
||||
|
||||
it("keeps first hash entry when duplicate filename appears across manifests", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-int-"));
|
||||
tempDirs.push(dir);
|
||||
|
||||
fs.writeFileSync(path.join(dir, "disc1.md5"), "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa movie.mkv\n", "utf8");
|
||||
fs.writeFileSync(path.join(dir, "disc2.md5"), "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb movie.mkv\n", "utf8");
|
||||
|
||||
const manifest = readHashManifest(dir);
|
||||
expect(manifest.get("movie.mkv")?.digest).toBe("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
|
||||
});
|
||||
});
|
||||
|
||||
74
tests/link-parser.test.ts
Normal file
74
tests/link-parser.test.ts
Normal file
@ -0,0 +1,74 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { mergePackageInputs, parseCollectorInput } from "../src/main/link-parser";
|
||||
|
||||
describe("link-parser", () => {
|
||||
describe("mergePackageInputs", () => {
|
||||
it("merges packages with the same name and preserves order", () => {
|
||||
const input = [
|
||||
{ name: "Package A", links: ["http://link1", "http://link2"] },
|
||||
{ name: "Package B", links: ["http://link3"] },
|
||||
{ name: "Package A", links: ["http://link4", "http://link1"] },
|
||||
{ name: "", links: ["http://link5"] } // empty name will be inferred
|
||||
];
|
||||
|
||||
const result = mergePackageInputs(input);
|
||||
|
||||
expect(result).toHaveLength(3); // Package A, Package B, and inferred 'Paket'
|
||||
|
||||
const pkgA = result.find(p => p.name === "Package A");
|
||||
expect(pkgA?.links).toEqual(["http://link1", "http://link2", "http://link4"]); // link1 deduplicated
|
||||
|
||||
const pkgB = result.find(p => p.name === "Package B");
|
||||
expect(pkgB?.links).toEqual(["http://link3"]);
|
||||
});
|
||||
|
||||
it("sanitizes names during merge", () => {
|
||||
const input = [
|
||||
{ name: "Valid_Name", links: ["http://link1"] },
|
||||
{ name: "Valid?Name*", links: ["http://link2"] }
|
||||
];
|
||||
|
||||
const result = mergePackageInputs(input);
|
||||
|
||||
// "Valid?Name*" becomes "Valid Name " -> trimmed to "Valid Name"
|
||||
expect(result.map(p => p.name).sort()).toEqual(["Valid Name", "Valid_Name"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseCollectorInput", () => {
|
||||
it("returns empty array for empty or invalid input", () => {
|
||||
expect(parseCollectorInput("")).toEqual([]);
|
||||
expect(parseCollectorInput("just some text without links")).toEqual([]);
|
||||
expect(parseCollectorInput("ftp://notsupported")).toEqual([]);
|
||||
});
|
||||
|
||||
it("parses and merges links from raw text", () => {
|
||||
const rawText = `
|
||||
Here are some links:
|
||||
http://example.com/part1.rar
|
||||
http://example.com/part2.rar
|
||||
|
||||
# package: Custom_Name
|
||||
http://other.com/file1
|
||||
http://other.com/file2
|
||||
`;
|
||||
|
||||
const result = parseCollectorInput(rawText, "DefaultFallback");
|
||||
|
||||
// Should have 2 packages: "DefaultFallback" and "Custom_Name"
|
||||
expect(result).toHaveLength(2);
|
||||
|
||||
const defaultPkg = result.find(p => p.name === "DefaultFallback");
|
||||
expect(defaultPkg?.links).toEqual([
|
||||
"http://example.com/part1.rar",
|
||||
"http://example.com/part2.rar"
|
||||
]);
|
||||
|
||||
const customPkg = result.find(p => p.name === "Custom_Name"); // sanitized!
|
||||
expect(customPkg?.links).toEqual([
|
||||
"http://other.com/file1",
|
||||
"http://other.com/file2"
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
178
tests/mega-web-fallback.test.ts
Normal file
178
tests/mega-web-fallback.test.ts
Normal file
@ -0,0 +1,178 @@
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { MegaWebFallback } from "../src/main/mega-web-fallback";
|
||||
|
||||
const originalFetch = globalThis.fetch;
|
||||
|
||||
describe("mega-web-fallback", () => {
|
||||
afterEach(() => {
|
||||
globalThis.fetch = originalFetch;
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe("MegaWebFallback class", () => {
|
||||
it("returns null when credentials are empty", async () => {
|
||||
const fallback = new MegaWebFallback(() => ({ login: "", password: "" }));
|
||||
const result = await fallback.unrestrict("https://mega.debrid/test");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("logs in, fetches HTML, parses code, and polls AJAX for direct url", async () => {
|
||||
let fetchCallCount = 0;
|
||||
globalThis.fetch = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
fetchCallCount += 1;
|
||||
|
||||
if (urlStr.includes("form=login")) {
|
||||
const headers = new Headers();
|
||||
headers.append("set-cookie", "session=goodcookie; path=/");
|
||||
return new Response("", { headers, status: 200 });
|
||||
}
|
||||
|
||||
if (urlStr.includes("page=debrideur")) {
|
||||
return new Response('<form id="debridForm"></form>', { status: 200 });
|
||||
}
|
||||
|
||||
if (urlStr.includes("form=debrid")) {
|
||||
// The POST to generate the code
|
||||
return new Response(`
|
||||
<div class="acp-box">
|
||||
<h3>Link: https://mega.debrid/link1</h3>
|
||||
<a href="javascript:processDebrid(1,'secretcode123',0)">Download</a>
|
||||
</div>
|
||||
`, { status: 200 });
|
||||
}
|
||||
|
||||
if (urlStr.includes("ajax=debrid")) {
|
||||
// Polling endpoint
|
||||
return new Response(JSON.stringify({ link: "https://mega.direct/123" }), { status: 200 });
|
||||
}
|
||||
|
||||
return new Response("Not found", { status: 404 });
|
||||
}) as unknown as typeof fetch;
|
||||
|
||||
const fallback = new MegaWebFallback(() => ({ login: "user", password: "pwd" }));
|
||||
|
||||
const result = await fallback.unrestrict("https://mega.debrid/link1");
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.directUrl).toBe("https://mega.direct/123");
|
||||
expect(result?.fileName).toBe("link1");
|
||||
// Calls: 1. Login POST, 2. Verify GET, 3. Generate POST, 4. Polling POST
|
||||
expect(fetchCallCount).toBe(4);
|
||||
});
|
||||
|
||||
it("throws if login fails to set cookie", async () => {
|
||||
globalThis.fetch = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("form=login")) {
|
||||
const headers = new Headers(); // No cookie
|
||||
return new Response("", { headers, status: 200 });
|
||||
}
|
||||
return new Response("Not found", { status: 404 });
|
||||
}) as unknown as typeof fetch;
|
||||
|
||||
const fallback = new MegaWebFallback(() => ({ login: "bad", password: "bad" }));
|
||||
|
||||
await expect(fallback.unrestrict("http://mega.debrid/file"))
|
||||
.rejects.toThrow("Mega-Web Login liefert kein Session-Cookie");
|
||||
});
|
||||
|
||||
it("throws if login verify check fails (no form found)", async () => {
|
||||
globalThis.fetch = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
if (urlStr.includes("form=login")) {
|
||||
const headers = new Headers();
|
||||
headers.append("set-cookie", "session=goodcookie; path=/");
|
||||
return new Response("", { headers, status: 200 });
|
||||
}
|
||||
if (urlStr.includes("page=debrideur")) {
|
||||
// Missing form!
|
||||
return new Response('<html><body>Nothing here</body></html>', { status: 200 });
|
||||
}
|
||||
return new Response("Not found", { status: 404 });
|
||||
}) as unknown as typeof fetch;
|
||||
|
||||
const fallback = new MegaWebFallback(() => ({ login: "a", password: "b" }));
|
||||
|
||||
await expect(fallback.unrestrict("http://mega.debrid/file"))
|
||||
.rejects.toThrow("Mega-Web Login ungültig oder Session blockiert");
|
||||
});
|
||||
|
||||
it("returns null if generation fails to find a code", async () => {
|
||||
let callCount = 0;
|
||||
globalThis.fetch = vi.fn(async (url: string | URL | Request) => {
|
||||
const urlStr = String(url);
|
||||
callCount++;
|
||||
if (urlStr.includes("form=login")) {
|
||||
const headers = new Headers();
|
||||
headers.append("set-cookie", "session=goodcookie; path=/");
|
||||
return new Response("", { headers, status: 200 });
|
||||
}
|
||||
if (urlStr.includes("page=debrideur")) {
|
||||
return new Response('<form id="debridForm"></form>', { status: 200 });
|
||||
}
|
||||
if (urlStr.includes("form=debrid")) {
|
||||
// The generate POST returns HTML without any codes
|
||||
return new Response(`<div>No links here</div>`, { status: 200 });
|
||||
}
|
||||
return new Response("Not found", { status: 404 });
|
||||
}) as unknown as typeof fetch;
|
||||
|
||||
const fallback = new MegaWebFallback(() => ({ login: "a", password: "b" }));
|
||||
const result = await fallback.unrestrict("http://mega.debrid/file");
|
||||
|
||||
// Generation fails -> resets cookie -> tries again -> fails again -> returns null
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("aborts pending Mega-Web polling when signal is cancelled", async () => {
|
||||
globalThis.fetch = vi.fn((url: string | URL | Request, init?: RequestInit): Promise<Response> => {
|
||||
const urlStr = String(url);
|
||||
|
||||
if (urlStr.includes("form=login")) {
|
||||
const headers = new Headers();
|
||||
headers.append("set-cookie", "session=goodcookie; path=/");
|
||||
return Promise.resolve(new Response("", { headers, status: 200 }));
|
||||
}
|
||||
|
||||
if (urlStr.includes("page=debrideur")) {
|
||||
return Promise.resolve(new Response('<form id="debridForm"></form>', { status: 200 }));
|
||||
}
|
||||
|
||||
if (urlStr.includes("form=debrid")) {
|
||||
return Promise.resolve(new Response(`
|
||||
<div class="acp-box">
|
||||
<h3>Link: https://mega.debrid/link2</h3>
|
||||
<a href="javascript:processDebrid(1,'secretcode456',0)">Download</a>
|
||||
</div>
|
||||
`, { status: 200 }));
|
||||
}
|
||||
|
||||
if (urlStr.includes("ajax=debrid")) {
|
||||
return new Promise<Response>((_resolve, reject) => {
|
||||
const signal = init?.signal;
|
||||
const onAbort = (): void => reject(new Error("aborted:ajax"));
|
||||
if (signal?.aborted) {
|
||||
onAbort();
|
||||
return;
|
||||
}
|
||||
signal?.addEventListener("abort", onAbort, { once: true });
|
||||
});
|
||||
}
|
||||
|
||||
return Promise.resolve(new Response("Not found", { status: 404 }));
|
||||
}) as unknown as typeof fetch;
|
||||
|
||||
const fallback = new MegaWebFallback(() => ({ login: "user", password: "pwd" }));
|
||||
const controller = new AbortController();
|
||||
const timer = setTimeout(() => {
|
||||
controller.abort("test");
|
||||
}, 200);
|
||||
|
||||
try {
|
||||
await expect(fallback.unrestrict("https://mega.debrid/link2", controller.signal)).rejects.toThrow(/aborted/i);
|
||||
} finally {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
42
tests/realdebrid.test.ts
Normal file
42
tests/realdebrid.test.ts
Normal file
@ -0,0 +1,42 @@
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { RealDebridClient } from "../src/main/realdebrid";
|
||||
|
||||
const originalFetch = globalThis.fetch;
|
||||
|
||||
afterEach(() => {
|
||||
globalThis.fetch = originalFetch;
|
||||
});
|
||||
|
||||
describe("realdebrid client", () => {
|
||||
it("returns a clear error when HTML is returned instead of JSON", async () => {
|
||||
globalThis.fetch = (async (): Promise<Response> => {
|
||||
return new Response("<html><title>Cloudflare</title></html>", {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/html" }
|
||||
});
|
||||
}) as typeof fetch;
|
||||
|
||||
const client = new RealDebridClient("rd-token");
|
||||
await expect(client.unrestrictLink("https://hoster.example/file/html")).rejects.toThrow(/html/i);
|
||||
});
|
||||
|
||||
it("does not leak raw response body on JSON parse errors", async () => {
|
||||
globalThis.fetch = (async (): Promise<Response> => {
|
||||
return new Response("<html>token=secret-should-not-leak</html>", {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}) as typeof fetch;
|
||||
|
||||
const client = new RealDebridClient("rd-token");
|
||||
try {
|
||||
await client.unrestrictLink("https://hoster.example/file/invalid-json");
|
||||
throw new Error("expected unrestrict to fail");
|
||||
} catch (error) {
|
||||
const text = String(error || "");
|
||||
expect(text.toLowerCase()).toContain("json");
|
||||
expect(text.toLowerCase()).not.toContain("secret-should-not-leak");
|
||||
expect(text.toLowerCase()).not.toContain("<html>");
|
||||
}
|
||||
});
|
||||
});
|
||||
188
tests/resolve-archive-items.test.ts
Normal file
188
tests/resolve-archive-items.test.ts
Normal file
@ -0,0 +1,188 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
|
||||
|
||||
type MinimalItem = {
|
||||
targetPath?: string;
|
||||
fileName?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
function makeItems(names: string[]): MinimalItem[] {
|
||||
return names.map((name) => ({
|
||||
targetPath: `C:\\Downloads\\Package\\${name}`,
|
||||
fileName: name,
|
||||
id: name,
|
||||
status: "completed",
|
||||
}));
|
||||
}
|
||||
|
||||
describe("resolveArchiveItemsFromList", () => {
|
||||
// ── Multipart RAR (.partN.rar) ──
|
||||
|
||||
it("matches multipart .part1.rar archives", () => {
|
||||
const items = makeItems([
|
||||
"Movie.part1.rar",
|
||||
"Movie.part2.rar",
|
||||
"Movie.part3.rar",
|
||||
"Other.rar",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result.map((i: any) => i.fileName)).toEqual([
|
||||
"Movie.part1.rar",
|
||||
"Movie.part2.rar",
|
||||
"Movie.part3.rar",
|
||||
]);
|
||||
});
|
||||
|
||||
it("matches multipart .part01.rar archives (zero-padded)", () => {
|
||||
const items = makeItems([
|
||||
"Film.part01.rar",
|
||||
"Film.part02.rar",
|
||||
"Film.part10.rar",
|
||||
"Unrelated.zip",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
|
||||
expect(result).toHaveLength(3);
|
||||
});
|
||||
|
||||
// ── Old-style RAR (.rar + .r00, .r01, etc.) ──
|
||||
|
||||
it("matches old-style .rar + .rNN volumes", () => {
|
||||
const items = makeItems([
|
||||
"Archive.rar",
|
||||
"Archive.r00",
|
||||
"Archive.r01",
|
||||
"Archive.r02",
|
||||
"Other.zip",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||
expect(result).toHaveLength(4);
|
||||
});
|
||||
|
||||
// ── Single RAR ──
|
||||
|
||||
it("matches a single .rar file", () => {
|
||||
const items = makeItems(["SingleFile.rar", "Other.mkv"]);
|
||||
const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
|
||||
expect(result).toHaveLength(1);
|
||||
expect((result[0] as any).fileName).toBe("SingleFile.rar");
|
||||
});
|
||||
|
||||
// ── Split ZIP ──
|
||||
|
||||
it("matches split .zip.NNN files", () => {
|
||||
const items = makeItems([
|
||||
"Data.zip",
|
||||
"Data.zip.001",
|
||||
"Data.zip.002",
|
||||
"Data.zip.003",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
|
||||
expect(result).toHaveLength(4);
|
||||
});
|
||||
|
||||
// ── Split 7z ──
|
||||
|
||||
it("matches split .7z.NNN files", () => {
|
||||
const items = makeItems([
|
||||
"Backup.7z.001",
|
||||
"Backup.7z.002",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
// ── Generic .NNN splits ──
|
||||
|
||||
it("matches generic .NNN split files", () => {
|
||||
const items = makeItems([
|
||||
"video.001",
|
||||
"video.002",
|
||||
"video.003",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("video.001", items as any);
|
||||
expect(result).toHaveLength(3);
|
||||
});
|
||||
|
||||
// ── Exact filename match ──
|
||||
|
||||
it("matches a single .zip by exact name", () => {
|
||||
const items = makeItems(["myarchive.zip", "other.rar"]);
|
||||
const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
|
||||
expect(result).toHaveLength(1);
|
||||
expect((result[0] as any).fileName).toBe("myarchive.zip");
|
||||
});
|
||||
|
||||
// ── Case insensitivity ──
|
||||
|
||||
it("matches case-insensitively", () => {
|
||||
const items = makeItems([
|
||||
"MOVIE.PART1.RAR",
|
||||
"MOVIE.PART2.RAR",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
// ── Stem-based fallback ──
|
||||
|
||||
it("uses stem-based fallback when exact patterns fail", () => {
|
||||
// Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
|
||||
// but the disk file is "Movie.part1.rar"
|
||||
const items = makeItems([
|
||||
"Movie.rar",
|
||||
]);
|
||||
// The archive on disk is "Movie.part1.rar" but there's no item matching the
|
||||
// .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
|
||||
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||
// stem fallback: "movie" starts with "movie" and ends with .rar
|
||||
expect(result).toHaveLength(1);
|
||||
});
|
||||
|
||||
// ── Single item fallback ──
|
||||
|
||||
it("returns single archive item when no pattern matches", () => {
|
||||
const items = makeItems(["totally-different-name.rar"]);
|
||||
const result = resolveArchiveItemsFromList("Original.rar", items as any);
|
||||
// Single item in list with archive extension → return it
|
||||
expect(result).toHaveLength(1);
|
||||
});
|
||||
|
||||
// ── Empty when no match ──
|
||||
|
||||
it("returns empty when items have no archive extensions", () => {
|
||||
const items = makeItems(["video.mkv", "subtitle.srt"]);
|
||||
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
// ── Items without targetPath ──
|
||||
|
||||
it("falls back to fileName when targetPath is missing", () => {
|
||||
const items = [
|
||||
{ fileName: "Movie.part1.rar", id: "1", status: "completed" },
|
||||
{ fileName: "Movie.part2.rar", id: "2", status: "completed" },
|
||||
];
|
||||
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
// ── Multiple archives, should not cross-match ──
|
||||
|
||||
it("does not cross-match different archive groups", () => {
|
||||
const items = makeItems([
|
||||
"Episode.S01E01.part1.rar",
|
||||
"Episode.S01E01.part2.rar",
|
||||
"Episode.S01E02.part1.rar",
|
||||
"Episode.S01E02.part2.rar",
|
||||
]);
|
||||
const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
|
||||
expect(result1).toHaveLength(2);
|
||||
expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);
|
||||
|
||||
const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
|
||||
expect(result2).toHaveLength(2);
|
||||
expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
|
||||
});
|
||||
});
|
||||
@ -153,7 +153,7 @@ async function main(): Promise<void> {
|
||||
createStoragePaths(path.join(tempRoot, "state-pause"))
|
||||
);
|
||||
manager2.addPackages([{ name: "pause", links: ["https://dummy/slow"] }]);
|
||||
manager2.start();
|
||||
await manager2.start();
|
||||
await new Promise((resolve) => setTimeout(resolve, 120));
|
||||
const paused = manager2.togglePause();
|
||||
assert(paused, "Pause konnte nicht aktiviert werden");
|
||||
@ -185,10 +185,17 @@ async function main(): Promise<void> {
|
||||
manager4.cancelPackage(pkgId);
|
||||
await waitFor(() => !manager4.getSnapshot().session.running || Object.values(manager4.getSnapshot().session.items).every((item) => item.status !== "downloading"), 15000);
|
||||
const cancelSnapshot = manager4.getSnapshot();
|
||||
const cancelItem = Object.values(cancelSnapshot.session.items)[0];
|
||||
const remainingItems = Object.values(cancelSnapshot.session.items);
|
||||
if (remainingItems.length === 0) {
|
||||
assert(cancelSnapshot.session.packageOrder.length === 0, "Abgebrochenes Paket wurde nicht entfernt");
|
||||
} else {
|
||||
const cancelItem = remainingItems[0];
|
||||
assert(cancelItem?.status === "cancelled" || cancelItem?.status === "queued", "Paketabbruch nicht wirksam");
|
||||
}
|
||||
const packageDir = path.join(path.join(tempRoot, "downloads-cancel"), "cancel");
|
||||
assert(!fs.existsSync(path.join(packageDir, "release.part1.rar")), "RAR-Artefakt wurde nicht gelöscht");
|
||||
const cancelArtifact = path.join(packageDir, "release.part1.rar");
|
||||
await waitFor(() => !fs.existsSync(cancelArtifact), 10000);
|
||||
assert(!fs.existsSync(cancelArtifact), "RAR-Artefakt wurde nicht gelöscht");
|
||||
|
||||
console.log("Node self-check erfolgreich");
|
||||
} finally {
|
||||
|
||||
163
tests/session-log.test.ts
Normal file
163
tests/session-log.test.ts
Normal file
@ -0,0 +1,163 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { initSessionLog, getSessionLogPath, shutdownSessionLog } from "../src/main/session-log";
|
||||
import { setLogListener } from "../src/main/logger";
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
|
||||
afterEach(() => {
|
||||
// Ensure session log is shut down between tests
|
||||
shutdownSessionLog();
|
||||
// Ensure listener is cleared between tests
|
||||
setLogListener(null);
|
||||
for (const dir of tempDirs.splice(0)) {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
describe("session-log", () => {
|
||||
it("initSessionLog creates directory and file", () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const logPath = getSessionLogPath();
|
||||
expect(logPath).not.toBeNull();
|
||||
expect(fs.existsSync(logPath!)).toBe(true);
|
||||
expect(fs.existsSync(path.join(baseDir, "session-logs"))).toBe(true);
|
||||
expect(path.basename(logPath!)).toMatch(/^session_\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}\.txt$/);
|
||||
|
||||
const content = fs.readFileSync(logPath!, "utf8");
|
||||
expect(content).toContain("=== Session gestartet:");
|
||||
|
||||
shutdownSessionLog();
|
||||
});
|
||||
|
||||
it("logger listener writes to session log", async () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const logPath = getSessionLogPath()!;
|
||||
|
||||
// Simulate a log line via the listener
|
||||
const { logger } = await import("../src/main/logger");
|
||||
logger.info("Test-Nachricht für Session-Log");
|
||||
|
||||
// Wait for flush (200ms interval + margin)
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
expect(content).toContain("Test-Nachricht für Session-Log");
|
||||
|
||||
shutdownSessionLog();
|
||||
});
|
||||
|
||||
it("shutdownSessionLog writes closing line", () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const logPath = getSessionLogPath()!;
|
||||
|
||||
shutdownSessionLog();
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
expect(content).toContain("=== Session beendet:");
|
||||
});
|
||||
|
||||
it("shutdownSessionLog removes listener", async () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const logPath = getSessionLogPath()!;
|
||||
|
||||
shutdownSessionLog();
|
||||
|
||||
// Log after shutdown - should NOT appear in session log
|
||||
const { logger } = await import("../src/main/logger");
|
||||
logger.info("Nach-Shutdown-Nachricht");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
expect(content).not.toContain("Nach-Shutdown-Nachricht");
|
||||
});
|
||||
|
||||
it("cleanupOldSessionLogs deletes old files", async () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
const logsDir = path.join(baseDir, "session-logs");
|
||||
fs.mkdirSync(logsDir, { recursive: true });
|
||||
|
||||
// Create a fake old session log
|
||||
const oldFile = path.join(logsDir, "session_2020-01-01_00-00-00.txt");
|
||||
fs.writeFileSync(oldFile, "old session");
|
||||
// Set mtime to 30 days ago
|
||||
const oldTime = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
|
||||
fs.utimesSync(oldFile, oldTime, oldTime);
|
||||
|
||||
// Create a recent file
|
||||
const newFile = path.join(logsDir, "session_2099-01-01_00-00-00.txt");
|
||||
fs.writeFileSync(newFile, "new session");
|
||||
|
||||
// initSessionLog triggers cleanup
|
||||
initSessionLog(baseDir);
|
||||
|
||||
// Wait for async cleanup
|
||||
await new Promise((resolve) => setTimeout(resolve, 300));
|
||||
|
||||
expect(fs.existsSync(oldFile)).toBe(false);
|
||||
expect(fs.existsSync(newFile)).toBe(true);
|
||||
|
||||
shutdownSessionLog();
|
||||
});
|
||||
|
||||
it("cleanupOldSessionLogs keeps recent files", async () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
const logsDir = path.join(baseDir, "session-logs");
|
||||
fs.mkdirSync(logsDir, { recursive: true });
|
||||
|
||||
// Create a file from 2 days ago (should be kept)
|
||||
const recentFile = path.join(logsDir, "session_2025-12-01_00-00-00.txt");
|
||||
fs.writeFileSync(recentFile, "recent session");
|
||||
const recentTime = new Date(Date.now() - 2 * 24 * 60 * 60 * 1000);
|
||||
fs.utimesSync(recentFile, recentTime, recentTime);
|
||||
|
||||
initSessionLog(baseDir);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 300));
|
||||
|
||||
expect(fs.existsSync(recentFile)).toBe(true);
|
||||
|
||||
shutdownSessionLog();
|
||||
});
|
||||
|
||||
it("multiple sessions create different files", async () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const path1 = getSessionLogPath();
|
||||
shutdownSessionLog();
|
||||
|
||||
// Small delay to ensure different timestamp
|
||||
await new Promise((resolve) => setTimeout(resolve, 1100));
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const path2 = getSessionLogPath();
|
||||
shutdownSessionLog();
|
||||
|
||||
expect(path1).not.toBeNull();
|
||||
expect(path2).not.toBeNull();
|
||||
expect(path1).not.toBe(path2);
|
||||
expect(fs.existsSync(path1!)).toBe(true);
|
||||
expect(fs.existsSync(path2!)).toBe(true);
|
||||
});
|
||||
});
|
||||
513
tests/storage.test.ts
Normal file
513
tests/storage.test.ts
Normal file
@ -0,0 +1,513 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { AppSettings } from "../src/shared/types";
|
||||
import { defaultSettings } from "../src/main/constants";
|
||||
import { createStoragePaths, emptySession, loadSession, loadSettings, normalizeSettings, saveSession, saveSessionAsync, saveSettings } from "../src/main/storage";
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
|
||||
afterEach(() => {
|
||||
for (const dir of tempDirs.splice(0)) {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
describe("settings storage", () => {
|
||||
it("does not persist provider credentials when rememberToken is disabled", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
saveSettings(paths, {
|
||||
...defaultSettings(),
|
||||
rememberToken: false,
|
||||
token: "rd-token",
|
||||
megaLogin: "mega-user",
|
||||
megaPassword: "mega-pass",
|
||||
bestToken: "best-token",
|
||||
allDebridToken: "all-token"
|
||||
});
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(paths.configFile, "utf8")) as Record<string, unknown>;
|
||||
expect(raw.token).toBe("");
|
||||
expect(raw.megaLogin).toBe("");
|
||||
expect(raw.megaPassword).toBe("");
|
||||
expect(raw.bestToken).toBe("");
|
||||
expect(raw.allDebridToken).toBe("");
|
||||
|
||||
const loaded = loadSettings(paths);
|
||||
expect(loaded.rememberToken).toBe(false);
|
||||
expect(loaded.token).toBe("");
|
||||
expect(loaded.megaLogin).toBe("");
|
||||
expect(loaded.megaPassword).toBe("");
|
||||
expect(loaded.bestToken).toBe("");
|
||||
expect(loaded.allDebridToken).toBe("");
|
||||
});
|
||||
|
||||
it("persists provider credentials when rememberToken is enabled", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
saveSettings(paths, {
|
||||
...defaultSettings(),
|
||||
rememberToken: true,
|
||||
token: "rd-token",
|
||||
megaLogin: "mega-user",
|
||||
megaPassword: "mega-pass",
|
||||
bestToken: "best-token",
|
||||
allDebridToken: "all-token"
|
||||
});
|
||||
|
||||
const loaded = loadSettings(paths);
|
||||
expect(loaded.token).toBe("rd-token");
|
||||
expect(loaded.megaLogin).toBe("mega-user");
|
||||
expect(loaded.megaPassword).toBe("mega-pass");
|
||||
expect(loaded.bestToken).toBe("best-token");
|
||||
expect(loaded.allDebridToken).toBe("all-token");
|
||||
});
|
||||
|
||||
it("normalizes invalid enum and numeric values", () => {
|
||||
const normalized = normalizeSettings({
|
||||
...defaultSettings(),
|
||||
providerPrimary: "invalid-provider" as unknown as AppSettings["providerPrimary"],
|
||||
providerSecondary: "invalid-provider" as unknown as AppSettings["providerSecondary"],
|
||||
providerTertiary: "invalid-provider" as unknown as AppSettings["providerTertiary"],
|
||||
cleanupMode: "broken" as unknown as AppSettings["cleanupMode"],
|
||||
extractConflictMode: "broken" as unknown as AppSettings["extractConflictMode"],
|
||||
completedCleanupPolicy: "broken" as unknown as AppSettings["completedCleanupPolicy"],
|
||||
speedLimitMode: "broken" as unknown as AppSettings["speedLimitMode"],
|
||||
maxParallel: 0,
|
||||
retryLimit: 999,
|
||||
reconnectWaitSeconds: 9999,
|
||||
speedLimitKbps: -1,
|
||||
outputDir: " ",
|
||||
extractDir: " ",
|
||||
mkvLibraryDir: " ",
|
||||
updateRepo: " "
|
||||
});
|
||||
|
||||
expect(normalized.providerPrimary).toBe("realdebrid");
|
||||
expect(normalized.providerSecondary).toBe("none");
|
||||
expect(normalized.providerTertiary).toBe("none");
|
||||
expect(normalized.cleanupMode).toBe("none");
|
||||
expect(normalized.extractConflictMode).toBe("overwrite");
|
||||
expect(normalized.completedCleanupPolicy).toBe("never");
|
||||
expect(normalized.speedLimitMode).toBe("global");
|
||||
expect(normalized.maxParallel).toBe(1);
|
||||
expect(normalized.retryLimit).toBe(99);
|
||||
expect(normalized.reconnectWaitSeconds).toBe(600);
|
||||
expect(normalized.speedLimitKbps).toBe(0);
|
||||
expect(normalized.outputDir).toBe(defaultSettings().outputDir);
|
||||
expect(normalized.extractDir).toBe(defaultSettings().extractDir);
|
||||
expect(normalized.mkvLibraryDir).toBe(defaultSettings().mkvLibraryDir);
|
||||
expect(normalized.updateRepo).toBe(defaultSettings().updateRepo);
|
||||
});
|
||||
|
||||
it("normalizes malformed persisted config on load", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
fs.writeFileSync(
|
||||
paths.configFile,
|
||||
JSON.stringify({
|
||||
providerPrimary: "not-valid",
|
||||
completedCleanupPolicy: "not-valid",
|
||||
maxParallel: "999",
|
||||
retryLimit: "-3",
|
||||
reconnectWaitSeconds: "1",
|
||||
speedLimitMode: "not-valid",
|
||||
updateRepo: ""
|
||||
}),
|
||||
"utf8"
|
||||
);
|
||||
|
||||
const loaded = loadSettings(paths);
|
||||
expect(loaded.providerPrimary).toBe("realdebrid");
|
||||
expect(loaded.completedCleanupPolicy).toBe("never");
|
||||
expect(loaded.maxParallel).toBe(50);
|
||||
expect(loaded.retryLimit).toBe(0);
|
||||
expect(loaded.reconnectWaitSeconds).toBe(10);
|
||||
expect(loaded.speedLimitMode).toBe("global");
|
||||
expect(loaded.updateRepo).toBe(defaultSettings().updateRepo);
|
||||
});
|
||||
|
||||
it("keeps explicit none as fallback provider choice", () => {
|
||||
const normalized = normalizeSettings({
|
||||
...defaultSettings(),
|
||||
providerSecondary: "none",
|
||||
providerTertiary: "none"
|
||||
});
|
||||
|
||||
expect(normalized.providerSecondary).toBe("none");
|
||||
expect(normalized.providerTertiary).toBe("none");
|
||||
});
|
||||
|
||||
it("normalizes archive password list line endings", () => {
|
||||
const normalized = normalizeSettings({
|
||||
...defaultSettings(),
|
||||
archivePasswordList: "one\r\ntwo\r\nthree"
|
||||
});
|
||||
|
||||
expect(normalized.archivePasswordList).toBe("one\ntwo\nthree");
|
||||
});
|
||||
|
||||
it("assigns and preserves bandwidth schedule ids", () => {
|
||||
const normalized = normalizeSettings({
|
||||
...defaultSettings(),
|
||||
bandwidthSchedules: [{ id: "", startHour: 1, endHour: 6, speedLimitKbps: 1024, enabled: true }]
|
||||
});
|
||||
|
||||
const generatedId = normalized.bandwidthSchedules[0]?.id;
|
||||
expect(typeof generatedId).toBe("string");
|
||||
expect(generatedId?.length).toBeGreaterThan(0);
|
||||
|
||||
const normalizedAgain = normalizeSettings({
|
||||
...defaultSettings(),
|
||||
bandwidthSchedules: normalized.bandwidthSchedules
|
||||
});
|
||||
expect(normalizedAgain.bandwidthSchedules[0]?.id).toBe(generatedId);
|
||||
});
|
||||
|
||||
it("resets stale active statuses to queued on session load", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
const session = emptySession();
|
||||
session.packages["pkg1"] = {
|
||||
id: "pkg1",
|
||||
name: "Test Package",
|
||||
outputDir: "/tmp/out",
|
||||
extractDir: "/tmp/extract",
|
||||
status: "downloading",
|
||||
itemIds: ["item1", "item2", "item3", "item4"],
|
||||
cancelled: false,
|
||||
enabled: true,
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
session.items["item1"] = {
|
||||
id: "item1",
|
||||
packageId: "pkg1",
|
||||
url: "https://example.com/file1.rar",
|
||||
provider: null,
|
||||
status: "downloading",
|
||||
retries: 0,
|
||||
speedBps: 1024,
|
||||
downloadedBytes: 5000,
|
||||
totalBytes: 10000,
|
||||
progressPercent: 50,
|
||||
fileName: "file1.rar",
|
||||
targetPath: "/tmp/out/file1.rar",
|
||||
resumable: true,
|
||||
attempts: 1,
|
||||
lastError: "some error",
|
||||
fullStatus: "",
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
session.items["item2"] = {
|
||||
id: "item2",
|
||||
packageId: "pkg1",
|
||||
url: "https://example.com/file2.rar",
|
||||
provider: null,
|
||||
status: "paused",
|
||||
retries: 0,
|
||||
speedBps: 0,
|
||||
downloadedBytes: 0,
|
||||
totalBytes: null,
|
||||
progressPercent: 0,
|
||||
fileName: "file2.rar",
|
||||
targetPath: "/tmp/out/file2.rar",
|
||||
resumable: false,
|
||||
attempts: 0,
|
||||
lastError: "",
|
||||
fullStatus: "",
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
session.items["item3"] = {
|
||||
id: "item3",
|
||||
packageId: "pkg1",
|
||||
url: "https://example.com/file3.rar",
|
||||
provider: null,
|
||||
status: "completed",
|
||||
retries: 0,
|
||||
speedBps: 0,
|
||||
downloadedBytes: 10000,
|
||||
totalBytes: 10000,
|
||||
progressPercent: 100,
|
||||
fileName: "file3.rar",
|
||||
targetPath: "/tmp/out/file3.rar",
|
||||
resumable: false,
|
||||
attempts: 1,
|
||||
lastError: "",
|
||||
fullStatus: "",
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
session.items["item4"] = {
|
||||
id: "item4",
|
||||
packageId: "pkg1",
|
||||
url: "https://example.com/file4.rar",
|
||||
provider: null,
|
||||
status: "queued",
|
||||
retries: 0,
|
||||
speedBps: 0,
|
||||
downloadedBytes: 0,
|
||||
totalBytes: null,
|
||||
progressPercent: 0,
|
||||
fileName: "file4.rar",
|
||||
targetPath: "/tmp/out/file4.rar",
|
||||
resumable: false,
|
||||
attempts: 0,
|
||||
lastError: "",
|
||||
fullStatus: "",
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
|
||||
saveSession(paths, session);
|
||||
const loaded = loadSession(paths);
|
||||
|
||||
// Active statuses (downloading, paused) should be reset to "queued"
|
||||
expect(loaded.items["item1"].status).toBe("queued");
|
||||
expect(loaded.items["item2"].status).toBe("queued");
|
||||
// Speed should be cleared
|
||||
expect(loaded.items["item1"].speedBps).toBe(0);
|
||||
// lastError should be cleared for reset items
|
||||
expect(loaded.items["item1"].lastError).toBe("");
|
||||
// Completed and queued statuses should be preserved
|
||||
expect(loaded.items["item3"].status).toBe("completed");
|
||||
expect(loaded.items["item4"].status).toBe("queued");
|
||||
// Downloaded bytes should be preserved
|
||||
expect(loaded.items["item1"].downloadedBytes).toBe(5000);
|
||||
// Package data should be preserved
|
||||
expect(loaded.packages["pkg1"].name).toBe("Test Package");
|
||||
});
|
||||
|
||||
it("returns empty session when session file contains invalid JSON", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
fs.writeFileSync(paths.sessionFile, "{{{corrupted json!!!", "utf8");
|
||||
|
||||
const loaded = loadSession(paths);
|
||||
const empty = emptySession();
|
||||
expect(loaded.packages).toEqual(empty.packages);
|
||||
expect(loaded.items).toEqual(empty.items);
|
||||
expect(loaded.packageOrder).toEqual(empty.packageOrder);
|
||||
});
|
||||
|
||||
it("loads backup session when primary session is corrupted", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
const backupSession = emptySession();
|
||||
backupSession.packageOrder = ["pkg-backup"];
|
||||
backupSession.packages["pkg-backup"] = {
|
||||
id: "pkg-backup",
|
||||
name: "Backup Package",
|
||||
outputDir: path.join(dir, "out"),
|
||||
extractDir: path.join(dir, "extract"),
|
||||
status: "queued",
|
||||
itemIds: ["item-backup"],
|
||||
cancelled: false,
|
||||
enabled: true,
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
backupSession.items["item-backup"] = {
|
||||
id: "item-backup",
|
||||
packageId: "pkg-backup",
|
||||
url: "https://example.com/backup-file",
|
||||
provider: null,
|
||||
status: "queued",
|
||||
retries: 0,
|
||||
speedBps: 0,
|
||||
downloadedBytes: 0,
|
||||
totalBytes: null,
|
||||
progressPercent: 0,
|
||||
fileName: "backup-file.rar",
|
||||
targetPath: path.join(dir, "out", "backup-file.rar"),
|
||||
resumable: true,
|
||||
attempts: 0,
|
||||
lastError: "",
|
||||
fullStatus: "Wartet",
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
};
|
||||
|
||||
fs.writeFileSync(`${paths.sessionFile}.bak`, JSON.stringify(backupSession), "utf8");
|
||||
fs.writeFileSync(paths.sessionFile, "{broken-session-json", "utf8");
|
||||
|
||||
const loaded = loadSession(paths);
|
||||
expect(loaded.packageOrder).toEqual(["pkg-backup"]);
|
||||
expect(loaded.packages["pkg-backup"]?.name).toBe("Backup Package");
|
||||
expect(loaded.items["item-backup"]?.fileName).toBe("backup-file.rar");
|
||||
|
||||
const restoredPrimary = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as { packages?: Record<string, unknown> };
|
||||
expect(restoredPrimary.packages && "pkg-backup" in restoredPrimary.packages).toBe(true);
|
||||
});
|
||||
|
||||
it("returns defaults when config file contains invalid JSON", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
// Write invalid JSON to the config file
|
||||
fs.writeFileSync(paths.configFile, "{{{{not valid json!!!}", "utf8");
|
||||
|
||||
const loaded = loadSettings(paths);
|
||||
const defaults = defaultSettings();
|
||||
expect(loaded.providerPrimary).toBe(defaults.providerPrimary);
|
||||
expect(loaded.maxParallel).toBe(defaults.maxParallel);
|
||||
expect(loaded.retryLimit).toBe(defaults.retryLimit);
|
||||
expect(loaded.outputDir).toBe(defaults.outputDir);
|
||||
expect(loaded.cleanupMode).toBe(defaults.cleanupMode);
|
||||
});
|
||||
|
||||
it("loads backup config when primary config is corrupted", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
const backupSettings = {
|
||||
...defaultSettings(),
|
||||
outputDir: path.join(dir, "backup-output"),
|
||||
packageName: "from-backup"
|
||||
};
|
||||
fs.writeFileSync(`${paths.configFile}.bak`, JSON.stringify(backupSettings, null, 2), "utf8");
|
||||
fs.writeFileSync(paths.configFile, "{broken-json", "utf8");
|
||||
|
||||
const loaded = loadSettings(paths);
|
||||
expect(loaded.outputDir).toBe(backupSettings.outputDir);
|
||||
expect(loaded.packageName).toBe("from-backup");
|
||||
});
|
||||
|
||||
it("sanitizes malformed persisted session structures", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
fs.writeFileSync(paths.sessionFile, JSON.stringify({
|
||||
version: "invalid",
|
||||
packageOrder: [123, "pkg-valid"],
|
||||
packages: {
|
||||
"1": "bad-entry",
|
||||
"pkg-valid": {
|
||||
id: "pkg-valid",
|
||||
name: "Valid Package",
|
||||
outputDir: "C:/tmp/out",
|
||||
extractDir: "C:/tmp/extract",
|
||||
status: "downloading",
|
||||
itemIds: ["item-valid", 123],
|
||||
cancelled: false,
|
||||
enabled: true
|
||||
}
|
||||
},
|
||||
items: {
|
||||
"item-valid": {
|
||||
id: "item-valid",
|
||||
packageId: "pkg-valid",
|
||||
url: "https://example.com/file",
|
||||
status: "queued",
|
||||
fileName: "file.bin",
|
||||
targetPath: "C:/tmp/out/file.bin"
|
||||
},
|
||||
"item-bad": "broken"
|
||||
}
|
||||
}), "utf8");
|
||||
|
||||
const loaded = loadSession(paths);
|
||||
expect(Object.keys(loaded.packages)).toEqual(["pkg-valid"]);
|
||||
expect(Object.keys(loaded.items)).toEqual(["item-valid"]);
|
||||
expect(loaded.packageOrder).toEqual(["pkg-valid"]);
|
||||
});
|
||||
|
||||
it("captures async session save payload before later mutations", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
const session = emptySession();
|
||||
session.summaryText = "before-mutation";
|
||||
|
||||
const pending = saveSessionAsync(paths, session);
|
||||
session.summaryText = "after-mutation";
|
||||
await pending;
|
||||
|
||||
const persisted = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as { summaryText: string };
|
||||
expect(persisted.summaryText).toBe("before-mutation");
|
||||
});
|
||||
|
||||
it("creates session backup before sync and async session overwrites", async () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
const first = emptySession();
|
||||
first.summaryText = "first";
|
||||
saveSession(paths, first);
|
||||
|
||||
const second = emptySession();
|
||||
second.summaryText = "second";
|
||||
saveSession(paths, second);
|
||||
|
||||
const backupAfterSync = JSON.parse(fs.readFileSync(`${paths.sessionFile}.bak`, "utf8")) as { summaryText?: string };
|
||||
expect(backupAfterSync.summaryText).toBe("first");
|
||||
|
||||
const third = emptySession();
|
||||
third.summaryText = "third";
|
||||
await saveSessionAsync(paths, third);
|
||||
|
||||
const backupAfterAsync = JSON.parse(fs.readFileSync(`${paths.sessionFile}.bak`, "utf8")) as { summaryText?: string };
|
||||
const primaryAfterAsync = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as { summaryText?: string };
|
||||
expect(backupAfterAsync.summaryText).toBe("second");
|
||||
expect(primaryAfterAsync.summaryText).toBe("third");
|
||||
});
|
||||
|
||||
it("applies defaults for missing fields when loading old config", () => {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
|
||||
tempDirs.push(dir);
|
||||
const paths = createStoragePaths(dir);
|
||||
|
||||
// Write a minimal config that simulates an old version missing newer fields
|
||||
fs.writeFileSync(
|
||||
paths.configFile,
|
||||
JSON.stringify({
|
||||
token: "my-token",
|
||||
rememberToken: true,
|
||||
outputDir: "/custom/output"
|
||||
}),
|
||||
"utf8"
|
||||
);
|
||||
|
||||
const loaded = loadSettings(paths);
|
||||
const defaults = defaultSettings();
|
||||
|
||||
// Old fields should be preserved
|
||||
expect(loaded.token).toBe("my-token");
|
||||
expect(loaded.outputDir).toBe(path.resolve("/custom/output"));
|
||||
|
||||
// Missing new fields should get default values
|
||||
expect(loaded.autoProviderFallback).toBe(defaults.autoProviderFallback);
|
||||
expect(loaded.hybridExtract).toBe(defaults.hybridExtract);
|
||||
expect(loaded.completedCleanupPolicy).toBe(defaults.completedCleanupPolicy);
|
||||
expect(loaded.speedLimitMode).toBe(defaults.speedLimitMode);
|
||||
expect(loaded.clipboardWatch).toBe(defaults.clipboardWatch);
|
||||
expect(loaded.minimizeToTray).toBe(defaults.minimizeToTray);
|
||||
expect(loaded.retryLimit).toBe(defaults.retryLimit);
|
||||
expect(loaded.collectMkvToLibrary).toBe(defaults.collectMkvToLibrary);
|
||||
expect(loaded.mkvLibraryDir).toBe(defaults.mkvLibraryDir);
|
||||
expect(loaded.theme).toBe(defaults.theme);
|
||||
expect(loaded.bandwidthSchedules).toEqual(defaults.bandwidthSchedules);
|
||||
expect(loaded.updateRepo).toBe(defaults.updateRepo);
|
||||
});
|
||||
});
|
||||
567
tests/update.test.ts
Normal file
567
tests/update.test.ts
Normal file
@ -0,0 +1,567 @@
import fs from "node:fs";
import crypto from "node:crypto";
import { afterEach, describe, expect, it, vi } from "vitest";
import { checkGitHubUpdate, installLatestUpdate, isRemoteNewer, normalizeUpdateRepo, parseVersionParts } from "../src/main/update";
import { APP_VERSION } from "../src/main/constants";
import { UpdateCheckResult, UpdateInstallProgress } from "../src/shared/types";

const originalFetch = globalThis.fetch;

function sha256Hex(buffer: Buffer): string {
return crypto.createHash("sha256").update(buffer).digest("hex");
}

function sha512Hex(buffer: Buffer): string {
return crypto.createHash("sha512").update(buffer).digest("hex");
}

afterEach(() => {
globalThis.fetch = originalFetch;
vi.restoreAllMocks();
});

describe("update", () => {
it("normalizes update repo input", () => {
expect(normalizeUpdateRepo("")).toBe("Administrator/real-debrid-downloader");
expect(normalizeUpdateRepo("owner/repo")).toBe("owner/repo");
expect(normalizeUpdateRepo("https://codeberg.org/owner/repo")).toBe("owner/repo");
expect(normalizeUpdateRepo("https://www.codeberg.org/owner/repo")).toBe("owner/repo");
expect(normalizeUpdateRepo("https://codeberg.org/owner/repo/releases/tag/v1.2.3")).toBe("owner/repo");
expect(normalizeUpdateRepo("codeberg.org/owner/repo.git")).toBe("owner/repo");
expect(normalizeUpdateRepo("git@codeberg.org:owner/repo.git")).toBe("owner/repo");
});

it("uses normalized repo slug for API requests", async () => {
let requestedUrl = "";
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
requestedUrl = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
return new Response(
JSON.stringify({
tag_name: `v${APP_VERSION}`,
html_url: "https://git.24-music.de/owner/repo/releases/tag/v1.0.0",
assets: []
}),
{
status: 200,
headers: { "Content-Type": "application/json" }
}
);
}) as typeof fetch;

const result = await checkGitHubUpdate("https://git.24-music.de/owner/repo/releases");
expect(requestedUrl).toBe("https://git.24-music.de/api/v1/repos/owner/repo/releases/latest");
expect(result.currentVersion).toBe(APP_VERSION);
expect(result.latestVersion).toBe(APP_VERSION);
expect(result.updateAvailable).toBe(false);
});

it("picks setup executable asset from release list", async () => {
|
||||
globalThis.fetch = (async (): Promise<Response> => new Response(
|
||||
JSON.stringify({
|
||||
tag_name: "v9.9.9",
|
||||
html_url: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
assets: [
|
||||
{
|
||||
name: "Real-Debrid-Downloader 9.9.9.exe",
|
||||
browser_download_url: "https://example.invalid/portable.exe"
|
||||
},
|
||||
{
|
||||
name: "Real-Debrid-Downloader Setup 9.9.9.exe",
|
||||
browser_download_url: "https://example.invalid/setup.exe",
|
||||
digest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
|
||||
}
|
||||
]
|
||||
}),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
}
|
||||
)) as typeof fetch;
|
||||
|
||||
const result = await checkGitHubUpdate("owner/repo");
|
||||
expect(result.updateAvailable).toBe(true);
|
||||
expect(result.setupAssetUrl).toBe("https://example.invalid/setup.exe");
|
||||
expect(result.setupAssetName).toBe("Real-Debrid-Downloader Setup 9.9.9.exe");
|
||||
});
|
||||
|
||||
it("falls back to alternate download URL when setup asset URL returns 404", async () => {
|
||||
const executablePayload = fs.readFileSync(process.execPath);
|
||||
const executableDigest = sha256Hex(executablePayload);
|
||||
const requestedUrls: string[] = [];
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
requestedUrls.push(url);
|
||||
|
||||
if (url.includes("stale-setup.exe")) {
|
||||
return new Response("missing", { status: 404 });
|
||||
}
|
||||
if (url.includes("/releases/download/v9.9.9/")) {
|
||||
return new Response(executablePayload, {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/octet-stream" }
|
||||
});
|
||||
}
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "https://example.invalid/stale-setup.exe",
|
||||
setupAssetName: "Real-Debrid-Downloader Setup 9.9.9.exe",
|
||||
setupAssetDigest: `sha256:${executableDigest}`
|
||||
};
|
||||
|
||||
const result = await installLatestUpdate("owner/repo", prechecked);
|
||||
expect(result.started).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.includes("/releases/download/v9.9.9/"))).toBe(true);
|
||||
expect(requestedUrls.filter((url) => url.includes("stale-setup.exe"))).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("skips draft tag payload and resolves setup asset from stable latest release", async () => {
|
||||
const executablePayload = fs.readFileSync(process.execPath);
|
||||
const requestedUrls: string[] = [];
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
requestedUrls.push(url);
|
||||
|
||||
if (url.endsWith("/releases/tags/v9.9.9")) {
|
||||
return new Response(JSON.stringify({
|
||||
tag_name: "v9.9.9",
|
||||
draft: true,
|
||||
prerelease: false,
|
||||
assets: [
|
||||
{
|
||||
name: "Draft Setup 9.9.9.exe",
|
||||
browser_download_url: "https://example.invalid/draft-setup.exe"
|
||||
}
|
||||
]
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
|
||||
if (url.endsWith("/releases/latest")) {
|
||||
const stableDigest = sha256Hex(executablePayload);
|
||||
return new Response(JSON.stringify({
|
||||
tag_name: "v9.9.9",
|
||||
draft: false,
|
||||
prerelease: false,
|
||||
assets: [
|
||||
{
|
||||
name: "Stable Setup 9.9.9.exe",
|
||||
browser_download_url: "https://example.invalid/stable-setup.exe",
|
||||
digest: `sha256:${stableDigest}`
|
||||
}
|
||||
]
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
|
||||
if (url.includes("stable-setup.exe")) {
|
||||
return new Response(executablePayload, {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/octet-stream" }
|
||||
});
|
||||
}
|
||||
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "",
|
||||
setupAssetName: ""
|
||||
};
|
||||
|
||||
const result = await installLatestUpdate("owner/repo", prechecked);
|
||||
expect(result.started).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.endsWith("/releases/tags/v9.9.9"))).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.endsWith("/releases/latest"))).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.includes("stable-setup.exe"))).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.includes("draft-setup.exe"))).toBe(false);
|
||||
});
|
||||
|
||||
it("times out hanging release JSON body reads", async () => {
|
||||
vi.useFakeTimers();
|
||||
try {
|
||||
const cancelSpy = vi.fn(async () => undefined);
|
||||
globalThis.fetch = (async (): Promise<Response> => ({
|
||||
ok: true,
|
||||
status: 200,
|
||||
headers: new Headers({ "Content-Type": "application/json" }),
|
||||
json: () => new Promise(() => undefined),
|
||||
body: {
|
||||
cancel: cancelSpy
|
||||
}
|
||||
} as unknown as Response)) as typeof fetch;
|
||||
|
||||
const pending = checkGitHubUpdate("owner/repo");
|
||||
await vi.advanceTimersByTimeAsync(13000);
|
||||
const result = await pending;
|
||||
expect(result.updateAvailable).toBe(false);
|
||||
expect(String(result.error || "")).toMatch(/timeout/i);
|
||||
expect(cancelSpy).toHaveBeenCalledTimes(1);
|
||||
} finally {
|
||||
vi.useRealTimers();
|
||||
}
|
||||
});
|
||||
|
||||
it("aborts hanging update body downloads on idle timeout", async () => {
|
||||
const previousTimeout = process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS;
|
||||
process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS = "1000";
|
||||
|
||||
try {
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("hang-setup.exe")) {
|
||||
const body = new ReadableStream<Uint8Array>({
|
||||
start(controller) {
|
||||
controller.enqueue(new Uint8Array([1, 2, 3]));
|
||||
}
|
||||
});
|
||||
return new Response(body, {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/octet-stream" }
|
||||
});
|
||||
}
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "https://example.invalid/hang-setup.exe",
|
||||
setupAssetName: "",
|
||||
setupAssetDigest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
|
||||
};
|
||||
|
||||
const result = await installLatestUpdate("owner/repo", prechecked);
|
||||
expect(result.started).toBe(false);
|
||||
expect(result.message).toMatch(/timeout/i);
|
||||
} finally {
|
||||
if (previousTimeout === undefined) {
|
||||
delete process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS;
|
||||
} else {
|
||||
process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS = previousTimeout;
|
||||
}
|
||||
}
|
||||
}, 20000);
|
||||
|
||||
it("blocks installer start when SHA256 digest mismatches", async () => {
|
||||
const executablePayload = fs.readFileSync(process.execPath);
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("mismatch-setup.exe")) {
|
||||
return new Response(executablePayload, {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/octet-stream" }
|
||||
});
|
||||
}
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "https://example.invalid/mismatch-setup.exe",
|
||||
setupAssetName: "setup.exe",
|
||||
setupAssetDigest: "sha256:1111111111111111111111111111111111111111111111111111111111111111"
|
||||
};
|
||||
|
||||
const result = await installLatestUpdate("owner/repo", prechecked);
|
||||
expect(result.started).toBe(false);
|
||||
expect(result.message).toMatch(/integrit|sha256|mismatch/i);
|
||||
});
|
||||
|
||||
it("uses latest.yml SHA512 digest when API asset digest is missing", async () => {
|
||||
const executablePayload = fs.readFileSync(process.execPath);
|
||||
const digestSha512Hex = sha512Hex(executablePayload);
|
||||
const digestSha512Base64 = Buffer.from(digestSha512Hex, "hex").toString("base64");
|
||||
const requestedUrls: string[] = [];
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
requestedUrls.push(url);
|
||||
|
||||
if (url.endsWith("/releases/tags/v9.9.9")) {
|
||||
return new Response(JSON.stringify({
|
||||
tag_name: "v9.9.9",
|
||||
draft: false,
|
||||
prerelease: false,
|
||||
assets: [
|
||||
{
|
||||
name: "Real-Debrid-Downloader Setup 9.9.9.exe",
|
||||
browser_download_url: "https://example.invalid/setup-no-digest.exe"
|
||||
},
|
||||
{
|
||||
name: "latest.yml",
|
||||
browser_download_url: "https://example.invalid/latest.yml"
|
||||
}
|
||||
]
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
|
||||
if (url.includes("latest.yml")) {
|
||||
return new Response(
|
||||
`version: 9.9.9\npath: Real-Debrid-Downloader-Setup-9.9.9.exe\nsha512: ${digestSha512Base64}\n`,
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/yaml" }
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
if (url.includes("setup-no-digest.exe")) {
|
||||
return new Response(executablePayload, {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/octet-stream",
|
||||
"Content-Length": String(executablePayload.length)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "https://example.invalid/setup-no-digest.exe",
|
||||
setupAssetName: "Real-Debrid-Downloader Setup 9.9.9.exe",
|
||||
setupAssetDigest: ""
|
||||
};
|
||||
|
||||
const result = await installLatestUpdate("owner/repo", prechecked);
|
||||
expect(result.started).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.endsWith("/releases/tags/v9.9.9"))).toBe(true);
|
||||
expect(requestedUrls.some((url) => url.includes("latest.yml"))).toBe(true);
|
||||
});
|
||||
|
||||
it("rejects installer when latest.yml SHA512 digest does not match", async () => {
|
||||
const executablePayload = fs.readFileSync(process.execPath);
|
||||
const wrongDigestBase64 = Buffer.alloc(64, 0x13).toString("base64");
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
|
||||
if (url.endsWith("/releases/tags/v9.9.9")) {
|
||||
return new Response(JSON.stringify({
|
||||
tag_name: "v9.9.9",
|
||||
draft: false,
|
||||
prerelease: false,
|
||||
assets: [
|
||||
{
|
||||
name: "Real-Debrid-Downloader Setup 9.9.9.exe",
|
||||
browser_download_url: "https://example.invalid/setup-no-digest.exe"
|
||||
},
|
||||
{
|
||||
name: "latest.yml",
|
||||
browser_download_url: "https://example.invalid/latest.yml"
|
||||
}
|
||||
]
|
||||
}), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" }
|
||||
});
|
||||
}
|
||||
|
||||
if (url.includes("latest.yml")) {
|
||||
return new Response(
|
||||
`version: 9.9.9\npath: Real-Debrid-Downloader Setup 9.9.9.exe\nsha512: ${wrongDigestBase64}\n`,
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "text/yaml" }
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
if (url.includes("setup-no-digest.exe")) {
|
||||
return new Response(executablePayload, {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/octet-stream",
|
||||
"Content-Length": String(executablePayload.length)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "https://example.invalid/setup-no-digest.exe",
|
||||
setupAssetName: "Real-Debrid-Downloader Setup 9.9.9.exe",
|
||||
setupAssetDigest: ""
|
||||
};
|
||||
|
||||
const result = await installLatestUpdate("owner/repo", prechecked);
|
||||
expect(result.started).toBe(false);
|
||||
expect(result.message).toMatch(/sha512|integrit|mismatch/i);
|
||||
});
|
||||
|
||||
it("emits install progress events while downloading and launching update", async () => {
|
||||
const executablePayload = fs.readFileSync(process.execPath);
|
||||
const digest = sha256Hex(executablePayload);
|
||||
|
||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||
if (url.includes("progress-setup.exe")) {
|
||||
return new Response(executablePayload, {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/octet-stream",
|
||||
"Content-Length": String(executablePayload.length)
|
||||
}
|
||||
});
|
||||
}
|
||||
return new Response("missing", { status: 404 });
|
||||
}) as typeof fetch;
|
||||
|
||||
const prechecked: UpdateCheckResult = {
|
||||
updateAvailable: true,
|
||||
currentVersion: APP_VERSION,
|
||||
latestVersion: "9.9.9",
|
||||
latestTag: "v9.9.9",
|
||||
releaseUrl: "https://codeberg.org/owner/repo/releases/tag/v9.9.9",
|
||||
setupAssetUrl: "https://example.invalid/progress-setup.exe",
|
||||
setupAssetName: "setup.exe",
|
||||
setupAssetDigest: `sha256:${digest}`
|
||||
};
|
||||
|
||||
const progressEvents: UpdateInstallProgress[] = [];
|
||||
const result = await installLatestUpdate("owner/repo", prechecked, (progress) => {
|
||||
progressEvents.push(progress);
|
||||
});
|
||||
|
||||
expect(result.started).toBe(true);
|
||||
expect(progressEvents.some((entry) => entry.stage === "starting")).toBe(true);
|
||||
expect(progressEvents.some((entry) => entry.stage === "downloading")).toBe(true);
|
||||
expect(progressEvents.some((entry) => entry.stage === "verifying")).toBe(true);
|
||||
expect(progressEvents.some((entry) => entry.stage === "launching")).toBe(true);
|
||||
expect(progressEvents.some((entry) => entry.stage === "done")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeUpdateRepo extended", () => {
|
||||
it("handles trailing slashes and extra path segments", () => {
|
||||
expect(normalizeUpdateRepo("owner/repo/")).toBe("owner/repo");
|
||||
expect(normalizeUpdateRepo("/owner/repo/")).toBe("owner/repo");
|
||||
expect(normalizeUpdateRepo("https://codeberg.org/owner/repo/tree/main/src")).toBe("owner/repo");
|
||||
});
|
||||
|
||||
it("handles ssh-style git URLs", () => {
|
||||
expect(normalizeUpdateRepo("git@codeberg.org:user/project.git")).toBe("user/project");
|
||||
});
|
||||
|
||||
it("returns default for malformed inputs", () => {
|
||||
expect(normalizeUpdateRepo("just-one-part")).toBe("Administrator/real-debrid-downloader");
|
||||
expect(normalizeUpdateRepo(" ")).toBe("Administrator/real-debrid-downloader");
|
||||
});
|
||||
|
||||
it("rejects traversal-like owner or repo segments", () => {
|
||||
expect(normalizeUpdateRepo("../owner/repo")).toBe("Administrator/real-debrid-downloader");
|
||||
expect(normalizeUpdateRepo("owner/../repo")).toBe("Administrator/real-debrid-downloader");
|
||||
expect(normalizeUpdateRepo("https://codeberg.org/owner/../../repo")).toBe("Administrator/real-debrid-downloader");
|
||||
});
|
||||
|
||||
it("handles www prefix", () => {
|
||||
expect(normalizeUpdateRepo("https://www.codeberg.org/owner/repo")).toBe("owner/repo");
|
||||
expect(normalizeUpdateRepo("www.codeberg.org/owner/repo")).toBe("owner/repo");
|
||||
});
|
||||
});
|
||||
|
||||
describe("isRemoteNewer", () => {
|
||||
it("detects newer major version", () => {
|
||||
expect(isRemoteNewer("1.0.0", "2.0.0")).toBe(true);
|
||||
});
|
||||
|
||||
it("detects newer minor version", () => {
|
||||
expect(isRemoteNewer("1.2.0", "1.3.0")).toBe(true);
|
||||
});
|
||||
|
||||
it("detects newer patch version", () => {
|
||||
expect(isRemoteNewer("1.2.3", "1.2.4")).toBe(true);
|
||||
});
|
||||
|
||||
it("returns false for same version", () => {
|
||||
expect(isRemoteNewer("1.2.3", "1.2.3")).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false for older version", () => {
|
||||
expect(isRemoteNewer("2.0.0", "1.0.0")).toBe(false);
|
||||
expect(isRemoteNewer("1.3.0", "1.2.0")).toBe(false);
|
||||
expect(isRemoteNewer("1.2.4", "1.2.3")).toBe(false);
|
||||
});
|
||||
|
||||
it("handles versions with different segment counts", () => {
|
||||
expect(isRemoteNewer("1.2", "1.2.1")).toBe(true);
|
||||
expect(isRemoteNewer("1.2.1", "1.2")).toBe(false);
|
||||
expect(isRemoteNewer("1", "1.0.1")).toBe(true);
|
||||
});
|
||||
|
||||
it("handles v-prefix in version strings", () => {
|
||||
expect(isRemoteNewer("v1.0.0", "v2.0.0")).toBe(true);
|
||||
expect(isRemoteNewer("v1.0.0", "v1.0.0")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseVersionParts", () => {
|
||||
it("parses standard version strings", () => {
|
||||
expect(parseVersionParts("1.2.3")).toEqual([1, 2, 3]);
|
||||
expect(parseVersionParts("10.20.30")).toEqual([10, 20, 30]);
|
||||
});
|
||||
|
||||
it("strips v prefix", () => {
|
||||
expect(parseVersionParts("v1.2.3")).toEqual([1, 2, 3]);
|
||||
expect(parseVersionParts("V1.2.3")).toEqual([1, 2, 3]);
|
||||
});
|
||||
|
||||
it("handles single segment", () => {
|
||||
expect(parseVersionParts("5")).toEqual([5]);
|
||||
});
|
||||
|
||||
it("handles version with pre-release suffix", () => {
|
||||
// Non-numeric suffixes are stripped per part
|
||||
expect(parseVersionParts("1.2.3-beta")).toEqual([1, 2, 3]);
|
||||
expect(parseVersionParts("1.2.3rc1")).toEqual([1, 2, 3]);
|
||||
});
|
||||
|
||||
it("handles empty and whitespace", () => {
|
||||
expect(parseVersionParts("")).toEqual([0]);
|
||||
expect(parseVersionParts(" ")).toEqual([0]);
|
||||
});
|
||||
|
||||
it("handles versions with extra dots", () => {
|
||||
expect(parseVersionParts("1.2.3.4")).toEqual([1, 2, 3, 4]);
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,5 @@
import { describe, expect, it } from "vitest";
import { parsePackagesFromLinksText, isHttpLink, sanitizeFilename, formatEta, filenameFromUrl } from "../src/main/utils";
import { extractHttpLinksFromText, parsePackagesFromLinksText, isHttpLink, sanitizeFilename, formatEta, filenameFromUrl, looksLikeOpaqueFilename } from "../src/main/utils";

describe("utils", () => {
it("validates http links", () => {
@@ -9,9 +9,24 @@ describe("utils", () => {
expect(isHttpLink("foo bar")).toBe(false);
});

it("extracts links from text and trims trailing punctuation", () => {
const links = extractHttpLinksFromText("See (https://example.com/test) and https://rapidgator.net/file/abc123, plus https://example.com/a.b.");
expect(links).toEqual([
"https://example.com/test",
"https://rapidgator.net/file/abc123",
"https://example.com/a.b"
]);
});

it("sanitizes filenames", () => {
expect(sanitizeFilename("foo/bar:baz*")).toBe("foo bar baz");
expect(sanitizeFilename(" ")).toBe("Paket");
expect(sanitizeFilename("test\0file.txt")).toBe("testfile.txt");
expect(sanitizeFilename("\0\0\0")).toBe("Paket");
expect(sanitizeFilename("..")).toBe("Paket");
expect(sanitizeFilename(".")).toBe("Paket");
expect(sanitizeFilename("release... ")).toBe("release");
expect(sanitizeFilename(" con ")).toBe("con_");
});

it("parses package markers", () => {
@@ -34,6 +49,70 @@ describe("utils", () => {
it("normalizes filenames from links", () => {
expect(filenameFromUrl("https://rapidgator.net/file/id/show.part1.rar.html")).toBe("show.part1.rar");
expect(filenameFromUrl("https://debrid.example/dl/abc?filename=Movie.S01E01.mkv")).toBe("Movie.S01E01.mkv");
expect(filenameFromUrl("https://debrid.example/dl/e51f6809bb6ca615601f5ac5db433737")).toBe("download.bin");
expect(filenameFromUrl("https://debrid.example/dl/%E0%A4%A")).toBe("%E0%A4%A");
expect(filenameFromUrl("https://debrid.example/dl/e51f6809bb6ca615601f5ac5db433737")).toBe("e51f6809bb6ca615601f5ac5db433737");
expect(filenameFromUrl("data:text/plain;base64,SGVsbG8=")).toBe("download.bin");
expect(filenameFromUrl("blob:https://example.com/12345678-1234-1234-1234-1234567890ab")).toBe("download.bin");
expect(looksLikeOpaqueFilename("download.bin")).toBe(true);
expect(looksLikeOpaqueFilename("e51f6809bb6ca615601f5ac5db433737")).toBe(true);
expect(looksLikeOpaqueFilename("movie.part1.rar")).toBe(false);
});

it("preserves unicode filenames", () => {
expect(sanitizeFilename("日本語ファイル.txt")).toBe("日本語ファイル.txt");
expect(sanitizeFilename("Ünïcödé Tëst.mkv")).toBe("Ünïcödé Tëst.mkv");
expect(sanitizeFilename("파일이름.rar")).toBe("파일이름.rar");
expect(sanitizeFilename("файл.zip")).toBe("файл.zip");
});

it("handles very long filenames", () => {
const longName = "a".repeat(300);
const result = sanitizeFilename(longName);
expect(typeof result).toBe("string");
expect(result.length).toBeGreaterThan(0);
// The function should return a non-empty string and not crash
expect(result).toBe(longName);
});

it("formats eta with very large values without crashing", () => {
const result = formatEta(999999);
expect(typeof result).toBe("string");
expect(result.length).toBeGreaterThan(0);
// 999999 seconds = 277h 46m 39s
expect(result).toBe("277:46:39");
});

it("formats eta with edge cases", () => {
expect(formatEta(0)).toBe("00:00");
expect(formatEta(NaN)).toBe("--");
expect(formatEta(Infinity)).toBe("--");
expect(formatEta(Number.MAX_SAFE_INTEGER)).toMatch(/^\d+:\d{2}:\d{2}$/);
});

it("extracts filenames from URLs with encoded characters", () => {
expect(filenameFromUrl("https://example.com/file%20with%20spaces.rar")).toBe("file with spaces.rar");
// %C3%A9 decodes to e-acute (UTF-8), which is preserved
expect(filenameFromUrl("https://example.com/t%C3%A9st%20file.zip")).toBe("t\u00e9st file.zip");
expect(filenameFromUrl("https://example.com/dl?filename=Movie%20Name%20S01E01.mkv")).toBe("Movie Name S01E01.mkv");
// Malformed percent-encoding should not crash
const result = filenameFromUrl("https://example.com/%ZZ%invalid");
expect(typeof result).toBe("string");
expect(result.length).toBeGreaterThan(0);
});

it("handles looksLikeOpaqueFilename edge cases", () => {
// Empty string -> sanitizeFilename returns "Paket" which is not opaque
expect(looksLikeOpaqueFilename("")).toBe(false);
expect(looksLikeOpaqueFilename("a")).toBe(false);
expect(looksLikeOpaqueFilename("ab")).toBe(false);
expect(looksLikeOpaqueFilename("abc")).toBe(false);
expect(looksLikeOpaqueFilename("download.bin")).toBe(true);
// 24-char hex string is opaque (matches /^[a-f0-9]{24,}$/)
expect(looksLikeOpaqueFilename("abcdef123456789012345678")).toBe(true);
expect(looksLikeOpaqueFilename("abcdef1234567890abcdef12")).toBe(true);
// Short hex strings (< 24 chars) are NOT considered opaque
expect(looksLikeOpaqueFilename("abcdef12345")).toBe(false);
// Real filename with extension
expect(looksLikeOpaqueFilename("Show.S01E01.720p.mkv")).toBe(false);
});
});
@@ -12,5 +12,5 @@
"isolatedModules": true,
"types": ["node", "vite/client"]
},
"include": ["src", "tests", "vite.config.ts"]
"include": ["src", "tests", "vite.config.mts"]
}