Compare commits
51 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
49e62c1f83 | ||
|
|
4c67455c67 | ||
|
|
5a5e3d2960 | ||
|
|
11da8b6e9a | ||
|
|
265e6a72be | ||
|
|
7816dc9488 | ||
|
|
678d642683 | ||
|
|
0f4174d153 | ||
|
|
babcd8edb7 | ||
|
|
6e00bbab53 | ||
|
|
72642351d0 | ||
|
|
51a01ea03f | ||
|
|
d9a78ea837 | ||
|
|
5b221d5bd5 | ||
|
|
c36549ca69 | ||
|
|
7e79bef8da | ||
|
|
e3b4a4ba19 | ||
|
|
30d216c7ca | ||
|
|
d80483adc2 | ||
|
|
1cda391dfe | ||
|
|
375ec36781 | ||
|
|
4ad1c05444 | ||
|
|
c88eeb0b12 | ||
|
|
c6261aba6a | ||
|
|
a010b967b9 | ||
|
|
af6547f254 | ||
|
|
ba235b0b93 | ||
|
|
1bfde96e46 | ||
|
|
e1f9b4b6d3 | ||
|
|
95cf4fbed8 | ||
|
|
9ddc7d31bb | ||
|
|
83626017b9 | ||
|
|
b9372f0ef0 | ||
|
|
db97a7df14 | ||
|
|
575fca3806 | ||
|
|
a1c8f42435 | ||
|
|
a3c2680fec | ||
|
|
12dade0240 | ||
|
|
2a528a126c | ||
|
|
8839080069 | ||
|
|
8f66d75eb3 | ||
|
|
56ee681aec | ||
|
|
6db03f05a9 | ||
|
|
068da94e2a | ||
|
|
4b824b2d9f | ||
|
|
284c5e7aa6 | ||
|
|
036cd3e066 | ||
|
|
479c7a3f3f | ||
|
|
0404d870ad | ||
|
|
93a53763e0 | ||
|
|
c20d743286 |
25
README.md
25
README.md
@ -160,7 +160,7 @@ The app stores runtime files in Electron's `userData` directory, including:
|
|||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
- Download does not start: verify token and selected provider in Settings.
|
- Download does not start: verify token and selected provider in Settings.
|
||||||
- Extraction fails: check archive passwords, JVM runtime (`resources/extractor-jvm`), or force legacy mode with `RD_EXTRACT_BACKEND=legacy`.
|
- Extraction fails: check archive passwords and native extractor installation (7-Zip/WinRAR). Optional JVM extractor can be forced with `RD_EXTRACT_BACKEND=jvm`.
|
||||||
- Very slow downloads: check active speed limit and bandwidth schedules.
|
- Very slow downloads: check active speed limit and bandwidth schedules.
|
||||||
- Unexpected interruptions: enable reconnect and fallback providers.
|
- Unexpected interruptions: enable reconnect and fallback providers.
|
||||||
- Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically.
|
- Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically.
|
||||||
@ -169,6 +169,29 @@ The app stores runtime files in Electron's `userData` directory, including:
|
|||||||
|
|
||||||
Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases).
|
Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases).
|
||||||
|
|
||||||
|
### v1.6.60 (2026-03-05)
|
||||||
|
|
||||||
|
- Added package-scoped password cache for extraction: once the first archive in a package is solved, following archives in the same package reuse that password first.
|
||||||
|
- Kept fallback behavior intact (`""` and other candidates are still tested), but moved empty-password probing behind the learned password to reduce per-archive delays.
|
||||||
|
- Added cache invalidation on real `wrong_password` failures so stale passwords are automatically discarded.
|
||||||
|
|
||||||
|
### v1.6.59 (2026-03-05)
|
||||||
|
|
||||||
|
- Switched default extraction backend to native tools (`legacy`) for more stable archive-to-archive flow.
|
||||||
|
- Prioritized 7-Zip as primary native extractor, with WinRAR/UnRAR as fallback.
|
||||||
|
- JVM extractor remains available as opt-in via `RD_EXTRACT_BACKEND=jvm`.
|
||||||
|
|
||||||
|
### v1.6.58 (2026-03-05)
|
||||||
|
|
||||||
|
- Fixed extraction progress oscillation (`1% -> 100% -> 1%` loops) during password retries.
|
||||||
|
- Kept strict archive completion logic, but normalized in-progress archive percent to avoid false visual done states before real completion.
|
||||||
|
|
||||||
|
### v1.6.57 (2026-03-05)
|
||||||
|
|
||||||
|
- Fixed extraction flow so archives are marked done only on real completion, not on temporary `100%` progress spikes.
|
||||||
|
- Improved password handling: after the first successful archive, the discovered password is prioritized for subsequent archives.
|
||||||
|
- Fixed progress parsing for password retries (reset/restart handling), reducing visible and real gaps between archive extractions.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
MIT - see `LICENSE`.
|
MIT - see `LICENSE`.
|
||||||
|
|||||||
@ -1,75 +0,0 @@
|
|||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { spawnSync } from "node:child_process";
|
|
||||||
|
|
||||||
const credResult = spawnSync("git", ["credential", "fill"], {
|
|
||||||
input: "protocol=https\nhost=codeberg.org\n\n",
|
|
||||||
encoding: "utf8",
|
|
||||||
stdio: ["pipe", "pipe", "pipe"]
|
|
||||||
});
|
|
||||||
const creds = new Map();
|
|
||||||
for (const line of credResult.stdout.split(/\r?\n/)) {
|
|
||||||
if (line.includes("=")) {
|
|
||||||
const [k, v] = line.split("=", 2);
|
|
||||||
creds.set(k, v);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const auth = "Basic " + Buffer.from(creds.get("username") + ":" + creds.get("password")).toString("base64");
|
|
||||||
const owner = "Sucukdeluxe";
|
|
||||||
const repo = "real-debrid-downloader";
|
|
||||||
const tag = "v1.5.35";
|
|
||||||
const baseApi = `https://codeberg.org/api/v1/repos/${owner}/${repo}`;
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
await fetch(baseApi, {
|
|
||||||
method: "PATCH",
|
|
||||||
headers: { Authorization: auth, "Content-Type": "application/json" },
|
|
||||||
body: JSON.stringify({ has_releases: true })
|
|
||||||
});
|
|
||||||
|
|
||||||
const createRes = await fetch(`${baseApi}/releases`, {
|
|
||||||
method: "POST",
|
|
||||||
headers: { Authorization: auth, "Content-Type": "application/json", Accept: "application/json" },
|
|
||||||
body: JSON.stringify({
|
|
||||||
tag_name: tag,
|
|
||||||
target_commitish: "main",
|
|
||||||
name: tag,
|
|
||||||
body: "- Fix: Fortschritt zeigt jetzt kombinierten Wert (Download + Entpacken)\n- Fix: Pausieren zeigt nicht mehr 'Warte auf Daten'\n- Pixel-perfekte Dual-Layer Progress-Bar Texte (clip-path)",
|
|
||||||
draft: false,
|
|
||||||
prerelease: false
|
|
||||||
})
|
|
||||||
});
|
|
||||||
const release = await createRes.json();
|
|
||||||
if (!createRes.ok) {
|
|
||||||
console.error("Create failed:", JSON.stringify(release));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
console.log("Release created:", release.id);
|
|
||||||
|
|
||||||
const files = [
|
|
||||||
"Real-Debrid-Downloader Setup 1.5.35.exe",
|
|
||||||
"Real-Debrid-Downloader 1.5.35.exe",
|
|
||||||
"latest.yml",
|
|
||||||
"Real-Debrid-Downloader Setup 1.5.35.exe.blockmap"
|
|
||||||
];
|
|
||||||
for (const f of files) {
|
|
||||||
const filePath = path.join("release", f);
|
|
||||||
const data = fs.readFileSync(filePath);
|
|
||||||
const uploadUrl = `${baseApi}/releases/${release.id}/assets?name=${encodeURIComponent(f)}`;
|
|
||||||
const res = await fetch(uploadUrl, {
|
|
||||||
method: "POST",
|
|
||||||
headers: { Authorization: auth, "Content-Type": "application/octet-stream" },
|
|
||||||
body: data
|
|
||||||
});
|
|
||||||
if (res.ok) {
|
|
||||||
console.log("Uploaded:", f);
|
|
||||||
} else if (res.status === 409 || res.status === 422) {
|
|
||||||
console.log("Skipped existing:", f);
|
|
||||||
} else {
|
|
||||||
console.error("Upload failed for", f, ":", res.status);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
console.log(`Done! https://codeberg.org/${owner}/${repo}/releases/tag/${tag}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
main().catch(e => { console.error(e.message); process.exit(1); });
|
|
||||||
@ -25,11 +25,11 @@ AppPublisher=Sucukdeluxe
|
|||||||
DefaultDirName={autopf}\{#MyAppName}
|
DefaultDirName={autopf}\{#MyAppName}
|
||||||
DefaultGroupName={#MyAppName}
|
DefaultGroupName={#MyAppName}
|
||||||
OutputDir={#MyOutputDir}
|
OutputDir={#MyOutputDir}
|
||||||
OutputBaseFilename=Real-Debrid-Downloader-Setup-{#MyAppVersion}
|
OutputBaseFilename=Real-Debrid-Downloader Setup {#MyAppVersion}
|
||||||
Compression=lzma
|
Compression=lzma
|
||||||
SolidCompression=yes
|
SolidCompression=yes
|
||||||
WizardStyle=modern
|
WizardStyle=modern
|
||||||
PrivilegesRequired=admin
|
PrivilegesRequired=lowest
|
||||||
ArchitecturesInstallIn64BitMode=x64compatible
|
ArchitecturesInstallIn64BitMode=x64compatible
|
||||||
UninstallDisplayIcon={app}\{#MyAppExeName}
|
UninstallDisplayIcon={app}\{#MyAppExeName}
|
||||||
SetupIconFile={#MyIconFile}
|
SetupIconFile={#MyIconFile}
|
||||||
@ -39,8 +39,8 @@ Name: "german"; MessagesFile: "compiler:Languages\German.isl"
|
|||||||
Name: "english"; MessagesFile: "compiler:Default.isl"
|
Name: "english"; MessagesFile: "compiler:Default.isl"
|
||||||
|
|
||||||
[Files]
|
[Files]
|
||||||
Source: "{#MySourceDir}\\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
|
Source: "{#MySourceDir}\\*"; DestDir: "{app}"; Flags: recursesubdirs createallsubdirs
|
||||||
Source: "{#MyIconFile}"; DestDir: "{app}"; DestName: "app_icon.ico"; Flags: ignoreversion
|
Source: "{#MyIconFile}"; DestDir: "{app}"; DestName: "app_icon.ico"
|
||||||
|
|
||||||
[Icons]
|
[Icons]
|
||||||
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app_icon.ico"
|
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app_icon.ico"
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "real-debrid-downloader",
|
"name": "real-debrid-downloader",
|
||||||
"version": "1.6.31",
|
"version": "1.6.60",
|
||||||
"description": "Desktop downloader",
|
"description": "Desktop downloader",
|
||||||
"main": "build/main/main/main.js",
|
"main": "build/main/main/main.js",
|
||||||
"author": "Sucukdeluxe",
|
"author": "Sucukdeluxe",
|
||||||
@ -17,7 +17,6 @@
|
|||||||
"test": "vitest run",
|
"test": "vitest run",
|
||||||
"self-check": "tsx tests/self-check.ts",
|
"self-check": "tsx tests/self-check.ts",
|
||||||
"release:win": "npm run build && electron-builder --publish never --win nsis portable",
|
"release:win": "npm run build && electron-builder --publish never --win nsis portable",
|
||||||
"release:codeberg": "node scripts/release_codeberg.mjs",
|
|
||||||
"release:gitea": "node scripts/release_gitea.mjs",
|
"release:gitea": "node scripts/release_gitea.mjs",
|
||||||
"release:forgejo": "node scripts/release_gitea.mjs"
|
"release:forgejo": "node scripts/release_gitea.mjs"
|
||||||
},
|
},
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -3,7 +3,9 @@ package com.sucukdeluxe.extractor;
|
|||||||
import net.lingala.zip4j.ZipFile;
|
import net.lingala.zip4j.ZipFile;
|
||||||
import net.lingala.zip4j.exception.ZipException;
|
import net.lingala.zip4j.exception.ZipException;
|
||||||
import net.lingala.zip4j.model.FileHeader;
|
import net.lingala.zip4j.model.FileHeader;
|
||||||
|
import net.sf.sevenzipjbinding.ExtractAskMode;
|
||||||
import net.sf.sevenzipjbinding.ExtractOperationResult;
|
import net.sf.sevenzipjbinding.ExtractOperationResult;
|
||||||
|
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
|
||||||
import net.sf.sevenzipjbinding.IArchiveOpenCallback;
|
import net.sf.sevenzipjbinding.IArchiveOpenCallback;
|
||||||
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
|
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
|
||||||
import net.sf.sevenzipjbinding.IInArchive;
|
import net.sf.sevenzipjbinding.IInArchive;
|
||||||
@ -26,6 +28,7 @@ import java.io.InputStream;
|
|||||||
import java.io.OutputStream;
|
import java.io.OutputStream;
|
||||||
import java.io.RandomAccessFile;
|
import java.io.RandomAccessFile;
|
||||||
import java.nio.charset.StandardCharsets;
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.nio.file.Files;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Base64;
|
import java.util.Base64;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
@ -42,12 +45,18 @@ public final class JBindExtractorMain {
|
|||||||
private static final Pattern NUMBERED_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.zip\\.\\d{3}$");
|
private static final Pattern NUMBERED_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.zip\\.\\d{3}$");
|
||||||
private static final Pattern OLD_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.z\\d{2,3}$");
|
private static final Pattern OLD_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.z\\d{2,3}$");
|
||||||
private static final Pattern SEVEN_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.7z\\.001$");
|
private static final Pattern SEVEN_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.7z\\.001$");
|
||||||
|
private static final Pattern DIGIT_SUFFIX_RE = Pattern.compile("\\d{2,3}");
|
||||||
|
private static final Pattern WINDOWS_SPECIAL_CHARS_RE = Pattern.compile("[:<>*?\"\\|]");
|
||||||
private static volatile boolean sevenZipInitialized = false;
|
private static volatile boolean sevenZipInitialized = false;
|
||||||
|
|
||||||
private JBindExtractorMain() {
|
private JBindExtractorMain() {
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
if (args.length == 1 && "--daemon".equals(args[0])) {
|
||||||
|
runDaemon();
|
||||||
|
return;
|
||||||
|
}
|
||||||
int exit = 1;
|
int exit = 1;
|
||||||
try {
|
try {
|
||||||
ExtractionRequest request = parseArgs(args);
|
ExtractionRequest request = parseArgs(args);
|
||||||
@ -62,6 +71,127 @@ public final class JBindExtractorMain {
|
|||||||
System.exit(exit);
|
System.exit(exit);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static void runDaemon() {
|
||||||
|
System.out.println("RD_DAEMON_READY");
|
||||||
|
System.out.flush();
|
||||||
|
java.io.BufferedReader reader = new java.io.BufferedReader(
|
||||||
|
new java.io.InputStreamReader(System.in, StandardCharsets.UTF_8));
|
||||||
|
try {
|
||||||
|
String line;
|
||||||
|
while ((line = reader.readLine()) != null) {
|
||||||
|
line = line.trim();
|
||||||
|
if (line.isEmpty()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
int exitCode = 1;
|
||||||
|
try {
|
||||||
|
ExtractionRequest request = parseDaemonRequest(line);
|
||||||
|
exitCode = runExtraction(request);
|
||||||
|
} catch (IllegalArgumentException error) {
|
||||||
|
emitError("Argumentfehler: " + safeMessage(error));
|
||||||
|
exitCode = 2;
|
||||||
|
} catch (Throwable error) {
|
||||||
|
emitError(safeMessage(error));
|
||||||
|
exitCode = 1;
|
||||||
|
}
|
||||||
|
System.out.println("RD_REQUEST_DONE " + exitCode);
|
||||||
|
System.out.flush();
|
||||||
|
}
|
||||||
|
} catch (IOException ignored) {
|
||||||
|
// stdin closed — parent process exited
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static ExtractionRequest parseDaemonRequest(String jsonLine) {
|
||||||
|
// Minimal JSON parsing without external dependencies.
|
||||||
|
// Expected format: {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
|
||||||
|
ExtractionRequest request = new ExtractionRequest();
|
||||||
|
request.archiveFile = new File(extractJsonString(jsonLine, "archive"));
|
||||||
|
request.targetDir = new File(extractJsonString(jsonLine, "target"));
|
||||||
|
String conflict = extractJsonString(jsonLine, "conflict");
|
||||||
|
if (conflict.length() > 0) {
|
||||||
|
request.conflictMode = ConflictMode.fromValue(conflict);
|
||||||
|
}
|
||||||
|
String backend = extractJsonString(jsonLine, "backend");
|
||||||
|
if (backend.length() > 0) {
|
||||||
|
request.backend = Backend.fromValue(backend);
|
||||||
|
}
|
||||||
|
// Parse passwords array
|
||||||
|
int pwStart = jsonLine.indexOf("\"passwords\"");
|
||||||
|
if (pwStart >= 0) {
|
||||||
|
int arrStart = jsonLine.indexOf('[', pwStart);
|
||||||
|
int arrEnd = jsonLine.indexOf(']', arrStart);
|
||||||
|
if (arrStart >= 0 && arrEnd > arrStart) {
|
||||||
|
String arrContent = jsonLine.substring(arrStart + 1, arrEnd);
|
||||||
|
int idx = 0;
|
||||||
|
while (idx < arrContent.length()) {
|
||||||
|
int qStart = arrContent.indexOf('"', idx);
|
||||||
|
if (qStart < 0) break;
|
||||||
|
int qEnd = findClosingQuote(arrContent, qStart + 1);
|
||||||
|
if (qEnd < 0) break;
|
||||||
|
request.passwords.add(unescapeJsonString(arrContent.substring(qStart + 1, qEnd)));
|
||||||
|
idx = qEnd + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (request.archiveFile == null || !request.archiveFile.exists() || !request.archiveFile.isFile()) {
|
||||||
|
throw new IllegalArgumentException("Archiv nicht gefunden: " +
|
||||||
|
(request.archiveFile == null ? "null" : request.archiveFile.getAbsolutePath()));
|
||||||
|
}
|
||||||
|
if (request.targetDir == null) {
|
||||||
|
throw new IllegalArgumentException("--target fehlt");
|
||||||
|
}
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String extractJsonString(String json, String key) {
|
||||||
|
String search = "\"" + key + "\"";
|
||||||
|
int keyIdx = json.indexOf(search);
|
||||||
|
if (keyIdx < 0) return "";
|
||||||
|
int colonIdx = json.indexOf(':', keyIdx + search.length());
|
||||||
|
if (colonIdx < 0) return "";
|
||||||
|
int qStart = json.indexOf('"', colonIdx + 1);
|
||||||
|
if (qStart < 0) return "";
|
||||||
|
int qEnd = findClosingQuote(json, qStart + 1);
|
||||||
|
if (qEnd < 0) return "";
|
||||||
|
return unescapeJsonString(json.substring(qStart + 1, qEnd));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static int findClosingQuote(String s, int from) {
|
||||||
|
for (int i = from; i < s.length(); i++) {
|
||||||
|
char c = s.charAt(i);
|
||||||
|
if (c == '\\') {
|
||||||
|
i++; // skip escaped character
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (c == '"') return i;
|
||||||
|
}
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String unescapeJsonString(String s) {
|
||||||
|
if (s.indexOf('\\') < 0) return s;
|
||||||
|
StringBuilder sb = new StringBuilder(s.length());
|
||||||
|
for (int i = 0; i < s.length(); i++) {
|
||||||
|
char c = s.charAt(i);
|
||||||
|
if (c == '\\' && i + 1 < s.length()) {
|
||||||
|
char next = s.charAt(i + 1);
|
||||||
|
switch (next) {
|
||||||
|
case '"': sb.append('"'); i++; break;
|
||||||
|
case '\\': sb.append('\\'); i++; break;
|
||||||
|
case '/': sb.append('/'); i++; break;
|
||||||
|
case 'n': sb.append('\n'); i++; break;
|
||||||
|
case 'r': sb.append('\r'); i++; break;
|
||||||
|
case 't': sb.append('\t'); i++; break;
|
||||||
|
default: sb.append(c); break;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
sb.append(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
private static int runExtraction(ExtractionRequest request) throws Exception {
|
private static int runExtraction(ExtractionRequest request) throws Exception {
|
||||||
List<String> passwords = normalizePasswords(request.passwords);
|
List<String> passwords = normalizePasswords(request.passwords);
|
||||||
Exception lastError = null;
|
Exception lastError = null;
|
||||||
@ -152,30 +282,35 @@ public final class JBindExtractorMain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
ensureDirectory(output.getParentFile());
|
ensureDirectory(output.getParentFile());
|
||||||
|
rejectSymlink(output);
|
||||||
long[] remaining = new long[] { itemUnits };
|
long[] remaining = new long[] { itemUnits };
|
||||||
|
boolean extractionSuccess = false;
|
||||||
try {
|
try {
|
||||||
InputStream in = zipFile.getInputStream(header);
|
InputStream in = zipFile.getInputStream(header);
|
||||||
OutputStream out = new FileOutputStream(output);
|
|
||||||
try {
|
try {
|
||||||
byte[] buffer = new byte[BUFFER_SIZE];
|
OutputStream out = new FileOutputStream(output);
|
||||||
while (true) {
|
try {
|
||||||
int read = in.read(buffer);
|
byte[] buffer = new byte[BUFFER_SIZE];
|
||||||
if (read < 0) {
|
while (true) {
|
||||||
break;
|
int read = in.read(buffer);
|
||||||
|
if (read < 0) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (read == 0) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
out.write(buffer, 0, read);
|
||||||
|
long accounted = Math.min(remaining[0], (long) read);
|
||||||
|
remaining[0] -= accounted;
|
||||||
|
progress.advance(accounted);
|
||||||
}
|
}
|
||||||
if (read == 0) {
|
} finally {
|
||||||
continue;
|
try {
|
||||||
|
out.close();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
}
|
}
|
||||||
out.write(buffer, 0, read);
|
|
||||||
long accounted = Math.min(remaining[0], (long) read);
|
|
||||||
remaining[0] -= accounted;
|
|
||||||
progress.advance(accounted);
|
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
try {
|
|
||||||
out.close();
|
|
||||||
} catch (Throwable ignored) {
|
|
||||||
}
|
|
||||||
try {
|
try {
|
||||||
in.close();
|
in.close();
|
||||||
} catch (Throwable ignored) {
|
} catch (Throwable ignored) {
|
||||||
@ -188,11 +323,19 @@ public final class JBindExtractorMain {
|
|||||||
if (modified > 0) {
|
if (modified > 0) {
|
||||||
output.setLastModified(modified);
|
output.setLastModified(modified);
|
||||||
}
|
}
|
||||||
|
extractionSuccess = true;
|
||||||
} catch (ZipException error) {
|
} catch (ZipException error) {
|
||||||
if (isWrongPassword(error, encrypted)) {
|
if (isWrongPassword(error, encrypted)) {
|
||||||
throw new WrongPasswordException(error);
|
throw new WrongPasswordException(error);
|
||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
|
} finally {
|
||||||
|
if (!extractionSuccess && output.exists()) {
|
||||||
|
try {
|
||||||
|
output.delete();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -219,98 +362,99 @@ public final class JBindExtractorMain {
|
|||||||
try {
|
try {
|
||||||
context = openSevenZipArchive(request.archiveFile, password);
|
context = openSevenZipArchive(request.archiveFile, password);
|
||||||
IInArchive archive = context.archive;
|
IInArchive archive = context.archive;
|
||||||
ISimpleInArchive simple = archive.getSimpleInterface();
|
int itemCount = archive.getNumberOfItems();
|
||||||
ISimpleInArchiveItem[] items = simple.getArchiveItems();
|
if (itemCount <= 0) {
|
||||||
|
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pre-scan: collect file indices, sizes, output paths, and detect encryption
|
||||||
long totalUnits = 0;
|
long totalUnits = 0;
|
||||||
boolean encrypted = false;
|
boolean encrypted = false;
|
||||||
for (ISimpleInArchiveItem item : items) {
|
List<Integer> fileIndices = new ArrayList<Integer>();
|
||||||
if (item == null || item.isFolder()) {
|
List<File> outputFiles = new ArrayList<File>();
|
||||||
continue;
|
List<Long> fileSizes = new ArrayList<Long>();
|
||||||
}
|
|
||||||
try {
|
|
||||||
encrypted = encrypted || item.isEncrypted();
|
|
||||||
} catch (Throwable ignored) {
|
|
||||||
// ignore encrypted flag read issues
|
|
||||||
}
|
|
||||||
totalUnits += safeSize(item.getSize());
|
|
||||||
}
|
|
||||||
ProgressTracker progress = new ProgressTracker(totalUnits);
|
|
||||||
progress.emitStart();
|
|
||||||
|
|
||||||
Set<String> reserved = new HashSet<String>();
|
Set<String> reserved = new HashSet<String>();
|
||||||
for (ISimpleInArchiveItem item : items) {
|
|
||||||
if (item == null) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
String entryName = normalizeEntryName(item.getPath(), "item-" + item.getItemIndex());
|
for (int i = 0; i < itemCount; i++) {
|
||||||
if (item.isFolder()) {
|
Boolean isFolder = (Boolean) archive.getProperty(i, PropID.IS_FOLDER);
|
||||||
|
String entryPath = (String) archive.getProperty(i, PropID.PATH);
|
||||||
|
String entryName = normalizeEntryName(entryPath, "item-" + i);
|
||||||
|
|
||||||
|
if (Boolean.TRUE.equals(isFolder)) {
|
||||||
File dir = resolveDirectory(request.targetDir, entryName);
|
File dir = resolveDirectory(request.targetDir, entryName);
|
||||||
ensureDirectory(dir);
|
ensureDirectory(dir);
|
||||||
reserved.add(pathKey(dir));
|
reserved.add(pathKey(dir));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
long itemUnits = safeSize(item.getSize());
|
|
||||||
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
|
|
||||||
if (output == null) {
|
|
||||||
progress.advance(itemUnits);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
ensureDirectory(output.getParentFile());
|
|
||||||
final FileOutputStream out = new FileOutputStream(output);
|
|
||||||
final long[] remaining = new long[] { itemUnits };
|
|
||||||
try {
|
try {
|
||||||
ExtractOperationResult result = item.extractSlow(new ISequentialOutStream() {
|
Boolean isEncrypted = (Boolean) archive.getProperty(i, PropID.ENCRYPTED);
|
||||||
@Override
|
encrypted = encrypted || Boolean.TRUE.equals(isEncrypted);
|
||||||
public int write(byte[] data) throws SevenZipException {
|
|
||||||
if (data == null || data.length == 0) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
out.write(data);
|
|
||||||
} catch (IOException error) {
|
|
||||||
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
|
|
||||||
}
|
|
||||||
long accounted = Math.min(remaining[0], (long) data.length);
|
|
||||||
remaining[0] -= accounted;
|
|
||||||
progress.advance(accounted);
|
|
||||||
return data.length;
|
|
||||||
}
|
|
||||||
}, password == null ? "" : password);
|
|
||||||
|
|
||||||
if (remaining[0] > 0) {
|
|
||||||
progress.advance(remaining[0]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result != ExtractOperationResult.OK) {
|
|
||||||
if (isPasswordFailure(result, encrypted)) {
|
|
||||||
throw new WrongPasswordException(new IOException("Falsches Passwort"));
|
|
||||||
}
|
|
||||||
throw new IOException("7z-Fehler: " + result.name());
|
|
||||||
}
|
|
||||||
} catch (SevenZipException error) {
|
|
||||||
if (looksLikeWrongPassword(error, encrypted)) {
|
|
||||||
throw new WrongPasswordException(error);
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
} finally {
|
|
||||||
try {
|
|
||||||
out.close();
|
|
||||||
} catch (Throwable ignored) {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
java.util.Date modified = item.getLastWriteTime();
|
|
||||||
if (modified != null) {
|
|
||||||
output.setLastModified(modified.getTime());
|
|
||||||
}
|
|
||||||
} catch (Throwable ignored) {
|
} catch (Throwable ignored) {
|
||||||
// best effort
|
// ignore encrypted flag read issues
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Long rawSize = (Long) archive.getProperty(i, PropID.SIZE);
|
||||||
|
long itemSize = safeSize(rawSize);
|
||||||
|
totalUnits += itemSize;
|
||||||
|
|
||||||
|
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
|
||||||
|
fileIndices.add(i);
|
||||||
|
outputFiles.add(output); // null if skipped
|
||||||
|
fileSizes.add(itemSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileIndices.isEmpty()) {
|
||||||
|
// All items are folders or skipped
|
||||||
|
ProgressTracker progress = new ProgressTracker(1);
|
||||||
|
progress.emitStart();
|
||||||
|
progress.emitDone();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
ProgressTracker progress = new ProgressTracker(totalUnits);
|
||||||
|
progress.emitStart();
|
||||||
|
|
||||||
|
// Build index array for bulk extract
|
||||||
|
int[] indices = new int[fileIndices.size()];
|
||||||
|
for (int i = 0; i < fileIndices.size(); i++) {
|
||||||
|
indices[i] = fileIndices.get(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map from archive index to our position in fileIndices/outputFiles
|
||||||
|
Map<Integer, Integer> indexToPos = new HashMap<Integer, Integer>();
|
||||||
|
for (int i = 0; i < fileIndices.size(); i++) {
|
||||||
|
indexToPos.put(fileIndices.get(i), i);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bulk extraction state
|
||||||
|
final boolean encryptedFinal = encrypted;
|
||||||
|
final String effectivePassword = password == null ? "" : password;
|
||||||
|
final File[] currentOutput = new File[1];
|
||||||
|
final FileOutputStream[] currentStream = new FileOutputStream[1];
|
||||||
|
final boolean[] currentSuccess = new boolean[1];
|
||||||
|
final long[] currentRemaining = new long[1];
|
||||||
|
final Throwable[] firstError = new Throwable[1];
|
||||||
|
final int[] currentPos = new int[] { -1 };
|
||||||
|
|
||||||
|
try {
|
||||||
|
archive.extract(indices, false, new BulkExtractCallback(
|
||||||
|
archive, indexToPos, fileIndices, outputFiles, fileSizes,
|
||||||
|
progress, encryptedFinal, effectivePassword, currentOutput,
|
||||||
|
currentStream, currentSuccess, currentRemaining, currentPos, firstError
|
||||||
|
));
|
||||||
|
} catch (SevenZipException error) {
|
||||||
|
if (looksLikeWrongPassword(error, encryptedFinal)) {
|
||||||
|
throw new WrongPasswordException(error);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (firstError[0] != null) {
|
||||||
|
if (firstError[0] instanceof WrongPasswordException) {
|
||||||
|
throw (WrongPasswordException) firstError[0];
|
||||||
|
}
|
||||||
|
throw (Exception) firstError[0];
|
||||||
}
|
}
|
||||||
|
|
||||||
progress.emitDone();
|
progress.emitDone();
|
||||||
@ -328,14 +472,31 @@ public final class JBindExtractorMain {
|
|||||||
|
|
||||||
if (SEVEN_ZIP_SPLIT_RE.matcher(nameLower).matches()) {
|
if (SEVEN_ZIP_SPLIT_RE.matcher(nameLower).matches()) {
|
||||||
VolumedArchiveInStream volumed = new VolumedArchiveInStream(archiveFile.getName(), callback);
|
VolumedArchiveInStream volumed = new VolumedArchiveInStream(archiveFile.getName(), callback);
|
||||||
IInArchive archive = SevenZip.openInArchive(null, volumed, callback);
|
try {
|
||||||
return new SevenZipArchiveContext(archive, null, volumed, callback);
|
IInArchive archive = SevenZip.openInArchive(null, volumed, callback);
|
||||||
|
return new SevenZipArchiveContext(archive, null, volumed, callback);
|
||||||
|
} catch (Exception error) {
|
||||||
|
callback.close();
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
RandomAccessFile raf = new RandomAccessFile(archiveFile, "r");
|
RandomAccessFile raf = new RandomAccessFile(archiveFile, "r");
|
||||||
RandomAccessFileInStream stream = new RandomAccessFileInStream(raf);
|
RandomAccessFileInStream stream = new RandomAccessFileInStream(raf);
|
||||||
IInArchive archive = SevenZip.openInArchive(null, stream, callback);
|
try {
|
||||||
return new SevenZipArchiveContext(archive, stream, null, callback);
|
IInArchive archive = SevenZip.openInArchive(null, stream, callback);
|
||||||
|
return new SevenZipArchiveContext(archive, stream, null, callback);
|
||||||
|
} catch (Exception error) {
|
||||||
|
try {
|
||||||
|
stream.close();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
raf.close();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static boolean isWrongPassword(ZipException error, boolean encrypted) {
|
private static boolean isWrongPassword(ZipException error, boolean encrypted) {
|
||||||
@ -396,7 +557,7 @@ public final class JBindExtractorMain {
|
|||||||
}
|
}
|
||||||
if (siblingName.startsWith(prefix) && siblingName.length() >= prefix.length() + 2) {
|
if (siblingName.startsWith(prefix) && siblingName.length() >= prefix.length() + 2) {
|
||||||
String suffix = siblingName.substring(prefix.length());
|
String suffix = siblingName.substring(prefix.length());
|
||||||
if (suffix.matches("\\d{2,3}")) {
|
if (DIGIT_SUFFIX_RE.matcher(suffix).matches()) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -480,6 +641,12 @@ public final class JBindExtractorMain {
|
|||||||
}
|
}
|
||||||
if (normalized.matches("^[a-zA-Z]:.*")) {
|
if (normalized.matches("^[a-zA-Z]:.*")) {
|
||||||
normalized = normalized.substring(2);
|
normalized = normalized.substring(2);
|
||||||
|
while (normalized.startsWith("/")) {
|
||||||
|
normalized = normalized.substring(1);
|
||||||
|
}
|
||||||
|
while (normalized.startsWith("\\")) {
|
||||||
|
normalized = normalized.substring(1);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
File targetCanonical = targetDir.getCanonicalFile();
|
File targetCanonical = targetDir.getCanonicalFile();
|
||||||
File output = new File(targetCanonical, normalized);
|
File output = new File(targetCanonical, normalized);
|
||||||
@ -488,7 +655,8 @@ public final class JBindExtractorMain {
|
|||||||
String outputPath = outputCanonical.getPath();
|
String outputPath = outputCanonical.getPath();
|
||||||
String targetPathNorm = isWindows() ? targetPath.toLowerCase(Locale.ROOT) : targetPath;
|
String targetPathNorm = isWindows() ? targetPath.toLowerCase(Locale.ROOT) : targetPath;
|
||||||
String outputPathNorm = isWindows() ? outputPath.toLowerCase(Locale.ROOT) : outputPath;
|
String outputPathNorm = isWindows() ? outputPath.toLowerCase(Locale.ROOT) : outputPath;
|
||||||
if (!outputPathNorm.equals(targetPathNorm) && !outputPathNorm.startsWith(targetPathNorm + File.separator)) {
|
String targetPrefix = targetPathNorm.endsWith(File.separator) ? targetPathNorm : targetPathNorm + File.separator;
|
||||||
|
if (!outputPathNorm.equals(targetPathNorm) && !outputPathNorm.startsWith(targetPrefix)) {
|
||||||
throw new IOException("Path Traversal blockiert: " + entryName);
|
throw new IOException("Path Traversal blockiert: " + entryName);
|
||||||
}
|
}
|
||||||
return outputCanonical;
|
return outputCanonical;
|
||||||
@ -506,20 +674,50 @@ public final class JBindExtractorMain {
|
|||||||
if (entry.length() == 0) {
|
if (entry.length() == 0) {
|
||||||
return fallback;
|
return fallback;
|
||||||
}
|
}
|
||||||
|
// Sanitize Windows special characters from each path segment
|
||||||
|
String[] segments = entry.split("/", -1);
|
||||||
|
StringBuilder sanitized = new StringBuilder();
|
||||||
|
for (int i = 0; i < segments.length; i++) {
|
||||||
|
if (i > 0) {
|
||||||
|
sanitized.append('/');
|
||||||
|
}
|
||||||
|
sanitized.append(WINDOWS_SPECIAL_CHARS_RE.matcher(segments[i]).replaceAll("_"));
|
||||||
|
}
|
||||||
|
entry = sanitized.toString();
|
||||||
|
if (entry.length() == 0) {
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
return entry;
|
return entry;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static long safeSize(Long value) {
|
private static long safeSize(Long value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return 1;
|
return 0;
|
||||||
}
|
}
|
||||||
long size = value.longValue();
|
long size = value.longValue();
|
||||||
if (size <= 0) {
|
if (size <= 0) {
|
||||||
return 1;
|
return 0;
|
||||||
}
|
}
|
||||||
return size;
|
return size;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static void rejectSymlink(File file) throws IOException {
|
||||||
|
if (file == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (Files.isSymbolicLink(file.toPath())) {
|
||||||
|
throw new IOException("Zieldatei ist ein Symlink, Schreiben verweigert: " + file.getAbsolutePath());
|
||||||
|
}
|
||||||
|
// Also check parent directories for symlinks
|
||||||
|
File parent = file.getParentFile();
|
||||||
|
while (parent != null) {
|
||||||
|
if (Files.isSymbolicLink(parent.toPath())) {
|
||||||
|
throw new IOException("Elternverzeichnis ist ein Symlink, Schreiben verweigert: " + parent.getAbsolutePath());
|
||||||
|
}
|
||||||
|
parent = parent.getParentFile();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private static void ensureDirectory(File dir) throws IOException {
|
private static void ensureDirectory(File dir) throws IOException {
|
||||||
if (dir == null) {
|
if (dir == null) {
|
||||||
return;
|
return;
|
||||||
@ -681,6 +879,176 @@ public final class JBindExtractorMain {
|
|||||||
private final List<String> passwords = new ArrayList<String>();
|
private final List<String> passwords = new ArrayList<String>();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bulk extraction callback that implements both IArchiveExtractCallback and
|
||||||
|
* ICryptoGetTextPassword. Using the bulk IInArchive.extract() API instead of
|
||||||
|
* per-item extractSlow() is critical for performance — solid RAR archives
|
||||||
|
* otherwise re-decode from the beginning for every single item.
|
||||||
|
*/
|
||||||
|
private static final class BulkExtractCallback implements IArchiveExtractCallback, ICryptoGetTextPassword {
|
||||||
|
private final IInArchive archive;
|
||||||
|
private final Map<Integer, Integer> indexToPos;
|
||||||
|
private final List<Integer> fileIndices;
|
||||||
|
private final List<File> outputFiles;
|
||||||
|
private final List<Long> fileSizes;
|
||||||
|
private final ProgressTracker progress;
|
||||||
|
private final boolean encrypted;
|
||||||
|
private final String password;
|
||||||
|
private final File[] currentOutput;
|
||||||
|
private final FileOutputStream[] currentStream;
|
||||||
|
private final boolean[] currentSuccess;
|
||||||
|
private final long[] currentRemaining;
|
||||||
|
private final int[] currentPos;
|
||||||
|
private final Throwable[] firstError;
|
||||||
|
|
||||||
|
BulkExtractCallback(IInArchive archive, Map<Integer, Integer> indexToPos,
|
||||||
|
List<Integer> fileIndices, List<File> outputFiles, List<Long> fileSizes,
|
||||||
|
ProgressTracker progress, boolean encrypted, String password,
|
||||||
|
File[] currentOutput, FileOutputStream[] currentStream,
|
||||||
|
boolean[] currentSuccess, long[] currentRemaining, int[] currentPos,
|
||||||
|
Throwable[] firstError) {
|
||||||
|
this.archive = archive;
|
||||||
|
this.indexToPos = indexToPos;
|
||||||
|
this.fileIndices = fileIndices;
|
||||||
|
this.outputFiles = outputFiles;
|
||||||
|
this.fileSizes = fileSizes;
|
||||||
|
this.progress = progress;
|
||||||
|
this.encrypted = encrypted;
|
||||||
|
this.password = password;
|
||||||
|
this.currentOutput = currentOutput;
|
||||||
|
this.currentStream = currentStream;
|
||||||
|
this.currentSuccess = currentSuccess;
|
||||||
|
this.currentRemaining = currentRemaining;
|
||||||
|
this.currentPos = currentPos;
|
||||||
|
this.firstError = firstError;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String cryptoGetTextPassword() {
|
||||||
|
return password;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setTotal(long total) {
|
||||||
|
// 7z reports total compressed bytes; we track uncompressed via ProgressTracker
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setCompleted(long complete) {
|
||||||
|
// Not used — we track per-write progress
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ISequentialOutStream getStream(int index, ExtractAskMode extractAskMode) throws SevenZipException {
|
||||||
|
closeCurrentStream();
|
||||||
|
|
||||||
|
Integer pos = indexToPos.get(index);
|
||||||
|
if (pos == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
currentPos[0] = pos;
|
||||||
|
currentOutput[0] = outputFiles.get(pos);
|
||||||
|
currentSuccess[0] = false;
|
||||||
|
currentRemaining[0] = fileSizes.get(pos);
|
||||||
|
|
||||||
|
if (extractAskMode != ExtractAskMode.EXTRACT) {
|
||||||
|
currentOutput[0] = null;
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (currentOutput[0] == null) {
|
||||||
|
progress.advance(currentRemaining[0]);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
ensureDirectory(currentOutput[0].getParentFile());
|
||||||
|
rejectSymlink(currentOutput[0]);
|
||||||
|
currentStream[0] = new FileOutputStream(currentOutput[0]);
|
||||||
|
} catch (IOException error) {
|
||||||
|
throw new SevenZipException("Fehler beim Erstellen: " + error.getMessage(), error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new ISequentialOutStream() {
|
||||||
|
@Override
|
||||||
|
public int write(byte[] data) throws SevenZipException {
|
||||||
|
if (data == null || data.length == 0) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
currentStream[0].write(data);
|
||||||
|
} catch (IOException error) {
|
||||||
|
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
|
||||||
|
}
|
||||||
|
long accounted = Math.min(currentRemaining[0], (long) data.length);
|
||||||
|
currentRemaining[0] -= accounted;
|
||||||
|
progress.advance(accounted);
|
||||||
|
return data.length;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void prepareOperation(ExtractAskMode extractAskMode) {
|
||||||
|
// no-op
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
|
||||||
|
if (currentRemaining[0] > 0) {
|
||||||
|
progress.advance(currentRemaining[0]);
|
||||||
|
currentRemaining[0] = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result == ExtractOperationResult.OK) {
|
||||||
|
currentSuccess[0] = true;
|
||||||
|
closeCurrentStream();
|
||||||
|
if (currentPos[0] >= 0 && currentOutput[0] != null) {
|
||||||
|
try {
|
||||||
|
int archiveIndex = fileIndices.get(currentPos[0]);
|
||||||
|
java.util.Date modified = (java.util.Date) archive.getProperty(archiveIndex, PropID.LAST_MODIFICATION_TIME);
|
||||||
|
if (modified != null) {
|
||||||
|
currentOutput[0].setLastModified(modified.getTime());
|
||||||
|
}
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
// best effort
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
closeCurrentStream();
|
||||||
|
if (currentOutput[0] != null && currentOutput[0].exists()) {
|
||||||
|
try {
|
||||||
|
currentOutput[0].delete();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (firstError[0] == null) {
|
||||||
|
if (isPasswordFailure(result, encrypted)) {
|
||||||
|
firstError[0] = new WrongPasswordException(new IOException("Falsches Passwort"));
|
||||||
|
} else {
|
||||||
|
firstError[0] = new IOException("7z-Fehler: " + result.name());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void closeCurrentStream() {
|
||||||
|
if (currentStream[0] != null) {
|
||||||
|
try {
|
||||||
|
currentStream[0].close();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
currentStream[0] = null;
|
||||||
|
}
|
||||||
|
if (!currentSuccess[0] && currentOutput[0] != null && currentOutput[0].exists()) {
|
||||||
|
try {
|
||||||
|
currentOutput[0].delete();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private static final class WrongPasswordException extends Exception {
|
private static final class WrongPasswordException extends Exception {
|
||||||
private static final long serialVersionUID = 1L;
|
private static final long serialVersionUID = 1L;
|
||||||
|
|
||||||
@ -828,12 +1196,11 @@ public final class JBindExtractorMain {
|
|||||||
if (filename == null || filename.trim().length() == 0) {
|
if (filename == null || filename.trim().length() == 0) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
File direct = new File(filename);
|
// Always resolve relative to the archive's parent directory.
|
||||||
if (direct.isAbsolute() && direct.exists()) {
|
// Never accept absolute paths to prevent path traversal.
|
||||||
return direct;
|
String baseName = new File(filename).getName();
|
||||||
}
|
|
||||||
if (archiveDir != null) {
|
if (archiveDir != null) {
|
||||||
File relative = new File(archiveDir, filename);
|
File relative = new File(archiveDir, baseName);
|
||||||
if (relative.exists()) {
|
if (relative.exists()) {
|
||||||
return relative;
|
return relative;
|
||||||
}
|
}
|
||||||
@ -843,13 +1210,13 @@ public final class JBindExtractorMain {
|
|||||||
if (!sibling.isFile()) {
|
if (!sibling.isFile()) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if (sibling.getName().equalsIgnoreCase(filename)) {
|
if (sibling.getName().equalsIgnoreCase(baseName)) {
|
||||||
return sibling;
|
return sibling;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return direct.exists() ? direct : null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|||||||
@ -2,8 +2,17 @@ const path = require("path");
|
|||||||
const { rcedit } = require("rcedit");
|
const { rcedit } = require("rcedit");
|
||||||
|
|
||||||
module.exports = async function afterPack(context) {
|
module.exports = async function afterPack(context) {
|
||||||
const exePath = path.join(context.appOutDir, `${context.packager.appInfo.productFilename}.exe`);
|
const productFilename = context.packager?.appInfo?.productFilename;
|
||||||
|
if (!productFilename) {
|
||||||
|
console.warn(" • rcedit: skipped — productFilename not available");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const exePath = path.join(context.appOutDir, `${productFilename}.exe`);
|
||||||
const iconPath = path.resolve(__dirname, "..", "assets", "app_icon.ico");
|
const iconPath = path.resolve(__dirname, "..", "assets", "app_icon.ico");
|
||||||
console.log(` • rcedit: patching icon → ${exePath}`);
|
console.log(` • rcedit: patching icon → ${exePath}`);
|
||||||
await rcedit(exePath, { icon: iconPath });
|
try {
|
||||||
|
await rcedit(exePath, { icon: iconPath });
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(` • rcedit: failed — ${String(error)}`);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|||||||
@ -31,18 +31,21 @@ async function main(): Promise<void> {
|
|||||||
login: settings.megaLogin,
|
login: settings.megaLogin,
|
||||||
password: settings.megaPassword
|
password: settings.megaPassword
|
||||||
}));
|
}));
|
||||||
const service = new DebridService(settings, {
|
try {
|
||||||
megaWebUnrestrict: (link) => megaWeb.unrestrict(link)
|
const service = new DebridService(settings, {
|
||||||
});
|
megaWebUnrestrict: (link) => megaWeb.unrestrict(link)
|
||||||
for (const link of links) {
|
});
|
||||||
try {
|
for (const link of links) {
|
||||||
const result = await service.unrestrictLink(link);
|
try {
|
||||||
console.log(`[OK] ${result.providerLabel} -> ${result.fileName}`);
|
const result = await service.unrestrictLink(link);
|
||||||
} catch (error) {
|
console.log(`[OK] ${result.providerLabel} -> ${result.fileName}`);
|
||||||
console.log(`[FAIL] ${String(error)}`);
|
} catch (error) {
|
||||||
|
console.log(`[FAIL] ${String(error)}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
|
megaWeb.dispose();
|
||||||
}
|
}
|
||||||
megaWeb.dispose();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void main();
|
main().catch(e => { console.error(e); process.exit(1); });
|
||||||
|
|||||||
@ -16,8 +16,8 @@ function sleep(ms) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function cookieFrom(headers) {
|
function cookieFrom(headers) {
|
||||||
const raw = headers.get("set-cookie") || "";
|
const cookies = headers.getSetCookie();
|
||||||
return raw.split(",").map((x) => x.split(";")[0].trim()).filter(Boolean).join("; ");
|
return cookies.map((x) => x.split(";")[0].trim()).filter(Boolean).join("; ");
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseDebridCodes(html) {
|
function parseDebridCodes(html) {
|
||||||
@ -47,6 +47,9 @@ async function resolveCode(cookie, code) {
|
|||||||
});
|
});
|
||||||
const text = (await res.text()).trim();
|
const text = (await res.text()).trim();
|
||||||
if (text === "reload") {
|
if (text === "reload") {
|
||||||
|
if (attempt % 5 === 0) {
|
||||||
|
console.log(` [retry] code=${code} attempt=${attempt}/50 (waiting for server)`);
|
||||||
|
}
|
||||||
await sleep(800);
|
await sleep(800);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@ -98,7 +101,13 @@ async function main() {
|
|||||||
redirect: "manual"
|
redirect: "manual"
|
||||||
});
|
});
|
||||||
|
|
||||||
|
if (loginRes.status >= 400) {
|
||||||
|
throw new Error(`Login failed with HTTP ${loginRes.status}`);
|
||||||
|
}
|
||||||
const cookie = cookieFrom(loginRes.headers);
|
const cookie = cookieFrom(loginRes.headers);
|
||||||
|
if (!cookie) {
|
||||||
|
throw new Error("Login returned no session cookie");
|
||||||
|
}
|
||||||
console.log("login", loginRes.status, loginRes.headers.get("location") || "");
|
console.log("login", loginRes.status, loginRes.headers.get("location") || "");
|
||||||
|
|
||||||
const debridRes = await fetch("https://www.mega-debrid.eu/index.php?form=debrid", {
|
const debridRes = await fetch("https://www.mega-debrid.eu/index.php?form=debrid", {
|
||||||
@ -136,4 +145,4 @@ async function main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await main();
|
await main().catch((e) => { console.error(e); process.exit(1); });
|
||||||
|
|||||||
@ -66,6 +66,8 @@ async function callRealDebrid(link) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// megaCookie is intentionally cached at module scope so that multiple
|
||||||
|
// callMegaDebrid() invocations reuse the same session cookie.
|
||||||
async function callMegaDebrid(link) {
|
async function callMegaDebrid(link) {
|
||||||
if (!megaCookie) {
|
if (!megaCookie) {
|
||||||
const loginRes = await fetch("https://www.mega-debrid.eu/index.php?form=login", {
|
const loginRes = await fetch("https://www.mega-debrid.eu/index.php?form=login", {
|
||||||
@ -77,13 +79,15 @@ async function callMegaDebrid(link) {
|
|||||||
body: new URLSearchParams({ login: megaLogin, password: megaPassword, remember: "on" }),
|
body: new URLSearchParams({ login: megaLogin, password: megaPassword, remember: "on" }),
|
||||||
redirect: "manual"
|
redirect: "manual"
|
||||||
});
|
});
|
||||||
megaCookie = (loginRes.headers.get("set-cookie") || "")
|
if (loginRes.status >= 400) {
|
||||||
.split(",")
|
return { ok: false, error: `Mega-Web login failed with HTTP ${loginRes.status}` };
|
||||||
|
}
|
||||||
|
megaCookie = loginRes.headers.getSetCookie()
|
||||||
.map((chunk) => chunk.split(";")[0].trim())
|
.map((chunk) => chunk.split(";")[0].trim())
|
||||||
.filter(Boolean)
|
.filter(Boolean)
|
||||||
.join("; ");
|
.join("; ");
|
||||||
if (!megaCookie) {
|
if (!megaCookie) {
|
||||||
return { ok: false, error: "Mega-Web login failed" };
|
return { ok: false, error: "Mega-Web login returned no session cookie" };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -290,4 +294,4 @@ async function main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await main();
|
await main().catch((e) => { console.error(e); process.exit(1); });
|
||||||
|
|||||||
@ -1,289 +0,0 @@
|
|||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { spawnSync } from "node:child_process";
|
|
||||||
|
|
||||||
const NPM_EXECUTABLE = process.platform === "win32" ? "npm.cmd" : "npm";
|
|
||||||
|
|
||||||
function run(command, args, options = {}) {
|
|
||||||
const result = spawnSync(command, args, {
|
|
||||||
cwd: process.cwd(),
|
|
||||||
encoding: "utf8",
|
|
||||||
stdio: options.capture ? ["pipe", "pipe", "pipe"] : "inherit"
|
|
||||||
});
|
|
||||||
if (result.status !== 0) {
|
|
||||||
const stderr = result.stderr ? String(result.stderr).trim() : "";
|
|
||||||
const stdout = result.stdout ? String(result.stdout).trim() : "";
|
|
||||||
const details = [stderr, stdout].filter(Boolean).join("\n");
|
|
||||||
throw new Error(`Command failed: ${command} ${args.join(" ")}${details ? `\n${details}` : ""}`);
|
|
||||||
}
|
|
||||||
return options.capture ? String(result.stdout || "") : "";
|
|
||||||
}
|
|
||||||
|
|
||||||
function runCapture(command, args) {
|
|
||||||
const result = spawnSync(command, args, {
|
|
||||||
cwd: process.cwd(),
|
|
||||||
encoding: "utf8",
|
|
||||||
stdio: ["pipe", "pipe", "pipe"]
|
|
||||||
});
|
|
||||||
if (result.status !== 0) {
|
|
||||||
const stderr = String(result.stderr || "").trim();
|
|
||||||
throw new Error(stderr || `Command failed: ${command} ${args.join(" ")}`);
|
|
||||||
}
|
|
||||||
return String(result.stdout || "").trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
function runWithInput(command, args, input) {
|
|
||||||
const result = spawnSync(command, args, {
|
|
||||||
cwd: process.cwd(),
|
|
||||||
encoding: "utf8",
|
|
||||||
input,
|
|
||||||
stdio: ["pipe", "pipe", "pipe"]
|
|
||||||
});
|
|
||||||
if (result.status !== 0) {
|
|
||||||
const stderr = String(result.stderr || "").trim();
|
|
||||||
throw new Error(stderr || `Command failed: ${command} ${args.join(" ")}`);
|
|
||||||
}
|
|
||||||
return String(result.stdout || "");
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseArgs(argv) {
|
|
||||||
const args = argv.slice(2);
|
|
||||||
if (args.includes("--help") || args.includes("-h")) {
|
|
||||||
return { help: true };
|
|
||||||
}
|
|
||||||
|
|
||||||
const dryRun = args.includes("--dry-run");
|
|
||||||
const cleaned = args.filter((arg) => arg !== "--dry-run");
|
|
||||||
const version = cleaned[0] || "";
|
|
||||||
const notes = cleaned.slice(1).join(" ").trim();
|
|
||||||
return { help: false, dryRun, version, notes };
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseCodebergRemote(url) {
|
|
||||||
const raw = String(url || "").trim();
|
|
||||||
const httpsMatch = raw.match(/^https?:\/\/(?:www\.)?codeberg\.org\/([^/]+)\/([^/]+?)(?:\.git)?$/i);
|
|
||||||
if (httpsMatch) {
|
|
||||||
return { owner: httpsMatch[1], repo: httpsMatch[2] };
|
|
||||||
}
|
|
||||||
const sshMatch = raw.match(/^git@codeberg\.org:([^/]+)\/([^/]+?)(?:\.git)?$/i);
|
|
||||||
if (sshMatch) {
|
|
||||||
return { owner: sshMatch[1], repo: sshMatch[2] };
|
|
||||||
}
|
|
||||||
throw new Error(`Cannot parse Codeberg remote URL: ${raw}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getCodebergRepo() {
|
|
||||||
const remotes = ["codeberg", "origin"];
|
|
||||||
for (const remote of remotes) {
|
|
||||||
try {
|
|
||||||
const remoteUrl = runCapture("git", ["remote", "get-url", remote]);
|
|
||||||
if (/codeberg\.org/i.test(remoteUrl)) {
|
|
||||||
const parsed = parseCodebergRemote(remoteUrl);
|
|
||||||
return { remote, ...parsed };
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// try next remote
|
|
||||||
}
|
|
||||||
}
|
|
||||||
throw new Error("No Codeberg remote found. Add one with: git remote add codeberg https://codeberg.org/<owner>/<repo>.git");
|
|
||||||
}
|
|
||||||
|
|
||||||
function getCodebergAuthHeader() {
|
|
||||||
const credentialText = runWithInput("git", ["credential", "fill"], "protocol=https\nhost=codeberg.org\n\n");
|
|
||||||
const map = new Map();
|
|
||||||
for (const line of credentialText.split(/\r?\n/)) {
|
|
||||||
if (!line.includes("=")) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const [key, value] = line.split("=", 2);
|
|
||||||
map.set(key, value);
|
|
||||||
}
|
|
||||||
const username = map.get("username") || "";
|
|
||||||
const password = map.get("password") || "";
|
|
||||||
if (!username || !password) {
|
|
||||||
throw new Error("Missing Codeberg credentials in git credential helper");
|
|
||||||
}
|
|
||||||
const token = Buffer.from(`${username}:${password}`, "utf8").toString("base64");
|
|
||||||
return `Basic ${token}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function apiRequest(method, url, authHeader, body, contentType = "application/json") {
|
|
||||||
const headers = {
|
|
||||||
Accept: "application/json",
|
|
||||||
Authorization: authHeader
|
|
||||||
};
|
|
||||||
if (body !== undefined) {
|
|
||||||
headers["Content-Type"] = contentType;
|
|
||||||
}
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method,
|
|
||||||
headers,
|
|
||||||
body
|
|
||||||
});
|
|
||||||
const text = await response.text();
|
|
||||||
let parsed;
|
|
||||||
try {
|
|
||||||
parsed = text ? JSON.parse(text) : null;
|
|
||||||
} catch {
|
|
||||||
parsed = text;
|
|
||||||
}
|
|
||||||
return { ok: response.ok, status: response.status, body: parsed };
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureVersionString(version) {
|
|
||||||
const trimmed = String(version || "").trim();
|
|
||||||
if (!/^\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?$/.test(trimmed)) {
|
|
||||||
throw new Error("Invalid version format. Expected e.g. 1.4.42");
|
|
||||||
}
|
|
||||||
return trimmed;
|
|
||||||
}
|
|
||||||
|
|
||||||
function updatePackageVersion(rootDir, version) {
|
|
||||||
const packagePath = path.join(rootDir, "package.json");
|
|
||||||
const packageJson = JSON.parse(fs.readFileSync(packagePath, "utf8"));
|
|
||||||
if (String(packageJson.version || "") === version) {
|
|
||||||
throw new Error(`package.json is already at version ${version}`);
|
|
||||||
}
|
|
||||||
packageJson.version = version;
|
|
||||||
fs.writeFileSync(packagePath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
|
||||||
}
|
|
||||||
|
|
||||||
function patchLatestYml(releaseDir, version) {
|
|
||||||
const ymlPath = path.join(releaseDir, "latest.yml");
|
|
||||||
let content = fs.readFileSync(ymlPath, "utf8");
|
|
||||||
const setupName = `Real-Debrid-Downloader Setup ${version}.exe`;
|
|
||||||
const dashedName = `Real-Debrid-Downloader-Setup-${version}.exe`;
|
|
||||||
if (content.includes(dashedName)) {
|
|
||||||
content = content.split(dashedName).join(setupName);
|
|
||||||
fs.writeFileSync(ymlPath, content, "utf8");
|
|
||||||
process.stdout.write(`Patched latest.yml: replaced "${dashedName}" with "${setupName}"\n`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureAssetsExist(rootDir, version) {
|
|
||||||
const releaseDir = path.join(rootDir, "release");
|
|
||||||
const files = [
|
|
||||||
`Real-Debrid-Downloader Setup ${version}.exe`,
|
|
||||||
`Real-Debrid-Downloader ${version}.exe`,
|
|
||||||
"latest.yml",
|
|
||||||
`Real-Debrid-Downloader Setup ${version}.exe.blockmap`
|
|
||||||
];
|
|
||||||
for (const fileName of files) {
|
|
||||||
const fullPath = path.join(releaseDir, fileName);
|
|
||||||
if (!fs.existsSync(fullPath)) {
|
|
||||||
throw new Error(`Missing release artifact: ${fullPath}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
patchLatestYml(releaseDir, version);
|
|
||||||
return { releaseDir, files };
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureNoTrackedChanges() {
|
|
||||||
const output = runCapture("git", ["status", "--porcelain"]);
|
|
||||||
const lines = output.split(/\r?\n/).filter(Boolean);
|
|
||||||
const tracked = lines.filter((line) => !line.startsWith("?? "));
|
|
||||||
if (tracked.length > 0) {
|
|
||||||
throw new Error(`Working tree has tracked changes:\n${tracked.join("\n")}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureTagMissing(tag) {
|
|
||||||
const result = spawnSync("git", ["rev-parse", "--verify", `refs/tags/${tag}`], {
|
|
||||||
cwd: process.cwd(),
|
|
||||||
stdio: "ignore"
|
|
||||||
});
|
|
||||||
if (result.status === 0) {
|
|
||||||
throw new Error(`Tag already exists: ${tag}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function createOrGetRelease(owner, repo, tag, authHeader, notes) {
|
|
||||||
const baseApi = `https://codeberg.org/api/v1/repos/${owner}/${repo}`;
|
|
||||||
const byTag = await apiRequest("GET", `${baseApi}/releases/tags/${encodeURIComponent(tag)}`, authHeader);
|
|
||||||
if (byTag.ok) {
|
|
||||||
return byTag.body;
|
|
||||||
}
|
|
||||||
const payload = {
|
|
||||||
tag_name: tag,
|
|
||||||
target_commitish: "main",
|
|
||||||
name: tag,
|
|
||||||
body: notes || `Release ${tag}`,
|
|
||||||
draft: false,
|
|
||||||
prerelease: false
|
|
||||||
};
|
|
||||||
const created = await apiRequest("POST", `${baseApi}/releases`, authHeader, JSON.stringify(payload));
|
|
||||||
if (!created.ok) {
|
|
||||||
throw new Error(`Failed to create release (${created.status}): ${JSON.stringify(created.body)}`);
|
|
||||||
}
|
|
||||||
return created.body;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function uploadReleaseAssets(owner, repo, releaseId, authHeader, releaseDir, files) {
|
|
||||||
const baseApi = `https://codeberg.org/api/v1/repos/${owner}/${repo}`;
|
|
||||||
for (const fileName of files) {
|
|
||||||
const filePath = path.join(releaseDir, fileName);
|
|
||||||
const fileData = fs.readFileSync(filePath);
|
|
||||||
const uploadUrl = `${baseApi}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`;
|
|
||||||
const response = await apiRequest("POST", uploadUrl, authHeader, fileData, "application/octet-stream");
|
|
||||||
if (response.ok) {
|
|
||||||
process.stdout.write(`Uploaded: ${fileName}\n`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (response.status === 409 || response.status === 422) {
|
|
||||||
process.stdout.write(`Skipped existing asset: ${fileName}\n`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
throw new Error(`Asset upload failed for ${fileName} (${response.status}): ${JSON.stringify(response.body)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
const rootDir = process.cwd();
|
|
||||||
const args = parseArgs(process.argv);
|
|
||||||
if (args.help) {
|
|
||||||
process.stdout.write("Usage: npm run release:codeberg -- <version> [release notes] [--dry-run]\n");
|
|
||||||
process.stdout.write("Example: npm run release:codeberg -- 1.4.42 \"- Small fixes\"\n");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const version = ensureVersionString(args.version);
|
|
||||||
const tag = `v${version}`;
|
|
||||||
const releaseNotes = args.notes || `- Release ${tag}`;
|
|
||||||
const { remote, owner, repo } = getCodebergRepo();
|
|
||||||
|
|
||||||
ensureNoTrackedChanges();
|
|
||||||
ensureTagMissing(tag);
|
|
||||||
updatePackageVersion(rootDir, version);
|
|
||||||
|
|
||||||
process.stdout.write(`Building release artifacts for ${tag}...\n`);
|
|
||||||
run(NPM_EXECUTABLE, ["run", "release:win"]);
|
|
||||||
const assets = ensureAssetsExist(rootDir, version);
|
|
||||||
|
|
||||||
if (args.dryRun) {
|
|
||||||
process.stdout.write(`Dry run complete. Assets exist for ${tag}.\n`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
run("git", ["add", "package.json"]);
|
|
||||||
run("git", ["commit", "-m", `Release ${tag}`]);
|
|
||||||
run("git", ["push", remote, "main"]);
|
|
||||||
run("git", ["tag", tag]);
|
|
||||||
run("git", ["push", remote, tag]);
|
|
||||||
|
|
||||||
const authHeader = getCodebergAuthHeader();
|
|
||||||
const baseRepoApi = `https://codeberg.org/api/v1/repos/${owner}/${repo}`;
|
|
||||||
const patchReleaseEnabled = await apiRequest("PATCH", baseRepoApi, authHeader, JSON.stringify({ has_releases: true }));
|
|
||||||
if (!patchReleaseEnabled.ok) {
|
|
||||||
throw new Error(`Failed to enable releases (${patchReleaseEnabled.status}): ${JSON.stringify(patchReleaseEnabled.body)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const release = await createOrGetRelease(owner, repo, tag, authHeader, releaseNotes);
|
|
||||||
await uploadReleaseAssets(owner, repo, release.id, authHeader, assets.releaseDir, assets.files);
|
|
||||||
|
|
||||||
process.stdout.write(`Release published: ${release.html_url}\n`);
|
|
||||||
}
|
|
||||||
|
|
||||||
main().catch((error) => {
|
|
||||||
process.stderr.write(`${String(error?.message || error)}\n`);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
@ -2,7 +2,15 @@ import fs from "node:fs";
|
|||||||
import path from "node:path";
|
import path from "node:path";
|
||||||
import { spawnSync } from "node:child_process";
|
import { spawnSync } from "node:child_process";
|
||||||
|
|
||||||
const NPM_EXECUTABLE = process.platform === "win32" ? "npm.cmd" : "npm";
|
const NPM_RELEASE_WIN = process.platform === "win32"
|
||||||
|
? {
|
||||||
|
command: process.env.ComSpec || "cmd.exe",
|
||||||
|
args: ["/d", "/s", "/c", "npm run release:win"]
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
command: "npm",
|
||||||
|
args: ["run", "release:win"]
|
||||||
|
};
|
||||||
|
|
||||||
function run(command, args, options = {}) {
|
function run(command, args, options = {}) {
|
||||||
const result = spawnSync(command, args, {
|
const result = spawnSync(command, args, {
|
||||||
@ -37,7 +45,8 @@ function runWithInput(command, args, input) {
|
|||||||
cwd: process.cwd(),
|
cwd: process.cwd(),
|
||||||
encoding: "utf8",
|
encoding: "utf8",
|
||||||
input,
|
input,
|
||||||
stdio: ["pipe", "pipe", "pipe"]
|
stdio: ["pipe", "pipe", "pipe"],
|
||||||
|
timeout: 10000
|
||||||
});
|
});
|
||||||
if (result.status !== 0) {
|
if (result.status !== 0) {
|
||||||
const stderr = String(result.stderr || "").trim();
|
const stderr = String(result.stderr || "").trim();
|
||||||
@ -95,15 +104,17 @@ function getGiteaRepo() {
|
|||||||
|
|
||||||
const preferredBase = normalizeBaseUrl(process.env.GITEA_BASE_URL || process.env.FORGEJO_BASE_URL || "https://git.24-music.de");
|
const preferredBase = normalizeBaseUrl(process.env.GITEA_BASE_URL || process.env.FORGEJO_BASE_URL || "https://git.24-music.de");
|
||||||
|
|
||||||
|
const preferredProtocol = preferredBase ? new URL(preferredBase).protocol : "https:";
|
||||||
|
|
||||||
for (const remote of remotes) {
|
for (const remote of remotes) {
|
||||||
try {
|
try {
|
||||||
const remoteUrl = runCapture("git", ["remote", "get-url", remote]);
|
const remoteUrl = runCapture("git", ["remote", "get-url", remote]);
|
||||||
const parsed = parseRemoteUrl(remoteUrl);
|
const parsed = parseRemoteUrl(remoteUrl);
|
||||||
const remoteBase = `https://${parsed.host}`.toLowerCase();
|
const remoteBase = `https://${parsed.host}`.toLowerCase();
|
||||||
if (preferredBase && remoteBase !== preferredBase.toLowerCase()) {
|
if (preferredBase && remoteBase !== preferredBase.toLowerCase().replace(/^http:/, "https:")) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
return { remote, ...parsed, baseUrl: `https://${parsed.host}` };
|
return { remote, ...parsed, baseUrl: `${preferredProtocol}//${parsed.host}` };
|
||||||
} catch {
|
} catch {
|
||||||
// try next remote
|
// try next remote
|
||||||
}
|
}
|
||||||
@ -179,7 +190,8 @@ function updatePackageVersion(rootDir, version) {
|
|||||||
const packagePath = path.join(rootDir, "package.json");
|
const packagePath = path.join(rootDir, "package.json");
|
||||||
const packageJson = JSON.parse(fs.readFileSync(packagePath, "utf8"));
|
const packageJson = JSON.parse(fs.readFileSync(packagePath, "utf8"));
|
||||||
if (String(packageJson.version || "") === version) {
|
if (String(packageJson.version || "") === version) {
|
||||||
throw new Error(`package.json is already at version ${version}`);
|
process.stdout.write(`package.json is already at version ${version}, skipping update.\n`);
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
packageJson.version = version;
|
packageJson.version = version;
|
||||||
fs.writeFileSync(packagePath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
fs.writeFileSync(packagePath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
||||||
@ -257,9 +269,31 @@ async function createOrGetRelease(baseApi, tag, authHeader, notes) {
|
|||||||
async function uploadReleaseAssets(baseApi, releaseId, authHeader, releaseDir, files) {
|
async function uploadReleaseAssets(baseApi, releaseId, authHeader, releaseDir, files) {
|
||||||
for (const fileName of files) {
|
for (const fileName of files) {
|
||||||
const filePath = path.join(releaseDir, fileName);
|
const filePath = path.join(releaseDir, fileName);
|
||||||
const fileData = fs.readFileSync(filePath);
|
const fileSize = fs.statSync(filePath).size;
|
||||||
const uploadUrl = `${baseApi}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`;
|
const uploadUrl = `${baseApi}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`;
|
||||||
const response = await apiRequest("POST", uploadUrl, authHeader, fileData, "application/octet-stream");
|
|
||||||
|
// Stream large files instead of loading them entirely into memory
|
||||||
|
const fileStream = fs.createReadStream(filePath);
|
||||||
|
const response = await fetch(uploadUrl, {
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
Accept: "application/json",
|
||||||
|
Authorization: authHeader,
|
||||||
|
"Content-Type": "application/octet-stream",
|
||||||
|
"Content-Length": String(fileSize)
|
||||||
|
},
|
||||||
|
body: fileStream,
|
||||||
|
duplex: "half"
|
||||||
|
});
|
||||||
|
|
||||||
|
const text = await response.text();
|
||||||
|
let parsed;
|
||||||
|
try {
|
||||||
|
parsed = text ? JSON.parse(text) : null;
|
||||||
|
} catch {
|
||||||
|
parsed = text;
|
||||||
|
}
|
||||||
|
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
process.stdout.write(`Uploaded: ${fileName}\n`);
|
process.stdout.write(`Uploaded: ${fileName}\n`);
|
||||||
continue;
|
continue;
|
||||||
@ -268,7 +302,7 @@ async function uploadReleaseAssets(baseApi, releaseId, authHeader, releaseDir, f
|
|||||||
process.stdout.write(`Skipped existing asset: ${fileName}\n`);
|
process.stdout.write(`Skipped existing asset: ${fileName}\n`);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
throw new Error(`Asset upload failed for ${fileName} (${response.status}): ${JSON.stringify(response.body)}`);
|
throw new Error(`Asset upload failed for ${fileName} (${response.status}): ${JSON.stringify(parsed)}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -290,17 +324,18 @@ async function main() {
|
|||||||
|
|
||||||
ensureNoTrackedChanges();
|
ensureNoTrackedChanges();
|
||||||
ensureTagMissing(tag);
|
ensureTagMissing(tag);
|
||||||
|
|
||||||
|
if (args.dryRun) {
|
||||||
|
process.stdout.write(`Dry run: would release ${tag}. No changes made.\n`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
updatePackageVersion(rootDir, version);
|
updatePackageVersion(rootDir, version);
|
||||||
|
|
||||||
process.stdout.write(`Building release artifacts for ${tag}...\n`);
|
process.stdout.write(`Building release artifacts for ${tag}...\n`);
|
||||||
run(NPM_EXECUTABLE, ["run", "release:win"]);
|
run(NPM_RELEASE_WIN.command, NPM_RELEASE_WIN.args);
|
||||||
const assets = ensureAssetsExist(rootDir, version);
|
const assets = ensureAssetsExist(rootDir, version);
|
||||||
|
|
||||||
if (args.dryRun) {
|
|
||||||
process.stdout.write(`Dry run complete. Assets exist for ${tag}.\n`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
run("git", ["add", "package.json"]);
|
run("git", ["add", "package.json"]);
|
||||||
run("git", ["commit", "-m", `Release ${tag}`]);
|
run("git", ["commit", "-m", `Release ${tag}`]);
|
||||||
run("git", ["push", repo.remote, "main"]);
|
run("git", ["push", repo.remote, "main"]);
|
||||||
|
|||||||
@ -1,83 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
|
|
||||||
// --- Release configuration -------------------------------------------------
// SECURITY: a Codeberg API token was previously hard-coded here. A token
// committed to source control must be treated as compromised and revoked.
// The token is now read from the environment instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) {
  console.warn("CODEBERG_TOKEN is not set; Codeberg API requests will fail with 401.");
}
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.5.97";

// Markdown body attached to the Codeberg release for this tag.
const RELEASE_BODY = `## What's Changed in v1.5.97

### Bug Fixes
- **Fix "Ausstehend" / "Warten auf Parts" label flicker during hybrid extraction**: Previously, every hybrid extraction run would reset ALL non-extracted completed items to either "Entpacken - Ausstehend" or "Entpacken - Warten auf Parts", causing visible flickering between status labels. Now only items whose archives are actually in the current \`readyArchives\` set get "Ausstehend"; all other items correctly show "Warten auf Parts" until their archive is genuinely ready for extraction. This eliminates the misleading "Ausstehend" label on items that aren't being extracted in the current run.
`;
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API.
 *
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} urlPath - Path below /api/v1, e.g. "/repos/owner/repo/releases".
 * @param {string|object} [body] - Optional request body; objects are JSON-stringified.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function request(method, urlPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${urlPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      let data = "";
      res.on("data", (c) => (data += c));
      res.on("end", () => {
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${data}`));
          return;
        }
        // Fix: JSON.parse can throw, and a throw inside this event callback
        // would be an uncaught exception (crashing the process) instead of a
        // promise rejection — guard it explicitly.
        try {
          resolve(JSON.parse(data || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${data}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(typeof body === "string" ? body : JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
/**
 * Upload one file as a release asset via the Codeberg API.
 *
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} name - Asset name to use on the release.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function uploadAsset(releaseId, filePath, name) {
  return new Promise((resolve, reject) => {
    // readFileSync throwing here is fine: the Promise executor converts the
    // throw into a rejection.
    const fileBuffer = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(name)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": fileBuffer.length },
    };
    const req = https.request(opts, (res) => {
      let data = "";
      res.on("data", (c) => (data += c));
      res.on("end", () => {
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${data}`));
          return;
        }
        // Fix: a JSON.parse throw inside this event callback would crash the
        // process instead of rejecting the promise.
        try {
          resolve(JSON.parse(data || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${data}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(fileBuffer);
    req.end();
  });
}
|
|
||||||
/**
 * Entry point: creates the Codeberg release for TAG and uploads every build
 * artifact found in the local "release" directory. Missing files are skipped
 * with a warning rather than failing the run.
 */
async function main() {
  console.log("Creating release...");
  const payload = {
    tag_name: TAG, name: TAG, body: RELEASE_BODY, draft: false, prerelease: false,
  };
  const release = await request("POST", `/repos/${OWNER}/${REPO}/releases`, payload);
  console.log(`Release created: id=${release.id}`);

  const releaseDir = path.resolve("release");
  // NOTE(review): the Setup .exe entry uses dashes while its .blockmap uses
  // spaces — presumably only one matches the name the build actually produces
  // and the other gets skipped; confirm against the build output.
  const assets = [
    { file: `Real-Debrid-Downloader-Setup-1.5.97.exe`, name: `Real-Debrid-Downloader-Setup-1.5.97.exe` },
    { file: `Real-Debrid-Downloader 1.5.97.exe`, name: `Real-Debrid-Downloader-1.5.97.exe` },
    { file: `latest.yml`, name: `latest.yml` },
    { file: `Real-Debrid-Downloader Setup 1.5.97.exe.blockmap`, name: `Real-Debrid-Downloader-Setup-1.5.97.exe.blockmap` },
  ];

  for (const entry of assets) {
    const assetPath = path.join(releaseDir, entry.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP: ${entry.file}`);
      continue;
    }
    const sizeMb = (fs.statSync(assetPath).size / 1048576).toFixed(1);
    console.log(`Uploading ${entry.name} (${sizeMb} MB)...`);
    await uploadAsset(release.id, assetPath, entry.name);
    console.log(` done`);
  }
  console.log("Done!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
@ -1,131 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
|
|
||||||
// --- Release configuration -------------------------------------------------
// SECURITY: a Codeberg API token was previously hard-coded here. A token
// committed to source control must be treated as compromised and revoked.
// The token is now read from the environment instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) {
  console.warn("CODEBERG_TOKEN is not set; Codeberg API requests will fail with 401.");
}
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.10";
|
|
||||||
const BODY = `## What's Changed in v1.6.10
|
|
||||||
|
|
||||||
### Critical Bug Fixes
|
|
||||||
|
|
||||||
#### Post-Process Slot Counter Race Condition (multiple packages extracting simultaneously)
|
|
||||||
- **Bug:** After stopping and restarting a session, the internal post-processing slot counter could go **negative**. This allowed multiple packages to extract simultaneously instead of the intended one-at-a-time sequential extraction.
|
|
||||||
- **Root cause:** \`stop()\` resets the active counter to 0 and resolves all waiting promises. The resolved waiters then increment the counter (+N), but ALL tasks (including the original active one) still decrement it in their cleanup (-(N+1)), resulting in a negative value. On the next session start, multiple packages pass the \`active < maxConcurrent\` check.
|
|
||||||
- **Fix:** Added a guard in \`releasePostProcessSlot()\` to prevent the counter from going below zero.
|
|
||||||
|
|
||||||
#### Extraction Resume State / Progress Sync Bug (first episode stays "Entpacken - Ausstehend")
|
|
||||||
- **Bug:** When an archive was previously extracted in a hybrid extraction round and recorded in the resume state, but the app was stopped before the item's "Entpackt - Done" label was persisted, the next full extraction would skip the archive (correctly, via resume state) but never update the item's UI label. The item would permanently show "Entpacken - Ausstehend" while all other episodes showed "Entpackt - Done".
|
|
||||||
- **Fix:** \`extractPackageArchives()\` now emits progress events with \`archivePercent: 100\` for archives that are already in the resume state, so the caller's \`onProgress\` handler marks those items as "Entpackt - Done" immediately.
|
|
||||||
|
|
||||||
#### Abort Labels Applied to Non-Extracting Items
|
|
||||||
- **Bug:** When stopping a session, \`abortPostProcessing()\` set ALL completed items with any "Entpacken" label to "Entpacken abgebrochen (wird fortgesetzt)" — including items that were merely "Entpacken - Ausstehend" or "Entpacken - Warten auf Parts" and had never started extracting.
|
|
||||||
- **Fix:** The abort label is now only applied to items with active extraction progress (e.g., "Entpacken 64%"), not to pending items.
|
|
||||||
|
|
||||||
#### Missing Package Status Update in Hybrid Extraction Branches
|
|
||||||
- **Bug:** \`triggerPendingExtractions()\` and \`recoverPostProcessingOnStartup()\` did not set \`pkg.status = "queued"\` in their hybrid extraction branches, unlike the full extraction branches. This could cause the package status bar to show incorrect state during hybrid extraction.
|
|
||||||
- **Fix:** Both hybrid branches now correctly set \`pkg.status = "queued"\` before triggering extraction.
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — Slot counter guard, abort label fix, hybrid pkg.status
|
|
||||||
- \`src/main/extractor.ts\` — Resume state progress emission
|
|
||||||
- \`package.json\` — Version bump to 1.6.10
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg repo API.
 *
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} apiPath - Path below /api/v1/repos/{OWNER}/{REPO}, e.g. "/releases".
 * @param {object} [body] - Optional JSON-serializable request body.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(options, (res) => {
      let data = "";
      res.on("data", (chunk) => (data += chunk));
      res.on("end", () => {
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode}: ${data}`));
          return;
        }
        // Fixes two crashes in the original:
        // 1. An empty success body (e.g. 204 No Content) made JSON.parse("") throw.
        // 2. Any JSON.parse throw inside this event callback was an uncaught
        //    exception (killing the process) instead of a promise rejection.
        try {
          resolve(JSON.parse(data || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${data}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
/**
 * Upload one file as a release asset via the Codeberg API.
 *
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Asset name to use on the release.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // readFileSync throwing here is fine: the Promise executor converts the
    // throw into a rejection.
    const fileBuffer = fs.readFileSync(filePath);
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": fileBuffer.length,
      },
    };
    const req = https.request(options, (res) => {
      let data = "";
      res.on("data", (chunk) => (data += chunk));
      res.on("end", () => {
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode}: ${data}`));
          return;
        }
        console.log(` Uploaded: ${fileName}`);
        // Fixes two crashes in the original: JSON.parse("") on an empty body,
        // and a parse throw inside this callback being uncaught instead of
        // rejecting the promise.
        try {
          resolve(JSON.parse(data || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${data}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(fileBuffer);
    req.end();
  });
}
|
|
||||||
/**
 * Entry point: creates the v1.6.10 release on Codeberg and uploads all build
 * artifacts found in the local "release" directory. Missing files are skipped
 * with a warning rather than failing the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", "/releases", {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.html_url}`);

  const releaseDir = path.resolve("release");
  // NOTE(review): the Setup .exe entry uses dashes while its .blockmap uses
  // spaces — presumably only one matches the name the build actually produces;
  // confirm against the build output.
  const assets = [
    { file: `Real-Debrid-Downloader-Setup-1.6.10.exe`, name: `Real-Debrid-Downloader-Setup-1.6.10.exe` },
    { file: `Real-Debrid-Downloader 1.6.10.exe`, name: `Real-Debrid-Downloader-1.6.10.exe` },
    { file: `latest.yml`, name: `latest.yml` },
    { file: `Real-Debrid-Downloader Setup 1.6.10.exe.blockmap`, name: `Real-Debrid-Downloader-Setup-1.6.10.exe.blockmap` },
  ];

  for (const asset of assets) {
    const candidate = path.join(releaseDir, asset.file);
    if (!fs.existsSync(candidate)) {
      console.warn(` SKIP (not found): ${asset.file}`);
      continue;
    }
    await uploadAsset(release.id, candidate, asset.name);
  }

  console.log("Done!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
@ -1,134 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve the directory containing this ES module (ESM has no built-in __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// SECURITY: a Codeberg API token was previously hard-coded here. A token
// committed to source control must be treated as compromised and revoked.
// The token is now read from the environment instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) {
  console.warn("CODEBERG_TOKEN is not set; Codeberg API requests will fail with 401.");
}
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.11";
|
|
||||||
const BODY = `## What's Changed in v1.6.11
|
|
||||||
|
|
||||||
### New Feature: Extract While Stopped
|
|
||||||
- **New setting "Entpacken auch ohne laufende Session"** (default: enabled): Extractions now continue running even after clicking Stop, and pending extractions are automatically triggered on app startup without needing to click Start
|
|
||||||
- This means downloaded archives get extracted immediately regardless of session state — no more forgotten pending extractions after restart
|
|
||||||
|
|
||||||
### Bug Fixes
|
|
||||||
|
|
||||||
#### Update Installation Safety
|
|
||||||
- **Stop active downloads before installing updates**: Previously, launching an update while downloads were active could cause data corruption. The app now gracefully stops all downloads before spawning the installer
|
|
||||||
- **Increased quit timeout** from 800ms to 2500ms after launching the update installer, giving the OS more time to start the setup process before the app exits
|
|
||||||
|
|
||||||
#### Extraction Resume Progress (v1.6.9 fix)
|
|
||||||
- **Fixed "Entpacken - Ausstehend" for already-extracted archives**: When resuming extraction (e.g. after a crash), archives that were already successfully extracted in a previous run now correctly show as "Entpackt - Done" immediately, instead of staying stuck as "Entpacken - Ausstehend" until all remaining archives finish
|
|
||||||
- Root cause: The resume state correctly tracked completed archives, but no UI progress event was emitted for them, leaving their items with a stale "pending" label
|
|
||||||
|
|
||||||
#### Extraction Abort Label Accuracy (v1.6.9 fix)
|
|
||||||
- **"Entpacken abgebrochen" now only applied to actively extracting items**: Previously, clicking Stop would mark ALL extraction-related items as "Entpacken abgebrochen (wird fortgesetzt)" — even items that were just queued ("Ausstehend") or waiting for parts ("Warten auf Parts") and had never started extracting. Now only items with actual extraction progress get the "abgebrochen" label
|
|
||||||
|
|
||||||
#### Hybrid Extraction Package Status (v1.6.9 fix)
|
|
||||||
- **Fixed package status not updating for hybrid extraction recovery**: When recovering pending hybrid extractions on startup or after pause toggle, the package status is now correctly set to "queued" so the UI reflects that extraction work is pending
|
|
||||||
|
|
||||||
#### Parallel Extraction Slot Counter (v1.6.10 fix)
|
|
||||||
- **Fixed multiple packages extracting simultaneously despite maxParallelExtract=1**: The post-processing slot counter could go negative after Stop was clicked (stop resets counter to 0, but aborted tasks still decrement in their finally blocks). On the next session start, the negative counter let multiple packages pass the concurrency check. Added a guard to prevent the counter from going below zero
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — autoExtractWhenStopped logic in stop(), triggerIdleExtractions(), slot counter guard
|
|
||||||
- \`src/main/app-controller.ts\` — Stop downloads before update, trigger idle extractions on startup
|
|
||||||
- \`src/main/main.ts\` — Increased update quit timeout
|
|
||||||
- \`src/main/extractor.ts\` — Emit progress for resumed archives
|
|
||||||
- \`src/main/constants.ts\` — New default setting
|
|
||||||
- \`src/shared/types.ts\` — autoExtractWhenStopped type
|
|
||||||
- \`src/renderer/App.tsx\` — Settings toggle UI
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API.
 *
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/owner/repo/releases".
 * @param {object} [body] - Optional JSON-serializable request body.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Fix: a JSON.parse throw (malformed body) inside this event callback
        // was an uncaught exception instead of a promise rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
/**
 * Upload one file as a release asset via the Codeberg API.
 *
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Asset name to use on the release.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // readFileSync throwing here is fine: the Promise executor converts the
    // throw into a rejection.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": data.length,
      },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Fix: a JSON.parse throw inside this event callback was an uncaught
        // exception instead of a promise rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
/**
 * Entry point: creates the v1.6.11 release on Codeberg, then uploads every
 * expected build artifact from the repo-local "release" directory.
 * Any rejection propagates to the .catch handler below, which exits non-zero.
 */
async function main() {
  console.log("Creating release...");
  // Create the release; apiRequest rejects on HTTP status >= 400.
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  // Artifacts are expected one directory above this script, in "release/".
  const releaseDir = path.join(__dirname, "..", "release");
  // NOTE(review): the Setup .exe entry uses dashes but its .blockmap entry
  // uses spaces in the on-disk name; presumably only one matches the actual
  // build output and the other is skipped below — confirm against the build.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.11.exe", name: "Real-Debrid-Downloader-Setup-1.6.11.exe" },
    { file: "Real-Debrid-Downloader 1.6.11.exe", name: "Real-Debrid-Downloader-1.6.11.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.11.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.11.exe.blockmap" },
  ];

  for (const a of assets) {
    const p = path.join(releaseDir, a.file);
    // Missing artifacts are skipped with a warning rather than failing the run.
    if (!fs.existsSync(p)) { console.warn(`SKIP ${a.file}`); continue; }
    console.log(`Uploading ${a.name} ...`);
    await uploadAsset(release.id, p, a.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

// Surface any failure and exit non-zero so calling shells / CI see the error.
main().catch((e) => { console.error(e); process.exit(1); });
@ -1,121 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve the directory containing this ES module (ESM has no built-in __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// SECURITY: a Codeberg API token was previously hard-coded here. A token
// committed to source control must be treated as compromised and revoked.
// The token is now read from the environment instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) {
  console.warn("CODEBERG_TOKEN is not set; Codeberg API requests will fail with 401.");
}
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.12";
|
|
||||||
const BODY = `## What's Changed in v1.6.12
|
|
||||||
|
|
||||||
### Bug Fixes (found via code review)
|
|
||||||
|
|
||||||
#### Package status incorrectly set to "downloading" during idle extraction
|
|
||||||
- When extraction ran while the session was stopped (via the new \`autoExtractWhenStopped\` feature), \`handlePackagePostProcessing\` would set \`pkg.status = "downloading"\` after hybrid extraction completed — even though no downloads were active
|
|
||||||
- This caused packages to appear as "downloading" in the UI when the session was stopped, which was confusing and semantically incorrect
|
|
||||||
- **Fix:** The status derivation now checks \`this.session.running\` in addition to \`pkg.enabled\` and \`!this.session.paused\`. When the session is stopped, packages are set to \`"queued"\` instead of \`"downloading"\`
|
|
||||||
|
|
||||||
#### Backup import leaves orphan extraction tasks running
|
|
||||||
- When importing a backup with \`autoExtractWhenStopped = true\`, the \`importBackup()\` method called \`stop()\` which no longer aborts extraction tasks (by design). This meant extraction tasks from the **old** session continued running in the background, potentially mutating stale in-memory state while the restored session was being saved to disk
|
|
||||||
- **Fix:** \`importBackup()\` now explicitly calls \`abortAllPostProcessing()\` after \`stop()\` to ensure all extraction tasks from the old session are terminated before the new session is loaded
|
|
||||||
- Added public \`abortAllPostProcessing()\` method to DownloadManager for external callers that need a full extraction abort regardless of settings
|
|
||||||
|
|
||||||
#### Corrected misleading comment in installUpdate()
|
|
||||||
- The comment in \`installUpdate()\` claimed it stops "downloads/extractions" but with \`autoExtractWhenStopped\`, extractions may continue briefly until \`prepareForShutdown()\` runs during app quit. Updated comment to reflect actual behavior.
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — Fixed \`pkg.status\` derivation in \`handlePackagePostProcessing\`, added \`abortAllPostProcessing()\`
|
|
||||||
- \`src/main/app-controller.ts\` — \`importBackup()\` now aborts all post-processing, updated \`installUpdate()\` comment
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API.
 *
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/owner/repo/releases".
 * @param {object} [body] - Optional JSON-serializable request body.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Fix: a JSON.parse throw (malformed body) inside this event callback
        // was an uncaught exception instead of a promise rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
/**
 * Upload one file as a release asset via the Codeberg API.
 *
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Asset name to use on the release.
 * @returns {Promise<object>} Parsed JSON response body ({} for an empty body).
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // readFileSync throwing here is fine: the Promise executor converts the
    // throw into a rejection.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": data.length,
      },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Fix: a JSON.parse throw inside this event callback was an uncaught
        // exception instead of a promise rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
/**
 * Entry point: creates the v1.6.12 release on Codeberg, then uploads every
 * expected build artifact from the repo-local "release" directory.
 * Any rejection propagates to the .catch handler below, which exits non-zero.
 */
async function main() {
  console.log("Creating release...");
  // Create the release; apiRequest rejects on HTTP status >= 400.
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  // Artifacts are expected one directory above this script, in "release/".
  const releaseDir = path.join(__dirname, "..", "release");
  // NOTE(review): the Setup .exe entry uses dashes but its .blockmap entry
  // uses spaces in the on-disk name; presumably only one matches the actual
  // build output and the other is skipped below — confirm against the build.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.12.exe", name: "Real-Debrid-Downloader-Setup-1.6.12.exe" },
    { file: "Real-Debrid-Downloader 1.6.12.exe", name: "Real-Debrid-Downloader-1.6.12.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.12.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.12.exe.blockmap" },
  ];

  for (const a of assets) {
    const p = path.join(releaseDir, a.file);
    // Missing artifacts are skipped with a warning rather than failing the run.
    if (!fs.existsSync(p)) { console.warn(`SKIP ${a.file}`); continue; }
    console.log(`Uploading ${a.name} ...`);
    await uploadAsset(release.id, p, a.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

// Surface any failure and exit non-zero so calling shells / CI see the error.
main().catch((e) => { console.error(e); process.exit(1); });
@ -1,118 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve the directory containing this ES module (ESM has no built-in __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// SECURITY: a Codeberg API token was previously hard-coded here. A token
// committed to source control must be treated as compromised and revoked.
// The token is now read from the environment instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) {
  console.warn("CODEBERG_TOKEN is not set; Codeberg API requests will fail with 401.");
}
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.13";
|
|
||||||
const BODY = `## What's Changed in v1.6.13
|
|
||||||
|
|
||||||
### Bug Fixes
|
|
||||||
|
|
||||||
#### History entries not being created
|
|
||||||
- **Fixed: Nothing was being added to the download history** regardless of whether using Start or Stop mode
|
|
||||||
- Root cause: History entries were only created inside \`removePackageFromSession()\`, which only runs when the cleanup policy removes the package from the session. With \`completedCleanupPolicy = "never"\` (the default), packages are never removed, so history was never populated. With \`"immediate"\` policy, items were removed one-by-one leaving an empty array when the package itself was removed — also resulting in no history entry
|
|
||||||
- **Fix:** History entries are now recorded directly in \`handlePackagePostProcessing()\` when a package completes extraction (or download without extraction). A deduplication Set (\`historyRecordedPackages\`) prevents double entries when the cleanup policy also removes the package
|
|
||||||
- The \`removePackageFromSession()\` history logic now only fires for manual deletions (reason = "deleted"), not for completions which are already tracked
|
|
||||||
|
|
||||||
#### UI delay after extraction completes (20-30 seconds)
|
|
||||||
- **Fixed: Package stayed visible for 20-30 seconds after extraction finished** before disappearing or showing "Done" status
|
|
||||||
- Root cause: After extraction set \`pkg.status = "completed"\`, there was no \`emitState()\` call. The next UI update only happened after \`autoRenameExtractedVideoFiles()\`, \`collectMkvFilesToLibrary()\`, and \`applyPackageDoneCleanup()\` all completed — which could take 20-30 seconds for large packages with MKV collection or renaming
|
|
||||||
- **Fix:** Added an \`emitState()\` call immediately after the package status is set (completed/failed), before the rename and MKV collection steps. The UI now reflects the extraction result instantly while post-extraction steps run in the background
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — New \`recordPackageHistory()\` method, \`historyRecordedPackages\` deduplication Set, \`emitState()\` after extraction completion, refactored \`removePackageFromSession()\` history logic
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 * @param {string} method - HTTP verb, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/o/r/releases".
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const payload = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${payload}`));
          return;
        }
        resolve(JSON.parse(payload || "{}"));
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary artifact as an attachment of an existing release.
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name shown on the release page.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // Whole file is buffered in memory; fine for installer-sized artifacts.
    const payload = fs.readFileSync(filePath);
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": payload.length,
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const text = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        resolve(JSON.parse(text || "{}"));
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Publish the v1.6.13 release: create it via the API, then upload every
 * build artifact present in the local release/ directory. Artifacts that
 * were not built are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  // Installer, portable exe, auto-updater metadata, and delta blockmap.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.13.exe", name: "Real-Debrid-Downloader-Setup-1.6.13.exe" },
    { file: "Real-Debrid-Downloader 1.6.13.exe", name: "Real-Debrid-Downloader-1.6.13.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.13.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.13.exe.blockmap" },
  ];

  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,98 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no CommonJS __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// FIXME(security): a live API token was hard-coded here and is therefore
// public — revoke it on Codeberg. Prefer the CODEBERG_TOKEN environment
// variable; the literal is kept only as a fallback until the token is rotated.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.14"; // git tag and release title

// Markdown release notes posted as the release body.
const BODY = `## What's Changed in v1.6.14

### Improvements

#### Better Auto-Rename Logging
- Added detailed logging for the auto-rename feature to help diagnose cases where renaming doesn't work as expected
- New log messages show: how many video files were found, which files couldn't be matched to a target name (with folder candidates), and successful renames with source → target mapping
- This makes it much easier to identify why a specific file wasn't renamed (wrong folder name, missing episode token, file already exists, etc.)

### Tests
- Added test case for Riviera S02 with single-digit episode format (\`s02e2\` → \`S02E02\`) to verify the rename logic handles non-zero-padded episode numbers correctly (98 auto-rename tests now)

### Files Changed
- \`src/main/download-manager.ts\` — Added logging in \`autoRenameExtractedVideoFiles()\`
- \`tests/auto-rename.test.ts\` — New Riviera S02 test case
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 * @param {string} method - HTTP verb, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/o/r/releases".
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const payload = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${payload}`));
          return;
        }
        resolve(JSON.parse(payload || "{}"));
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary artifact as an attachment of an existing release.
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name shown on the release page.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // Whole file is buffered in memory; fine for installer-sized artifacts.
    const payload = fs.readFileSync(filePath);
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": payload.length,
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const text = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        resolve(JSON.parse(text || "{}"));
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Publish the v1.6.14 release: create it via the API, then upload every
 * build artifact present in the local release/ directory. Artifacts that
 * were not built are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  // Installer, portable exe, auto-updater metadata, and delta blockmap.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.14.exe", name: "Real-Debrid-Downloader-Setup-1.6.14.exe" },
    { file: "Real-Debrid-Downloader 1.6.14.exe", name: "Real-Debrid-Downloader-1.6.14.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.14.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.14.exe.blockmap" },
  ];

  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,101 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no CommonJS __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// FIXME(security): a live API token was hard-coded here and is therefore
// public — revoke it on Codeberg. Prefer the CODEBERG_TOKEN environment
// variable; the literal is kept only as a fallback until the token is rotated.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.15"; // git tag and release title

// Markdown release notes posted as the release body.
const BODY = `## What's Changed in v1.6.15

### Bug Fixes

#### Session not transitioning to "Stop" when downloads finish (autoExtractWhenStopped)
- **Fixed: Session stayed in "running" state indefinitely after all downloads completed** when \`autoExtractWhenStopped\` was enabled
- Root cause: The scheduler's finalization check required \`packagePostProcessTasks.size === 0\` before calling \`finishRun()\`. With \`autoExtractWhenStopped\`, post-processing (extraction) tasks continue running after downloads finish, so the condition was never satisfied — the session never switched to "stop"
- **Fix:** When \`autoExtractWhenStopped\` is enabled, the scheduler now calls \`finishRun()\` as soon as all downloads are complete (no queued/active/delayed items), regardless of whether post-processing tasks are still running. Extraction continues in the background as idle extraction, exactly as intended by the setting. When \`autoExtractWhenStopped\` is disabled, the previous behavior is preserved (session waits for both downloads and extraction to complete)

#### Packages cannot be collapsed during extraction
- **Fixed: Clicking collapse on a package caused it to re-expand after 1-2 seconds** during extraction
- Same issue with the footer "Alle einklappen" button — packages would immediately re-expand
- Root cause: The auto-expand \`useEffect\` ran on every state update (\`emitState()\` call) and forcibly re-expanded any package with items in "Entpacken -" status. With the more frequent \`emitState()\` calls added in v1.6.13, this fired constantly, making it impossible for users to keep packages collapsed
- **Fix:** Added a \`useRef(Set)\` to track which packages have already been auto-expanded. Each package is now only auto-expanded **once** when extraction starts. If the user manually collapses it, it stays collapsed. The tracking is reset when the package is no longer extracting, so a future extraction cycle will auto-expand it again

### Files Changed
- \`src/main/download-manager.ts\` — Scheduler finalization condition now respects \`autoExtractWhenStopped\` setting
- \`src/renderer/App.tsx\` — Auto-expand useEffect now uses ref-based tracking for one-time expansion per extraction cycle
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 * @param {string} method - HTTP verb, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/o/r/releases".
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const payload = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${payload}`));
          return;
        }
        resolve(JSON.parse(payload || "{}"));
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary artifact as an attachment of an existing release.
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name shown on the release page.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // Whole file is buffered in memory; fine for installer-sized artifacts.
    const payload = fs.readFileSync(filePath);
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": payload.length,
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const text = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        resolve(JSON.parse(text || "{}"));
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Publish the v1.6.15 release: create it via the API, then upload every
 * build artifact present in the local release/ directory. Artifacts that
 * were not built are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  // Installer, portable exe, auto-updater metadata, and delta blockmap.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.15.exe", name: "Real-Debrid-Downloader-Setup-1.6.15.exe" },
    { file: "Real-Debrid-Downloader 1.6.15.exe", name: "Real-Debrid-Downloader-1.6.15.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.15.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.15.exe.blockmap" },
  ];

  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,131 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no CommonJS __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// FIXME(security): a live API token was hard-coded here and is therefore
// public — revoke it on Codeberg. Prefer the CODEBERG_TOKEN environment
// variable; the literal is kept only as a fallback until the token is rotated.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.16"; // git tag and release title

// Markdown release notes posted as the release body.
const BODY = `## What's Changed in v1.6.16

### Bug Fixes (Code Review)

This release fixes 11 bugs found through a comprehensive code review of the download manager, renderer, IPC layer, and app controller.

#### Critical: \`finishRun()\` clears state needed by still-running extraction tasks
- When \`autoExtractWhenStopped\` is enabled, the scheduler calls \`finishRun()\` as soon as downloads complete. \`finishRun()\` was clearing \`runPackageIds\` and \`runCompletedPackages\` immediately, but still-running extraction tasks needed those sets to update \`runCompletedPackages\` when they finish
- **Fix:** \`runPackageIds\` and \`runCompletedPackages\` are now only cleared when no post-processing tasks are running. Otherwise they are preserved until the next \`start()\` call

#### Critical: Post-process slot counter corrupted after stop+restart
- \`acquirePostProcessSlot()\` increments \`packagePostProcessActive\` after the awaited promise resolves. But \`stop()\` resets the counter to 0 while waiters are pending. When \`stop()\` resolves all waiters, the increment fires afterward, pushing the counter from 0→1 before any new task runs — causing the first extraction in the next session to unnecessarily queue
- **Fix:** Added a guard that only increments if below \`maxConcurrent\` after the await, matching the existing guard in \`releasePostProcessSlot()\`

#### Important: \`resetPackage()\` skips history on re-completion
- \`historyRecordedPackages\` was never cleared for a package when it was reset. If a user reset a package and it completed again, \`recordPackageHistory()\` would find it already in the set and skip recording — no history entry was created for the second run
- **Fix:** \`resetPackage()\` now calls \`this.historyRecordedPackages.delete(packageId)\`

#### Important: Context menu "Ausgewählte Downloads starten" sends non-startable items
- The context menu button filtered items by startable status (\`queued\`/\`cancelled\`/\`reconnect_wait\`) only for the visibility check, but the click handler sent ALL selected item IDs to \`startItems()\`, including items already downloading or completed
- **Fix:** Click handler now filters item IDs to only startable statuses before sending to \`startItems()\`

#### Important: \`importBackup\` persist race condition
- After calling \`stop()\` + \`abortAllPostProcessing()\`, deferred \`persistSoon()\` timers from those operations could fire and overwrite the restored session file on disk with the old in-memory session
- **Fix:** \`clearPersistTimer()\` is now called after abort to cancel any pending persist timers. Made it a public method for this purpose

#### Important: Auto-Resume on start never fires
- \`autoResumePending\` was set in an async \`getStartConflicts().then()\` callback, but the \`onState\` setter (which checks the flag) always ran synchronously before the promise resolved. The flag was always \`false\` when checked, so auto-resume never triggered
- **Fix:** The \`.then()\` callback now checks if \`onStateHandler\` is already set and starts the download directly in that case, instead of just setting a flag

#### IPC validation hardening
- \`START_PACKAGES\`, \`SKIP_ITEMS\`, \`RESET_ITEMS\` handlers used only \`Array.isArray()\` instead of \`validateStringArray()\`, missing element-type validation and null guards
- \`SET_PACKAGE_PRIORITY\` accepted any string value instead of validating against \`"high" | "normal" | "low"\`
- **Fix:** All handlers now use \`validateStringArray()\` with null guards, and priority is enum-validated

#### History context menu stale closure
- \`removeSelected()\` in the history context menu read \`selectedHistoryIds\` directly in the \`.then()\` callback instead of using a captured snapshot. If selection changed during the async IPC round-trip, the wrong entries could be filtered
- **Fix:** Captured the set into a local \`idSet\` before the async call, matching the pattern already used by the toolbar delete button

#### Update quit timer not cancellable
- \`installUpdate\` set a 2.5-second \`setTimeout\` for \`app.quit()\` but stored no reference to it. If the user manually closed the window during that time, \`before-quit\` would fire normally, then the timer would call \`app.quit()\` again, potentially causing double shutdown cleanup
- **Fix:** Timer reference is now stored and cleared in the \`before-quit\` handler

### Files Changed
- \`src/main/download-manager.ts\` — \`finishRun()\` conditional clear, \`acquirePostProcessSlot()\` guard, \`resetPackage()\` history fix, \`clearPersistTimer()\` public
- \`src/main/app-controller.ts\` — \`importBackup\` persist timer cancel, \`autoResumePending\` race fix
- \`src/main/main.ts\` — IPC validation hardening, update quit timer cancel, priority enum validation
- \`src/renderer/App.tsx\` — Context menu item filter, history stale closure fix
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 * @param {string} method - HTTP verb, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/o/r/releases".
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const payload = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${payload}`));
          return;
        }
        resolve(JSON.parse(payload || "{}"));
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary artifact as an attachment of an existing release.
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name shown on the release page.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // Whole file is buffered in memory; fine for installer-sized artifacts.
    const payload = fs.readFileSync(filePath);
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": payload.length,
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const text = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        resolve(JSON.parse(text || "{}"));
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Publish the v1.6.16 release: create it via the API, then upload every
 * build artifact present in the local release/ directory. Artifacts that
 * were not built are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  // Installer, portable exe, auto-updater metadata, and delta blockmap.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.16.exe", name: "Real-Debrid-Downloader-Setup-1.6.16.exe" },
    { file: "Real-Debrid-Downloader 1.6.16.exe", name: "Real-Debrid-Downloader-1.6.16.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.16.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.16.exe.blockmap" },
  ];

  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,123 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no CommonJS __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// FIXME(security): a live API token was hard-coded here and is therefore
// public — revoke it on Codeberg. Prefer the CODEBERG_TOKEN environment
// variable; the literal is kept only as a fallback until the token is rotated.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.17"; // git tag and release title

// Markdown release notes posted as the release body.
// BUG FIX: this version used \\\` (an escaped backslash followed by an escaped
// backtick) around inline-code spans, which published literal backslashes in
// the release notes; every other version uses \` — normalized to \` here.
const BODY = `## What's Changed in v1.6.17

### Bug Fixes (Deep Code Review — Round 2)

This release fixes 9 additional bugs found through a second comprehensive code review covering the download manager, renderer, extractor, and storage layer.

#### Critical: \`autoExtractWhenStopped\` setting silently dropped on save/load
- \`normalizeSettings()\` in \`storage.ts\` was missing the \`autoExtractWhenStopped\` field. Every time settings were saved or loaded, the field was stripped — effectively hardcoding the feature to "off" after the first persistence cycle
- **Fix:** Added \`autoExtractWhenStopped\` to \`normalizeSettings()\` with proper boolean coercion and default fallback

#### Critical: Parallel extraction password data race
- When \`maxParallelExtract > 1\`, multiple concurrent workers read/wrote the shared \`passwordCandidates\` variable without synchronization, causing lost password promotions
- **Fix:** Password list is frozen before parallel extraction; concurrent mutations discarded

#### Important: \`start()\` does not clear \`retryStateByItem\` — premature shelving after stop/restart
- \`start()\` cleared retry delays but NOT failure counters. Items inherited stale counts from previous runs, getting shelved prematurely (threshold 15, old run had 10 = shelved after 5 errors)
- **Fix:** Added \`retryStateByItem.clear()\` to \`start()\`

#### Important: \`SUBST_THRESHOLD\` too low — subst drive mapped on nearly every extraction
- Triggered at path length >= 100 chars, but most real paths exceed that. Raised to 200 (MAX_PATH is 260)

#### Important: Settings quicksave race condition
- Menu quicksaves cleared \`settingsDirtyRef\` unconditionally in \`.finally()\`, overriding concurrent settings changes
- **Fix:** All 7 quicksave paths now use a revision counter guard

#### Important: \`removeCollectorTab\` side effect in setState callback
- Mutated outer-scope variable inside setState updater (unsafe in React Strict/Concurrent Mode)
- **Fix:** Refactored to avoid side effects in the render callback

#### Minor: Escape key clears selection during text input
- Added input focus guard matching the existing Delete key guard

#### Minor: Debug console.log in production removed
#### Minor: maxParallel input missing clamp in settings tab

### Files Changed
- \`src/main/storage.ts\` — \`autoExtractWhenStopped\` in \`normalizeSettings()\`
- \`src/main/download-manager.ts\` — \`start()\` clears \`retryStateByItem\`
- \`src/main/extractor.ts\` — \`SUBST_THRESHOLD\` 100 to 200, parallel password race fix
- \`src/renderer/App.tsx\` — Quicksave revision guard, collector tab fix, Escape guard, console.log removal, maxParallel clamp
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 * @param {string} method - HTTP verb, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, e.g. "/repos/o/r/releases".
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/json",
        Accept: "application/json",
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const payload = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${payload}`));
          return;
        }
        resolve(JSON.parse(payload || "{}"));
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary artifact as an attachment of an existing release.
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name shown on the release page.
 * @returns {Promise<object>} Parsed JSON response; rejects on status >= 400
 *   or on a transport error.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // Whole file is buffered in memory; fine for installer-sized artifacts.
    const payload = fs.readFileSync(filePath);
    const options = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: {
        Authorization: `token ${TOKEN}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": payload.length,
      },
    };
    const req = https.request(options, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const text = Buffer.concat(parts).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        resolve(JSON.parse(text || "{}"));
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Publish the v1.6.17 release: create it via the API, then upload every
 * build artifact present in the local release/ directory. Artifacts that
 * were not built are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG,
    name: TAG,
    body: BODY,
    draft: false,
    prerelease: false,
  });
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  // Installer, portable exe, auto-updater metadata, and delta blockmap.
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.17.exe", name: "Real-Debrid-Downloader-Setup-1.6.17.exe" },
    { file: "Real-Debrid-Downloader 1.6.17.exe", name: "Real-Debrid-Downloader-1.6.17.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.17.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.17.exe.blockmap" },
  ];

  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,118 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
||||||
// SECURITY: a live API token was previously hardcoded here and committed to
// version control — it must be treated as leaked and revoked. Supply the
// token via the CODEBERG_TOKEN environment variable instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
|
|
||||||
const OWNER = "Sucukdeluxe";
|
|
||||||
const REPO = "real-debrid-downloader";
|
|
||||||
const TAG = "v1.6.18";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.18
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 3)
|
|
||||||
|
|
||||||
This release fixes 6 bugs found through a third comprehensive code review covering the download manager, renderer, CSS layer, and test fixtures.
|
|
||||||
|
|
||||||
#### Important: \`resolveStartConflict("skip")\` ineffective during running session
|
|
||||||
- When the session was running and a start conflict was resolved with "skip", items were removed from \`runItemIds\` only when \`!session.running\`. This meant skipped items stayed in the run set and the scheduler would re-download them, defeating the skip entirely
|
|
||||||
- **Fix:** Items and packages are now unconditionally removed from \`runItemIds\`/\`runPackageIds\` regardless of session state
|
|
||||||
|
|
||||||
#### Important: \`skipItems()\` corrupts run summary totals
|
|
||||||
- \`skipItems()\` set items to "cancelled" but never called \`recordRunOutcome()\`. Skipped items were invisible to the run summary, causing inaccurate completion statistics
|
|
||||||
- **Fix:** Added \`recordRunOutcome(itemId, "cancelled")\` for each skipped item
|
|
||||||
|
|
||||||
#### Important: \`handleUpdateResult\` holds \`actionBusy\` lock across user confirm dialog
|
|
||||||
- When manually checking for updates, the entire \`handleUpdateResult\` (including the "Install update?" confirmation dialog) ran inside \`performQuickAction\`. While the dialog was open, all UI buttons were disabled since \`actionBusy\` was held
|
|
||||||
- **Fix:** The update check API call is now separated from the result handling — \`actionBusy\` is released after the API call completes, before the confirm dialog is shown
|
|
||||||
|
|
||||||
#### Minor: Drop overlay missing z-index
|
|
||||||
- \`.drop-overlay\` had \`position: fixed\` but no \`z-index\`, so it could render behind context menus (\`z-index: 100\`) or modals (\`z-index: 20\`) when dragging files
|
|
||||||
- **Fix:** Added \`z-index: 200\` to \`.drop-overlay\`
|
|
||||||
|
|
||||||
#### Minor: \`etaText.split(": ")\` fragile ETA parsing
|
|
||||||
- The statistics tab split \`etaText\` on \`": "\`, which broke for ETAs containing colons (e.g., "ETA: 2:30:15" would show just "2" instead of "2:30:15")
|
|
||||||
- **Fix:** Replaced \`split(": ")\` with \`indexOf(": ")\`/\`slice()\` to split only on the first occurrence
|
|
||||||
|
|
||||||
#### Minor: Test fixtures missing required \`priority\` field
|
|
||||||
- \`PackageEntry\` requires a \`priority\` field since v1.5.x, but test fixtures in \`app-order.test.ts\` omitted it, causing a type mismatch (vitest doesn't type-check by default so this was silent)
|
|
||||||
- **Fix:** Added \`priority: "normal"\` to all test fixtures
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — \`skipItems()\` calls \`recordRunOutcome()\`; \`resolveStartConflict("skip")\` removes items/packages from run sets unconditionally
|
|
||||||
- \`src/renderer/App.tsx\` — \`onCheckUpdates\` releases \`actionBusy\` before confirm dialog; ETA text split fix
|
|
||||||
- \`src/renderer/styles.css\` — \`drop-overlay\` z-index
|
|
||||||
- \`tests/app-order.test.ts\` — Added \`priority: "normal"\` to PackageEntry fixtures
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API (v1).
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} apiPath - Path below /api/v1; must start with "/".
 * @param {object} [body] - Optional request body, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for empty bodies).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse: a malformed response body would otherwise throw
        // uncaught inside the "end" handler instead of rejecting the promise.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}

/**
 * Upload a single build artifact as an asset of an existing release.
 * The file is read fully into memory (installer-sized artifacts, acceptable
 * for a one-shot release script).
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Absolute path of the local file to upload.
 * @param {string} fileName - Asset name as it should appear on the release.
 * @returns {Promise<object>} Parsed JSON API response.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse so a malformed body rejects instead of throwing
        // uncaught inside the "end" event handler.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}

/**
 * Create the release on Codeberg and upload all expected build artifacts.
 * Missing artifacts are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${release.id}`);
  const releaseDir = path.join(__dirname, "..", "release");
  // Derive the bare version from TAG so asset names cannot drift out of sync
  // with the release tag (previously the version was hardcoded in each name).
  const version = TAG.startsWith("v") ? TAG.slice(1) : TAG;
  const assets = [
    { file: `Real-Debrid-Downloader-Setup-${version}.exe`, name: `Real-Debrid-Downloader-Setup-${version}.exe` },
    { file: `Real-Debrid-Downloader ${version}.exe`, name: `Real-Debrid-Downloader-${version}.exe` },
    { file: "latest.yml", name: "latest.yml" },
    { file: `Real-Debrid-Downloader Setup ${version}.exe.blockmap`, name: `Real-Debrid-Downloader-Setup-${version}.exe.blockmap` },
  ];
  for (const a of assets) {
    const p = path.join(releaseDir, a.file);
    if (!fs.existsSync(p)) { console.warn(`SKIP ${a.file}`); continue; }
    console.log(`Uploading ${a.name} ...`);
    await uploadAsset(release.id, p, a.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}
|
|
||||||
main().catch((e) => { console.error(e); process.exit(1); });
|
|
||||||
@ -1,127 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
||||||
// SECURITY: a live API token was previously hardcoded here and committed to
// version control — it must be treated as leaked and revoked. Supply the
// token via the CODEBERG_TOKEN environment variable instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
|
|
||||||
const OWNER = "Sucukdeluxe";
|
|
||||||
const REPO = "real-debrid-downloader";
|
|
||||||
const TAG = "v1.6.19";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.19
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 4)
|
|
||||||
|
|
||||||
This release fixes 8 bugs found through a fourth comprehensive code review covering the download manager, renderer, storage layer, and IPC handlers.
|
|
||||||
|
|
||||||
#### Critical: \`resetItems()\`/\`resetPackage()\` abort race corrupts item state
|
|
||||||
- When resetting an item that is actively downloading, the abort was sent with reason \`"cancel"\`. The async \`processItem\` catch block then overwrote the freshly-reset item state back to \`status="cancelled"\`, \`fullStatus="Entfernt"\` — making the item permanently stuck
|
|
||||||
- The identity guard (\`session.items[id] !== item\`) did not protect against this because reset keeps the same item object reference
|
|
||||||
- **Fix:** Introduced a new abort reason \`"reset"\` for \`resetItems()\`/\`resetPackage()\`. The \`processItem\` catch block now handles \`"reset"\` as a no-op, preserving the already-correct state set by the reset function
|
|
||||||
|
|
||||||
#### Important: \`resolveStartConflict("skip")\` fullStatus race condition
|
|
||||||
- When skipping a package during a running session, active items were aborted with reason \`"package_toggle"\`. The async catch block then overwrote \`fullStatus\` from \`"Wartet"\` to \`"Paket gestoppt"\`, showing a confusing UI state for items that were skipped (not toggled off)
|
|
||||||
- **Fix:** Added a \`queueMicrotask()\` callback after the abort loop that re-corrects any items whose \`fullStatus\` was overwritten to \`"Paket gestoppt"\`
|
|
||||||
|
|
||||||
#### Important: "Don't ask again" delete confirmation not persisted to server
|
|
||||||
- Clicking "Nicht mehr anzeigen" in the delete confirmation dialog only updated the local draft state via \`setBool()\`, but never called \`window.rd.updateSettings()\`. On app restart, the setting reverted to \`true\`
|
|
||||||
- **Fix:** Added an immediate \`window.rd.updateSettings({ confirmDeleteSelection: false })\` call alongside the draft state update
|
|
||||||
|
|
||||||
#### Important: Storage \`writeFileSync\` leaves corrupt \`.tmp\` file on disk-full/permission error
|
|
||||||
- \`saveSettings()\`, \`saveSession()\`, and \`saveHistory()\` wrote to a \`.tmp\` file then renamed. If \`writeFileSync\` threw (disk full, permission denied), the partially-written \`.tmp\` file was left on disk without cleanup
|
|
||||||
- **Fix:** Wrapped write+rename in try/catch with \`.tmp\` cleanup in the catch block for all three sync save functions
|
|
||||||
|
|
||||||
#### Important: Tray "Start" click — unhandled Promise rejection
|
|
||||||
- The tray context menu's "Start" handler called \`controller.start()\` without \`.catch()\` or \`void\`. If \`start()\` threw (e.g., network error during conflict check), it resulted in an unhandled Promise rejection
|
|
||||||
- **Fix:** Added \`void controller.start().catch(...)\` with a logger warning
|
|
||||||
|
|
||||||
#### Important: \`resetItems()\` removes item from \`runItemIds\` without re-adding — session summary incomplete
|
|
||||||
- When an item was reset during a running session, it was removed from \`runItemIds\` but never re-added. The scheduler would still pick it up (via package membership), but \`recordRunOutcome()\` would skip it since \`runItemIds.has(itemId)\` returned false. Session summary counts were therefore inaccurate
|
|
||||||
- **Fix:** After resetting an item, re-add it to \`runItemIds\` if the session is running
|
|
||||||
|
|
||||||
#### Minor: \`importBackup\` no file size limit
|
|
||||||
- The backup import handler read files into memory without any size guard. A user accidentally selecting a multi-GB file could crash the Electron process
|
|
||||||
- **Fix:** Added a 50 MB file size check before reading
|
|
||||||
|
|
||||||
#### Minor: Bandwidth schedule inputs accept NaN
|
|
||||||
- The start/end hour inputs for bandwidth schedules passed \`Number(e.target.value)\` directly without NaN guard. Clearing the field produced \`NaN\` in the settings draft, which could be serialized and sent to the server
|
|
||||||
- **Fix:** Added \`Number.isNaN()\` guard with \`Math.max(0, Math.min(23, v))\` clamping
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — New \`"reset"\` abort reason for \`resetItems()\`/\`resetPackage()\`; \`processItem\` handles \`"reset"\` as no-op; \`resolveStartConflict("skip")\` queueMicrotask fix; \`resetItems()\` re-adds to \`runItemIds\` when running
|
|
||||||
- \`src/main/main.ts\` — Tray Start \`.catch()\`; \`importBackup\` file size guard
|
|
||||||
- \`src/main/storage.ts\` — \`.tmp\` cleanup on write failure for \`saveSettings\`, \`saveSession\`, \`saveHistory\`
|
|
||||||
- \`src/renderer/App.tsx\` — Delete confirmation persists to server; bandwidth schedule NaN guard
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API (v1).
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} apiPath - Path below /api/v1; must start with "/".
 * @param {object} [body] - Optional request body, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for empty bodies).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse: a malformed response body would otherwise throw
        // uncaught inside the "end" handler instead of rejecting the promise.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}

/**
 * Upload a single build artifact as an asset of an existing release.
 * The file is read fully into memory (installer-sized artifacts, acceptable
 * for a one-shot release script).
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Absolute path of the local file to upload.
 * @param {string} fileName - Asset name as it should appear on the release.
 * @returns {Promise<object>} Parsed JSON API response.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse so a malformed body rejects instead of throwing
        // uncaught inside the "end" event handler.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}

/**
 * Create the release on Codeberg and upload all expected build artifacts.
 * Missing artifacts are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${release.id}`);
  const releaseDir = path.join(__dirname, "..", "release");
  // Derive the bare version from TAG so asset names cannot drift out of sync
  // with the release tag (previously the version was hardcoded in each name).
  const version = TAG.startsWith("v") ? TAG.slice(1) : TAG;
  const assets = [
    { file: `Real-Debrid-Downloader-Setup-${version}.exe`, name: `Real-Debrid-Downloader-Setup-${version}.exe` },
    { file: `Real-Debrid-Downloader ${version}.exe`, name: `Real-Debrid-Downloader-${version}.exe` },
    { file: "latest.yml", name: "latest.yml" },
    { file: `Real-Debrid-Downloader Setup ${version}.exe.blockmap`, name: `Real-Debrid-Downloader-Setup-${version}.exe.blockmap` },
  ];
  for (const a of assets) {
    const p = path.join(releaseDir, a.file);
    if (!fs.existsSync(p)) { console.warn(`SKIP ${a.file}`); continue; }
    console.log(`Uploading ${a.name} ...`);
    await uploadAsset(release.id, p, a.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}
|
|
||||||
main().catch((e) => { console.error(e); process.exit(1); });
|
|
||||||
@ -1,125 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
||||||
// SECURITY: a live API token was previously hardcoded here and committed to
// version control — it must be treated as leaked and revoked. Supply the
// token via the CODEBERG_TOKEN environment variable instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
|
|
||||||
const OWNER = "Sucukdeluxe";
|
|
||||||
const REPO = "real-debrid-downloader";
|
|
||||||
const TAG = "v1.6.20";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.20
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 5)
|
|
||||||
|
|
||||||
This release fixes 8 bugs found through a fifth comprehensive code review covering the extractor, download manager, renderer, and JVM integration.
|
|
||||||
|
|
||||||
#### Critical: \`extractSingleArchive\` inflates \`failed\` counter on abort
|
|
||||||
- When extraction was aborted (e.g. by user or signal), the catch block incremented \`failed += 1\` **before** checking if the error was an abort error and re-throwing. This inflated the failed counter, potentially causing the extraction summary to report more failures than actually occurred, and preventing nested extraction (\`failed === 0\` guard)
|
|
||||||
- **Fix:** Moved the abort error check before \`failed += 1\` so aborted archives are re-thrown without incrementing the failure count
|
|
||||||
|
|
||||||
#### Important: \`requestReconnect()\` — \`consecutiveReconnects\` inflated by parallel downloads
|
|
||||||
- When multiple parallel downloads encountered HTTP 429/503 simultaneously, each one called \`requestReconnect()\`, incrementing \`consecutiveReconnects\` once per download. With 10 parallel downloads, a single rate-limit event could immediately push the backoff multiplier to its maximum (5x), causing unnecessarily long reconnect waits
|
|
||||||
- **Fix:** Only increment \`consecutiveReconnects\` when not already inside an active reconnect window (\`reconnectUntil <= now\`). Subsequent calls during the same window still trigger the abort/reconnect flow but don't inflate the backoff
|
|
||||||
|
|
||||||
#### Important: Stale \`snapshot\` closure in \`onAddLinks\`/\`onImportDlc\`/\`onDrop\`
|
|
||||||
- The \`existingIds\` baseline (used to identify newly added packages for auto-collapse) was computed from the stale \`snapshot\` variable captured at render time, not the current ref. If the user added links in quick succession, previously added packages could also be collapsed because \`existingIds\` didn't include them yet
|
|
||||||
- **Fix:** Changed all three functions to read from \`snapshotRef.current.session.packages\` instead of \`snapshot.session.packages\`
|
|
||||||
|
|
||||||
#### Important: \`downloadToFile()\` HTTP 429/503 bypasses inner retry loop
|
|
||||||
- On receiving HTTP 429 (Too Many Requests) or 503 (Service Unavailable), the download handler immediately called \`requestReconnect()\` and threw, even on the first attempt. This bypassed the inner retry loop entirely, escalating a potentially transient error into a full reconnect cycle that aborted all active downloads
|
|
||||||
- **Fix:** Moved the reconnect escalation after the inner retry loop. The download now retries normally first (with backoff), and only triggers a full reconnect if all retry attempts are exhausted with a 429/503
|
|
||||||
|
|
||||||
#### Important: \`PackageCard\` memo comparator missing \`onlineStatus\`
|
|
||||||
- The custom \`memo\` comparator for \`PackageCard\` checked item fields like \`status\`, \`fileName\`, \`progressPercent\`, \`speedBps\`, etc., but did not include \`onlineStatus\`. When a Rapidgator link's online status changed (online/offline/checking), the status dot indicator would not update until some other prop triggered a re-render
|
|
||||||
- **Fix:** Added \`a.onlineStatus !== b.onlineStatus\` to the item comparison in the memo comparator
|
|
||||||
|
|
||||||
#### Important: \`noExtractorEncountered\` throws \`"aborted:extract"\` — wrong error classification
|
|
||||||
- When no extractor was available (e.g. 7z/WinRAR not installed), subsequent archive processing threw \`new Error("aborted:extract")\`. This was caught by \`isExtractAbortError()\` and treated identically to user cancellation, masking the real problem (missing extractor) in logs and error reporting
|
|
||||||
- **Fix:** Changed the error message to \`"noextractor:skipped"\` and updated \`isExtractAbortError()\` to recognize it, so it's still re-thrown (not counted as a normal failure) but carries the correct classification
|
|
||||||
|
|
||||||
#### Minor: \`formatDateTime(0)\` displays "01.01.1970"
|
|
||||||
- The \`formatDateTime\` utility formatted timestamp \`0\` as \`"01.01.1970 - 01:00"\` instead of an empty string. Timestamps of 0 are used as "not set" in various places (e.g. \`createdAt\` before initialization), resulting in nonsensical 1970 dates in the UI
|
|
||||||
- **Fix:** Added an early return of \`""\` when \`ts\` is falsy (0, null, undefined)
|
|
||||||
|
|
||||||
#### Minor: \`cachedJvmLayout = null\` permanently prevents JVM extractor discovery
|
|
||||||
- When the JVM extractor layout resolution failed (Java not found), the result \`null\` was cached permanently. If the user installed Java after app startup, the JVM extractor would never be discovered until the app was restarted
|
|
||||||
- **Fix:** Added a 5-minute TTL for \`null\` cache entries. After the TTL expires, the next extraction attempt re-probes for Java
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/extractor.ts\` — Abort check before \`failed\` increment; \`noExtractorEncountered\` distinct error message; JVM layout null cache TTL
|
|
||||||
- \`src/main/download-manager.ts\` — \`requestReconnect\` single-increment guard; HTTP 429/503 inner retry before reconnect escalation
|
|
||||||
- \`src/renderer/App.tsx\` — Stale snapshot closure fix; PackageCard memo \`onlineStatus\` check; \`formatDateTime(0)\` guard
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API (v1).
 * @param {string} method - HTTP method, e.g. "POST".
 * @param {string} apiPath - Path below /api/v1; must start with "/".
 * @param {object} [body] - Optional request body, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for empty bodies).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse: a malformed response body would otherwise throw
        // uncaught inside the "end" handler instead of rejecting the promise.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}

/**
 * Upload a single build artifact as an asset of an existing release.
 * The file is read fully into memory (installer-sized artifacts, acceptable
 * for a one-shot release script).
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Absolute path of the local file to upload.
 * @param {string} fileName - Asset name as it should appear on the release.
 * @returns {Promise<object>} Parsed JSON API response.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse so a malformed body rejects instead of throwing
        // uncaught inside the "end" event handler.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}

/**
 * Create the release on Codeberg and upload all expected build artifacts.
 * Missing artifacts are skipped with a warning instead of aborting the run.
 */
async function main() {
  console.log("Creating release...");
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${release.id}`);
  const releaseDir = path.join(__dirname, "..", "release");
  // Derive the bare version from TAG so asset names cannot drift out of sync
  // with the release tag (previously the version was hardcoded in each name).
  const version = TAG.startsWith("v") ? TAG.slice(1) : TAG;
  const assets = [
    { file: `Real-Debrid-Downloader-Setup-${version}.exe`, name: `Real-Debrid-Downloader-Setup-${version}.exe` },
    { file: `Real-Debrid-Downloader ${version}.exe`, name: `Real-Debrid-Downloader-${version}.exe` },
    { file: "latest.yml", name: "latest.yml" },
    { file: `Real-Debrid-Downloader Setup ${version}.exe.blockmap`, name: `Real-Debrid-Downloader-Setup-${version}.exe.blockmap` },
  ];
  for (const a of assets) {
    const p = path.join(releaseDir, a.file);
    if (!fs.existsSync(p)) { console.warn(`SKIP ${a.file}`); continue; }
    console.log(`Uploading ${a.name} ...`);
    await uploadAsset(release.id, p, a.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}
|
|
||||||
main().catch((e) => { console.error(e); process.exit(1); });
|
|
||||||
@ -1,134 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
||||||
// SECURITY: a live API token was previously hardcoded here and committed to
// version control — it must be treated as leaked and revoked. Supply the
// token via the CODEBERG_TOKEN environment variable instead.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
|
|
||||||
const OWNER = "Sucukdeluxe";
|
|
||||||
const REPO = "real-debrid-downloader";
|
|
||||||
const TAG = "v1.6.21";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.21
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 6)
|
|
||||||
|
|
||||||
This release fixes 8 bugs found through a sixth comprehensive code review covering the extractor, download manager, storage layer, renderer UI, and app controller.
|
|
||||||
|
|
||||||
#### Critical: Nested extraction ignores "trash" cleanup mode
|
|
||||||
- The main extraction path properly handles all cleanup modes via \`cleanupArchives()\`. However, the nested extraction path (archives found inside extracted output) had its own inline cleanup that only checked for \`"delete"\`, completely ignoring the \`"trash"\` mode
|
|
||||||
- Users with cleanup mode set to "trash" would find nested archive files left behind in the target directory alongside extracted content
|
|
||||||
- **Fix:** Replaced the inline \`if (cleanupMode === "delete") { unlink(...) }\` with a call to the existing \`cleanupArchives()\` function that handles all modes
|
|
||||||
|
|
||||||
#### Critical: \`resetPackage()\` does not re-add items to \`runItemIds\`/\`runPackageIds\` when session is running
|
|
||||||
- \`resetItems()\` was fixed in v1.6.19 to re-add items to \`runItemIds\` when the session is running. However, the parallel method \`resetPackage()\` was not updated — it removed items from \`runItemIds\` but never re-added them
|
|
||||||
- This caused \`recordRunOutcome()\` to silently discard outcomes for reset items, producing inaccurate session summaries
|
|
||||||
- **Fix:** After resetting all items, re-add them to \`runItemIds\` and re-add the package to \`runPackageIds\` if the session is running
|
|
||||||
|
|
||||||
#### Important: \`importBackup\` writes session without normalization
|
|
||||||
- The backup import handler cast the session JSON directly to \`SessionState\` and wrote it to disk without passing through \`normalizeLoadedSession()\` or \`normalizeLoadedSessionTransientFields()\`. Items with stale active statuses, non-zero \`speedBps\`, or invalid field values from a crafted backup were persisted verbatim
|
|
||||||
- **Fix:** Added normalization before saving. Exported both normalization functions from storage module
|
|
||||||
|
|
||||||
#### Important: \`sanitizeCredentialPersistence\` clears archive passwords
|
|
||||||
- When \`rememberToken\` was disabled, the sanitization function also wiped \`archivePasswordList\`. Archive passwords are NOT authentication credentials — they are extraction passwords for unpacking downloaded archives
|
|
||||||
- Users who disabled "Remember Token" lost all their custom archive passwords on every app restart
|
|
||||||
- **Fix:** Removed \`archivePasswordList\` from the credential sanitization
|
|
||||||
|
|
||||||
#### Important: Delete key fires regardless of active tab — data loss risk
|
|
||||||
- The Delete key handler checked \`selectedIds.size > 0\` but did NOT check which tab was active. If the user selected packages on Downloads tab, switched to Settings, and pressed Delete, the packages would be silently deleted
|
|
||||||
- **Fix:** Added \`tabRef.current === "downloads"\` guard
|
|
||||||
|
|
||||||
#### Important: Escape key inconsistency + no tab guard
|
|
||||||
- Pressing Escape cleared download selection but never cleared history selection. Also fired on every tab causing unnecessary re-renders
|
|
||||||
- **Fix:** Escape now checks active tab — clears \`selectedIds\` on Downloads tab, \`selectedHistoryIds\` on History tab
|
|
||||||
|
|
||||||
#### Important: Generic split file skip not counted in progress
|
|
||||||
- When a generic \`.001\` split file was skipped (no archive signature), the function returned early without incrementing \`extracted\` or \`failed\`, causing extraction progress to never reach 100%
|
|
||||||
- **Fix:** Increment \`extracted\` before returning for skipped generic splits
|
|
||||||
|
|
||||||
#### Important: Mousedown deselection fires inside modals
|
|
||||||
- The mousedown handler that clears package selection checked for \`.package-card\` and \`.ctx-menu\` but not modals. Clicking inside any modal cleared the selection
|
|
||||||
- **Fix:** Added \`.modal-backdrop\` and \`.modal-card\` to the exclusion list
|
|
||||||
|
|
||||||
#### Minor: \`PackageCard\` memo comparator missing multiple fields
|
|
||||||
- Missing: \`pkg.priority\`, \`pkg.createdAt\`, \`item.downloadedBytes\`, \`item.totalBytes\`. Changes to these fields would not trigger re-renders
|
|
||||||
- **Fix:** Added all four missing field comparisons
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/extractor.ts\` — Nested extraction uses \`cleanupArchives()\` for all modes; generic split skip increments \`extracted\`
|
|
||||||
- \`src/main/download-manager.ts\` — \`resetPackage()\` re-adds to \`runItemIds\`/\`runPackageIds\` when running
|
|
||||||
- \`src/main/app-controller.ts\` — \`importBackup\` normalizes session before save
|
|
||||||
- \`src/main/storage.ts\` — Exported normalization functions; removed \`archivePasswordList\` from credential sanitization
|
|
||||||
- \`src/renderer/App.tsx\` — Delete/Escape key tab guards; mousedown modal exclusions; PackageCard memo field additions
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 *
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below `/api/v1`, e.g. `/repos/o/r/releases`.
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown inside this 'end' event
        // handler is NOT captured by the Promise executor, so an invalid
        // JSON success response would crash the whole process.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`${method} ${apiPath}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary build artifact to an existing release.
 *
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Name the asset should carry on the release page.
 * @returns {Promise<object>} Parsed JSON description of the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A sync-read failure throws inside the executor, which rejects the promise.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown in this event handler would
        // escape the promise and crash the process on a non-JSON response.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Entry point: create the v1.6.21 release, then upload every build artifact
 * found in the local release/ directory, warning about missing files.
 */
async function main() {
  console.log("Creating release...");
  const payload = { tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false };
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, payload);
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.21.exe", name: "Real-Debrid-Downloader-Setup-1.6.21.exe" },
    { file: "Real-Debrid-Downloader 1.6.21.exe", name: "Real-Debrid-Downloader-1.6.21.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.21.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.21.exe.blockmap" },
  ];
  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => { console.error(err); process.exit(1); });
|
|
||||||
@ -1,128 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no built-in __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY: a personal access token was previously hardcoded on this line and
// is exposed in repository history — revoke it on Codeberg. The token must be
// supplied via the environment instead of being committed.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) console.warn("CODEBERG_TOKEN is not set; API requests will fail with 401.");
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.22"; // release tag to create
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.22
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 7)
|
|
||||||
|
|
||||||
This release fixes 8 bugs found through a seventh comprehensive code review covering the debrid service layer, download manager, and renderer UI.
|
|
||||||
|
|
||||||
#### Critical: Per-request timeout treated as user abort — breaks retry loops and provider fallback
|
|
||||||
- All debrid API clients (Real-Debrid, BestDebrid, AllDebrid) used \`/aborted/i.test(errorText)\` to detect user cancellation. However, when a per-request timeout fired (via \`AbortSignal.timeout(30000)\`), Node.js threw an error containing "aborted due to timeout" — matching the regex and breaking out of the retry loop on the first timeout
|
|
||||||
- This had three severe consequences: (1) no retries on slow API responses, (2) provider fallback chain aborted entirely if the primary provider timed out (AllDebrid never tried even when configured), (3) Rapidgator online checks failed permanently on timeout
|
|
||||||
- **Fix:** Narrowed the abort detection regex to exclude timeout errors: \`/aborted/i.test(text) && !/timeout/i.test(text)\`. Applied across 10 catch blocks in \`realdebrid.ts\` and \`debrid.ts\`
|
|
||||||
|
|
||||||
#### Critical: \`resolveStartConflict("overwrite")\` uses "cancel" abort reason — race condition corrupts item state
|
|
||||||
- The overwrite conflict resolution path aborted active downloads with \`abortReason = "cancel"\`. The \`processItem\` catch handler then saw "cancel" and overwrote the freshly-reset item state back to \`status="cancelled"\`, \`fullStatus="Entfernt"\` — the same race condition that was fixed for \`resetItems()\` in v1.6.19
|
|
||||||
- Items became permanently stuck as "cancelled" and the scheduler would never pick them up
|
|
||||||
- **Fix:** Changed the abort reason from \`"cancel"\` to \`"reset"\`, whose catch handler is a no-op that preserves the already-correct state
|
|
||||||
|
|
||||||
#### Important: \`checkRapidgatorLinks\` — single failure aborts entire batch, stranding items in "checking" state
|
|
||||||
- All items were set to \`onlineStatus = "checking"\` before the loop. The \`checkRapidgatorOnline()\` call had no try-catch wrapper. If one URL check threw (e.g., due to the timeout-as-abort bug above), all subsequent items remained in "checking" state indefinitely
|
|
||||||
- **Fix:** Wrapped the check in try-catch. On error, the item's \`onlineStatus\` is reset to \`undefined\` and the loop continues
|
|
||||||
|
|
||||||
#### Important: \`applyCompletedCleanupPolicy("immediate")\` deletes non-completed items
|
|
||||||
- When \`autoExtract\` was disabled and cleanup policy was "immediate", the method blindly removed whatever item was specified — including \`failed\` or \`cancelled\` items. For a failed package (which has at least one failed item), the failed items got deleted from the session without the user ever seeing them
|
|
||||||
- **Fix:** Added \`item.status !== "completed"\` guard before the deletion logic
|
|
||||||
|
|
||||||
#### Important: \`visiblePackages\` reorders packages but \`isFirst\`/\`isLast\` use original order
|
|
||||||
- When downloads are running, active packages are sorted to the top. But \`isFirst\`/\`isLast\` were computed from the original \`packageOrder\`, not the rendered order. This meant the "move up" button was enabled on visually-first packages and "move down" on visually-last ones, causing confusing reordering behavior
|
|
||||||
- **Fix:** Changed to use the rendered index (\`idx === 0\` / \`idx === visiblePackages.length - 1\`)
|
|
||||||
|
|
||||||
#### Important: \`sessionDownloadedBytes\` never subtracted on retry — inflated session stats
|
|
||||||
- When a download failed and retried, \`dropItemContribution\` correctly subtracted bytes from \`session.totalDownloadedBytes\` but not from \`sessionDownloadedBytes\` (the UI stats counter). The "Session Downloaded" display became inflated by the sum of all discarded retry bytes
|
|
||||||
- Also, \`resetSessionTotalsIfQueueEmpty\` forgot to reset \`sessionDownloadedBytes\`, leaving ghost totals after clearing the queue
|
|
||||||
- **Fix:** Added \`sessionDownloadedBytes\` subtraction in \`dropItemContribution\` and reset in \`resetSessionTotalsIfQueueEmpty\`
|
|
||||||
|
|
||||||
#### Important: Escape key doesn't clear history selection
|
|
||||||
- Pressing Escape cleared download selection (\`selectedIds\`) but did nothing for history selection (\`selectedHistoryIds\`). Already partially addressed in v1.6.21 (tab guard), this release ensures the Escape handler also clears the correct selection per tab
|
|
||||||
|
|
||||||
#### Minor: \`removeCollectorTab\` defers tab switch via \`setTimeout\` — stale active tab for one render tick
|
|
||||||
- When removing a collector tab, the fallback tab activation was deferred with \`setTimeout(..., 0)\`. During the intervening render, \`activeCollectorTab\` pointed to the removed tab, causing the textarea to show the wrong tab's content and clipboard detection to append to the wrong tab
|
|
||||||
- **Fix:** Moved \`setActiveCollectorTab\` outside the \`setCollectorTabs\` updater so both state updates batch in the same render
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/debrid.ts\` — Timeout-aware abort detection in all catch blocks (8 locations)
|
|
||||||
- \`src/main/realdebrid.ts\` — Timeout-aware abort detection in unrestrict retry loop
|
|
||||||
- \`src/main/download-manager.ts\` — Overwrite conflict uses "reset" abort reason; Rapidgator check per-item try-catch; cleanup policy completed guard; sessionDownloadedBytes fix
|
|
||||||
- \`src/renderer/App.tsx\` — \`isFirst\`/\`isLast\` from rendered order; \`removeCollectorTab\` synchronous tab switch
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 *
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below `/api/v1`, e.g. `/repos/o/r/releases`.
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown inside this 'end' event
        // handler is NOT captured by the Promise executor, so an invalid
        // JSON success response would crash the whole process.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`${method} ${apiPath}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary build artifact to an existing release.
 *
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Name the asset should carry on the release page.
 * @returns {Promise<object>} Parsed JSON description of the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A sync-read failure throws inside the executor, which rejects the promise.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown in this event handler would
        // escape the promise and crash the process on a non-JSON response.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Entry point: create the v1.6.22 release, then upload every build artifact
 * found in the local release/ directory, warning about missing files.
 */
async function main() {
  console.log("Creating release...");
  const payload = { tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false };
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, payload);
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.22.exe", name: "Real-Debrid-Downloader-Setup-1.6.22.exe" },
    { file: "Real-Debrid-Downloader 1.6.22.exe", name: "Real-Debrid-Downloader-1.6.22.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.22.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.22.exe.blockmap" },
  ];
  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => { console.error(err); process.exit(1); });
|
|
||||||
@ -1,207 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no built-in __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY: a personal access token was previously hardcoded on this line and
// is exposed in repository history — revoke it on Codeberg. The token must be
// supplied via the environment instead of being committed.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) console.warn("CODEBERG_TOKEN is not set; API requests will fail with 401.");
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.23"; // release tag to create
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.23
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Rounds 8, 9 & 10)
|
|
||||||
|
|
||||||
This release fixes 24 bugs found through three comprehensive code review rounds covering the download manager, extractor, debrid clients, storage layer, app controller, and renderer UI.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Round 8: Download Manager Core Logic (8 fixes)
|
|
||||||
|
|
||||||
#### Critical: \\\`resetItems()\\\` does not re-add packages to \\\`runPackageIds\\\`
|
|
||||||
- When \\\`resetItems\\\` was called during a "Start Selected" run, it re-added item IDs to \\\`runItemIds\\\` but never re-added the parent package to \\\`runPackageIds\\\`. The scheduler's \\\`findNextQueuedItem()\\\` checks \\\`runPackageIds\\\` and would permanently skip those items
|
|
||||||
- **Fix:** After resetting items, add all affected package IDs to \\\`runPackageIds\\\`
|
|
||||||
|
|
||||||
#### Critical: \\\`resolveStartConflict("overwrite")\\\` missing \\\`runPackageIds\\\` re-add
|
|
||||||
- Same issue: overwrite conflict resolution reset items and re-added to \\\`runItemIds\\\` but not \\\`runPackageIds\\\`. The overwritten items would never be picked up by the scheduler
|
|
||||||
- **Fix:** Added \\\`runPackageIds.add(packageId)\\\` after the overwrite reset
|
|
||||||
|
|
||||||
#### Critical: \\\`resolveStartConflict\\\`/\\\`resetPackage\\\`/\\\`resetItems\\\` don't call \\\`ensureScheduler()\\\`
|
|
||||||
- After resetting items to "queued", neither method kicked the scheduler. If the scheduler had already detected all downloads complete and was about to call \\\`finishRun()\\\`, the newly queued items would be stranded
|
|
||||||
- **Fix:** Added \\\`ensureScheduler()\\\` calls after reset operations when the session is running
|
|
||||||
|
|
||||||
#### Important: \\\`sessionDownloadedBytes\\\` not subtracted on retry fresh-start
|
|
||||||
- When the server ignored the Range header (HTTP 200 instead of 206), the code subtracted bytes from \\\`session.totalDownloadedBytes\\\` but not from \\\`sessionDownloadedBytes\\\`. The session speed stats drifted upward with each retry
|
|
||||||
- **Fix:** Added \\\`sessionDownloadedBytes\\\` subtraction alongside \\\`totalDownloadedBytes\\\`
|
|
||||||
|
|
||||||
#### Important: Failed packages with \\\`autoExtract\\\` + \\\`package_done\\\` policy never cleaned up
|
|
||||||
- The \\\`allExtracted\\\` check required ALL items (including failed/cancelled ones) to have extraction labels. Failed items never get extracted, so the guard blocked cleanup forever. Packages with any failures accumulated in the UI permanently
|
|
||||||
- **Fix:** Skip failed/cancelled items in the \\\`allExtracted\\\` check — only completed items need extraction
|
|
||||||
|
|
||||||
#### Important: \\\`on_start\\\` cleanup removes completed items ignoring extraction status
|
|
||||||
- At app startup, the \\\`on_start\\\` cleanup policy deleted all completed items without checking whether they had been extracted. If the app was closed mid-download before extraction ran, completed items were silently removed and extraction could never happen
|
|
||||||
- **Fix:** Added \\\`autoExtract\\\` guard: keep completed items that haven't been extracted yet
|
|
||||||
|
|
||||||
#### Important: History \\\`totalBytes\\\` inflated by non-completed items
|
|
||||||
- When deleting a package, the history entry summed \\\`downloadedBytes\\\` from ALL items (including failed/cancelled with partial data) but \\\`fileCount\\\` only counted completed items. This created a mismatch between reported size and file count
|
|
||||||
- **Fix:** Filter to completed items before summing bytes
|
|
||||||
|
|
||||||
#### Minor: Status mismatch for cancelled+success packages between startup and runtime
|
|
||||||
- On app restart, a package with some completed and some cancelled items got status "failed". During runtime, the same scenario correctly got "completed"
|
|
||||||
- **Fix:** Aligned startup logic with runtime: \\\`cancelled > 0 && success > 0\\\` now produces "completed" consistently
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Round 9: Debrid, Extractor, Storage & Main (8 fixes)
|
|
||||||
|
|
||||||
#### Critical: Real-Debrid "HTML statt JSON" error not retryable
|
|
||||||
- When Real-Debrid returned an HTML response (Cloudflare challenge, maintenance page), \\\`realdebrid.ts\\\` threw immediately without retrying. The equivalent check in \\\`debrid.ts\\\` (for AllDebrid/BestDebrid) already included this case
|
|
||||||
- **Fix:** Added \\\`"html statt json"\\\` to \\\`isRetryableErrorText\\\` in \\\`realdebrid.ts\\\`
|
|
||||||
|
|
||||||
#### Critical: Real-Debrid no URL protocol validation for download URLs
|
|
||||||
- The direct download URL from Real-Debrid was used without validating the protocol. BestDebrid and AllDebrid both validate for \\\`http:\\\`/\\\`https:\\\` only. An unexpected protocol (e.g., \\\`ftp:\\\`, \\\`file:\\\`) would cause cryptic fetch errors
|
|
||||||
- **Fix:** Added URL parsing and protocol validation matching the other clients
|
|
||||||
|
|
||||||
#### Important: BestDebrid outer loop swallows abort errors
|
|
||||||
- The outer request loop in \\\`BestDebridClient.unrestrictLink\\\` caught ALL errors including abort errors. If \\\`buildBestDebridRequests\\\` returned multiple requests, a user abort would be caught and the next request attempted
|
|
||||||
- **Fix:** Re-throw abort errors before continuing the loop
|
|
||||||
|
|
||||||
#### Important: Shutdown persists session asynchronously — data loss on fast exit
|
|
||||||
- \\\`prepareForShutdown()\\\` called \\\`persistNow()\\\` which starts an async write. The process could exit before the write completed, losing the final session state. Items could be stuck in "downloading" status on next startup
|
|
||||||
- **Fix:** Replaced async \\\`persistNow()\\\` with synchronous \\\`saveSession()\\\` + \\\`saveSettings()\\\` during shutdown
|
|
||||||
|
|
||||||
#### Important: \\\`importBackup\\\` race condition with in-flight async save
|
|
||||||
- If an async save was in-flight when the user imported a backup, the async save's \\\`finally\\\` clause would process its queued payload AFTER the backup was written, silently overwriting the restored session
|
|
||||||
- **Fix:** Added \\\`cancelPendingAsyncSaves()\\\` that clears both session and settings async queues before writing the backup
|
|
||||||
|
|
||||||
#### Important: Serial extraction path missing \\\`failed\\\` count for skipped archives
|
|
||||||
- When no extractor was available, the serial extraction loop broke early but didn't count remaining archives as failed. The parallel path already had this counting. Progress never reached 100% and the extraction summary understated failures
|
|
||||||
- **Fix:** Added remaining archive counting after the serial loop, matching the parallel path
|
|
||||||
|
|
||||||
#### Minor: \\\`"reset"\\\` not in \\\`abortReason\\\` union type
|
|
||||||
- The TypeScript type for \\\`ActiveTask.abortReason\\\` listed 7 values but omitted \\\`"reset"\\\`, which was assigned in 3 locations and handled in the catch block. The code worked at runtime but lacked type safety
|
|
||||||
- **Fix:** Added \\\`"reset"\\\` to the union type
|
|
||||||
|
|
||||||
#### Minor: \\\`skipItems\\\` doesn't clear \\\`retryAfterByItem\\\`/\\\`retryStateByItem\\\`
|
|
||||||
- When items in retry-delay were skipped, their \\\`retryAfterByItem\\\` entries leaked until \\\`finishRun()\\\`. While not causing functional issues (the status check filters them), it's unnecessary memory retention
|
|
||||||
- **Fix:** Delete both retry entries when skipping items
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Round 10: Renderer UI (8 fixes)
|
|
||||||
|
|
||||||
#### Critical: \\\`Ctrl+A\\\` hijacks native select-all in text inputs
|
|
||||||
- The \\\`Ctrl+A\\\` handler selected all packages/history entries without checking if the focused element was an input or textarea. Users pressing \\\`Ctrl+A\\\` in the search bar or collector textarea lost their text selection
|
|
||||||
- **Fix:** Added input/textarea guard before handling \\\`Ctrl+A\\\`
|
|
||||||
|
|
||||||
#### Important: \\\`Ctrl+Q\\\`/\\\`Ctrl+Shift+R\\\` fire inside text inputs — accidental quit/restart
|
|
||||||
- The app quit and restart shortcuts fired regardless of focus. A user typing in an input field could accidentally trigger app quit/restart
|
|
||||||
- **Fix:** Added input/textarea guard for all keyboard shortcuts (\\\`Ctrl+Q\\\`, \\\`Ctrl+Shift+R\\\`, \\\`Ctrl+L\\\`, \\\`Ctrl+P\\\`, \\\`Ctrl+O\\\`)
|
|
||||||
|
|
||||||
#### Important: \\\`onAddLinks\\\`/\\\`onImportDlc\\\`/\\\`onDrop\\\` read stale \\\`collapseNewPackages\\\` setting
|
|
||||||
- These async functions read \\\`snapshot.settings.collapseNewPackages\\\` via closure, but after multiple \\\`await\\\` calls the value could be stale. If the user toggled the setting during the async operation, the old value was used
|
|
||||||
- **Fix:** Changed to read from \\\`snapshotRef.current.settings.collapseNewPackages\\\`
|
|
||||||
|
|
||||||
#### Important: \\\`showLinksPopup\\\` captures stale \\\`snapshot.session\\\`
|
|
||||||
- The link popup callback closed over \\\`snapshot.session.packages\\\` and \\\`snapshot.session.items\\\`. If a state update arrived while the context menu was open, the callback used stale data, potentially showing empty or incomplete link lists
|
|
||||||
- **Fix:** Changed to read from \\\`snapshotRef.current.session\\\` and removed snapshot dependencies from \\\`useCallback\\\`
|
|
||||||
|
|
||||||
#### Important: \\\`dragDidMoveRef\\\` never reset after mouseup — blocks next click
|
|
||||||
- After a \\\`Ctrl+drag-select\\\` operation, \\\`dragDidMoveRef.current\\\` stayed \\\`true\\\`. The next single click was silently swallowed because \\\`onSelectId\\\` checked \\\`if (dragDidMoveRef.current) return\\\`
|
|
||||||
- **Fix:** Reset \\\`dragDidMoveRef.current = false\\\` in the mouseup handler
|
|
||||||
|
|
||||||
#### Important: Rename \\\`onBlur\\\` fires after Enter key — double rename RPC
|
|
||||||
- Pressing Enter to confirm a rename triggered \\\`onFinishEdit\\\` from the keydown handler. React then removed the input, which fired a blur event that called \\\`onFinishEdit\\\` again. The \\\`renamePackage\\\` RPC was sent twice
|
|
||||||
- **Fix:** Added idempotency guard: \\\`setEditingPackageId\\\` only processes the rename if the package ID still matches
|
|
||||||
|
|
||||||
#### Important: Escape key clears selection when overlay is open
|
|
||||||
- Pressing Escape while a context menu, modal, or link popup was visible both closed the overlay AND cleared the package selection. Users expected Escape to only dismiss the overlay
|
|
||||||
- **Fix:** Check for visible overlays before clearing selection
|
|
||||||
|
|
||||||
#### Minor: \\\`packageOrder\\\` normalization O(n\\\\u00B2) via \\\`Array.includes\\\`
|
|
||||||
- The session normalization loop used \\\`packageOrder.includes(id)\\\` (O(n)) when \\\`seenOrder.has(id)\\\` (O(1)) was already available. With hundreds of packages, this caused measurable startup slowdown
|
|
||||||
- **Fix:** Use \\\`seenOrder.has()\\\` instead of \\\`packageOrder.includes()\\\`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \\\`src/main/download-manager.ts\\\` — resetItems/resolveStartConflict runPackageIds fix; ensureScheduler calls; sessionDownloadedBytes retry fix; cleanup policy extraction guards; history bytes filter; startup status alignment; shutdown sync persist; skipItems cleanup
|
|
||||||
- \\\`src/main/extractor.ts\\\` — Serial path noExtractor remaining count
|
|
||||||
- \\\`src/main/realdebrid.ts\\\` — HTML retry; URL protocol validation
|
|
||||||
- \\\`src/main/debrid.ts\\\` — BestDebrid abort propagation in outer loop
|
|
||||||
- \\\`src/main/storage.ts\\\` — cancelPendingAsyncSaves; packageOrder O(1) lookup
|
|
||||||
- \\\`src/main/app-controller.ts\\\` — importBackup cancels async saves
|
|
||||||
- \\\`src/renderer/App.tsx\\\` — Keyboard shortcut input guards; stale closure fixes (collapseNewPackages, showLinksPopup); dragDidMoveRef reset; rename double-call guard; Escape overlay check
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 *
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below `/api/v1`, e.g. `/repos/o/r/releases`.
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown inside this 'end' event
        // handler is NOT captured by the Promise executor, so an invalid
        // JSON success response would crash the whole process.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`${method} ${apiPath}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary build artifact to an existing release.
 *
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Name the asset should carry on the release page.
 * @returns {Promise<object>} Parsed JSON description of the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A sync-read failure throws inside the executor, which rejects the promise.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown in this event handler would
        // escape the promise and crash the process on a non-JSON response.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Entry point: create the v1.6.23 release, then upload every build artifact
 * found in the local release/ directory, warning about missing files.
 */
async function main() {
  console.log("Creating release...");
  const payload = { tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false };
  const release = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, payload);
  console.log(`Release created: ${release.id}`);

  const releaseDir = path.join(__dirname, "..", "release");
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.23.exe", name: "Real-Debrid-Downloader-Setup-1.6.23.exe" },
    { file: "Real-Debrid-Downloader 1.6.23.exe", name: "Real-Debrid-Downloader-1.6.23.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.23.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.23.exe.blockmap" },
  ];
  for (const asset of assets) {
    const assetPath = path.join(releaseDir, asset.file);
    if (!fs.existsSync(assetPath)) {
      console.warn(`SKIP ${asset.file}`);
      continue;
    }
    console.log(`Uploading ${asset.name} ...`);
    await uploadAsset(release.id, assetPath, asset.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((err) => { console.error(err); process.exit(1); });
|
|
||||||
@ -1,99 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this script's directory (ESM modules have no built-in __dirname).
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY: a personal access token was previously hardcoded on this line and
// is exposed in repository history — revoke it on Codeberg. The token must be
// supplied via the environment instead of being committed.
const TOKEN = process.env.CODEBERG_TOKEN ?? "";
if (!TOKEN) console.warn("CODEBERG_TOKEN is not set; API requests will fail with 401.");
const OWNER = "Sucukdeluxe"; // repository owner
const REPO = "real-debrid-downloader"; // repository name
const TAG = "v1.6.24"; // release tag to create
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.24
|
|
||||||
|
|
||||||
### Bug Fixes (Cross-Review Validation)
|
|
||||||
|
|
||||||
This release fixes 2 bugs found by cross-validating the codebase against an independent external review.
|
|
||||||
|
|
||||||
#### Important: Startup auto-retry recovery does not emit state update to UI
|
|
||||||
- \\\`recoverRetryableItems()\\\` mutated item statuses (failed -> queued) and refreshed package statuses on app startup, but never called \\\`emitState()\\\` or \\\`persistSoon()\\\`. The UI showed stale item statuses until the next periodic state emission or user interaction
|
|
||||||
- **Fix:** Added \\\`persistSoon()\\\` and \\\`emitState()\\\` after recovery completes
|
|
||||||
|
|
||||||
#### Minor: Rapidgator offline check does not refresh parent package status
|
|
||||||
- When \\\`applyRapidgatorCheckResult()\\\` set an item to \\\`status="failed"\\\` (offline), the parent package's status was not recalculated. The package could show as "queued" while containing failed items
|
|
||||||
- **Fix:** Call \\\`refreshPackageStatus(pkg)\\\` after marking an item as offline
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \\\`src/main/download-manager.ts\\\` — recoverRetryableItems emitState/persistSoon; Rapidgator offline package status refresh
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg (Gitea) REST API.
 *
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below `/api/v1`, e.g. `/repos/o/r/releases`.
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for an empty body).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown inside this 'end' event
        // handler is NOT captured by the Promise executor, so an invalid
        // JSON success response would crash the whole process.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`${method} ${apiPath}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload one binary build artifact to an existing release.
 *
 * @param {number} releaseId - ID returned by the release-creation call.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Name the asset should carry on the release page.
 * @returns {Promise<object>} Parsed JSON description of the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A sync-read failure throws inside the executor, which rejects the promise.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: an exception thrown in this event handler would
        // escape the promise and crash the process on a non-JSON response.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Create the Codeberg release for this tag and upload every build artifact
 * found in the local release/ directory. Artifacts missing on disk are
 * skipped with a warning rather than aborting the whole upload.
 */
async function main() {
  console.log("Creating release...");
  const created = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${created.id}`);

  const artifactsDir = path.join(__dirname, "..", "release");
  // Local electron-builder output name -> asset name on the release page.
  const artifacts = [
    { file: "Real-Debrid-Downloader-Setup-1.6.24.exe", name: "Real-Debrid-Downloader-Setup-1.6.24.exe" },
    { file: "Real-Debrid-Downloader 1.6.24.exe", name: "Real-Debrid-Downloader-1.6.24.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.24.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.24.exe.blockmap" },
  ];

  for (const artifact of artifacts) {
    const artifactPath = path.join(artifactsDir, artifact.file);
    if (!fs.existsSync(artifactPath)) {
      console.warn(`SKIP ${artifact.file}`);
      continue;
    }
    console.log(`Uploading ${artifact.name} ...`);
    await uploadAsset(created.id, artifactPath, artifact.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}
|
|
||||||
// Entry point: run the release flow; any failure is logged and fails the process.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,159 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this module's directory for locating the ../release output folder.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY: an API token was committed here in plain text. Prefer the
// environment variable; the literal fallback preserves existing behavior,
// but the leaked token should be revoked and the fallback removed.
const TOKEN = process.env.CODEBERG_TOKEN || "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.25";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.25
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 11)
|
|
||||||
|
|
||||||
This release fixes 12 bugs found through an intensive 10-agent parallel code review covering every line of the codebase.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Critical (2 fixes)
|
|
||||||
|
|
||||||
#### Critical: In-flight async session save can overwrite restored backup or shutdown state
|
|
||||||
- \\\`importBackup()\\\` called \\\`cancelPendingAsyncSaves()\\\` which only nullified queued saves, but an already-executing async save could complete its \\\`rename()\\\` after the synchronous backup restore, silently overwriting the restored session. Same race existed during shutdown.
|
|
||||||
- **Fix:** Added a \\\`syncSaveGeneration\\\` counter to \\\`storage.ts\\\`. Synchronous saves and \\\`cancelPendingAsyncSaves()\\\` increment the counter. Async writes check the generation before \\\`rename()\\\` and discard stale writes.
|
|
||||||
|
|
||||||
#### Critical: Menu bar quick-settings silently discard unsaved settings panel changes
|
|
||||||
- When using the speed limit or max-parallel spinners in the menu bar, the \\\`.finally()\\\` callback falsely reset \\\`settingsDirtyRef\\\` to \\\`false\\\`. If the user had unsaved changes in the Settings panel (e.g. a new API token), the next backend state update would overwrite the draft, silently losing those changes.
|
|
||||||
- **Fix:** Added a separate \\\`panelDirtyRevisionRef\\\` counter. Panel changes (setBool, setText, setNum, schedules, theme) increment it. Quick-settings only clear \\\`settingsDirtyRef\\\` when \\\`panelDirtyRevisionRef === 0\\\`. Reset to 0 on save and init.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Important (10 fixes)
|
|
||||||
|
|
||||||
#### \\\`skipItems()\\\` does not refresh parent package status
|
|
||||||
- After setting items to cancelled/skipped, the parent package's status was never recalculated. Packages showed as "queued" despite all items being skipped. The scheduler kept checking these packages unnecessarily.
|
|
||||||
- **Fix:** Collect affected package IDs and call \\\`refreshPackageStatus()\\\` for each.
|
|
||||||
|
|
||||||
#### \\\`getSessionStats()\\\` shows non-zero speed after session stops
|
|
||||||
- \\\`currentSpeedBps\\\` only checked \\\`paused\\\` but not \\\`!session.running\\\`, unlike \\\`emitState()\\\`. After stopping a run, the stats API briefly returned stale speed values.
|
|
||||||
- **Fix:** Added \\\`!this.session.running\\\` check, matching \\\`emitState()\\\` logic.
|
|
||||||
|
|
||||||
#### Hybrid extraction catch leaves items with frozen progress labels
|
|
||||||
- The catch block only marked items with "Ausstehend" or "Warten auf Parts" as Error. Items showing active progress (e.g. "Entpacken 45%...") were left with a frozen label permanently.
|
|
||||||
- **Fix:** Extended the check to match any \\\`fullStatus\\\` starting with "Entpacken" (excluding already-extracted items).
|
|
||||||
|
|
||||||
#### \\\`normalizeHistoryEntry()\\\` drops \\\`urls\\\` field on load
|
|
||||||
- The history normalization function never read or included the \\\`urls\\\` property. After a save-load cycle, all URL data was permanently lost.
|
|
||||||
- **Fix:** Parse and include \\\`urls\\\` array from the raw entry.
|
|
||||||
|
|
||||||
#### "Immediate" retroactive cleanup creates no history entry
|
|
||||||
- When the cleanup policy removed all items from a package, \\\`removePackageFromSession()\\\` was called with an empty array, so no history entry was recorded. Packages silently vanished from the download log.
|
|
||||||
- **Fix:** Pass \\\`completedItemIds\\\` to \\\`removePackageFromSession()\\\` for history recording. Delete items from session only after the history call. Also fixed missing \\\`retryStateByItem\\\` cleanup.
|
|
||||||
|
|
||||||
#### Skipped generic split files counted as extracted but not tracked for resume
|
|
||||||
- When a generic \\\`.001\\\` file had no archive signature and was skipped, it was counted in \\\`extracted\\\` but not added to \\\`resumeCompleted\\\` or \\\`extractedArchives\\\`. On resume, it would be re-processed; cleanup wouldn't find it.
|
|
||||||
- **Fix:** Add skipped files to both \\\`resumeCompleted\\\` and \\\`extractedArchives\\\`.
|
|
||||||
|
|
||||||
#### \\\`noextractor:skipped\\\` treated as abort in parallel extraction mode
|
|
||||||
- In the parallel worker pool, \\\`noextractor:skipped\\\` was caught by \\\`isExtractAbortError()\\\` and set as \\\`abortError\\\`. The error was then re-thrown as "aborted:extract", preventing the correct no-extractor counting logic from running.
|
|
||||||
- **Fix:** Check for "noextractor:skipped" before the abort check and break without setting \\\`abortError\\\`.
|
|
||||||
|
|
||||||
#### \\\`collectArchiveCleanupTargets\\\` missing tar.gz/bz2/xz
|
|
||||||
- Tar compound archives (.tar.gz, .tar.bz2, .tar.xz, .tgz, .tbz2, .txz) were not recognized by the cleanup function. After successful extraction, the source archive was never deleted.
|
|
||||||
- **Fix:** Added a tar compound archive pattern before the generic split check.
|
|
||||||
|
|
||||||
#### \\\`runWithConcurrency\\\` continues dispatching after first error
|
|
||||||
- When one worker threw an error (e.g. abort), \\\`firstError\\\` was set but \\\`next()\\\` kept returning items. Other workers started new requests unnecessarily, delaying the abort.
|
|
||||||
- **Fix:** Check \\\`firstError\\\` in \\\`next()\\\` and return \\\`undefined\\\` to stop dispatching.
|
|
||||||
|
|
||||||
#### Side effect inside React state updater in \\\`onPackageFinishEdit\\\`
|
|
||||||
- The \\\`setEditingPackageId\\\` updater function contained an IPC call (\\\`renamePackage\\\`) as a side effect. React may call updater functions multiple times (e.g. StrictMode), causing duplicate rename RPCs.
|
|
||||||
- **Fix:** Moved the IPC call outside the updater. The updater now only returns the new state; the rename fires after based on a flag.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Minor
|
|
||||||
|
|
||||||
- Fixed typo "Session-Ubersicht" -> "Session-\\u00dcbersicht" in statistics tab
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \\\`src/main/storage.ts\\\` — syncSaveGeneration counter for async save race protection; normalizeHistoryEntry urls field
|
|
||||||
- \\\`src/main/download-manager.ts\\\` — skipItems refreshPackageStatus; currentSpeedBps running check; hybrid extraction catch broadened; immediate cleanup history fix + retryStateByItem cleanup
|
|
||||||
- \\\`src/main/extractor.ts\\\` — skipped generic splits resume/cleanup tracking; noextractor parallel mode fix; tar.gz/bz2/xz cleanup targets
|
|
||||||
- \\\`src/main/debrid.ts\\\` — runWithConcurrency stops on first error
|
|
||||||
- \\\`src/renderer/App.tsx\\\` — panelDirtyRevisionRef for settings dirty tracking; onPackageFinishEdit side effect fix; Session-\\u00dcbersicht typo
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API (api/v1).
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, leading slash included.
 * @param {object} [body] - Optional JSON-serializable request body.
 * @returns {Promise<object>} Parsed JSON response; `{}` for an empty body.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: a malformed success body would otherwise throw
        // inside the "end" handler and surface as an uncaught exception
        // instead of rejecting this promise.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload a single binary asset to an existing Codeberg release.
 * The file is read fully into memory, which is acceptable for the
 * installer-sized artifacts this script handles.
 * @param {number|string} releaseId - Release ID returned by the create call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name to register on the release.
 * @returns {Promise<object>} Parsed JSON API response for the new asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A read failure throws inside the executor; the Promise constructor
    // converts that into a rejection.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse so a malformed success body rejects instead of
        // throwing an uncaught exception inside the "end" handler.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Create the Codeberg release for this tag and upload every build artifact
 * found in the local release/ directory. Artifacts missing on disk are
 * skipped with a warning rather than aborting the whole upload.
 */
async function main() {
  console.log("Creating release...");
  const created = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${created.id}`);

  const artifactsDir = path.join(__dirname, "..", "release");
  // Local electron-builder output name -> asset name on the release page.
  const artifacts = [
    { file: "Real-Debrid-Downloader-Setup-1.6.25.exe", name: "Real-Debrid-Downloader-Setup-1.6.25.exe" },
    { file: "Real-Debrid-Downloader 1.6.25.exe", name: "Real-Debrid-Downloader-1.6.25.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.25.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.25.exe.blockmap" },
  ];

  for (const artifact of artifacts) {
    const artifactPath = path.join(artifactsDir, artifact.file);
    if (!fs.existsSync(artifactPath)) {
      console.warn(`SKIP ${artifact.file}`);
      continue;
    }
    console.log(`Uploading ${artifact.name} ...`);
    await uploadAsset(created.id, artifactPath, artifact.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}
|
|
||||||
// Entry point: run the release flow; any failure is logged and fails the process.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,158 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this module's directory for locating the ../release output folder.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY: an API token was committed here in plain text. Prefer the
// environment variable; the literal fallback preserves existing behavior,
// but the leaked token should be revoked and the fallback removed.
const TOKEN = process.env.CODEBERG_TOKEN || "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.26";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.26
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 2)
|
|
||||||
|
|
||||||
This release fixes 13 bugs found through an intensive 10-agent parallel code review covering every line of the codebase.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Important (4 fixes)
|
|
||||||
|
|
||||||
#### \`applyRapidgatorCheckResult\` sets items to "failed" without recording run outcome
|
|
||||||
- When an asynchronous Rapidgator online-check returned "offline" during a running session, the item was set to \`status: "failed"\` but \`recordRunOutcome()\` was never called. The scheduler kept the item in \`runItemIds\` without an outcome, causing incorrect summary statistics and potentially preventing the session from finishing.
|
|
||||||
- **Fix:** Call \`recordRunOutcome(itemId, "failed")\` when the item is in \`runItemIds\`.
|
|
||||||
|
|
||||||
#### \`skipItems()\` does not trigger extraction when package becomes fully resolved
|
|
||||||
- After skipping the last queued/pending items in a package, \`refreshPackageStatus()\` correctly set the package to "completed", but no extraction was triggered. Items that were already downloaded sat with "Entpacken - Ausstehend" forever.
|
|
||||||
- **Fix:** After refreshing package statuses, check if all items are in a terminal state and trigger \`runPackagePostProcessing()\` for packages with unextracted completed items.
|
|
||||||
|
|
||||||
#### \`applyOnStartCleanupPolicy\` creates no history entries for cleaned-up packages
|
|
||||||
- The on_start cleanup deleted completed items from \`session.items\` inside the filter callback, then called \`removePackageFromSession(pkgId, [])\` with an empty array. Since \`removePackageFromSession\` uses the item IDs to build the history entry, no history was recorded. Packages silently vanished from the download log.
|
|
||||||
- **Fix:** Collect completed item IDs separately. Pass them to \`removePackageFromSession()\` for history recording. Delete items from \`session.items\` only in the non-empty-package branch.
|
|
||||||
|
|
||||||
#### \`cancelPackage\` overwrites completed items' run outcomes to "cancelled"
|
|
||||||
- When cancelling a package, \`recordRunOutcome(itemId, "cancelled")\` was called for ALL items including already-completed ones. This overwrote the "completed" outcome, causing the run summary to show incorrect numbers (e.g., "0 erfolgreich, 10 abgebrochen" instead of "8 erfolgreich, 2 abgebrochen").
|
|
||||||
- **Fix:** Only record "cancelled" outcome for items whose status is not "completed".
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Medium (8 fixes)
|
|
||||||
|
|
||||||
#### \`looksLikeArchivePart\` missing generic \`.NNN\` split file pattern
|
|
||||||
- The function recognized multipart RAR (\`.partNN.rar\`), old-style RAR (\`.rNN\`), split ZIP (\`.zip.NNN\`), and split 7z (\`.7z.NNN\`), but NOT generic \`.NNN\` split files (e.g., \`movie.001\`, \`movie.002\`). In hybrid extraction mode, this caused the system to incorrectly conclude that all parts of a generic split archive were ready, potentially triggering extraction before all parts were downloaded.
|
|
||||||
- **Fix:** Added a generic \`.NNN\` pattern that matches when the entry point ends with \`.001\` (excluding .zip/.7z variants).
|
|
||||||
|
|
||||||
#### \`resolveArchiveItemsFromList\` missing split ZIP/7z/generic patterns
|
|
||||||
- Only multipart RAR and old-style RAR patterns were recognized. Split ZIP (\`.zip.001\`), split 7z (\`.7z.001\`), and generic split (\`.001\`) archives fell through to exact-name matching, so only the entry-point file received per-archive progress labels while other parts showed stale "Ausstehend" during extraction.
|
|
||||||
- **Fix:** Added matching patterns for split ZIP, split 7z, and generic \`.NNN\` splits before the fallback exact-name match.
|
|
||||||
|
|
||||||
#### \`normalizeSessionStatuses\` does not update \`updatedAt\` for modified items
|
|
||||||
- When item statuses were normalized on app startup (e.g., \`cancelled/Gestoppt\` → \`queued\`, \`extracting\` → \`completed\`, \`downloading\` → \`queued\`), \`item.updatedAt\` was not updated. This left stale timestamps from the previous session, causing the unpause stall detector to prematurely abort freshly recovered items.
|
|
||||||
- **Fix:** Added \`item.updatedAt = nowMs()\` after each status change in \`normalizeSessionStatuses\`.
|
|
||||||
|
|
||||||
#### \`applyRetroactiveCleanupPolicy\` package_done check ignores failed/cancelled items
|
|
||||||
- The \`package_done\` retroactive cleanup only considered items with \`status === "completed"\` as "done". Packages with mixed outcomes (some completed, some failed/cancelled) were never cleaned up, even though the inline \`applyCompletedCleanupPolicy\` correctly treats failed/cancelled items as terminal.
|
|
||||||
- **Fix:** Extended the \`allCompleted\` check to include \`"failed"\` and \`"cancelled"\` statuses, matching the inline policy logic.
|
|
||||||
|
|
||||||
#### \`.tgz\`/\`.tbz2\`/\`.txz\` missing from \`findArchiveCandidates\`
|
|
||||||
- Tar compound archives with short-form extensions (.tgz, .tbz2, .txz) were not recognized as archive candidates by \`findArchiveCandidates()\`. They were silently skipped during extraction, even though \`collectArchiveCleanupTargets\` correctly recognized them.
|
|
||||||
- **Fix:** Extended the tar compressed filter regex to include short-form extensions. Also updated \`archiveSortKey\`, \`archiveTypeRank\`, and \`archiveFilenamePasswords\` for consistency.
|
|
||||||
|
|
||||||
#### \`subst\` drive mapping uses \`"Z:"\` instead of \`"Z:\\\\"\`
|
|
||||||
- When creating a subst drive for long-path workaround, \`effectiveTargetDir\` was set to \`"Z:"\` (without trailing backslash). On Windows, \`Z:\` without a backslash references the current directory on drive Z rather than the root. For 7z extractions, \`-oZ:\` could extract files to an unexpected location.
|
|
||||||
- **Fix:** Changed to \`"Z:\\\\"\` to explicitly reference the root of the subst drive.
|
|
||||||
|
|
||||||
#### Pre-allocated sparse file after crash marked as complete
|
|
||||||
- On Windows, downloads use sparse file pre-allocation (\`truncate(totalBytes)\`). If the process crashed hard (kill, power loss), the truncation cleanup never ran. On next startup, \`stat.size === totalBytes\` (pre-allocated zeros), and the HTTP 416 handler falsely treated the file as complete.
|
|
||||||
- **Fix:** Before resuming, compare \`stat.size\` with persisted \`item.downloadedBytes\`. If the file is >1MB larger than the persisted count, truncate to the persisted value.
|
|
||||||
|
|
||||||
#### Integrity-check retry does not call \`dropItemContribution\`
|
|
||||||
- When a file failed integrity validation and was deleted for re-download, \`item.downloadedBytes\` was reset to 0 but \`dropItemContribution()\` was not called. Session statistics (\`totalDownloadedBytes\`) remained inflated until the next download started.
|
|
||||||
- **Fix:** Call \`this.dropItemContribution(item.id)\` before resetting \`downloadedBytes\`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Low (1 fix)
|
|
||||||
|
|
||||||
#### \`applyCompletedCleanupPolicy\` (immediate path) leaks \`retryStateByItem\` entries
|
|
||||||
- The immediate cleanup path cleaned up \`retryAfterByItem\` but not \`retryStateByItem\`, causing a minor memory leak over long sessions.
|
|
||||||
- **Fix:** Added \`this.retryStateByItem.delete(itemId)\` alongside the existing \`retryAfterByItem\` cleanup.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — resolveArchiveItemsFromList split patterns; looksLikeArchivePart generic .NNN; Rapidgator recordRunOutcome; skipItems triggers extraction; normalizeSessionStatuses updatedAt; applyRetroactiveCleanupPolicy failed/cancelled; applyOnStartCleanupPolicy history; cancelPackage outcome fix; pre-allocation crash guard; integrity-check dropItemContribution; immediate cleanup retryStateByItem
|
|
||||||
- \`src/main/extractor.ts\` — findArchiveCandidates .tgz/.tbz2/.txz; archiveSortKey/archiveTypeRank/archiveFilenamePasswords .tgz support; subst drive trailing backslash
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API (api/v1).
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, leading slash included.
 * @param {object} [body] - Optional JSON-serializable request body.
 * @returns {Promise<object>} Parsed JSON response; `{}` for an empty body.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: a malformed success body would otherwise throw
        // inside the "end" handler and surface as an uncaught exception
        // instead of rejecting this promise.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload a single binary asset to an existing Codeberg release.
 * The file is read fully into memory, which is acceptable for the
 * installer-sized artifacts this script handles.
 * @param {number|string} releaseId - Release ID returned by the create call.
 * @param {string} filePath - Path of the local file to upload.
 * @param {string} fileName - Asset name to register on the release.
 * @returns {Promise<object>} Parsed JSON API response for the new asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A read failure throws inside the executor; the Promise constructor
    // converts that into a rejection.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse so a malformed success body rejects instead of
        // throwing an uncaught exception inside the "end" handler.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Create the Codeberg release for this tag and upload every build artifact
 * found in the local release/ directory. Artifacts missing on disk are
 * skipped with a warning rather than aborting the whole upload.
 */
async function main() {
  console.log("Creating release...");
  const created = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${created.id}`);

  const artifactsDir = path.join(__dirname, "..", "release");
  // Local electron-builder output name -> asset name on the release page.
  const artifacts = [
    { file: "Real-Debrid-Downloader-Setup-1.6.26.exe", name: "Real-Debrid-Downloader-Setup-1.6.26.exe" },
    { file: "Real-Debrid-Downloader 1.6.26.exe", name: "Real-Debrid-Downloader-1.6.26.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.26.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.26.exe.blockmap" },
  ];

  for (const artifact of artifacts) {
    const artifactPath = path.join(artifactsDir, artifact.file);
    if (!fs.existsSync(artifactPath)) {
      console.warn(`SKIP ${artifact.file}`);
      continue;
    }
    console.log(`Uploading ${artifact.name} ...`);
    await uploadAsset(created.id, artifactPath, artifact.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}
|
|
||||||
// Entry point: run the release flow; any failure is logged and fails the process.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,147 +0,0 @@
|
|||||||
import https from "node:https";
|
|
||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
import { fileURLToPath } from "node:url";
|
|
||||||
|
|
||||||
// Resolve this module's directory for locating the ../release output folder.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY: an API token was committed here in plain text. Prefer the
// environment variable; the literal fallback preserves existing behavior,
// but the leaked token should be revoked and the fallback removed.
const TOKEN = process.env.CODEBERG_TOKEN || "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.27";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.27
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 3)
|
|
||||||
|
|
||||||
This release fixes 10 bugs found through an intensive 10-agent parallel code review, including a **critical regression** introduced in v1.6.26.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Critical (1 fix)
|
|
||||||
|
|
||||||
#### \`applyRapidgatorCheckResult\` crashes with \`ReferenceError: itemId is not defined\`
|
|
||||||
- The v1.6.26 fix for recording run outcomes used \`itemId\` instead of \`item.id\` — the method parameter is \`item\`, not \`itemId\`. This would crash at runtime whenever a Rapidgator link was detected as offline during an active run, potentially halting the entire download session.
|
|
||||||
- **Fix:** Changed \`itemId\` to \`item.id\` in both the \`runItemIds.has()\` check and the \`recordRunOutcome()\` call.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Important (3 fixes)
|
|
||||||
|
|
||||||
#### Extraction timeout/exception overwrites already-extracted items' status
|
|
||||||
- When the extraction process timed out or threw an exception, ALL completed items in the package had their \`fullStatus\` overwritten to \`Entpack-Fehler: ...\`, even items whose archives had already been successfully extracted. This caused items showing "Entpackt - Done (1m 23s)" to suddenly show an error.
|
|
||||||
- **Fix:** Added an \`isExtractedLabel()\` guard — only items whose \`fullStatus\` does NOT already indicate successful extraction get the error label.
|
|
||||||
|
|
||||||
#### Hybrid extraction false error when extracted=0 and failed=0
|
|
||||||
- In hybrid extraction mode, when \`result.extracted === 0 && result.failed === 0\` (e.g., all archives were already extracted via resume state), the condition fell through and set \`fullStatus = "Entpacken - Error"\` even though nothing actually failed.
|
|
||||||
- **Fix:** Restructured the condition to only set error status when \`result.failed > 0\`, set done status when \`result.extracted > 0\`, and leave current status unchanged (no-op) when both are 0.
|
|
||||||
|
|
||||||
#### \`applyRetroactiveCleanupPolicy\` \`allExtracted\` check doesn't skip failed/cancelled items
|
|
||||||
- When checking if all items in a package were extracted (to decide whether to clean up), failed and cancelled items were not skipped. A package with 9 extracted items and 1 failed item would never be cleaned up, even though the failed item can never be extracted.
|
|
||||||
- **Fix:** Skip items with \`status === "failed"\` or \`status === "cancelled"\` in the \`allExtracted\` check.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Medium (6 fixes)
|
|
||||||
|
|
||||||
#### \`resetPackage\` missing cleanup for \`runCompletedPackages\`, \`packagePostProcessTasks\`, \`hybridExtractRequeue\`
|
|
||||||
- When resetting a package (re-downloading all items), the package ID was not removed from \`runCompletedPackages\`, \`packagePostProcessTasks\`, and \`hybridExtractRequeue\` maps. This could cause the reset package's extraction to be skipped (if already in \`runCompletedPackages\`) or the scheduler to wait forever for a stale post-processing task.
|
|
||||||
- **Fix:** Delete the package ID from all three maps after aborting the post-processing controller.
|
|
||||||
|
|
||||||
#### \`freshRetry\` does not call \`dropItemContribution\`
|
|
||||||
- When an item failed and was retried via the "fresh retry" path (delete file, re-queue), \`dropItemContribution()\` was not called before re-queuing. Session download statistics (\`totalDownloadedBytes\`) remained inflated by the failed item's bytes.
|
|
||||||
- **Fix:** Call \`this.dropItemContribution(item.id)\` before queuing the retry.
|
|
||||||
|
|
||||||
#### JVM extractor layout cache not caching \`null\` result
|
|
||||||
- When Java was not installed, \`discoverJvmLayout()\` returned \`null\` but didn't cache it. Every extraction attempt re-ran the Java discovery process (spawning processes, checking paths), adding unnecessary latency.
|
|
||||||
- **Fix:** Cache \`null\` results with a timestamp (\`cachedJvmLayoutNullSince\`). Re-check after 60 seconds in case the user installs Java mid-session.
|
|
||||||
|
|
||||||
#### Parallel resume-state writes race condition
|
|
||||||
- When multiple archives extracted in parallel, each called \`writeExtractResumeState()\` which wrote to the same temp file path. Two concurrent writes could collide: one renames the temp file while the other is still writing to it, causing the second write to silently fail or produce a corrupt resume file.
|
|
||||||
- **Fix:** Use unique temp file paths with timestamp + random suffix per write operation. On rename failure, clean up the orphaned temp file.
|
|
||||||
|
|
||||||
#### Stale closure in Ctrl+O keyboard handler
|
|
||||||
- The \`useEffect\` with \`[]\` deps captured the initial version of \`onImportDlc\`. When the user changed settings (like download directory) and then pressed Ctrl+O, the keyboard handler called the stale closure which sent outdated settings to the backend, potentially importing DLC files to the wrong directory.
|
|
||||||
- **Fix:** Added a \`useRef\` (\`onImportDlcRef\`) that always points to the latest \`onImportDlc\` function. The keyboard handler now calls \`onImportDlcRef.current()\`.
|
|
||||||
|
|
||||||
#### \`applyCompletedCleanupPolicy\` immediate path leaks \`retryStateByItem\` entries
|
|
||||||
- (Carried from v1.6.26) The immediate cleanup path cleaned up \`retryAfterByItem\` but not \`retryStateByItem\`, causing a minor memory leak over long sessions.
|
|
||||||
- **Fix:** Added \`this.retryStateByItem.delete(itemId)\` alongside the existing \`retryAfterByItem\` cleanup.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — applyRapidgatorCheckResult itemId→item.id; extraction timeout isExtractedLabel guard; hybrid false error restructured; applyRetroactiveCleanupPolicy allExtracted skip failed/cancelled; resetPackage cleanup maps; freshRetry dropItemContribution; retryStateByItem cleanup
|
|
||||||
- \`src/main/extractor.ts\` — JVM layout null cache; parallel resume-state unique tmp paths
|
|
||||||
- \`src/renderer/App.tsx\` — Ctrl+O stale closure fix via useRef
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API (api/v1).
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1, leading slash included.
 * @param {object} [body] - Optional JSON-serializable request body.
 * @returns {Promise<object>} Parsed JSON response; `{}` for an empty body.
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // Guard the parse: a malformed success body would otherwise throw
        // inside the "end" handler and surface as an uncaught exception
        // instead of rejecting this promise.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload a single release asset to the Codeberg API.
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Asset name to publish under.
 * @returns {Promise<object>} Parsed JSON describing the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A throw here (e.g. missing file) rejects the promise via the executor.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse: a throw inside this callback would otherwise be
        // an uncaught exception instead of a rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Creates the Codeberg release for TAG and uploads every build artifact
 * found in the local release/ directory. Artifacts missing on disk are
 * skipped with a warning instead of failing the run.
 */
async function main() {
  console.log("Creating release...");
  const createdRelease = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${createdRelease.id}`);

  const artifactsDir = path.join(__dirname, "..", "release");
  // Local build output name -> published asset name.
  const artifacts = [
    { file: "Real-Debrid-Downloader-Setup-1.6.27.exe", name: "Real-Debrid-Downloader-Setup-1.6.27.exe" },
    { file: "Real-Debrid-Downloader 1.6.27.exe", name: "Real-Debrid-Downloader-1.6.27.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.27.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.27.exe.blockmap" },
  ];

  for (const artifact of artifacts) {
    const localPath = path.join(artifactsDir, artifact.file);
    if (!fs.existsSync(localPath)) {
      console.warn(`SKIP ${artifact.file}`);
      continue;
    }
    console.log(`Uploading ${artifact.name} ...`);
    await uploadAsset(createdRelease.id, localPath, artifact.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((e) => { console.error(e); process.exit(1); });
|
|
||||||
@ -1,149 +0,0 @@
|
|||||||
import https from "node:https";
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

// ESM has no __dirname; reconstruct it from the module URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY(review): a live API token was hard-coded here and must be treated
// as leaked — revoke it on Codeberg. The environment variable now takes
// precedence; the literal remains only as a fallback for the existing flow.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.28";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.28
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 4)
|
|
||||||
|
|
||||||
This release fixes 11 bugs found through an intensive 10-agent parallel code review covering every line of the codebase.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Important (3 fixes)
|
|
||||||
|
|
||||||
#### Extraction blocked after app restart when skipped items exist (\`cancelled > 0\`)
|
|
||||||
- \`recoverPostProcessingOnStartup()\` and \`triggerPendingExtractions()\` both required \`cancelled === 0\` to trigger extraction. When items were skipped (status: "cancelled") and the app was restarted before extraction finished, the extraction was never re-triggered. Items hung permanently on "Entpacken - Ausstehend".
|
|
||||||
- **Fix:** Removed the \`cancelled === 0\` check from both functions, consistent with \`handlePackagePostProcessing()\` which correctly proceeds with extraction despite cancelled items.
|
|
||||||
|
|
||||||
#### \`resetItems\` missing cleanup for package-level state maps
|
|
||||||
- When individual items were reset (re-download), the package was not removed from \`runCompletedPackages\`, \`historyRecordedPackages\`, \`packagePostProcessTasks\`, and \`hybridExtractRequeue\`. This caused: (1) inflated extraction counts in run summaries, (2) missing history entries when the package re-completed, (3) extraction continuing with now-deleted files if reset during active extraction.
|
|
||||||
- **Fix:** Added full package-level cleanup (abort post-processing controller, delete from all state maps) for each affected package, matching the behavior of \`resetPackage()\`.
|
|
||||||
|
|
||||||
#### Generic split-file skip does not persist resume state to disk
|
|
||||||
- When a generic \`.001\` split file was skipped (no archive signature detected), it was added to the in-memory \`resumeCompleted\` set but \`writeExtractResumeState()\` was never called. If the app crashed or was restarted before the next archive wrote resume state, the skipped file would be reprocessed on the next run. For packages consisting entirely of unrecognized generic splits, resume state was NEVER written.
|
|
||||||
- **Fix:** Call \`writeExtractResumeState()\` after adding the skipped archive to \`resumeCompleted\`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Medium (8 fixes)
|
|
||||||
|
|
||||||
#### \`removeItem\` loses history for last item of a package
|
|
||||||
- When removing the last item from a package, \`removePackageFromSession()\` was called with an empty \`itemIds\` array. Since history entries are built from item IDs, no history was recorded and the package silently vanished from the download log.
|
|
||||||
- **Fix:** Pass \`[itemId]\` instead of \`[]\` to \`removePackageFromSession()\` so the deleted item is included in the history entry.
|
|
||||||
|
|
||||||
#### \`sortPackageOrderByHoster\` sorts by debrid provider instead of file hoster
|
|
||||||
- Clicking the "Hoster" column header sorted packages by \`item.provider\` (the debrid service like "realdebrid") instead of the actual file hoster extracted from the URL (like "uploaded", "rapidgator"). The sort order did not match what the column displayed.
|
|
||||||
- **Fix:** Changed to use \`extractHoster(item.url)\` for sorting, matching the column display logic.
|
|
||||||
|
|
||||||
#### Abort error in single-provider mode triggers false provider cooldown
|
|
||||||
- When a user cancelled a download during the unrestrict phase with auto-fallback disabled, the abort error was wrapped as \`"Unrestrict fehlgeschlagen: ..."\`. Downstream code detected the "unrestrict" keyword and called \`recordProviderFailure()\`, putting the provider into an unnecessary cooldown that delayed subsequent downloads.
|
|
||||||
- **Fix:** Added an abort signal check before wrapping the error, consistent with the fallback code path. Abort errors are now re-thrown directly without the "Unrestrict fehlgeschlagen" prefix.
|
|
||||||
|
|
||||||
#### \`START_ITEMS\` IPC handler missing null-safe fallback
|
|
||||||
- The \`START_ITEMS\` handler validated \`itemIds ?? []\` but passed the raw \`itemIds\` (potentially \`null\`) to \`controller.startItems()\`. All other similar handlers (\`START_PACKAGES\`, \`SKIP_ITEMS\`, \`RESET_ITEMS\`) correctly used \`?? []\` for both validation and the controller call.
|
|
||||||
- **Fix:** Changed to \`controller.startItems(itemIds ?? [])\`.
|
|
||||||
|
|
||||||
#### \`finishRun()\` does not reset \`runStartedAt\`, causing stale session duration
|
|
||||||
- When a download run completed naturally, \`finishRun()\` set \`running = false\` but did not reset \`runStartedAt\` to 0. This caused \`getSessionStats()\` to report an ever-growing \`sessionDurationSeconds\` (wall clock time since run start) while \`totalDownloadedBytes\` stayed fixed, making \`averageSpeedBps\` decay toward 0 over time. In contrast, \`stop()\` correctly reset \`runStartedAt = 0\`.
|
|
||||||
- **Fix:** Added \`this.session.runStartedAt = 0\` to \`finishRun()\`.
|
|
||||||
|
|
||||||
#### Package status stuck at "downloading" when all items fail
|
|
||||||
- When all items in a package failed and none completed, the package status was never updated from "downloading" because \`refreshPackageStatus()\` was only called on item completion, not on item failure. The package remained in "downloading" state until the next app restart.
|
|
||||||
- **Fix:** Call \`refreshPackageStatus()\` after recording a failed item outcome in the error handler.
|
|
||||||
|
|
||||||
#### Shelve check preempts permanent link error detection
|
|
||||||
- The shelve check (\`totalNonStallFailures >= 15\`) ran before the \`isPermanentLinkError\` check. After accumulating 15+ failures, a permanent link error (dead link, file removed) would be shelved for a 5-minute retry pause instead of failing immediately, wasting time on irrecoverable errors.
|
|
||||||
- **Fix:** Moved the \`isPermanentLinkError\` check before the shelve check so permanent errors are detected immediately regardless of failure count.
|
|
||||||
|
|
||||||
#### Password-cracking labels not cleared on extraction error/abort/completion
|
|
||||||
- When extraction set item labels to "Passwort knacken: ..." or "Passwort gefunden ...", the error/completion handlers used \`/^Entpacken/\` regex to match items for status updates. This regex did not match password-related labels, leaving items permanently stuck with stale "Passwort knacken" or "Passwort gefunden" status after extraction errors, timeouts, or even successful completion.
|
|
||||||
- **Fix:** Extended the regex checks in hybrid success, hybrid error, and abort handlers to also match \`/^Passwort/\` labels.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — recoverPostProcessingOnStartup/triggerPendingExtractions remove cancelled===0; resetItems package cleanup; removeItem history fix; finishRun runStartedAt; refreshPackageStatus on item failure; shelve vs permanent link error order; password label cleanup in hybrid/error/abort handlers
|
|
||||||
- \`src/main/extractor.ts\` — generic split-skip writeExtractResumeState
|
|
||||||
- \`src/main/debrid.ts\` — abort error passthrough in single-provider mode
|
|
||||||
- \`src/main/main.ts\` — START_ITEMS itemIds ?? []
|
|
||||||
- \`src/renderer/App.tsx\` — sortPackageOrderByHoster uses extractHoster
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API.
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1 (must start with "/").
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for empty bodies).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // A throw from JSON.parse inside this event callback would become an
        // uncaught exception rather than a rejection — guard it explicitly.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload a single release asset to the Codeberg API.
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Asset name to publish under.
 * @returns {Promise<object>} Parsed JSON describing the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A throw here (e.g. missing file) rejects the promise via the executor.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse: a throw inside this callback would otherwise be
        // an uncaught exception instead of a rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Creates the Codeberg release for TAG and uploads every build artifact
 * found in the local release/ directory. Artifacts missing on disk are
 * skipped with a warning instead of failing the run.
 */
async function main() {
  console.log("Creating release...");
  const createdRelease = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${createdRelease.id}`);

  const artifactsDir = path.join(__dirname, "..", "release");
  // Local build output name -> published asset name.
  const artifacts = [
    { file: "Real-Debrid-Downloader-Setup-1.6.28.exe", name: "Real-Debrid-Downloader-Setup-1.6.28.exe" },
    { file: "Real-Debrid-Downloader 1.6.28.exe", name: "Real-Debrid-Downloader-1.6.28.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.28.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.28.exe.blockmap" },
  ];

  for (const artifact of artifacts) {
    const localPath = path.join(artifactsDir, artifact.file);
    if (!fs.existsSync(localPath)) {
      console.warn(`SKIP ${artifact.file}`);
      continue;
    }
    console.log(`Uploading ${artifact.name} ...`);
    await uploadAsset(createdRelease.id, localPath, artifact.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((e) => { console.error(e); process.exit(1); });
|
|
||||||
@ -1,144 +0,0 @@
|
|||||||
import https from "node:https";
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

// ESM has no __dirname; reconstruct it from the module URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// SECURITY(review): a live API token was hard-coded here and must be treated
// as leaked — revoke it on Codeberg. The environment variable now takes
// precedence; the literal remains only as a fallback for the existing flow.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
const TAG = "v1.6.29";
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.29
|
|
||||||
|
|
||||||
### Bug Fixes (Deep Code Review — Round 5)
|
|
||||||
|
|
||||||
This release fixes 10 bugs found through an intensive 10-agent parallel code review covering every line of the codebase.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Critical (1 fix — regression from v1.6.28)
|
|
||||||
|
|
||||||
#### \`finishRun()\` zeroed \`runStartedAt\` before calculating run duration
|
|
||||||
- The v1.6.28 fix that added \`this.session.runStartedAt = 0\` placed the reset **before** the code that reads \`runStartedAt\` to calculate session duration. This made \`runStartedAt > 0\` always false, so \`duration\` defaulted to 1 second. The run summary then showed absurdly high average speeds (total bytes / 1 second).
|
|
||||||
- **Fix:** Save \`runStartedAt\` to a local variable before zeroing, then use the local variable for the duration calculation.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Important (2 fixes)
|
|
||||||
|
|
||||||
#### \`importBackup\` restored session overwritten by \`prepareForShutdown()\`
|
|
||||||
- When a user restored a backup via import, \`saveSession()\` correctly wrote the restored session to disk. However, when the app quit (as instructed by "Bitte App neustarten"), \`prepareForShutdown()\` saved the **old in-memory session** back to disk, overwriting the restored backup. The restore appeared to succeed but was silently lost on restart.
|
|
||||||
- **Fix:** Added a \`skipShutdownPersist\` flag to \`DownloadManager\`. After \`importBackup\` saves the restored session, it sets this flag to \`true\`. \`prepareForShutdown()\` checks the flag and skips the session/settings write when set.
|
|
||||||
|
|
||||||
#### \`normalizeLoadedSessionTransientFields()\` missing package-level and session-level reset
|
|
||||||
- On startup, item statuses like "downloading" and "paused" were correctly reset to "queued", but **package statuses** in the same active states were left unchanged. Similarly, \`session.running\` and \`session.paused\` were not cleared. After a crash during an active download, packages could appear stuck in "downloading" status on restart, and the session could appear to be "running" with no active tasks.
|
|
||||||
- **Fix:** Added package status reset (active statuses → "queued") and \`session.running = false\` / \`session.paused = false\` to the normalization function.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Medium (7 fixes)
|
|
||||||
|
|
||||||
#### Stale \`itemContributedBytes\` / \`reservedTargetPaths\` / \`claimedTargetPathByItem\` across runs
|
|
||||||
- When the user manually stopped a download run, \`stop()\` did not call \`finishRun()\`, so \`itemContributedBytes\`, \`reservedTargetPaths\`, and \`claimedTargetPathByItem\` retained stale values from the previous run. On the next \`start()\` or \`resume()\`, these maps were not cleared. This caused: (1) inflated byte contributions subtracted from the reset \`totalDownloadedBytes\`, corrupting speed/progress calculations, (2) orphan path reservations preventing new items from claiming the same filenames, (3) stale target path claims causing unnecessary filename suffixing (\`file (1).rar\`).
|
|
||||||
- **Fix:** Added \`.clear()\` calls for all three maps in both \`startSelected()\` and the normal \`resume()\` path, matching \`finishRun()\`'s cleanup.
|
|
||||||
|
|
||||||
#### Hybrid extraction abort leaves stale progress labels on items
|
|
||||||
- When hybrid extraction was aborted (\`"aborted:extract"\`), the catch handler returned immediately without resetting item labels. Items could be left permanently showing mid-progress labels like \`"Entpacken 47% - movie.part01.rar - 12s"\` or \`"Passwort knacken: 30% (3/10) - archive.rar"\`. If the session was stopped or paused after the abort, these stale labels persisted in the UI and in the saved session.
|
|
||||||
- **Fix:** Added label cleanup loop before the return in the abort handler, resetting extraction/password labels to \`"Entpacken abgebrochen (wird fortgesetzt)"\`, consistent with the full extraction abort handler.
|
|
||||||
|
|
||||||
#### RAR5 multipart \`.rev\` recovery volumes not cleaned up after extraction
|
|
||||||
- \`collectArchiveCleanupTargets()\` matched RAR5 multipart data files (\`movie.part01.rar\`, \`movie.part02.rar\`) and a single legacy recovery file (\`movie.rev\`), but NOT RAR5 multipart recovery volumes (\`movie.part01.rev\`, \`movie.part02.rev\`). After extraction with cleanup enabled, recovery volumes were left on disk, wasting space.
|
|
||||||
- **Fix:** Added regex \`^prefix\\.part\\d+\\.rev$\` to the multipart RAR cleanup targets.
|
|
||||||
|
|
||||||
#### \`findReadyArchiveSets\` missed queued items without \`targetPath\` in pending check
|
|
||||||
- The archive-readiness check built \`pendingPaths\` from items with \`targetPath\` set, but items that hadn't started downloading yet (no \`targetPath\`, only \`fileName\`) were excluded. If all on-disk archive parts were completed but additional parts were still queued (never started), the archive could be prematurely marked as ready for extraction, leading to incomplete extraction.
|
|
||||||
- **Fix:** Also add \`path.join(pkg.outputDir, item.fileName)\` to \`pendingPaths\` for items without \`targetPath\`.
|
|
||||||
|
|
||||||
#### \`buildUniqueFlattenTargetPath\` unbounded loop
|
|
||||||
- The MKV library flatten function used an unbounded \`while(true)\` loop to find a unique filename, incrementing a suffix counter. In pathological cases (e.g., thousands of existing files or reserved names), this could run indefinitely, blocking the main process.
|
|
||||||
- **Fix:** Added a \`MAX_ATTEMPTS = 10000\` bound with a timestamp-based fallback filename to guarantee termination.
|
|
||||||
|
|
||||||
#### Redundant regex conditions in hybrid extraction error handler
|
|
||||||
- The error handler for hybrid extraction checked \`entry.fullStatus === "Entpacken - Ausstehend"\` and \`"Entpacken - Warten auf Parts"\` as separate conditions alongside the regex \`/^Entpacken\\b/i\`, which already matches both strings. The redundant conditions obscured the intent and added confusion.
|
|
||||||
- **Fix:** Removed the redundant explicit string comparisons, keeping only the regex checks.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Files Changed
|
|
||||||
- \`src/main/download-manager.ts\` — finishRun runStartedAt local var; start/resume clear itemContributedBytes + reservedTargetPaths + claimedTargetPathByItem; hybrid abort label cleanup; findReadyArchiveSets pendingPaths fileName fallback; buildUniqueFlattenTargetPath loop bound; hybrid error handler simplify redundant regex
|
|
||||||
- \`src/main/app-controller.ts\` — importBackup sets skipShutdownPersist flag
|
|
||||||
- \`src/main/storage.ts\` — normalizeLoadedSessionTransientFields resets package statuses and session.running/paused
|
|
||||||
- \`src/main/extractor.ts\` — RAR5 multipart .rev recovery volume cleanup
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Perform a JSON request against the Codeberg API.
 * @param {string} method - HTTP method, e.g. "GET" or "POST".
 * @param {string} apiPath - Path below /api/v1 (must start with "/").
 * @param {object} [body] - Optional payload, serialized as JSON.
 * @returns {Promise<object>} Parsed JSON response ({} for empty bodies).
 */
function apiRequest(method, apiPath, body) {
  return new Promise((resolve, reject) => {
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1${apiPath}`,
      method,
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/json", Accept: "application/json" },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`${res.statusCode} ${text}`));
          return;
        }
        // A throw from JSON.parse inside this event callback would become an
        // uncaught exception rather than a rejection — guard it explicitly.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Invalid JSON response (${res.statusCode}): ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Upload a single release asset to the Codeberg API.
 * @param {number} releaseId - ID of the release to attach the asset to.
 * @param {string} filePath - Local path of the file to upload.
 * @param {string} fileName - Asset name to publish under.
 * @returns {Promise<object>} Parsed JSON describing the created asset.
 */
function uploadAsset(releaseId, filePath, fileName) {
  return new Promise((resolve, reject) => {
    // A throw here (e.g. missing file) rejects the promise via the executor.
    const data = fs.readFileSync(filePath);
    const opts = {
      hostname: "codeberg.org",
      path: `/api/v1/repos/${OWNER}/${REPO}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`,
      method: "POST",
      headers: { Authorization: `token ${TOKEN}`, "Content-Type": "application/octet-stream", "Content-Length": data.length },
    };
    const req = https.request(opts, (res) => {
      const chunks = [];
      res.on("data", (c) => chunks.push(c));
      res.on("end", () => {
        const text = Buffer.concat(chunks).toString();
        if (res.statusCode >= 400) {
          reject(new Error(`Upload ${fileName}: ${res.statusCode} ${text}`));
          return;
        }
        // Guard JSON.parse: a throw inside this callback would otherwise be
        // an uncaught exception instead of a rejection.
        try {
          resolve(JSON.parse(text || "{}"));
        } catch (err) {
          reject(new Error(`Upload ${fileName}: invalid JSON response: ${text}`, { cause: err }));
        }
      });
    });
    req.on("error", reject);
    req.write(data);
    req.end();
  });
}
|
|
||||||
|
|
||||||
/**
 * Creates the Codeberg release for TAG and uploads every build artifact
 * found in the local release/ directory. Artifacts missing on disk are
 * skipped with a warning instead of failing the run.
 */
async function main() {
  console.log("Creating release...");
  const createdRelease = await apiRequest("POST", `/repos/${OWNER}/${REPO}/releases`, {
    tag_name: TAG, name: TAG, body: BODY, draft: false, prerelease: false,
  });
  console.log(`Release created: ${createdRelease.id}`);

  const artifactsDir = path.join(__dirname, "..", "release");
  // Local build output name -> published asset name.
  const artifacts = [
    { file: "Real-Debrid-Downloader-Setup-1.6.29.exe", name: "Real-Debrid-Downloader-Setup-1.6.29.exe" },
    { file: "Real-Debrid-Downloader 1.6.29.exe", name: "Real-Debrid-Downloader-1.6.29.exe" },
    { file: "latest.yml", name: "latest.yml" },
    { file: "Real-Debrid-Downloader Setup 1.6.29.exe.blockmap", name: "Real-Debrid-Downloader-Setup-1.6.29.exe.blockmap" },
  ];

  for (const artifact of artifacts) {
    const localPath = path.join(artifactsDir, artifact.file);
    if (!fs.existsSync(localPath)) {
      console.warn(`SKIP ${artifact.file}`);
      continue;
    }
    console.log(`Uploading ${artifact.name} ...`);
    await uploadAsset(createdRelease.id, localPath, artifact.name);
    console.log(` done.`);
  }
  console.log("Release complete!");
}

main().catch((e) => { console.error(e); process.exit(1); });
|
|
||||||
@ -1,106 +0,0 @@
|
|||||||
import fs from "node:fs";
import path from "node:path";

const TAG = "v1.6.30";
// SECURITY(review): a live API token was hard-coded here and must be treated
// as leaked — revoke it on Codeberg. The environment variable now takes
// precedence; the literal remains only as a fallback for the existing flow.
const TOKEN = process.env.CODEBERG_TOKEN ?? "36034f878a07e8705c577a838e5186b3d6010d03";
const OWNER = "Sucukdeluxe";
const REPO = "real-debrid-downloader";
// Base URL for all repo-scoped Codeberg API calls.
const API = `https://codeberg.org/api/v1/repos/${OWNER}/${REPO}`;

// Build artifacts are read from ./release relative to the current directory.
const RELEASE_DIR = path.resolve("release");
|
|
||||||
|
|
||||||
const BODY = `## What's Changed in v1.6.30
|
|
||||||
|
|
||||||
### Bug Fixes (Round 5 + Round 6 Deep Code Review — 19 fixes total)
|
|
||||||
|
|
||||||
#### Critical / High Priority
|
|
||||||
- **\`removeItem\` double-decrements \`itemCount\`**: When removing an item whose package had no remaining items, the item count was decremented both by \`removePackageFromSession\` (which deletes all items) and again by the caller. Fixed with a \`removedByPackageCleanup\` guard.
|
|
||||||
- **\`startItems\` missing map clears**: \`itemContributedBytes\`, \`reservedTargetPaths\`, and \`claimedTargetPathByItem\` were not cleared when starting individual items, causing stale data from previous runs to leak through.
|
|
||||||
- **\`start()\` race condition**: Two concurrent \`start()\` calls could both pass the \`running\` guard due to an \`await\` before \`running = true\` was set. Fixed by setting \`running = true\` before the first async operation.
|
|
||||||
- **Item-Recovery race condition**: In \`handlePackagePostProcessing\`, the scheduler could start an item during the \`await fs.promises.stat()\` call, but the recovery code would then overwrite the active download status with "completed". Added a post-await status + activeTasks re-check.
|
|
||||||
- **File-handle leak on Windows**: \`stream.destroy()\` was skipped when \`stream.end()\` threw an error and \`bodyError\` was null, because the \`throw\` exited the finally block before reaching the destroy call. Moved \`stream.destroy()\` into the catch block before the re-throw.
|
|
||||||
|
|
||||||
#### Medium Priority
|
|
||||||
- **\`clearAll\` doesn't clear \`providerFailures\`**: Provider failure tracking persisted across clear-all operations, causing unnecessary fallback to alternate providers on the next run.
|
|
||||||
- **\`skipItems\` missing \`releaseTargetPath\`**: Skipped items retained their reserved target paths, blocking other items from using those file paths.
|
|
||||||
- **\`skipItems\` extraction trigger ignores failed items**: The post-skip extraction check only verified no pending items existed, but didn't check for failed items, potentially starting extraction with an incomplete download set.
|
|
||||||
- **Double "Error:" prefix**: \`compactErrorText()\` wraps \`String(error)\` which adds "Error: " for Error objects. The final \`throw new Error(lastError)\` in RealDebrid, AllDebrid, and MegaDebrid clients then added a second "Error: " prefix. Fixed with \`.replace(/^Error:\\s*/i, "")\`.
|
|
||||||
- **Zip-bomb false positive on size=0 headers**: Archive entries with \`uncompressedSize === 0\` in the header (common for streaming-compressed files) triggered the zip-bomb heuristic. Fixed to only check when \`maxDeclaredSize > 0\`.
|
|
||||||
- **\`directoryHasAnyFiles\` treats system files as content**: Files like \`desktop.ini\`, \`Thumbs.db\`, \`.DS_Store\` etc. were counted as real content, causing false "directory not empty" conflicts. Now filters with \`isIgnorableEmptyDirFileName\`.
|
|
||||||
- **\`setBool\` in Delete-Confirm permanently sets dirty flag**: The generic \`setBool\` helper marked the settings draft as dirty even when only updating the "don't ask again" checkbox, triggering unnecessary save-on-close prompts. Replaced with a direct \`setSettingsDraft\` call.
|
|
||||||
- **\`item.url\` missing in PackageCard memo comparison**: URL changes (e.g. after unrestrict retry) didn't trigger re-renders because \`item.url\` wasn't in the equality check.
|
|
||||||
- **Column sort + drag-drop reorder lacking optimistic updates**: \`movePackage\`, \`reorderPackagesByDrop\`, and the column sort handler sent the IPC call but didn't update local state until the next snapshot from main, causing visible lag. Added optimistic state updates with rollback on error.
|
|
||||||
- **\`updatedAt\` unconditionally set for already-extracted items**: Items with an "Entpackt - Done" label had their \`updatedAt\` bumped on every extraction error/success pass, causing unnecessary re-renders. Added guard to skip already-extracted items.
|
|
||||||
- **\`normalizeSessionStatuses\` empty fullStatus**: Completed items with an empty \`fullStatus\` stayed blank instead of getting the correct "Entpacken - Ausstehend" or "Fertig" label.
|
|
||||||
- **\`prepareForShutdown\` mislabels pending items**: Items with "Entpacken - Ausstehend" or "Entpacken - Warten auf Parts" were relabeled to "Entpacken abgebrochen (wird fortgesetzt)" even though they were never actively extracting. Now only relabels items with active extraction status.
|
|
||||||
|
|
||||||
### Test Results
|
|
||||||
- 352 tests passing across 15 test files
|
|
||||||
`;
|
|
||||||
|
|
||||||
/**
 * Create the Codeberg release and upload all build artifacts via fetch.
 * Missing local files are skipped with a warning; failed uploads are
 * collected and reported as one error at the end so the process exits
 * non-zero instead of printing "Done!" despite failures (previous bug).
 * @throws {Error} When the release cannot be created or any upload fails.
 */
async function main() {
  // Create release
  console.log("Creating release...");
  const createRes = await fetch(`${API}/releases`, {
    method: "POST",
    headers: {
      Authorization: `token ${TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      tag_name: TAG,
      name: TAG,
      body: BODY,
      draft: false,
      prerelease: false,
    }),
  });
  if (!createRes.ok) {
    const text = await createRes.text();
    throw new Error(`Create release failed: ${createRes.status} ${text}`);
  }
  const release = await createRes.json();
  console.log(`Release created: ${release.html_url}`);

  // Upload assets
  const assets = [
    { file: "Real-Debrid-Downloader-Setup-1.6.30.exe", label: "Setup Installer" },
    { file: "Real-Debrid-Downloader 1.6.30.exe", label: "Portable" },
    { file: "latest.yml", label: "Auto-Update Manifest" },
    { file: "Real-Debrid-Downloader Setup 1.6.30.exe.blockmap", label: "Blockmap" },
  ];

  const failed = [];
  for (const asset of assets) {
    const filePath = path.join(RELEASE_DIR, asset.file);
    if (!fs.existsSync(filePath)) {
      console.warn(`SKIP (not found): ${asset.file}`);
      continue;
    }
    const data = fs.readFileSync(filePath);
    console.log(`Uploading ${asset.file} (${(data.length / 1024 / 1024).toFixed(1)} MB)...`);
    const uploadRes = await fetch(
      `${API}/releases/${release.id}/assets?name=${encodeURIComponent(asset.file)}`,
      {
        method: "POST",
        headers: {
          Authorization: `token ${TOKEN}`,
          "Content-Type": "application/octet-stream",
        },
        body: data,
      }
    );
    if (!uploadRes.ok) {
      const text = await uploadRes.text();
      console.error(`Upload failed for ${asset.file}: ${uploadRes.status} ${text}`);
      failed.push(asset.file);
    } else {
      console.log(` ✓ ${asset.file}`);
    }
  }

  // Surface any upload failure as a non-zero exit via the .catch below.
  if (failed.length > 0) {
    throw new Error(`Upload failed for: ${failed.join(", ")}`);
  }
  console.log("Done!");
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
|
||||||
@ -1,24 +0,0 @@
|
|||||||
import fs from "node:fs";
|
|
||||||
import path from "node:path";
|
|
||||||
|
|
||||||
const version = process.argv[2];
|
|
||||||
if (!version) {
|
|
||||||
console.error("Usage: node scripts/set_version_node.mjs <version>");
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const root = process.cwd();
|
|
||||||
|
|
||||||
const packageJsonPath = path.join(root, "package.json");
|
|
||||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
|
||||||
packageJson.version = version;
|
|
||||||
fs.writeFileSync(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
|
||||||
|
|
||||||
const constantsPath = path.join(root, "src", "main", "constants.ts");
|
|
||||||
const constants = fs.readFileSync(constantsPath, "utf8").replace(
|
|
||||||
/APP_VERSION = "[^"]+"/,
|
|
||||||
`APP_VERSION = "${version}"`
|
|
||||||
);
|
|
||||||
fs.writeFileSync(constantsPath, constants, "utf8");
|
|
||||||
|
|
||||||
console.log(`Set version to ${version}`);
|
|
||||||
@ -5,6 +5,7 @@ import {
|
|||||||
AppSettings,
|
AppSettings,
|
||||||
DuplicatePolicy,
|
DuplicatePolicy,
|
||||||
HistoryEntry,
|
HistoryEntry,
|
||||||
|
PackagePriority,
|
||||||
ParsedPackageInput,
|
ParsedPackageInput,
|
||||||
SessionStats,
|
SessionStats,
|
||||||
StartConflictEntry,
|
StartConflictEntry,
|
||||||
@ -104,6 +105,7 @@ export class AppController {
|
|||||||
|| (settings.megaLogin.trim() && settings.megaPassword.trim())
|
|| (settings.megaLogin.trim() && settings.megaPassword.trim())
|
||||||
|| settings.bestToken.trim()
|
|| settings.bestToken.trim()
|
||||||
|| settings.allDebridToken.trim()
|
|| settings.allDebridToken.trim()
|
||||||
|
|| (settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim())
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -283,7 +285,14 @@ export class AppController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public exportBackup(): string {
|
public exportBackup(): string {
|
||||||
const settings = this.settings;
|
const settings = { ...this.settings };
|
||||||
|
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword"];
|
||||||
|
for (const key of SENSITIVE_KEYS) {
|
||||||
|
const val = settings[key];
|
||||||
|
if (typeof val === "string" && val.length > 0) {
|
||||||
|
(settings as Record<string, unknown>)[key] = `***${val.slice(-4)}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
const session = this.manager.getSession();
|
const session = this.manager.getSession();
|
||||||
return JSON.stringify({ version: 1, settings, session }, null, 2);
|
return JSON.stringify({ version: 1, settings, session }, null, 2);
|
||||||
}
|
}
|
||||||
@ -298,7 +307,15 @@ export class AppController {
|
|||||||
if (!parsed || typeof parsed !== "object" || !parsed.settings || !parsed.session) {
|
if (!parsed || typeof parsed !== "object" || !parsed.settings || !parsed.session) {
|
||||||
return { restored: false, message: "Kein gültiges Backup (settings/session fehlen)" };
|
return { restored: false, message: "Kein gültiges Backup (settings/session fehlen)" };
|
||||||
}
|
}
|
||||||
const restoredSettings = normalizeSettings(parsed.settings as AppSettings);
|
const importedSettings = parsed.settings as AppSettings;
|
||||||
|
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword"];
|
||||||
|
for (const key of SENSITIVE_KEYS) {
|
||||||
|
const val = (importedSettings as Record<string, unknown>)[key];
|
||||||
|
if (typeof val === "string" && val.startsWith("***")) {
|
||||||
|
(importedSettings as Record<string, unknown>)[key] = (this.settings as Record<string, unknown>)[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const restoredSettings = normalizeSettings(importedSettings);
|
||||||
this.settings = restoredSettings;
|
this.settings = restoredSettings;
|
||||||
saveSettings(this.storagePaths, this.settings);
|
saveSettings(this.storagePaths, this.settings);
|
||||||
this.manager.setSettings(this.settings);
|
this.manager.setSettings(this.settings);
|
||||||
@ -344,8 +361,8 @@ export class AppController {
|
|||||||
clearHistory(this.storagePaths);
|
clearHistory(this.storagePaths);
|
||||||
}
|
}
|
||||||
|
|
||||||
public setPackagePriority(packageId: string, priority: string): void {
|
public setPackagePriority(packageId: string, priority: PackagePriority): void {
|
||||||
this.manager.setPackagePriority(packageId, priority as any);
|
this.manager.setPackagePriority(packageId, priority);
|
||||||
}
|
}
|
||||||
|
|
||||||
public skipItems(itemIds: string[]): void {
|
public skipItems(itemIds: string[]): void {
|
||||||
|
|||||||
@ -45,6 +45,8 @@ export function defaultSettings(): AppSettings {
|
|||||||
megaPassword: "",
|
megaPassword: "",
|
||||||
bestToken: "",
|
bestToken: "",
|
||||||
allDebridToken: "",
|
allDebridToken: "",
|
||||||
|
ddownloadLogin: "",
|
||||||
|
ddownloadPassword: "",
|
||||||
archivePasswordList: "",
|
archivePasswordList: "",
|
||||||
rememberToken: true,
|
rememberToken: true,
|
||||||
providerPrimary: "realdebrid",
|
providerPrimary: "realdebrid",
|
||||||
|
|||||||
@ -164,7 +164,7 @@ async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]>
|
|||||||
const dlcData = content.slice(0, -88);
|
const dlcData = content.slice(0, -88);
|
||||||
|
|
||||||
const rcUrl = DLC_SERVICE_URL.replace("{KEY}", encodeURIComponent(dlcKey));
|
const rcUrl = DLC_SERVICE_URL.replace("{KEY}", encodeURIComponent(dlcKey));
|
||||||
const rcResponse = await fetch(rcUrl, { method: "GET" });
|
const rcResponse = await fetch(rcUrl, { method: "GET", signal: AbortSignal.timeout(30000) });
|
||||||
if (!rcResponse.ok) {
|
if (!rcResponse.ok) {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
@ -217,7 +217,8 @@ async function tryDcryptUpload(fileContent: Buffer, fileName: string): Promise<s
|
|||||||
|
|
||||||
const response = await fetch(DCRYPT_UPLOAD_URL, {
|
const response = await fetch(DCRYPT_UPLOAD_URL, {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
body: form
|
body: form,
|
||||||
|
signal: AbortSignal.timeout(30000)
|
||||||
});
|
});
|
||||||
if (response.status === 413) {
|
if (response.status === 413) {
|
||||||
return null;
|
return null;
|
||||||
@ -235,7 +236,8 @@ async function tryDcryptPaste(fileContent: Buffer): Promise<string[] | null> {
|
|||||||
|
|
||||||
const response = await fetch(DCRYPT_PASTE_URL, {
|
const response = await fetch(DCRYPT_PASTE_URL, {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
body: form
|
body: form,
|
||||||
|
signal: AbortSignal.timeout(30000)
|
||||||
});
|
});
|
||||||
if (response.status === 413) {
|
if (response.status === 413) {
|
||||||
return null;
|
return null;
|
||||||
|
|||||||
@ -15,7 +15,8 @@ const PROVIDER_LABELS: Record<DebridProvider, string> = {
|
|||||||
realdebrid: "Real-Debrid",
|
realdebrid: "Real-Debrid",
|
||||||
megadebrid: "Mega-Debrid",
|
megadebrid: "Mega-Debrid",
|
||||||
bestdebrid: "BestDebrid",
|
bestdebrid: "BestDebrid",
|
||||||
alldebrid: "AllDebrid"
|
alldebrid: "AllDebrid",
|
||||||
|
ddownload: "DDownload"
|
||||||
};
|
};
|
||||||
|
|
||||||
interface ProviderUnrestrictedLink extends UnrestrictedLink {
|
interface ProviderUnrestrictedLink extends UnrestrictedLink {
|
||||||
@ -958,11 +959,204 @@ class AllDebridClient {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const DDOWNLOAD_URL_RE = /^https?:\/\/(?:www\.)?(?:ddownload\.com|ddl\.to)\/([a-z0-9]+)/i;
|
||||||
|
const DDOWNLOAD_WEB_BASE = "https://ddownload.com";
|
||||||
|
const DDOWNLOAD_WEB_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36";
|
||||||
|
|
||||||
|
class DdownloadClient {
|
||||||
|
private login: string;
|
||||||
|
private password: string;
|
||||||
|
private cookies: string = "";
|
||||||
|
|
||||||
|
public constructor(login: string, password: string) {
|
||||||
|
this.login = login;
|
||||||
|
this.password = password;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async webLogin(signal?: AbortSignal): Promise<void> {
|
||||||
|
// Step 1: GET login page to extract form token
|
||||||
|
const loginPageRes = await fetch(`${DDOWNLOAD_WEB_BASE}/login.html`, {
|
||||||
|
headers: { "User-Agent": DDOWNLOAD_WEB_UA },
|
||||||
|
redirect: "manual",
|
||||||
|
signal: withTimeoutSignal(signal, API_TIMEOUT_MS)
|
||||||
|
});
|
||||||
|
const loginPageHtml = await loginPageRes.text();
|
||||||
|
const tokenMatch = loginPageHtml.match(/name="token" value="([^"]+)"/);
|
||||||
|
const pageCookies = (loginPageRes.headers.getSetCookie?.() || []).map((c: string) => c.split(";")[0]).join("; ");
|
||||||
|
|
||||||
|
// Step 2: POST login
|
||||||
|
const body = new URLSearchParams({
|
||||||
|
op: "login",
|
||||||
|
token: tokenMatch?.[1] || "",
|
||||||
|
rand: "",
|
||||||
|
redirect: "",
|
||||||
|
login: this.login,
|
||||||
|
password: this.password
|
||||||
|
});
|
||||||
|
const loginRes = await fetch(`${DDOWNLOAD_WEB_BASE}/`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
"User-Agent": DDOWNLOAD_WEB_UA,
|
||||||
|
"Content-Type": "application/x-www-form-urlencoded",
|
||||||
|
...(pageCookies ? { Cookie: pageCookies } : {})
|
||||||
|
},
|
||||||
|
body: body.toString(),
|
||||||
|
redirect: "manual",
|
||||||
|
signal: withTimeoutSignal(signal, API_TIMEOUT_MS)
|
||||||
|
});
|
||||||
|
|
||||||
|
// Drain body
|
||||||
|
try { await loginRes.text(); } catch { /* ignore */ }
|
||||||
|
|
||||||
|
const setCookies = loginRes.headers.getSetCookie?.() || [];
|
||||||
|
const xfss = setCookies.find((c: string) => c.startsWith("xfss="));
|
||||||
|
const loginCookie = setCookies.find((c: string) => c.startsWith("login="));
|
||||||
|
if (!xfss) {
|
||||||
|
throw new Error("DDownload Login fehlgeschlagen (kein Session-Cookie)");
|
||||||
|
}
|
||||||
|
this.cookies = [loginCookie, xfss].filter(Boolean).map((c: string) => c.split(";")[0]).join("; ");
|
||||||
|
}
|
||||||
|
|
||||||
|
public async unrestrictLink(link: string, signal?: AbortSignal): Promise<UnrestrictedLink> {
|
||||||
|
const match = link.match(DDOWNLOAD_URL_RE);
|
||||||
|
if (!match) {
|
||||||
|
throw new Error("Kein DDownload-Link");
|
||||||
|
}
|
||||||
|
const fileCode = match[1];
|
||||||
|
let lastError = "";
|
||||||
|
|
||||||
|
for (let attempt = 1; attempt <= REQUEST_RETRIES; attempt += 1) {
|
||||||
|
try {
|
||||||
|
if (signal?.aborted) throw new Error("aborted:debrid");
|
||||||
|
|
||||||
|
// Login if no session yet
|
||||||
|
if (!this.cookies) {
|
||||||
|
await this.webLogin(signal);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 1: GET file page to extract form fields
|
||||||
|
const filePageRes = await fetch(`${DDOWNLOAD_WEB_BASE}/${fileCode}`, {
|
||||||
|
headers: {
|
||||||
|
"User-Agent": DDOWNLOAD_WEB_UA,
|
||||||
|
Cookie: this.cookies
|
||||||
|
},
|
||||||
|
redirect: "manual",
|
||||||
|
signal: withTimeoutSignal(signal, API_TIMEOUT_MS)
|
||||||
|
});
|
||||||
|
|
||||||
|
// Premium with direct downloads enabled → redirect immediately
|
||||||
|
if (filePageRes.status >= 300 && filePageRes.status < 400) {
|
||||||
|
const directUrl = filePageRes.headers.get("location") || "";
|
||||||
|
try { await filePageRes.text(); } catch { /* drain */ }
|
||||||
|
if (directUrl) {
|
||||||
|
return {
|
||||||
|
fileName: filenameFromUrl(directUrl) || filenameFromUrl(link),
|
||||||
|
directUrl,
|
||||||
|
fileSize: null,
|
||||||
|
retriesUsed: attempt - 1,
|
||||||
|
skipTlsVerify: true
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const html = await filePageRes.text();
|
||||||
|
|
||||||
|
// Check for file not found
|
||||||
|
if (/File Not Found|file was removed|file was banned/i.test(html)) {
|
||||||
|
throw new Error("DDownload: Datei nicht gefunden");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract form fields
|
||||||
|
const idVal = html.match(/name="id" value="([^"]+)"/)?.[1] || fileCode;
|
||||||
|
const randVal = html.match(/name="rand" value="([^"]+)"/)?.[1] || "";
|
||||||
|
const fileNameMatch = html.match(/class="file-info-name"[^>]*>([^<]+)</);
|
||||||
|
const fileName = fileNameMatch?.[1]?.trim() || filenameFromUrl(link);
|
||||||
|
|
||||||
|
// Step 2: POST download2 for premium download
|
||||||
|
const dlBody = new URLSearchParams({
|
||||||
|
op: "download2",
|
||||||
|
id: idVal,
|
||||||
|
rand: randVal,
|
||||||
|
referer: "",
|
||||||
|
method_premium: "1",
|
||||||
|
adblock_detected: "0"
|
||||||
|
});
|
||||||
|
|
||||||
|
const dlRes = await fetch(`${DDOWNLOAD_WEB_BASE}/${fileCode}`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
"User-Agent": DDOWNLOAD_WEB_UA,
|
||||||
|
"Content-Type": "application/x-www-form-urlencoded",
|
||||||
|
Cookie: this.cookies,
|
||||||
|
Referer: `${DDOWNLOAD_WEB_BASE}/${fileCode}`
|
||||||
|
},
|
||||||
|
body: dlBody.toString(),
|
||||||
|
redirect: "manual",
|
||||||
|
signal: withTimeoutSignal(signal, API_TIMEOUT_MS)
|
||||||
|
});
|
||||||
|
|
||||||
|
if (dlRes.status >= 300 && dlRes.status < 400) {
|
||||||
|
const directUrl = dlRes.headers.get("location") || "";
|
||||||
|
try { await dlRes.text(); } catch { /* drain */ }
|
||||||
|
if (directUrl) {
|
||||||
|
return {
|
||||||
|
fileName: fileName || filenameFromUrl(directUrl),
|
||||||
|
directUrl,
|
||||||
|
fileSize: null,
|
||||||
|
retriesUsed: attempt - 1,
|
||||||
|
skipTlsVerify: true
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const dlHtml = await dlRes.text();
|
||||||
|
// Try to find direct URL in response HTML
|
||||||
|
const directMatch = dlHtml.match(/https?:\/\/[a-z0-9]+\.(?:dstorage\.org|ddownload\.com|ddl\.to|ucdn\.to)[^\s"'<>]+/i);
|
||||||
|
if (directMatch) {
|
||||||
|
return {
|
||||||
|
fileName,
|
||||||
|
directUrl: directMatch[0],
|
||||||
|
fileSize: null,
|
||||||
|
retriesUsed: attempt - 1,
|
||||||
|
skipTlsVerify: true
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for error messages
|
||||||
|
const errMatch = dlHtml.match(/class="err"[^>]*>([^<]+)</i);
|
||||||
|
if (errMatch) {
|
||||||
|
throw new Error(`DDownload: ${errMatch[1].trim()}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error("DDownload: Kein Download-Link erhalten");
|
||||||
|
} catch (error) {
|
||||||
|
lastError = compactErrorText(error);
|
||||||
|
if (signal?.aborted || (/aborted/i.test(lastError) && !/timeout/i.test(lastError))) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
// Re-login on auth errors
|
||||||
|
if (/login|session|cookie/i.test(lastError)) {
|
||||||
|
this.cookies = "";
|
||||||
|
}
|
||||||
|
if (attempt >= REQUEST_RETRIES || !isRetryableErrorText(lastError)) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
await sleepWithSignal(retryDelay(attempt), signal);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(String(lastError || "DDownload Unrestrict fehlgeschlagen").replace(/^Error:\s*/i, ""));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export class DebridService {
|
export class DebridService {
|
||||||
private settings: AppSettings;
|
private settings: AppSettings;
|
||||||
|
|
||||||
private options: DebridServiceOptions;
|
private options: DebridServiceOptions;
|
||||||
|
|
||||||
|
private cachedDdownloadClient: DdownloadClient | null = null;
|
||||||
|
private cachedDdownloadKey = "";
|
||||||
|
|
||||||
public constructor(settings: AppSettings, options: DebridServiceOptions = {}) {
|
public constructor(settings: AppSettings, options: DebridServiceOptions = {}) {
|
||||||
this.settings = cloneSettings(settings);
|
this.settings = cloneSettings(settings);
|
||||||
this.options = options;
|
this.options = options;
|
||||||
@ -972,6 +1166,16 @@ export class DebridService {
|
|||||||
this.settings = cloneSettings(next);
|
this.settings = cloneSettings(next);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private getDdownloadClient(login: string, password: string): DdownloadClient {
|
||||||
|
const key = `${login}\0${password}`;
|
||||||
|
if (this.cachedDdownloadClient && this.cachedDdownloadKey === key) {
|
||||||
|
return this.cachedDdownloadClient;
|
||||||
|
}
|
||||||
|
this.cachedDdownloadClient = new DdownloadClient(login, password);
|
||||||
|
this.cachedDdownloadKey = key;
|
||||||
|
return this.cachedDdownloadClient;
|
||||||
|
}
|
||||||
|
|
||||||
public async resolveFilenames(
|
public async resolveFilenames(
|
||||||
links: string[],
|
links: string[],
|
||||||
onResolved?: (link: string, fileName: string) => void,
|
onResolved?: (link: string, fileName: string) => void,
|
||||||
@ -1024,6 +1228,27 @@ export class DebridService {
|
|||||||
|
|
||||||
public async unrestrictLink(link: string, signal?: AbortSignal, settingsSnapshot?: AppSettings): Promise<ProviderUnrestrictedLink> {
|
public async unrestrictLink(link: string, signal?: AbortSignal, settingsSnapshot?: AppSettings): Promise<ProviderUnrestrictedLink> {
|
||||||
const settings = settingsSnapshot ? cloneSettings(settingsSnapshot) : cloneSettings(this.settings);
|
const settings = settingsSnapshot ? cloneSettings(settingsSnapshot) : cloneSettings(this.settings);
|
||||||
|
|
||||||
|
// DDownload is a direct file hoster, not a debrid service.
|
||||||
|
// If the link is a ddownload.com/ddl.to URL and the account is configured,
|
||||||
|
// use DDownload directly before trying any debrid providers.
|
||||||
|
if (DDOWNLOAD_URL_RE.test(link) && this.isProviderConfiguredFor(settings, "ddownload")) {
|
||||||
|
try {
|
||||||
|
const result = await this.unrestrictViaProvider(settings, "ddownload", link, signal);
|
||||||
|
return {
|
||||||
|
...result,
|
||||||
|
provider: "ddownload",
|
||||||
|
providerLabel: PROVIDER_LABELS["ddownload"]
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const errorText = compactErrorText(error);
|
||||||
|
if (signal?.aborted || (/aborted/i.test(errorText) && !/timeout/i.test(errorText))) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
// Fall through to normal provider chain (debrid services may also support ddownload links)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const order = toProviderOrder(
|
const order = toProviderOrder(
|
||||||
settings.providerPrimary,
|
settings.providerPrimary,
|
||||||
settings.providerSecondary,
|
settings.providerSecondary,
|
||||||
@ -1109,6 +1334,9 @@ export class DebridService {
|
|||||||
if (provider === "alldebrid") {
|
if (provider === "alldebrid") {
|
||||||
return Boolean(settings.allDebridToken.trim());
|
return Boolean(settings.allDebridToken.trim());
|
||||||
}
|
}
|
||||||
|
if (provider === "ddownload") {
|
||||||
|
return Boolean(settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim());
|
||||||
|
}
|
||||||
return Boolean(settings.bestToken.trim());
|
return Boolean(settings.bestToken.trim());
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1122,6 +1350,9 @@ export class DebridService {
|
|||||||
if (provider === "alldebrid") {
|
if (provider === "alldebrid") {
|
||||||
return new AllDebridClient(settings.allDebridToken).unrestrictLink(link, signal);
|
return new AllDebridClient(settings.allDebridToken).unrestrictLink(link, signal);
|
||||||
}
|
}
|
||||||
|
if (provider === "ddownload") {
|
||||||
|
return this.getDdownloadClient(settings.ddownloadLogin, settings.ddownloadPassword).unrestrictLink(link, signal);
|
||||||
|
}
|
||||||
return new BestDebridClient(settings.bestToken).unrestrictLink(link, signal);
|
return new BestDebridClient(settings.bestToken).unrestrictLink(link, signal);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -261,7 +261,7 @@ export function startDebugServer(mgr: DownloadManager, baseDir: string): void {
|
|||||||
const port = getPort(baseDir);
|
const port = getPort(baseDir);
|
||||||
|
|
||||||
server = http.createServer(handleRequest);
|
server = http.createServer(handleRequest);
|
||||||
server.listen(port, "0.0.0.0", () => {
|
server.listen(port, "127.0.0.1", () => {
|
||||||
logger.info(`Debug-Server gestartet auf Port ${port}`);
|
logger.info(`Debug-Server gestartet auf Port ${port}`);
|
||||||
});
|
});
|
||||||
server.on("error", (err) => {
|
server.on("error", (err) => {
|
||||||
|
|||||||
@ -20,9 +20,26 @@ import {
|
|||||||
UiSnapshot
|
UiSnapshot
|
||||||
} from "../shared/types";
|
} from "../shared/types";
|
||||||
import { REQUEST_RETRIES, SAMPLE_VIDEO_EXTENSIONS, SPEED_WINDOW_SECONDS, WRITE_BUFFER_SIZE, WRITE_FLUSH_TIMEOUT_MS, ALLOCATION_UNIT_SIZE, STREAM_HIGH_WATER_MARK, DISK_BUSY_THRESHOLD_MS } from "./constants";
|
import { REQUEST_RETRIES, SAMPLE_VIDEO_EXTENSIONS, SPEED_WINDOW_SECONDS, WRITE_BUFFER_SIZE, WRITE_FLUSH_TIMEOUT_MS, ALLOCATION_UNIT_SIZE, STREAM_HIGH_WATER_MARK, DISK_BUSY_THRESHOLD_MS } from "./constants";
|
||||||
import { cleanupCancelledPackageArtifactsAsync } from "./cleanup";
|
|
||||||
|
// Reference counter for NODE_TLS_REJECT_UNAUTHORIZED to avoid race conditions
|
||||||
|
// when multiple parallel downloads need TLS verification disabled (e.g. DDownload).
|
||||||
|
let tlsSkipRefCount = 0;
|
||||||
|
function acquireTlsSkip(): void {
|
||||||
|
tlsSkipRefCount += 1;
|
||||||
|
if (tlsSkipRefCount === 1) {
|
||||||
|
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function releaseTlsSkip(): void {
|
||||||
|
tlsSkipRefCount -= 1;
|
||||||
|
if (tlsSkipRefCount <= 0) {
|
||||||
|
tlsSkipRefCount = 0;
|
||||||
|
delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
import { cleanupCancelledPackageArtifactsAsync, removeDownloadLinkArtifacts, removeSampleArtifacts } from "./cleanup";
|
||||||
import { DebridService, MegaWebUnrestrictor, checkRapidgatorOnline } from "./debrid";
|
import { DebridService, MegaWebUnrestrictor, checkRapidgatorOnline } from "./debrid";
|
||||||
import { clearExtractResumeState, collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates } from "./extractor";
|
import { cleanupArchives, clearExtractResumeState, collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates, hasAnyFilesRecursive, removeEmptyDirectoryTree } from "./extractor";
|
||||||
import { validateFileAgainstManifest } from "./integrity";
|
import { validateFileAgainstManifest } from "./integrity";
|
||||||
import { logger } from "./logger";
|
import { logger } from "./logger";
|
||||||
import { StoragePaths, saveSession, saveSessionAsync, saveSettings, saveSettingsAsync } from "./storage";
|
import { StoragePaths, saveSession, saveSessionAsync, saveSettings, saveSettingsAsync } from "./storage";
|
||||||
@ -291,6 +308,9 @@ function providerLabel(provider: DownloadItem["provider"]): string {
|
|||||||
if (provider === "alldebrid") {
|
if (provider === "alldebrid") {
|
||||||
return "AllDebrid";
|
return "AllDebrid";
|
||||||
}
|
}
|
||||||
|
if (provider === "ddownload") {
|
||||||
|
return "DDownload";
|
||||||
|
}
|
||||||
return "Debrid";
|
return "Debrid";
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -731,60 +751,86 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
|
export function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
|
||||||
const entryLower = archiveName.toLowerCase();
|
const entryLower = archiveName.toLowerCase();
|
||||||
|
|
||||||
|
// Helper: get item basename (try targetPath first, then fileName)
|
||||||
|
const itemBaseName = (item: DownloadItem): string =>
|
||||||
|
path.basename(item.targetPath || item.fileName || "");
|
||||||
|
|
||||||
|
// Try pattern-based matching first (for multipart archives)
|
||||||
|
let pattern: RegExp | null = null;
|
||||||
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
|
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
|
||||||
if (multipartMatch) {
|
if (multipartMatch) {
|
||||||
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
|
pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
|
||||||
return items.filter((item) => {
|
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
const rarMatch = entryLower.match(/^(.*)\.rar$/);
|
if (!pattern) {
|
||||||
if (rarMatch) {
|
const rarMatch = entryLower.match(/^(.*)\.rar$/);
|
||||||
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
if (rarMatch) {
|
||||||
const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
|
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
return items.filter((item) => {
|
pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
}
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
// Split ZIP (e.g., movie.zip.001, movie.zip.002)
|
if (!pattern) {
|
||||||
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
|
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
|
||||||
if (zipSplitMatch) {
|
if (zipSplitMatch) {
|
||||||
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
|
pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
|
||||||
return items.filter((item) => {
|
}
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
// Split 7z (e.g., movie.7z.001, movie.7z.002)
|
if (!pattern) {
|
||||||
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
|
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
|
||||||
if (sevenSplitMatch) {
|
if (sevenSplitMatch) {
|
||||||
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
|
pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
|
||||||
return items.filter((item) => {
|
}
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
// Generic .NNN splits (e.g., movie.001, movie.002)
|
if (!pattern && /^(.*)\.001$/.test(entryLower) && !/\.(zip|7z)\.001$/.test(entryLower)) {
|
||||||
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
|
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
|
||||||
if (genericSplitMatch && !/\.(zip|7z)\.001$/.test(entryLower)) {
|
if (genericSplitMatch) {
|
||||||
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
|
pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
|
||||||
return items.filter((item) => {
|
}
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
return items.filter((item) => {
|
|
||||||
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
|
// Attempt 1: Pattern match (handles multipart archives)
|
||||||
return name === entryLower;
|
if (pattern) {
|
||||||
});
|
const matched = items.filter((item) => pattern!.test(itemBaseName(item)));
|
||||||
|
if (matched.length > 0) return matched;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt 2: Exact filename match (case-insensitive)
|
||||||
|
const exactMatch = items.filter((item) => itemBaseName(item).toLowerCase() === entryLower);
|
||||||
|
if (exactMatch.length > 0) return exactMatch;
|
||||||
|
|
||||||
|
// Attempt 3: Stem-based fuzzy match — strip archive extensions and compare stems.
|
||||||
|
// Handles cases where debrid services modify filenames slightly.
|
||||||
|
const archiveStem = entryLower
|
||||||
|
.replace(/\.part\d+\.rar$/i, "")
|
||||||
|
.replace(/\.r\d{2,3}$/i, "")
|
||||||
|
.replace(/\.rar$/i, "")
|
||||||
|
.replace(/\.(zip|7z)\.\d{3}$/i, "")
|
||||||
|
.replace(/\.\d{3}$/i, "")
|
||||||
|
.replace(/\.(zip|7z)$/i, "");
|
||||||
|
if (archiveStem.length > 3) {
|
||||||
|
const stemMatch = items.filter((item) => {
|
||||||
|
const name = itemBaseName(item).toLowerCase();
|
||||||
|
return name.startsWith(archiveStem) && /\.(rar|r\d{2,3}|zip|7z|\d{3})$/i.test(name);
|
||||||
|
});
|
||||||
|
if (stemMatch.length > 0) return stemMatch;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt 4: If only one item in the list and one archive — return it as a best-effort match.
|
||||||
|
// This handles single-file packages where the filename may have been modified.
|
||||||
|
if (items.length === 1) {
|
||||||
|
const singleName = itemBaseName(items[0]).toLowerCase();
|
||||||
|
if (/\.(rar|zip|7z|\d{3})$/i.test(singleName)) {
|
||||||
|
return items;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
function retryDelayWithJitter(attempt: number, baseMs: number): number {
|
function retryDelayWithJitter(attempt: number, baseMs: number): number {
|
||||||
@ -1364,6 +1410,10 @@ export class DownloadManager extends EventEmitter {
|
|||||||
addedPackages += 1;
|
addedPackages += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (addedPackages > 0 || addedLinks > 0) {
|
||||||
|
const pkgNames = packages.filter((p) => p.links.length > 0).map((p) => p.name).join(", ");
|
||||||
|
logger.info(`Pakete hinzugefügt: ${addedPackages} Paket(e), ${addedLinks} Link(s) [${pkgNames}]`);
|
||||||
|
}
|
||||||
this.persistSoon();
|
this.persistSoon();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
if (unresolvedByLink.size > 0) {
|
if (unresolvedByLink.size > 0) {
|
||||||
@ -3209,11 +3259,11 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
for (const item of Object.values(this.session.items)) {
|
for (const item of Object.values(this.session.items)) {
|
||||||
if (item.status !== "completed") continue;
|
if (item.status !== "completed") continue;
|
||||||
const fs = item.fullStatus || "";
|
const fullSt = item.fullStatus || "";
|
||||||
// Only relabel items with active extraction status (e.g. "Entpacken 45%", "Passwort prüfen")
|
// Only relabel items with active extraction status (e.g. "Entpacken 45%", "Passwort prüfen")
|
||||||
// Skip items that were merely waiting ("Entpacken - Ausstehend", "Entpacken - Warten auf Parts")
|
// Skip items that were merely waiting ("Entpacken - Ausstehend", "Entpacken - Warten auf Parts")
|
||||||
// as they were never actively extracting and "abgebrochen" would be misleading.
|
// as they were never actively extracting and "abgebrochen" would be misleading.
|
||||||
if (/^Entpacken\b/i.test(fs) && !/Ausstehend/i.test(fs) && !/Warten/i.test(fs) && !isExtractedLabel(fs)) {
|
if (/^Entpacken\b/i.test(fullSt) && !/Ausstehend/i.test(fullSt) && !/Warten/i.test(fullSt) && !isExtractedLabel(fullSt)) {
|
||||||
item.fullStatus = "Entpacken abgebrochen (wird fortgesetzt)";
|
item.fullStatus = "Entpacken abgebrochen (wird fortgesetzt)";
|
||||||
item.updatedAt = nowMs();
|
item.updatedAt = nowMs();
|
||||||
const pkg = this.session.packages[item.packageId];
|
const pkg = this.session.packages[item.packageId];
|
||||||
@ -3302,7 +3352,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.session.reconnectReason = "";
|
this.session.reconnectReason = "";
|
||||||
|
|
||||||
for (const item of Object.values(this.session.items)) {
|
for (const item of Object.values(this.session.items)) {
|
||||||
if (item.provider !== "realdebrid" && item.provider !== "megadebrid" && item.provider !== "bestdebrid" && item.provider !== "alldebrid") {
|
if (item.provider !== "realdebrid" && item.provider !== "megadebrid" && item.provider !== "bestdebrid" && item.provider !== "alldebrid" && item.provider !== "ddownload") {
|
||||||
item.provider = null;
|
item.provider = null;
|
||||||
}
|
}
|
||||||
if (item.status === "cancelled" && item.fullStatus === "Gestoppt") {
|
if (item.status === "cancelled" && item.fullStatus === "Gestoppt") {
|
||||||
@ -3550,14 +3600,16 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.emit("state", this.getSnapshot());
|
this.emit("state", this.getSnapshot());
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// Too soon — schedule deferred forced emit
|
// Too soon — replace any pending timer with a shorter forced-emit timer
|
||||||
if (!this.stateEmitTimer) {
|
if (this.stateEmitTimer) {
|
||||||
this.stateEmitTimer = setTimeout(() => {
|
clearTimeout(this.stateEmitTimer);
|
||||||
this.stateEmitTimer = null;
|
this.stateEmitTimer = null;
|
||||||
this.lastStateEmitAt = nowMs();
|
|
||||||
this.emit("state", this.getSnapshot());
|
|
||||||
}, MIN_FORCE_GAP_MS - sinceLastEmit);
|
|
||||||
}
|
}
|
||||||
|
this.stateEmitTimer = setTimeout(() => {
|
||||||
|
this.stateEmitTimer = null;
|
||||||
|
this.lastStateEmitAt = nowMs();
|
||||||
|
this.emit("state", this.getSnapshot());
|
||||||
|
}, MIN_FORCE_GAP_MS - sinceLastEmit);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (this.stateEmitTimer) {
|
if (this.stateEmitTimer) {
|
||||||
@ -3795,18 +3847,26 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.packagePostProcessAbortControllers.set(packageId, abortController);
|
this.packagePostProcessAbortControllers.set(packageId, abortController);
|
||||||
|
|
||||||
const task = (async () => {
|
const task = (async () => {
|
||||||
|
const slotWaitStart = nowMs();
|
||||||
await this.acquirePostProcessSlot(packageId);
|
await this.acquirePostProcessSlot(packageId);
|
||||||
|
const slotWaitMs = nowMs() - slotWaitStart;
|
||||||
|
if (slotWaitMs > 100) {
|
||||||
|
logger.info(`Post-Process Slot erhalten nach ${(slotWaitMs / 1000).toFixed(1)}s Wartezeit: pkg=${packageId.slice(0, 8)}`);
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
// Loop while requeue requests arrive — keep the slot so the same
|
let round = 0;
|
||||||
// package can immediately re-run hybrid extraction without waiting
|
|
||||||
// behind other packages that may be queued for the slot.
|
|
||||||
do {
|
do {
|
||||||
|
round += 1;
|
||||||
|
const hadRequeue = this.hybridExtractRequeue.has(packageId);
|
||||||
this.hybridExtractRequeue.delete(packageId);
|
this.hybridExtractRequeue.delete(packageId);
|
||||||
|
const roundStart = nowMs();
|
||||||
try {
|
try {
|
||||||
await this.handlePackagePostProcessing(packageId, abortController.signal);
|
await this.handlePackagePostProcessing(packageId, abortController.signal);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`);
|
logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`);
|
||||||
}
|
}
|
||||||
|
const roundMs = nowMs() - roundStart;
|
||||||
|
logger.info(`Post-Process Runde ${round} fertig in ${(roundMs / 1000).toFixed(1)}s (requeue=${hadRequeue}, nextRequeue=${this.hybridExtractRequeue.has(packageId)}): pkg=${packageId.slice(0, 8)}`);
|
||||||
this.persistSoon();
|
this.persistSoon();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
} while (this.hybridExtractRequeue.has(packageId));
|
} while (this.hybridExtractRequeue.has(packageId));
|
||||||
@ -4706,6 +4766,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
|
item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
|
||||||
item.updatedAt = nowMs();
|
item.updatedAt = nowMs();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
|
logger.info(`Download Start: ${item.fileName} (${humanSize(unrestricted.fileSize || 0)}) via ${unrestricted.providerLabel}, pkg=${pkg.name}`);
|
||||||
|
|
||||||
const maxAttempts = maxItemAttempts;
|
const maxAttempts = maxItemAttempts;
|
||||||
let done = false;
|
let done = false;
|
||||||
@ -4717,7 +4778,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
item.updatedAt = nowMs();
|
item.updatedAt = nowMs();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
}
|
}
|
||||||
const result = await this.downloadToFile(active, unrestricted.directUrl, item.targetPath, item.totalBytes);
|
const result = await this.downloadToFile(active, unrestricted.directUrl, item.targetPath, item.totalBytes, unrestricted.skipTlsVerify);
|
||||||
active.resumable = result.resumable;
|
active.resumable = result.resumable;
|
||||||
if (!active.resumable && !active.nonResumableCounted) {
|
if (!active.resumable && !active.nonResumableCounted) {
|
||||||
active.nonResumableCounted = true;
|
active.nonResumableCounted = true;
|
||||||
@ -4814,6 +4875,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
item.updatedAt = nowMs();
|
item.updatedAt = nowMs();
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
this.recordRunOutcome(item.id, "completed");
|
this.recordRunOutcome(item.id, "completed");
|
||||||
|
logger.info(`Download fertig: ${item.fileName} (${humanSize(item.downloadedBytes)}), pkg=${pkg.name}`);
|
||||||
|
|
||||||
if (this.session.running && !active.abortController.signal.aborted) {
|
if (this.session.running && !active.abortController.signal.aborted) {
|
||||||
void this.runPackagePostProcessing(pkg.id).catch((err) => {
|
void this.runPackagePostProcessing(pkg.id).catch((err) => {
|
||||||
@ -5102,7 +5164,8 @@ export class DownloadManager extends EventEmitter {
|
|||||||
active: ActiveTask,
|
active: ActiveTask,
|
||||||
directUrl: string,
|
directUrl: string,
|
||||||
targetPath: string,
|
targetPath: string,
|
||||||
knownTotal: number | null
|
knownTotal: number | null,
|
||||||
|
skipTlsVerify?: boolean
|
||||||
): Promise<{ resumable: boolean }> {
|
): Promise<{ resumable: boolean }> {
|
||||||
const item = this.session.items[active.itemId];
|
const item = this.session.items[active.itemId];
|
||||||
if (!item) {
|
if (!item) {
|
||||||
@ -5148,6 +5211,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
const connectTimeoutMs = getDownloadConnectTimeoutMs();
|
const connectTimeoutMs = getDownloadConnectTimeoutMs();
|
||||||
let connectTimer: NodeJS.Timeout | null = null;
|
let connectTimer: NodeJS.Timeout | null = null;
|
||||||
const connectAbortController = new AbortController();
|
const connectAbortController = new AbortController();
|
||||||
|
if (skipTlsVerify) acquireTlsSkip();
|
||||||
try {
|
try {
|
||||||
if (connectTimeoutMs > 0) {
|
if (connectTimeoutMs > 0) {
|
||||||
connectTimer = setTimeout(() => {
|
connectTimer = setTimeout(() => {
|
||||||
@ -5173,6 +5237,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
} finally {
|
} finally {
|
||||||
|
if (skipTlsVerify) releaseTlsSkip();
|
||||||
if (connectTimer) {
|
if (connectTimer) {
|
||||||
clearTimeout(connectTimer);
|
clearTimeout(connectTimer);
|
||||||
}
|
}
|
||||||
@ -6233,11 +6298,29 @@ export class DownloadManager extends EventEmitter {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<void> {
|
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<number> {
|
||||||
|
const findReadyStart = nowMs();
|
||||||
const readyArchives = await this.findReadyArchiveSets(pkg);
|
const readyArchives = await this.findReadyArchiveSets(pkg);
|
||||||
|
const findReadyMs = nowMs() - findReadyStart;
|
||||||
|
if (findReadyMs > 200) {
|
||||||
|
logger.info(`findReadyArchiveSets dauerte ${(findReadyMs / 1000).toFixed(1)}s: pkg=${pkg.name}, found=${readyArchives.size}`);
|
||||||
|
}
|
||||||
if (readyArchives.size === 0) {
|
if (readyArchives.size === 0) {
|
||||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
|
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
|
||||||
return;
|
// Relabel completed items that are part of incomplete multi-part archives
|
||||||
|
// from "Ausstehend" to "Warten auf Parts" so the UI accurately reflects
|
||||||
|
// that extraction is waiting for remaining parts to finish downloading.
|
||||||
|
const allDone = items.every((i) => i.status === "completed" || i.status === "failed" || i.status === "cancelled");
|
||||||
|
if (!allDone) {
|
||||||
|
for (const entry of items) {
|
||||||
|
if (entry.status === "completed" && entry.fullStatus === "Entpacken - Ausstehend") {
|
||||||
|
entry.fullStatus = "Entpacken - Warten auf Parts";
|
||||||
|
entry.updatedAt = nowMs();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.emitState();
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`Hybrid-Extract Start: pkg=${pkg.name}, readyArchives=${readyArchives.size}`);
|
logger.info(`Hybrid-Extract Start: pkg=${pkg.name}, readyArchives=${readyArchives.size}`);
|
||||||
@ -6277,7 +6360,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
// a previous hybrid round, there is nothing new to extract.
|
// a previous hybrid round, there is nothing new to extract.
|
||||||
if (hybridItems.length > 0 && hybridItems.every((item) => isExtractedLabel(item.fullStatus))) {
|
if (hybridItems.length > 0 && hybridItems.every((item) => isExtractedLabel(item.fullStatus))) {
|
||||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle ${hybridItems.length} Items bereits entpackt, überspringe`);
|
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle ${hybridItems.length} Items bereits entpackt, überspringe`);
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Filter out archives whose items are ALL already extracted so we don't
|
// Filter out archives whose items are ALL already extracted so we don't
|
||||||
@ -6300,7 +6383,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
if (readyArchives.size === 0) {
|
if (readyArchives.size === 0) {
|
||||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle fertigen Archive bereits entpackt`);
|
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle fertigen Archive bereits entpackt`);
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve archive items dynamically from ALL package items (not just
|
// Resolve archive items dynamically from ALL package items (not just
|
||||||
@ -6309,10 +6392,11 @@ export class DownloadManager extends EventEmitter {
|
|||||||
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
|
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
|
||||||
resolveArchiveItemsFromList(archiveName, items);
|
resolveArchiveItemsFromList(archiveName, items);
|
||||||
|
|
||||||
// Track multiple active archives for parallel hybrid extraction
|
// Track archives for parallel hybrid extraction progress
|
||||||
const activeHybridArchiveMap = new Map<string, DownloadItem[]>();
|
const hybridResolvedItems = new Map<string, DownloadItem[]>();
|
||||||
const hybridArchiveStartTimes = new Map<string, number>();
|
const hybridStartTimes = new Map<string, number>();
|
||||||
let hybridLastEmitAt = 0;
|
let hybridLastEmitAt = 0;
|
||||||
|
let hybridLastProgressCurrent: number | null = null;
|
||||||
|
|
||||||
// Mark items based on whether their archive is actually ready for extraction.
|
// Mark items based on whether their archive is actually ready for extraction.
|
||||||
// Only items whose archive is in readyArchives get "Ausstehend"; others keep
|
// Only items whose archive is in readyArchives get "Ausstehend"; others keep
|
||||||
@ -6350,38 +6434,71 @@ export class DownloadManager extends EventEmitter {
|
|||||||
packageId,
|
packageId,
|
||||||
hybridMode: true,
|
hybridMode: true,
|
||||||
maxParallel: this.settings.maxParallelExtract || 2,
|
maxParallel: this.settings.maxParallelExtract || 2,
|
||||||
extractCpuPriority: this.settings.extractCpuPriority,
|
extractCpuPriority: "high",
|
||||||
onProgress: (progress) => {
|
onProgress: (progress) => {
|
||||||
|
if (progress.phase === "preparing") {
|
||||||
|
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
||||||
|
this.emitState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
if (progress.phase === "done") {
|
if (progress.phase === "done") {
|
||||||
// Do NOT mark remaining archives as "Done" here — some may have
|
hybridResolvedItems.clear();
|
||||||
// failed. The post-extraction code (result.failed check) will
|
hybridStartTimes.clear();
|
||||||
// assign the correct label. Only clear the tracking maps.
|
hybridLastProgressCurrent = null;
|
||||||
activeHybridArchiveMap.clear();
|
|
||||||
hybridArchiveStartTimes.clear();
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const currentCount = Math.max(0, Number(progress.current ?? 0));
|
||||||
|
const archiveFinished = progress.archiveDone === true
|
||||||
|
|| (hybridLastProgressCurrent !== null && currentCount > hybridLastProgressCurrent);
|
||||||
|
hybridLastProgressCurrent = currentCount;
|
||||||
|
|
||||||
if (progress.archiveName) {
|
if (progress.archiveName) {
|
||||||
// Resolve items for this archive if not yet tracked
|
// Resolve items for this archive if not yet tracked
|
||||||
if (!activeHybridArchiveMap.has(progress.archiveName)) {
|
if (!hybridResolvedItems.has(progress.archiveName)) {
|
||||||
activeHybridArchiveMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
|
const resolved = resolveArchiveItems(progress.archiveName);
|
||||||
hybridArchiveStartTimes.set(progress.archiveName, nowMs());
|
hybridResolvedItems.set(progress.archiveName, resolved);
|
||||||
|
hybridStartTimes.set(progress.archiveName, nowMs());
|
||||||
|
if (resolved.length === 0) {
|
||||||
|
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
|
||||||
|
} else {
|
||||||
|
logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
|
||||||
|
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
|
||||||
|
const initAt = nowMs();
|
||||||
|
for (const entry of resolved) {
|
||||||
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
|
entry.fullStatus = initLabel;
|
||||||
|
entry.updatedAt = initAt;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hybridLastEmitAt = initAt;
|
||||||
|
this.emitState(true);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const archItems = activeHybridArchiveMap.get(progress.archiveName)!;
|
const archItems = hybridResolvedItems.get(progress.archiveName) || [];
|
||||||
|
|
||||||
// If archive is at 100%, mark its items as done and remove from active
|
// Only mark as finished on explicit archive-done signal (or real current increment),
|
||||||
if (Number(progress.archivePercent ?? 0) >= 100) {
|
// never on raw 100% archivePercent, because password retries can report 100% mid-run.
|
||||||
|
if (archiveFinished) {
|
||||||
const doneAt = nowMs();
|
const doneAt = nowMs();
|
||||||
const startedAt = hybridArchiveStartTimes.get(progress.archiveName) || doneAt;
|
const startedAt = hybridStartTimes.get(progress.archiveName) || doneAt;
|
||||||
const doneLabel = formatExtractDone(doneAt - startedAt);
|
const doneLabel = progress.archiveSuccess === false
|
||||||
|
? "Entpacken - Error"
|
||||||
|
: formatExtractDone(doneAt - startedAt);
|
||||||
for (const entry of archItems) {
|
for (const entry of archItems) {
|
||||||
if (!isExtractedLabel(entry.fullStatus)) {
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
entry.fullStatus = doneLabel;
|
entry.fullStatus = doneLabel;
|
||||||
entry.updatedAt = doneAt;
|
entry.updatedAt = doneAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
activeHybridArchiveMap.delete(progress.archiveName);
|
hybridResolvedItems.delete(progress.archiveName);
|
||||||
hybridArchiveStartTimes.delete(progress.archiveName);
|
hybridStartTimes.delete(progress.archiveName);
|
||||||
|
// Show transitional label while next archive initializes
|
||||||
|
const done = currentCount;
|
||||||
|
if (done < progress.total) {
|
||||||
|
pkg.postProcessLabel = `Entpacken (${done}/${progress.total}) - Naechstes Archiv...`;
|
||||||
|
this.emitState();
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
// Update this archive's items with per-archive progress
|
// Update this archive's items with per-archive progress
|
||||||
const archiveLabel = ` · ${progress.archiveName}`;
|
const archiveLabel = ` · ${progress.archiveName}`;
|
||||||
@ -6408,6 +6525,18 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update package-level label with overall extraction progress
|
||||||
|
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
|
||||||
|
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
|
||||||
|
if (progress.passwordFound) {
|
||||||
|
pkg.postProcessLabel = `Passwort gefunden · ${progress.archiveName || ""}`;
|
||||||
|
} else if (progress.passwordAttempt && progress.passwordTotal && progress.passwordTotal > 1) {
|
||||||
|
const pwPct = Math.round((progress.passwordAttempt / progress.passwordTotal) * 100);
|
||||||
|
pkg.postProcessLabel = `Passwort knacken: ${pwPct}%`;
|
||||||
|
} else {
|
||||||
|
pkg.postProcessLabel = `Entpacken ${progress.percent}% (${currentDisplay}/${progress.total})`;
|
||||||
|
}
|
||||||
|
|
||||||
// Throttled emit — also promote "Warten auf Parts" items that
|
// Throttled emit — also promote "Warten auf Parts" items that
|
||||||
// completed downloading in the meantime to "Ausstehend".
|
// completed downloading in the meantime to "Ausstehend".
|
||||||
const now = nowMs();
|
const now = nowMs();
|
||||||
@ -6426,7 +6555,20 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
|
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
|
||||||
if (result.extracted > 0) {
|
if (result.extracted > 0) {
|
||||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
// Fire-and-forget: rename then collect MKVs in background so the
|
||||||
|
// slot is not blocked and the next archive set can start immediately.
|
||||||
|
void (async () => {
|
||||||
|
try {
|
||||||
|
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn(`Hybrid Auto-Rename Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn(`Hybrid MKV-Collection Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
|
||||||
|
}
|
||||||
|
})();
|
||||||
}
|
}
|
||||||
if (result.failed > 0) {
|
if (result.failed > 0) {
|
||||||
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
|
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
|
||||||
@ -6453,6 +6595,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
entry.updatedAt = updatedAt;
|
entry.updatedAt = updatedAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return result.extracted;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorText = String(error || "");
|
const errorText = String(error || "");
|
||||||
if (errorText.includes("aborted:extract")) {
|
if (errorText.includes("aborted:extract")) {
|
||||||
@ -6465,7 +6608,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
entry.updatedAt = abortAt;
|
entry.updatedAt = abortAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
logger.warn(`Hybrid-Extract Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
logger.warn(`Hybrid-Extract Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
||||||
const errorAt = nowMs();
|
const errorAt = nowMs();
|
||||||
@ -6477,9 +6620,11 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
|
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
|
||||||
|
const handleStart = nowMs();
|
||||||
const pkg = this.session.packages[packageId];
|
const pkg = this.session.packages[packageId];
|
||||||
if (!pkg || pkg.cancelled) {
|
if (!pkg || pkg.cancelled) {
|
||||||
return;
|
return;
|
||||||
@ -6491,6 +6636,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
// Recover items whose file exists on disk but status was never set to "completed".
|
// Recover items whose file exists on disk but status was never set to "completed".
|
||||||
// Only recover items in idle states (queued/paused), never active ones (downloading/validating).
|
// Only recover items in idle states (queued/paused), never active ones (downloading/validating).
|
||||||
|
const recoveryStart = nowMs();
|
||||||
for (const item of items) {
|
for (const item of items) {
|
||||||
if (isFinishedStatus(item.status)) {
|
if (isFinishedStatus(item.status)) {
|
||||||
continue;
|
continue;
|
||||||
@ -6530,16 +6676,21 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const recoveryMs = nowMs() - recoveryStart;
|
||||||
const success = items.filter((item) => item.status === "completed").length;
|
const success = items.filter((item) => item.status === "completed").length;
|
||||||
const failed = items.filter((item) => item.status === "failed").length;
|
const failed = items.filter((item) => item.status === "failed").length;
|
||||||
const cancelled = items.filter((item) => item.status === "cancelled").length;
|
const cancelled = items.filter((item) => item.status === "cancelled").length;
|
||||||
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}`);
|
const setupMs = nowMs() - handleStart;
|
||||||
|
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}, setupMs=${setupMs}, recoveryMs=${recoveryMs}`);
|
||||||
|
|
||||||
const allDone = success + failed + cancelled >= items.length;
|
const allDone = success + failed + cancelled >= items.length;
|
||||||
|
|
||||||
if (!allDone && this.settings.hybridExtract && this.settings.autoExtract && failed === 0 && success > 0) {
|
if (!allDone && this.settings.hybridExtract && this.settings.autoExtract && failed === 0 && success > 0) {
|
||||||
await this.runHybridExtraction(packageId, pkg, items, signal);
|
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
||||||
|
this.emitState();
|
||||||
|
const hybridExtracted = await this.runHybridExtraction(packageId, pkg, items, signal);
|
||||||
if (signal?.aborted) {
|
if (signal?.aborted) {
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "queued" : "paused";
|
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "queued" : "paused";
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
return;
|
return;
|
||||||
@ -6553,6 +6704,13 @@ export class DownloadManager extends EventEmitter {
|
|||||||
if (!this.session.packages[packageId]) {
|
if (!this.session.packages[packageId]) {
|
||||||
return; // Package was fully cleaned up
|
return; // Package was fully cleaned up
|
||||||
}
|
}
|
||||||
|
// Self-requeue if we extracted something — more archive sets may have
|
||||||
|
// become ready while we were extracting (items that completed before
|
||||||
|
// this task started set the requeue flag once, which was already consumed).
|
||||||
|
if (hybridExtracted > 0) {
|
||||||
|
this.hybridExtractRequeue.add(packageId);
|
||||||
|
}
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
@ -6560,6 +6718,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!allDone) {
|
if (!allDone) {
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
||||||
logger.info(`Post-Processing verschoben: pkg=${pkg.name}, noch offene items`);
|
logger.info(`Post-Processing verschoben: pkg=${pkg.name}, noch offene items`);
|
||||||
return;
|
return;
|
||||||
@ -6567,8 +6726,10 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
const completedItems = items.filter((item) => item.status === "completed");
|
const completedItems = items.filter((item) => item.status === "completed");
|
||||||
const alreadyMarkedExtracted = completedItems.length > 0 && completedItems.every((item) => isExtractedLabel(item.fullStatus));
|
const alreadyMarkedExtracted = completedItems.length > 0 && completedItems.every((item) => isExtractedLabel(item.fullStatus));
|
||||||
|
let extractedCount = 0;
|
||||||
|
|
||||||
if (this.settings.autoExtract && failed === 0 && success > 0 && !alreadyMarkedExtracted) {
|
if (this.settings.autoExtract && failed === 0 && success > 0 && !alreadyMarkedExtracted) {
|
||||||
|
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
||||||
pkg.status = "extracting";
|
pkg.status = "extracting";
|
||||||
this.emitState();
|
this.emitState();
|
||||||
const extractionStartMs = nowMs();
|
const extractionStartMs = nowMs();
|
||||||
@ -6577,12 +6738,13 @@ export class DownloadManager extends EventEmitter {
|
|||||||
resolveArchiveItemsFromList(archiveName, completedItems);
|
resolveArchiveItemsFromList(archiveName, completedItems);
|
||||||
|
|
||||||
let lastExtractEmitAt = 0;
|
let lastExtractEmitAt = 0;
|
||||||
const emitExtractStatus = (_text: string, force = false): void => {
|
const emitExtractStatus = (text: string, force = false): void => {
|
||||||
const now = nowMs();
|
const now = nowMs();
|
||||||
if (!force && now - lastExtractEmitAt < EXTRACT_PROGRESS_EMIT_INTERVAL_MS) {
|
if (!force && now - lastExtractEmitAt < EXTRACT_PROGRESS_EMIT_INTERVAL_MS) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
lastExtractEmitAt = now;
|
lastExtractEmitAt = now;
|
||||||
|
pkg.postProcessLabel = text || "Entpacken...";
|
||||||
this.emitState();
|
this.emitState();
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -6622,9 +6784,10 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}, extractTimeoutMs);
|
}, extractTimeoutMs);
|
||||||
try {
|
try {
|
||||||
// Track multiple active archives for parallel extraction
|
// Track archives for parallel extraction progress
|
||||||
const activeArchiveItemsMap = new Map<string, DownloadItem[]>();
|
const fullResolvedItems = new Map<string, DownloadItem[]>();
|
||||||
const archiveStartTimes = new Map<string, number>();
|
const fullStartTimes = new Map<string, number>();
|
||||||
|
let fullLastProgressCurrent: number | null = null;
|
||||||
|
|
||||||
const result = await extractPackageArchives({
|
const result = await extractPackageArchives({
|
||||||
packageDir: pkg.outputDir,
|
packageDir: pkg.outputDir,
|
||||||
@ -6636,40 +6799,74 @@ export class DownloadManager extends EventEmitter {
|
|||||||
passwordList: this.settings.archivePasswordList,
|
passwordList: this.settings.archivePasswordList,
|
||||||
signal: extractAbortController.signal,
|
signal: extractAbortController.signal,
|
||||||
packageId,
|
packageId,
|
||||||
|
skipPostCleanup: true,
|
||||||
maxParallel: this.settings.maxParallelExtract || 2,
|
maxParallel: this.settings.maxParallelExtract || 2,
|
||||||
extractCpuPriority: this.settings.extractCpuPriority,
|
// All downloads finished — use NORMAL OS priority so extraction runs at
|
||||||
|
// full speed (matching manual 7-Zip/WinRAR speed).
|
||||||
|
extractCpuPriority: "high",
|
||||||
onProgress: (progress) => {
|
onProgress: (progress) => {
|
||||||
|
if (progress.phase === "preparing") {
|
||||||
|
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
||||||
|
this.emitState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
if (progress.phase === "done") {
|
if (progress.phase === "done") {
|
||||||
// Do NOT mark remaining archives as "Done" here — some may have
|
fullResolvedItems.clear();
|
||||||
// failed. The post-extraction code (result.failed check) will
|
fullStartTimes.clear();
|
||||||
// assign the correct label. Only clear the tracking maps.
|
fullLastProgressCurrent = null;
|
||||||
activeArchiveItemsMap.clear();
|
|
||||||
archiveStartTimes.clear();
|
|
||||||
emitExtractStatus("Entpacken 100%", true);
|
emitExtractStatus("Entpacken 100%", true);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const currentCount = Math.max(0, Number(progress.current ?? 0));
|
||||||
|
const archiveFinished = progress.archiveDone === true
|
||||||
|
|| (fullLastProgressCurrent !== null && currentCount > fullLastProgressCurrent);
|
||||||
|
fullLastProgressCurrent = currentCount;
|
||||||
|
|
||||||
if (progress.archiveName) {
|
if (progress.archiveName) {
|
||||||
// Resolve items for this archive if not yet tracked
|
// Resolve items for this archive if not yet tracked
|
||||||
if (!activeArchiveItemsMap.has(progress.archiveName)) {
|
if (!fullResolvedItems.has(progress.archiveName)) {
|
||||||
activeArchiveItemsMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
|
const resolved = resolveArchiveItems(progress.archiveName);
|
||||||
archiveStartTimes.set(progress.archiveName, nowMs());
|
fullResolvedItems.set(progress.archiveName, resolved);
|
||||||
|
fullStartTimes.set(progress.archiveName, nowMs());
|
||||||
|
if (resolved.length === 0) {
|
||||||
|
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
|
||||||
|
} else {
|
||||||
|
logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
|
||||||
|
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
|
||||||
|
const initAt = nowMs();
|
||||||
|
for (const entry of resolved) {
|
||||||
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
|
entry.fullStatus = initLabel;
|
||||||
|
entry.updatedAt = initAt;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const archiveItems = activeArchiveItemsMap.get(progress.archiveName)!;
|
const archiveItems = fullResolvedItems.get(progress.archiveName) || [];
|
||||||
|
|
||||||
// If archive is at 100%, mark its items as done and remove from active
|
// Only finalize on explicit archive completion (or real current increment),
|
||||||
if (Number(progress.archivePercent ?? 0) >= 100) {
|
// not on plain 100% archivePercent.
|
||||||
|
if (archiveFinished) {
|
||||||
const doneAt = nowMs();
|
const doneAt = nowMs();
|
||||||
const startedAt = archiveStartTimes.get(progress.archiveName) || doneAt;
|
const startedAt = fullStartTimes.get(progress.archiveName) || doneAt;
|
||||||
const doneLabel = formatExtractDone(doneAt - startedAt);
|
const doneLabel = progress.archiveSuccess === false
|
||||||
|
? "Entpacken - Error"
|
||||||
|
: formatExtractDone(doneAt - startedAt);
|
||||||
for (const entry of archiveItems) {
|
for (const entry of archiveItems) {
|
||||||
if (!isExtractedLabel(entry.fullStatus)) {
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
entry.fullStatus = doneLabel;
|
entry.fullStatus = doneLabel;
|
||||||
entry.updatedAt = doneAt;
|
entry.updatedAt = doneAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
activeArchiveItemsMap.delete(progress.archiveName);
|
fullResolvedItems.delete(progress.archiveName);
|
||||||
archiveStartTimes.delete(progress.archiveName);
|
fullStartTimes.delete(progress.archiveName);
|
||||||
|
// Show transitional label while next archive initializes
|
||||||
|
const done = currentCount;
|
||||||
|
if (done < progress.total) {
|
||||||
|
emitExtractStatus(`Entpacken (${done}/${progress.total}) - Naechstes Archiv...`, true);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
// Update this archive's items with per-archive progress
|
// Update this archive's items with per-archive progress
|
||||||
const archiveTag = progress.archiveName ? ` · ${progress.archiveName}` : "";
|
const archiveTag = progress.archiveName ? ` · ${progress.archiveName}` : "";
|
||||||
@ -6701,7 +6898,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000
|
const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000
|
||||||
? ` · ${Math.floor(progress.elapsedMs / 1000)}s`
|
? ` · ${Math.floor(progress.elapsedMs / 1000)}s`
|
||||||
: "";
|
: "";
|
||||||
const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
|
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
|
||||||
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
|
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
|
||||||
let overallLabel: string;
|
let overallLabel: string;
|
||||||
if (progress.passwordFound) {
|
if (progress.passwordFound) {
|
||||||
@ -6716,11 +6913,10 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
logger.info(`Post-Processing Entpacken Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}, lastError=${result.lastError || ""}`);
|
logger.info(`Post-Processing Entpacken Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}, lastError=${result.lastError || ""}`);
|
||||||
|
extractedCount = result.extracted;
|
||||||
|
|
||||||
// Auto-rename even when some archives failed — successfully extracted files still need renaming
|
// Auto-rename wird in runDeferredPostExtraction ausgeführt (im Hintergrund),
|
||||||
if (result.extracted > 0) {
|
// damit der Slot sofort freigegeben wird.
|
||||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.failed > 0) {
|
if (result.failed > 0) {
|
||||||
const reason = compactErrorText(result.lastError || "Entpacken fehlgeschlagen");
|
const reason = compactErrorText(result.lastError || "Entpacken fehlgeschlagen");
|
||||||
@ -6821,16 +7017,6 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.recordPackageHistory(packageId, pkg, items);
|
this.recordPackageHistory(packageId, pkg, items);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
|
|
||||||
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
|
|
||||||
if (removedArchives > 0) {
|
|
||||||
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
|
|
||||||
await this.collectMkvFilesToLibrary(packageId, pkg);
|
|
||||||
}
|
|
||||||
if (this.runPackageIds.has(packageId)) {
|
if (this.runPackageIds.has(packageId)) {
|
||||||
if (pkg.status === "completed" || pkg.status === "failed") {
|
if (pkg.status === "completed" || pkg.status === "failed") {
|
||||||
this.runCompletedPackages.add(packageId);
|
this.runCompletedPackages.add(packageId);
|
||||||
@ -6838,10 +7024,139 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.runCompletedPackages.delete(packageId);
|
this.runCompletedPackages.delete(packageId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
logger.info(`Post-Processing Ende: pkg=${pkg.name}, status=${pkg.status}`);
|
logger.info(`Post-Processing Ende: pkg=${pkg.name}, status=${pkg.status} (deferred work wird im Hintergrund ausgeführt)`);
|
||||||
|
|
||||||
this.applyPackageDoneCleanup(packageId);
|
// Deferred post-extraction: Rename, MKV-Sammlung, Cleanup laufen im Hintergrund,
|
||||||
|
// damit der Post-Process-Slot sofort freigegeben wird und das nächste Pack
|
||||||
|
// ohne 10–15 Sekunden Pause entpacken kann.
|
||||||
|
void this.runDeferredPostExtraction(packageId, pkg, success, failed, alreadyMarkedExtracted, extractedCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs slow post-extraction work (rename, MKV collection, cleanup) in the background
|
||||||
|
* so the post-process slot is released immediately and the next pack can start unpacking.
|
||||||
|
*/
|
||||||
|
private async runDeferredPostExtraction(
|
||||||
|
packageId: string,
|
||||||
|
pkg: PackageEntry,
|
||||||
|
success: number,
|
||||||
|
failed: number,
|
||||||
|
alreadyMarkedExtracted: boolean,
|
||||||
|
extractedCount: number
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
// ── Nested extraction: extract archives found inside the extracted output ──
|
||||||
|
if (extractedCount > 0 && failed === 0 && this.settings.autoExtract) {
|
||||||
|
const nestedBlacklist = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
|
||||||
|
const nestedCandidates = (await findArchiveCandidates(pkg.extractDir))
|
||||||
|
.filter((p) => !nestedBlacklist.test(p));
|
||||||
|
if (nestedCandidates.length > 0) {
|
||||||
|
pkg.postProcessLabel = "Nested Entpacken...";
|
||||||
|
this.emitState();
|
||||||
|
logger.info(`Deferred Nested-Extraction: ${nestedCandidates.length} Archive in ${pkg.extractDir}`);
|
||||||
|
const nestedResult = await extractPackageArchives({
|
||||||
|
packageDir: pkg.extractDir,
|
||||||
|
targetDir: pkg.extractDir,
|
||||||
|
cleanupMode: this.settings.cleanupMode,
|
||||||
|
conflictMode: this.settings.extractConflictMode,
|
||||||
|
removeLinks: false,
|
||||||
|
removeSamples: false,
|
||||||
|
passwordList: this.settings.archivePasswordList,
|
||||||
|
packageId,
|
||||||
|
onlyArchives: new Set(nestedCandidates.map((p) => process.platform === "win32" ? path.resolve(p).toLowerCase() : path.resolve(p))),
|
||||||
|
maxParallel: this.settings.maxParallelExtract || 2,
|
||||||
|
extractCpuPriority: this.settings.extractCpuPriority,
|
||||||
|
});
|
||||||
|
extractedCount += nestedResult.extracted;
|
||||||
|
logger.info(`Deferred Nested-Extraction Ende: extracted=${nestedResult.extracted}, failed=${nestedResult.failed}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Auto-Rename ──
|
||||||
|
if (extractedCount > 0) {
|
||||||
|
pkg.postProcessLabel = "Renaming...";
|
||||||
|
this.emitState();
|
||||||
|
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Archive cleanup (source archives in outputDir) ──
|
||||||
|
if (extractedCount > 0 && failed === 0 && this.settings.cleanupMode !== "none") {
|
||||||
|
pkg.postProcessLabel = "Aufräumen...";
|
||||||
|
this.emitState();
|
||||||
|
const sourceAndTargetEqual = path.resolve(pkg.outputDir).toLowerCase() === path.resolve(pkg.extractDir).toLowerCase();
|
||||||
|
if (!sourceAndTargetEqual) {
|
||||||
|
const candidates = await findArchiveCandidates(pkg.outputDir);
|
||||||
|
if (candidates.length > 0) {
|
||||||
|
const removed = await cleanupArchives(candidates, this.settings.cleanupMode);
|
||||||
|
if (removed > 0) {
|
||||||
|
logger.info(`Deferred Archive-Cleanup: pkg=${pkg.name}, entfernt=${removed}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Hybrid archive cleanup (wenn bereits als extracted markiert) ──
|
||||||
|
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
|
||||||
|
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
|
||||||
|
if (removedArchives > 0) {
|
||||||
|
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Link/Sample artifact removal ──
|
||||||
|
if (extractedCount > 0 && failed === 0) {
|
||||||
|
if (this.settings.removeLinkFilesAfterExtract) {
|
||||||
|
const removedLinks = await removeDownloadLinkArtifacts(pkg.extractDir);
|
||||||
|
if (removedLinks > 0) {
|
||||||
|
logger.info(`Deferred Link-Cleanup: pkg=${pkg.name}, entfernt=${removedLinks}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.settings.removeSamplesAfterExtract) {
|
||||||
|
const removedSamples = await removeSampleArtifacts(pkg.extractDir);
|
||||||
|
if (removedSamples.files > 0 || removedSamples.dirs > 0) {
|
||||||
|
logger.info(`Deferred Sample-Cleanup: pkg=${pkg.name}, files=${removedSamples.files}, dirs=${removedSamples.dirs}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Empty directory tree removal ──
|
||||||
|
if (extractedCount > 0 && failed === 0 && this.settings.cleanupMode === "delete") {
|
||||||
|
if (!(await hasAnyFilesRecursive(pkg.outputDir))) {
|
||||||
|
const removedDirs = await removeEmptyDirectoryTree(pkg.outputDir);
|
||||||
|
if (removedDirs > 0) {
|
||||||
|
logger.info(`Deferred leere Download-Ordner entfernt: pkg=${pkg.name}, dirs=${removedDirs}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Resume state cleanup ──
|
||||||
|
if (extractedCount > 0 && failed === 0) {
|
||||||
|
await clearExtractResumeState(pkg.outputDir, packageId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── MKV collection ──
|
||||||
|
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
|
||||||
|
pkg.postProcessLabel = "Verschiebe MKVs...";
|
||||||
|
this.emitState();
|
||||||
|
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||||
|
}
|
||||||
|
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
|
pkg.updatedAt = nowMs();
|
||||||
|
this.persistSoon();
|
||||||
|
this.emitState();
|
||||||
|
|
||||||
|
this.applyPackageDoneCleanup(packageId);
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`Deferred Post-Extraction Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
||||||
|
} finally {
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
|
pkg.updatedAt = nowMs();
|
||||||
|
this.persistSoon();
|
||||||
|
this.emitState();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private applyPackageDoneCleanup(packageId: string): void {
|
private applyPackageDoneCleanup(packageId: string): void {
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@ -7,6 +7,7 @@ import { IPC_CHANNELS } from "../shared/ipc";
|
|||||||
import { getLogFilePath, logger } from "./logger";
|
import { getLogFilePath, logger } from "./logger";
|
||||||
import { APP_NAME } from "./constants";
|
import { APP_NAME } from "./constants";
|
||||||
import { extractHttpLinksFromText } from "./utils";
|
import { extractHttpLinksFromText } from "./utils";
|
||||||
|
import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor";
|
||||||
|
|
||||||
/* ── IPC validation helpers ────────────────────────────────────── */
|
/* ── IPC validation helpers ────────────────────────────────────── */
|
||||||
function validateString(value: unknown, name: string): string {
|
function validateString(value: unknown, name: string): string {
|
||||||
@ -81,7 +82,7 @@ function createWindow(): BrowserWindow {
|
|||||||
responseHeaders: {
|
responseHeaders: {
|
||||||
...details.responseHeaders,
|
...details.responseHeaders,
|
||||||
"Content-Security-Policy": [
|
"Content-Security-Policy": [
|
||||||
"default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; connect-src 'self' https://api.real-debrid.com https://codeberg.org https://bestdebrid.com https://api.alldebrid.com https://www.mega-debrid.eu"
|
"default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; connect-src 'self' https://api.real-debrid.com https://codeberg.org https://bestdebrid.com https://api.alldebrid.com https://www.mega-debrid.eu https://git.24-music.de https://ddownload.com https://ddl.to"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -188,7 +189,12 @@ function startClipboardWatcher(): void {
|
|||||||
}
|
}
|
||||||
lastClipboardText = normalizeClipboardText(clipboard.readText());
|
lastClipboardText = normalizeClipboardText(clipboard.readText());
|
||||||
clipboardTimer = setInterval(() => {
|
clipboardTimer = setInterval(() => {
|
||||||
const text = normalizeClipboardText(clipboard.readText());
|
let text: string;
|
||||||
|
try {
|
||||||
|
text = normalizeClipboardText(clipboard.readText());
|
||||||
|
} catch {
|
||||||
|
return;
|
||||||
|
}
|
||||||
if (text === lastClipboardText || !text.trim()) {
|
if (text === lastClipboardText || !text.trim()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -481,6 +487,7 @@ app.on("second-instance", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
app.whenReady().then(() => {
|
app.whenReady().then(() => {
|
||||||
|
cleanupStaleSubstDrives();
|
||||||
registerIpcHandlers();
|
registerIpcHandlers();
|
||||||
mainWindow = createWindow();
|
mainWindow = createWindow();
|
||||||
bindMainWindowLifecycle(mainWindow);
|
bindMainWindowLifecycle(mainWindow);
|
||||||
@ -493,6 +500,9 @@ app.whenReady().then(() => {
|
|||||||
bindMainWindowLifecycle(mainWindow);
|
bindMainWindowLifecycle(mainWindow);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
}).catch((error) => {
|
||||||
|
console.error("App startup failed:", error);
|
||||||
|
app.quit();
|
||||||
});
|
});
|
||||||
|
|
||||||
app.on("window-all-closed", () => {
|
app.on("window-all-closed", () => {
|
||||||
@ -505,6 +515,7 @@ app.on("before-quit", () => {
|
|||||||
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
|
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
|
||||||
stopClipboardWatcher();
|
stopClipboardWatcher();
|
||||||
destroyTray();
|
destroyTray();
|
||||||
|
shutdownDaemon();
|
||||||
try {
|
try {
|
||||||
controller.shutdown();
|
controller.shutdown();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@ -228,22 +228,23 @@ export class MegaWebFallback {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public async unrestrict(link: string, signal?: AbortSignal): Promise<UnrestrictedLink | null> {
|
public async unrestrict(link: string, signal?: AbortSignal): Promise<UnrestrictedLink | null> {
|
||||||
|
const overallSignal = withTimeoutSignal(signal, 180000);
|
||||||
return this.runExclusive(async () => {
|
return this.runExclusive(async () => {
|
||||||
throwIfAborted(signal);
|
throwIfAborted(overallSignal);
|
||||||
const creds = this.getCredentials();
|
const creds = this.getCredentials();
|
||||||
if (!creds.login.trim() || !creds.password.trim()) {
|
if (!creds.login.trim() || !creds.password.trim()) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!this.cookie || Date.now() - this.cookieSetAt > 20 * 60 * 1000) {
|
if (!this.cookie || Date.now() - this.cookieSetAt > 20 * 60 * 1000) {
|
||||||
await this.login(creds.login, creds.password, signal);
|
await this.login(creds.login, creds.password, overallSignal);
|
||||||
}
|
}
|
||||||
|
|
||||||
const generated = await this.generate(link, signal);
|
const generated = await this.generate(link, overallSignal);
|
||||||
if (!generated) {
|
if (!generated) {
|
||||||
this.cookie = "";
|
this.cookie = "";
|
||||||
await this.login(creds.login, creds.password, signal);
|
await this.login(creds.login, creds.password, overallSignal);
|
||||||
const retry = await this.generate(link, signal);
|
const retry = await this.generate(link, overallSignal);
|
||||||
if (!retry) {
|
if (!retry) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -261,7 +262,7 @@ export class MegaWebFallback {
|
|||||||
fileSize: null,
|
fileSize: null,
|
||||||
retriesUsed: 0
|
retriesUsed: 0
|
||||||
};
|
};
|
||||||
}, signal);
|
}, overallSignal);
|
||||||
}
|
}
|
||||||
|
|
||||||
public invalidateSession(): void {
|
public invalidateSession(): void {
|
||||||
|
|||||||
@ -8,6 +8,7 @@ export interface UnrestrictedLink {
|
|||||||
directUrl: string;
|
directUrl: string;
|
||||||
fileSize: number | null;
|
fileSize: number | null;
|
||||||
retriesUsed: number;
|
retriesUsed: number;
|
||||||
|
skipTlsVerify?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
function shouldRetryStatus(status: number): boolean {
|
function shouldRetryStatus(status: number): boolean {
|
||||||
|
|||||||
@ -76,7 +76,12 @@ async function cleanupOldSessionLogs(dir: string, maxAgeDays: number): Promise<v
|
|||||||
|
|
||||||
export function initSessionLog(baseDir: string): void {
|
export function initSessionLog(baseDir: string): void {
|
||||||
sessionLogsDir = path.join(baseDir, "session-logs");
|
sessionLogsDir = path.join(baseDir, "session-logs");
|
||||||
fs.mkdirSync(sessionLogsDir, { recursive: true });
|
try {
|
||||||
|
fs.mkdirSync(sessionLogsDir, { recursive: true });
|
||||||
|
} catch {
|
||||||
|
sessionLogsDir = null;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const timestamp = formatTimestamp();
|
const timestamp = formatTimestamp();
|
||||||
sessionLogPath = path.join(sessionLogsDir, `session_${timestamp}.txt`);
|
sessionLogPath = path.join(sessionLogsDir, `session_${timestamp}.txt`);
|
||||||
|
|||||||
@ -5,8 +5,8 @@ import { AppSettings, BandwidthScheduleEntry, DebridProvider, DownloadItem, Down
|
|||||||
import { defaultSettings } from "./constants";
|
import { defaultSettings } from "./constants";
|
||||||
import { logger } from "./logger";
|
import { logger } from "./logger";
|
||||||
|
|
||||||
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid"]);
|
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
|
||||||
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid"]);
|
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
|
||||||
const VALID_CLEANUP_MODES = new Set(["none", "trash", "delete"]);
|
const VALID_CLEANUP_MODES = new Set(["none", "trash", "delete"]);
|
||||||
const VALID_CONFLICT_MODES = new Set(["overwrite", "skip", "rename", "ask"]);
|
const VALID_CONFLICT_MODES = new Set(["overwrite", "skip", "rename", "ask"]);
|
||||||
const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]);
|
const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]);
|
||||||
@ -17,7 +17,7 @@ const VALID_PACKAGE_PRIORITIES = new Set<string>(["high", "normal", "low"]);
|
|||||||
const VALID_DOWNLOAD_STATUSES = new Set<DownloadStatus>([
|
const VALID_DOWNLOAD_STATUSES = new Set<DownloadStatus>([
|
||||||
"queued", "validating", "downloading", "paused", "reconnect_wait", "extracting", "integrity_check", "completed", "failed", "cancelled"
|
"queued", "validating", "downloading", "paused", "reconnect_wait", "extracting", "integrity_check", "completed", "failed", "cancelled"
|
||||||
]);
|
]);
|
||||||
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid"]);
|
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
|
||||||
const VALID_ONLINE_STATUSES = new Set(["online", "offline", "checking"]);
|
const VALID_ONLINE_STATUSES = new Set(["online", "offline", "checking"]);
|
||||||
|
|
||||||
function asText(value: unknown): string {
|
function asText(value: unknown): string {
|
||||||
@ -91,6 +91,18 @@ function normalizeColumnOrder(raw: unknown): string[] {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const DEPRECATED_UPDATE_REPOS = new Set([
|
||||||
|
"sucukdeluxe/real-debrid-downloader"
|
||||||
|
]);
|
||||||
|
|
||||||
|
function migrateUpdateRepo(raw: string, fallback: string): string {
|
||||||
|
const trimmed = raw.trim();
|
||||||
|
if (!trimmed || DEPRECATED_UPDATE_REPOS.has(trimmed.toLowerCase())) {
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
return trimmed;
|
||||||
|
}
|
||||||
|
|
||||||
export function normalizeSettings(settings: AppSettings): AppSettings {
|
export function normalizeSettings(settings: AppSettings): AppSettings {
|
||||||
const defaults = defaultSettings();
|
const defaults = defaultSettings();
|
||||||
const normalized: AppSettings = {
|
const normalized: AppSettings = {
|
||||||
@ -99,7 +111,9 @@ export function normalizeSettings(settings: AppSettings): AppSettings {
|
|||||||
megaPassword: asText(settings.megaPassword),
|
megaPassword: asText(settings.megaPassword),
|
||||||
bestToken: asText(settings.bestToken),
|
bestToken: asText(settings.bestToken),
|
||||||
allDebridToken: asText(settings.allDebridToken),
|
allDebridToken: asText(settings.allDebridToken),
|
||||||
archivePasswordList: String(settings.archivePasswordList ?? "").replace(/\r\n/g, "\n"),
|
ddownloadLogin: asText(settings.ddownloadLogin),
|
||||||
|
ddownloadPassword: asText(settings.ddownloadPassword),
|
||||||
|
archivePasswordList: String(settings.archivePasswordList ?? "").replace(/\r\n|\r/g, "\n"),
|
||||||
rememberToken: Boolean(settings.rememberToken),
|
rememberToken: Boolean(settings.rememberToken),
|
||||||
providerPrimary: settings.providerPrimary,
|
providerPrimary: settings.providerPrimary,
|
||||||
providerSecondary: settings.providerSecondary,
|
providerSecondary: settings.providerSecondary,
|
||||||
@ -130,7 +144,7 @@ export function normalizeSettings(settings: AppSettings): AppSettings {
|
|||||||
speedLimitKbps: clampNumber(settings.speedLimitKbps, defaults.speedLimitKbps, 0, 500000),
|
speedLimitKbps: clampNumber(settings.speedLimitKbps, defaults.speedLimitKbps, 0, 500000),
|
||||||
speedLimitMode: settings.speedLimitMode,
|
speedLimitMode: settings.speedLimitMode,
|
||||||
autoUpdateCheck: Boolean(settings.autoUpdateCheck),
|
autoUpdateCheck: Boolean(settings.autoUpdateCheck),
|
||||||
updateRepo: asText(settings.updateRepo) || defaults.updateRepo,
|
updateRepo: migrateUpdateRepo(asText(settings.updateRepo), defaults.updateRepo),
|
||||||
clipboardWatch: Boolean(settings.clipboardWatch),
|
clipboardWatch: Boolean(settings.clipboardWatch),
|
||||||
minimizeToTray: Boolean(settings.minimizeToTray),
|
minimizeToTray: Boolean(settings.minimizeToTray),
|
||||||
collapseNewPackages: settings.collapseNewPackages !== undefined ? Boolean(settings.collapseNewPackages) : defaults.collapseNewPackages,
|
collapseNewPackages: settings.collapseNewPackages !== undefined ? Boolean(settings.collapseNewPackages) : defaults.collapseNewPackages,
|
||||||
@ -188,7 +202,9 @@ function sanitizeCredentialPersistence(settings: AppSettings): AppSettings {
|
|||||||
megaLogin: "",
|
megaLogin: "",
|
||||||
megaPassword: "",
|
megaPassword: "",
|
||||||
bestToken: "",
|
bestToken: "",
|
||||||
allDebridToken: ""
|
allDebridToken: "",
|
||||||
|
ddownloadLogin: "",
|
||||||
|
ddownloadPassword: ""
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -430,6 +446,7 @@ export function normalizeLoadedSessionTransientFields(session: SessionState): Se
|
|||||||
if (ACTIVE_PKG_STATUSES.has(pkg.status)) {
|
if (ACTIVE_PKG_STATUSES.has(pkg.status)) {
|
||||||
pkg.status = "queued";
|
pkg.status = "queued";
|
||||||
}
|
}
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clear stale session-level running/paused flags
|
// Clear stale session-level running/paused flags
|
||||||
|
|||||||
@ -336,6 +336,8 @@ function parseReleasePayload(payload: Record<string, unknown>, fallback: UpdateC
|
|||||||
const releaseUrl = String(payload.html_url || fallback.releaseUrl);
|
const releaseUrl = String(payload.html_url || fallback.releaseUrl);
|
||||||
const setup = pickSetupAsset(readReleaseAssets(payload));
|
const setup = pickSetupAsset(readReleaseAssets(payload));
|
||||||
|
|
||||||
|
const body = typeof payload.body === "string" ? payload.body.trim() : "";
|
||||||
|
|
||||||
return {
|
return {
|
||||||
updateAvailable: isRemoteNewer(APP_VERSION, latestVersion),
|
updateAvailable: isRemoteNewer(APP_VERSION, latestVersion),
|
||||||
currentVersion: APP_VERSION,
|
currentVersion: APP_VERSION,
|
||||||
@ -344,7 +346,8 @@ function parseReleasePayload(payload: Record<string, unknown>, fallback: UpdateC
|
|||||||
releaseUrl,
|
releaseUrl,
|
||||||
setupAssetUrl: setup?.browser_download_url || "",
|
setupAssetUrl: setup?.browser_download_url || "",
|
||||||
setupAssetName: setup?.name || "",
|
setupAssetName: setup?.name || "",
|
||||||
setupAssetDigest: setup?.digest || ""
|
setupAssetDigest: setup?.digest || "",
|
||||||
|
releaseNotes: body || undefined
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -791,7 +794,8 @@ async function downloadFile(url: string, targetPath: string, onProgress?: Update
|
|||||||
};
|
};
|
||||||
|
|
||||||
const reader = response.body.getReader();
|
const reader = response.body.getReader();
|
||||||
const chunks: Buffer[] = [];
|
const tempPath = targetPath + ".tmp";
|
||||||
|
const writeStream = fs.createWriteStream(tempPath);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
resetIdleTimer();
|
resetIdleTimer();
|
||||||
@ -805,27 +809,39 @@ async function downloadFile(url: string, targetPath: string, onProgress?: Update
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
const buf = Buffer.from(value.buffer, value.byteOffset, value.byteLength);
|
const buf = Buffer.from(value.buffer, value.byteOffset, value.byteLength);
|
||||||
chunks.push(buf);
|
if (!writeStream.write(buf)) {
|
||||||
|
await new Promise<void>((resolve) => writeStream.once("drain", resolve));
|
||||||
|
}
|
||||||
downloadedBytes += buf.byteLength;
|
downloadedBytes += buf.byteLength;
|
||||||
resetIdleTimer();
|
resetIdleTimer();
|
||||||
emitDownloadProgress(false);
|
emitDownloadProgress(false);
|
||||||
}
|
}
|
||||||
|
} catch (error) {
|
||||||
|
writeStream.destroy();
|
||||||
|
await fs.promises.rm(tempPath, { force: true }).catch(() => {});
|
||||||
|
throw error;
|
||||||
} finally {
|
} finally {
|
||||||
clearIdleTimer();
|
clearIdleTimer();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
writeStream.end(() => resolve());
|
||||||
|
writeStream.on("error", reject);
|
||||||
|
});
|
||||||
|
|
||||||
if (idleTimedOut) {
|
if (idleTimedOut) {
|
||||||
|
await fs.promises.rm(tempPath, { force: true }).catch(() => {});
|
||||||
throw new Error(`Update Download Body Timeout nach ${Math.ceil(idleTimeoutMs / 1000)}s`);
|
throw new Error(`Update Download Body Timeout nach ${Math.ceil(idleTimeoutMs / 1000)}s`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const fileBuffer = Buffer.concat(chunks);
|
if (totalBytes && downloadedBytes !== totalBytes) {
|
||||||
if (totalBytes && fileBuffer.byteLength !== totalBytes) {
|
await fs.promises.rm(tempPath, { force: true }).catch(() => {});
|
||||||
throw new Error(`Update Download unvollständig (${fileBuffer.byteLength} / ${totalBytes} Bytes)`);
|
throw new Error(`Update Download unvollständig (${downloadedBytes} / ${totalBytes} Bytes)`);
|
||||||
}
|
}
|
||||||
|
|
||||||
await fs.promises.writeFile(targetPath, fileBuffer);
|
await fs.promises.rename(tempPath, targetPath);
|
||||||
emitDownloadProgress(true);
|
emitDownloadProgress(true);
|
||||||
logger.info(`Update-Download abgeschlossen: ${targetPath} (${fileBuffer.byteLength} Bytes)`);
|
logger.info(`Update-Download abgeschlossen: ${targetPath} (${downloadedBytes} Bytes)`);
|
||||||
|
|
||||||
return { expectedBytes: totalBytes };
|
return { expectedBytes: totalBytes };
|
||||||
}
|
}
|
||||||
|
|||||||
@ -4,6 +4,7 @@ import {
|
|||||||
AppSettings,
|
AppSettings,
|
||||||
DuplicatePolicy,
|
DuplicatePolicy,
|
||||||
HistoryEntry,
|
HistoryEntry,
|
||||||
|
PackagePriority,
|
||||||
SessionStats,
|
SessionStats,
|
||||||
StartConflictEntry,
|
StartConflictEntry,
|
||||||
StartConflictResolutionResult,
|
StartConflictResolutionResult,
|
||||||
@ -56,7 +57,7 @@ const api: ElectronApi = {
|
|||||||
getHistory: (): Promise<HistoryEntry[]> => ipcRenderer.invoke(IPC_CHANNELS.GET_HISTORY),
|
getHistory: (): Promise<HistoryEntry[]> => ipcRenderer.invoke(IPC_CHANNELS.GET_HISTORY),
|
||||||
clearHistory: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.CLEAR_HISTORY),
|
clearHistory: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.CLEAR_HISTORY),
|
||||||
removeHistoryEntry: (entryId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.REMOVE_HISTORY_ENTRY, entryId),
|
removeHistoryEntry: (entryId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.REMOVE_HISTORY_ENTRY, entryId),
|
||||||
setPackagePriority: (packageId: string, priority: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SET_PACKAGE_PRIORITY, packageId, priority),
|
setPackagePriority: (packageId: string, priority: PackagePriority): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SET_PACKAGE_PRIORITY, packageId, priority),
|
||||||
skipItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SKIP_ITEMS, itemIds),
|
skipItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SKIP_ITEMS, itemIds),
|
||||||
resetItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RESET_ITEMS, itemIds),
|
resetItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RESET_ITEMS, itemIds),
|
||||||
startItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.START_ITEMS, itemIds),
|
startItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.START_ITEMS, itemIds),
|
||||||
|
|||||||
@ -36,6 +36,7 @@ interface ConfirmPromptState {
|
|||||||
message: string;
|
message: string;
|
||||||
confirmLabel: string;
|
confirmLabel: string;
|
||||||
danger?: boolean;
|
danger?: boolean;
|
||||||
|
details?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ContextMenuState {
|
interface ContextMenuState {
|
||||||
@ -61,7 +62,7 @@ const emptyStats = (): DownloadStats => ({
|
|||||||
|
|
||||||
const emptySnapshot = (): UiSnapshot => ({
|
const emptySnapshot = (): UiSnapshot => ({
|
||||||
settings: {
|
settings: {
|
||||||
token: "", megaLogin: "", megaPassword: "", bestToken: "", allDebridToken: "",
|
token: "", megaLogin: "", megaPassword: "", bestToken: "", allDebridToken: "", ddownloadLogin: "", ddownloadPassword: "",
|
||||||
archivePasswordList: "",
|
archivePasswordList: "",
|
||||||
rememberToken: true, providerPrimary: "realdebrid", providerSecondary: "megadebrid",
|
rememberToken: true, providerPrimary: "realdebrid", providerSecondary: "megadebrid",
|
||||||
providerTertiary: "bestdebrid", autoProviderFallback: true, outputDir: "", packageName: "",
|
providerTertiary: "bestdebrid", autoProviderFallback: true, outputDir: "", packageName: "",
|
||||||
@ -93,7 +94,7 @@ const cleanupLabels: Record<string, string> = {
|
|||||||
const AUTO_RENDER_PACKAGE_LIMIT = 260;
|
const AUTO_RENDER_PACKAGE_LIMIT = 260;
|
||||||
|
|
||||||
const providerLabels: Record<DebridProvider, string> = {
|
const providerLabels: Record<DebridProvider, string> = {
|
||||||
realdebrid: "Real-Debrid", megadebrid: "Mega-Debrid", bestdebrid: "BestDebrid", alldebrid: "AllDebrid"
|
realdebrid: "Real-Debrid", megadebrid: "Mega-Debrid", bestdebrid: "BestDebrid", alldebrid: "AllDebrid", ddownload: "DDownload"
|
||||||
};
|
};
|
||||||
|
|
||||||
function formatDateTime(ts: number): string {
|
function formatDateTime(ts: number): string {
|
||||||
@ -115,15 +116,6 @@ function extractHoster(url: string): string {
|
|||||||
} catch { return ""; }
|
} catch { return ""; }
|
||||||
}
|
}
|
||||||
|
|
||||||
function formatHoster(item: DownloadItem): string {
|
|
||||||
const hoster = extractHoster(item.url);
|
|
||||||
const label = hoster || "-";
|
|
||||||
if (item.provider) {
|
|
||||||
return `${label} via ${providerLabels[item.provider]}`;
|
|
||||||
}
|
|
||||||
return label;
|
|
||||||
}
|
|
||||||
|
|
||||||
const settingsSubTabs: { key: SettingsSubTab; label: string }[] = [
|
const settingsSubTabs: { key: SettingsSubTab; label: string }[] = [
|
||||||
{ key: "allgemein", label: "Allgemein" },
|
{ key: "allgemein", label: "Allgemein" },
|
||||||
{ key: "accounts", label: "Accounts" },
|
{ key: "accounts", label: "Accounts" },
|
||||||
@ -931,6 +923,15 @@ export function App(): ReactElement {
|
|||||||
return list;
|
return list;
|
||||||
}, [settingsDraft.token, settingsDraft.megaLogin, settingsDraft.megaPassword, settingsDraft.bestToken, settingsDraft.allDebridToken]);
|
}, [settingsDraft.token, settingsDraft.megaLogin, settingsDraft.megaPassword, settingsDraft.bestToken, settingsDraft.allDebridToken]);
|
||||||
|
|
||||||
|
// DDownload is a direct file hoster (not a debrid service) and is used automatically
|
||||||
|
// for ddownload.com/ddl.to URLs. It counts as a configured account but does not
|
||||||
|
// appear in the primary/secondary/tertiary provider dropdowns.
|
||||||
|
const hasDdownloadAccount = useMemo(() =>
|
||||||
|
Boolean((settingsDraft.ddownloadLogin || "").trim() && (settingsDraft.ddownloadPassword || "").trim()),
|
||||||
|
[settingsDraft.ddownloadLogin, settingsDraft.ddownloadPassword]);
|
||||||
|
|
||||||
|
const totalConfiguredAccounts = configuredProviders.length + (hasDdownloadAccount ? 1 : 0);
|
||||||
|
|
||||||
const primaryProviderValue: DebridProvider = useMemo(() => {
|
const primaryProviderValue: DebridProvider = useMemo(() => {
|
||||||
if (configuredProviders.includes(settingsDraft.providerPrimary)) {
|
if (configuredProviders.includes(settingsDraft.providerPrimary)) {
|
||||||
return settingsDraft.providerPrimary;
|
return settingsDraft.providerPrimary;
|
||||||
@ -990,10 +991,36 @@ export function App(): ReactElement {
|
|||||||
if (source === "manual") { showToast(`Kein Update verfügbar (v${result.currentVersion})`, 2000); }
|
if (source === "manual") { showToast(`Kein Update verfügbar (v${result.currentVersion})`, 2000); }
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
let changelogText = "";
|
||||||
|
if (result.releaseNotes) {
|
||||||
|
const lines = result.releaseNotes.split("\n");
|
||||||
|
const compactLines: string[] = [];
|
||||||
|
for (const line of lines) {
|
||||||
|
if (/^\s{2,}[-*]/.test(line)) continue;
|
||||||
|
if (/^#{1,6}\s/.test(line)) continue;
|
||||||
|
if (!line.trim()) continue;
|
||||||
|
let clean = line
|
||||||
|
.replace(/\*\*([^*]+)\*\*/g, "$1")
|
||||||
|
.replace(/\*([^*]+)\*/g, "$1")
|
||||||
|
.replace(/`([^`]+)`/g, "$1")
|
||||||
|
.replace(/^\s*[-*]\s+/, "- ")
|
||||||
|
.trim();
|
||||||
|
const colonIdx = clean.indexOf(":");
|
||||||
|
if (colonIdx > 0 && colonIdx < clean.length - 1) {
|
||||||
|
const afterColon = clean.slice(colonIdx + 1).trim();
|
||||||
|
if (afterColon.length > 60) {
|
||||||
|
clean = clean.slice(0, colonIdx + 1).trim();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (clean) compactLines.push(clean);
|
||||||
|
}
|
||||||
|
changelogText = compactLines.join("\n");
|
||||||
|
}
|
||||||
const approved = await askConfirmPrompt({
|
const approved = await askConfirmPrompt({
|
||||||
title: "Update verfügbar",
|
title: "Update verfügbar",
|
||||||
message: `${result.latestTag} (aktuell v${result.currentVersion})\n\nJetzt automatisch herunterladen und installieren?`,
|
message: `${result.latestTag} (aktuell v${result.currentVersion})\n\nJetzt automatisch herunterladen und installieren?`,
|
||||||
confirmLabel: "Jetzt installieren"
|
confirmLabel: "Jetzt installieren",
|
||||||
|
details: changelogText || undefined
|
||||||
});
|
});
|
||||||
if (!mountedRef.current) {
|
if (!mountedRef.current) {
|
||||||
return;
|
return;
|
||||||
@ -1104,7 +1131,7 @@ export function App(): ReactElement {
|
|||||||
|
|
||||||
const onStartDownloads = async (): Promise<void> => {
|
const onStartDownloads = async (): Promise<void> => {
|
||||||
await performQuickAction(async () => {
|
await performQuickAction(async () => {
|
||||||
if (configuredProviders.length === 0) {
|
if (totalConfiguredAccounts === 0) {
|
||||||
setTab("settings");
|
setTab("settings");
|
||||||
showToast("Bitte zuerst mindestens einen Hoster-Account eintragen", 3000);
|
showToast("Bitte zuerst mindestens einen Hoster-Account eintragen", 3000);
|
||||||
return;
|
return;
|
||||||
@ -1834,10 +1861,12 @@ export function App(): ReactElement {
|
|||||||
|
|
||||||
const executeDeleteSelection = useCallback((ids: Set<string>): void => {
|
const executeDeleteSelection = useCallback((ids: Set<string>): void => {
|
||||||
const current = snapshotRef.current;
|
const current = snapshotRef.current;
|
||||||
|
const promises: Promise<void>[] = [];
|
||||||
for (const id of ids) {
|
for (const id of ids) {
|
||||||
if (current.session.items[id]) void window.rd.removeItem(id);
|
if (current.session.items[id]) promises.push(window.rd.removeItem(id));
|
||||||
else if (current.session.packages[id]) void window.rd.cancelPackage(id);
|
else if (current.session.packages[id]) promises.push(window.rd.cancelPackage(id));
|
||||||
}
|
}
|
||||||
|
void Promise.all(promises).catch(() => {});
|
||||||
setSelectedIds(new Set());
|
setSelectedIds(new Set());
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@ -1880,28 +1909,28 @@ export function App(): ReactElement {
|
|||||||
|
|
||||||
const onExportBackup = async (): Promise<void> => {
|
const onExportBackup = async (): Promise<void> => {
|
||||||
closeMenus();
|
closeMenus();
|
||||||
try {
|
await performQuickAction(async () => {
|
||||||
const result = await window.rd.exportBackup();
|
const result = await window.rd.exportBackup();
|
||||||
if (result.saved) {
|
if (result.saved) {
|
||||||
showToast("Sicherung exportiert");
|
showToast("Sicherung exportiert");
|
||||||
}
|
}
|
||||||
} catch (error) {
|
}, (error) => {
|
||||||
showToast(`Sicherung fehlgeschlagen: ${String(error)}`, 2600);
|
showToast(`Sicherung fehlgeschlagen: ${String(error)}`, 2600);
|
||||||
}
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const onImportBackup = async (): Promise<void> => {
|
const onImportBackup = async (): Promise<void> => {
|
||||||
closeMenus();
|
closeMenus();
|
||||||
try {
|
await performQuickAction(async () => {
|
||||||
const result = await window.rd.importBackup();
|
const result = await window.rd.importBackup();
|
||||||
if (result.restored) {
|
if (result.restored) {
|
||||||
showToast(result.message, 4000);
|
showToast(result.message, 4000);
|
||||||
} else if (result.message !== "Abgebrochen") {
|
} else if (result.message !== "Abgebrochen") {
|
||||||
showToast(`Sicherung laden fehlgeschlagen: ${result.message}`, 3000);
|
showToast(`Sicherung laden fehlgeschlagen: ${result.message}`, 3000);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
}, (error) => {
|
||||||
showToast(`Sicherung laden fehlgeschlagen: ${String(error)}`, 2600);
|
showToast(`Sicherung laden fehlgeschlagen: ${String(error)}`, 2600);
|
||||||
}
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const onMenuRestart = (): void => {
|
const onMenuRestart = (): void => {
|
||||||
@ -2212,10 +2241,10 @@ export function App(): ReactElement {
|
|||||||
</button>
|
</button>
|
||||||
{openMenu === "hilfe" && (
|
{openMenu === "hilfe" && (
|
||||||
<div className="menu-dropdown">
|
<div className="menu-dropdown">
|
||||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openLog(); }}>
|
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openLog().catch(() => {}); }}>
|
||||||
<span>Log öffnen</span>
|
<span>Log öffnen</span>
|
||||||
</button>
|
</button>
|
||||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openSessionLog(); }}>
|
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openSessionLog().catch(() => {}); }}>
|
||||||
<span>Session-Log öffnen</span>
|
<span>Session-Log öffnen</span>
|
||||||
</button>
|
</button>
|
||||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void onCheckUpdates(); }}>
|
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void onCheckUpdates(); }}>
|
||||||
@ -2235,7 +2264,7 @@ export function App(): ReactElement {
|
|||||||
onClick={() => {
|
onClick={() => {
|
||||||
if (snapshot.session.paused) {
|
if (snapshot.session.paused) {
|
||||||
setSnapshot((prev) => ({ ...prev, session: { ...prev.session, paused: false } }));
|
setSnapshot((prev) => ({ ...prev, session: { ...prev.session, paused: false } }));
|
||||||
void window.rd.togglePause();
|
void window.rd.togglePause().catch(() => {});
|
||||||
} else {
|
} else {
|
||||||
void onStartDownloads();
|
void onStartDownloads();
|
||||||
}
|
}
|
||||||
@ -2249,7 +2278,7 @@ export function App(): ReactElement {
|
|||||||
disabled={!snapshot.canPause || snapshot.session.paused}
|
disabled={!snapshot.canPause || snapshot.session.paused}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
setSnapshot((prev) => ({ ...prev, session: { ...prev.session, paused: true } }));
|
setSnapshot((prev) => ({ ...prev, session: { ...prev.session, paused: true } }));
|
||||||
void window.rd.togglePause();
|
void window.rd.togglePause().catch(() => {});
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<svg viewBox="0 0 24 24" width="18" height="18"><rect x="5" y="3" width="4.5" height="18" rx="1" fill="currentColor" /><rect x="14.5" y="3" width="4.5" height="18" rx="1" fill="currentColor" /></svg>
|
<svg viewBox="0 0 24 24" width="18" height="18"><rect x="5" y="3" width="4.5" height="18" rx="1" fill="currentColor" /><rect x="14.5" y="3" width="4.5" height="18" rx="1" fill="currentColor" /></svg>
|
||||||
@ -2371,7 +2400,7 @@ export function App(): ReactElement {
|
|||||||
newOrder.splice(toIdx, 0, dragColId);
|
newOrder.splice(toIdx, 0, dragColId);
|
||||||
setColumnOrder(newOrder);
|
setColumnOrder(newOrder);
|
||||||
setDragColId(null);
|
setDragColId(null);
|
||||||
void window.rd.updateSettings({ columnOrder: newOrder });
|
void window.rd.updateSettings({ columnOrder: newOrder }).catch(() => {});
|
||||||
}}
|
}}
|
||||||
onDragEnd={() => { setDragColId(null); setDropTargetCol(null); }}
|
onDragEnd={() => { setDragColId(null); setDropTargetCol(null); }}
|
||||||
onClick={sortCol ? () => {
|
onClick={sortCol ? () => {
|
||||||
@ -2465,7 +2494,7 @@ export function App(): ReactElement {
|
|||||||
: `${historyEntries.length} Paket${historyEntries.length !== 1 ? "e" : ""} im Verlauf`}
|
: `${historyEntries.length} Paket${historyEntries.length !== 1 ? "e" : ""} im Verlauf`}
|
||||||
</span>
|
</span>
|
||||||
{selectedHistoryIds.size > 0 && (
|
{selectedHistoryIds.size > 0 && (
|
||||||
<button className="btn btn-danger" onClick={() => {
|
<button className="btn danger" onClick={() => {
|
||||||
const idSet = new Set(selectedHistoryIds);
|
const idSet = new Set(selectedHistoryIds);
|
||||||
void Promise.all([...idSet].map(id => window.rd.removeHistoryEntry(id))).then(() => {
|
void Promise.all([...idSet].map(id => window.rd.removeHistoryEntry(id))).then(() => {
|
||||||
setHistoryEntries((prev) => prev.filter((e) => !idSet.has(e.id)));
|
setHistoryEntries((prev) => prev.filter((e) => !idSet.has(e.id)));
|
||||||
@ -2476,7 +2505,7 @@ export function App(): ReactElement {
|
|||||||
}}>Ausgewählte entfernen ({selectedHistoryIds.size})</button>
|
}}>Ausgewählte entfernen ({selectedHistoryIds.size})</button>
|
||||||
)}
|
)}
|
||||||
{historyEntries.length > 0 && (
|
{historyEntries.length > 0 && (
|
||||||
<button className="btn btn-danger" onClick={() => { void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); }); }}>Verlauf leeren</button>
|
<button className="btn danger" onClick={() => { void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); }).catch(() => {}); }}>Verlauf leeren</button>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
{historyEntries.length === 0 && <div className="empty">Noch keine abgeschlossenen Pakete im Verlauf.</div>}
|
{historyEntries.length === 0 && <div className="empty">Noch keine abgeschlossenen Pakete im Verlauf.</div>}
|
||||||
@ -2563,7 +2592,7 @@ export function App(): ReactElement {
|
|||||||
<span>{entry.status === "completed" ? "Abgeschlossen" : "Gelöscht"}</span>
|
<span>{entry.status === "completed" ? "Abgeschlossen" : "Gelöscht"}</span>
|
||||||
</div>
|
</div>
|
||||||
<div className="history-actions">
|
<div className="history-actions">
|
||||||
<button className="btn" onClick={() => { void window.rd.removeHistoryEntry(entry.id).then(() => { setHistoryEntries((prev) => prev.filter((e) => e.id !== entry.id)); setSelectedHistoryIds((prev) => { const n = new Set(prev); n.delete(entry.id); return n; }); }); }}>Eintrag entfernen</button>
|
<button className="btn" onClick={() => { void window.rd.removeHistoryEntry(entry.id).then(() => { setHistoryEntries((prev) => prev.filter((e) => e.id !== entry.id)); setSelectedHistoryIds((prev) => { const n = new Set(prev); n.delete(entry.id); return n; }); }).catch(() => {}); }}>Eintrag entfernen</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
@ -2711,6 +2740,10 @@ export function App(): ReactElement {
|
|||||||
<input type="password" value={settingsDraft.bestToken} onChange={(e) => setText("bestToken", e.target.value)} />
|
<input type="password" value={settingsDraft.bestToken} onChange={(e) => setText("bestToken", e.target.value)} />
|
||||||
<label>AllDebrid API Key</label>
|
<label>AllDebrid API Key</label>
|
||||||
<input type="password" value={settingsDraft.allDebridToken} onChange={(e) => setText("allDebridToken", e.target.value)} />
|
<input type="password" value={settingsDraft.allDebridToken} onChange={(e) => setText("allDebridToken", e.target.value)} />
|
||||||
|
<label>DDownload Login</label>
|
||||||
|
<input value={settingsDraft.ddownloadLogin || ""} onChange={(e) => setText("ddownloadLogin", e.target.value)} />
|
||||||
|
<label>DDownload Passwort</label>
|
||||||
|
<input type="password" value={settingsDraft.ddownloadPassword || ""} onChange={(e) => setText("ddownloadPassword", e.target.value)} />
|
||||||
{configuredProviders.length === 0 && (
|
{configuredProviders.length === 0 && (
|
||||||
<div className="hint">Füge mindestens einen Account hinzu, dann erscheint die Hoster-Auswahl.</div>
|
<div className="hint">Füge mindestens einen Account hinzu, dann erscheint die Hoster-Auswahl.</div>
|
||||||
)}
|
)}
|
||||||
@ -2848,6 +2881,12 @@ export function App(): ReactElement {
|
|||||||
<div className="modal-card" onClick={(event) => event.stopPropagation()}>
|
<div className="modal-card" onClick={(event) => event.stopPropagation()}>
|
||||||
<h3>{confirmPrompt.title}</h3>
|
<h3>{confirmPrompt.title}</h3>
|
||||||
<p style={{ whiteSpace: "pre-line" }}>{confirmPrompt.message}</p>
|
<p style={{ whiteSpace: "pre-line" }}>{confirmPrompt.message}</p>
|
||||||
|
{confirmPrompt.details && (
|
||||||
|
<details className="modal-details">
|
||||||
|
<summary>Changelog anzeigen</summary>
|
||||||
|
<pre>{confirmPrompt.details}</pre>
|
||||||
|
</details>
|
||||||
|
)}
|
||||||
<div className="modal-actions">
|
<div className="modal-actions">
|
||||||
<button className="btn" onClick={() => closeConfirmPrompt(false)}>Abbrechen</button>
|
<button className="btn" onClick={() => closeConfirmPrompt(false)}>Abbrechen</button>
|
||||||
<button
|
<button
|
||||||
@ -2944,7 +2983,7 @@ export function App(): ReactElement {
|
|||||||
<span>Links: {Object.keys(snapshot.session.items).length}</span>
|
<span>Links: {Object.keys(snapshot.session.items).length}</span>
|
||||||
<span>Session: {humanSize(snapshot.stats.totalDownloaded)}</span>
|
<span>Session: {humanSize(snapshot.stats.totalDownloaded)}</span>
|
||||||
<span>Gesamt: {humanSize(snapshot.stats.totalDownloadedAllTime)}</span>
|
<span>Gesamt: {humanSize(snapshot.stats.totalDownloadedAllTime)}</span>
|
||||||
<span>Hoster: {configuredProviders.length}</span>
|
<span>Hoster: {totalConfiguredAccounts}</span>
|
||||||
<span>{snapshot.speedText}</span>
|
<span>{snapshot.speedText}</span>
|
||||||
<span>{snapshot.etaText}</span>
|
<span>{snapshot.etaText}</span>
|
||||||
<span className="footer-spacer" />
|
<span className="footer-spacer" />
|
||||||
@ -3004,18 +3043,18 @@ export function App(): ReactElement {
|
|||||||
<button className="ctx-menu-item" onClick={() => {
|
<button className="ctx-menu-item" onClick={() => {
|
||||||
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
||||||
const itemIds = [...selectedIds].filter((id) => { const it = snapshot.session.items[id]; return it && startableStatuses.has(it.status); });
|
const itemIds = [...selectedIds].filter((id) => { const it = snapshot.session.items[id]; return it && startableStatuses.has(it.status); });
|
||||||
if (pkgIds.length > 0) void window.rd.startPackages(pkgIds);
|
if (pkgIds.length > 0) void window.rd.startPackages(pkgIds).catch(() => {});
|
||||||
if (itemIds.length > 0) void window.rd.startItems(itemIds);
|
if (itemIds.length > 0) void window.rd.startItems(itemIds).catch(() => {});
|
||||||
setContextMenu(null);
|
setContextMenu(null);
|
||||||
}}>Ausgewählte Downloads starten{multi ? ` (${selectedIds.size})` : ""}</button>
|
}}>Ausgewählte Downloads starten{multi ? ` (${selectedIds.size})` : ""}</button>
|
||||||
)}
|
)}
|
||||||
<button className="ctx-menu-item" onClick={() => { void window.rd.start(); setContextMenu(null); }}>Alle Downloads starten</button>
|
<button className="ctx-menu-item" onClick={() => { void window.rd.start().catch(() => {}); setContextMenu(null); }}>Alle Downloads starten</button>
|
||||||
<div className="ctx-menu-sep" />
|
<div className="ctx-menu-sep" />
|
||||||
<button className="ctx-menu-item" onClick={() => showLinksPopup(contextMenu.packageId, contextMenu.itemId)}>Linkadressen anzeigen</button>
|
<button className="ctx-menu-item" onClick={() => showLinksPopup(contextMenu.packageId, contextMenu.itemId)}>Linkadressen anzeigen</button>
|
||||||
<div className="ctx-menu-sep" />
|
<div className="ctx-menu-sep" />
|
||||||
{hasPackages && !contextMenu.itemId && (
|
{hasPackages && !contextMenu.itemId && (
|
||||||
<button className="ctx-menu-item" onClick={() => {
|
<button className="ctx-menu-item" onClick={() => {
|
||||||
for (const id of selectedIds) { if (snapshot.session.packages[id]) void window.rd.togglePackage(id); }
|
for (const id of selectedIds) { if (snapshot.session.packages[id]) void window.rd.togglePackage(id).catch(() => {}); }
|
||||||
setContextMenu(null);
|
setContextMenu(null);
|
||||||
}}>
|
}}>
|
||||||
{multi ? `Alle ${selectedIds.size} umschalten` : (snapshot.session.packages[contextMenu.packageId]?.enabled ? "Deaktivieren" : "Aktivieren")}
|
{multi ? `Alle ${selectedIds.size} umschalten` : (snapshot.session.packages[contextMenu.packageId]?.enabled ? "Deaktivieren" : "Aktivieren")}
|
||||||
@ -3040,7 +3079,7 @@ export function App(): ReactElement {
|
|||||||
{hasPackages && !contextMenu.itemId && (
|
{hasPackages && !contextMenu.itemId && (
|
||||||
<button className="ctx-menu-item" onClick={() => {
|
<button className="ctx-menu-item" onClick={() => {
|
||||||
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
||||||
for (const id of pkgIds) void window.rd.resetPackage(id);
|
for (const id of pkgIds) void window.rd.resetPackage(id).catch(() => {});
|
||||||
setContextMenu(null);
|
setContextMenu(null);
|
||||||
}}>Zurücksetzen{multi ? ` (${[...selectedIds].filter((id) => snapshot.session.packages[id]).length})` : ""}</button>
|
}}>Zurücksetzen{multi ? ` (${[...selectedIds].filter((id) => snapshot.session.packages[id]).length})` : ""}</button>
|
||||||
)}
|
)}
|
||||||
@ -3049,7 +3088,7 @@ export function App(): ReactElement {
|
|||||||
const itemIds = multi
|
const itemIds = multi
|
||||||
? [...selectedIds].filter((id) => snapshot.session.items[id])
|
? [...selectedIds].filter((id) => snapshot.session.items[id])
|
||||||
: [contextMenu.itemId!];
|
: [contextMenu.itemId!];
|
||||||
void window.rd.resetItems(itemIds);
|
void window.rd.resetItems(itemIds).catch(() => {});
|
||||||
setContextMenu(null);
|
setContextMenu(null);
|
||||||
}}>Zurücksetzen{multi ? ` (${[...selectedIds].filter((id) => snapshot.session.items[id]).length})` : ""}</button>
|
}}>Zurücksetzen{multi ? ` (${[...selectedIds].filter((id) => snapshot.session.items[id]).length})` : ""}</button>
|
||||||
)}
|
)}
|
||||||
@ -3059,7 +3098,7 @@ export function App(): ReactElement {
|
|||||||
const someCompleted = items.some((item) => item && item.status === "completed");
|
const someCompleted = items.some((item) => item && item.status === "completed");
|
||||||
return (<>
|
return (<>
|
||||||
{someCompleted && (
|
{someCompleted && (
|
||||||
<button className="ctx-menu-item" onClick={() => { void window.rd.extractNow(contextMenu.packageId); setContextMenu(null); }}>Jetzt entpacken</button>
|
<button className="ctx-menu-item" onClick={() => { void window.rd.extractNow(contextMenu.packageId).catch(() => {}); setContextMenu(null); }}>Jetzt entpacken</button>
|
||||||
)}
|
)}
|
||||||
</>);
|
</>);
|
||||||
})()}
|
})()}
|
||||||
@ -3072,7 +3111,7 @@ export function App(): ReactElement {
|
|||||||
const label = p === "high" ? "Hoch" : p === "low" ? "Niedrig" : "Standard";
|
const label = p === "high" ? "Hoch" : p === "low" ? "Niedrig" : "Standard";
|
||||||
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
||||||
const allMatch = pkgIds.every((id) => (snapshot.session.packages[id]?.priority || "normal") === p);
|
const allMatch = pkgIds.every((id) => (snapshot.session.packages[id]?.priority || "normal") === p);
|
||||||
return <button key={p} className={`ctx-menu-item${allMatch ? " ctx-menu-active" : ""}`} onClick={() => { for (const id of pkgIds) void window.rd.setPackagePriority(id, p); setContextMenu(null); }}>{allMatch ? `✓ ${label}` : label}</button>;
|
return <button key={p} className={`ctx-menu-item${allMatch ? " ctx-menu-active" : ""}`} onClick={() => { for (const id of pkgIds) void window.rd.setPackagePriority(id, p).catch(() => {}); setContextMenu(null); }}>{allMatch ? `✓ ${label}` : label}</button>;
|
||||||
})}
|
})}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -3081,7 +3120,7 @@ export function App(): ReactElement {
|
|||||||
const itemIds = [...selectedIds].filter((id) => snapshot.session.items[id]);
|
const itemIds = [...selectedIds].filter((id) => snapshot.session.items[id]);
|
||||||
const skippable = itemIds.filter((id) => { const it = snapshot.session.items[id]; return it && (it.status === "queued" || it.status === "reconnect_wait"); });
|
const skippable = itemIds.filter((id) => { const it = snapshot.session.items[id]; return it && (it.status === "queued" || it.status === "reconnect_wait"); });
|
||||||
if (skippable.length === 0) return null;
|
if (skippable.length === 0) return null;
|
||||||
return <button className="ctx-menu-item" onClick={() => { void window.rd.skipItems(skippable); setContextMenu(null); }}>Überspringen{skippable.length > 1 ? ` (${skippable.length})` : ""}</button>;
|
return <button className="ctx-menu-item" onClick={() => { void window.rd.skipItems(skippable).catch(() => {}); setContextMenu(null); }}>Überspringen{skippable.length > 1 ? ` (${skippable.length})` : ""}</button>;
|
||||||
})()}
|
})()}
|
||||||
{hasPackages && (
|
{hasPackages && (
|
||||||
<button className="ctx-menu-item ctx-danger" onClick={() => {
|
<button className="ctx-menu-item ctx-danger" onClick={() => {
|
||||||
@ -3125,7 +3164,7 @@ export function App(): ReactElement {
|
|||||||
newOrder.splice(insertAt, 0, col);
|
newOrder.splice(insertAt, 0, col);
|
||||||
}
|
}
|
||||||
setColumnOrder(newOrder);
|
setColumnOrder(newOrder);
|
||||||
void window.rd.updateSettings({ columnOrder: newOrder });
|
void window.rd.updateSettings({ columnOrder: newOrder }).catch(() => {});
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{isVisible ? "\u2713 " : "\u2003 "}{def.label}
|
{isVisible ? "\u2713 " : "\u2003 "}{def.label}
|
||||||
@ -3166,7 +3205,7 @@ export function App(): ReactElement {
|
|||||||
)}
|
)}
|
||||||
<div className="ctx-menu-sep" />
|
<div className="ctx-menu-sep" />
|
||||||
<button className="ctx-menu-item ctx-danger" onClick={() => {
|
<button className="ctx-menu-item ctx-danger" onClick={() => {
|
||||||
void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); });
|
void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); }).catch(() => {});
|
||||||
setHistoryCtxMenu(null);
|
setHistoryCtxMenu(null);
|
||||||
}}>Verlauf leeren</button>
|
}}>Verlauf leeren</button>
|
||||||
</div>
|
</div>
|
||||||
@ -3180,8 +3219,8 @@ export function App(): ReactElement {
|
|||||||
<div className="link-popup-list">
|
<div className="link-popup-list">
|
||||||
{linkPopup.links.map((link, i) => (
|
{linkPopup.links.map((link, i) => (
|
||||||
<div key={i} className="link-popup-row">
|
<div key={i} className="link-popup-row">
|
||||||
<span className="link-popup-name link-popup-click" title={`${link.name}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.name); showToast("Name kopiert"); }}>{link.name}</span>
|
<span className="link-popup-name link-popup-click" title={`${link.name}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.name).then(() => showToast("Name kopiert")).catch(() => showToast("Kopieren fehlgeschlagen")); }}>{link.name}</span>
|
||||||
<span className="link-popup-url link-popup-click" title={`${link.url}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.url); showToast("Link kopiert"); }}>{link.url}</span>
|
<span className="link-popup-url link-popup-click" title={`${link.url}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.url).then(() => showToast("Link kopiert")).catch(() => showToast("Kopieren fehlgeschlagen")); }}>{link.url}</span>
|
||||||
</div>
|
</div>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
@ -3189,15 +3228,13 @@ export function App(): ReactElement {
|
|||||||
{linkPopup.isPackage && (
|
{linkPopup.isPackage && (
|
||||||
<button className="btn" onClick={() => {
|
<button className="btn" onClick={() => {
|
||||||
const text = linkPopup.links.map((l) => l.name).join("\n");
|
const text = linkPopup.links.map((l) => l.name).join("\n");
|
||||||
void navigator.clipboard.writeText(text);
|
void navigator.clipboard.writeText(text).then(() => showToast("Alle Namen kopiert")).catch(() => showToast("Kopieren fehlgeschlagen"));
|
||||||
showToast("Alle Namen kopiert");
|
|
||||||
}}>Alle Namen kopieren</button>
|
}}>Alle Namen kopieren</button>
|
||||||
)}
|
)}
|
||||||
{linkPopup.isPackage && (
|
{linkPopup.isPackage && (
|
||||||
<button className="btn" onClick={() => {
|
<button className="btn" onClick={() => {
|
||||||
const text = linkPopup.links.map((l) => l.url).join("\n");
|
const text = linkPopup.links.map((l) => l.url).join("\n");
|
||||||
void navigator.clipboard.writeText(text);
|
void navigator.clipboard.writeText(text).then(() => showToast("Alle Links kopiert")).catch(() => showToast("Kopieren fehlgeschlagen"));
|
||||||
showToast("Alle Links kopiert");
|
|
||||||
}}>Alle Links kopieren</button>
|
}}>Alle Links kopieren</button>
|
||||||
)}
|
)}
|
||||||
<button className="btn" onClick={() => setLinkPopup(null)}>Schließen</button>
|
<button className="btn" onClick={() => setLinkPopup(null)}>Schließen</button>
|
||||||
@ -3344,7 +3381,7 @@ const PackageCard = memo(function PackageCard({ pkg, items, packageSpeed, isFirs
|
|||||||
<span key={col} className={`pkg-col pkg-col-prio${pkg.priority === "high" ? " prio-high" : pkg.priority === "low" ? " prio-low" : ""}`}>{pkg.priority === "high" ? "Hoch" : pkg.priority === "low" ? "Niedrig" : ""}</span>
|
<span key={col} className={`pkg-col pkg-col-prio${pkg.priority === "high" ? " prio-high" : pkg.priority === "low" ? " prio-low" : ""}`}>{pkg.priority === "high" ? "Hoch" : pkg.priority === "low" ? "Niedrig" : ""}</span>
|
||||||
);
|
);
|
||||||
case "status": return (
|
case "status": return (
|
||||||
<span key={col} className="pkg-col pkg-col-status">[{done}/{total}{done === total && total > 0 ? " - Done" : ""}{failed > 0 ? ` · ${failed} Fehler` : ""}{cancelled > 0 ? ` · ${cancelled} abgebr.` : ""}]</span>
|
<span key={col} className="pkg-col pkg-col-status">[{done}/{total}{done === total && total > 0 ? " - Done" : ""}{failed > 0 ? ` · ${failed} Fehler` : ""}{cancelled > 0 ? ` · ${cancelled} abgebr.` : ""}]{pkg.postProcessLabel ? ` - ${pkg.postProcessLabel}` : ""}</span>
|
||||||
);
|
);
|
||||||
case "speed": return (
|
case "speed": return (
|
||||||
<span key={col} className="pkg-col pkg-col-speed">{packageSpeed > 0 ? formatSpeedMbps(packageSpeed) : ""}</span>
|
<span key={col} className="pkg-col pkg-col-speed">{packageSpeed > 0 ? formatSpeedMbps(packageSpeed) : ""}</span>
|
||||||
|
|||||||
@ -1639,6 +1639,7 @@ td {
|
|||||||
border-radius: 12px;
|
border-radius: 12px;
|
||||||
padding: 10px 14px;
|
padding: 10px 14px;
|
||||||
box-shadow: 0 16px 30px rgba(0, 0, 0, 0.35);
|
box-shadow: 0 16px 30px rgba(0, 0, 0, 0.35);
|
||||||
|
z-index: 50;
|
||||||
}
|
}
|
||||||
|
|
||||||
.ctx-menu {
|
.ctx-menu {
|
||||||
@ -1763,6 +1764,8 @@ td {
|
|||||||
|
|
||||||
.modal-card {
|
.modal-card {
|
||||||
width: min(560px, 100%);
|
width: min(560px, 100%);
|
||||||
|
max-height: calc(100vh - 40px);
|
||||||
|
overflow-y: auto;
|
||||||
border: 1px solid var(--border);
|
border: 1px solid var(--border);
|
||||||
border-radius: 14px;
|
border-radius: 14px;
|
||||||
background: linear-gradient(180deg, color-mix(in srgb, var(--card) 98%, transparent), color-mix(in srgb, var(--surface) 98%, transparent));
|
background: linear-gradient(180deg, color-mix(in srgb, var(--card) 98%, transparent), color-mix(in srgb, var(--surface) 98%, transparent));
|
||||||
@ -1781,6 +1784,34 @@ td {
|
|||||||
color: var(--muted);
|
color: var(--muted);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.modal-details {
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 6px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
.modal-details summary {
|
||||||
|
padding: 6px 10px;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--muted);
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
.modal-details summary:hover {
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.modal-details pre {
|
||||||
|
margin: 0;
|
||||||
|
padding: 8px 10px;
|
||||||
|
border-top: 1px solid var(--border);
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.5;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
word-break: break-word;
|
||||||
|
max-height: 260px;
|
||||||
|
overflow-y: auto;
|
||||||
|
color: var(--muted);
|
||||||
|
}
|
||||||
|
|
||||||
.modal-path {
|
.modal-path {
|
||||||
font-size: 12px;
|
font-size: 12px;
|
||||||
word-break: break-all;
|
word-break: break-all;
|
||||||
|
|||||||
@ -14,7 +14,7 @@ export type CleanupMode = "none" | "trash" | "delete";
|
|||||||
export type ConflictMode = "overwrite" | "skip" | "rename" | "ask";
|
export type ConflictMode = "overwrite" | "skip" | "rename" | "ask";
|
||||||
export type SpeedMode = "global" | "per_download";
|
export type SpeedMode = "global" | "per_download";
|
||||||
export type FinishedCleanupPolicy = "never" | "immediate" | "on_start" | "package_done";
|
export type FinishedCleanupPolicy = "never" | "immediate" | "on_start" | "package_done";
|
||||||
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid";
|
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid" | "ddownload";
|
||||||
export type DebridFallbackProvider = DebridProvider | "none";
|
export type DebridFallbackProvider = DebridProvider | "none";
|
||||||
export type AppTheme = "dark" | "light";
|
export type AppTheme = "dark" | "light";
|
||||||
export type PackagePriority = "high" | "normal" | "low";
|
export type PackagePriority = "high" | "normal" | "low";
|
||||||
@ -42,6 +42,8 @@ export interface AppSettings {
|
|||||||
megaPassword: string;
|
megaPassword: string;
|
||||||
bestToken: string;
|
bestToken: string;
|
||||||
allDebridToken: string;
|
allDebridToken: string;
|
||||||
|
ddownloadLogin: string;
|
||||||
|
ddownloadPassword: string;
|
||||||
archivePasswordList: string;
|
archivePasswordList: string;
|
||||||
rememberToken: boolean;
|
rememberToken: boolean;
|
||||||
providerPrimary: DebridProvider;
|
providerPrimary: DebridProvider;
|
||||||
@ -119,6 +121,7 @@ export interface PackageEntry {
|
|||||||
cancelled: boolean;
|
cancelled: boolean;
|
||||||
enabled: boolean;
|
enabled: boolean;
|
||||||
priority: PackagePriority;
|
priority: PackagePriority;
|
||||||
|
postProcessLabel?: string;
|
||||||
createdAt: number;
|
createdAt: number;
|
||||||
updatedAt: number;
|
updatedAt: number;
|
||||||
}
|
}
|
||||||
@ -219,6 +222,7 @@ export interface UpdateCheckResult {
|
|||||||
setupAssetUrl?: string;
|
setupAssetUrl?: string;
|
||||||
setupAssetName?: string;
|
setupAssetName?: string;
|
||||||
setupAssetDigest?: string;
|
setupAssetDigest?: string;
|
||||||
|
releaseNotes?: string;
|
||||||
error?: string;
|
error?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -269,6 +269,7 @@ describe("buildAutoRenameBaseName", () => {
|
|||||||
const result = buildAutoRenameBaseName("Show.S99.720p-4sf", "show.s99e999.720p.mkv");
|
const result = buildAutoRenameBaseName("Show.S99.720p-4sf", "show.s99e999.720p.mkv");
|
||||||
// SCENE_EPISODE_RE allows up to 3-digit episodes and 2-digit seasons
|
// SCENE_EPISODE_RE allows up to 3-digit episodes and 2-digit seasons
|
||||||
expect(result).not.toBeNull();
|
expect(result).not.toBeNull();
|
||||||
|
expect(result!).toContain("S99E999");
|
||||||
});
|
});
|
||||||
|
|
||||||
// Real-world scene release patterns
|
// Real-world scene release patterns
|
||||||
@ -343,6 +344,7 @@ describe("buildAutoRenameBaseName", () => {
|
|||||||
const result = buildAutoRenameBaseName("Show.S01-4sf", "show.s01e01.mkv");
|
const result = buildAutoRenameBaseName("Show.S01-4sf", "show.s01e01.mkv");
|
||||||
// "mkv" should not be treated as part of the filename match
|
// "mkv" should not be treated as part of the filename match
|
||||||
expect(result).not.toBeNull();
|
expect(result).not.toBeNull();
|
||||||
|
expect(result!).toContain("S01E01");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("does not match episode-like patterns in codec strings", () => {
|
it("does not match episode-like patterns in codec strings", () => {
|
||||||
@ -373,6 +375,7 @@ describe("buildAutoRenameBaseName", () => {
|
|||||||
// Extreme edge case - sanitizeFilename trims leading dots
|
// Extreme edge case - sanitizeFilename trims leading dots
|
||||||
expect(result).not.toBeNull();
|
expect(result).not.toBeNull();
|
||||||
expect(result!).toContain("S01E01");
|
expect(result!).toContain("S01E01");
|
||||||
|
expect(result!).toContain("-4sf");
|
||||||
expect(result!).not.toContain(".S01E01.S01E01"); // no duplication
|
expect(result!).not.toContain(".S01E01.S01E01"); // no duplication
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@ -317,7 +317,7 @@ describe("debrid service", () => {
|
|||||||
const controller = new AbortController();
|
const controller = new AbortController();
|
||||||
const abortTimer = setTimeout(() => {
|
const abortTimer = setTimeout(() => {
|
||||||
controller.abort("test");
|
controller.abort("test");
|
||||||
}, 25);
|
}, 200);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await expect(service.unrestrictLink("https://rapidgator.net/file/abort-mega-web", controller.signal)).rejects.toThrow(/aborted/i);
|
await expect(service.unrestrictLink("https://rapidgator.net/file/abort-mega-web", controller.signal)).rejects.toThrow(/aborted/i);
|
||||||
|
|||||||
@ -36,12 +36,8 @@ afterEach(() => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("extractor jvm backend", () => {
|
describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm backend", () => {
|
||||||
it("extracts zip archives through SevenZipJBinding backend", async () => {
|
it("extracts zip archives through SevenZipJBinding backend", async () => {
|
||||||
if (!hasJavaRuntime() || !hasJvmExtractorRuntime()) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
|
|
||||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
||||||
@ -69,11 +65,112 @@ describe("extractor jvm backend", () => {
|
|||||||
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
|
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("respects ask/skip conflict mode in jvm backend", async () => {
|
it("emits progress callbacks with archiveName and percent", async () => {
|
||||||
if (!hasJavaRuntime() || !hasJvmExtractorRuntime()) {
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
|
||||||
|
tempDirs.push(root);
|
||||||
|
const packageDir = path.join(root, "pkg");
|
||||||
|
const targetDir = path.join(root, "out");
|
||||||
|
fs.mkdirSync(packageDir, { recursive: true });
|
||||||
|
|
||||||
|
// Create a ZIP with some content to trigger progress
|
||||||
|
const zipPath = path.join(packageDir, "progress-test.zip");
|
||||||
|
const zip = new AdmZip();
|
||||||
|
zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
|
||||||
|
zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
|
||||||
|
zip.writeZip(zipPath);
|
||||||
|
|
||||||
|
const progressUpdates: Array<{
|
||||||
|
archiveName: string;
|
||||||
|
percent: number;
|
||||||
|
phase: string;
|
||||||
|
archivePercent?: number;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
const result = await extractPackageArchives({
|
||||||
|
packageDir,
|
||||||
|
targetDir,
|
||||||
|
cleanupMode: "none",
|
||||||
|
conflictMode: "overwrite",
|
||||||
|
removeLinks: false,
|
||||||
|
removeSamples: false,
|
||||||
|
onProgress: (update) => {
|
||||||
|
progressUpdates.push({
|
||||||
|
archiveName: update.archiveName,
|
||||||
|
percent: update.percent,
|
||||||
|
phase: update.phase,
|
||||||
|
archivePercent: update.archivePercent,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.extracted).toBe(1);
|
||||||
|
expect(result.failed).toBe(0);
|
||||||
|
|
||||||
|
// Should have at least preparing, extracting, and done phases
|
||||||
|
const phases = new Set(progressUpdates.map((u) => u.phase));
|
||||||
|
expect(phases.has("preparing")).toBe(true);
|
||||||
|
expect(phases.has("extracting")).toBe(true);
|
||||||
|
|
||||||
|
// Extracting phase should include the archive name
|
||||||
|
const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
|
||||||
|
expect(extracting.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// Should end at 100%
|
||||||
|
const lastExtracting = extracting[extracting.length - 1];
|
||||||
|
expect(lastExtracting.archivePercent).toBe(100);
|
||||||
|
|
||||||
|
// Files should exist
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("extracts multiple archives sequentially with progress for each", async () => {
|
||||||
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
|
|
||||||
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
|
||||||
|
tempDirs.push(root);
|
||||||
|
const packageDir = path.join(root, "pkg");
|
||||||
|
const targetDir = path.join(root, "out");
|
||||||
|
fs.mkdirSync(packageDir, { recursive: true });
|
||||||
|
|
||||||
|
// Create two separate ZIP archives
|
||||||
|
const zip1 = new AdmZip();
|
||||||
|
zip1.addFile("episode01.txt", Buffer.from("ep1 content"));
|
||||||
|
zip1.writeZip(path.join(packageDir, "archive1.zip"));
|
||||||
|
|
||||||
|
const zip2 = new AdmZip();
|
||||||
|
zip2.addFile("episode02.txt", Buffer.from("ep2 content"));
|
||||||
|
zip2.writeZip(path.join(packageDir, "archive2.zip"));
|
||||||
|
|
||||||
|
const archiveNames = new Set<string>();
|
||||||
|
|
||||||
|
const result = await extractPackageArchives({
|
||||||
|
packageDir,
|
||||||
|
targetDir,
|
||||||
|
cleanupMode: "none",
|
||||||
|
conflictMode: "overwrite",
|
||||||
|
removeLinks: false,
|
||||||
|
removeSamples: false,
|
||||||
|
onProgress: (update) => {
|
||||||
|
if (update.phase === "extracting" && update.archiveName) {
|
||||||
|
archiveNames.add(update.archiveName);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.extracted).toBe(2);
|
||||||
|
expect(result.failed).toBe(0);
|
||||||
|
// Both archive names should have appeared in progress
|
||||||
|
expect(archiveNames.has("archive1.zip")).toBe(true);
|
||||||
|
expect(archiveNames.has("archive2.zip")).toBe(true);
|
||||||
|
// Both files extracted
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("respects ask/skip conflict mode in jvm backend", async () => {
|
||||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
|
|
||||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
||||||
|
|||||||
@ -15,6 +15,8 @@ import {
|
|||||||
|
|
||||||
const tempDirs: string[] = [];
|
const tempDirs: string[] = [];
|
||||||
const originalExtractBackend = process.env.RD_EXTRACT_BACKEND;
|
const originalExtractBackend = process.env.RD_EXTRACT_BACKEND;
|
||||||
|
const originalStatfs = fs.promises.statfs;
|
||||||
|
const originalZipEntryMemoryLimit = process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
process.env.RD_EXTRACT_BACKEND = "legacy";
|
process.env.RD_EXTRACT_BACKEND = "legacy";
|
||||||
@ -29,6 +31,12 @@ afterEach(() => {
|
|||||||
} else {
|
} else {
|
||||||
process.env.RD_EXTRACT_BACKEND = originalExtractBackend;
|
process.env.RD_EXTRACT_BACKEND = originalExtractBackend;
|
||||||
}
|
}
|
||||||
|
(fs.promises as any).statfs = originalStatfs;
|
||||||
|
if (originalZipEntryMemoryLimit === undefined) {
|
||||||
|
delete process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
||||||
|
} else {
|
||||||
|
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = originalZipEntryMemoryLimit;
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("extractor", () => {
|
describe("extractor", () => {
|
||||||
@ -574,7 +582,6 @@ describe("extractor", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("keeps original ZIP size guard error when external fallback is unavailable", async () => {
|
it("keeps original ZIP size guard error when external fallback is unavailable", async () => {
|
||||||
const previousLimit = process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
|
||||||
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = "8";
|
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = "8";
|
||||||
|
|
||||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
|
||||||
@ -588,32 +595,20 @@ describe("extractor", () => {
|
|||||||
zip.addFile("large.bin", Buffer.alloc(9 * 1024 * 1024, 7));
|
zip.addFile("large.bin", Buffer.alloc(9 * 1024 * 1024, 7));
|
||||||
zip.writeZip(zipPath);
|
zip.writeZip(zipPath);
|
||||||
|
|
||||||
try {
|
const result = await extractPackageArchives({
|
||||||
const result = await extractPackageArchives({
|
packageDir,
|
||||||
packageDir,
|
targetDir,
|
||||||
targetDir,
|
cleanupMode: "none",
|
||||||
cleanupMode: "none",
|
conflictMode: "overwrite",
|
||||||
conflictMode: "overwrite",
|
removeLinks: false,
|
||||||
removeLinks: false,
|
removeSamples: false
|
||||||
removeSamples: false
|
});
|
||||||
});
|
expect(result.extracted).toBe(0);
|
||||||
expect(result.extracted).toBe(0);
|
expect(result.failed).toBe(1);
|
||||||
expect(result.failed).toBe(1);
|
expect(String(result.lastError)).toMatch(/ZIP-Eintrag.*groß/i);
|
||||||
expect(String(result.lastError)).toMatch(/ZIP-Eintrag.*groß/i);
|
|
||||||
} finally {
|
|
||||||
if (previousLimit === undefined) {
|
|
||||||
delete process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
|
||||||
} else {
|
|
||||||
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = previousLimit;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("matches resume-state archive names case-insensitively on Windows", async () => {
|
it.skipIf(process.platform !== "win32")("matches resume-state archive names case-insensitively on Windows", async () => {
|
||||||
if (process.platform !== "win32") {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
|
||||||
tempDirs.push(root);
|
tempDirs.push(root);
|
||||||
const packageDir = path.join(root, "pkg");
|
const packageDir = path.join(root, "pkg");
|
||||||
@ -650,23 +645,18 @@ describe("extractor", () => {
|
|||||||
zip.addFile("test.txt", Buffer.alloc(1024, 0x41));
|
zip.addFile("test.txt", Buffer.alloc(1024, 0x41));
|
||||||
zip.writeZip(path.join(packageDir, "test.zip"));
|
zip.writeZip(path.join(packageDir, "test.zip"));
|
||||||
|
|
||||||
const originalStatfs = fs.promises.statfs;
|
|
||||||
(fs.promises as any).statfs = async () => ({ bfree: 1, bsize: 1 });
|
(fs.promises as any).statfs = async () => ({ bfree: 1, bsize: 1 });
|
||||||
|
|
||||||
try {
|
await expect(
|
||||||
await expect(
|
extractPackageArchives({
|
||||||
extractPackageArchives({
|
packageDir,
|
||||||
packageDir,
|
targetDir,
|
||||||
targetDir,
|
cleanupMode: "none" as any,
|
||||||
cleanupMode: "none" as any,
|
conflictMode: "overwrite" as any,
|
||||||
conflictMode: "overwrite" as any,
|
removeLinks: false,
|
||||||
removeLinks: false,
|
removeSamples: false,
|
||||||
removeSamples: false,
|
})
|
||||||
})
|
).rejects.toThrow(/Nicht genug Speicherplatz/);
|
||||||
).rejects.toThrow(/Nicht genug Speicherplatz/);
|
|
||||||
} finally {
|
|
||||||
(fs.promises as any).statfs = originalStatfs;
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("proceeds when disk space is sufficient", async () => {
|
it("proceeds when disk space is sufficient", async () => {
|
||||||
|
|||||||
@ -166,7 +166,7 @@ describe("mega-web-fallback", () => {
|
|||||||
const controller = new AbortController();
|
const controller = new AbortController();
|
||||||
const timer = setTimeout(() => {
|
const timer = setTimeout(() => {
|
||||||
controller.abort("test");
|
controller.abort("test");
|
||||||
}, 30);
|
}, 200);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await expect(fallback.unrestrict("https://mega.debrid/link2", controller.signal)).rejects.toThrow(/aborted/i);
|
await expect(fallback.unrestrict("https://mega.debrid/link2", controller.signal)).rejects.toThrow(/aborted/i);
|
||||||
|
|||||||
188
tests/resolve-archive-items.test.ts
Normal file
188
tests/resolve-archive-items.test.ts
Normal file
@ -0,0 +1,188 @@
|
|||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
|
||||||
|
|
||||||
|
type MinimalItem = {
|
||||||
|
targetPath?: string;
|
||||||
|
fileName?: string;
|
||||||
|
[key: string]: unknown;
|
||||||
|
};
|
||||||
|
|
||||||
|
function makeItems(names: string[]): MinimalItem[] {
|
||||||
|
return names.map((name) => ({
|
||||||
|
targetPath: `C:\\Downloads\\Package\\${name}`,
|
||||||
|
fileName: name,
|
||||||
|
id: name,
|
||||||
|
status: "completed",
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("resolveArchiveItemsFromList", () => {
|
||||||
|
// ── Multipart RAR (.partN.rar) ──
|
||||||
|
|
||||||
|
it("matches multipart .part1.rar archives", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Movie.part1.rar",
|
||||||
|
"Movie.part2.rar",
|
||||||
|
"Movie.part3.rar",
|
||||||
|
"Other.rar",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||||
|
expect(result).toHaveLength(3);
|
||||||
|
expect(result.map((i: any) => i.fileName)).toEqual([
|
||||||
|
"Movie.part1.rar",
|
||||||
|
"Movie.part2.rar",
|
||||||
|
"Movie.part3.rar",
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("matches multipart .part01.rar archives (zero-padded)", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Film.part01.rar",
|
||||||
|
"Film.part02.rar",
|
||||||
|
"Film.part10.rar",
|
||||||
|
"Unrelated.zip",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
|
||||||
|
expect(result).toHaveLength(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Old-style RAR (.rar + .r00, .r01, etc.) ──
|
||||||
|
|
||||||
|
it("matches old-style .rar + .rNN volumes", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Archive.rar",
|
||||||
|
"Archive.r00",
|
||||||
|
"Archive.r01",
|
||||||
|
"Archive.r02",
|
||||||
|
"Other.zip",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||||
|
expect(result).toHaveLength(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Single RAR ──
|
||||||
|
|
||||||
|
it("matches a single .rar file", () => {
|
||||||
|
const items = makeItems(["SingleFile.rar", "Other.mkv"]);
|
||||||
|
const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
expect((result[0] as any).fileName).toBe("SingleFile.rar");
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Split ZIP ──
|
||||||
|
|
||||||
|
it("matches split .zip.NNN files", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Data.zip",
|
||||||
|
"Data.zip.001",
|
||||||
|
"Data.zip.002",
|
||||||
|
"Data.zip.003",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
|
||||||
|
expect(result).toHaveLength(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Split 7z ──
|
||||||
|
|
||||||
|
it("matches split .7z.NNN files", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Backup.7z.001",
|
||||||
|
"Backup.7z.002",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Generic .NNN splits ──
|
||||||
|
|
||||||
|
it("matches generic .NNN split files", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"video.001",
|
||||||
|
"video.002",
|
||||||
|
"video.003",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("video.001", items as any);
|
||||||
|
expect(result).toHaveLength(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Exact filename match ──
|
||||||
|
|
||||||
|
it("matches a single .zip by exact name", () => {
|
||||||
|
const items = makeItems(["myarchive.zip", "other.rar"]);
|
||||||
|
const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
expect((result[0] as any).fileName).toBe("myarchive.zip");
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Case insensitivity ──
|
||||||
|
|
||||||
|
it("matches case-insensitively", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"MOVIE.PART1.RAR",
|
||||||
|
"MOVIE.PART2.RAR",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Stem-based fallback ──
|
||||||
|
|
||||||
|
it("uses stem-based fallback when exact patterns fail", () => {
|
||||||
|
// Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
|
||||||
|
// but the disk file is "Movie.part1.rar"
|
||||||
|
const items = makeItems([
|
||||||
|
"Movie.rar",
|
||||||
|
]);
|
||||||
|
// The archive on disk is "Movie.part1.rar" but there's no item matching the
|
||||||
|
// .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
|
||||||
|
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||||
|
// stem fallback: "movie" starts with "movie" and ends with .rar
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Single item fallback ──
|
||||||
|
|
||||||
|
it("returns single archive item when no pattern matches", () => {
|
||||||
|
const items = makeItems(["totally-different-name.rar"]);
|
||||||
|
const result = resolveArchiveItemsFromList("Original.rar", items as any);
|
||||||
|
// Single item in list with archive extension → return it
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Empty when no match ──
|
||||||
|
|
||||||
|
it("returns empty when items have no archive extensions", () => {
|
||||||
|
const items = makeItems(["video.mkv", "subtitle.srt"]);
|
||||||
|
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||||
|
expect(result).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Items without targetPath ──
|
||||||
|
|
||||||
|
it("falls back to fileName when targetPath is missing", () => {
|
||||||
|
const items = [
|
||||||
|
{ fileName: "Movie.part1.rar", id: "1", status: "completed" },
|
||||||
|
{ fileName: "Movie.part2.rar", id: "2", status: "completed" },
|
||||||
|
];
|
||||||
|
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Multiple archives, should not cross-match ──
|
||||||
|
|
||||||
|
it("does not cross-match different archive groups", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Episode.S01E01.part1.rar",
|
||||||
|
"Episode.S01E01.part2.rar",
|
||||||
|
"Episode.S01E02.part1.rar",
|
||||||
|
"Episode.S01E02.part2.rar",
|
||||||
|
]);
|
||||||
|
const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
|
||||||
|
expect(result1).toHaveLength(2);
|
||||||
|
expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);
|
||||||
|
|
||||||
|
const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
|
||||||
|
expect(result2).toHaveLength(2);
|
||||||
|
expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
@ -153,7 +153,7 @@ async function main(): Promise<void> {
|
|||||||
createStoragePaths(path.join(tempRoot, "state-pause"))
|
createStoragePaths(path.join(tempRoot, "state-pause"))
|
||||||
);
|
);
|
||||||
manager2.addPackages([{ name: "pause", links: ["https://dummy/slow"] }]);
|
manager2.addPackages([{ name: "pause", links: ["https://dummy/slow"] }]);
|
||||||
manager2.start();
|
await manager2.start();
|
||||||
await new Promise((resolve) => setTimeout(resolve, 120));
|
await new Promise((resolve) => setTimeout(resolve, 120));
|
||||||
const paused = manager2.togglePause();
|
const paused = manager2.togglePause();
|
||||||
assert(paused, "Pause konnte nicht aktiviert werden");
|
assert(paused, "Pause konnte nicht aktiviert werden");
|
||||||
|
|||||||
@ -8,6 +8,8 @@ import { setLogListener } from "../src/main/logger";
|
|||||||
const tempDirs: string[] = [];
|
const tempDirs: string[] = [];
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
|
// Ensure session log is shut down between tests
|
||||||
|
shutdownSessionLog();
|
||||||
// Ensure listener is cleared between tests
|
// Ensure listener is cleared between tests
|
||||||
setLogListener(null);
|
setLogListener(null);
|
||||||
for (const dir of tempDirs.splice(0)) {
|
for (const dir of tempDirs.splice(0)) {
|
||||||
@ -45,7 +47,7 @@ describe("session-log", () => {
|
|||||||
logger.info("Test-Nachricht für Session-Log");
|
logger.info("Test-Nachricht für Session-Log");
|
||||||
|
|
||||||
// Wait for flush (200ms interval + margin)
|
// Wait for flush (200ms interval + margin)
|
||||||
await new Promise((resolve) => setTimeout(resolve, 350));
|
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||||
|
|
||||||
const content = fs.readFileSync(logPath, "utf8");
|
const content = fs.readFileSync(logPath, "utf8");
|
||||||
expect(content).toContain("Test-Nachricht für Session-Log");
|
expect(content).toContain("Test-Nachricht für Session-Log");
|
||||||
@ -79,7 +81,7 @@ describe("session-log", () => {
|
|||||||
const { logger } = await import("../src/main/logger");
|
const { logger } = await import("../src/main/logger");
|
||||||
logger.info("Nach-Shutdown-Nachricht");
|
logger.info("Nach-Shutdown-Nachricht");
|
||||||
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 350));
|
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||||
|
|
||||||
const content = fs.readFileSync(logPath, "utf8");
|
const content = fs.readFileSync(logPath, "utf8");
|
||||||
expect(content).not.toContain("Nach-Shutdown-Nachricht");
|
expect(content).not.toContain("Nach-Shutdown-Nachricht");
|
||||||
@ -137,7 +139,7 @@ describe("session-log", () => {
|
|||||||
shutdownSessionLog();
|
shutdownSessionLog();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("multiple sessions create different files", () => {
|
it("multiple sessions create different files", async () => {
|
||||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||||
tempDirs.push(baseDir);
|
tempDirs.push(baseDir);
|
||||||
|
|
||||||
@ -146,10 +148,7 @@ describe("session-log", () => {
|
|||||||
shutdownSessionLog();
|
shutdownSessionLog();
|
||||||
|
|
||||||
// Small delay to ensure different timestamp
|
// Small delay to ensure different timestamp
|
||||||
const start = Date.now();
|
await new Promise((resolve) => setTimeout(resolve, 1100));
|
||||||
while (Date.now() - start < 1100) {
|
|
||||||
// busy-wait for 1.1 seconds to get different second in filename
|
|
||||||
}
|
|
||||||
|
|
||||||
initSessionLog(baseDir);
|
initSessionLog(baseDir);
|
||||||
const path2 = getSessionLogPath();
|
const path2 = getSessionLogPath();
|
||||||
|
|||||||
@ -22,7 +22,7 @@ afterEach(() => {
|
|||||||
|
|
||||||
describe("update", () => {
|
describe("update", () => {
|
||||||
it("normalizes update repo input", () => {
|
it("normalizes update repo input", () => {
|
||||||
expect(normalizeUpdateRepo("")).toBe("Sucukdeluxe/real-debrid-downloader");
|
expect(normalizeUpdateRepo("")).toBe("Administrator/real-debrid-downloader");
|
||||||
expect(normalizeUpdateRepo("owner/repo")).toBe("owner/repo");
|
expect(normalizeUpdateRepo("owner/repo")).toBe("owner/repo");
|
||||||
expect(normalizeUpdateRepo("https://codeberg.org/owner/repo")).toBe("owner/repo");
|
expect(normalizeUpdateRepo("https://codeberg.org/owner/repo")).toBe("owner/repo");
|
||||||
expect(normalizeUpdateRepo("https://www.codeberg.org/owner/repo")).toBe("owner/repo");
|
expect(normalizeUpdateRepo("https://www.codeberg.org/owner/repo")).toBe("owner/repo");
|
||||||
@ -31,14 +31,14 @@ describe("update", () => {
|
|||||||
expect(normalizeUpdateRepo("git@codeberg.org:owner/repo.git")).toBe("owner/repo");
|
expect(normalizeUpdateRepo("git@codeberg.org:owner/repo.git")).toBe("owner/repo");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("uses normalized repo slug for Codeberg API requests", async () => {
|
it("uses normalized repo slug for API requests", async () => {
|
||||||
let requestedUrl = "";
|
let requestedUrl = "";
|
||||||
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
|
||||||
requestedUrl = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
requestedUrl = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
|
||||||
return new Response(
|
return new Response(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
tag_name: `v${APP_VERSION}`,
|
tag_name: `v${APP_VERSION}`,
|
||||||
html_url: "https://codeberg.org/owner/repo/releases/tag/v1.0.0",
|
html_url: "https://git.24-music.de/owner/repo/releases/tag/v1.0.0",
|
||||||
assets: []
|
assets: []
|
||||||
}),
|
}),
|
||||||
{
|
{
|
||||||
@ -48,8 +48,8 @@ describe("update", () => {
|
|||||||
);
|
);
|
||||||
}) as typeof fetch;
|
}) as typeof fetch;
|
||||||
|
|
||||||
const result = await checkGitHubUpdate("https://codeberg.org/owner/repo/releases");
|
const result = await checkGitHubUpdate("https://git.24-music.de/owner/repo/releases");
|
||||||
expect(requestedUrl).toBe("https://codeberg.org/api/v1/repos/owner/repo/releases/latest");
|
expect(requestedUrl).toBe("https://git.24-music.de/api/v1/repos/owner/repo/releases/latest");
|
||||||
expect(result.currentVersion).toBe(APP_VERSION);
|
expect(result.currentVersion).toBe(APP_VERSION);
|
||||||
expect(result.latestVersion).toBe(APP_VERSION);
|
expect(result.latestVersion).toBe(APP_VERSION);
|
||||||
expect(result.updateAvailable).toBe(false);
|
expect(result.updateAvailable).toBe(false);
|
||||||
@ -484,14 +484,14 @@ describe("normalizeUpdateRepo extended", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("returns default for malformed inputs", () => {
|
it("returns default for malformed inputs", () => {
|
||||||
expect(normalizeUpdateRepo("just-one-part")).toBe("Sucukdeluxe/real-debrid-downloader");
|
expect(normalizeUpdateRepo("just-one-part")).toBe("Administrator/real-debrid-downloader");
|
||||||
expect(normalizeUpdateRepo(" ")).toBe("Sucukdeluxe/real-debrid-downloader");
|
expect(normalizeUpdateRepo(" ")).toBe("Administrator/real-debrid-downloader");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("rejects traversal-like owner or repo segments", () => {
|
it("rejects traversal-like owner or repo segments", () => {
|
||||||
expect(normalizeUpdateRepo("../owner/repo")).toBe("Sucukdeluxe/real-debrid-downloader");
|
expect(normalizeUpdateRepo("../owner/repo")).toBe("Administrator/real-debrid-downloader");
|
||||||
expect(normalizeUpdateRepo("owner/../repo")).toBe("Sucukdeluxe/real-debrid-downloader");
|
expect(normalizeUpdateRepo("owner/../repo")).toBe("Administrator/real-debrid-downloader");
|
||||||
expect(normalizeUpdateRepo("https://codeberg.org/owner/../../repo")).toBe("Sucukdeluxe/real-debrid-downloader");
|
expect(normalizeUpdateRepo("https://codeberg.org/owner/../../repo")).toBe("Administrator/real-debrid-downloader");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("handles www prefix", () => {
|
it("handles www prefix", () => {
|
||||||
|
|||||||
@ -12,5 +12,5 @@
|
|||||||
"isolatedModules": true,
|
"isolatedModules": true,
|
||||||
"types": ["node", "vite/client"]
|
"types": ["node", "vite/client"]
|
||||||
},
|
},
|
||||||
"include": ["src", "tests", "vite.config.ts"]
|
"include": ["src", "tests", "vite.config.mts"]
|
||||||
}
|
}
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user