Compare commits
No commits in common. "main" and "v1.6.36" have entirely different histories.
31
README.md
31
README.md
@ -160,7 +160,7 @@ The app stores runtime files in Electron's `userData` directory, including:
|
||||
## Troubleshooting
|
||||
|
||||
- Download does not start: verify token and selected provider in Settings.
|
||||
- Extraction fails: check archive passwords and native extractor installation (7-Zip/WinRAR). Optional JVM extractor can be forced with `RD_EXTRACT_BACKEND=jvm`.
|
||||
- Extraction fails: check archive passwords, JVM runtime (`resources/extractor-jvm`), or force legacy mode with `RD_EXTRACT_BACKEND=legacy`.
|
||||
- Very slow downloads: check active speed limit and bandwidth schedules.
|
||||
- Unexpected interruptions: enable reconnect and fallback providers.
|
||||
- Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically.
|
||||
@ -169,35 +169,6 @@ The app stores runtime files in Electron's `userData` directory, including:
|
||||
|
||||
Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases).
|
||||
|
||||
### v1.6.61 (2026-03-05)
|
||||
|
||||
- Fixed leftover empty package folders in `Downloader Unfertig` after successful extraction.
|
||||
- Resume marker files (`.rd_extract_progress*.json`) are now treated as ignorable for empty-folder cleanup.
|
||||
- Deferred post-processing now clears resume markers before running empty-directory removal.
|
||||
|
||||
### v1.6.60 (2026-03-05)
|
||||
|
||||
- Added package-scoped password cache for extraction: once the first archive in a package is solved, following archives in the same package reuse that password first.
|
||||
- Kept fallback behavior intact (`""` and other candidates are still tested), but moved empty-password probing behind the learned password to reduce per-archive delays.
|
||||
- Added cache invalidation on real `wrong_password` failures so stale passwords are automatically discarded.
|
||||
|
||||
### v1.6.59 (2026-03-05)
|
||||
|
||||
- Switched default extraction backend to native tools (`legacy`) for more stable archive-to-archive flow.
|
||||
- Prioritized 7-Zip as primary native extractor, with WinRAR/UnRAR as fallback.
|
||||
- JVM extractor remains available as opt-in via `RD_EXTRACT_BACKEND=jvm`.
|
||||
|
||||
### v1.6.58 (2026-03-05)
|
||||
|
||||
- Fixed extraction progress oscillation (`1% -> 100% -> 1%` loops) during password retries.
|
||||
- Kept strict archive completion logic, but normalized in-progress archive percent to avoid false visual done states before real completion.
|
||||
|
||||
### v1.6.57 (2026-03-05)
|
||||
|
||||
- Fixed extraction flow so archives are marked done only on real completion, not on temporary `100%` progress spikes.
|
||||
- Improved password handling: after the first successful archive, the discovered password is prioritized for subsequent archives.
|
||||
- Fixed progress parsing for password retries (reset/restart handling), reducing visible and real gaps between archive extractions.
|
||||
|
||||
## License
|
||||
|
||||
MIT - see `LICENSE`.
|
||||
|
||||
75
_upload_release.mjs
Normal file
75
_upload_release.mjs
Normal file
@ -0,0 +1,75 @@
|
||||
import fs from "node:fs";
import path from "node:path";
import { spawnSync } from "node:child_process";

/**
 * Parse the `key=value` output of `git credential fill` into a Map.
 *
 * Splits each line on the FIRST "=" only, so values that themselves
 * contain "=" (API tokens and base64-padded secrets frequently do) are
 * preserved intact. The previous `split("=", 2)` silently truncated
 * such values.
 *
 * @param {string} output - raw stdout of `git credential fill`
 * @returns {Map<string, string>} credential attributes (protocol, host, username, password, ...)
 */
function parseCredentialOutput(output) {
  const creds = new Map();
  for (const line of output.split(/\r?\n/)) {
    const sep = line.indexOf("=");
    if (sep > 0) {
      creds.set(line.slice(0, sep), line.slice(sep + 1));
    }
  }
  return creds;
}

// Ask git's credential helper for the stored codeberg.org credentials.
const credResult = spawnSync("git", ["credential", "fill"], {
  input: "protocol=https\nhost=codeberg.org\n\n",
  encoding: "utf8",
  stdio: ["pipe", "pipe", "pipe"]
});
// stdout is null when git itself could not be spawned; fall back to ""
// instead of crashing on .split with an unrelated TypeError.
const creds = parseCredentialOutput(credResult.stdout ?? "");
if (!creds.has("username") || !creds.has("password")) {
  // Warn early — every API call below would otherwise fail with 401.
  console.error("Warning: git credential fill returned no username/password for codeberg.org");
}
const auth = "Basic " + Buffer.from((creds.get("username") ?? "") + ":" + (creds.get("password") ?? "")).toString("base64");
const owner = "Sucukdeluxe";
const repo = "real-debrid-downloader";
const tag = "v1.5.35";
const baseApi = `https://codeberg.org/api/v1/repos/${owner}/${repo}`;
|
||||
|
||||
/**
 * Create the Gitea/Forgejo release for `tag` on codeberg.org and upload
 * the build artifacts from ./release as release assets.
 *
 * Exits the process with code 1 when release creation fails, or when any
 * asset is missing or fails to upload for a reason other than
 * "already exists" (HTTP 409/422). Previously the script printed "Done!"
 * and exited 0 even when uploads had failed, and a missing artifact file
 * aborted the whole run with an unhandled readFileSync error.
 */
async function main() {
  // Make sure the releases feature is enabled on the repository.
  // Best effort: a failure here surfaces as an error on the next call.
  await fetch(baseApi, {
    method: "PATCH",
    headers: { Authorization: auth, "Content-Type": "application/json" },
    body: JSON.stringify({ has_releases: true })
  });

  const createRes = await fetch(`${baseApi}/releases`, {
    method: "POST",
    headers: { Authorization: auth, "Content-Type": "application/json", Accept: "application/json" },
    body: JSON.stringify({
      tag_name: tag,
      target_commitish: "main",
      name: tag,
      body: "- Fix: Fortschritt zeigt jetzt kombinierten Wert (Download + Entpacken)\n- Fix: Pausieren zeigt nicht mehr 'Warte auf Daten'\n- Pixel-perfekte Dual-Layer Progress-Bar Texte (clip-path)",
      draft: false,
      prerelease: false
    })
  });
  const release = await createRes.json();
  if (!createRes.ok) {
    console.error("Create failed:", JSON.stringify(release));
    process.exit(1);
  }
  console.log("Release created:", release.id);

  const files = [
    "Real-Debrid-Downloader Setup 1.5.35.exe",
    "Real-Debrid-Downloader 1.5.35.exe",
    "latest.yml",
    "Real-Debrid-Downloader Setup 1.5.35.exe.blockmap"
  ];
  let failures = 0;
  for (const f of files) {
    const filePath = path.join("release", f);
    if (!fs.existsSync(filePath)) {
      // Keep uploading the remaining assets instead of crashing mid-run.
      console.error("Missing file, skipped:", filePath);
      failures += 1;
      continue;
    }
    const data = fs.readFileSync(filePath);
    const uploadUrl = `${baseApi}/releases/${release.id}/assets?name=${encodeURIComponent(f)}`;
    const res = await fetch(uploadUrl, {
      method: "POST",
      headers: { Authorization: auth, "Content-Type": "application/octet-stream" },
      body: data
    });
    if (res.ok) {
      console.log("Uploaded:", f);
    } else if (res.status === 409 || res.status === 422) {
      // Asset already exists from a previous run — not an error.
      console.log("Skipped existing:", f);
    } else {
      // Include the response body so API errors are diagnosable.
      const detail = await res.text().catch(() => "");
      console.error("Upload failed for", f, ":", res.status, detail);
      failures += 1;
    }
  }
  if (failures > 0) {
    // Fail the script so callers/CI notice an incomplete release.
    process.exit(1);
  }
  console.log(`Done! https://codeberg.org/${owner}/${repo}/releases/tag/${tag}`);
}

main().catch(e => { console.error(e.message); process.exit(1); });
|
||||
@ -25,11 +25,11 @@ AppPublisher=Sucukdeluxe
|
||||
DefaultDirName={autopf}\{#MyAppName}
|
||||
DefaultGroupName={#MyAppName}
|
||||
OutputDir={#MyOutputDir}
|
||||
OutputBaseFilename=Real-Debrid-Downloader Setup {#MyAppVersion}
|
||||
OutputBaseFilename=Real-Debrid-Downloader-Setup-{#MyAppVersion}
|
||||
Compression=lzma
|
||||
SolidCompression=yes
|
||||
WizardStyle=modern
|
||||
PrivilegesRequired=lowest
|
||||
PrivilegesRequired=admin
|
||||
ArchitecturesInstallIn64BitMode=x64compatible
|
||||
UninstallDisplayIcon={app}\{#MyAppExeName}
|
||||
SetupIconFile={#MyIconFile}
|
||||
@ -39,8 +39,8 @@ Name: "german"; MessagesFile: "compiler:Languages\German.isl"
|
||||
Name: "english"; MessagesFile: "compiler:Default.isl"
|
||||
|
||||
[Files]
|
||||
Source: "{#MySourceDir}\\*"; DestDir: "{app}"; Flags: recursesubdirs createallsubdirs
|
||||
Source: "{#MyIconFile}"; DestDir: "{app}"; DestName: "app_icon.ico"
|
||||
Source: "{#MySourceDir}\\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "{#MyIconFile}"; DestDir: "{app}"; DestName: "app_icon.ico"; Flags: ignoreversion
|
||||
|
||||
[Icons]
|
||||
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app_icon.ico"
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "real-debrid-downloader",
|
||||
"version": "1.6.66",
|
||||
"version": "1.6.36",
|
||||
"description": "Desktop downloader",
|
||||
"main": "build/main/main/main.js",
|
||||
"author": "Sucukdeluxe",
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -3,9 +3,7 @@ package com.sucukdeluxe.extractor;
|
||||
import net.lingala.zip4j.ZipFile;
|
||||
import net.lingala.zip4j.exception.ZipException;
|
||||
import net.lingala.zip4j.model.FileHeader;
|
||||
import net.sf.sevenzipjbinding.ExtractAskMode;
|
||||
import net.sf.sevenzipjbinding.ExtractOperationResult;
|
||||
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
|
||||
import net.sf.sevenzipjbinding.IArchiveOpenCallback;
|
||||
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
|
||||
import net.sf.sevenzipjbinding.IInArchive;
|
||||
@ -28,7 +26,6 @@ import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Base64;
|
||||
import java.util.HashMap;
|
||||
@ -45,18 +42,12 @@ public final class JBindExtractorMain {
|
||||
private static final Pattern NUMBERED_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.zip\\.\\d{3}$");
|
||||
private static final Pattern OLD_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.z\\d{2,3}$");
|
||||
private static final Pattern SEVEN_ZIP_SPLIT_RE = Pattern.compile("(?i).*\\.7z\\.001$");
|
||||
private static final Pattern DIGIT_SUFFIX_RE = Pattern.compile("\\d{2,3}");
|
||||
private static final Pattern WINDOWS_SPECIAL_CHARS_RE = Pattern.compile("[:<>*?\"\\|]");
|
||||
private static volatile boolean sevenZipInitialized = false;
|
||||
|
||||
private JBindExtractorMain() {
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
if (args.length == 1 && "--daemon".equals(args[0])) {
|
||||
runDaemon();
|
||||
return;
|
||||
}
|
||||
int exit = 1;
|
||||
try {
|
||||
ExtractionRequest request = parseArgs(args);
|
||||
@ -71,127 +62,6 @@ public final class JBindExtractorMain {
|
||||
System.exit(exit);
|
||||
}
|
||||
|
||||
/**
 * Long-running daemon mode: reads one extraction request per line (a
 * single-line JSON object) from stdin and runs it, so the JVM does not
 * have to be restarted for every archive.
 *
 * Line-oriented protocol over stdout:
 *   - prints "RD_DAEMON_READY" once on startup;
 *   - after each request prints "RD_REQUEST_DONE <code>", where <code>
 *     is the value returned by runExtraction, 2 on argument errors
 *     (IllegalArgumentException), and 1 on any other failure.
 *
 * Returns when stdin is closed, i.e. when the parent process exits.
 */
private static void runDaemon() {
    System.out.println("RD_DAEMON_READY");
    System.out.flush();
    java.io.BufferedReader reader = new java.io.BufferedReader(
            new java.io.InputStreamReader(System.in, StandardCharsets.UTF_8));
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            if (line.isEmpty()) {
                // Ignore blank lines between requests.
                continue;
            }
            int exitCode = 1;
            try {
                ExtractionRequest request = parseDaemonRequest(line);
                exitCode = runExtraction(request);
            } catch (IllegalArgumentException error) {
                emitError("Argumentfehler: " + safeMessage(error));
                exitCode = 2;
            } catch (Throwable error) {
                emitError(safeMessage(error));
                exitCode = 1;
            }
            System.out.println("RD_REQUEST_DONE " + exitCode);
            System.out.flush();
        }
    } catch (IOException ignored) {
        // stdin closed — parent process exited
    }
}
|
||||
|
||||
/**
 * Parses one daemon request line into an ExtractionRequest.
 *
 * Minimal hand-rolled JSON parsing, no external dependencies.
 * Expected format:
 * {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
 *
 * @param jsonLine one complete JSON object on a single line
 * @return the populated request
 * @throws IllegalArgumentException if the archive file does not exist
 *                                  (or is not a regular file) or the target is missing
 */
private static ExtractionRequest parseDaemonRequest(String jsonLine) {
    // Minimal JSON parsing without external dependencies.
    // Expected format: {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
    ExtractionRequest request = new ExtractionRequest();
    request.archiveFile = new File(extractJsonString(jsonLine, "archive"));
    request.targetDir = new File(extractJsonString(jsonLine, "target"));
    String conflict = extractJsonString(jsonLine, "conflict");
    if (conflict.length() > 0) {
        request.conflictMode = ConflictMode.fromValue(conflict);
    }
    String backend = extractJsonString(jsonLine, "backend");
    if (backend.length() > 0) {
        request.backend = Backend.fromValue(backend);
    }
    // Parse passwords array
    int pwStart = jsonLine.indexOf("\"passwords\"");
    if (pwStart >= 0) {
        int arrStart = jsonLine.indexOf('[', pwStart);
        // NOTE(review): indexOf(']') stops at the first ']' even if it sits
        // inside a quoted password string — a password containing ']' would
        // truncate the array scan. Confirm whether such passwords occur.
        int arrEnd = jsonLine.indexOf(']', arrStart);
        if (arrStart >= 0 && arrEnd > arrStart) {
            String arrContent = jsonLine.substring(arrStart + 1, arrEnd);
            int idx = 0;
            // Walk the quoted strings inside the array; escaped quotes are
            // handled by findClosingQuote.
            while (idx < arrContent.length()) {
                int qStart = arrContent.indexOf('"', idx);
                if (qStart < 0) break;
                int qEnd = findClosingQuote(arrContent, qStart + 1);
                if (qEnd < 0) break;
                request.passwords.add(unescapeJsonString(arrContent.substring(qStart + 1, qEnd)));
                idx = qEnd + 1;
            }
        }
    }
    if (request.archiveFile == null || !request.archiveFile.exists() || !request.archiveFile.isFile()) {
        throw new IllegalArgumentException("Archiv nicht gefunden: " +
                (request.archiveFile == null ? "null" : request.archiveFile.getAbsolutePath()));
    }
    if (request.targetDir == null) {
        throw new IllegalArgumentException("--target fehlt");
    }
    return request;
}
|
||||
|
||||
/**
 * Returns the string value for {@code key} from a flat JSON object
 * string, or "" when the key or a quoted value cannot be located.
 *
 * Only string-typed values are supported; escapes inside the value are
 * resolved via findClosingQuote/unescapeJsonString.
 *
 * @param json the JSON object text to scan
 * @param key  the property name (matched literally as "key")
 * @return the unescaped value, or "" if absent/malformed
 */
private static String extractJsonString(String json, String key) {
    String search = "\"" + key + "\"";
    int keyIdx = json.indexOf(search);
    if (keyIdx < 0) return "";
    int colonIdx = json.indexOf(':', keyIdx + search.length());
    if (colonIdx < 0) return "";
    // First quote after the colon opens the value string.
    int qStart = json.indexOf('"', colonIdx + 1);
    if (qStart < 0) return "";
    int qEnd = findClosingQuote(json, qStart + 1);
    if (qEnd < 0) return "";
    return unescapeJsonString(json.substring(qStart + 1, qEnd));
}
|
||||
|
||||
/**
 * Finds the index of the next unescaped double quote in {@code s},
 * scanning forward from {@code from}. A backslash skips the character
 * that follows it, so escaped quotes (\") do not terminate the string.
 *
 * @param s    the text to scan
 * @param from first index to examine
 * @return index of the closing quote, or -1 if none exists
 */
private static int findClosingQuote(String s, int from) {
    for (int i = from; i < s.length(); i++) {
        char c = s.charAt(i);
        if (c == '\\') {
            i++; // skip escaped character
            continue;
        }
        if (c == '"') return i;
    }
    return -1;
}
|
||||
|
||||
/**
 * Resolves the JSON escape sequences \" \\ \/ \n \r \t in {@code s}.
 *
 * Unrecognized escapes (e.g. \\uXXXX) are copied through unchanged:
 * the default case appends the backslash without consuming the next
 * character, which is then appended as-is on the following iteration.
 * Strings containing no backslash are returned unchanged (fast path).
 *
 * @param s the raw string contents between JSON quotes
 * @return the unescaped string
 */
private static String unescapeJsonString(String s) {
    if (s.indexOf('\\') < 0) return s;
    StringBuilder sb = new StringBuilder(s.length());
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        if (c == '\\' && i + 1 < s.length()) {
            char next = s.charAt(i + 1);
            switch (next) {
                case '"': sb.append('"'); i++; break;
                case '\\': sb.append('\\'); i++; break;
                case '/': sb.append('/'); i++; break;
                case 'n': sb.append('\n'); i++; break;
                case 'r': sb.append('\r'); i++; break;
                case 't': sb.append('\t'); i++; break;
                default: sb.append(c); break; // keep unknown escape verbatim
            }
        } else {
            sb.append(c);
        }
    }
    return sb.toString();
}
|
||||
|
||||
private static int runExtraction(ExtractionRequest request) throws Exception {
|
||||
List<String> passwords = normalizePasswords(request.passwords);
|
||||
Exception lastError = null;
|
||||
@ -282,35 +152,30 @@ public final class JBindExtractorMain {
|
||||
}
|
||||
|
||||
ensureDirectory(output.getParentFile());
|
||||
rejectSymlink(output);
|
||||
long[] remaining = new long[] { itemUnits };
|
||||
boolean extractionSuccess = false;
|
||||
try {
|
||||
InputStream in = zipFile.getInputStream(header);
|
||||
OutputStream out = new FileOutputStream(output);
|
||||
try {
|
||||
OutputStream out = new FileOutputStream(output);
|
||||
try {
|
||||
byte[] buffer = new byte[BUFFER_SIZE];
|
||||
while (true) {
|
||||
int read = in.read(buffer);
|
||||
if (read < 0) {
|
||||
break;
|
||||
}
|
||||
if (read == 0) {
|
||||
continue;
|
||||
}
|
||||
out.write(buffer, 0, read);
|
||||
long accounted = Math.min(remaining[0], (long) read);
|
||||
remaining[0] -= accounted;
|
||||
progress.advance(accounted);
|
||||
byte[] buffer = new byte[BUFFER_SIZE];
|
||||
while (true) {
|
||||
int read = in.read(buffer);
|
||||
if (read < 0) {
|
||||
break;
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
out.close();
|
||||
} catch (Throwable ignored) {
|
||||
if (read == 0) {
|
||||
continue;
|
||||
}
|
||||
out.write(buffer, 0, read);
|
||||
long accounted = Math.min(remaining[0], (long) read);
|
||||
remaining[0] -= accounted;
|
||||
progress.advance(accounted);
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
out.close();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
try {
|
||||
in.close();
|
||||
} catch (Throwable ignored) {
|
||||
@ -323,19 +188,11 @@ public final class JBindExtractorMain {
|
||||
if (modified > 0) {
|
||||
output.setLastModified(modified);
|
||||
}
|
||||
extractionSuccess = true;
|
||||
} catch (ZipException error) {
|
||||
if (isWrongPassword(error, encrypted)) {
|
||||
throw new WrongPasswordException(error);
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
if (!extractionSuccess && output.exists()) {
|
||||
try {
|
||||
output.delete();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -362,99 +219,98 @@ public final class JBindExtractorMain {
|
||||
try {
|
||||
context = openSevenZipArchive(request.archiveFile, password);
|
||||
IInArchive archive = context.archive;
|
||||
int itemCount = archive.getNumberOfItems();
|
||||
if (itemCount <= 0) {
|
||||
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
|
||||
}
|
||||
ISimpleInArchive simple = archive.getSimpleInterface();
|
||||
ISimpleInArchiveItem[] items = simple.getArchiveItems();
|
||||
|
||||
// Pre-scan: collect file indices, sizes, output paths, and detect encryption
|
||||
long totalUnits = 0;
|
||||
boolean encrypted = false;
|
||||
List<Integer> fileIndices = new ArrayList<Integer>();
|
||||
List<File> outputFiles = new ArrayList<File>();
|
||||
List<Long> fileSizes = new ArrayList<Long>();
|
||||
for (ISimpleInArchiveItem item : items) {
|
||||
if (item == null || item.isFolder()) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
encrypted = encrypted || item.isEncrypted();
|
||||
} catch (Throwable ignored) {
|
||||
// ignore encrypted flag read issues
|
||||
}
|
||||
totalUnits += safeSize(item.getSize());
|
||||
}
|
||||
ProgressTracker progress = new ProgressTracker(totalUnits);
|
||||
progress.emitStart();
|
||||
|
||||
Set<String> reserved = new HashSet<String>();
|
||||
for (ISimpleInArchiveItem item : items) {
|
||||
if (item == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (int i = 0; i < itemCount; i++) {
|
||||
Boolean isFolder = (Boolean) archive.getProperty(i, PropID.IS_FOLDER);
|
||||
String entryPath = (String) archive.getProperty(i, PropID.PATH);
|
||||
String entryName = normalizeEntryName(entryPath, "item-" + i);
|
||||
|
||||
if (Boolean.TRUE.equals(isFolder)) {
|
||||
String entryName = normalizeEntryName(item.getPath(), "item-" + item.getItemIndex());
|
||||
if (item.isFolder()) {
|
||||
File dir = resolveDirectory(request.targetDir, entryName);
|
||||
ensureDirectory(dir);
|
||||
reserved.add(pathKey(dir));
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
Boolean isEncrypted = (Boolean) archive.getProperty(i, PropID.ENCRYPTED);
|
||||
encrypted = encrypted || Boolean.TRUE.equals(isEncrypted);
|
||||
} catch (Throwable ignored) {
|
||||
// ignore encrypted flag read issues
|
||||
}
|
||||
|
||||
Long rawSize = (Long) archive.getProperty(i, PropID.SIZE);
|
||||
long itemSize = safeSize(rawSize);
|
||||
totalUnits += itemSize;
|
||||
|
||||
long itemUnits = safeSize(item.getSize());
|
||||
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
|
||||
fileIndices.add(i);
|
||||
outputFiles.add(output); // null if skipped
|
||||
fileSizes.add(itemSize);
|
||||
}
|
||||
|
||||
if (fileIndices.isEmpty()) {
|
||||
// All items are folders or skipped
|
||||
ProgressTracker progress = new ProgressTracker(1);
|
||||
progress.emitStart();
|
||||
progress.emitDone();
|
||||
return;
|
||||
}
|
||||
|
||||
ProgressTracker progress = new ProgressTracker(totalUnits);
|
||||
progress.emitStart();
|
||||
|
||||
// Build index array for bulk extract
|
||||
int[] indices = new int[fileIndices.size()];
|
||||
for (int i = 0; i < fileIndices.size(); i++) {
|
||||
indices[i] = fileIndices.get(i);
|
||||
}
|
||||
|
||||
// Map from archive index to our position in fileIndices/outputFiles
|
||||
Map<Integer, Integer> indexToPos = new HashMap<Integer, Integer>();
|
||||
for (int i = 0; i < fileIndices.size(); i++) {
|
||||
indexToPos.put(fileIndices.get(i), i);
|
||||
}
|
||||
|
||||
// Bulk extraction state
|
||||
final boolean encryptedFinal = encrypted;
|
||||
final String effectivePassword = password == null ? "" : password;
|
||||
final File[] currentOutput = new File[1];
|
||||
final FileOutputStream[] currentStream = new FileOutputStream[1];
|
||||
final boolean[] currentSuccess = new boolean[1];
|
||||
final long[] currentRemaining = new long[1];
|
||||
final Throwable[] firstError = new Throwable[1];
|
||||
final int[] currentPos = new int[] { -1 };
|
||||
|
||||
try {
|
||||
archive.extract(indices, false, new BulkExtractCallback(
|
||||
archive, indexToPos, fileIndices, outputFiles, fileSizes,
|
||||
progress, encryptedFinal, effectivePassword, currentOutput,
|
||||
currentStream, currentSuccess, currentRemaining, currentPos, firstError
|
||||
));
|
||||
} catch (SevenZipException error) {
|
||||
if (looksLikeWrongPassword(error, encryptedFinal)) {
|
||||
throw new WrongPasswordException(error);
|
||||
if (output == null) {
|
||||
progress.advance(itemUnits);
|
||||
continue;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (firstError[0] != null) {
|
||||
if (firstError[0] instanceof WrongPasswordException) {
|
||||
throw (WrongPasswordException) firstError[0];
|
||||
ensureDirectory(output.getParentFile());
|
||||
final FileOutputStream out = new FileOutputStream(output);
|
||||
final long[] remaining = new long[] { itemUnits };
|
||||
try {
|
||||
ExtractOperationResult result = item.extractSlow(new ISequentialOutStream() {
|
||||
@Override
|
||||
public int write(byte[] data) throws SevenZipException {
|
||||
if (data == null || data.length == 0) {
|
||||
return 0;
|
||||
}
|
||||
try {
|
||||
out.write(data);
|
||||
} catch (IOException error) {
|
||||
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
|
||||
}
|
||||
long accounted = Math.min(remaining[0], (long) data.length);
|
||||
remaining[0] -= accounted;
|
||||
progress.advance(accounted);
|
||||
return data.length;
|
||||
}
|
||||
}, password == null ? "" : password);
|
||||
|
||||
if (remaining[0] > 0) {
|
||||
progress.advance(remaining[0]);
|
||||
}
|
||||
|
||||
if (result != ExtractOperationResult.OK) {
|
||||
if (isPasswordFailure(result, encrypted)) {
|
||||
throw new WrongPasswordException(new IOException("Falsches Passwort"));
|
||||
}
|
||||
throw new IOException("7z-Fehler: " + result.name());
|
||||
}
|
||||
} catch (SevenZipException error) {
|
||||
if (looksLikeWrongPassword(error, encrypted)) {
|
||||
throw new WrongPasswordException(error);
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
try {
|
||||
out.close();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
java.util.Date modified = item.getLastWriteTime();
|
||||
if (modified != null) {
|
||||
output.setLastModified(modified.getTime());
|
||||
}
|
||||
} catch (Throwable ignored) {
|
||||
// best effort
|
||||
}
|
||||
throw (Exception) firstError[0];
|
||||
}
|
||||
|
||||
progress.emitDone();
|
||||
@ -472,31 +328,14 @@ public final class JBindExtractorMain {
|
||||
|
||||
if (SEVEN_ZIP_SPLIT_RE.matcher(nameLower).matches()) {
|
||||
VolumedArchiveInStream volumed = new VolumedArchiveInStream(archiveFile.getName(), callback);
|
||||
try {
|
||||
IInArchive archive = SevenZip.openInArchive(null, volumed, callback);
|
||||
return new SevenZipArchiveContext(archive, null, volumed, callback);
|
||||
} catch (Exception error) {
|
||||
callback.close();
|
||||
throw error;
|
||||
}
|
||||
IInArchive archive = SevenZip.openInArchive(null, volumed, callback);
|
||||
return new SevenZipArchiveContext(archive, null, volumed, callback);
|
||||
}
|
||||
|
||||
RandomAccessFile raf = new RandomAccessFile(archiveFile, "r");
|
||||
RandomAccessFileInStream stream = new RandomAccessFileInStream(raf);
|
||||
try {
|
||||
IInArchive archive = SevenZip.openInArchive(null, stream, callback);
|
||||
return new SevenZipArchiveContext(archive, stream, null, callback);
|
||||
} catch (Exception error) {
|
||||
try {
|
||||
stream.close();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
try {
|
||||
raf.close();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
IInArchive archive = SevenZip.openInArchive(null, stream, callback);
|
||||
return new SevenZipArchiveContext(archive, stream, null, callback);
|
||||
}
|
||||
|
||||
private static boolean isWrongPassword(ZipException error, boolean encrypted) {
|
||||
@ -557,7 +396,7 @@ public final class JBindExtractorMain {
|
||||
}
|
||||
if (siblingName.startsWith(prefix) && siblingName.length() >= prefix.length() + 2) {
|
||||
String suffix = siblingName.substring(prefix.length());
|
||||
if (DIGIT_SUFFIX_RE.matcher(suffix).matches()) {
|
||||
if (suffix.matches("\\d{2,3}")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@ -641,12 +480,6 @@ public final class JBindExtractorMain {
|
||||
}
|
||||
if (normalized.matches("^[a-zA-Z]:.*")) {
|
||||
normalized = normalized.substring(2);
|
||||
while (normalized.startsWith("/")) {
|
||||
normalized = normalized.substring(1);
|
||||
}
|
||||
while (normalized.startsWith("\\")) {
|
||||
normalized = normalized.substring(1);
|
||||
}
|
||||
}
|
||||
File targetCanonical = targetDir.getCanonicalFile();
|
||||
File output = new File(targetCanonical, normalized);
|
||||
@ -655,8 +488,7 @@ public final class JBindExtractorMain {
|
||||
String outputPath = outputCanonical.getPath();
|
||||
String targetPathNorm = isWindows() ? targetPath.toLowerCase(Locale.ROOT) : targetPath;
|
||||
String outputPathNorm = isWindows() ? outputPath.toLowerCase(Locale.ROOT) : outputPath;
|
||||
String targetPrefix = targetPathNorm.endsWith(File.separator) ? targetPathNorm : targetPathNorm + File.separator;
|
||||
if (!outputPathNorm.equals(targetPathNorm) && !outputPathNorm.startsWith(targetPrefix)) {
|
||||
if (!outputPathNorm.equals(targetPathNorm) && !outputPathNorm.startsWith(targetPathNorm + File.separator)) {
|
||||
throw new IOException("Path Traversal blockiert: " + entryName);
|
||||
}
|
||||
return outputCanonical;
|
||||
@ -674,50 +506,20 @@ public final class JBindExtractorMain {
|
||||
if (entry.length() == 0) {
|
||||
return fallback;
|
||||
}
|
||||
// Sanitize Windows special characters from each path segment
|
||||
String[] segments = entry.split("/", -1);
|
||||
StringBuilder sanitized = new StringBuilder();
|
||||
for (int i = 0; i < segments.length; i++) {
|
||||
if (i > 0) {
|
||||
sanitized.append('/');
|
||||
}
|
||||
sanitized.append(WINDOWS_SPECIAL_CHARS_RE.matcher(segments[i]).replaceAll("_"));
|
||||
}
|
||||
entry = sanitized.toString();
|
||||
if (entry.length() == 0) {
|
||||
return fallback;
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
|
||||
private static long safeSize(Long value) {
|
||||
if (value == null) {
|
||||
return 0;
|
||||
return 1;
|
||||
}
|
||||
long size = value.longValue();
|
||||
if (size <= 0) {
|
||||
return 0;
|
||||
return 1;
|
||||
}
|
||||
return size;
|
||||
}
|
||||
|
||||
/**
 * Refuses to write to {@code file} if the file itself, or any of its
 * parent directories, is a symbolic link — a guard against
 * symlink-based extraction escapes out of the target directory.
 *
 * @param file the output file about to be written; {@code null} is a no-op
 * @throws IOException if the file or one of its parents is a symlink
 */
private static void rejectSymlink(File file) throws IOException {
    if (file == null) {
        return;
    }
    if (Files.isSymbolicLink(file.toPath())) {
        throw new IOException("Zieldatei ist ein Symlink, Schreiben verweigert: " + file.getAbsolutePath());
    }
    // Also check parent directories for symlinks
    File parent = file.getParentFile();
    while (parent != null) {
        if (Files.isSymbolicLink(parent.toPath())) {
            throw new IOException("Elternverzeichnis ist ein Symlink, Schreiben verweigert: " + parent.getAbsolutePath());
        }
        parent = parent.getParentFile();
    }
}
|
||||
|
||||
private static void ensureDirectory(File dir) throws IOException {
|
||||
if (dir == null) {
|
||||
return;
|
||||
@ -879,176 +681,6 @@ public final class JBindExtractorMain {
|
||||
private final List<String> passwords = new ArrayList<String>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk extraction callback that implements both IArchiveExtractCallback and
|
||||
* ICryptoGetTextPassword. Using the bulk IInArchive.extract() API instead of
|
||||
* per-item extractSlow() is critical for performance — solid RAR archives
|
||||
* otherwise re-decode from the beginning for every single item.
|
||||
*/
|
||||
private static final class BulkExtractCallback implements IArchiveExtractCallback, ICryptoGetTextPassword {
|
||||
private final IInArchive archive;
|
||||
private final Map<Integer, Integer> indexToPos;
|
||||
private final List<Integer> fileIndices;
|
||||
private final List<File> outputFiles;
|
||||
private final List<Long> fileSizes;
|
||||
private final ProgressTracker progress;
|
||||
private final boolean encrypted;
|
||||
private final String password;
|
||||
private final File[] currentOutput;
|
||||
private final FileOutputStream[] currentStream;
|
||||
private final boolean[] currentSuccess;
|
||||
private final long[] currentRemaining;
|
||||
private final int[] currentPos;
|
||||
private final Throwable[] firstError;
|
||||
|
||||
BulkExtractCallback(IInArchive archive, Map<Integer, Integer> indexToPos,
|
||||
List<Integer> fileIndices, List<File> outputFiles, List<Long> fileSizes,
|
||||
ProgressTracker progress, boolean encrypted, String password,
|
||||
File[] currentOutput, FileOutputStream[] currentStream,
|
||||
boolean[] currentSuccess, long[] currentRemaining, int[] currentPos,
|
||||
Throwable[] firstError) {
|
||||
this.archive = archive;
|
||||
this.indexToPos = indexToPos;
|
||||
this.fileIndices = fileIndices;
|
||||
this.outputFiles = outputFiles;
|
||||
this.fileSizes = fileSizes;
|
||||
this.progress = progress;
|
||||
this.encrypted = encrypted;
|
||||
this.password = password;
|
||||
this.currentOutput = currentOutput;
|
||||
this.currentStream = currentStream;
|
||||
this.currentSuccess = currentSuccess;
|
||||
this.currentRemaining = currentRemaining;
|
||||
this.currentPos = currentPos;
|
||||
this.firstError = firstError;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String cryptoGetTextPassword() {
|
||||
return password;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setTotal(long total) {
|
||||
// 7z reports total compressed bytes; we track uncompressed via ProgressTracker
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setCompleted(long complete) {
|
||||
// Not used — we track per-write progress
|
||||
}
|
||||
|
||||
@Override
|
||||
public ISequentialOutStream getStream(int index, ExtractAskMode extractAskMode) throws SevenZipException {
|
||||
closeCurrentStream();
|
||||
|
||||
Integer pos = indexToPos.get(index);
|
||||
if (pos == null) {
|
||||
return null;
|
||||
}
|
||||
currentPos[0] = pos;
|
||||
currentOutput[0] = outputFiles.get(pos);
|
||||
currentSuccess[0] = false;
|
||||
currentRemaining[0] = fileSizes.get(pos);
|
||||
|
||||
if (extractAskMode != ExtractAskMode.EXTRACT) {
|
||||
currentOutput[0] = null;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (currentOutput[0] == null) {
|
||||
progress.advance(currentRemaining[0]);
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
ensureDirectory(currentOutput[0].getParentFile());
|
||||
rejectSymlink(currentOutput[0]);
|
||||
currentStream[0] = new FileOutputStream(currentOutput[0]);
|
||||
} catch (IOException error) {
|
||||
throw new SevenZipException("Fehler beim Erstellen: " + error.getMessage(), error);
|
||||
}
|
||||
|
||||
return new ISequentialOutStream() {
|
||||
@Override
|
||||
public int write(byte[] data) throws SevenZipException {
|
||||
if (data == null || data.length == 0) {
|
||||
return 0;
|
||||
}
|
||||
try {
|
||||
currentStream[0].write(data);
|
||||
} catch (IOException error) {
|
||||
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
|
||||
}
|
||||
long accounted = Math.min(currentRemaining[0], (long) data.length);
|
||||
currentRemaining[0] -= accounted;
|
||||
progress.advance(accounted);
|
||||
return data.length;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void prepareOperation(ExtractAskMode extractAskMode) {
|
||||
// no-op
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
|
||||
if (currentRemaining[0] > 0) {
|
||||
progress.advance(currentRemaining[0]);
|
||||
currentRemaining[0] = 0;
|
||||
}
|
||||
|
||||
if (result == ExtractOperationResult.OK) {
|
||||
currentSuccess[0] = true;
|
||||
closeCurrentStream();
|
||||
if (currentPos[0] >= 0 && currentOutput[0] != null) {
|
||||
try {
|
||||
int archiveIndex = fileIndices.get(currentPos[0]);
|
||||
java.util.Date modified = (java.util.Date) archive.getProperty(archiveIndex, PropID.LAST_MODIFICATION_TIME);
|
||||
if (modified != null) {
|
||||
currentOutput[0].setLastModified(modified.getTime());
|
||||
}
|
||||
} catch (Throwable ignored) {
|
||||
// best effort
|
||||
}
|
||||
}
|
||||
} else {
|
||||
closeCurrentStream();
|
||||
if (currentOutput[0] != null && currentOutput[0].exists()) {
|
||||
try {
|
||||
currentOutput[0].delete();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
}
|
||||
if (firstError[0] == null) {
|
||||
if (isPasswordFailure(result, encrypted)) {
|
||||
firstError[0] = new WrongPasswordException(new IOException("Falsches Passwort"));
|
||||
} else {
|
||||
firstError[0] = new IOException("7z-Fehler: " + result.name());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void closeCurrentStream() {
|
||||
if (currentStream[0] != null) {
|
||||
try {
|
||||
currentStream[0].close();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
currentStream[0] = null;
|
||||
}
|
||||
if (!currentSuccess[0] && currentOutput[0] != null && currentOutput[0].exists()) {
|
||||
try {
|
||||
currentOutput[0].delete();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static final class WrongPasswordException extends Exception {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@ -1196,11 +828,12 @@ public final class JBindExtractorMain {
|
||||
if (filename == null || filename.trim().length() == 0) {
|
||||
return null;
|
||||
}
|
||||
// Always resolve relative to the archive's parent directory.
|
||||
// Never accept absolute paths to prevent path traversal.
|
||||
String baseName = new File(filename).getName();
|
||||
File direct = new File(filename);
|
||||
if (direct.isAbsolute() && direct.exists()) {
|
||||
return direct;
|
||||
}
|
||||
if (archiveDir != null) {
|
||||
File relative = new File(archiveDir, baseName);
|
||||
File relative = new File(archiveDir, filename);
|
||||
if (relative.exists()) {
|
||||
return relative;
|
||||
}
|
||||
@ -1210,13 +843,13 @@ public final class JBindExtractorMain {
|
||||
if (!sibling.isFile()) {
|
||||
continue;
|
||||
}
|
||||
if (sibling.getName().equalsIgnoreCase(baseName)) {
|
||||
if (sibling.getName().equalsIgnoreCase(filename)) {
|
||||
return sibling;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
return direct.exists() ? direct : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@ -2,17 +2,8 @@ const path = require("path");
|
||||
const { rcedit } = require("rcedit");
|
||||
|
||||
module.exports = async function afterPack(context) {
|
||||
const productFilename = context.packager?.appInfo?.productFilename;
|
||||
if (!productFilename) {
|
||||
console.warn(" • rcedit: skipped — productFilename not available");
|
||||
return;
|
||||
}
|
||||
const exePath = path.join(context.appOutDir, `${productFilename}.exe`);
|
||||
const exePath = path.join(context.appOutDir, `${context.packager.appInfo.productFilename}.exe`);
|
||||
const iconPath = path.resolve(__dirname, "..", "assets", "app_icon.ico");
|
||||
console.log(` • rcedit: patching icon → ${exePath}`);
|
||||
try {
|
||||
await rcedit(exePath, { icon: iconPath });
|
||||
} catch (error) {
|
||||
console.warn(` • rcedit: failed — ${String(error)}`);
|
||||
}
|
||||
await rcedit(exePath, { icon: iconPath });
|
||||
};
|
||||
|
||||
@ -31,21 +31,18 @@ async function main(): Promise<void> {
|
||||
login: settings.megaLogin,
|
||||
password: settings.megaPassword
|
||||
}));
|
||||
try {
|
||||
const service = new DebridService(settings, {
|
||||
megaWebUnrestrict: (link) => megaWeb.unrestrict(link)
|
||||
});
|
||||
for (const link of links) {
|
||||
try {
|
||||
const result = await service.unrestrictLink(link);
|
||||
console.log(`[OK] ${result.providerLabel} -> ${result.fileName}`);
|
||||
} catch (error) {
|
||||
console.log(`[FAIL] ${String(error)}`);
|
||||
}
|
||||
const service = new DebridService(settings, {
|
||||
megaWebUnrestrict: (link) => megaWeb.unrestrict(link)
|
||||
});
|
||||
for (const link of links) {
|
||||
try {
|
||||
const result = await service.unrestrictLink(link);
|
||||
console.log(`[OK] ${result.providerLabel} -> ${result.fileName}`);
|
||||
} catch (error) {
|
||||
console.log(`[FAIL] ${String(error)}`);
|
||||
}
|
||||
} finally {
|
||||
megaWeb.dispose();
|
||||
}
|
||||
megaWeb.dispose();
|
||||
}
|
||||
|
||||
main().catch(e => { console.error(e); process.exit(1); });
|
||||
void main();
|
||||
|
||||
@ -16,8 +16,8 @@ function sleep(ms) {
|
||||
}
|
||||
|
||||
function cookieFrom(headers) {
|
||||
const cookies = headers.getSetCookie();
|
||||
return cookies.map((x) => x.split(";")[0].trim()).filter(Boolean).join("; ");
|
||||
const raw = headers.get("set-cookie") || "";
|
||||
return raw.split(",").map((x) => x.split(";")[0].trim()).filter(Boolean).join("; ");
|
||||
}
|
||||
|
||||
function parseDebridCodes(html) {
|
||||
@ -47,9 +47,6 @@ async function resolveCode(cookie, code) {
|
||||
});
|
||||
const text = (await res.text()).trim();
|
||||
if (text === "reload") {
|
||||
if (attempt % 5 === 0) {
|
||||
console.log(` [retry] code=${code} attempt=${attempt}/50 (waiting for server)`);
|
||||
}
|
||||
await sleep(800);
|
||||
continue;
|
||||
}
|
||||
@ -101,13 +98,7 @@ async function main() {
|
||||
redirect: "manual"
|
||||
});
|
||||
|
||||
if (loginRes.status >= 400) {
|
||||
throw new Error(`Login failed with HTTP ${loginRes.status}`);
|
||||
}
|
||||
const cookie = cookieFrom(loginRes.headers);
|
||||
if (!cookie) {
|
||||
throw new Error("Login returned no session cookie");
|
||||
}
|
||||
console.log("login", loginRes.status, loginRes.headers.get("location") || "");
|
||||
|
||||
const debridRes = await fetch("https://www.mega-debrid.eu/index.php?form=debrid", {
|
||||
@ -145,4 +136,4 @@ async function main() {
|
||||
}
|
||||
}
|
||||
|
||||
await main().catch((e) => { console.error(e); process.exit(1); });
|
||||
await main();
|
||||
|
||||
@ -66,8 +66,6 @@ async function callRealDebrid(link) {
|
||||
};
|
||||
}
|
||||
|
||||
// megaCookie is intentionally cached at module scope so that multiple
|
||||
// callMegaDebrid() invocations reuse the same session cookie.
|
||||
async function callMegaDebrid(link) {
|
||||
if (!megaCookie) {
|
||||
const loginRes = await fetch("https://www.mega-debrid.eu/index.php?form=login", {
|
||||
@ -79,15 +77,13 @@ async function callMegaDebrid(link) {
|
||||
body: new URLSearchParams({ login: megaLogin, password: megaPassword, remember: "on" }),
|
||||
redirect: "manual"
|
||||
});
|
||||
if (loginRes.status >= 400) {
|
||||
return { ok: false, error: `Mega-Web login failed with HTTP ${loginRes.status}` };
|
||||
}
|
||||
megaCookie = loginRes.headers.getSetCookie()
|
||||
megaCookie = (loginRes.headers.get("set-cookie") || "")
|
||||
.split(",")
|
||||
.map((chunk) => chunk.split(";")[0].trim())
|
||||
.filter(Boolean)
|
||||
.join("; ");
|
||||
if (!megaCookie) {
|
||||
return { ok: false, error: "Mega-Web login returned no session cookie" };
|
||||
return { ok: false, error: "Mega-Web login failed" };
|
||||
}
|
||||
}
|
||||
|
||||
@ -294,4 +290,4 @@ async function main() {
|
||||
}
|
||||
}
|
||||
|
||||
await main().catch((e) => { console.error(e); process.exit(1); });
|
||||
await main();
|
||||
|
||||
@ -2,15 +2,7 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { spawnSync } from "node:child_process";
|
||||
|
||||
const NPM_RELEASE_WIN = process.platform === "win32"
|
||||
? {
|
||||
command: process.env.ComSpec || "cmd.exe",
|
||||
args: ["/d", "/s", "/c", "npm run release:win"]
|
||||
}
|
||||
: {
|
||||
command: "npm",
|
||||
args: ["run", "release:win"]
|
||||
};
|
||||
const NPM_EXECUTABLE = process.platform === "win32" ? "npm.cmd" : "npm";
|
||||
|
||||
function run(command, args, options = {}) {
|
||||
const result = spawnSync(command, args, {
|
||||
@ -45,8 +37,7 @@ function runWithInput(command, args, input) {
|
||||
cwd: process.cwd(),
|
||||
encoding: "utf8",
|
||||
input,
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
timeout: 10000
|
||||
stdio: ["pipe", "pipe", "pipe"]
|
||||
});
|
||||
if (result.status !== 0) {
|
||||
const stderr = String(result.stderr || "").trim();
|
||||
@ -104,17 +95,15 @@ function getGiteaRepo() {
|
||||
|
||||
const preferredBase = normalizeBaseUrl(process.env.GITEA_BASE_URL || process.env.FORGEJO_BASE_URL || "https://git.24-music.de");
|
||||
|
||||
const preferredProtocol = preferredBase ? new URL(preferredBase).protocol : "https:";
|
||||
|
||||
for (const remote of remotes) {
|
||||
try {
|
||||
const remoteUrl = runCapture("git", ["remote", "get-url", remote]);
|
||||
const parsed = parseRemoteUrl(remoteUrl);
|
||||
const remoteBase = `https://${parsed.host}`.toLowerCase();
|
||||
if (preferredBase && remoteBase !== preferredBase.toLowerCase().replace(/^http:/, "https:")) {
|
||||
if (preferredBase && remoteBase !== preferredBase.toLowerCase()) {
|
||||
continue;
|
||||
}
|
||||
return { remote, ...parsed, baseUrl: `${preferredProtocol}//${parsed.host}` };
|
||||
return { remote, ...parsed, baseUrl: `https://${parsed.host}` };
|
||||
} catch {
|
||||
// try next remote
|
||||
}
|
||||
@ -190,8 +179,7 @@ function updatePackageVersion(rootDir, version) {
|
||||
const packagePath = path.join(rootDir, "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packagePath, "utf8"));
|
||||
if (String(packageJson.version || "") === version) {
|
||||
process.stdout.write(`package.json is already at version ${version}, skipping update.\n`);
|
||||
return;
|
||||
throw new Error(`package.json is already at version ${version}`);
|
||||
}
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(packagePath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");
|
||||
@ -269,31 +257,9 @@ async function createOrGetRelease(baseApi, tag, authHeader, notes) {
|
||||
async function uploadReleaseAssets(baseApi, releaseId, authHeader, releaseDir, files) {
|
||||
for (const fileName of files) {
|
||||
const filePath = path.join(releaseDir, fileName);
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const fileData = fs.readFileSync(filePath);
|
||||
const uploadUrl = `${baseApi}/releases/${releaseId}/assets?name=${encodeURIComponent(fileName)}`;
|
||||
|
||||
// Stream large files instead of loading them entirely into memory
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const response = await fetch(uploadUrl, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: authHeader,
|
||||
"Content-Type": "application/octet-stream",
|
||||
"Content-Length": String(fileSize)
|
||||
},
|
||||
body: fileStream,
|
||||
duplex: "half"
|
||||
});
|
||||
|
||||
const text = await response.text();
|
||||
let parsed;
|
||||
try {
|
||||
parsed = text ? JSON.parse(text) : null;
|
||||
} catch {
|
||||
parsed = text;
|
||||
}
|
||||
|
||||
const response = await apiRequest("POST", uploadUrl, authHeader, fileData, "application/octet-stream");
|
||||
if (response.ok) {
|
||||
process.stdout.write(`Uploaded: ${fileName}\n`);
|
||||
continue;
|
||||
@ -302,7 +268,7 @@ async function uploadReleaseAssets(baseApi, releaseId, authHeader, releaseDir, f
|
||||
process.stdout.write(`Skipped existing asset: ${fileName}\n`);
|
||||
continue;
|
||||
}
|
||||
throw new Error(`Asset upload failed for ${fileName} (${response.status}): ${JSON.stringify(parsed)}`);
|
||||
throw new Error(`Asset upload failed for ${fileName} (${response.status}): ${JSON.stringify(response.body)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@ -324,18 +290,17 @@ async function main() {
|
||||
|
||||
ensureNoTrackedChanges();
|
||||
ensureTagMissing(tag);
|
||||
|
||||
if (args.dryRun) {
|
||||
process.stdout.write(`Dry run: would release ${tag}. No changes made.\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
updatePackageVersion(rootDir, version);
|
||||
|
||||
process.stdout.write(`Building release artifacts for ${tag}...\n`);
|
||||
run(NPM_RELEASE_WIN.command, NPM_RELEASE_WIN.args);
|
||||
run(NPM_EXECUTABLE, ["run", "release:win"]);
|
||||
const assets = ensureAssetsExist(rootDir, version);
|
||||
|
||||
if (args.dryRun) {
|
||||
process.stdout.write(`Dry run complete. Assets exist for ${tag}.\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
run("git", ["add", "package.json"]);
|
||||
run("git", ["commit", "-m", `Release ${tag}`]);
|
||||
run("git", ["push", repo.remote, "main"]);
|
||||
|
||||
@ -5,7 +5,6 @@ import {
|
||||
AppSettings,
|
||||
DuplicatePolicy,
|
||||
HistoryEntry,
|
||||
PackagePriority,
|
||||
ParsedPackageInput,
|
||||
SessionStats,
|
||||
StartConflictEntry,
|
||||
@ -106,7 +105,6 @@ export class AppController {
|
||||
|| settings.bestToken.trim()
|
||||
|| settings.allDebridToken.trim()
|
||||
|| (settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim())
|
||||
|| settings.oneFichierApiKey.trim()
|
||||
);
|
||||
}
|
||||
|
||||
@ -287,7 +285,7 @@ export class AppController {
|
||||
|
||||
public exportBackup(): string {
|
||||
const settings = { ...this.settings };
|
||||
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword", "oneFichierApiKey"];
|
||||
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaPassword", "bestToken", "allDebridToken", "ddownloadPassword"];
|
||||
for (const key of SENSITIVE_KEYS) {
|
||||
const val = settings[key];
|
||||
if (typeof val === "string" && val.length > 0) {
|
||||
@ -309,7 +307,7 @@ export class AppController {
|
||||
return { restored: false, message: "Kein gültiges Backup (settings/session fehlen)" };
|
||||
}
|
||||
const importedSettings = parsed.settings as AppSettings;
|
||||
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword", "oneFichierApiKey"];
|
||||
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaPassword", "bestToken", "allDebridToken", "ddownloadPassword"];
|
||||
for (const key of SENSITIVE_KEYS) {
|
||||
const val = (importedSettings as Record<string, unknown>)[key];
|
||||
if (typeof val === "string" && val.startsWith("***")) {
|
||||
@ -362,8 +360,8 @@ export class AppController {
|
||||
clearHistory(this.storagePaths);
|
||||
}
|
||||
|
||||
public setPackagePriority(packageId: string, priority: PackagePriority): void {
|
||||
this.manager.setPackagePriority(packageId, priority);
|
||||
public setPackagePriority(packageId: string, priority: string): void {
|
||||
this.manager.setPackagePriority(packageId, priority as any);
|
||||
}
|
||||
|
||||
public skipItems(itemIds: string[]): void {
|
||||
|
||||
@ -47,7 +47,6 @@ export function defaultSettings(): AppSettings {
|
||||
allDebridToken: "",
|
||||
ddownloadLogin: "",
|
||||
ddownloadPassword: "",
|
||||
oneFichierApiKey: "",
|
||||
archivePasswordList: "",
|
||||
rememberToken: true,
|
||||
providerPrimary: "realdebrid",
|
||||
|
||||
@ -164,7 +164,7 @@ async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]>
|
||||
const dlcData = content.slice(0, -88);
|
||||
|
||||
const rcUrl = DLC_SERVICE_URL.replace("{KEY}", encodeURIComponent(dlcKey));
|
||||
const rcResponse = await fetch(rcUrl, { method: "GET", signal: AbortSignal.timeout(30000) });
|
||||
const rcResponse = await fetch(rcUrl, { method: "GET" });
|
||||
if (!rcResponse.ok) {
|
||||
return [];
|
||||
}
|
||||
@ -217,8 +217,7 @@ async function tryDcryptUpload(fileContent: Buffer, fileName: string): Promise<s
|
||||
|
||||
const response = await fetch(DCRYPT_UPLOAD_URL, {
|
||||
method: "POST",
|
||||
body: form,
|
||||
signal: AbortSignal.timeout(30000)
|
||||
body: form
|
||||
});
|
||||
if (response.status === 413) {
|
||||
return null;
|
||||
@ -236,8 +235,7 @@ async function tryDcryptPaste(fileContent: Buffer): Promise<string[] | null> {
|
||||
|
||||
const response = await fetch(DCRYPT_PASTE_URL, {
|
||||
method: "POST",
|
||||
body: form,
|
||||
signal: AbortSignal.timeout(30000)
|
||||
body: form
|
||||
});
|
||||
if (response.status === 413) {
|
||||
return null;
|
||||
|
||||
@ -11,16 +11,12 @@ const RAPIDGATOR_SCAN_MAX_BYTES = 512 * 1024;
|
||||
const BEST_DEBRID_API_BASE = "https://bestdebrid.com/api/v1";
|
||||
const ALL_DEBRID_API_BASE = "https://api.alldebrid.com/v4";
|
||||
|
||||
const ONEFICHIER_API_BASE = "https://api.1fichier.com/v1";
|
||||
const ONEFICHIER_URL_RE = /^https?:\/\/(?:www\.)?(?:1fichier\.com|alterupload\.com|cjoint\.net|desfichiers\.com|dfichiers\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|tenvoi\.com|dl4free\.com)\/\?([a-z0-9]{5,20})$/i;
|
||||
|
||||
const PROVIDER_LABELS: Record<DebridProvider, string> = {
|
||||
realdebrid: "Real-Debrid",
|
||||
megadebrid: "Mega-Debrid",
|
||||
bestdebrid: "BestDebrid",
|
||||
alldebrid: "AllDebrid",
|
||||
ddownload: "DDownload",
|
||||
onefichier: "1Fichier"
|
||||
ddownload: "DDownload"
|
||||
};
|
||||
|
||||
interface ProviderUnrestrictedLink extends UnrestrictedLink {
|
||||
@ -963,66 +959,6 @@ class AllDebridClient {
|
||||
}
|
||||
}
|
||||
|
||||
// ── 1Fichier Client ──
|
||||
|
||||
class OneFichierClient {
|
||||
private apiKey: string;
|
||||
|
||||
public constructor(apiKey: string) {
|
||||
this.apiKey = apiKey;
|
||||
}
|
||||
|
||||
public async unrestrictLink(link: string, signal?: AbortSignal): Promise<UnrestrictedLink> {
|
||||
if (!ONEFICHIER_URL_RE.test(link)) {
|
||||
throw new Error("Kein 1Fichier-Link");
|
||||
}
|
||||
|
||||
let lastError = "";
|
||||
for (let attempt = 1; attempt <= REQUEST_RETRIES; attempt += 1) {
|
||||
if (signal?.aborted) throw new Error("aborted:debrid");
|
||||
try {
|
||||
const res = await fetch(`${ONEFICHIER_API_BASE}/download/get_token.cgi`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${this.apiKey}`
|
||||
},
|
||||
body: JSON.stringify({ url: link, pretty: 1 }),
|
||||
signal: withTimeoutSignal(signal, API_TIMEOUT_MS)
|
||||
});
|
||||
|
||||
const json = await res.json() as Record<string, unknown>;
|
||||
|
||||
if (json.status === "KO" || json.error) {
|
||||
const msg = String(json.message || json.error || "Unbekannter 1Fichier-Fehler");
|
||||
throw new Error(msg);
|
||||
}
|
||||
|
||||
const directUrl = String(json.url || "");
|
||||
if (!directUrl) {
|
||||
throw new Error("1Fichier: Keine Download-URL in Antwort");
|
||||
}
|
||||
|
||||
return {
|
||||
fileName: filenameFromUrl(directUrl) || filenameFromUrl(link),
|
||||
directUrl,
|
||||
fileSize: null,
|
||||
retriesUsed: attempt - 1
|
||||
};
|
||||
} catch (error) {
|
||||
lastError = compactErrorText(error);
|
||||
if (signal?.aborted || (/aborted/i.test(lastError) && !/timeout/i.test(lastError))) {
|
||||
throw error;
|
||||
}
|
||||
if (attempt < REQUEST_RETRIES) {
|
||||
await sleep(retryDelay(attempt), signal);
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Error(`1Fichier-Unrestrict fehlgeschlagen: ${lastError}`);
|
||||
}
|
||||
}
|
||||
|
||||
const DDOWNLOAD_URL_RE = /^https?:\/\/(?:www\.)?(?:ddownload\.com|ddl\.to)\/([a-z0-9]+)/i;
|
||||
const DDOWNLOAD_WEB_BASE = "https://ddownload.com";
|
||||
const DDOWNLOAD_WEB_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36";
|
||||
@ -1117,8 +1053,7 @@ class DdownloadClient {
|
||||
fileName: filenameFromUrl(directUrl) || filenameFromUrl(link),
|
||||
directUrl,
|
||||
fileSize: null,
|
||||
retriesUsed: attempt - 1,
|
||||
skipTlsVerify: true
|
||||
retriesUsed: attempt - 1
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -1167,8 +1102,7 @@ class DdownloadClient {
|
||||
fileName: fileName || filenameFromUrl(directUrl),
|
||||
directUrl,
|
||||
fileSize: null,
|
||||
retriesUsed: attempt - 1,
|
||||
skipTlsVerify: true
|
||||
retriesUsed: attempt - 1
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -1181,8 +1115,7 @@ class DdownloadClient {
|
||||
fileName,
|
||||
directUrl: directMatch[0],
|
||||
fileSize: null,
|
||||
retriesUsed: attempt - 1,
|
||||
skipTlsVerify: true
|
||||
retriesUsed: attempt - 1
|
||||
};
|
||||
}
|
||||
|
||||
@ -1218,9 +1151,6 @@ export class DebridService {
|
||||
|
||||
private options: DebridServiceOptions;
|
||||
|
||||
private cachedDdownloadClient: DdownloadClient | null = null;
|
||||
private cachedDdownloadKey = "";
|
||||
|
||||
public constructor(settings: AppSettings, options: DebridServiceOptions = {}) {
|
||||
this.settings = cloneSettings(settings);
|
||||
this.options = options;
|
||||
@ -1230,16 +1160,6 @@ export class DebridService {
|
||||
this.settings = cloneSettings(next);
|
||||
}
|
||||
|
||||
private getDdownloadClient(login: string, password: string): DdownloadClient {
|
||||
const key = `${login}\0${password}`;
|
||||
if (this.cachedDdownloadClient && this.cachedDdownloadKey === key) {
|
||||
return this.cachedDdownloadClient;
|
||||
}
|
||||
this.cachedDdownloadClient = new DdownloadClient(login, password);
|
||||
this.cachedDdownloadKey = key;
|
||||
return this.cachedDdownloadClient;
|
||||
}
|
||||
|
||||
public async resolveFilenames(
|
||||
links: string[],
|
||||
onResolved?: (link: string, fileName: string) => void,
|
||||
@ -1292,46 +1212,6 @@ export class DebridService {
|
||||
|
||||
public async unrestrictLink(link: string, signal?: AbortSignal, settingsSnapshot?: AppSettings): Promise<ProviderUnrestrictedLink> {
|
||||
const settings = settingsSnapshot ? cloneSettings(settingsSnapshot) : cloneSettings(this.settings);
|
||||
|
||||
// 1Fichier is a direct file hoster. If the link is a 1fichier.com URL
|
||||
// and the API key is configured, use 1Fichier directly before debrid providers.
|
||||
if (ONEFICHIER_URL_RE.test(link) && this.isProviderConfiguredFor(settings, "onefichier")) {
|
||||
try {
|
||||
const result = await this.unrestrictViaProvider(settings, "onefichier", link, signal);
|
||||
return {
|
||||
...result,
|
||||
provider: "onefichier",
|
||||
providerLabel: PROVIDER_LABELS["onefichier"]
|
||||
};
|
||||
} catch (error) {
|
||||
const errorText = compactErrorText(error);
|
||||
if (signal?.aborted || (/aborted/i.test(errorText) && !/timeout/i.test(errorText))) {
|
||||
throw error;
|
||||
}
|
||||
// Fall through to normal provider chain
|
||||
}
|
||||
}
|
||||
|
||||
// DDownload is a direct file hoster, not a debrid service.
|
||||
// If the link is a ddownload.com/ddl.to URL and the account is configured,
|
||||
// use DDownload directly before trying any debrid providers.
|
||||
if (DDOWNLOAD_URL_RE.test(link) && this.isProviderConfiguredFor(settings, "ddownload")) {
|
||||
try {
|
||||
const result = await this.unrestrictViaProvider(settings, "ddownload", link, signal);
|
||||
return {
|
||||
...result,
|
||||
provider: "ddownload",
|
||||
providerLabel: PROVIDER_LABELS["ddownload"]
|
||||
};
|
||||
} catch (error) {
|
||||
const errorText = compactErrorText(error);
|
||||
if (signal?.aborted || (/aborted/i.test(errorText) && !/timeout/i.test(errorText))) {
|
||||
throw error;
|
||||
}
|
||||
// Fall through to normal provider chain (debrid services may also support ddownload links)
|
||||
}
|
||||
}
|
||||
|
||||
const order = toProviderOrder(
|
||||
settings.providerPrimary,
|
||||
settings.providerSecondary,
|
||||
@ -1420,9 +1300,6 @@ export class DebridService {
|
||||
if (provider === "ddownload") {
|
||||
return Boolean(settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim());
|
||||
}
|
||||
if (provider === "onefichier") {
|
||||
return Boolean(settings.oneFichierApiKey.trim());
|
||||
}
|
||||
return Boolean(settings.bestToken.trim());
|
||||
}
|
||||
|
||||
@ -1437,10 +1314,7 @@ export class DebridService {
|
||||
return new AllDebridClient(settings.allDebridToken).unrestrictLink(link, signal);
|
||||
}
|
||||
if (provider === "ddownload") {
|
||||
return this.getDdownloadClient(settings.ddownloadLogin, settings.ddownloadPassword).unrestrictLink(link, signal);
|
||||
}
|
||||
if (provider === "onefichier") {
|
||||
return new OneFichierClient(settings.oneFichierApiKey).unrestrictLink(link, signal);
|
||||
return new DdownloadClient(settings.ddownloadLogin, settings.ddownloadPassword).unrestrictLink(link, signal);
|
||||
}
|
||||
return new BestDebridClient(settings.bestToken).unrestrictLink(link, signal);
|
||||
}
|
||||
|
||||
@ -261,7 +261,7 @@ export function startDebugServer(mgr: DownloadManager, baseDir: string): void {
|
||||
const port = getPort(baseDir);
|
||||
|
||||
server = http.createServer(handleRequest);
|
||||
server.listen(port, "127.0.0.1", () => {
|
||||
server.listen(port, "0.0.0.0", () => {
|
||||
logger.info(`Debug-Server gestartet auf Port ${port}`);
|
||||
});
|
||||
server.on("error", (err) => {
|
||||
|
||||
@ -20,26 +20,9 @@ import {
|
||||
UiSnapshot
|
||||
} from "../shared/types";
|
||||
import { REQUEST_RETRIES, SAMPLE_VIDEO_EXTENSIONS, SPEED_WINDOW_SECONDS, WRITE_BUFFER_SIZE, WRITE_FLUSH_TIMEOUT_MS, ALLOCATION_UNIT_SIZE, STREAM_HIGH_WATER_MARK, DISK_BUSY_THRESHOLD_MS } from "./constants";
|
||||
|
||||
// Reference counter for NODE_TLS_REJECT_UNAUTHORIZED to avoid race conditions
|
||||
// when multiple parallel downloads need TLS verification disabled (e.g. DDownload).
|
||||
let tlsSkipRefCount = 0;
|
||||
function acquireTlsSkip(): void {
|
||||
tlsSkipRefCount += 1;
|
||||
if (tlsSkipRefCount === 1) {
|
||||
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
||||
}
|
||||
}
|
||||
function releaseTlsSkip(): void {
|
||||
tlsSkipRefCount -= 1;
|
||||
if (tlsSkipRefCount <= 0) {
|
||||
tlsSkipRefCount = 0;
|
||||
delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
|
||||
}
|
||||
}
|
||||
import { cleanupCancelledPackageArtifactsAsync, removeDownloadLinkArtifacts, removeSampleArtifacts } from "./cleanup";
|
||||
import { cleanupCancelledPackageArtifactsAsync } from "./cleanup";
|
||||
import { DebridService, MegaWebUnrestrictor, checkRapidgatorOnline } from "./debrid";
|
||||
import { cleanupArchives, clearExtractResumeState, collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates, hasAnyFilesRecursive, removeEmptyDirectoryTree } from "./extractor";
|
||||
import { clearExtractResumeState, collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates } from "./extractor";
|
||||
import { validateFileAgainstManifest } from "./integrity";
|
||||
import { logger } from "./logger";
|
||||
import { StoragePaths, saveSession, saveSessionAsync, saveSettings, saveSettingsAsync } from "./storage";
|
||||
@ -308,9 +291,6 @@ function providerLabel(provider: DownloadItem["provider"]): string {
|
||||
if (provider === "alldebrid") {
|
||||
return "AllDebrid";
|
||||
}
|
||||
if (provider === "ddownload") {
|
||||
return "DDownload";
|
||||
}
|
||||
return "Debrid";
|
||||
}
|
||||
|
||||
@ -334,11 +314,9 @@ const EMPTY_DIR_IGNORED_FILE_NAMES = new Set([
|
||||
"desktop.ini",
|
||||
".ds_store"
|
||||
]);
|
||||
const EMPTY_DIR_IGNORED_FILE_RE = /^\.rd_extract_progress(?:_[^.\\/]+)?\.json$/i;
|
||||
|
||||
function isIgnorableEmptyDirFileName(fileName: string): boolean {
|
||||
const normalized = String(fileName || "").trim().toLowerCase();
|
||||
return EMPTY_DIR_IGNORED_FILE_NAMES.has(normalized) || EMPTY_DIR_IGNORED_FILE_RE.test(normalized);
|
||||
return EMPTY_DIR_IGNORED_FILE_NAMES.has(String(fileName || "").trim().toLowerCase());
|
||||
}
|
||||
|
||||
function toWindowsLongPathIfNeeded(filePath: string): string {
|
||||
@ -753,86 +731,60 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
|
||||
return null;
|
||||
}
|
||||
|
||||
export function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
|
||||
function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
|
||||
const entryLower = archiveName.toLowerCase();
|
||||
|
||||
// Helper: get item basename (try targetPath first, then fileName)
|
||||
const itemBaseName = (item: DownloadItem): string =>
|
||||
path.basename(item.targetPath || item.fileName || "");
|
||||
|
||||
// Try pattern-based matching first (for multipart archives)
|
||||
let pattern: RegExp | null = null;
|
||||
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
|
||||
if (multipartMatch) {
|
||||
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
|
||||
}
|
||||
if (!pattern) {
|
||||
const rarMatch = entryLower.match(/^(.*)\.rar$/);
|
||||
if (rarMatch) {
|
||||
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
|
||||
}
|
||||
}
|
||||
if (!pattern) {
|
||||
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
|
||||
if (zipSplitMatch) {
|
||||
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
|
||||
}
|
||||
}
|
||||
if (!pattern) {
|
||||
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
|
||||
if (sevenSplitMatch) {
|
||||
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
|
||||
}
|
||||
}
|
||||
if (!pattern && /^(.*)\.001$/.test(entryLower) && !/\.(zip|7z)\.001$/.test(entryLower)) {
|
||||
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
|
||||
if (genericSplitMatch) {
|
||||
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
|
||||
}
|
||||
}
|
||||
|
||||
// Attempt 1: Pattern match (handles multipart archives)
|
||||
if (pattern) {
|
||||
const matched = items.filter((item) => pattern!.test(itemBaseName(item)));
|
||||
if (matched.length > 0) return matched;
|
||||
}
|
||||
|
||||
// Attempt 2: Exact filename match (case-insensitive)
|
||||
const exactMatch = items.filter((item) => itemBaseName(item).toLowerCase() === entryLower);
|
||||
if (exactMatch.length > 0) return exactMatch;
|
||||
|
||||
// Attempt 3: Stem-based fuzzy match — strip archive extensions and compare stems.
|
||||
// Handles cases where debrid services modify filenames slightly.
|
||||
const archiveStem = entryLower
|
||||
.replace(/\.part\d+\.rar$/i, "")
|
||||
.replace(/\.r\d{2,3}$/i, "")
|
||||
.replace(/\.rar$/i, "")
|
||||
.replace(/\.(zip|7z)\.\d{3}$/i, "")
|
||||
.replace(/\.\d{3}$/i, "")
|
||||
.replace(/\.(zip|7z)$/i, "");
|
||||
if (archiveStem.length > 3) {
|
||||
const stemMatch = items.filter((item) => {
|
||||
const name = itemBaseName(item).toLowerCase();
|
||||
return name.startsWith(archiveStem) && /\.(rar|r\d{2,3}|zip|7z|\d{3})$/i.test(name);
|
||||
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
|
||||
return items.filter((item) => {
|
||||
const name = path.basename(item.targetPath || item.fileName || "");
|
||||
return pattern.test(name);
|
||||
});
|
||||
if (stemMatch.length > 0) return stemMatch;
|
||||
}
|
||||
|
||||
// Attempt 4: If only one item in the list and one archive — return it as a best-effort match.
|
||||
// This handles single-file packages where the filename may have been modified.
|
||||
if (items.length === 1) {
|
||||
const singleName = itemBaseName(items[0]).toLowerCase();
|
||||
if (/\.(rar|zip|7z|\d{3})$/i.test(singleName)) {
|
||||
return items;
|
||||
}
|
||||
const rarMatch = entryLower.match(/^(.*)\.rar$/);
|
||||
if (rarMatch) {
|
||||
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
|
||||
return items.filter((item) => {
|
||||
const name = path.basename(item.targetPath || item.fileName || "");
|
||||
return pattern.test(name);
|
||||
});
|
||||
}
|
||||
|
||||
return [];
|
||||
// Split ZIP (e.g., movie.zip.001, movie.zip.002)
|
||||
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
|
||||
if (zipSplitMatch) {
|
||||
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
|
||||
return items.filter((item) => {
|
||||
const name = path.basename(item.targetPath || item.fileName || "");
|
||||
return pattern.test(name);
|
||||
});
|
||||
}
|
||||
// Split 7z (e.g., movie.7z.001, movie.7z.002)
|
||||
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
|
||||
if (sevenSplitMatch) {
|
||||
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
|
||||
return items.filter((item) => {
|
||||
const name = path.basename(item.targetPath || item.fileName || "");
|
||||
return pattern.test(name);
|
||||
});
|
||||
}
|
||||
// Generic .NNN splits (e.g., movie.001, movie.002)
|
||||
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
|
||||
if (genericSplitMatch && !/\.(zip|7z)\.001$/.test(entryLower)) {
|
||||
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
|
||||
return items.filter((item) => {
|
||||
const name = path.basename(item.targetPath || item.fileName || "");
|
||||
return pattern.test(name);
|
||||
});
|
||||
}
|
||||
return items.filter((item) => {
|
||||
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
|
||||
return name === entryLower;
|
||||
});
|
||||
}
|
||||
|
||||
function retryDelayWithJitter(attempt: number, baseMs: number): number {
|
||||
@ -1412,10 +1364,6 @@ export class DownloadManager extends EventEmitter {
|
||||
addedPackages += 1;
|
||||
}
|
||||
|
||||
if (addedPackages > 0 || addedLinks > 0) {
|
||||
const pkgNames = packages.filter((p) => p.links.length > 0).map((p) => p.name).join(", ");
|
||||
logger.info(`Pakete hinzugefügt: ${addedPackages} Paket(e), ${addedLinks} Link(s) [${pkgNames}]`);
|
||||
}
|
||||
this.persistSoon();
|
||||
this.emitState();
|
||||
if (unresolvedByLink.size > 0) {
|
||||
@ -3261,11 +3209,11 @@ export class DownloadManager extends EventEmitter {
|
||||
|
||||
for (const item of Object.values(this.session.items)) {
|
||||
if (item.status !== "completed") continue;
|
||||
const fullSt = item.fullStatus || "";
|
||||
const fs = item.fullStatus || "";
|
||||
// Only relabel items with active extraction status (e.g. "Entpacken 45%", "Passwort prüfen")
|
||||
// Skip items that were merely waiting ("Entpacken - Ausstehend", "Entpacken - Warten auf Parts")
|
||||
// as they were never actively extracting and "abgebrochen" would be misleading.
|
||||
if (/^Entpacken\b/i.test(fullSt) && !/Ausstehend/i.test(fullSt) && !/Warten/i.test(fullSt) && !isExtractedLabel(fullSt)) {
|
||||
if (/^Entpacken\b/i.test(fs) && !/Ausstehend/i.test(fs) && !/Warten/i.test(fs) && !isExtractedLabel(fs)) {
|
||||
item.fullStatus = "Entpacken abgebrochen (wird fortgesetzt)";
|
||||
item.updatedAt = nowMs();
|
||||
const pkg = this.session.packages[item.packageId];
|
||||
@ -3354,7 +3302,7 @@ export class DownloadManager extends EventEmitter {
|
||||
this.session.reconnectReason = "";
|
||||
|
||||
for (const item of Object.values(this.session.items)) {
|
||||
if (item.provider !== "realdebrid" && item.provider !== "megadebrid" && item.provider !== "bestdebrid" && item.provider !== "alldebrid" && item.provider !== "ddownload") {
|
||||
if (item.provider !== "realdebrid" && item.provider !== "megadebrid" && item.provider !== "bestdebrid" && item.provider !== "alldebrid") {
|
||||
item.provider = null;
|
||||
}
|
||||
if (item.status === "cancelled" && item.fullStatus === "Gestoppt") {
|
||||
@ -3602,16 +3550,14 @@ export class DownloadManager extends EventEmitter {
|
||||
this.emit("state", this.getSnapshot());
|
||||
return;
|
||||
}
|
||||
// Too soon — replace any pending timer with a shorter forced-emit timer
|
||||
if (this.stateEmitTimer) {
|
||||
clearTimeout(this.stateEmitTimer);
|
||||
this.stateEmitTimer = null;
|
||||
// Too soon — schedule deferred forced emit
|
||||
if (!this.stateEmitTimer) {
|
||||
this.stateEmitTimer = setTimeout(() => {
|
||||
this.stateEmitTimer = null;
|
||||
this.lastStateEmitAt = nowMs();
|
||||
this.emit("state", this.getSnapshot());
|
||||
}, MIN_FORCE_GAP_MS - sinceLastEmit);
|
||||
}
|
||||
this.stateEmitTimer = setTimeout(() => {
|
||||
this.stateEmitTimer = null;
|
||||
this.lastStateEmitAt = nowMs();
|
||||
this.emit("state", this.getSnapshot());
|
||||
}, MIN_FORCE_GAP_MS - sinceLastEmit);
|
||||
return;
|
||||
}
|
||||
if (this.stateEmitTimer) {
|
||||
@ -3849,26 +3795,18 @@ export class DownloadManager extends EventEmitter {
|
||||
this.packagePostProcessAbortControllers.set(packageId, abortController);
|
||||
|
||||
const task = (async () => {
|
||||
const slotWaitStart = nowMs();
|
||||
await this.acquirePostProcessSlot(packageId);
|
||||
const slotWaitMs = nowMs() - slotWaitStart;
|
||||
if (slotWaitMs > 100) {
|
||||
logger.info(`Post-Process Slot erhalten nach ${(slotWaitMs / 1000).toFixed(1)}s Wartezeit: pkg=${packageId.slice(0, 8)}`);
|
||||
}
|
||||
try {
|
||||
let round = 0;
|
||||
// Loop while requeue requests arrive — keep the slot so the same
|
||||
// package can immediately re-run hybrid extraction without waiting
|
||||
// behind other packages that may be queued for the slot.
|
||||
do {
|
||||
round += 1;
|
||||
const hadRequeue = this.hybridExtractRequeue.has(packageId);
|
||||
this.hybridExtractRequeue.delete(packageId);
|
||||
const roundStart = nowMs();
|
||||
try {
|
||||
await this.handlePackagePostProcessing(packageId, abortController.signal);
|
||||
} catch (error) {
|
||||
logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`);
|
||||
}
|
||||
const roundMs = nowMs() - roundStart;
|
||||
logger.info(`Post-Process Runde ${round} fertig in ${(roundMs / 1000).toFixed(1)}s (requeue=${hadRequeue}, nextRequeue=${this.hybridExtractRequeue.has(packageId)}): pkg=${packageId.slice(0, 8)}`);
|
||||
this.persistSoon();
|
||||
this.emitState();
|
||||
} while (this.hybridExtractRequeue.has(packageId));
|
||||
@ -4768,7 +4706,6 @@ export class DownloadManager extends EventEmitter {
|
||||
item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
|
||||
item.updatedAt = nowMs();
|
||||
this.emitState();
|
||||
logger.info(`Download Start: ${item.fileName} (${humanSize(unrestricted.fileSize || 0)}) via ${unrestricted.providerLabel}, pkg=${pkg.name}`);
|
||||
|
||||
const maxAttempts = maxItemAttempts;
|
||||
let done = false;
|
||||
@ -4780,7 +4717,7 @@ export class DownloadManager extends EventEmitter {
|
||||
item.updatedAt = nowMs();
|
||||
this.emitState();
|
||||
}
|
||||
const result = await this.downloadToFile(active, unrestricted.directUrl, item.targetPath, item.totalBytes, unrestricted.skipTlsVerify);
|
||||
const result = await this.downloadToFile(active, unrestricted.directUrl, item.targetPath, item.totalBytes);
|
||||
active.resumable = result.resumable;
|
||||
if (!active.resumable && !active.nonResumableCounted) {
|
||||
active.nonResumableCounted = true;
|
||||
@ -4877,7 +4814,6 @@ export class DownloadManager extends EventEmitter {
|
||||
item.updatedAt = nowMs();
|
||||
pkg.updatedAt = nowMs();
|
||||
this.recordRunOutcome(item.id, "completed");
|
||||
logger.info(`Download fertig: ${item.fileName} (${humanSize(item.downloadedBytes)}), pkg=${pkg.name}`);
|
||||
|
||||
if (this.session.running && !active.abortController.signal.aborted) {
|
||||
void this.runPackagePostProcessing(pkg.id).catch((err) => {
|
||||
@ -5166,8 +5102,7 @@ export class DownloadManager extends EventEmitter {
|
||||
active: ActiveTask,
|
||||
directUrl: string,
|
||||
targetPath: string,
|
||||
knownTotal: number | null,
|
||||
skipTlsVerify?: boolean
|
||||
knownTotal: number | null
|
||||
): Promise<{ resumable: boolean }> {
|
||||
const item = this.session.items[active.itemId];
|
||||
if (!item) {
|
||||
@ -5213,7 +5148,6 @@ export class DownloadManager extends EventEmitter {
|
||||
const connectTimeoutMs = getDownloadConnectTimeoutMs();
|
||||
let connectTimer: NodeJS.Timeout | null = null;
|
||||
const connectAbortController = new AbortController();
|
||||
if (skipTlsVerify) acquireTlsSkip();
|
||||
try {
|
||||
if (connectTimeoutMs > 0) {
|
||||
connectTimer = setTimeout(() => {
|
||||
@ -5239,7 +5173,6 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
if (skipTlsVerify) releaseTlsSkip();
|
||||
if (connectTimer) {
|
||||
clearTimeout(connectTimer);
|
||||
}
|
||||
@ -6300,16 +6233,24 @@ export class DownloadManager extends EventEmitter {
|
||||
return false;
|
||||
}
|
||||
|
||||
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<number> {
|
||||
const findReadyStart = nowMs();
|
||||
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<void> {
|
||||
const readyArchives = await this.findReadyArchiveSets(pkg);
|
||||
const findReadyMs = nowMs() - findReadyStart;
|
||||
if (findReadyMs > 200) {
|
||||
logger.info(`findReadyArchiveSets dauerte ${(findReadyMs / 1000).toFixed(1)}s: pkg=${pkg.name}, found=${readyArchives.size}`);
|
||||
}
|
||||
if (readyArchives.size === 0) {
|
||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
|
||||
return 0;
|
||||
// Relabel completed items that are part of incomplete multi-part archives
|
||||
// from "Ausstehend" to "Warten auf Parts" so the UI accurately reflects
|
||||
// that extraction is waiting for remaining parts to finish downloading.
|
||||
const allDone = items.every((i) => i.status === "completed" || i.status === "failed" || i.status === "cancelled");
|
||||
if (!allDone) {
|
||||
for (const entry of items) {
|
||||
if (entry.status === "completed" && entry.fullStatus === "Entpacken - Ausstehend") {
|
||||
entry.fullStatus = "Entpacken - Warten auf Parts";
|
||||
entry.updatedAt = nowMs();
|
||||
}
|
||||
}
|
||||
this.emitState();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Hybrid-Extract Start: pkg=${pkg.name}, readyArchives=${readyArchives.size}`);
|
||||
@ -6349,7 +6290,7 @@ export class DownloadManager extends EventEmitter {
|
||||
// a previous hybrid round, there is nothing new to extract.
|
||||
if (hybridItems.length > 0 && hybridItems.every((item) => isExtractedLabel(item.fullStatus))) {
|
||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle ${hybridItems.length} Items bereits entpackt, überspringe`);
|
||||
return 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Filter out archives whose items are ALL already extracted so we don't
|
||||
@ -6372,7 +6313,7 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
if (readyArchives.size === 0) {
|
||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle fertigen Archive bereits entpackt`);
|
||||
return 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Resolve archive items dynamically from ALL package items (not just
|
||||
@ -6381,34 +6322,31 @@ export class DownloadManager extends EventEmitter {
|
||||
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
|
||||
resolveArchiveItemsFromList(archiveName, items);
|
||||
|
||||
// Track archives for parallel hybrid extraction progress
|
||||
const hybridResolvedItems = new Map<string, DownloadItem[]>();
|
||||
const hybridStartTimes = new Map<string, number>();
|
||||
// Track multiple active archives for parallel hybrid extraction
|
||||
const activeHybridArchiveMap = new Map<string, DownloadItem[]>();
|
||||
const hybridArchiveStartTimes = new Map<string, number>();
|
||||
let hybridLastEmitAt = 0;
|
||||
let hybridLastProgressCurrent: number | null = null;
|
||||
|
||||
// Mark items based on whether their archive is actually ready for extraction.
|
||||
// Only items whose archive is in readyArchives get "Ausstehend"; others keep
|
||||
// their current label to avoid flicker between hybrid runs.
|
||||
// "Warten auf Parts" to avoid flicker between hybrid runs.
|
||||
const allDownloaded = completedItems.length >= items.length;
|
||||
let labelsChanged = false;
|
||||
for (const entry of completedItems) {
|
||||
if (isExtractedLabel(entry.fullStatus)) {
|
||||
continue;
|
||||
}
|
||||
const belongsToReady = allDownloaded
|
||||
|| hybridFileNames.has((entry.fileName || "").toLowerCase())
|
||||
|| (entry.targetPath && hybridFileNames.has(path.basename(entry.targetPath).toLowerCase()));
|
||||
const targetLabel = belongsToReady ? "Entpacken - Ausstehend" : "Entpacken - Warten auf Parts";
|
||||
if (entry.fullStatus !== targetLabel) {
|
||||
entry.fullStatus = targetLabel;
|
||||
entry.updatedAt = nowMs();
|
||||
labelsChanged = true;
|
||||
if (allDownloaded) {
|
||||
// Everything downloaded — all remaining items will be extracted
|
||||
entry.fullStatus = "Entpacken - Ausstehend";
|
||||
} else if (hybridFileNames.has((entry.fileName || "").toLowerCase()) ||
|
||||
(entry.targetPath && hybridFileNames.has(path.basename(entry.targetPath).toLowerCase()))) {
|
||||
entry.fullStatus = "Entpacken - Ausstehend";
|
||||
} else {
|
||||
entry.fullStatus = "Entpacken - Warten auf Parts";
|
||||
}
|
||||
entry.updatedAt = nowMs();
|
||||
}
|
||||
if (labelsChanged) {
|
||||
this.emitState();
|
||||
}
|
||||
this.emitState();
|
||||
|
||||
try {
|
||||
const result = await extractPackageArchives({
|
||||
@ -6425,71 +6363,38 @@ export class DownloadManager extends EventEmitter {
|
||||
packageId,
|
||||
hybridMode: true,
|
||||
maxParallel: this.settings.maxParallelExtract || 2,
|
||||
extractCpuPriority: "high",
|
||||
extractCpuPriority: this.settings.extractCpuPriority,
|
||||
onProgress: (progress) => {
|
||||
if (progress.phase === "preparing") {
|
||||
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
||||
this.emitState();
|
||||
return;
|
||||
}
|
||||
if (progress.phase === "done") {
|
||||
hybridResolvedItems.clear();
|
||||
hybridStartTimes.clear();
|
||||
hybridLastProgressCurrent = null;
|
||||
// Do NOT mark remaining archives as "Done" here — some may have
|
||||
// failed. The post-extraction code (result.failed check) will
|
||||
// assign the correct label. Only clear the tracking maps.
|
||||
activeHybridArchiveMap.clear();
|
||||
hybridArchiveStartTimes.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
const currentCount = Math.max(0, Number(progress.current ?? 0));
|
||||
const archiveFinished = progress.archiveDone === true
|
||||
|| (hybridLastProgressCurrent !== null && currentCount > hybridLastProgressCurrent);
|
||||
hybridLastProgressCurrent = currentCount;
|
||||
|
||||
if (progress.archiveName) {
|
||||
// Resolve items for this archive if not yet tracked
|
||||
if (!hybridResolvedItems.has(progress.archiveName)) {
|
||||
const resolved = resolveArchiveItems(progress.archiveName);
|
||||
hybridResolvedItems.set(progress.archiveName, resolved);
|
||||
hybridStartTimes.set(progress.archiveName, nowMs());
|
||||
if (resolved.length === 0) {
|
||||
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
|
||||
} else {
|
||||
logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
|
||||
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
|
||||
const initAt = nowMs();
|
||||
for (const entry of resolved) {
|
||||
if (!isExtractedLabel(entry.fullStatus)) {
|
||||
entry.fullStatus = initLabel;
|
||||
entry.updatedAt = initAt;
|
||||
}
|
||||
}
|
||||
hybridLastEmitAt = initAt;
|
||||
this.emitState(true);
|
||||
}
|
||||
if (!activeHybridArchiveMap.has(progress.archiveName)) {
|
||||
activeHybridArchiveMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
|
||||
hybridArchiveStartTimes.set(progress.archiveName, nowMs());
|
||||
}
|
||||
const archItems = hybridResolvedItems.get(progress.archiveName) || [];
|
||||
const archItems = activeHybridArchiveMap.get(progress.archiveName)!;
|
||||
|
||||
// Only mark as finished on explicit archive-done signal (or real current increment),
|
||||
// never on raw 100% archivePercent, because password retries can report 100% mid-run.
|
||||
if (archiveFinished) {
|
||||
// If archive is at 100%, mark its items as done and remove from active
|
||||
if (Number(progress.archivePercent ?? 0) >= 100) {
|
||||
const doneAt = nowMs();
|
||||
const startedAt = hybridStartTimes.get(progress.archiveName) || doneAt;
|
||||
const doneLabel = progress.archiveSuccess === false
|
||||
? "Entpacken - Error"
|
||||
: formatExtractDone(doneAt - startedAt);
|
||||
const startedAt = hybridArchiveStartTimes.get(progress.archiveName) || doneAt;
|
||||
const doneLabel = formatExtractDone(doneAt - startedAt);
|
||||
for (const entry of archItems) {
|
||||
if (!isExtractedLabel(entry.fullStatus)) {
|
||||
entry.fullStatus = doneLabel;
|
||||
entry.updatedAt = doneAt;
|
||||
}
|
||||
}
|
||||
hybridResolvedItems.delete(progress.archiveName);
|
||||
hybridStartTimes.delete(progress.archiveName);
|
||||
// Show transitional label while next archive initializes
|
||||
const done = currentCount;
|
||||
if (done < progress.total) {
|
||||
pkg.postProcessLabel = `Entpacken (${done}/${progress.total}) - Naechstes Archiv...`;
|
||||
this.emitState();
|
||||
}
|
||||
activeHybridArchiveMap.delete(progress.archiveName);
|
||||
hybridArchiveStartTimes.delete(progress.archiveName);
|
||||
} else {
|
||||
// Update this archive's items with per-archive progress
|
||||
const archiveLabel = ` · ${progress.archiveName}`;
|
||||
@ -6516,18 +6421,6 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
// Update package-level label with overall extraction progress
|
||||
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
|
||||
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
|
||||
if (progress.passwordFound) {
|
||||
pkg.postProcessLabel = `Passwort gefunden · ${progress.archiveName || ""}`;
|
||||
} else if (progress.passwordAttempt && progress.passwordTotal && progress.passwordTotal > 1) {
|
||||
const pwPct = Math.round((progress.passwordAttempt / progress.passwordTotal) * 100);
|
||||
pkg.postProcessLabel = `Passwort knacken: ${pwPct}%`;
|
||||
} else {
|
||||
pkg.postProcessLabel = `Entpacken ${progress.percent}% (${currentDisplay}/${progress.total})`;
|
||||
}
|
||||
|
||||
// Throttled emit — also promote "Warten auf Parts" items that
|
||||
// completed downloading in the meantime to "Ausstehend".
|
||||
const now = nowMs();
|
||||
@ -6546,20 +6439,9 @@ export class DownloadManager extends EventEmitter {
|
||||
|
||||
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
|
||||
if (result.extracted > 0) {
|
||||
// Fire-and-forget: rename then collect MKVs in background so the
|
||||
// slot is not blocked and the next archive set can start immediately.
|
||||
void (async () => {
|
||||
try {
|
||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||
} catch (err) {
|
||||
logger.warn(`Hybrid Auto-Rename Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
|
||||
}
|
||||
try {
|
||||
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||
} catch (err) {
|
||||
logger.warn(`Hybrid MKV-Collection Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
|
||||
}
|
||||
})();
|
||||
pkg.postProcessLabel = "Renaming...";
|
||||
this.emitState();
|
||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||
}
|
||||
if (result.failed > 0) {
|
||||
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
|
||||
@ -6586,7 +6468,6 @@ export class DownloadManager extends EventEmitter {
|
||||
entry.updatedAt = updatedAt;
|
||||
}
|
||||
}
|
||||
return result.extracted;
|
||||
} catch (error) {
|
||||
const errorText = String(error || "");
|
||||
if (errorText.includes("aborted:extract")) {
|
||||
@ -6599,7 +6480,7 @@ export class DownloadManager extends EventEmitter {
|
||||
entry.updatedAt = abortAt;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
return;
|
||||
}
|
||||
logger.warn(`Hybrid-Extract Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
||||
const errorAt = nowMs();
|
||||
@ -6611,11 +6492,9 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
|
||||
const handleStart = nowMs();
|
||||
const pkg = this.session.packages[packageId];
|
||||
if (!pkg || pkg.cancelled) {
|
||||
return;
|
||||
@ -6627,7 +6506,6 @@ export class DownloadManager extends EventEmitter {
|
||||
|
||||
// Recover items whose file exists on disk but status was never set to "completed".
|
||||
// Only recover items in idle states (queued/paused), never active ones (downloading/validating).
|
||||
const recoveryStart = nowMs();
|
||||
for (const item of items) {
|
||||
if (isFinishedStatus(item.status)) {
|
||||
continue;
|
||||
@ -6667,19 +6545,16 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
const recoveryMs = nowMs() - recoveryStart;
|
||||
const success = items.filter((item) => item.status === "completed").length;
|
||||
const failed = items.filter((item) => item.status === "failed").length;
|
||||
const cancelled = items.filter((item) => item.status === "cancelled").length;
|
||||
const setupMs = nowMs() - handleStart;
|
||||
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}, setupMs=${setupMs}, recoveryMs=${recoveryMs}`);
|
||||
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}`);
|
||||
|
||||
const allDone = success + failed + cancelled >= items.length;
|
||||
|
||||
if (!allDone && this.settings.hybridExtract && this.settings.autoExtract && failed === 0 && success > 0) {
|
||||
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
||||
this.emitState();
|
||||
const hybridExtracted = await this.runHybridExtraction(packageId, pkg, items, signal);
|
||||
pkg.postProcessLabel = "Entpacken...";
|
||||
await this.runHybridExtraction(packageId, pkg, items, signal);
|
||||
if (signal?.aborted) {
|
||||
pkg.postProcessLabel = undefined;
|
||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "queued" : "paused";
|
||||
@ -6695,12 +6570,6 @@ export class DownloadManager extends EventEmitter {
|
||||
if (!this.session.packages[packageId]) {
|
||||
return; // Package was fully cleaned up
|
||||
}
|
||||
// Self-requeue if we extracted something — more archive sets may have
|
||||
// become ready while we were extracting (items that completed before
|
||||
// this task started set the requeue flag once, which was already consumed).
|
||||
if (hybridExtracted > 0) {
|
||||
this.hybridExtractRequeue.add(packageId);
|
||||
}
|
||||
pkg.postProcessLabel = undefined;
|
||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
||||
pkg.updatedAt = nowMs();
|
||||
@ -6717,10 +6586,9 @@ export class DownloadManager extends EventEmitter {
|
||||
|
||||
const completedItems = items.filter((item) => item.status === "completed");
|
||||
const alreadyMarkedExtracted = completedItems.length > 0 && completedItems.every((item) => isExtractedLabel(item.fullStatus));
|
||||
let extractedCount = 0;
|
||||
|
||||
if (this.settings.autoExtract && failed === 0 && success > 0 && !alreadyMarkedExtracted) {
|
||||
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
||||
pkg.postProcessLabel = "Entpacken...";
|
||||
pkg.status = "extracting";
|
||||
this.emitState();
|
||||
const extractionStartMs = nowMs();
|
||||
@ -6729,13 +6597,12 @@ export class DownloadManager extends EventEmitter {
|
||||
resolveArchiveItemsFromList(archiveName, completedItems);
|
||||
|
||||
let lastExtractEmitAt = 0;
|
||||
const emitExtractStatus = (text: string, force = false): void => {
|
||||
const emitExtractStatus = (_text: string, force = false): void => {
|
||||
const now = nowMs();
|
||||
if (!force && now - lastExtractEmitAt < EXTRACT_PROGRESS_EMIT_INTERVAL_MS) {
|
||||
return;
|
||||
}
|
||||
lastExtractEmitAt = now;
|
||||
pkg.postProcessLabel = text || "Entpacken...";
|
||||
this.emitState();
|
||||
};
|
||||
|
||||
@ -6775,10 +6642,9 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
}, extractTimeoutMs);
|
||||
try {
|
||||
// Track archives for parallel extraction progress
|
||||
const fullResolvedItems = new Map<string, DownloadItem[]>();
|
||||
const fullStartTimes = new Map<string, number>();
|
||||
let fullLastProgressCurrent: number | null = null;
|
||||
// Track multiple active archives for parallel extraction
|
||||
const activeArchiveItemsMap = new Map<string, DownloadItem[]>();
|
||||
const archiveStartTimes = new Map<string, number>();
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir: pkg.outputDir,
|
||||
@ -6790,74 +6656,40 @@ export class DownloadManager extends EventEmitter {
|
||||
passwordList: this.settings.archivePasswordList,
|
||||
signal: extractAbortController.signal,
|
||||
packageId,
|
||||
skipPostCleanup: true,
|
||||
maxParallel: this.settings.maxParallelExtract || 2,
|
||||
// All downloads finished — use NORMAL OS priority so extraction runs at
|
||||
// full speed (matching manual 7-Zip/WinRAR speed).
|
||||
extractCpuPriority: "high",
|
||||
extractCpuPriority: this.settings.extractCpuPriority,
|
||||
onProgress: (progress) => {
|
||||
if (progress.phase === "preparing") {
|
||||
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
||||
this.emitState();
|
||||
return;
|
||||
}
|
||||
if (progress.phase === "done") {
|
||||
fullResolvedItems.clear();
|
||||
fullStartTimes.clear();
|
||||
fullLastProgressCurrent = null;
|
||||
// Do NOT mark remaining archives as "Done" here — some may have
|
||||
// failed. The post-extraction code (result.failed check) will
|
||||
// assign the correct label. Only clear the tracking maps.
|
||||
activeArchiveItemsMap.clear();
|
||||
archiveStartTimes.clear();
|
||||
emitExtractStatus("Entpacken 100%", true);
|
||||
return;
|
||||
}
|
||||
|
||||
const currentCount = Math.max(0, Number(progress.current ?? 0));
|
||||
const archiveFinished = progress.archiveDone === true
|
||||
|| (fullLastProgressCurrent !== null && currentCount > fullLastProgressCurrent);
|
||||
fullLastProgressCurrent = currentCount;
|
||||
|
||||
if (progress.archiveName) {
|
||||
// Resolve items for this archive if not yet tracked
|
||||
if (!fullResolvedItems.has(progress.archiveName)) {
|
||||
const resolved = resolveArchiveItems(progress.archiveName);
|
||||
fullResolvedItems.set(progress.archiveName, resolved);
|
||||
fullStartTimes.set(progress.archiveName, nowMs());
|
||||
if (resolved.length === 0) {
|
||||
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
|
||||
} else {
|
||||
logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
|
||||
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
|
||||
const initAt = nowMs();
|
||||
for (const entry of resolved) {
|
||||
if (!isExtractedLabel(entry.fullStatus)) {
|
||||
entry.fullStatus = initLabel;
|
||||
entry.updatedAt = initAt;
|
||||
}
|
||||
}
|
||||
emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true);
|
||||
}
|
||||
if (!activeArchiveItemsMap.has(progress.archiveName)) {
|
||||
activeArchiveItemsMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
|
||||
archiveStartTimes.set(progress.archiveName, nowMs());
|
||||
}
|
||||
const archiveItems = fullResolvedItems.get(progress.archiveName) || [];
|
||||
const archiveItems = activeArchiveItemsMap.get(progress.archiveName)!;
|
||||
|
||||
// Only finalize on explicit archive completion (or real current increment),
|
||||
// not on plain 100% archivePercent.
|
||||
if (archiveFinished) {
|
||||
// If archive is at 100%, mark its items as done and remove from active
|
||||
if (Number(progress.archivePercent ?? 0) >= 100) {
|
||||
const doneAt = nowMs();
|
||||
const startedAt = fullStartTimes.get(progress.archiveName) || doneAt;
|
||||
const doneLabel = progress.archiveSuccess === false
|
||||
? "Entpacken - Error"
|
||||
: formatExtractDone(doneAt - startedAt);
|
||||
const startedAt = archiveStartTimes.get(progress.archiveName) || doneAt;
|
||||
const doneLabel = formatExtractDone(doneAt - startedAt);
|
||||
for (const entry of archiveItems) {
|
||||
if (!isExtractedLabel(entry.fullStatus)) {
|
||||
entry.fullStatus = doneLabel;
|
||||
entry.updatedAt = doneAt;
|
||||
}
|
||||
}
|
||||
fullResolvedItems.delete(progress.archiveName);
|
||||
fullStartTimes.delete(progress.archiveName);
|
||||
// Show transitional label while next archive initializes
|
||||
const done = currentCount;
|
||||
if (done < progress.total) {
|
||||
emitExtractStatus(`Entpacken (${done}/${progress.total}) - Naechstes Archiv...`, true);
|
||||
}
|
||||
activeArchiveItemsMap.delete(progress.archiveName);
|
||||
archiveStartTimes.delete(progress.archiveName);
|
||||
} else {
|
||||
// Update this archive's items with per-archive progress
|
||||
const archiveTag = progress.archiveName ? ` · ${progress.archiveName}` : "";
|
||||
@ -6889,7 +6721,7 @@ export class DownloadManager extends EventEmitter {
|
||||
const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000
|
||||
? ` · ${Math.floor(progress.elapsedMs / 1000)}s`
|
||||
: "";
|
||||
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
|
||||
const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
|
||||
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
|
||||
let overallLabel: string;
|
||||
if (progress.passwordFound) {
|
||||
@ -6904,10 +6736,13 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
});
|
||||
logger.info(`Post-Processing Entpacken Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}, lastError=${result.lastError || ""}`);
|
||||
extractedCount = result.extracted;
|
||||
|
||||
// Auto-rename wird in runDeferredPostExtraction ausgeführt (im Hintergrund),
|
||||
// damit der Slot sofort freigegeben wird.
|
||||
// Auto-rename even when some archives failed — successfully extracted files still need renaming
|
||||
if (result.extracted > 0) {
|
||||
pkg.postProcessLabel = "Renaming...";
|
||||
this.emitState();
|
||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||
}
|
||||
|
||||
if (result.failed > 0) {
|
||||
const reason = compactErrorText(result.lastError || "Entpacken fehlgeschlagen");
|
||||
@ -7008,6 +6843,20 @@ export class DownloadManager extends EventEmitter {
|
||||
this.recordPackageHistory(packageId, pkg, items);
|
||||
}
|
||||
|
||||
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
|
||||
pkg.postProcessLabel = "Aufräumen...";
|
||||
this.emitState();
|
||||
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
|
||||
if (removedArchives > 0) {
|
||||
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
|
||||
pkg.postProcessLabel = "Verschiebe MKVs...";
|
||||
this.emitState();
|
||||
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||
}
|
||||
if (this.runPackageIds.has(packageId)) {
|
||||
if (pkg.status === "completed" || pkg.status === "failed") {
|
||||
this.runCompletedPackages.add(packageId);
|
||||
@ -7017,141 +6866,9 @@ export class DownloadManager extends EventEmitter {
|
||||
}
|
||||
pkg.postProcessLabel = undefined;
|
||||
pkg.updatedAt = nowMs();
|
||||
logger.info(`Post-Processing Ende: pkg=${pkg.name}, status=${pkg.status} (deferred work wird im Hintergrund ausgeführt)`);
|
||||
logger.info(`Post-Processing Ende: pkg=${pkg.name}, status=${pkg.status}`);
|
||||
|
||||
// Deferred post-extraction: Rename, MKV-Sammlung, Cleanup laufen im Hintergrund,
|
||||
// damit der Post-Process-Slot sofort freigegeben wird und das nächste Pack
|
||||
// ohne 10–15 Sekunden Pause entpacken kann.
|
||||
void this.runDeferredPostExtraction(packageId, pkg, success, failed, alreadyMarkedExtracted, extractedCount);
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs slow post-extraction work (rename, MKV collection, cleanup) in the background
|
||||
* so the post-process slot is released immediately and the next pack can start unpacking.
|
||||
*/
|
||||
private async runDeferredPostExtraction(
|
||||
packageId: string,
|
||||
pkg: PackageEntry,
|
||||
success: number,
|
||||
failed: number,
|
||||
alreadyMarkedExtracted: boolean,
|
||||
extractedCount: number
|
||||
): Promise<void> {
|
||||
try {
|
||||
// ── Nested extraction: extract archives found inside the extracted output ──
|
||||
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0 && this.settings.autoExtract) {
|
||||
const nestedBlacklist = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
|
||||
const nestedCandidates = (await findArchiveCandidates(pkg.extractDir))
|
||||
.filter((p) => !nestedBlacklist.test(p));
|
||||
if (nestedCandidates.length > 0) {
|
||||
pkg.postProcessLabel = "Nested Entpacken...";
|
||||
this.emitState();
|
||||
logger.info(`Deferred Nested-Extraction: ${nestedCandidates.length} Archive in ${pkg.extractDir}`);
|
||||
const nestedResult = await extractPackageArchives({
|
||||
packageDir: pkg.extractDir,
|
||||
targetDir: pkg.extractDir,
|
||||
cleanupMode: this.settings.cleanupMode,
|
||||
conflictMode: this.settings.extractConflictMode,
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
passwordList: this.settings.archivePasswordList,
|
||||
packageId,
|
||||
onlyArchives: new Set(nestedCandidates.map((p) => process.platform === "win32" ? path.resolve(p).toLowerCase() : path.resolve(p))),
|
||||
maxParallel: this.settings.maxParallelExtract || 2,
|
||||
extractCpuPriority: this.settings.extractCpuPriority,
|
||||
});
|
||||
extractedCount += nestedResult.extracted;
|
||||
logger.info(`Deferred Nested-Extraction Ende: extracted=${nestedResult.extracted}, failed=${nestedResult.failed}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Auto-Rename ──
|
||||
if (extractedCount > 0 || alreadyMarkedExtracted) {
|
||||
pkg.postProcessLabel = "Renaming...";
|
||||
this.emitState();
|
||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||
}
|
||||
|
||||
// ── Archive cleanup (source archives in outputDir) ──
|
||||
// Also run when hybrid extraction already handled everything (extractedCount=0
|
||||
// but alreadyMarkedExtracted=true) so archives are still cleaned up.
|
||||
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0 && this.settings.cleanupMode !== "none") {
|
||||
pkg.postProcessLabel = "Aufräumen...";
|
||||
this.emitState();
|
||||
const sourceAndTargetEqual = path.resolve(pkg.outputDir).toLowerCase() === path.resolve(pkg.extractDir).toLowerCase();
|
||||
if (!sourceAndTargetEqual) {
|
||||
const candidates = await findArchiveCandidates(pkg.outputDir);
|
||||
if (candidates.length > 0) {
|
||||
const removed = await cleanupArchives(candidates, this.settings.cleanupMode);
|
||||
if (removed > 0) {
|
||||
logger.info(`Deferred Archive-Cleanup: pkg=${pkg.name}, entfernt=${removed}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Hybrid archive cleanup (wenn bereits als extracted markiert) ──
|
||||
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
|
||||
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
|
||||
if (removedArchives > 0) {
|
||||
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Link/Sample artifact removal ──
|
||||
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0) {
|
||||
if (this.settings.removeLinkFilesAfterExtract) {
|
||||
const removedLinks = await removeDownloadLinkArtifacts(pkg.extractDir);
|
||||
if (removedLinks > 0) {
|
||||
logger.info(`Deferred Link-Cleanup: pkg=${pkg.name}, entfernt=${removedLinks}`);
|
||||
}
|
||||
}
|
||||
if (this.settings.removeSamplesAfterExtract) {
|
||||
const removedSamples = await removeSampleArtifacts(pkg.extractDir);
|
||||
if (removedSamples.files > 0 || removedSamples.dirs > 0) {
|
||||
logger.info(`Deferred Sample-Cleanup: pkg=${pkg.name}, files=${removedSamples.files}, dirs=${removedSamples.dirs}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Resume state cleanup ──
|
||||
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0) {
|
||||
await clearExtractResumeState(pkg.outputDir, packageId);
|
||||
// Backward compatibility: older versions used .rd_extract_progress.json without package suffix.
|
||||
await clearExtractResumeState(pkg.outputDir);
|
||||
}
|
||||
|
||||
// ── Empty directory tree removal ──
|
||||
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0 && this.settings.cleanupMode === "delete") {
|
||||
if (!(await hasAnyFilesRecursive(pkg.outputDir))) {
|
||||
const removedDirs = await removeEmptyDirectoryTree(pkg.outputDir);
|
||||
if (removedDirs > 0) {
|
||||
logger.info(`Deferred leere Download-Ordner entfernt: pkg=${pkg.name}, dirs=${removedDirs}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── MKV collection ──
|
||||
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
|
||||
pkg.postProcessLabel = "Verschiebe MKVs...";
|
||||
this.emitState();
|
||||
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||
}
|
||||
|
||||
pkg.postProcessLabel = undefined;
|
||||
pkg.updatedAt = nowMs();
|
||||
this.persistSoon();
|
||||
this.emitState();
|
||||
|
||||
this.applyPackageDoneCleanup(packageId);
|
||||
} catch (error) {
|
||||
logger.warn(`Deferred Post-Extraction Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
||||
} finally {
|
||||
pkg.postProcessLabel = undefined;
|
||||
pkg.updatedAt = nowMs();
|
||||
this.persistSoon();
|
||||
this.emitState();
|
||||
}
|
||||
this.applyPackageDoneCleanup(packageId);
|
||||
}
|
||||
|
||||
private applyPackageDoneCleanup(packageId: string): void {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -7,7 +7,6 @@ import { IPC_CHANNELS } from "../shared/ipc";
|
||||
import { getLogFilePath, logger } from "./logger";
|
||||
import { APP_NAME } from "./constants";
|
||||
import { extractHttpLinksFromText } from "./utils";
|
||||
import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor";
|
||||
|
||||
/* ── IPC validation helpers ────────────────────────────────────── */
|
||||
function validateString(value: unknown, name: string): string {
|
||||
@ -82,7 +81,7 @@ function createWindow(): BrowserWindow {
|
||||
responseHeaders: {
|
||||
...details.responseHeaders,
|
||||
"Content-Security-Policy": [
|
||||
"default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; connect-src 'self' https://api.real-debrid.com https://codeberg.org https://bestdebrid.com https://api.alldebrid.com https://www.mega-debrid.eu https://git.24-music.de https://ddownload.com https://ddl.to"
|
||||
"default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; connect-src 'self' https://api.real-debrid.com https://codeberg.org https://bestdebrid.com https://api.alldebrid.com https://www.mega-debrid.eu"
|
||||
]
|
||||
}
|
||||
});
|
||||
@ -189,12 +188,7 @@ function startClipboardWatcher(): void {
|
||||
}
|
||||
lastClipboardText = normalizeClipboardText(clipboard.readText());
|
||||
clipboardTimer = setInterval(() => {
|
||||
let text: string;
|
||||
try {
|
||||
text = normalizeClipboardText(clipboard.readText());
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
const text = normalizeClipboardText(clipboard.readText());
|
||||
if (text === lastClipboardText || !text.trim()) {
|
||||
return;
|
||||
}
|
||||
@ -487,7 +481,6 @@ app.on("second-instance", () => {
|
||||
});
|
||||
|
||||
app.whenReady().then(() => {
|
||||
cleanupStaleSubstDrives();
|
||||
registerIpcHandlers();
|
||||
mainWindow = createWindow();
|
||||
bindMainWindowLifecycle(mainWindow);
|
||||
@ -500,9 +493,6 @@ app.whenReady().then(() => {
|
||||
bindMainWindowLifecycle(mainWindow);
|
||||
}
|
||||
});
|
||||
}).catch((error) => {
|
||||
console.error("App startup failed:", error);
|
||||
app.quit();
|
||||
});
|
||||
|
||||
app.on("window-all-closed", () => {
|
||||
@ -515,7 +505,6 @@ app.on("before-quit", () => {
|
||||
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
|
||||
stopClipboardWatcher();
|
||||
destroyTray();
|
||||
shutdownDaemon();
|
||||
try {
|
||||
controller.shutdown();
|
||||
} catch (error) {
|
||||
|
||||
@ -228,23 +228,22 @@ export class MegaWebFallback {
|
||||
}
|
||||
|
||||
public async unrestrict(link: string, signal?: AbortSignal): Promise<UnrestrictedLink | null> {
|
||||
const overallSignal = withTimeoutSignal(signal, 180000);
|
||||
return this.runExclusive(async () => {
|
||||
throwIfAborted(overallSignal);
|
||||
throwIfAborted(signal);
|
||||
const creds = this.getCredentials();
|
||||
if (!creds.login.trim() || !creds.password.trim()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!this.cookie || Date.now() - this.cookieSetAt > 20 * 60 * 1000) {
|
||||
await this.login(creds.login, creds.password, overallSignal);
|
||||
await this.login(creds.login, creds.password, signal);
|
||||
}
|
||||
|
||||
const generated = await this.generate(link, overallSignal);
|
||||
const generated = await this.generate(link, signal);
|
||||
if (!generated) {
|
||||
this.cookie = "";
|
||||
await this.login(creds.login, creds.password, overallSignal);
|
||||
const retry = await this.generate(link, overallSignal);
|
||||
await this.login(creds.login, creds.password, signal);
|
||||
const retry = await this.generate(link, signal);
|
||||
if (!retry) {
|
||||
return null;
|
||||
}
|
||||
@ -262,7 +261,7 @@ export class MegaWebFallback {
|
||||
fileSize: null,
|
||||
retriesUsed: 0
|
||||
};
|
||||
}, overallSignal);
|
||||
}, signal);
|
||||
}
|
||||
|
||||
public invalidateSession(): void {
|
||||
|
||||
@ -8,7 +8,6 @@ export interface UnrestrictedLink {
|
||||
directUrl: string;
|
||||
fileSize: number | null;
|
||||
retriesUsed: number;
|
||||
skipTlsVerify?: boolean;
|
||||
}
|
||||
|
||||
function shouldRetryStatus(status: number): boolean {
|
||||
|
||||
@ -76,12 +76,7 @@ async function cleanupOldSessionLogs(dir: string, maxAgeDays: number): Promise<v
|
||||
|
||||
export function initSessionLog(baseDir: string): void {
|
||||
sessionLogsDir = path.join(baseDir, "session-logs");
|
||||
try {
|
||||
fs.mkdirSync(sessionLogsDir, { recursive: true });
|
||||
} catch {
|
||||
sessionLogsDir = null;
|
||||
return;
|
||||
}
|
||||
fs.mkdirSync(sessionLogsDir, { recursive: true });
|
||||
|
||||
const timestamp = formatTimestamp();
|
||||
sessionLogPath = path.join(sessionLogsDir, `session_${timestamp}.txt`);
|
||||
|
||||
@ -5,8 +5,8 @@ import { AppSettings, BandwidthScheduleEntry, DebridProvider, DownloadItem, Down
|
||||
import { defaultSettings } from "./constants";
|
||||
import { logger } from "./logger";
|
||||
|
||||
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
|
||||
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
|
||||
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
|
||||
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
|
||||
const VALID_CLEANUP_MODES = new Set(["none", "trash", "delete"]);
|
||||
const VALID_CONFLICT_MODES = new Set(["overwrite", "skip", "rename", "ask"]);
|
||||
const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]);
|
||||
@ -17,7 +17,7 @@ const VALID_PACKAGE_PRIORITIES = new Set<string>(["high", "normal", "low"]);
|
||||
const VALID_DOWNLOAD_STATUSES = new Set<DownloadStatus>([
|
||||
"queued", "validating", "downloading", "paused", "reconnect_wait", "extracting", "integrity_check", "completed", "failed", "cancelled"
|
||||
]);
|
||||
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
|
||||
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
|
||||
const VALID_ONLINE_STATUSES = new Set(["online", "offline", "checking"]);
|
||||
|
||||
function asText(value: unknown): string {
|
||||
@ -113,8 +113,7 @@ export function normalizeSettings(settings: AppSettings): AppSettings {
|
||||
allDebridToken: asText(settings.allDebridToken),
|
||||
ddownloadLogin: asText(settings.ddownloadLogin),
|
||||
ddownloadPassword: asText(settings.ddownloadPassword),
|
||||
oneFichierApiKey: asText(settings.oneFichierApiKey),
|
||||
archivePasswordList: String(settings.archivePasswordList ?? "").replace(/\r\n|\r/g, "\n"),
|
||||
archivePasswordList: String(settings.archivePasswordList ?? "").replace(/\r\n/g, "\n"),
|
||||
rememberToken: Boolean(settings.rememberToken),
|
||||
providerPrimary: settings.providerPrimary,
|
||||
providerSecondary: settings.providerSecondary,
|
||||
@ -205,8 +204,7 @@ function sanitizeCredentialPersistence(settings: AppSettings): AppSettings {
|
||||
bestToken: "",
|
||||
allDebridToken: "",
|
||||
ddownloadLogin: "",
|
||||
ddownloadPassword: "",
|
||||
oneFichierApiKey: ""
|
||||
ddownloadPassword: ""
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@ -794,8 +794,7 @@ async function downloadFile(url: string, targetPath: string, onProgress?: Update
|
||||
};
|
||||
|
||||
const reader = response.body.getReader();
|
||||
const tempPath = targetPath + ".tmp";
|
||||
const writeStream = fs.createWriteStream(tempPath);
|
||||
const chunks: Buffer[] = [];
|
||||
|
||||
try {
|
||||
resetIdleTimer();
|
||||
@ -809,39 +808,27 @@ async function downloadFile(url: string, targetPath: string, onProgress?: Update
|
||||
break;
|
||||
}
|
||||
const buf = Buffer.from(value.buffer, value.byteOffset, value.byteLength);
|
||||
if (!writeStream.write(buf)) {
|
||||
await new Promise<void>((resolve) => writeStream.once("drain", resolve));
|
||||
}
|
||||
chunks.push(buf);
|
||||
downloadedBytes += buf.byteLength;
|
||||
resetIdleTimer();
|
||||
emitDownloadProgress(false);
|
||||
}
|
||||
} catch (error) {
|
||||
writeStream.destroy();
|
||||
await fs.promises.rm(tempPath, { force: true }).catch(() => {});
|
||||
throw error;
|
||||
} finally {
|
||||
clearIdleTimer();
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
writeStream.end(() => resolve());
|
||||
writeStream.on("error", reject);
|
||||
});
|
||||
|
||||
if (idleTimedOut) {
|
||||
await fs.promises.rm(tempPath, { force: true }).catch(() => {});
|
||||
throw new Error(`Update Download Body Timeout nach ${Math.ceil(idleTimeoutMs / 1000)}s`);
|
||||
}
|
||||
|
||||
if (totalBytes && downloadedBytes !== totalBytes) {
|
||||
await fs.promises.rm(tempPath, { force: true }).catch(() => {});
|
||||
throw new Error(`Update Download unvollständig (${downloadedBytes} / ${totalBytes} Bytes)`);
|
||||
const fileBuffer = Buffer.concat(chunks);
|
||||
if (totalBytes && fileBuffer.byteLength !== totalBytes) {
|
||||
throw new Error(`Update Download unvollständig (${fileBuffer.byteLength} / ${totalBytes} Bytes)`);
|
||||
}
|
||||
|
||||
await fs.promises.rename(tempPath, targetPath);
|
||||
await fs.promises.writeFile(targetPath, fileBuffer);
|
||||
emitDownloadProgress(true);
|
||||
logger.info(`Update-Download abgeschlossen: ${targetPath} (${downloadedBytes} Bytes)`);
|
||||
logger.info(`Update-Download abgeschlossen: ${targetPath} (${fileBuffer.byteLength} Bytes)`);
|
||||
|
||||
return { expectedBytes: totalBytes };
|
||||
}
|
||||
|
||||
@ -4,7 +4,6 @@ import {
|
||||
AppSettings,
|
||||
DuplicatePolicy,
|
||||
HistoryEntry,
|
||||
PackagePriority,
|
||||
SessionStats,
|
||||
StartConflictEntry,
|
||||
StartConflictResolutionResult,
|
||||
@ -57,7 +56,7 @@ const api: ElectronApi = {
|
||||
getHistory: (): Promise<HistoryEntry[]> => ipcRenderer.invoke(IPC_CHANNELS.GET_HISTORY),
|
||||
clearHistory: (): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.CLEAR_HISTORY),
|
||||
removeHistoryEntry: (entryId: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.REMOVE_HISTORY_ENTRY, entryId),
|
||||
setPackagePriority: (packageId: string, priority: PackagePriority): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SET_PACKAGE_PRIORITY, packageId, priority),
|
||||
setPackagePriority: (packageId: string, priority: string): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SET_PACKAGE_PRIORITY, packageId, priority),
|
||||
skipItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.SKIP_ITEMS, itemIds),
|
||||
resetItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.RESET_ITEMS, itemIds),
|
||||
startItems: (itemIds: string[]): Promise<void> => ipcRenderer.invoke(IPC_CHANNELS.START_ITEMS, itemIds),
|
||||
|
||||
@ -36,7 +36,6 @@ interface ConfirmPromptState {
|
||||
message: string;
|
||||
confirmLabel: string;
|
||||
danger?: boolean;
|
||||
details?: string;
|
||||
}
|
||||
|
||||
interface ContextMenuState {
|
||||
@ -62,7 +61,7 @@ const emptyStats = (): DownloadStats => ({
|
||||
|
||||
const emptySnapshot = (): UiSnapshot => ({
|
||||
settings: {
|
||||
token: "", megaLogin: "", megaPassword: "", bestToken: "", allDebridToken: "", ddownloadLogin: "", ddownloadPassword: "", oneFichierApiKey: "",
|
||||
token: "", megaLogin: "", megaPassword: "", bestToken: "", allDebridToken: "",
|
||||
archivePasswordList: "",
|
||||
rememberToken: true, providerPrimary: "realdebrid", providerSecondary: "megadebrid",
|
||||
providerTertiary: "bestdebrid", autoProviderFallback: true, outputDir: "", packageName: "",
|
||||
@ -94,7 +93,7 @@ const cleanupLabels: Record<string, string> = {
|
||||
const AUTO_RENDER_PACKAGE_LIMIT = 260;
|
||||
|
||||
const providerLabels: Record<DebridProvider, string> = {
|
||||
realdebrid: "Real-Debrid", megadebrid: "Mega-Debrid", bestdebrid: "BestDebrid", alldebrid: "AllDebrid", ddownload: "DDownload", onefichier: "1Fichier"
|
||||
realdebrid: "Real-Debrid", megadebrid: "Mega-Debrid", bestdebrid: "BestDebrid", alldebrid: "AllDebrid", ddownload: "DDownload"
|
||||
};
|
||||
|
||||
function formatDateTime(ts: number): string {
|
||||
@ -116,6 +115,15 @@ function extractHoster(url: string): string {
|
||||
} catch { return ""; }
|
||||
}
|
||||
|
||||
function formatHoster(item: DownloadItem): string {
|
||||
const hoster = extractHoster(item.url);
|
||||
const label = hoster || "-";
|
||||
if (item.provider) {
|
||||
return `${label} via ${providerLabels[item.provider]}`;
|
||||
}
|
||||
return label;
|
||||
}
|
||||
|
||||
const settingsSubTabs: { key: SettingsSubTab; label: string }[] = [
|
||||
{ key: "allgemein", label: "Allgemein" },
|
||||
{ key: "accounts", label: "Accounts" },
|
||||
@ -920,21 +928,11 @@ export function App(): ReactElement {
|
||||
if (settingsDraft.allDebridToken.trim()) {
|
||||
list.push("alldebrid");
|
||||
}
|
||||
if ((settingsDraft.ddownloadLogin || "").trim() && (settingsDraft.ddownloadPassword || "").trim()) {
|
||||
list.push("ddownload");
|
||||
}
|
||||
return list;
|
||||
}, [settingsDraft.token, settingsDraft.megaLogin, settingsDraft.megaPassword, settingsDraft.bestToken, settingsDraft.allDebridToken]);
|
||||
|
||||
// DDownload is a direct file hoster (not a debrid service) and is used automatically
|
||||
// for ddownload.com/ddl.to URLs. It counts as a configured account but does not
|
||||
// appear in the primary/secondary/tertiary provider dropdowns.
|
||||
const hasDdownloadAccount = useMemo(() =>
|
||||
Boolean((settingsDraft.ddownloadLogin || "").trim() && (settingsDraft.ddownloadPassword || "").trim()),
|
||||
[settingsDraft.ddownloadLogin, settingsDraft.ddownloadPassword]);
|
||||
|
||||
const hasOneFichierAccount = useMemo(() =>
|
||||
Boolean((settingsDraft.oneFichierApiKey || "").trim()),
|
||||
[settingsDraft.oneFichierApiKey]);
|
||||
|
||||
const totalConfiguredAccounts = configuredProviders.length + (hasDdownloadAccount ? 1 : 0) + (hasOneFichierAccount ? 1 : 0);
|
||||
}, [settingsDraft.token, settingsDraft.megaLogin, settingsDraft.megaPassword, settingsDraft.bestToken, settingsDraft.allDebridToken, settingsDraft.ddownloadLogin, settingsDraft.ddownloadPassword]);
|
||||
|
||||
const primaryProviderValue: DebridProvider = useMemo(() => {
|
||||
if (configuredProviders.includes(settingsDraft.providerPrimary)) {
|
||||
@ -995,36 +993,15 @@ export function App(): ReactElement {
|
||||
if (source === "manual") { showToast(`Kein Update verfügbar (v${result.currentVersion})`, 2000); }
|
||||
return;
|
||||
}
|
||||
let changelogText = "";
|
||||
let changelogBlock = "";
|
||||
if (result.releaseNotes) {
|
||||
const lines = result.releaseNotes.split("\n");
|
||||
const compactLines: string[] = [];
|
||||
for (const line of lines) {
|
||||
if (/^\s{2,}[-*]/.test(line)) continue;
|
||||
if (/^#{1,6}\s/.test(line)) continue;
|
||||
if (!line.trim()) continue;
|
||||
let clean = line
|
||||
.replace(/\*\*([^*]+)\*\*/g, "$1")
|
||||
.replace(/\*([^*]+)\*/g, "$1")
|
||||
.replace(/`([^`]+)`/g, "$1")
|
||||
.replace(/^\s*[-*]\s+/, "- ")
|
||||
.trim();
|
||||
const colonIdx = clean.indexOf(":");
|
||||
if (colonIdx > 0 && colonIdx < clean.length - 1) {
|
||||
const afterColon = clean.slice(colonIdx + 1).trim();
|
||||
if (afterColon.length > 60) {
|
||||
clean = clean.slice(0, colonIdx + 1).trim();
|
||||
}
|
||||
}
|
||||
if (clean) compactLines.push(clean);
|
||||
}
|
||||
changelogText = compactLines.join("\n");
|
||||
const notes = result.releaseNotes.length > 500 ? `${result.releaseNotes.slice(0, 500)}…` : result.releaseNotes;
|
||||
changelogBlock = `\n\n--- Changelog ---\n${notes}`;
|
||||
}
|
||||
const approved = await askConfirmPrompt({
|
||||
title: "Update verfügbar",
|
||||
message: `${result.latestTag} (aktuell v${result.currentVersion})\n\nJetzt automatisch herunterladen und installieren?`,
|
||||
confirmLabel: "Jetzt installieren",
|
||||
details: changelogText || undefined
|
||||
message: `${result.latestTag} (aktuell v${result.currentVersion})${changelogBlock}\n\nJetzt automatisch herunterladen und installieren?`,
|
||||
confirmLabel: "Jetzt installieren"
|
||||
});
|
||||
if (!mountedRef.current) {
|
||||
return;
|
||||
@ -1135,7 +1112,7 @@ export function App(): ReactElement {
|
||||
|
||||
const onStartDownloads = async (): Promise<void> => {
|
||||
await performQuickAction(async () => {
|
||||
if (totalConfiguredAccounts === 0) {
|
||||
if (configuredProviders.length === 0) {
|
||||
setTab("settings");
|
||||
showToast("Bitte zuerst mindestens einen Hoster-Account eintragen", 3000);
|
||||
return;
|
||||
@ -1865,12 +1842,10 @@ export function App(): ReactElement {
|
||||
|
||||
const executeDeleteSelection = useCallback((ids: Set<string>): void => {
|
||||
const current = snapshotRef.current;
|
||||
const promises: Promise<void>[] = [];
|
||||
for (const id of ids) {
|
||||
if (current.session.items[id]) promises.push(window.rd.removeItem(id));
|
||||
else if (current.session.packages[id]) promises.push(window.rd.cancelPackage(id));
|
||||
if (current.session.items[id]) void window.rd.removeItem(id);
|
||||
else if (current.session.packages[id]) void window.rd.cancelPackage(id);
|
||||
}
|
||||
void Promise.all(promises).catch(() => {});
|
||||
setSelectedIds(new Set());
|
||||
}, []);
|
||||
|
||||
@ -1913,28 +1888,28 @@ export function App(): ReactElement {
|
||||
|
||||
const onExportBackup = async (): Promise<void> => {
|
||||
closeMenus();
|
||||
await performQuickAction(async () => {
|
||||
try {
|
||||
const result = await window.rd.exportBackup();
|
||||
if (result.saved) {
|
||||
showToast("Sicherung exportiert");
|
||||
}
|
||||
}, (error) => {
|
||||
} catch (error) {
|
||||
showToast(`Sicherung fehlgeschlagen: ${String(error)}`, 2600);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const onImportBackup = async (): Promise<void> => {
|
||||
closeMenus();
|
||||
await performQuickAction(async () => {
|
||||
try {
|
||||
const result = await window.rd.importBackup();
|
||||
if (result.restored) {
|
||||
showToast(result.message, 4000);
|
||||
} else if (result.message !== "Abgebrochen") {
|
||||
showToast(`Sicherung laden fehlgeschlagen: ${result.message}`, 3000);
|
||||
}
|
||||
}, (error) => {
|
||||
} catch (error) {
|
||||
showToast(`Sicherung laden fehlgeschlagen: ${String(error)}`, 2600);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const onMenuRestart = (): void => {
|
||||
@ -2245,10 +2220,10 @@ export function App(): ReactElement {
|
||||
</button>
|
||||
{openMenu === "hilfe" && (
|
||||
<div className="menu-dropdown">
|
||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openLog().catch(() => {}); }}>
|
||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openLog(); }}>
|
||||
<span>Log öffnen</span>
|
||||
</button>
|
||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openSessionLog().catch(() => {}); }}>
|
||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void window.rd.openSessionLog(); }}>
|
||||
<span>Session-Log öffnen</span>
|
||||
</button>
|
||||
<button className="menu-dropdown-item" onClick={() => { closeMenus(); void onCheckUpdates(); }}>
|
||||
@ -2268,7 +2243,7 @@ export function App(): ReactElement {
|
||||
onClick={() => {
|
||||
if (snapshot.session.paused) {
|
||||
setSnapshot((prev) => ({ ...prev, session: { ...prev.session, paused: false } }));
|
||||
void window.rd.togglePause().catch(() => {});
|
||||
void window.rd.togglePause();
|
||||
} else {
|
||||
void onStartDownloads();
|
||||
}
|
||||
@ -2282,7 +2257,7 @@ export function App(): ReactElement {
|
||||
disabled={!snapshot.canPause || snapshot.session.paused}
|
||||
onClick={() => {
|
||||
setSnapshot((prev) => ({ ...prev, session: { ...prev.session, paused: true } }));
|
||||
void window.rd.togglePause().catch(() => {});
|
||||
void window.rd.togglePause();
|
||||
}}
|
||||
>
|
||||
<svg viewBox="0 0 24 24" width="18" height="18"><rect x="5" y="3" width="4.5" height="18" rx="1" fill="currentColor" /><rect x="14.5" y="3" width="4.5" height="18" rx="1" fill="currentColor" /></svg>
|
||||
@ -2404,7 +2379,7 @@ export function App(): ReactElement {
|
||||
newOrder.splice(toIdx, 0, dragColId);
|
||||
setColumnOrder(newOrder);
|
||||
setDragColId(null);
|
||||
void window.rd.updateSettings({ columnOrder: newOrder }).catch(() => {});
|
||||
void window.rd.updateSettings({ columnOrder: newOrder });
|
||||
}}
|
||||
onDragEnd={() => { setDragColId(null); setDropTargetCol(null); }}
|
||||
onClick={sortCol ? () => {
|
||||
@ -2498,7 +2473,7 @@ export function App(): ReactElement {
|
||||
: `${historyEntries.length} Paket${historyEntries.length !== 1 ? "e" : ""} im Verlauf`}
|
||||
</span>
|
||||
{selectedHistoryIds.size > 0 && (
|
||||
<button className="btn danger" onClick={() => {
|
||||
<button className="btn btn-danger" onClick={() => {
|
||||
const idSet = new Set(selectedHistoryIds);
|
||||
void Promise.all([...idSet].map(id => window.rd.removeHistoryEntry(id))).then(() => {
|
||||
setHistoryEntries((prev) => prev.filter((e) => !idSet.has(e.id)));
|
||||
@ -2509,7 +2484,7 @@ export function App(): ReactElement {
|
||||
}}>Ausgewählte entfernen ({selectedHistoryIds.size})</button>
|
||||
)}
|
||||
{historyEntries.length > 0 && (
|
||||
<button className="btn danger" onClick={() => { void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); }).catch(() => {}); }}>Verlauf leeren</button>
|
||||
<button className="btn btn-danger" onClick={() => { void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); }); }}>Verlauf leeren</button>
|
||||
)}
|
||||
</div>
|
||||
{historyEntries.length === 0 && <div className="empty">Noch keine abgeschlossenen Pakete im Verlauf.</div>}
|
||||
@ -2596,7 +2571,7 @@ export function App(): ReactElement {
|
||||
<span>{entry.status === "completed" ? "Abgeschlossen" : "Gelöscht"}</span>
|
||||
</div>
|
||||
<div className="history-actions">
|
||||
<button className="btn" onClick={() => { void window.rd.removeHistoryEntry(entry.id).then(() => { setHistoryEntries((prev) => prev.filter((e) => e.id !== entry.id)); setSelectedHistoryIds((prev) => { const n = new Set(prev); n.delete(entry.id); return n; }); }).catch(() => {}); }}>Eintrag entfernen</button>
|
||||
<button className="btn" onClick={() => { void window.rd.removeHistoryEntry(entry.id).then(() => { setHistoryEntries((prev) => prev.filter((e) => e.id !== entry.id)); setSelectedHistoryIds((prev) => { const n = new Set(prev); n.delete(entry.id); return n; }); }); }}>Eintrag entfernen</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
@ -2748,8 +2723,6 @@ export function App(): ReactElement {
|
||||
<input value={settingsDraft.ddownloadLogin || ""} onChange={(e) => setText("ddownloadLogin", e.target.value)} />
|
||||
<label>DDownload Passwort</label>
|
||||
<input type="password" value={settingsDraft.ddownloadPassword || ""} onChange={(e) => setText("ddownloadPassword", e.target.value)} />
|
||||
<label>1Fichier API Key</label>
|
||||
<input type="password" value={settingsDraft.oneFichierApiKey || ""} onChange={(e) => setText("oneFichierApiKey", e.target.value)} />
|
||||
{configuredProviders.length === 0 && (
|
||||
<div className="hint">Füge mindestens einen Account hinzu, dann erscheint die Hoster-Auswahl.</div>
|
||||
)}
|
||||
@ -2887,12 +2860,6 @@ export function App(): ReactElement {
|
||||
<div className="modal-card" onClick={(event) => event.stopPropagation()}>
|
||||
<h3>{confirmPrompt.title}</h3>
|
||||
<p style={{ whiteSpace: "pre-line" }}>{confirmPrompt.message}</p>
|
||||
{confirmPrompt.details && (
|
||||
<details className="modal-details">
|
||||
<summary>Changelog anzeigen</summary>
|
||||
<pre>{confirmPrompt.details}</pre>
|
||||
</details>
|
||||
)}
|
||||
<div className="modal-actions">
|
||||
<button className="btn" onClick={() => closeConfirmPrompt(false)}>Abbrechen</button>
|
||||
<button
|
||||
@ -2989,7 +2956,7 @@ export function App(): ReactElement {
|
||||
<span>Links: {Object.keys(snapshot.session.items).length}</span>
|
||||
<span>Session: {humanSize(snapshot.stats.totalDownloaded)}</span>
|
||||
<span>Gesamt: {humanSize(snapshot.stats.totalDownloadedAllTime)}</span>
|
||||
<span>Hoster: {providerStats.length}</span>
|
||||
<span>Hoster: {configuredProviders.length}</span>
|
||||
<span>{snapshot.speedText}</span>
|
||||
<span>{snapshot.etaText}</span>
|
||||
<span className="footer-spacer" />
|
||||
@ -3049,18 +3016,18 @@ export function App(): ReactElement {
|
||||
<button className="ctx-menu-item" onClick={() => {
|
||||
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
||||
const itemIds = [...selectedIds].filter((id) => { const it = snapshot.session.items[id]; return it && startableStatuses.has(it.status); });
|
||||
if (pkgIds.length > 0) void window.rd.startPackages(pkgIds).catch(() => {});
|
||||
if (itemIds.length > 0) void window.rd.startItems(itemIds).catch(() => {});
|
||||
if (pkgIds.length > 0) void window.rd.startPackages(pkgIds);
|
||||
if (itemIds.length > 0) void window.rd.startItems(itemIds);
|
||||
setContextMenu(null);
|
||||
}}>Ausgewählte Downloads starten{multi ? ` (${selectedIds.size})` : ""}</button>
|
||||
)}
|
||||
<button className="ctx-menu-item" onClick={() => { void window.rd.start().catch(() => {}); setContextMenu(null); }}>Alle Downloads starten</button>
|
||||
<button className="ctx-menu-item" onClick={() => { void window.rd.start(); setContextMenu(null); }}>Alle Downloads starten</button>
|
||||
<div className="ctx-menu-sep" />
|
||||
<button className="ctx-menu-item" onClick={() => showLinksPopup(contextMenu.packageId, contextMenu.itemId)}>Linkadressen anzeigen</button>
|
||||
<div className="ctx-menu-sep" />
|
||||
{hasPackages && !contextMenu.itemId && (
|
||||
<button className="ctx-menu-item" onClick={() => {
|
||||
for (const id of selectedIds) { if (snapshot.session.packages[id]) void window.rd.togglePackage(id).catch(() => {}); }
|
||||
for (const id of selectedIds) { if (snapshot.session.packages[id]) void window.rd.togglePackage(id); }
|
||||
setContextMenu(null);
|
||||
}}>
|
||||
{multi ? `Alle ${selectedIds.size} umschalten` : (snapshot.session.packages[contextMenu.packageId]?.enabled ? "Deaktivieren" : "Aktivieren")}
|
||||
@ -3085,7 +3052,7 @@ export function App(): ReactElement {
|
||||
{hasPackages && !contextMenu.itemId && (
|
||||
<button className="ctx-menu-item" onClick={() => {
|
||||
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
||||
for (const id of pkgIds) void window.rd.resetPackage(id).catch(() => {});
|
||||
for (const id of pkgIds) void window.rd.resetPackage(id);
|
||||
setContextMenu(null);
|
||||
}}>Zurücksetzen{multi ? ` (${[...selectedIds].filter((id) => snapshot.session.packages[id]).length})` : ""}</button>
|
||||
)}
|
||||
@ -3094,7 +3061,7 @@ export function App(): ReactElement {
|
||||
const itemIds = multi
|
||||
? [...selectedIds].filter((id) => snapshot.session.items[id])
|
||||
: [contextMenu.itemId!];
|
||||
void window.rd.resetItems(itemIds).catch(() => {});
|
||||
void window.rd.resetItems(itemIds);
|
||||
setContextMenu(null);
|
||||
}}>Zurücksetzen{multi ? ` (${[...selectedIds].filter((id) => snapshot.session.items[id]).length})` : ""}</button>
|
||||
)}
|
||||
@ -3104,7 +3071,7 @@ export function App(): ReactElement {
|
||||
const someCompleted = items.some((item) => item && item.status === "completed");
|
||||
return (<>
|
||||
{someCompleted && (
|
||||
<button className="ctx-menu-item" onClick={() => { void window.rd.extractNow(contextMenu.packageId).catch(() => {}); setContextMenu(null); }}>Jetzt entpacken</button>
|
||||
<button className="ctx-menu-item" onClick={() => { void window.rd.extractNow(contextMenu.packageId); setContextMenu(null); }}>Jetzt entpacken</button>
|
||||
)}
|
||||
</>);
|
||||
})()}
|
||||
@ -3117,7 +3084,7 @@ export function App(): ReactElement {
|
||||
const label = p === "high" ? "Hoch" : p === "low" ? "Niedrig" : "Standard";
|
||||
const pkgIds = [...selectedIds].filter((id) => snapshot.session.packages[id]);
|
||||
const allMatch = pkgIds.every((id) => (snapshot.session.packages[id]?.priority || "normal") === p);
|
||||
return <button key={p} className={`ctx-menu-item${allMatch ? " ctx-menu-active" : ""}`} onClick={() => { for (const id of pkgIds) void window.rd.setPackagePriority(id, p).catch(() => {}); setContextMenu(null); }}>{allMatch ? `✓ ${label}` : label}</button>;
|
||||
return <button key={p} className={`ctx-menu-item${allMatch ? " ctx-menu-active" : ""}`} onClick={() => { for (const id of pkgIds) void window.rd.setPackagePriority(id, p); setContextMenu(null); }}>{allMatch ? `✓ ${label}` : label}</button>;
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
@ -3126,7 +3093,7 @@ export function App(): ReactElement {
|
||||
const itemIds = [...selectedIds].filter((id) => snapshot.session.items[id]);
|
||||
const skippable = itemIds.filter((id) => { const it = snapshot.session.items[id]; return it && (it.status === "queued" || it.status === "reconnect_wait"); });
|
||||
if (skippable.length === 0) return null;
|
||||
return <button className="ctx-menu-item" onClick={() => { void window.rd.skipItems(skippable).catch(() => {}); setContextMenu(null); }}>Überspringen{skippable.length > 1 ? ` (${skippable.length})` : ""}</button>;
|
||||
return <button className="ctx-menu-item" onClick={() => { void window.rd.skipItems(skippable); setContextMenu(null); }}>Überspringen{skippable.length > 1 ? ` (${skippable.length})` : ""}</button>;
|
||||
})()}
|
||||
{hasPackages && (
|
||||
<button className="ctx-menu-item ctx-danger" onClick={() => {
|
||||
@ -3170,7 +3137,7 @@ export function App(): ReactElement {
|
||||
newOrder.splice(insertAt, 0, col);
|
||||
}
|
||||
setColumnOrder(newOrder);
|
||||
void window.rd.updateSettings({ columnOrder: newOrder }).catch(() => {});
|
||||
void window.rd.updateSettings({ columnOrder: newOrder });
|
||||
}}
|
||||
>
|
||||
{isVisible ? "\u2713 " : "\u2003 "}{def.label}
|
||||
@ -3211,7 +3178,7 @@ export function App(): ReactElement {
|
||||
)}
|
||||
<div className="ctx-menu-sep" />
|
||||
<button className="ctx-menu-item ctx-danger" onClick={() => {
|
||||
void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); }).catch(() => {});
|
||||
void window.rd.clearHistory().then(() => { setHistoryEntries([]); setSelectedHistoryIds(new Set()); });
|
||||
setHistoryCtxMenu(null);
|
||||
}}>Verlauf leeren</button>
|
||||
</div>
|
||||
@ -3225,8 +3192,8 @@ export function App(): ReactElement {
|
||||
<div className="link-popup-list">
|
||||
{linkPopup.links.map((link, i) => (
|
||||
<div key={i} className="link-popup-row">
|
||||
<span className="link-popup-name link-popup-click" title={`${link.name}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.name).then(() => showToast("Name kopiert")).catch(() => showToast("Kopieren fehlgeschlagen")); }}>{link.name}</span>
|
||||
<span className="link-popup-url link-popup-click" title={`${link.url}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.url).then(() => showToast("Link kopiert")).catch(() => showToast("Kopieren fehlgeschlagen")); }}>{link.url}</span>
|
||||
<span className="link-popup-name link-popup-click" title={`${link.name}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.name); showToast("Name kopiert"); }}>{link.name}</span>
|
||||
<span className="link-popup-url link-popup-click" title={`${link.url}\nKlicken zum Kopieren`} onClick={() => { void navigator.clipboard.writeText(link.url); showToast("Link kopiert"); }}>{link.url}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
@ -3234,13 +3201,15 @@ export function App(): ReactElement {
|
||||
{linkPopup.isPackage && (
|
||||
<button className="btn" onClick={() => {
|
||||
const text = linkPopup.links.map((l) => l.name).join("\n");
|
||||
void navigator.clipboard.writeText(text).then(() => showToast("Alle Namen kopiert")).catch(() => showToast("Kopieren fehlgeschlagen"));
|
||||
void navigator.clipboard.writeText(text);
|
||||
showToast("Alle Namen kopiert");
|
||||
}}>Alle Namen kopieren</button>
|
||||
)}
|
||||
{linkPopup.isPackage && (
|
||||
<button className="btn" onClick={() => {
|
||||
const text = linkPopup.links.map((l) => l.url).join("\n");
|
||||
void navigator.clipboard.writeText(text).then(() => showToast("Alle Links kopiert")).catch(() => showToast("Kopieren fehlgeschlagen"));
|
||||
void navigator.clipboard.writeText(text);
|
||||
showToast("Alle Links kopiert");
|
||||
}}>Alle Links kopieren</button>
|
||||
)}
|
||||
<button className="btn" onClick={() => setLinkPopup(null)}>Schließen</button>
|
||||
@ -3387,7 +3356,7 @@ const PackageCard = memo(function PackageCard({ pkg, items, packageSpeed, isFirs
|
||||
<span key={col} className={`pkg-col pkg-col-prio${pkg.priority === "high" ? " prio-high" : pkg.priority === "low" ? " prio-low" : ""}`}>{pkg.priority === "high" ? "Hoch" : pkg.priority === "low" ? "Niedrig" : ""}</span>
|
||||
);
|
||||
case "status": return (
|
||||
<span key={col} className="pkg-col pkg-col-status">[{done}/{total}{done === total && total > 0 ? " - Done" : ""}{failed > 0 ? ` · ${failed} Fehler` : ""}{cancelled > 0 ? ` · ${cancelled} abgebr.` : ""}]{pkg.postProcessLabel ? ` - ${pkg.postProcessLabel}` : ""}</span>
|
||||
<span key={col} className="pkg-col pkg-col-status">[{done}/{total}{done === total && total > 0 ? " - Done" : ""}{failed > 0 ? ` · ${failed} Fehler` : ""}{cancelled > 0 ? ` · ${cancelled} abgebr.` : ""}]{pkg.postProcessLabel ? ` ${pkg.postProcessLabel}` : ""}</span>
|
||||
);
|
||||
case "speed": return (
|
||||
<span key={col} className="pkg-col pkg-col-speed">{packageSpeed > 0 ? formatSpeedMbps(packageSpeed) : ""}</span>
|
||||
|
||||
@ -1639,7 +1639,6 @@ td {
|
||||
border-radius: 12px;
|
||||
padding: 10px 14px;
|
||||
box-shadow: 0 16px 30px rgba(0, 0, 0, 0.35);
|
||||
z-index: 50;
|
||||
}
|
||||
|
||||
.ctx-menu {
|
||||
@ -1764,8 +1763,6 @@ td {
|
||||
|
||||
.modal-card {
|
||||
width: min(560px, 100%);
|
||||
max-height: calc(100vh - 40px);
|
||||
overflow-y: auto;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 14px;
|
||||
background: linear-gradient(180deg, color-mix(in srgb, var(--card) 98%, transparent), color-mix(in srgb, var(--surface) 98%, transparent));
|
||||
@ -1784,34 +1781,6 @@ td {
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.modal-details {
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 6px;
|
||||
padding: 0;
|
||||
}
|
||||
.modal-details summary {
|
||||
padding: 6px 10px;
|
||||
cursor: pointer;
|
||||
font-size: 13px;
|
||||
color: var(--muted);
|
||||
user-select: none;
|
||||
}
|
||||
.modal-details summary:hover {
|
||||
color: var(--text);
|
||||
}
|
||||
.modal-details pre {
|
||||
margin: 0;
|
||||
padding: 8px 10px;
|
||||
border-top: 1px solid var(--border);
|
||||
font-size: 12px;
|
||||
line-height: 1.5;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
max-height: 260px;
|
||||
overflow-y: auto;
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.modal-path {
|
||||
font-size: 12px;
|
||||
word-break: break-all;
|
||||
|
||||
@ -14,7 +14,7 @@ export type CleanupMode = "none" | "trash" | "delete";
|
||||
export type ConflictMode = "overwrite" | "skip" | "rename" | "ask";
|
||||
export type SpeedMode = "global" | "per_download";
|
||||
export type FinishedCleanupPolicy = "never" | "immediate" | "on_start" | "package_done";
|
||||
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid" | "ddownload" | "onefichier";
|
||||
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid" | "ddownload";
|
||||
export type DebridFallbackProvider = DebridProvider | "none";
|
||||
export type AppTheme = "dark" | "light";
|
||||
export type PackagePriority = "high" | "normal" | "low";
|
||||
@ -44,7 +44,6 @@ export interface AppSettings {
|
||||
allDebridToken: string;
|
||||
ddownloadLogin: string;
|
||||
ddownloadPassword: string;
|
||||
oneFichierApiKey: string;
|
||||
archivePasswordList: string;
|
||||
rememberToken: boolean;
|
||||
providerPrimary: DebridProvider;
|
||||
|
||||
@ -269,7 +269,6 @@ describe("buildAutoRenameBaseName", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S99.720p-4sf", "show.s99e999.720p.mkv");
|
||||
// SCENE_EPISODE_RE allows up to 3-digit episodes and 2-digit seasons
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).toContain("S99E999");
|
||||
});
|
||||
|
||||
// Real-world scene release patterns
|
||||
@ -344,7 +343,6 @@ describe("buildAutoRenameBaseName", () => {
|
||||
const result = buildAutoRenameBaseName("Show.S01-4sf", "show.s01e01.mkv");
|
||||
// "mkv" should not be treated as part of the filename match
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).toContain("S01E01");
|
||||
});
|
||||
|
||||
it("does not match episode-like patterns in codec strings", () => {
|
||||
@ -375,7 +373,6 @@ describe("buildAutoRenameBaseName", () => {
|
||||
// Extreme edge case - sanitizeFilename trims leading dots
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!).toContain("S01E01");
|
||||
expect(result!).toContain("-4sf");
|
||||
expect(result!).not.toContain(".S01E01.S01E01"); // no duplication
|
||||
});
|
||||
|
||||
|
||||
@ -317,7 +317,7 @@ describe("debrid service", () => {
|
||||
const controller = new AbortController();
|
||||
const abortTimer = setTimeout(() => {
|
||||
controller.abort("test");
|
||||
}, 200);
|
||||
}, 25);
|
||||
|
||||
try {
|
||||
await expect(service.unrestrictLink("https://rapidgator.net/file/abort-mega-web", controller.signal)).rejects.toThrow(/aborted/i);
|
||||
|
||||
@ -36,8 +36,12 @@ afterEach(() => {
|
||||
}
|
||||
});
|
||||
|
||||
describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm backend", () => {
|
||||
describe("extractor jvm backend", () => {
|
||||
it("extracts zip archives through SevenZipJBinding backend", async () => {
|
||||
if (!hasJavaRuntime() || !hasJvmExtractorRuntime()) {
|
||||
return;
|
||||
}
|
||||
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
||||
@ -65,112 +69,11 @@ describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm b
|
||||
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("emits progress callbacks with archiveName and percent", async () => {
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
const targetDir = path.join(root, "out");
|
||||
fs.mkdirSync(packageDir, { recursive: true });
|
||||
|
||||
// Create a ZIP with some content to trigger progress
|
||||
const zipPath = path.join(packageDir, "progress-test.zip");
|
||||
const zip = new AdmZip();
|
||||
zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
|
||||
zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
|
||||
zip.writeZip(zipPath);
|
||||
|
||||
const progressUpdates: Array<{
|
||||
archiveName: string;
|
||||
percent: number;
|
||||
phase: string;
|
||||
archivePercent?: number;
|
||||
}> = [];
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
onProgress: (update) => {
|
||||
progressUpdates.push({
|
||||
archiveName: update.archiveName,
|
||||
percent: update.percent,
|
||||
phase: update.phase,
|
||||
archivePercent: update.archivePercent,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.extracted).toBe(1);
|
||||
expect(result.failed).toBe(0);
|
||||
|
||||
// Should have at least preparing, extracting, and done phases
|
||||
const phases = new Set(progressUpdates.map((u) => u.phase));
|
||||
expect(phases.has("preparing")).toBe(true);
|
||||
expect(phases.has("extracting")).toBe(true);
|
||||
|
||||
// Extracting phase should include the archive name
|
||||
const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
|
||||
expect(extracting.length).toBeGreaterThan(0);
|
||||
|
||||
// Should end at 100%
|
||||
const lastExtracting = extracting[extracting.length - 1];
|
||||
expect(lastExtracting.archivePercent).toBe(100);
|
||||
|
||||
// Files should exist
|
||||
expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("extracts multiple archives sequentially with progress for each", async () => {
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
const targetDir = path.join(root, "out");
|
||||
fs.mkdirSync(packageDir, { recursive: true });
|
||||
|
||||
// Create two separate ZIP archives
|
||||
const zip1 = new AdmZip();
|
||||
zip1.addFile("episode01.txt", Buffer.from("ep1 content"));
|
||||
zip1.writeZip(path.join(packageDir, "archive1.zip"));
|
||||
|
||||
const zip2 = new AdmZip();
|
||||
zip2.addFile("episode02.txt", Buffer.from("ep2 content"));
|
||||
zip2.writeZip(path.join(packageDir, "archive2.zip"));
|
||||
|
||||
const archiveNames = new Set<string>();
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
onProgress: (update) => {
|
||||
if (update.phase === "extracting" && update.archiveName) {
|
||||
archiveNames.add(update.archiveName);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.extracted).toBe(2);
|
||||
expect(result.failed).toBe(0);
|
||||
// Both archive names should have appeared in progress
|
||||
expect(archiveNames.has("archive1.zip")).toBe(true);
|
||||
expect(archiveNames.has("archive2.zip")).toBe(true);
|
||||
// Both files extracted
|
||||
expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
|
||||
});
|
||||
|
||||
it("respects ask/skip conflict mode in jvm backend", async () => {
|
||||
if (!hasJavaRuntime() || !hasJvmExtractorRuntime()) {
|
||||
return;
|
||||
}
|
||||
|
||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-extract-"));
|
||||
|
||||
@ -15,8 +15,6 @@ import {
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
const originalExtractBackend = process.env.RD_EXTRACT_BACKEND;
|
||||
const originalStatfs = fs.promises.statfs;
|
||||
const originalZipEntryMemoryLimit = process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.RD_EXTRACT_BACKEND = "legacy";
|
||||
@ -31,12 +29,6 @@ afterEach(() => {
|
||||
} else {
|
||||
process.env.RD_EXTRACT_BACKEND = originalExtractBackend;
|
||||
}
|
||||
(fs.promises as any).statfs = originalStatfs;
|
||||
if (originalZipEntryMemoryLimit === undefined) {
|
||||
delete process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
||||
} else {
|
||||
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = originalZipEntryMemoryLimit;
|
||||
}
|
||||
});
|
||||
|
||||
describe("extractor", () => {
|
||||
@ -582,6 +574,7 @@ describe("extractor", () => {
|
||||
});
|
||||
|
||||
it("keeps original ZIP size guard error when external fallback is unavailable", async () => {
|
||||
const previousLimit = process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
||||
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = "8";
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
|
||||
@ -595,20 +588,32 @@ describe("extractor", () => {
|
||||
zip.addFile("large.bin", Buffer.alloc(9 * 1024 * 1024, 7));
|
||||
zip.writeZip(zipPath);
|
||||
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false
|
||||
});
|
||||
expect(result.extracted).toBe(0);
|
||||
expect(result.failed).toBe(1);
|
||||
expect(String(result.lastError)).toMatch(/ZIP-Eintrag.*groß/i);
|
||||
try {
|
||||
const result = await extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none",
|
||||
conflictMode: "overwrite",
|
||||
removeLinks: false,
|
||||
removeSamples: false
|
||||
});
|
||||
expect(result.extracted).toBe(0);
|
||||
expect(result.failed).toBe(1);
|
||||
expect(String(result.lastError)).toMatch(/ZIP-Eintrag.*groß/i);
|
||||
} finally {
|
||||
if (previousLimit === undefined) {
|
||||
delete process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
|
||||
} else {
|
||||
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = previousLimit;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it.skipIf(process.platform !== "win32")("matches resume-state archive names case-insensitively on Windows", async () => {
|
||||
it("matches resume-state archive names case-insensitively on Windows", async () => {
|
||||
if (process.platform !== "win32") {
|
||||
return;
|
||||
}
|
||||
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
|
||||
tempDirs.push(root);
|
||||
const packageDir = path.join(root, "pkg");
|
||||
@ -645,18 +650,23 @@ describe("extractor", () => {
|
||||
zip.addFile("test.txt", Buffer.alloc(1024, 0x41));
|
||||
zip.writeZip(path.join(packageDir, "test.zip"));
|
||||
|
||||
const originalStatfs = fs.promises.statfs;
|
||||
(fs.promises as any).statfs = async () => ({ bfree: 1, bsize: 1 });
|
||||
|
||||
await expect(
|
||||
extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none" as any,
|
||||
conflictMode: "overwrite" as any,
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
})
|
||||
).rejects.toThrow(/Nicht genug Speicherplatz/);
|
||||
try {
|
||||
await expect(
|
||||
extractPackageArchives({
|
||||
packageDir,
|
||||
targetDir,
|
||||
cleanupMode: "none" as any,
|
||||
conflictMode: "overwrite" as any,
|
||||
removeLinks: false,
|
||||
removeSamples: false,
|
||||
})
|
||||
).rejects.toThrow(/Nicht genug Speicherplatz/);
|
||||
} finally {
|
||||
(fs.promises as any).statfs = originalStatfs;
|
||||
}
|
||||
});
|
||||
|
||||
it("proceeds when disk space is sufficient", async () => {
|
||||
|
||||
@ -166,7 +166,7 @@ describe("mega-web-fallback", () => {
|
||||
const controller = new AbortController();
|
||||
const timer = setTimeout(() => {
|
||||
controller.abort("test");
|
||||
}, 200);
|
||||
}, 30);
|
||||
|
||||
try {
|
||||
await expect(fallback.unrestrict("https://mega.debrid/link2", controller.signal)).rejects.toThrow(/aborted/i);
|
||||
|
||||
@ -1,188 +0,0 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
|
||||
|
||||
type MinimalItem = {
|
||||
targetPath?: string;
|
||||
fileName?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
function makeItems(names: string[]): MinimalItem[] {
|
||||
return names.map((name) => ({
|
||||
targetPath: `C:\\Downloads\\Package\\${name}`,
|
||||
fileName: name,
|
||||
id: name,
|
||||
status: "completed",
|
||||
}));
|
||||
}
|
||||
|
||||
describe("resolveArchiveItemsFromList", () => {
|
||||
// ── Multipart RAR (.partN.rar) ──
|
||||
|
||||
it("matches multipart .part1.rar archives", () => {
|
||||
const items = makeItems([
|
||||
"Movie.part1.rar",
|
||||
"Movie.part2.rar",
|
||||
"Movie.part3.rar",
|
||||
"Other.rar",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result.map((i: any) => i.fileName)).toEqual([
|
||||
"Movie.part1.rar",
|
||||
"Movie.part2.rar",
|
||||
"Movie.part3.rar",
|
||||
]);
|
||||
});
|
||||
|
||||
it("matches multipart .part01.rar archives (zero-padded)", () => {
|
||||
const items = makeItems([
|
||||
"Film.part01.rar",
|
||||
"Film.part02.rar",
|
||||
"Film.part10.rar",
|
||||
"Unrelated.zip",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
|
||||
expect(result).toHaveLength(3);
|
||||
});
|
||||
|
||||
// ── Old-style RAR (.rar + .r00, .r01, etc.) ──
|
||||
|
||||
it("matches old-style .rar + .rNN volumes", () => {
|
||||
const items = makeItems([
|
||||
"Archive.rar",
|
||||
"Archive.r00",
|
||||
"Archive.r01",
|
||||
"Archive.r02",
|
||||
"Other.zip",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||
expect(result).toHaveLength(4);
|
||||
});
|
||||
|
||||
// ── Single RAR ──
|
||||
|
||||
it("matches a single .rar file", () => {
|
||||
const items = makeItems(["SingleFile.rar", "Other.mkv"]);
|
||||
const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
|
||||
expect(result).toHaveLength(1);
|
||||
expect((result[0] as any).fileName).toBe("SingleFile.rar");
|
||||
});
|
||||
|
||||
// ── Split ZIP ──
|
||||
|
||||
it("matches split .zip.NNN files", () => {
|
||||
const items = makeItems([
|
||||
"Data.zip",
|
||||
"Data.zip.001",
|
||||
"Data.zip.002",
|
||||
"Data.zip.003",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
|
||||
expect(result).toHaveLength(4);
|
||||
});
|
||||
|
||||
// ── Split 7z ──
|
||||
|
||||
it("matches split .7z.NNN files", () => {
|
||||
const items = makeItems([
|
||||
"Backup.7z.001",
|
||||
"Backup.7z.002",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
// ── Generic .NNN splits ──
|
||||
|
||||
it("matches generic .NNN split files", () => {
|
||||
const items = makeItems([
|
||||
"video.001",
|
||||
"video.002",
|
||||
"video.003",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("video.001", items as any);
|
||||
expect(result).toHaveLength(3);
|
||||
});
|
||||
|
||||
// ── Exact filename match ──
|
||||
|
||||
it("matches a single .zip by exact name", () => {
|
||||
const items = makeItems(["myarchive.zip", "other.rar"]);
|
||||
const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
|
||||
expect(result).toHaveLength(1);
|
||||
expect((result[0] as any).fileName).toBe("myarchive.zip");
|
||||
});
|
||||
|
||||
// ── Case insensitivity ──
|
||||
|
||||
it("matches case-insensitively", () => {
|
||||
const items = makeItems([
|
||||
"MOVIE.PART1.RAR",
|
||||
"MOVIE.PART2.RAR",
|
||||
]);
|
||||
const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
// ── Stem-based fallback ──
|
||||
|
||||
it("uses stem-based fallback when exact patterns fail", () => {
|
||||
// Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
|
||||
// but the disk file is "Movie.part1.rar"
|
||||
const items = makeItems([
|
||||
"Movie.rar",
|
||||
]);
|
||||
// The archive on disk is "Movie.part1.rar" but there's no item matching the
|
||||
// .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
|
||||
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||
// stem fallback: "movie" starts with "movie" and ends with .rar
|
||||
expect(result).toHaveLength(1);
|
||||
});
|
||||
|
||||
// ── Single item fallback ──
|
||||
|
||||
it("returns single archive item when no pattern matches", () => {
|
||||
const items = makeItems(["totally-different-name.rar"]);
|
||||
const result = resolveArchiveItemsFromList("Original.rar", items as any);
|
||||
// Single item in list with archive extension → return it
|
||||
expect(result).toHaveLength(1);
|
||||
});
|
||||
|
||||
// ── Empty when no match ──
|
||||
|
||||
it("returns empty when items have no archive extensions", () => {
|
||||
const items = makeItems(["video.mkv", "subtitle.srt"]);
|
||||
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
// ── Items without targetPath ──
|
||||
|
||||
it("falls back to fileName when targetPath is missing", () => {
|
||||
const items = [
|
||||
{ fileName: "Movie.part1.rar", id: "1", status: "completed" },
|
||||
{ fileName: "Movie.part2.rar", id: "2", status: "completed" },
|
||||
];
|
||||
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
// ── Multiple archives, should not cross-match ──
|
||||
|
||||
it("does not cross-match different archive groups", () => {
|
||||
const items = makeItems([
|
||||
"Episode.S01E01.part1.rar",
|
||||
"Episode.S01E01.part2.rar",
|
||||
"Episode.S01E02.part1.rar",
|
||||
"Episode.S01E02.part2.rar",
|
||||
]);
|
||||
const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
|
||||
expect(result1).toHaveLength(2);
|
||||
expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);
|
||||
|
||||
const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
|
||||
expect(result2).toHaveLength(2);
|
||||
expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
|
||||
});
|
||||
});
|
||||
@ -153,7 +153,7 @@ async function main(): Promise<void> {
|
||||
createStoragePaths(path.join(tempRoot, "state-pause"))
|
||||
);
|
||||
manager2.addPackages([{ name: "pause", links: ["https://dummy/slow"] }]);
|
||||
await manager2.start();
|
||||
manager2.start();
|
||||
await new Promise((resolve) => setTimeout(resolve, 120));
|
||||
const paused = manager2.togglePause();
|
||||
assert(paused, "Pause konnte nicht aktiviert werden");
|
||||
|
||||
@ -8,8 +8,6 @@ import { setLogListener } from "../src/main/logger";
|
||||
const tempDirs: string[] = [];
|
||||
|
||||
afterEach(() => {
|
||||
// Ensure session log is shut down between tests
|
||||
shutdownSessionLog();
|
||||
// Ensure listener is cleared between tests
|
||||
setLogListener(null);
|
||||
for (const dir of tempDirs.splice(0)) {
|
||||
@ -47,7 +45,7 @@ describe("session-log", () => {
|
||||
logger.info("Test-Nachricht für Session-Log");
|
||||
|
||||
// Wait for flush (200ms interval + margin)
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
await new Promise((resolve) => setTimeout(resolve, 350));
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
expect(content).toContain("Test-Nachricht für Session-Log");
|
||||
@ -81,7 +79,7 @@ describe("session-log", () => {
|
||||
const { logger } = await import("../src/main/logger");
|
||||
logger.info("Nach-Shutdown-Nachricht");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
await new Promise((resolve) => setTimeout(resolve, 350));
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
expect(content).not.toContain("Nach-Shutdown-Nachricht");
|
||||
@ -139,7 +137,7 @@ describe("session-log", () => {
|
||||
shutdownSessionLog();
|
||||
});
|
||||
|
||||
it("multiple sessions create different files", async () => {
|
||||
it("multiple sessions create different files", () => {
|
||||
const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-slog-"));
|
||||
tempDirs.push(baseDir);
|
||||
|
||||
@ -148,7 +146,10 @@ describe("session-log", () => {
|
||||
shutdownSessionLog();
|
||||
|
||||
// Small delay to ensure different timestamp
|
||||
await new Promise((resolve) => setTimeout(resolve, 1100));
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < 1100) {
|
||||
// busy-wait for 1.1 seconds to get different second in filename
|
||||
}
|
||||
|
||||
initSessionLog(baseDir);
|
||||
const path2 = getSessionLogPath();
|
||||
|
||||
@ -12,5 +12,5 @@
|
||||
"isolatedModules": true,
|
||||
"types": ["node", "vite/client"]
|
||||
},
|
||||
"include": ["src", "tests", "vite.config.mts"]
|
||||
"include": ["src", "tests", "vite.config.ts"]
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user