Compare commits

..

No commits in common. "main" and "v1.6.50" have entirely different histories.

26 changed files with 298 additions and 1570 deletions

View File

@ -160,7 +160,7 @@ The app stores runtime files in Electron's `userData` directory, including:
## Troubleshooting
- Download does not start: verify token and selected provider in Settings.
- Extraction fails: check archive passwords and native extractor installation (7-Zip/WinRAR). Optional JVM extractor can be forced with `RD_EXTRACT_BACKEND=jvm`.
- Extraction fails: check archive passwords, JVM runtime (`resources/extractor-jvm`), or force legacy mode with `RD_EXTRACT_BACKEND=legacy`.
- Very slow downloads: check active speed limit and bandwidth schedules.
- Unexpected interruptions: enable reconnect and fallback providers.
- Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically.
@ -169,35 +169,6 @@ The app stores runtime files in Electron's `userData` directory, including:
Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases).
### v1.6.61 (2026-03-05)
- Fixed leftover empty package folders in `Downloader Unfertig` after successful extraction.
- Resume marker files (`.rd_extract_progress*.json`) are now treated as ignorable for empty-folder cleanup.
- Deferred post-processing now clears resume markers before running empty-directory removal.
### v1.6.60 (2026-03-05)
- Added package-scoped password cache for extraction: once the first archive in a package is solved, following archives in the same package reuse that password first.
- Kept fallback behavior intact (`""` and other candidates are still tested), but moved empty-password probing behind the learned password to reduce per-archive delays.
- Added cache invalidation on real `wrong_password` failures so stale passwords are automatically discarded.
### v1.6.59 (2026-03-05)
- Switched default extraction backend to native tools (`legacy`) for more stable archive-to-archive flow.
- Prioritized 7-Zip as primary native extractor, with WinRAR/UnRAR as fallback.
- JVM extractor remains available as opt-in via `RD_EXTRACT_BACKEND=jvm`.
### v1.6.58 (2026-03-05)
- Fixed extraction progress oscillation (`1% -> 100% -> 1%` loops) during password retries.
- Kept strict archive completion logic, but normalized in-progress archive percent to avoid false visual done states before real completion.
### v1.6.57 (2026-03-05)
- Fixed extraction flow so archives are marked done only on real completion, not on temporary `100%` progress spikes.
- Improved password handling: after the first successful archive, the discovered password is prioritized for subsequent archives.
- Fixed progress parsing for password retries (reset/restart handling), reducing visible and real gaps between archive extractions.
## License
MIT - see `LICENSE`.

View File

@ -1,6 +1,6 @@
{
"name": "real-debrid-downloader",
"version": "1.6.66",
"version": "1.6.50",
"description": "Desktop downloader",
"main": "build/main/main/main.js",
"author": "Sucukdeluxe",

View File

@ -3,9 +3,7 @@ package com.sucukdeluxe.extractor;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.FileHeader;
import net.sf.sevenzipjbinding.ExtractAskMode;
import net.sf.sevenzipjbinding.ExtractOperationResult;
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
import net.sf.sevenzipjbinding.IArchiveOpenCallback;
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
import net.sf.sevenzipjbinding.IInArchive;
@ -53,10 +51,6 @@ public final class JBindExtractorMain {
}
public static void main(String[] args) {
if (args.length == 1 && "--daemon".equals(args[0])) {
runDaemon();
return;
}
int exit = 1;
try {
ExtractionRequest request = parseArgs(args);
@ -71,127 +65,6 @@ public final class JBindExtractorMain {
System.exit(exit);
}
// Daemon mode entry point: reads newline-delimited JSON extraction requests
// from stdin and executes them sequentially until stdin closes. The parent
// process synchronizes on the printed marker lines.
private static void runDaemon() {
// Handshake so the parent knows the daemon is ready to accept requests.
System.out.println("RD_DAEMON_READY");
System.out.flush();
java.io.BufferedReader reader = new java.io.BufferedReader(
new java.io.InputStreamReader(System.in, StandardCharsets.UTF_8));
try {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.isEmpty()) {
continue;
}
int exitCode = 1;
try {
ExtractionRequest request = parseDaemonRequest(line);
exitCode = runExtraction(request);
} catch (IllegalArgumentException error) {
// Exit code 2 signals an invalid request (bad/missing arguments).
emitError("Argumentfehler: " + safeMessage(error));
exitCode = 2;
} catch (Throwable error) {
// Exit code 1 signals a runtime extraction failure.
emitError(safeMessage(error));
exitCode = 1;
}
// Per-request completion marker carrying that request's exit code.
System.out.println("RD_REQUEST_DONE " + exitCode);
System.out.flush();
}
} catch (IOException ignored) {
// stdin closed — parent process exited; shut down quietly.
}
}
/**
 * Parses one newline-delimited daemon request of the expected shape
 * {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
 * using minimal index-based scanning (no external JSON dependency).
 *
 * @param jsonLine single-line JSON object describing one extraction
 * @return the populated request
 * @throws IllegalArgumentException if the archive file does not exist/is not
 *         a regular file, or the target directory is missing
 */
private static ExtractionRequest parseDaemonRequest(String jsonLine) {
    ExtractionRequest request = new ExtractionRequest();
    request.archiveFile = new File(extractJsonString(jsonLine, "archive"));
    request.targetDir = new File(extractJsonString(jsonLine, "target"));
    String conflict = extractJsonString(jsonLine, "conflict");
    if (conflict.length() > 0) {
        request.conflictMode = ConflictMode.fromValue(conflict);
    }
    String backend = extractJsonString(jsonLine, "backend");
    if (backend.length() > 0) {
        request.backend = Backend.fromValue(backend);
    }
    // Parse the "passwords" array by walking characters after '[':
    // quoted strings are consumed wholesale via findClosingQuote, so a ']'
    // INSIDE a password no longer truncates the array (the previous
    // indexOf(']') pre-slice cut the array at the first ']' even when it
    // appeared inside a quoted value). The first UNquoted ']' ends the array.
    int pwStart = jsonLine.indexOf("\"passwords\"");
    if (pwStart >= 0) {
        int arrStart = jsonLine.indexOf('[', pwStart);
        if (arrStart >= 0) {
            int idx = arrStart + 1;
            while (idx < jsonLine.length()) {
                char ch = jsonLine.charAt(idx);
                if (ch == ']') {
                    break; // end of array (outside any quoted string)
                }
                if (ch == '"') {
                    int qEnd = findClosingQuote(jsonLine, idx + 1);
                    if (qEnd < 0) {
                        break; // unterminated string: keep what we collected
                    }
                    request.passwords.add(unescapeJsonString(jsonLine.substring(idx + 1, qEnd)));
                    idx = qEnd + 1;
                } else {
                    idx++;
                }
            }
        }
    }
    // new File(...) never returns null; the null checks are kept defensively
    // and to preserve the original error message shape.
    if (request.archiveFile == null || !request.archiveFile.exists() || !request.archiveFile.isFile()) {
        throw new IllegalArgumentException("Archiv nicht gefunden: " +
                (request.archiveFile == null ? "null" : request.archiveFile.getAbsolutePath()));
    }
    if (request.targetDir == null) {
        throw new IllegalArgumentException("--target fehlt");
    }
    return request;
}
/**
 * Pulls the string value for {@code key} out of a flat JSON object using
 * plain index scanning (no JSON library). Returns "" when the key, the
 * following colon, or a well-formed quoted value cannot be located.
 */
private static String extractJsonString(String json, String key) {
    String quotedKey = "\"" + key + "\"";
    int at = json.indexOf(quotedKey);
    if (at >= 0) {
        // Jump to the separator after the key; -1 if malformed.
        at = json.indexOf(':', at + quotedKey.length());
    }
    int open = at < 0 ? -1 : json.indexOf('"', at + 1);
    if (open < 0) {
        return "";
    }
    int close = findClosingQuote(json, open + 1);
    return close < 0 ? "" : unescapeJsonString(json.substring(open + 1, close));
}
/**
 * Returns the index of the next unescaped double quote at or after
 * {@code from}, or -1 if none exists. A backslash escapes (and thus hides)
 * the single character that follows it.
 */
private static int findClosingQuote(String s, int from) {
    int i = from;
    while (i < s.length()) {
        char ch = s.charAt(i);
        if (ch == '"') {
            return i;
        }
        // On a backslash, skip both it and the escaped character.
        i += (ch == '\\') ? 2 : 1;
    }
    return -1;
}
private static String unescapeJsonString(String s) {
if (s.indexOf('\\') < 0) return s;
StringBuilder sb = new StringBuilder(s.length());
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '\\' && i + 1 < s.length()) {
char next = s.charAt(i + 1);
switch (next) {
case '"': sb.append('"'); i++; break;
case '\\': sb.append('\\'); i++; break;
case '/': sb.append('/'); i++; break;
case 'n': sb.append('\n'); i++; break;
case 'r': sb.append('\r'); i++; break;
case 't': sb.append('\t'); i++; break;
default: sb.append(c); break;
}
} else {
sb.append(c);
}
}
return sb.toString();
}
private static int runExtraction(ExtractionRequest request) throws Exception {
List<String> passwords = normalizePasswords(request.passwords);
Exception lastError = null;
@ -362,99 +235,110 @@ public final class JBindExtractorMain {
try {
context = openSevenZipArchive(request.archiveFile, password);
IInArchive archive = context.archive;
int itemCount = archive.getNumberOfItems();
if (itemCount <= 0) {
ISimpleInArchive simple = archive.getSimpleInterface();
ISimpleInArchiveItem[] items = simple.getArchiveItems();
if (items == null) {
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
}
// Pre-scan: collect file indices, sizes, output paths, and detect encryption
long totalUnits = 0;
boolean encrypted = false;
List<Integer> fileIndices = new ArrayList<Integer>();
List<File> outputFiles = new ArrayList<File>();
List<Long> fileSizes = new ArrayList<Long>();
for (ISimpleInArchiveItem item : items) {
if (item == null || item.isFolder()) {
continue;
}
try {
encrypted = encrypted || item.isEncrypted();
} catch (Throwable ignored) {
// ignore encrypted flag read issues
}
totalUnits += safeSize(item.getSize());
}
ProgressTracker progress = new ProgressTracker(totalUnits);
progress.emitStart();
Set<String> reserved = new HashSet<String>();
for (ISimpleInArchiveItem item : items) {
if (item == null) {
continue;
}
for (int i = 0; i < itemCount; i++) {
Boolean isFolder = (Boolean) archive.getProperty(i, PropID.IS_FOLDER);
String entryPath = (String) archive.getProperty(i, PropID.PATH);
String entryName = normalizeEntryName(entryPath, "item-" + i);
if (Boolean.TRUE.equals(isFolder)) {
String entryName = normalizeEntryName(item.getPath(), "item-" + item.getItemIndex());
if (item.isFolder()) {
File dir = resolveDirectory(request.targetDir, entryName);
ensureDirectory(dir);
reserved.add(pathKey(dir));
continue;
}
try {
Boolean isEncrypted = (Boolean) archive.getProperty(i, PropID.ENCRYPTED);
encrypted = encrypted || Boolean.TRUE.equals(isEncrypted);
} catch (Throwable ignored) {
// ignore encrypted flag read issues
}
Long rawSize = (Long) archive.getProperty(i, PropID.SIZE);
long itemSize = safeSize(rawSize);
totalUnits += itemSize;
long itemUnits = safeSize(item.getSize());
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
fileIndices.add(i);
outputFiles.add(output); // null if skipped
fileSizes.add(itemSize);
}
if (fileIndices.isEmpty()) {
// All items are folders or skipped
ProgressTracker progress = new ProgressTracker(1);
progress.emitStart();
progress.emitDone();
return;
}
ProgressTracker progress = new ProgressTracker(totalUnits);
progress.emitStart();
// Build index array for bulk extract
int[] indices = new int[fileIndices.size()];
for (int i = 0; i < fileIndices.size(); i++) {
indices[i] = fileIndices.get(i);
}
// Map from archive index to our position in fileIndices/outputFiles
Map<Integer, Integer> indexToPos = new HashMap<Integer, Integer>();
for (int i = 0; i < fileIndices.size(); i++) {
indexToPos.put(fileIndices.get(i), i);
}
// Bulk extraction state
final boolean encryptedFinal = encrypted;
final String effectivePassword = password == null ? "" : password;
final File[] currentOutput = new File[1];
final FileOutputStream[] currentStream = new FileOutputStream[1];
final boolean[] currentSuccess = new boolean[1];
final long[] currentRemaining = new long[1];
final Throwable[] firstError = new Throwable[1];
final int[] currentPos = new int[] { -1 };
try {
archive.extract(indices, false, new BulkExtractCallback(
archive, indexToPos, fileIndices, outputFiles, fileSizes,
progress, encryptedFinal, effectivePassword, currentOutput,
currentStream, currentSuccess, currentRemaining, currentPos, firstError
));
} catch (SevenZipException error) {
if (looksLikeWrongPassword(error, encryptedFinal)) {
throw new WrongPasswordException(error);
if (output == null) {
progress.advance(itemUnits);
continue;
}
throw error;
}
if (firstError[0] != null) {
if (firstError[0] instanceof WrongPasswordException) {
throw (WrongPasswordException) firstError[0];
ensureDirectory(output.getParentFile());
rejectSymlink(output);
final FileOutputStream out = new FileOutputStream(output);
final long[] remaining = new long[] { itemUnits };
boolean extractionSuccess = false;
try {
ExtractOperationResult result = item.extractSlow(new ISequentialOutStream() {
@Override
public int write(byte[] data) throws SevenZipException {
if (data == null || data.length == 0) {
return 0;
}
try {
out.write(data);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
}
long accounted = Math.min(remaining[0], (long) data.length);
remaining[0] -= accounted;
progress.advance(accounted);
return data.length;
}
}, password == null ? "" : password);
if (remaining[0] > 0) {
progress.advance(remaining[0]);
}
if (result != ExtractOperationResult.OK) {
if (isPasswordFailure(result, encrypted)) {
throw new WrongPasswordException(new IOException("Falsches Passwort"));
}
throw new IOException("7z-Fehler: " + result.name());
}
extractionSuccess = true;
} catch (SevenZipException error) {
if (looksLikeWrongPassword(error, encrypted)) {
throw new WrongPasswordException(error);
}
throw error;
} finally {
try {
out.close();
} catch (Throwable ignored) {
}
if (!extractionSuccess && output.exists()) {
try {
output.delete();
} catch (Throwable ignored) {
}
}
}
try {
java.util.Date modified = item.getLastWriteTime();
if (modified != null) {
output.setLastModified(modified.getTime());
}
} catch (Throwable ignored) {
// best effort
}
throw (Exception) firstError[0];
}
progress.emitDone();
@ -879,176 +763,6 @@ public final class JBindExtractorMain {
private final List<String> passwords = new ArrayList<String>();
}
/**
* Bulk extraction callback that implements both IArchiveExtractCallback and
* ICryptoGetTextPassword. Using the bulk IInArchive.extract() API instead of
* per-item extractSlow() is critical for performance — solid RAR archives
* otherwise re-decode from the beginning for every single item.
*
* Per-file state is shared with the caller through one-element array holders
* (currentOutput, currentStream, ...) so the caller can inspect the last file
* and the first recorded error after extract() returns.
*/
private static final class BulkExtractCallback implements IArchiveExtractCallback, ICryptoGetTextPassword {
private final IInArchive archive;
// Maps an archive item index to its position in fileIndices/outputFiles/fileSizes.
private final Map<Integer, Integer> indexToPos;
private final List<Integer> fileIndices;
// Destination file per position; null means the item was skipped (conflict handling).
private final List<File> outputFiles;
private final List<Long> fileSizes;
private final ProgressTracker progress;
private final boolean encrypted;
private final String password;
// One-element holders shared with the calling code (mutable cross-callback state).
private final File[] currentOutput;
private final FileOutputStream[] currentStream;
private final boolean[] currentSuccess;
private final long[] currentRemaining;
private final int[] currentPos;
private final Throwable[] firstError;
BulkExtractCallback(IInArchive archive, Map<Integer, Integer> indexToPos,
List<Integer> fileIndices, List<File> outputFiles, List<Long> fileSizes,
ProgressTracker progress, boolean encrypted, String password,
File[] currentOutput, FileOutputStream[] currentStream,
boolean[] currentSuccess, long[] currentRemaining, int[] currentPos,
Throwable[] firstError) {
this.archive = archive;
this.indexToPos = indexToPos;
this.fileIndices = fileIndices;
this.outputFiles = outputFiles;
this.fileSizes = fileSizes;
this.progress = progress;
this.encrypted = encrypted;
this.password = password;
this.currentOutput = currentOutput;
this.currentStream = currentStream;
this.currentSuccess = currentSuccess;
this.currentRemaining = currentRemaining;
this.currentPos = currentPos;
this.firstError = firstError;
}
@Override
public String cryptoGetTextPassword() {
// Supplies the candidate password whenever 7-Zip asks for one.
return password;
}
@Override
public void setTotal(long total) {
// 7z reports total compressed bytes; we track uncompressed via ProgressTracker
}
@Override
public void setCompleted(long complete) {
// Not used — progress is advanced per write in the stream from getStream().
}
@Override
public ISequentialOutStream getStream(int index, ExtractAskMode extractAskMode) throws SevenZipException {
// A new item begins: finalize (close/cleanup) the previous one first.
closeCurrentStream();
Integer pos = indexToPos.get(index);
if (pos == null) {
// Not one of the requested file indices — nothing to write.
return null;
}
currentPos[0] = pos;
currentOutput[0] = outputFiles.get(pos);
currentSuccess[0] = false;
currentRemaining[0] = fileSizes.get(pos);
if (extractAskMode != ExtractAskMode.EXTRACT) {
// Test/skip pass: no output file must be created.
currentOutput[0] = null;
return null;
}
if (currentOutput[0] == null) {
// Item skipped by conflict resolution; still account its size so the
// overall progress can reach 100%.
progress.advance(currentRemaining[0]);
return null;
}
try {
ensureDirectory(currentOutput[0].getParentFile());
rejectSymlink(currentOutput[0]);
currentStream[0] = new FileOutputStream(currentOutput[0]);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Erstellen: " + error.getMessage(), error);
}
return new ISequentialOutStream() {
@Override
public int write(byte[] data) throws SevenZipException {
if (data == null || data.length == 0) {
return 0;
}
try {
currentStream[0].write(data);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
}
// Cap accounted bytes at the pre-scanned size so progress never overshoots.
long accounted = Math.min(currentRemaining[0], (long) data.length);
currentRemaining[0] -= accounted;
progress.advance(accounted);
return data.length;
}
};
}
@Override
public void prepareOperation(ExtractAskMode extractAskMode) {
// no-op
}
@Override
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
// Flush any progress the write callback did not see (e.g. zero-byte files
// or sizes the pre-scan overestimated).
if (currentRemaining[0] > 0) {
progress.advance(currentRemaining[0]);
currentRemaining[0] = 0;
}
if (result == ExtractOperationResult.OK) {
currentSuccess[0] = true;
closeCurrentStream();
if (currentPos[0] >= 0 && currentOutput[0] != null) {
try {
// Best effort: carry the archive's last-modified timestamp over.
int archiveIndex = fileIndices.get(currentPos[0]);
java.util.Date modified = (java.util.Date) archive.getProperty(archiveIndex, PropID.LAST_MODIFICATION_TIME);
if (modified != null) {
currentOutput[0].setLastModified(modified.getTime());
}
} catch (Throwable ignored) {
// best effort
}
}
} else {
closeCurrentStream();
// Remove the partially written file so failed items leave no residue.
if (currentOutput[0] != null && currentOutput[0].exists()) {
try {
currentOutput[0].delete();
} catch (Throwable ignored) {
}
}
// Record only the FIRST failure; later failures do not overwrite it.
if (firstError[0] == null) {
if (isPasswordFailure(result, encrypted)) {
firstError[0] = new WrongPasswordException(new IOException("Falsches Passwort"));
} else {
firstError[0] = new IOException("7z-Fehler: " + result.name());
}
}
}
}
private void closeCurrentStream() {
// Close the previous item's stream and, if it did not complete
// successfully, delete the incomplete output file.
if (currentStream[0] != null) {
try {
currentStream[0].close();
} catch (Throwable ignored) {
}
currentStream[0] = null;
}
if (!currentSuccess[0] && currentOutput[0] != null && currentOutput[0].exists()) {
try {
currentOutput[0].delete();
} catch (Throwable ignored) {
}
}
}
}
private static final class WrongPasswordException extends Exception {
private static final long serialVersionUID = 1L;

View File

@ -2,15 +2,7 @@ import fs from "node:fs";
import path from "node:path";
import { spawnSync } from "node:child_process";
const NPM_RELEASE_WIN = process.platform === "win32"
? {
command: process.env.ComSpec || "cmd.exe",
args: ["/d", "/s", "/c", "npm run release:win"]
}
: {
command: "npm",
args: ["run", "release:win"]
};
const NPM_EXECUTABLE = process.platform === "win32" ? "npm.cmd" : "npm";
function run(command, args, options = {}) {
const result = spawnSync(command, args, {
@ -333,7 +325,7 @@ async function main() {
updatePackageVersion(rootDir, version);
process.stdout.write(`Building release artifacts for ${tag}...\n`);
run(NPM_RELEASE_WIN.command, NPM_RELEASE_WIN.args);
run(NPM_EXECUTABLE, ["run", "release:win"]);
const assets = ensureAssetsExist(rootDir, version);
run("git", ["add", "package.json"]);

View File

@ -106,7 +106,6 @@ export class AppController {
|| settings.bestToken.trim()
|| settings.allDebridToken.trim()
|| (settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim())
|| settings.oneFichierApiKey.trim()
);
}
@ -287,7 +286,7 @@ export class AppController {
public exportBackup(): string {
const settings = { ...this.settings };
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword", "oneFichierApiKey"];
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword"];
for (const key of SENSITIVE_KEYS) {
const val = settings[key];
if (typeof val === "string" && val.length > 0) {
@ -309,7 +308,7 @@ export class AppController {
return { restored: false, message: "Kein gültiges Backup (settings/session fehlen)" };
}
const importedSettings = parsed.settings as AppSettings;
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword", "oneFichierApiKey"];
const SENSITIVE_KEYS: (keyof AppSettings)[] = ["token", "megaLogin", "megaPassword", "bestToken", "allDebridToken", "ddownloadLogin", "ddownloadPassword"];
for (const key of SENSITIVE_KEYS) {
const val = (importedSettings as Record<string, unknown>)[key];
if (typeof val === "string" && val.startsWith("***")) {

View File

@ -47,7 +47,6 @@ export function defaultSettings(): AppSettings {
allDebridToken: "",
ddownloadLogin: "",
ddownloadPassword: "",
oneFichierApiKey: "",
archivePasswordList: "",
rememberToken: true,
providerPrimary: "realdebrid",

View File

@ -11,16 +11,12 @@ const RAPIDGATOR_SCAN_MAX_BYTES = 512 * 1024;
const BEST_DEBRID_API_BASE = "https://bestdebrid.com/api/v1";
const ALL_DEBRID_API_BASE = "https://api.alldebrid.com/v4";
const ONEFICHIER_API_BASE = "https://api.1fichier.com/v1";
const ONEFICHIER_URL_RE = /^https?:\/\/(?:www\.)?(?:1fichier\.com|alterupload\.com|cjoint\.net|desfichiers\.com|dfichiers\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|tenvoi\.com|dl4free\.com)\/\?([a-z0-9]{5,20})$/i;
const PROVIDER_LABELS: Record<DebridProvider, string> = {
realdebrid: "Real-Debrid",
megadebrid: "Mega-Debrid",
bestdebrid: "BestDebrid",
alldebrid: "AllDebrid",
ddownload: "DDownload",
onefichier: "1Fichier"
ddownload: "DDownload"
};
interface ProviderUnrestrictedLink extends UnrestrictedLink {
@ -963,66 +959,6 @@ class AllDebridClient {
}
}
// ── 1Fichier Client ──
/**
 * Minimal 1Fichier API client that resolves a 1fichier.com share link into
 * a direct download URL via the `/download/get_token.cgi` endpoint, retrying
 * transient failures up to REQUEST_RETRIES attempts with backoff.
 */
class OneFichierClient {
// Bearer token for the 1Fichier REST API.
private apiKey: string;
public constructor(apiKey: string) {
this.apiKey = apiKey;
}
/**
 * Resolves `link` to a direct download URL.
 *
 * Throws immediately when `signal` is aborted or the error text looks like
 * an abort (but not a timeout); other failures are retried, and after the
 * final attempt a summary error containing the last failure is thrown.
 */
public async unrestrictLink(link: string, signal?: AbortSignal): Promise<UnrestrictedLink> {
if (!ONEFICHIER_URL_RE.test(link)) {
throw new Error("Kein 1Fichier-Link");
}
let lastError = "";
for (let attempt = 1; attempt <= REQUEST_RETRIES; attempt += 1) {
if (signal?.aborted) throw new Error("aborted:debrid");
try {
// NOTE(review): res.ok is never checked — this assumes the API always
// returns a JSON body whose status/error fields signal failure, even on
// non-2xx responses; confirm against the 1Fichier API documentation.
const res = await fetch(`${ONEFICHIER_API_BASE}/download/get_token.cgi`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${this.apiKey}`
},
body: JSON.stringify({ url: link, pretty: 1 }),
signal: withTimeoutSignal(signal, API_TIMEOUT_MS)
});
const json = await res.json() as Record<string, unknown>;
if (json.status === "KO" || json.error) {
const msg = String(json.message || json.error || "Unbekannter 1Fichier-Fehler");
throw new Error(msg);
}
const directUrl = String(json.url || "");
if (!directUrl) {
throw new Error("1Fichier: Keine Download-URL in Antwort");
}
return {
// Prefer the resolved URL's filename; fall back to the share link's.
fileName: filenameFromUrl(directUrl) || filenameFromUrl(link),
directUrl,
// The get_token response is not relied on for a size here.
fileSize: null,
retriesUsed: attempt - 1
};
} catch (error) {
lastError = compactErrorText(error);
// Abort (not timeout) is propagated immediately — no retries.
if (signal?.aborted || (/aborted/i.test(lastError) && !/timeout/i.test(lastError))) {
throw error;
}
if (attempt < REQUEST_RETRIES) {
await sleep(retryDelay(attempt), signal);
}
}
}
throw new Error(`1Fichier-Unrestrict fehlgeschlagen: ${lastError}`);
}
}
const DDOWNLOAD_URL_RE = /^https?:\/\/(?:www\.)?(?:ddownload\.com|ddl\.to)\/([a-z0-9]+)/i;
const DDOWNLOAD_WEB_BASE = "https://ddownload.com";
const DDOWNLOAD_WEB_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36";
@ -1293,25 +1229,6 @@ export class DebridService {
public async unrestrictLink(link: string, signal?: AbortSignal, settingsSnapshot?: AppSettings): Promise<ProviderUnrestrictedLink> {
const settings = settingsSnapshot ? cloneSettings(settingsSnapshot) : cloneSettings(this.settings);
// 1Fichier is a direct file hoster. If the link is a 1fichier.com URL
// and the API key is configured, use 1Fichier directly before debrid providers.
if (ONEFICHIER_URL_RE.test(link) && this.isProviderConfiguredFor(settings, "onefichier")) {
try {
const result = await this.unrestrictViaProvider(settings, "onefichier", link, signal);
return {
...result,
provider: "onefichier",
providerLabel: PROVIDER_LABELS["onefichier"]
};
} catch (error) {
const errorText = compactErrorText(error);
if (signal?.aborted || (/aborted/i.test(errorText) && !/timeout/i.test(errorText))) {
throw error;
}
// Fall through to normal provider chain
}
}
// DDownload is a direct file hoster, not a debrid service.
// If the link is a ddownload.com/ddl.to URL and the account is configured,
// use DDownload directly before trying any debrid providers.
@ -1420,9 +1337,6 @@ export class DebridService {
if (provider === "ddownload") {
return Boolean(settings.ddownloadLogin.trim() && settings.ddownloadPassword.trim());
}
if (provider === "onefichier") {
return Boolean(settings.oneFichierApiKey.trim());
}
return Boolean(settings.bestToken.trim());
}
@ -1439,9 +1353,6 @@ export class DebridService {
if (provider === "ddownload") {
return this.getDdownloadClient(settings.ddownloadLogin, settings.ddownloadPassword).unrestrictLink(link, signal);
}
if (provider === "onefichier") {
return new OneFichierClient(settings.oneFichierApiKey).unrestrictLink(link, signal);
}
return new BestDebridClient(settings.bestToken).unrestrictLink(link, signal);
}
}

View File

@ -334,11 +334,9 @@ const EMPTY_DIR_IGNORED_FILE_NAMES = new Set([
"desktop.ini",
".ds_store"
]);
const EMPTY_DIR_IGNORED_FILE_RE = /^\.rd_extract_progress(?:_[^.\\/]+)?\.json$/i;
function isIgnorableEmptyDirFileName(fileName: string): boolean {
const normalized = String(fileName || "").trim().toLowerCase();
return EMPTY_DIR_IGNORED_FILE_NAMES.has(normalized) || EMPTY_DIR_IGNORED_FILE_RE.test(normalized);
return EMPTY_DIR_IGNORED_FILE_NAMES.has(String(fileName || "").trim().toLowerCase());
}
function toWindowsLongPathIfNeeded(filePath: string): string {
@ -753,86 +751,60 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
return null;
}
export function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
const entryLower = archiveName.toLowerCase();
// Helper: get item basename (try targetPath first, then fileName)
const itemBaseName = (item: DownloadItem): string =>
path.basename(item.targetPath || item.fileName || "");
// Try pattern-based matching first (for multipart archives)
let pattern: RegExp | null = null;
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
if (multipartMatch) {
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
}
if (!pattern) {
const rarMatch = entryLower.match(/^(.*)\.rar$/);
if (rarMatch) {
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
}
}
if (!pattern) {
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
if (zipSplitMatch) {
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
}
}
if (!pattern) {
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
if (sevenSplitMatch) {
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
}
}
if (!pattern && /^(.*)\.001$/.test(entryLower) && !/\.(zip|7z)\.001$/.test(entryLower)) {
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
if (genericSplitMatch) {
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
}
}
// Attempt 1: Pattern match (handles multipart archives)
if (pattern) {
const matched = items.filter((item) => pattern!.test(itemBaseName(item)));
if (matched.length > 0) return matched;
}
// Attempt 2: Exact filename match (case-insensitive)
const exactMatch = items.filter((item) => itemBaseName(item).toLowerCase() === entryLower);
if (exactMatch.length > 0) return exactMatch;
// Attempt 3: Stem-based fuzzy match — strip archive extensions and compare stems.
// Handles cases where debrid services modify filenames slightly.
const archiveStem = entryLower
.replace(/\.part\d+\.rar$/i, "")
.replace(/\.r\d{2,3}$/i, "")
.replace(/\.rar$/i, "")
.replace(/\.(zip|7z)\.\d{3}$/i, "")
.replace(/\.\d{3}$/i, "")
.replace(/\.(zip|7z)$/i, "");
if (archiveStem.length > 3) {
const stemMatch = items.filter((item) => {
const name = itemBaseName(item).toLowerCase();
return name.startsWith(archiveStem) && /\.(rar|r\d{2,3}|zip|7z|\d{3})$/i.test(name);
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
if (stemMatch.length > 0) return stemMatch;
}
// Attempt 4: If only one item in the list and one archive — return it as a best-effort match.
// This handles single-file packages where the filename may have been modified.
if (items.length === 1) {
const singleName = itemBaseName(items[0]).toLowerCase();
if (/\.(rar|zip|7z|\d{3})$/i.test(singleName)) {
return items;
}
const rarMatch = entryLower.match(/^(.*)\.rar$/);
if (rarMatch) {
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
return [];
// Split ZIP (e.g., movie.zip.001, movie.zip.002)
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
if (zipSplitMatch) {
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
// Split 7z (e.g., movie.7z.001, movie.7z.002)
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
if (sevenSplitMatch) {
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
// Generic .NNN splits (e.g., movie.001, movie.002)
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
if (genericSplitMatch && !/\.(zip|7z)\.001$/.test(entryLower)) {
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
return name === entryLower;
});
}
function retryDelayWithJitter(attempt: number, baseMs: number): number {
@ -1412,10 +1384,6 @@ export class DownloadManager extends EventEmitter {
addedPackages += 1;
}
if (addedPackages > 0 || addedLinks > 0) {
const pkgNames = packages.filter((p) => p.links.length > 0).map((p) => p.name).join(", ");
logger.info(`Pakete hinzugefügt: ${addedPackages} Paket(e), ${addedLinks} Link(s) [${pkgNames}]`);
}
this.persistSoon();
this.emitState();
if (unresolvedByLink.size > 0) {
@ -3602,16 +3570,14 @@ export class DownloadManager extends EventEmitter {
this.emit("state", this.getSnapshot());
return;
}
// Too soon — replace any pending timer with a shorter forced-emit timer
if (this.stateEmitTimer) {
clearTimeout(this.stateEmitTimer);
this.stateEmitTimer = null;
// Too soon — schedule deferred forced emit
if (!this.stateEmitTimer) {
this.stateEmitTimer = setTimeout(() => {
this.stateEmitTimer = null;
this.lastStateEmitAt = nowMs();
this.emit("state", this.getSnapshot());
}, MIN_FORCE_GAP_MS - sinceLastEmit);
}
this.stateEmitTimer = setTimeout(() => {
this.stateEmitTimer = null;
this.lastStateEmitAt = nowMs();
this.emit("state", this.getSnapshot());
}, MIN_FORCE_GAP_MS - sinceLastEmit);
return;
}
if (this.stateEmitTimer) {
@ -4768,7 +4734,6 @@ export class DownloadManager extends EventEmitter {
item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
item.updatedAt = nowMs();
this.emitState();
logger.info(`Download Start: ${item.fileName} (${humanSize(unrestricted.fileSize || 0)}) via ${unrestricted.providerLabel}, pkg=${pkg.name}`);
const maxAttempts = maxItemAttempts;
let done = false;
@ -6309,6 +6274,19 @@ export class DownloadManager extends EventEmitter {
}
if (readyArchives.size === 0) {
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
// Relabel completed items that are part of incomplete multi-part archives
// from "Ausstehend" to "Warten auf Parts" so the UI accurately reflects
// that extraction is waiting for remaining parts to finish downloading.
const allDone = items.every((i) => i.status === "completed" || i.status === "failed" || i.status === "cancelled");
if (!allDone) {
for (const entry of items) {
if (entry.status === "completed" && entry.fullStatus === "Entpacken - Ausstehend") {
entry.fullStatus = "Entpacken - Warten auf Parts";
entry.updatedAt = nowMs();
}
}
this.emitState();
}
return 0;
}
@ -6381,34 +6359,31 @@ export class DownloadManager extends EventEmitter {
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
resolveArchiveItemsFromList(archiveName, items);
// Track archives for parallel hybrid extraction progress
const hybridResolvedItems = new Map<string, DownloadItem[]>();
const hybridStartTimes = new Map<string, number>();
// Track multiple active archives for parallel hybrid extraction
const activeHybridArchiveMap = new Map<string, DownloadItem[]>();
const hybridArchiveStartTimes = new Map<string, number>();
let hybridLastEmitAt = 0;
let hybridLastProgressCurrent: number | null = null;
// Mark items based on whether their archive is actually ready for extraction.
// Only items whose archive is in readyArchives get "Ausstehend"; others keep
// their current label to avoid flicker between hybrid runs.
// "Warten auf Parts" to avoid flicker between hybrid runs.
const allDownloaded = completedItems.length >= items.length;
let labelsChanged = false;
for (const entry of completedItems) {
if (isExtractedLabel(entry.fullStatus)) {
continue;
}
const belongsToReady = allDownloaded
|| hybridFileNames.has((entry.fileName || "").toLowerCase())
|| (entry.targetPath && hybridFileNames.has(path.basename(entry.targetPath).toLowerCase()));
const targetLabel = belongsToReady ? "Entpacken - Ausstehend" : "Entpacken - Warten auf Parts";
if (entry.fullStatus !== targetLabel) {
entry.fullStatus = targetLabel;
entry.updatedAt = nowMs();
labelsChanged = true;
if (allDownloaded) {
// Everything downloaded — all remaining items will be extracted
entry.fullStatus = "Entpacken - Ausstehend";
} else if (hybridFileNames.has((entry.fileName || "").toLowerCase()) ||
(entry.targetPath && hybridFileNames.has(path.basename(entry.targetPath).toLowerCase()))) {
entry.fullStatus = "Entpacken - Ausstehend";
} else {
entry.fullStatus = "Entpacken - Warten auf Parts";
}
entry.updatedAt = nowMs();
}
if (labelsChanged) {
this.emitState();
}
this.emitState();
try {
const result = await extractPackageArchives({
@ -6425,7 +6400,7 @@ export class DownloadManager extends EventEmitter {
packageId,
hybridMode: true,
maxParallel: this.settings.maxParallelExtract || 2,
extractCpuPriority: "high",
extractCpuPriority: this.settings.extractCpuPriority,
onProgress: (progress) => {
if (progress.phase === "preparing") {
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
@ -6433,27 +6408,26 @@ export class DownloadManager extends EventEmitter {
return;
}
if (progress.phase === "done") {
hybridResolvedItems.clear();
hybridStartTimes.clear();
hybridLastProgressCurrent = null;
// Do NOT mark remaining archives as "Done" here — some may have
// failed. The post-extraction code (result.failed check) will
// assign the correct label. Only clear the tracking maps.
activeHybridArchiveMap.clear();
hybridArchiveStartTimes.clear();
return;
}
const currentCount = Math.max(0, Number(progress.current ?? 0));
const archiveFinished = progress.archiveDone === true
|| (hybridLastProgressCurrent !== null && currentCount > hybridLastProgressCurrent);
hybridLastProgressCurrent = currentCount;
if (progress.archiveName) {
// Resolve items for this archive if not yet tracked
if (!hybridResolvedItems.has(progress.archiveName)) {
if (!activeHybridArchiveMap.has(progress.archiveName)) {
const resolved = resolveArchiveItems(progress.archiveName);
hybridResolvedItems.set(progress.archiveName, resolved);
hybridStartTimes.set(progress.archiveName, nowMs());
activeHybridArchiveMap.set(progress.archiveName, resolved);
hybridArchiveStartTimes.set(progress.archiveName, nowMs());
if (resolved.length === 0) {
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.slice(0, 5).map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
} else {
logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
// Immediately label the matched items and force emit so the UI
// transitions from "Ausstehend" to the extraction label right away.
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
const initAt = nowMs();
for (const entry of resolved) {
@ -6466,26 +6440,23 @@ export class DownloadManager extends EventEmitter {
this.emitState(true);
}
}
const archItems = hybridResolvedItems.get(progress.archiveName) || [];
const archItems = activeHybridArchiveMap.get(progress.archiveName)!;
// Only mark as finished on explicit archive-done signal (or real current increment),
// never on raw 100% archivePercent, because password retries can report 100% mid-run.
if (archiveFinished) {
// If archive is at 100%, mark its items as done and remove from active
if (Number(progress.archivePercent ?? 0) >= 100) {
const doneAt = nowMs();
const startedAt = hybridStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = progress.archiveSuccess === false
? "Entpacken - Error"
: formatExtractDone(doneAt - startedAt);
const startedAt = hybridArchiveStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = formatExtractDone(doneAt - startedAt);
for (const entry of archItems) {
if (!isExtractedLabel(entry.fullStatus)) {
entry.fullStatus = doneLabel;
entry.updatedAt = doneAt;
}
}
hybridResolvedItems.delete(progress.archiveName);
hybridStartTimes.delete(progress.archiveName);
activeHybridArchiveMap.delete(progress.archiveName);
hybridArchiveStartTimes.delete(progress.archiveName);
// Show transitional label while next archive initializes
const done = currentCount;
const done = progress.current + 1;
if (done < progress.total) {
pkg.postProcessLabel = `Entpacken (${done}/${progress.total}) - Naechstes Archiv...`;
this.emitState();
@ -6517,7 +6488,7 @@ export class DownloadManager extends EventEmitter {
}
// Update package-level label with overall extraction progress
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
if (progress.passwordFound) {
pkg.postProcessLabel = `Passwort gefunden · ${progress.archiveName || ""}`;
@ -6775,10 +6746,9 @@ export class DownloadManager extends EventEmitter {
}
}, extractTimeoutMs);
try {
// Track archives for parallel extraction progress
const fullResolvedItems = new Map<string, DownloadItem[]>();
const fullStartTimes = new Map<string, number>();
let fullLastProgressCurrent: number | null = null;
// Track multiple active archives for parallel extraction
const activeArchiveItemsMap = new Map<string, DownloadItem[]>();
const archiveStartTimes = new Map<string, number>();
const result = await extractPackageArchives({
packageDir: pkg.outputDir,
@ -6792,8 +6762,8 @@ export class DownloadManager extends EventEmitter {
packageId,
skipPostCleanup: true,
maxParallel: this.settings.maxParallelExtract || 2,
// All downloads finished — use NORMAL OS priority so extraction runs at
// full speed (matching manual 7-Zip/WinRAR speed).
// All downloads finished — use highest configured priority so extraction
// isn't starved. "high" maps to BELOW_NORMAL instead of the default IDLE.
extractCpuPriority: "high",
onProgress: (progress) => {
if (progress.phase === "preparing") {
@ -6802,28 +6772,26 @@ export class DownloadManager extends EventEmitter {
return;
}
if (progress.phase === "done") {
fullResolvedItems.clear();
fullStartTimes.clear();
fullLastProgressCurrent = null;
// Do NOT mark remaining archives as "Done" here — some may have
// failed. The post-extraction code (result.failed check) will
// assign the correct label. Only clear the tracking maps.
activeArchiveItemsMap.clear();
archiveStartTimes.clear();
emitExtractStatus("Entpacken 100%", true);
return;
}
const currentCount = Math.max(0, Number(progress.current ?? 0));
const archiveFinished = progress.archiveDone === true
|| (fullLastProgressCurrent !== null && currentCount > fullLastProgressCurrent);
fullLastProgressCurrent = currentCount;
if (progress.archiveName) {
// Resolve items for this archive if not yet tracked
if (!fullResolvedItems.has(progress.archiveName)) {
if (!activeArchiveItemsMap.has(progress.archiveName)) {
const resolved = resolveArchiveItems(progress.archiveName);
fullResolvedItems.set(progress.archiveName, resolved);
fullStartTimes.set(progress.archiveName, nowMs());
activeArchiveItemsMap.set(progress.archiveName, resolved);
archiveStartTimes.set(progress.archiveName, nowMs());
if (resolved.length === 0) {
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.slice(0, 5).map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
} else {
logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
// Immediately label items and force emit for instant UI feedback
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
const initAt = nowMs();
for (const entry of resolved) {
@ -6835,26 +6803,23 @@ export class DownloadManager extends EventEmitter {
emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true);
}
}
const archiveItems = fullResolvedItems.get(progress.archiveName) || [];
const archiveItems = activeArchiveItemsMap.get(progress.archiveName)!;
// Only finalize on explicit archive completion (or real current increment),
// not on plain 100% archivePercent.
if (archiveFinished) {
// If archive is at 100%, mark its items as done and remove from active
if (Number(progress.archivePercent ?? 0) >= 100) {
const doneAt = nowMs();
const startedAt = fullStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = progress.archiveSuccess === false
? "Entpacken - Error"
: formatExtractDone(doneAt - startedAt);
const startedAt = archiveStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = formatExtractDone(doneAt - startedAt);
for (const entry of archiveItems) {
if (!isExtractedLabel(entry.fullStatus)) {
entry.fullStatus = doneLabel;
entry.updatedAt = doneAt;
}
}
fullResolvedItems.delete(progress.archiveName);
fullStartTimes.delete(progress.archiveName);
activeArchiveItemsMap.delete(progress.archiveName);
archiveStartTimes.delete(progress.archiveName);
// Show transitional label while next archive initializes
const done = currentCount;
const done = progress.current + 1;
if (done < progress.total) {
emitExtractStatus(`Entpacken (${done}/${progress.total}) - Naechstes Archiv...`, true);
}
@ -6889,7 +6854,7 @@ export class DownloadManager extends EventEmitter {
const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000
? ` · ${Math.floor(progress.elapsedMs / 1000)}s`
: "";
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
let overallLabel: string;
if (progress.passwordFound) {
@ -7039,7 +7004,7 @@ export class DownloadManager extends EventEmitter {
): Promise<void> {
try {
// ── Nested extraction: extract archives found inside the extracted output ──
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0 && this.settings.autoExtract) {
if (extractedCount > 0 && failed === 0 && this.settings.autoExtract) {
const nestedBlacklist = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
const nestedCandidates = (await findArchiveCandidates(pkg.extractDir))
.filter((p) => !nestedBlacklist.test(p));
@ -7066,16 +7031,14 @@ export class DownloadManager extends EventEmitter {
}
// ── Auto-Rename ──
if (extractedCount > 0 || alreadyMarkedExtracted) {
if (extractedCount > 0) {
pkg.postProcessLabel = "Renaming...";
this.emitState();
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
}
// ── Archive cleanup (source archives in outputDir) ──
// Also run when hybrid extraction already handled everything (extractedCount=0
// but alreadyMarkedExtracted=true) so archives are still cleaned up.
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0 && this.settings.cleanupMode !== "none") {
if (extractedCount > 0 && failed === 0 && this.settings.cleanupMode !== "none") {
pkg.postProcessLabel = "Aufräumen...";
this.emitState();
const sourceAndTargetEqual = path.resolve(pkg.outputDir).toLowerCase() === path.resolve(pkg.extractDir).toLowerCase();
@ -7099,7 +7062,7 @@ export class DownloadManager extends EventEmitter {
}
// ── Link/Sample artifact removal ──
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0) {
if (extractedCount > 0 && failed === 0) {
if (this.settings.removeLinkFilesAfterExtract) {
const removedLinks = await removeDownloadLinkArtifacts(pkg.extractDir);
if (removedLinks > 0) {
@ -7114,15 +7077,8 @@ export class DownloadManager extends EventEmitter {
}
}
// ── Resume state cleanup ──
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0) {
await clearExtractResumeState(pkg.outputDir, packageId);
// Backward compatibility: older versions used .rd_extract_progress.json without package suffix.
await clearExtractResumeState(pkg.outputDir);
}
// ── Empty directory tree removal ──
if ((extractedCount > 0 || alreadyMarkedExtracted) && failed === 0 && this.settings.cleanupMode === "delete") {
if (extractedCount > 0 && failed === 0 && this.settings.cleanupMode === "delete") {
if (!(await hasAnyFilesRecursive(pkg.outputDir))) {
const removedDirs = await removeEmptyDirectoryTree(pkg.outputDir);
if (removedDirs > 0) {
@ -7131,6 +7087,11 @@ export class DownloadManager extends EventEmitter {
}
}
// ── Resume state cleanup ──
if (extractedCount > 0 && failed === 0) {
await clearExtractResumeState(pkg.outputDir, packageId);
}
// ── MKV collection ──
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
pkg.postProcessLabel = "Verschiebe MKVs...";

View File

@ -1,7 +1,7 @@
import fs from "node:fs";
import path from "node:path";
import os from "node:os";
import { spawn, spawnSync, type ChildProcess } from "node:child_process";
import { spawn, spawnSync } from "node:child_process";
import AdmZip from "adm-zip";
import { CleanupMode, ConflictMode } from "../shared/types";
import { logger } from "./logger";
@ -10,7 +10,7 @@ import { removeDownloadLinkArtifacts, removeSampleArtifacts } from "./cleanup";
import crypto from "node:crypto";
const DEFAULT_ARCHIVE_PASSWORDS = ["", "serienfans.org", "serienjunkies.org"];
const NO_EXTRACTOR_MESSAGE = "Kein nativer Entpacker gefunden (7-Zip/WinRAR). Bitte 7-Zip oder WinRAR installieren.";
const NO_EXTRACTOR_MESSAGE = "WinRAR/UnRAR nicht gefunden. Bitte WinRAR installieren.";
const NO_JVM_EXTRACTOR_MESSAGE = "7-Zip-JBinding Runtime nicht gefunden. Bitte resources/extractor-jvm prüfen.";
const JVM_EXTRACTOR_MAIN_CLASS = "com.sucukdeluxe.extractor.JBindExtractorMain";
const JVM_EXTRACTOR_CLASSES_SUBDIR = "classes";
@ -123,8 +123,6 @@ export interface ExtractProgressUpdate {
passwordAttempt?: number;
passwordTotal?: number;
passwordFound?: boolean;
archiveDone?: boolean;
archiveSuccess?: boolean;
}
const MAX_EXTRACT_OUTPUT_BUFFER = 48 * 1024;
@ -135,8 +133,6 @@ const EXTRACT_MAX_TIMEOUT_MS = 120 * 60 * 1000;
const ARCHIVE_SORT_COLLATOR = new Intl.Collator(undefined, { numeric: true, sensitivity: "base" });
const DISK_SPACE_SAFETY_FACTOR = 1.1;
const NESTED_EXTRACT_BLACKLIST_RE = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
// Upper bound on cached package passwords; oldest entries are evicted first.
const PACKAGE_PASSWORD_CACHE_LIMIT = 256;
// Package-scoped password cache: cache key (see packagePasswordCacheKey) →
// last password that successfully extracted an archive in that package.
// Map insertion order is used as an LRU list by the read/write helpers.
const packageLearnedPasswords = new Map<string, string>();
export type ArchiveSignature = "rar" | "7z" | "zip" | "gzip" | "bzip2" | "xz" | null;
@ -149,54 +145,6 @@ const ARCHIVE_SIGNATURES: { prefix: string; type: ArchiveSignature }[] = [
{ prefix: "fd377a585a00", type: "xz" },
];
/**
 * Builds the lookup key for the package password cache.
 * Prefers the stable package id (`pkg:<id>`); when none is given, falls back
 * to a normalized form of the package directory (`dir:<pathSetKey(...)>`).
 */
function packagePasswordCacheKey(packageDir: string, packageId?: string): string {
  const trimmedId = String(packageId || "").trim();
  return trimmedId
    ? `pkg:${trimmedId}`
    : `dir:${pathSetKey(path.resolve(packageDir))}`;
}
/**
 * Human-readable label for log lines about the package password cache.
 * Shows a shortened package id when available, otherwise the directory name.
 */
function packagePasswordCacheLabel(packageDir: string, packageId?: string): string {
  const trimmedId = String(packageId || "").trim();
  if (!trimmedId) {
    return `packageDir=${path.basename(path.resolve(packageDir))}`;
  }
  return `packageId=${trimmedId.slice(0, 8)}`;
}
/**
 * Returns the password previously learned for this cache key, or "" when
 * none is stored. A cache hit is re-inserted so the Map's insertion order
 * acts as an LRU list and recently used package entries survive eviction.
 */
function readCachedPackagePassword(cacheKey: string): string {
  const hit = packageLearnedPasswords.get(cacheKey);
  if (hit) {
    // LRU touch: delete + set moves the key to the back of insertion order.
    packageLearnedPasswords.delete(cacheKey);
    packageLearnedPasswords.set(cacheKey, hit);
    return hit;
  }
  return "";
}
/**
 * Stores a successfully used password for a package, keeping the cache
 * bounded. Empty/whitespace-only passwords are never cached. Overwrites
 * refresh the key's insertion order (LRU touch); when the size limit is
 * exceeded the least recently touched entry is evicted.
 */
function writeCachedPackagePassword(cacheKey: string, password: string): void {
  const trimmed = String(password || "").trim();
  if (!trimmed) {
    return;
  }
  // Unconditional delete + set keeps insertion order fresh for existing keys.
  packageLearnedPasswords.delete(cacheKey);
  packageLearnedPasswords.set(cacheKey, trimmed);
  while (packageLearnedPasswords.size > PACKAGE_PASSWORD_CACHE_LIMIT) {
    const oldestKey = packageLearnedPasswords.keys().next().value as string | undefined;
    if (!oldestKey) {
      break;
    }
    packageLearnedPasswords.delete(oldestKey);
  }
}
/** Forgets the learned password for this cache key (invalidation hook). */
function clearCachedPackagePassword(cacheKey: string): void {
  packageLearnedPasswords.delete(cacheKey);
}
export async function detectArchiveSignature(filePath: string): Promise<ArchiveSignature> {
let fd: fs.promises.FileHandle | null = null;
try {
@ -430,12 +378,6 @@ function parseProgressPercent(chunk: string): number | null {
return latest;
}
/**
 * Monotonic archive progress: both values are coerced to integers and
 * clamped to 0..100, and the result never drops below the previous value.
 * Non-numeric input (NaN) is treated as 0.
 */
function nextArchivePercent(previous: number, incoming: number): number {
  const clamp = (value: number): number =>
    Math.min(100, Math.max(0, Math.floor(Number(value) || 0)));
  return Math.max(clamp(previous), clamp(incoming));
}
async function shouldPreferExternalZip(archivePath: string): Promise<boolean> {
if (extractorBackendMode() !== "legacy") {
return true;
@ -587,63 +529,32 @@ function prioritizePassword(passwords: string[], successful: string): string[] {
return passwords;
}
const index = passwords.findIndex((candidate) => candidate === target);
if (index === 0) {
if (index <= 0) {
return passwords;
}
if (index < 0) {
return [target, ...passwords.filter((candidate) => candidate !== target)];
}
const next = [...passwords];
const [value] = next.splice(index, 1);
next.unshift(value);
return next;
}
function nativeExtractorCandidates(): string[] {
function winRarCandidates(): string[] {
const programFiles = process.env.ProgramFiles || "C:\\Program Files";
const programFilesX86 = process.env["ProgramFiles(x86)"] || "C:\\Program Files (x86)";
const localAppData = process.env.LOCALAPPDATA || "";
const sevenZipInstalled = [
process.env.RD_7Z_BIN || "",
path.join(programFiles, "7-Zip", "7z.exe"),
path.join(programFilesX86, "7-Zip", "7z.exe")
];
if (localAppData) {
sevenZipInstalled.push(path.join(localAppData, "Programs", "7-Zip", "7z.exe"));
}
const winRarInstalled = [
const installed = [
path.join(programFiles, "WinRAR", "UnRAR.exe"),
path.join(programFilesX86, "WinRAR", "UnRAR.exe")
];
if (localAppData) {
winRarInstalled.push(path.join(localAppData, "Programs", "WinRAR", "UnRAR.exe"));
installed.push(path.join(localAppData, "Programs", "WinRAR", "UnRAR.exe"));
}
const ordered = resolvedExtractorCommand
? [
resolvedExtractorCommand,
...sevenZipInstalled,
"7z.exe",
"7z",
"7za.exe",
"7za",
...winRarInstalled,
"UnRAR.exe",
"unrar"
]
: [
...sevenZipInstalled,
"7z.exe",
"7z",
"7za.exe",
"7za",
...winRarInstalled,
"UnRAR.exe",
"unrar"
];
? [resolvedExtractorCommand, ...installed, "UnRAR.exe", "unrar"]
: [...installed, "UnRAR.exe", "unrar"];
return Array.from(new Set(ordered.filter(Boolean)));
}
@ -689,8 +600,8 @@ function extractCpuBudgetFromPriority(priority?: string): number {
function extractOsPriority(priority?: string): number {
switch (priority) {
case "high": return os.constants.priority.PRIORITY_NORMAL;
default: return os.constants.priority.PRIORITY_BELOW_NORMAL;
case "high": return os.constants.priority.PRIORITY_BELOW_NORMAL;
default: return os.constants.priority.PRIORITY_LOW;
}
}
@ -704,15 +615,10 @@ function extractCpuBudgetPercent(priority?: string): number {
function extractorThreadSwitch(hybridMode = false, priority?: string): string {
if (hybridMode) {
// Use half the CPU budget during hybrid extraction to leave headroom for
// concurrent downloads. Falls back to at least 2 threads.
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
return `-mt${Math.floor(envValue)}`;
}
const cpuCount = Math.max(1, os.cpus().length || 1);
const hybridThreads = Math.max(2, Math.min(8, Math.floor(cpuCount / 2)));
return `-mt${hybridThreads}`;
// 2 threads during hybrid extraction (download + extract simultaneously).
// JDownloader 2 uses in-process 7-Zip-JBinding which naturally limits throughput
// to ~16 MB/s write. 2 UnRAR threads produce similar controlled disk load.
return "-mt2";
}
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
@ -734,8 +640,8 @@ function lowerExtractProcessPriority(childPid: number | undefined, cpuPriority?:
return;
}
try {
// Sets CPU scheduling priority for the extraction process.
// high → NORMAL (full speed), default → BELOW_NORMAL. I/O priority stays Normal.
// Lowers CPU scheduling priority so extraction doesn't starve other processes.
// high → BELOW_NORMAL, middle/low → IDLE. I/O priority stays Normal (like JDownloader 2).
os.setPriority(pid, extractOsPriority(cpuPriority));
} catch {
// ignore: priority lowering is best-effort
@ -938,7 +844,7 @@ type JvmExtractResult = {
};
function extractorBackendMode(): ExtractBackendMode {
const defaultMode = "legacy";
const defaultMode = process.env.VITEST ? "legacy" : "jvm";
const raw = String(process.env.RD_EXTRACT_BACKEND || defaultMode).trim().toLowerCase();
if (raw === "legacy") {
return "legacy";
@ -1050,12 +956,9 @@ function parseJvmLine(
if (trimmed.startsWith("RD_PROGRESS ")) {
const parsed = parseProgressPercent(trimmed);
if (parsed !== null) {
const next = nextArchivePercent(state.bestPercent, parsed);
if (next !== state.bestPercent) {
state.bestPercent = next;
onArchiveProgress?.(next);
}
if (parsed !== null && parsed > state.bestPercent) {
state.bestPercent = parsed;
onArchiveProgress?.(parsed);
}
return;
}
@ -1080,312 +983,7 @@ function parseJvmLine(
}
}
// ── Persistent JVM Daemon ──
// Keeps a single JVM process alive across multiple extraction requests,
// eliminating the ~5s JVM boot overhead per archive.
// One in-flight extraction request handed to the persistent JVM daemon.
interface DaemonRequest {
  // Settles the caller's promise with the final extraction result.
  resolve: (result: JvmExtractResult) => void;
  // Optional per-archive progress callback (0..100 percent).
  onArchiveProgress?: (percent: number) => void;
  // Caller-supplied abort signal; aborting kills the daemon process.
  signal?: AbortSignal;
  // Hard timeout for this request in milliseconds (0/undefined = none).
  timeoutMs?: number;
  // Mutable parse accumulator shared with parseJvmLine for this request.
  parseState: { bestPercent: number; usedPassword: string; backend: string; reportedError: string };
  // Basename of the archive being extracted (used in log lines).
  archiveName: string;
  // Date.now() at request start, for elapsed-time logging.
  startedAt: number;
  // Number of password candidates sent with the request (logging only).
  passwordCount: number;
}
// ── Module-level daemon state (single persistent JVM, one request at a time) ──
// Live daemon child process, or null when not running.
let daemonProcess: ChildProcess | null = null;
// True once the daemon printed RD_DAEMON_READY on stdout.
let daemonReady = false;
// True while a request is in flight; the daemon handles one request at a time.
let daemonBusy = false;
// The currently active request, or null when idle.
let daemonCurrentRequest: DaemonRequest | null = null;
// Partial-line buffers for stdout/stderr line splitting.
let daemonStdoutBuffer = "";
let daemonStderrBuffer = "";
// Rolling combined output (size-limited via appendLimited) used for error text.
let daemonOutput = "";
// Timer handle for the per-request timeout, if any.
let daemonTimeoutId: NodeJS.Timeout | null = null;
// Abort listener registered on the current request's signal, if any.
let daemonAbortHandler: (() => void) | null = null;
// Layout (java command + classpath) the running daemon was started with.
let daemonLayout: JvmExtractorLayout | null = null;
/**
 * Terminates the persistent JVM extractor daemon (if running) and resets
 * all module-level daemon state so a fresh daemon can be started later.
 * Best-effort: stdin/kill failures are deliberately swallowed.
 */
export function shutdownDaemon(): void {
  const child = daemonProcess;
  if (child) {
    try { child.stdin?.end(); } catch { /* ignore */ }
    try { killProcessTree(child); } catch { /* ignore */ }
    daemonProcess = null;
  }
  daemonReady = false;
  daemonBusy = false;
  daemonCurrentRequest = null;
  daemonStdoutBuffer = "";
  daemonStderrBuffer = "";
  daemonOutput = "";
  if (daemonTimeoutId) {
    clearTimeout(daemonTimeoutId);
    daemonTimeoutId = null;
  }
  daemonAbortHandler = null;
  daemonLayout = null;
}
/**
 * Completes the in-flight daemon request: clears per-request module state
 * (buffers, timeout timer, abort listener), then resolves the caller's
 * promise with the given result. No-op when no request is active.
 */
function finishDaemonRequest(result: JvmExtractResult): void {
  const request = daemonCurrentRequest;
  if (!request) {
    return;
  }
  // Clear state BEFORE resolving so re-entrant calls see an idle daemon.
  daemonCurrentRequest = null;
  daemonBusy = false;
  daemonStdoutBuffer = "";
  daemonStderrBuffer = "";
  daemonOutput = "";
  if (daemonTimeoutId) {
    clearTimeout(daemonTimeoutId);
    daemonTimeoutId = null;
  }
  if (request.signal && daemonAbortHandler) {
    request.signal.removeEventListener("abort", daemonAbortHandler);
    daemonAbortHandler = null;
  }
  request.resolve(result);
}
/**
 * Dispatches one stdout line from the persistent JVM daemon.
 * Protocol (as visible here): "RD_DAEMON_READY" marks boot completion,
 * "RD_REQUEST_DONE <code>" terminates the current request (0 = success),
 * and every other non-empty line is forwarded to parseJvmLine for
 * progress/password/backend parsing. Blank lines are ignored.
 */
function handleDaemonLine(line: string): void {
  const trimmed = String(line || "").trim();
  if (!trimmed) return;
  // Check for daemon ready signal
  if (trimmed === "RD_DAEMON_READY") {
    daemonReady = true;
    logger.info("JVM Daemon bereit (persistent)");
    return;
  }
  // Check for request completion
  if (trimmed.startsWith("RD_REQUEST_DONE ")) {
    const code = parseInt(trimmed.slice("RD_REQUEST_DONE ".length).trim(), 10);
    const req = daemonCurrentRequest;
    // Completion line with no active request: nothing to settle — drop it.
    if (!req) return;
    const elapsedMs = Date.now() - req.startedAt;
    logger.info(
      `JVM Daemon Request Ende: archive=${req.archiveName}, code=${code}, ms=${elapsedMs}, pwCandidates=${req.passwordCount}, ` +
      `bestPercent=${req.parseState.bestPercent}, backend=${req.parseState.backend || "unknown"}, usedPassword=${req.parseState.usedPassword ? "yes" : "no"}`
    );
    if (code === 0) {
      // Success: force a final 100% progress tick before resolving.
      req.onArchiveProgress?.(100);
      finishDaemonRequest({
        ok: true, missingCommand: false, missingRuntime: false,
        aborted: false, timedOut: false, errorText: "",
        usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
      });
    } else {
      // Failure: prefer the daemon's reported error, fall back to raw output,
      // then to a generic exit-code message.
      const message = cleanErrorText(req.parseState.reportedError || daemonOutput) || `Exit Code ${code}`;
      finishDaemonRequest({
        ok: false, missingCommand: false, missingRuntime: isJvmRuntimeMissingError(message),
        aborted: false, timedOut: false, errorText: message,
        usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
      });
    }
    return;
  }
  // Regular progress/status lines — delegate to parseJvmLine
  if (daemonCurrentRequest) {
    parseJvmLine(trimmed, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
  }
}
/**
 * Spawns the persistent JVM extractor daemon if it is not already running.
 * Returns true when a ready daemon exists or a new process was spawned
 * successfully; false when a daemon is still booting (don't kill it) or the
 * spawn itself failed. Readiness is signalled later via RD_DAEMON_READY on
 * stdout (see handleDaemonLine); callers should poll/wait for daemonReady.
 */
function startDaemon(layout: JvmExtractorLayout): boolean {
  if (daemonProcess && daemonReady) return true;
  // Don't kill a daemon that's still booting — it will become ready soon
  if (daemonProcess) return false;
  shutdownDaemon();
  // Private per-daemon temp dir so java.io.tmpdir artifacts can be removed on close.
  const jvmTmpDir = path.join(os.tmpdir(), `rd-extract-daemon-${crypto.randomUUID()}`);
  fs.mkdirSync(jvmTmpDir, { recursive: true });
  const args = [
    "-Dfile.encoding=UTF-8",
    `-Djava.io.tmpdir=${jvmTmpDir}`,
    "-Xms512m",
    "-Xmx8g",
    "-XX:+UseSerialGC",
    "-cp",
    layout.classPath,
    JVM_EXTRACTOR_MAIN_CLASS,
    "--daemon"
  ];
  try {
    const child = spawn(layout.javaCommand, args, {
      windowsHide: true,
      stdio: ["pipe", "pipe", "pipe"]
    });
    // Best-effort: apply the configured CPU priority to the daemon process.
    lowerExtractProcessPriority(child.pid, currentExtractCpuPriority);
    daemonProcess = child;
    daemonLayout = layout;
    // stdout carries the line-based daemon protocol; buffer partial lines.
    child.stdout!.on("data", (chunk) => {
      const raw = String(chunk || "");
      daemonOutput = appendLimited(daemonOutput, raw);
      daemonStdoutBuffer += raw;
      const lines = daemonStdoutBuffer.split(/\r?\n/);
      daemonStdoutBuffer = lines.pop() || "";
      for (const line of lines) {
        handleDaemonLine(line);
      }
    });
    // stderr lines are only parsed for progress/error state, never for
    // protocol control messages.
    child.stderr!.on("data", (chunk) => {
      const raw = String(chunk || "");
      daemonOutput = appendLimited(daemonOutput, raw);
      daemonStderrBuffer += raw;
      const lines = daemonStderrBuffer.split(/\r?\n/);
      daemonStderrBuffer = lines.pop() || "";
      for (const line of lines) {
        if (daemonCurrentRequest) {
          parseJvmLine(line, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
        }
      }
    });
    // Spawn-level failure (e.g. java binary missing): fail the active request
    // as missing runtime/command, then tear everything down.
    child.on("error", () => {
      if (daemonCurrentRequest) {
        finishDaemonRequest({
          ok: false, missingCommand: true, missingRuntime: true,
          aborted: false, timedOut: false, errorText: "Daemon process error",
          usedPassword: "", backend: ""
        });
      }
      shutdownDaemon();
    });
    // Unexpected daemon exit: settle any in-flight request with the best
    // available error text, remove the temp dir, and reset state flags.
    child.on("close", () => {
      if (daemonCurrentRequest) {
        const req = daemonCurrentRequest;
        finishDaemonRequest({
          ok: false, missingCommand: false, missingRuntime: false,
          aborted: false, timedOut: false,
          errorText: cleanErrorText(req.parseState.reportedError || daemonOutput) || "Daemon process exited unexpectedly",
          usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
        });
      }
      // Clean up tmp dir
      fs.rm(jvmTmpDir, { recursive: true, force: true }, () => {});
      daemonProcess = null;
      daemonReady = false;
      daemonBusy = false;
      daemonLayout = null;
    });
    logger.info(`JVM Daemon gestartet (PID ${child.pid})`);
    return true;
  } catch (error) {
    logger.warn(`JVM Daemon Start fehlgeschlagen: ${String(error)}`);
    return false;
  }
}
/**
 * Reports whether the persistent daemon can accept a request right now.
 * Lazily (re)starts the daemon when it is not running or not yet ready;
 * returns false while it is still booting or busy with another archive.
 */
function isDaemonAvailable(layout: JvmExtractorLayout): boolean {
  const runningAndReady = Boolean(daemonProcess) && daemonReady;
  if (!runningAndReady) {
    startDaemon(layout);
  }
  return Boolean(daemonProcess && daemonReady && !daemonBusy);
}
/**
 * Polls (every 50ms) until the daemon is ready and idle, resolving true.
 * Resolves false when the signal aborts, the daemon process dies, or
 * maxWaitMs elapses first. Covers both the boot phase (not ready yet)
 * and the busy phase (another request in flight).
 */
function waitForDaemonReady(maxWaitMs: number, signal?: AbortSignal): Promise<boolean> {
  return new Promise((resolve) => {
    const deadline = Date.now() + maxWaitMs;
    const poll = (): void => {
      if (signal?.aborted) {
        resolve(false);
        return;
      }
      if (daemonProcess && daemonReady && !daemonBusy) {
        resolve(true);
        return;
      }
      // Daemon died while we were waiting.
      if (!daemonProcess) {
        resolve(false);
        return;
      }
      if (Date.now() >= deadline) {
        resolve(false);
        return;
      }
      setTimeout(poll, 50);
    };
    poll();
  });
}
/**
 * Sends one extraction request to the already-running persistent daemon and
 * resolves with its result. Marks the daemon busy, registers the request as
 * daemonCurrentRequest, then writes a single JSON line to the daemon's stdin.
 * The promise is settled later by handleDaemonLine (RD_REQUEST_DONE), by the
 * timeout below, by the abort handler, or by the child's error/close handlers.
 * On timeout or abort the daemon is killed and restarted fresh next time.
 *
 * @param archivePath        Absolute path of the archive to extract.
 * @param targetDir          Extraction target directory.
 * @param conflictMode       Requested conflict handling (normalized via effectiveConflictMode).
 * @param passwordCandidates Passwords to try, in order.
 * @param onArchiveProgress  Optional 0..100 progress callback.
 * @param signal             Optional abort signal; aborting kills the daemon.
 * @param timeoutMs          Optional hard timeout for this request.
 */
function sendDaemonRequest(
  archivePath: string,
  targetDir: string,
  conflictMode: ConflictMode,
  passwordCandidates: string[],
  onArchiveProgress?: (percent: number) => void,
  signal?: AbortSignal,
  timeoutMs?: number
): Promise<JvmExtractResult> {
  return new Promise((resolve) => {
    const mode = effectiveConflictMode(conflictMode);
    // Fresh parse accumulator; mutated by parseJvmLine as daemon output arrives.
    const parseState = { bestPercent: 0, usedPassword: "", backend: "", reportedError: "" };
    const archiveName = path.basename(archivePath);
    daemonBusy = true;
    daemonOutput = "";
    daemonCurrentRequest = {
      resolve,
      onArchiveProgress,
      signal,
      timeoutMs,
      parseState,
      archiveName,
      startedAt: Date.now(),
      passwordCount: passwordCandidates.length
    };
    logger.info(`JVM Daemon Request Start: archive=${archiveName}, pwCandidates=${passwordCandidates.length}, timeoutMs=${timeoutMs || 0}, conflict=${mode}`);
    // Set up timeout
    if (timeoutMs && timeoutMs > 0) {
      daemonTimeoutId = setTimeout(() => {
        // Timeout — kill the daemon and restart fresh for next request
        const req = daemonCurrentRequest;
        if (req) {
          finishDaemonRequest({
            ok: false, missingCommand: false, missingRuntime: false,
            aborted: false, timedOut: true,
            errorText: `Entpacken Timeout nach ${Math.ceil(timeoutMs / 1000)}s`,
            usedPassword: parseState.usedPassword, backend: parseState.backend
          });
        }
        shutdownDaemon();
      }, timeoutMs);
    }
    // Set up abort handler
    if (signal) {
      daemonAbortHandler = () => {
        const req = daemonCurrentRequest;
        if (req) {
          finishDaemonRequest({
            ok: false, missingCommand: false, missingRuntime: false,
            aborted: true, timedOut: false, errorText: "aborted:extract",
            usedPassword: parseState.usedPassword, backend: parseState.backend
          });
        }
        // Kill daemon on abort — cleaner than trying to interrupt mid-extraction
        shutdownDaemon();
      };
      signal.addEventListener("abort", daemonAbortHandler, { once: true });
    }
    // Build and send JSON request
    const jsonRequest = JSON.stringify({
      archive: archivePath,
      target: targetDir,
      conflict: mode,
      backend: "auto",
      passwords: passwordCandidates
    });
    try {
      // Newline-delimited JSON: one request line per extraction.
      daemonProcess!.stdin!.write(jsonRequest + "\n");
    } catch (error) {
      // stdin write failed — daemon is unusable; settle and tear down.
      finishDaemonRequest({
        ok: false, missingCommand: false, missingRuntime: false,
        aborted: false, timedOut: false,
        errorText: `Daemon stdin write failed: ${String(error)}`,
        usedPassword: "", backend: ""
      });
      shutdownDaemon();
    }
  });
}
async function runJvmExtractCommand(
function runJvmExtractCommand(
layout: JvmExtractorLayout,
archivePath: string,
targetDir: string,
@ -1408,29 +1006,6 @@ async function runJvmExtractCommand(
});
}
// Try persistent daemon first — saves ~5s JVM boot per archive
if (isDaemonAvailable(layout)) {
logger.info(`JVM Daemon: Sofort verfügbar, sende Request für ${path.basename(archivePath)} (pwCandidates=${passwordCandidates.length})`);
return sendDaemonRequest(archivePath, targetDir, conflictMode, passwordCandidates, onArchiveProgress, signal, timeoutMs);
}
// Daemon exists but is still booting or busy — wait up to 15s for it
if (daemonProcess) {
const reason = !daemonReady ? "booting" : "busy";
const waitStartedAt = Date.now();
logger.info(`JVM Daemon: Warte auf ${reason} Daemon für ${path.basename(archivePath)}...`);
const ready = await waitForDaemonReady(15_000, signal);
const waitedMs = Date.now() - waitStartedAt;
if (ready) {
logger.info(`JVM Daemon: Bereit nach ${waitedMs}ms — sende Request für ${path.basename(archivePath)}`);
return sendDaemonRequest(archivePath, targetDir, conflictMode, passwordCandidates, onArchiveProgress, signal, timeoutMs);
}
logger.warn(`JVM Daemon: Timeout nach ${waitedMs}ms beim Warten — Fallback auf neuen Prozess für ${path.basename(archivePath)}`);
}
// Fallback: spawn a new JVM process (daemon not available after waiting)
logger.info(`JVM Spawn: Neuer Prozess für ${path.basename(archivePath)}`);
const mode = effectiveConflictMode(conflictMode);
// Each JVM process needs its own temp dir so parallel SevenZipJBinding
// instances don't fight over the same native DLL file lock.
@ -1439,9 +1014,8 @@ async function runJvmExtractCommand(
const args = [
"-Dfile.encoding=UTF-8",
`-Djava.io.tmpdir=${jvmTmpDir}`,
"-Xms512m",
"-Xmx8g",
"-XX:+UseSerialGC",
"-Xms32m",
"-Xmx512m",
"-cp",
layout.classPath,
JVM_EXTRACTOR_MAIN_CLASS,
@ -1673,7 +1247,7 @@ async function resolveExtractorCommandInternal(): Promise<string> {
resolveFailureAt = 0;
}
const candidates = nativeExtractorCandidates();
const candidates = winRarCandidates();
for (const command of candidates) {
if (isAbsoluteCommand(command) && !fs.existsSync(command)) {
continue;
@ -1726,11 +1300,7 @@ async function runExternalExtract(
): Promise<string> {
const timeoutMs = await computeExtractTimeoutMs(archivePath);
const backendMode = extractorBackendMode();
const archiveName = path.basename(archivePath);
const totalStartedAt = Date.now();
let jvmFailureReason = "";
let fallbackFromJvm = false;
logger.info(`Extract-Backend Start: archive=${archiveName}, mode=${backendMode}, pwCandidates=${passwordCandidates.length}, timeoutMs=${timeoutMs}, hybrid=${hybridMode}`);
await fs.promises.mkdir(targetDir, { recursive: true });
@ -1751,8 +1321,7 @@ async function runExternalExtract(
logger.warn(`JVM-Extractor nicht verfügbar, nutze Legacy-Extractor: ${path.basename(archivePath)}`);
} else {
const quotedPasswords = passwordCandidates.map((p) => p === "" ? '""' : `"${p}"`);
logger.info(`JVM-Extractor aktiv (${layout.rootDir}): ${archiveName}, ${passwordCandidates.length} Passwörter: [${quotedPasswords.join(", ")}]`);
const jvmStartedAt = Date.now();
logger.info(`JVM-Extractor aktiv (${layout.rootDir}): ${path.basename(archivePath)}, ${passwordCandidates.length} Passwörter: [${quotedPasswords.join(", ")}]`);
const jvmResult = await runJvmExtractCommand(
layout,
archivePath,
@ -1763,12 +1332,9 @@ async function runExternalExtract(
signal,
timeoutMs
);
const jvmMs = Date.now() - jvmStartedAt;
logger.info(`JVM-Extractor Ergebnis: archive=${archiveName}, ok=${jvmResult.ok}, ms=${jvmMs}, timedOut=${jvmResult.timedOut}, aborted=${jvmResult.aborted}, backend=${jvmResult.backend || "unknown"}, usedPassword=${jvmResult.usedPassword ? "yes" : "no"}`);
if (jvmResult.ok) {
logger.info(`Entpackt via ${jvmResult.backend || "jvm"}: ${archiveName}`);
logger.info(`Extract-Backend Ende: archive=${archiveName}, backend=${jvmResult.backend || "jvm"}, mode=${backendMode}, ms=${Date.now() - totalStartedAt}, fallbackFromJvm=false, usedPassword=${jvmResult.usedPassword ? "yes" : "no"}`);
logger.info(`Entpackt via ${jvmResult.backend || "jvm"}: ${path.basename(archivePath)}`);
return jvmResult.usedPassword;
}
if (jvmResult.aborted) {
@ -1779,7 +1345,6 @@ async function runExternalExtract(
}
jvmFailureReason = jvmResult.errorText || "JVM-Extractor fehlgeschlagen";
fallbackFromJvm = true;
const jvmFailureLower = jvmFailureReason.toLowerCase();
const isUnsupportedMethod = jvmFailureReason.includes("UNSUPPORTEDMETHOD");
const isCodecError = jvmFailureLower.includes("registered codecs")
@ -1808,7 +1373,6 @@ async function runExternalExtract(
const effectiveTargetDir = subst ? `${subst.drive}:\\` : targetDir;
const command = await resolveExtractorCommand();
const legacyStartedAt = Date.now();
const password = await runExternalExtractInner(
command,
archivePath,
@ -1821,14 +1385,12 @@ async function runExternalExtract(
hybridMode,
onPasswordAttempt
);
const legacyMs = Date.now() - legacyStartedAt;
const extractorName = path.basename(command).replace(/\.exe$/i, "");
if (jvmFailureReason) {
logger.info(`Entpackt via legacy/${extractorName} (nach JVM-Fehler): ${archiveName}`);
logger.info(`Entpackt via legacy/${extractorName} (nach JVM-Fehler): ${path.basename(archivePath)}`);
} else {
logger.info(`Entpackt via legacy/${extractorName}: ${archiveName}`);
logger.info(`Entpackt via legacy/${extractorName}: ${path.basename(archivePath)}`);
}
logger.info(`Extract-Backend Ende: archive=${archiveName}, backend=legacy/${extractorName}, mode=${backendMode}, ms=${Date.now() - totalStartedAt}, legacyMs=${legacyMs}, fallbackFromJvm=${fallbackFromJvm}, usedPassword=${password ? "yes" : "no"}`);
return password;
} finally {
if (subst) removeSubstMapping(subst);
@ -1867,7 +1429,6 @@ async function runExternalExtractInner(
onArchiveProgress?.(0);
}
passwordAttempt += 1;
const attemptStartedAt = Date.now();
const quotedPw = password === "" ? '""' : `"${password}"`;
logger.info(`Legacy-Passwort-Versuch ${passwordAttempt}/${passwords.length} für ${path.basename(archivePath)}: ${quotedPw}`);
if (passwords.length > 1) {
@ -1876,14 +1437,11 @@ async function runExternalExtractInner(
let args = buildExternalExtractArgs(command, archivePath, targetDir, conflictMode, password, usePerformanceFlags, hybridMode);
let result = await runExtractCommand(command, args, (chunk) => {
const parsed = parseProgressPercent(chunk);
if (parsed === null) {
if (parsed === null || parsed <= bestPercent) {
return;
}
const next = nextArchivePercent(bestPercent, parsed);
if (next !== bestPercent) {
bestPercent = next;
onArchiveProgress?.(bestPercent);
}
bestPercent = parsed;
onArchiveProgress?.(bestPercent);
}, signal, timeoutMs);
if (!result.ok && usePerformanceFlags && isUnsupportedExtractorSwitchError(result.errorText)) {
@ -1893,22 +1451,14 @@ async function runExternalExtractInner(
args = buildExternalExtractArgs(command, archivePath, targetDir, conflictMode, password, false, hybridMode);
result = await runExtractCommand(command, args, (chunk) => {
const parsed = parseProgressPercent(chunk);
if (parsed === null) {
if (parsed === null || parsed <= bestPercent) {
return;
}
const next = nextArchivePercent(bestPercent, parsed);
if (next !== bestPercent) {
bestPercent = next;
onArchiveProgress?.(bestPercent);
}
bestPercent = parsed;
onArchiveProgress?.(bestPercent);
}, signal, timeoutMs);
}
logger.info(
`Legacy-Passwort-Versuch Ergebnis: archive=${path.basename(archivePath)}, attempt=${passwordAttempt}/${passwords.length}, ` +
`ms=${Date.now() - attemptStartedAt}, ok=${result.ok}, timedOut=${result.timedOut}, missingCommand=${result.missingCommand}, bestPercent=${bestPercent}`
);
if (result.ok) {
onArchiveProgress?.(100);
return password;
@ -2376,14 +1926,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
if (options.conflictMode === "ask") {
logger.warn("Extract-ConflictMode 'ask' wird ohne Prompt als 'skip' behandelt");
}
const passwordCacheKey = packagePasswordCacheKey(options.packageDir, options.packageId);
const passwordCacheLabel = packagePasswordCacheLabel(options.packageDir, options.packageId);
let passwordCandidates = archivePasswords(options.passwordList || "");
const cachedPackagePassword = readCachedPackagePassword(passwordCacheKey);
if (cachedPackagePassword) {
passwordCandidates = prioritizePassword(passwordCandidates, cachedPackagePassword);
logger.info(`Passwort-Cache Treffer: ${passwordCacheLabel}, bekanntes Passwort wird zuerst getestet`);
}
const resumeCompleted = await readExtractResumeState(options.packageDir, options.packageId);
const resumeCompletedAtStart = resumeCompleted.size;
const allCandidateNames = new Set(allCandidates.map((archivePath) => archiveNameKey(path.basename(archivePath))));
@ -2402,7 +1945,6 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
let extracted = candidates.length - pendingCandidates.length;
let failed = 0;
let lastError = "";
let learnedPassword = cachedPackagePassword;
const extractedArchives = new Set<string>();
for (const archivePath of candidates) {
if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) {
@ -2410,41 +1952,23 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
}
const rememberLearnedPassword = (password: string): void => {
const normalized = String(password || "").trim();
if (!normalized) {
return;
}
const changed = normalized !== learnedPassword;
learnedPassword = normalized;
passwordCandidates = prioritizePassword(passwordCandidates, normalized);
writeCachedPackagePassword(passwordCacheKey, normalized);
if (changed) {
logger.info(`Passwort-Cache Update: ${passwordCacheLabel}, neues Passwort gelernt`);
}
};
const emitProgress = (
current: number,
archiveName: string,
phase: "extracting" | "done",
archivePercent?: number,
elapsedMs?: number,
pwInfo?: { passwordAttempt?: number; passwordTotal?: number; passwordFound?: boolean },
archiveInfo?: { archiveDone?: boolean; archiveSuccess?: boolean }
pwInfo?: { passwordAttempt?: number; passwordTotal?: number; passwordFound?: boolean }
): void => {
if (!options.onProgress) {
return;
}
const total = Math.max(1, candidates.length);
let percent = Math.max(0, Math.min(100, Math.floor((current / total) * 100)));
let normalizedArchivePercent = Math.max(0, Math.min(100, Number(archivePercent ?? 0)));
if (phase !== "done") {
const boundedCurrent = Math.max(0, Math.min(total, current));
if (archiveInfo?.archiveDone !== true && normalizedArchivePercent >= 100) {
normalizedArchivePercent = 99;
}
percent = Math.max(0, Math.min(100, Math.floor(((boundedCurrent + (normalizedArchivePercent / 100)) / total) * 100)));
const boundedArchivePercent = Math.max(0, Math.min(100, Number(archivePercent ?? 0)));
percent = Math.max(0, Math.min(100, Math.floor(((boundedCurrent + (boundedArchivePercent / 100)) / total) * 100)));
}
try {
options.onProgress({
@ -2452,10 +1976,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
total,
percent,
archiveName,
archivePercent: normalizedArchivePercent,
archivePercent,
elapsedMs,
phase,
...(archiveInfo || {}),
...(pwInfo || {})
});
} catch (error) {
@ -2470,13 +1993,12 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
// rather than leaving them as "Entpacken - Ausstehend" until all extraction finishes.
for (const archivePath of candidates) {
if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) {
emitProgress(extracted, path.basename(archivePath), "extracting", 100, 0, undefined, { archiveDone: true, archiveSuccess: true });
emitProgress(extracted, path.basename(archivePath), "extracting", 100, 0);
}
}
const maxParallel = Math.max(1, options.maxParallel || 1);
let noExtractorEncountered = false;
let lastArchiveFinishedAt: number | null = null;
const extractSingleArchive = async (archivePath: string): Promise<void> => {
if (options.signal?.aborted) {
@ -2488,36 +2010,17 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
const archiveName = path.basename(archivePath);
const archiveResumeKey = archiveNameKey(archiveName);
const archiveStartedAt = Date.now();
const startedCurrent = extracted + failed;
if (lastArchiveFinishedAt !== null) {
logger.info(`Extract-Trace Gap: before=${archiveName}, prevDoneToStartMs=${archiveStartedAt - lastArchiveFinishedAt}, progress=${startedCurrent}/${candidates.length}`);
}
let archivePercent = 0;
let reached99At: number | null = null;
let archiveOutcome: "success" | "failed" | "skipped" = "failed";
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, 0);
const pulseTimer = setInterval(() => {
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, 1100);
const hybrid = Boolean(options.hybridMode);
// Before the first successful extraction, filename-derived candidates are useful.
// After a known password is learned, try that first to avoid per-archive delays.
// Insert archive-filename-derived passwords after "" but before custom passwords
const filenamePasswords = archiveFilenamePasswords(archiveName);
const nonEmptyBasePasswords = passwordCandidates.filter((p) => p !== "");
const orderedNonEmpty = learnedPassword
? [learnedPassword, ...nonEmptyBasePasswords.filter((p) => p !== learnedPassword), ...filenamePasswords]
: [...filenamePasswords, ...nonEmptyBasePasswords];
const archivePasswordCandidates = learnedPassword
? Array.from(new Set([...orderedNonEmpty, ""]))
: Array.from(new Set(["", ...orderedNonEmpty]));
const reportArchiveProgress = (value: number): void => {
archivePercent = nextArchivePercent(archivePercent, value);
if (reached99At === null && archivePercent >= 99) {
reached99At = Date.now();
logger.info(`Extract-Trace 99%: archive=${archiveName}, elapsedMs=${reached99At - archiveStartedAt}`);
}
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
};
const archivePasswordCandidates = filenamePasswords.length > 0
? Array.from(new Set(["", ...filenamePasswords, ...passwordCandidates.filter((p) => p !== "")]))
: passwordCandidates;
// Validate generic .001 splits via file signature before attempting extraction
const isGenericSplit = /\.\d{3}$/i.test(archiveName) && !/\.(zip|7z)\.\d{3}$/i.test(archiveName);
@ -2530,10 +2033,6 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
extractedArchives.add(archivePath);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
clearInterval(pulseTimer);
archiveOutcome = "skipped";
const skippedAt = Date.now();
lastArchiveFinishedAt = skippedAt;
logger.info(`Extract-Trace Archiv Übersprungen: archive=${archiveName}, ms=${skippedAt - archiveStartedAt}, reason=no-signature`);
return;
}
logger.info(`Generische Split-Datei verifiziert (Signatur: ${sig}): ${archiveName}`);
@ -2556,9 +2055,10 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
if (preferExternal) {
try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
reportArchiveProgress(value);
archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid, onPwAttempt);
rememberLearnedPassword(usedPassword);
passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} catch (error) {
if (isNoExtractorError(String(error))) {
await extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
@ -2576,9 +2076,10 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
reportArchiveProgress(value);
archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid, onPwAttempt);
rememberLearnedPassword(usedPassword);
passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} catch (externalError) {
if (isNoExtractorError(String(externalError)) || isUnsupportedArchiveFormatError(String(externalError))) {
throw error;
@ -2589,25 +2090,21 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
} else {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
reportArchiveProgress(value);
archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid, onPwAttempt);
rememberLearnedPassword(usedPassword);
passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
}
extracted += 1;
extractedArchives.add(archivePath);
resumeCompleted.add(archiveResumeKey);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
logger.info(`Entpacken erfolgreich: ${path.basename(archivePath)}`);
archiveOutcome = "success";
const successAt = Date.now();
const tailAfter99Ms = reached99At ? (successAt - reached99At) : -1;
logger.info(`Extract-Trace Archiv Erfolg: archive=${archiveName}, totalMs=${successAt - archiveStartedAt}, tailAfter99Ms=${tailAfter99Ms >= 0 ? tailAfter99Ms : "n/a"}, pwCandidates=${archivePasswordCandidates.length}`);
lastArchiveFinishedAt = successAt;
archivePercent = 100;
if (hasManyPasswords) {
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, { passwordFound: true }, { archiveDone: true, archiveSuccess: true });
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, { passwordFound: true });
} else {
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, undefined, { archiveDone: true, archiveSuccess: true });
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}
} catch (error) {
const errorText = String(error);
@ -2618,25 +2115,12 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
lastError = errorText;
const errorCategory = classifyExtractionError(errorText);
logger.error(`Entpack-Fehler ${path.basename(archivePath)} [${errorCategory}]: ${errorText}`);
if (errorCategory === "wrong_password" && learnedPassword) {
learnedPassword = "";
clearCachedPackagePassword(passwordCacheKey);
logger.warn(`Passwort-Cache verworfen: ${passwordCacheLabel} (wrong_password)`);
}
const failedAt = Date.now();
const tailAfter99Ms = reached99At ? (failedAt - reached99At) : -1;
logger.warn(`Extract-Trace Archiv Fehler: archive=${archiveName}, totalMs=${failedAt - archiveStartedAt}, tailAfter99Ms=${tailAfter99Ms >= 0 ? tailAfter99Ms : "n/a"}, category=${errorCategory}`);
lastArchiveFinishedAt = failedAt;
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, undefined, { archiveDone: true, archiveSuccess: false });
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
if (isNoExtractorError(errorText)) {
noExtractorEncountered = true;
}
} finally {
clearInterval(pulseTimer);
if (lastArchiveFinishedAt === null || lastArchiveFinishedAt < archiveStartedAt) {
lastArchiveFinishedAt = Date.now();
}
logger.info(`Extract-Trace Archiv Ende: archive=${archiveName}, outcome=${archiveOutcome}, elapsedMs=${lastArchiveFinishedAt - archiveStartedAt}, percent=${archivePercent}`);
}
};
@ -2761,11 +2245,11 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
} catch (zipErr) {
if (!shouldFallbackToExternalZip(zipErr)) throw zipErr;
const usedPw = await runExternalExtract(nestedArchive, options.targetDir, options.conflictMode, passwordCandidates, (v) => { nestedPercent = Math.max(nestedPercent, v); }, options.signal, hybrid);
rememberLearnedPassword(usedPw);
passwordCandidates = prioritizePassword(passwordCandidates, usedPw);
}
} else {
const usedPw = await runExternalExtract(nestedArchive, options.targetDir, options.conflictMode, passwordCandidates, (v) => { nestedPercent = Math.max(nestedPercent, v); }, options.signal, hybrid);
rememberLearnedPassword(usedPw);
passwordCandidates = prioritizePassword(passwordCandidates, usedPw);
}
extracted += 1;
nestedExtracted += 1;

View File

@ -7,7 +7,7 @@ import { IPC_CHANNELS } from "../shared/ipc";
import { getLogFilePath, logger } from "./logger";
import { APP_NAME } from "./constants";
import { extractHttpLinksFromText } from "./utils";
import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor";
import { cleanupStaleSubstDrives } from "./extractor";
/* ── IPC validation helpers ────────────────────────────────────── */
function validateString(value: unknown, name: string): string {
@ -515,7 +515,6 @@ app.on("before-quit", () => {
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
stopClipboardWatcher();
destroyTray();
shutdownDaemon();
try {
controller.shutdown();
} catch (error) {

View File

@ -5,8 +5,8 @@ import { AppSettings, BandwidthScheduleEntry, DebridProvider, DownloadItem, Down
import { defaultSettings } from "./constants";
import { logger } from "./logger";
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
const VALID_PRIMARY_PROVIDERS = new Set(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
const VALID_FALLBACK_PROVIDERS = new Set(["none", "realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
const VALID_CLEANUP_MODES = new Set(["none", "trash", "delete"]);
const VALID_CONFLICT_MODES = new Set(["overwrite", "skip", "rename", "ask"]);
const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]);
@ -17,7 +17,7 @@ const VALID_PACKAGE_PRIORITIES = new Set<string>(["high", "normal", "low"]);
const VALID_DOWNLOAD_STATUSES = new Set<DownloadStatus>([
"queued", "validating", "downloading", "paused", "reconnect_wait", "extracting", "integrity_check", "completed", "failed", "cancelled"
]);
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload", "onefichier"]);
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid", "ddownload"]);
const VALID_ONLINE_STATUSES = new Set(["online", "offline", "checking"]);
function asText(value: unknown): string {
@ -113,7 +113,6 @@ export function normalizeSettings(settings: AppSettings): AppSettings {
allDebridToken: asText(settings.allDebridToken),
ddownloadLogin: asText(settings.ddownloadLogin),
ddownloadPassword: asText(settings.ddownloadPassword),
oneFichierApiKey: asText(settings.oneFichierApiKey),
archivePasswordList: String(settings.archivePasswordList ?? "").replace(/\r\n|\r/g, "\n"),
rememberToken: Boolean(settings.rememberToken),
providerPrimary: settings.providerPrimary,
@ -205,8 +204,7 @@ function sanitizeCredentialPersistence(settings: AppSettings): AppSettings {
bestToken: "",
allDebridToken: "",
ddownloadLogin: "",
ddownloadPassword: "",
oneFichierApiKey: ""
ddownloadPassword: ""
};
}

View File

@ -62,7 +62,7 @@ const emptyStats = (): DownloadStats => ({
const emptySnapshot = (): UiSnapshot => ({
settings: {
token: "", megaLogin: "", megaPassword: "", bestToken: "", allDebridToken: "", ddownloadLogin: "", ddownloadPassword: "", oneFichierApiKey: "",
token: "", megaLogin: "", megaPassword: "", bestToken: "", allDebridToken: "", ddownloadLogin: "", ddownloadPassword: "",
archivePasswordList: "",
rememberToken: true, providerPrimary: "realdebrid", providerSecondary: "megadebrid",
providerTertiary: "bestdebrid", autoProviderFallback: true, outputDir: "", packageName: "",
@ -94,7 +94,7 @@ const cleanupLabels: Record<string, string> = {
const AUTO_RENDER_PACKAGE_LIMIT = 260;
const providerLabels: Record<DebridProvider, string> = {
realdebrid: "Real-Debrid", megadebrid: "Mega-Debrid", bestdebrid: "BestDebrid", alldebrid: "AllDebrid", ddownload: "DDownload", onefichier: "1Fichier"
realdebrid: "Real-Debrid", megadebrid: "Mega-Debrid", bestdebrid: "BestDebrid", alldebrid: "AllDebrid", ddownload: "DDownload"
};
function formatDateTime(ts: number): string {
@ -930,11 +930,7 @@ export function App(): ReactElement {
Boolean((settingsDraft.ddownloadLogin || "").trim() && (settingsDraft.ddownloadPassword || "").trim()),
[settingsDraft.ddownloadLogin, settingsDraft.ddownloadPassword]);
const hasOneFichierAccount = useMemo(() =>
Boolean((settingsDraft.oneFichierApiKey || "").trim()),
[settingsDraft.oneFichierApiKey]);
const totalConfiguredAccounts = configuredProviders.length + (hasDdownloadAccount ? 1 : 0) + (hasOneFichierAccount ? 1 : 0);
const totalConfiguredAccounts = configuredProviders.length + (hasDdownloadAccount ? 1 : 0);
const primaryProviderValue: DebridProvider = useMemo(() => {
if (configuredProviders.includes(settingsDraft.providerPrimary)) {
@ -2748,8 +2744,6 @@ export function App(): ReactElement {
<input value={settingsDraft.ddownloadLogin || ""} onChange={(e) => setText("ddownloadLogin", e.target.value)} />
<label>DDownload Passwort</label>
<input type="password" value={settingsDraft.ddownloadPassword || ""} onChange={(e) => setText("ddownloadPassword", e.target.value)} />
<label>1Fichier API Key</label>
<input type="password" value={settingsDraft.oneFichierApiKey || ""} onChange={(e) => setText("oneFichierApiKey", e.target.value)} />
{configuredProviders.length === 0 && (
<div className="hint">Füge mindestens einen Account hinzu, dann erscheint die Hoster-Auswahl.</div>
)}
@ -2989,7 +2983,7 @@ export function App(): ReactElement {
<span>Links: {Object.keys(snapshot.session.items).length}</span>
<span>Session: {humanSize(snapshot.stats.totalDownloaded)}</span>
<span>Gesamt: {humanSize(snapshot.stats.totalDownloadedAllTime)}</span>
<span>Hoster: {providerStats.length}</span>
<span>Hoster: {totalConfiguredAccounts}</span>
<span>{snapshot.speedText}</span>
<span>{snapshot.etaText}</span>
<span className="footer-spacer" />

View File

@ -14,7 +14,7 @@ export type CleanupMode = "none" | "trash" | "delete";
export type ConflictMode = "overwrite" | "skip" | "rename" | "ask";
export type SpeedMode = "global" | "per_download";
export type FinishedCleanupPolicy = "never" | "immediate" | "on_start" | "package_done";
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid" | "ddownload" | "onefichier";
export type DebridProvider = "realdebrid" | "megadebrid" | "bestdebrid" | "alldebrid" | "ddownload";
export type DebridFallbackProvider = DebridProvider | "none";
export type AppTheme = "dark" | "light";
export type PackagePriority = "high" | "normal" | "low";
@ -44,7 +44,6 @@ export interface AppSettings {
allDebridToken: string;
ddownloadLogin: string;
ddownloadPassword: string;
oneFichierApiKey: string;
archivePasswordList: string;
rememberToken: boolean;
providerPrimary: DebridProvider;

View File

@ -65,111 +65,6 @@ describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm b
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
});
it("emits progress callbacks with archiveName and percent", async () => {
process.env.RD_EXTRACT_BACKEND = "jvm";
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
const targetDir = path.join(root, "out");
fs.mkdirSync(packageDir, { recursive: true });
// Create a ZIP with some content to trigger progress
const zipPath = path.join(packageDir, "progress-test.zip");
const zip = new AdmZip();
zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
zip.writeZip(zipPath);
const progressUpdates: Array<{
archiveName: string;
percent: number;
phase: string;
archivePercent?: number;
}> = [];
const result = await extractPackageArchives({
packageDir,
targetDir,
cleanupMode: "none",
conflictMode: "overwrite",
removeLinks: false,
removeSamples: false,
onProgress: (update) => {
progressUpdates.push({
archiveName: update.archiveName,
percent: update.percent,
phase: update.phase,
archivePercent: update.archivePercent,
});
},
});
expect(result.extracted).toBe(1);
expect(result.failed).toBe(0);
// Should have at least preparing, extracting, and done phases
const phases = new Set(progressUpdates.map((u) => u.phase));
expect(phases.has("preparing")).toBe(true);
expect(phases.has("extracting")).toBe(true);
// Extracting phase should include the archive name
const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
expect(extracting.length).toBeGreaterThan(0);
// Should end at 100%
const lastExtracting = extracting[extracting.length - 1];
expect(lastExtracting.archivePercent).toBe(100);
// Files should exist
expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
});
it("extracts multiple archives sequentially with progress for each", async () => {
  process.env.RD_EXTRACT_BACKEND = "jvm";
  const workRoot = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
  tempDirs.push(workRoot);
  const packageDir = path.join(workRoot, "pkg");
  const targetDir = path.join(workRoot, "out");
  fs.mkdirSync(packageDir, { recursive: true });
  // Build two independent single-file ZIP archives inside the package folder.
  const fixtures: Array<[string, string, string]> = [
    ["archive1.zip", "episode01.txt", "ep1 content"],
    ["archive2.zip", "episode02.txt", "ep2 content"],
  ];
  for (const [zipName, entryName, content] of fixtures) {
    const zip = new AdmZip();
    zip.addFile(entryName, Buffer.from(content));
    zip.writeZip(path.join(packageDir, zipName));
  }
  // Record every archive name reported during the "extracting" phase.
  const seenArchives = new Set<string>();
  const result = await extractPackageArchives({
    packageDir,
    targetDir,
    cleanupMode: "none",
    conflictMode: "overwrite",
    removeLinks: false,
    removeSamples: false,
    onProgress: (update) => {
      if (update.phase === "extracting" && update.archiveName) {
        seenArchives.add(update.archiveName);
      }
    },
  });
  expect(result.extracted).toBe(2);
  expect(result.failed).toBe(0);
  // Progress must have named each archive individually.
  expect(seenArchives.has("archive1.zip")).toBe(true);
  expect(seenArchives.has("archive2.zip")).toBe(true);
  // Each archive's payload must have been unpacked into the target directory.
  expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
  expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
});
it("respects ask/skip conflict mode in jvm backend", async () => {
process.env.RD_EXTRACT_BACKEND = "jvm";

View File

@ -1,188 +0,0 @@
import { describe, expect, it } from "vitest";
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
// Minimal stand-in for a download-manager item: only the fields the
// archive-resolution logic reads (targetPath / fileName) are typed;
// any other properties a real item would carry pass through as unknown.
type MinimalItem = {
  targetPath?: string;
  fileName?: string;
  [key: string]: unknown;
};
// Builds completed MinimalItems for the given file names, each with a
// Windows-style targetPath under a fake "Downloads\Package" folder.
function makeItems(names: string[]): MinimalItem[] {
  const items: MinimalItem[] = [];
  for (const fileName of names) {
    items.push({
      targetPath: `C:\\Downloads\\Package\\${fileName}`,
      fileName,
      id: fileName,
      status: "completed",
    });
  }
  return items;
}
// Unit tests for resolveArchiveItemsFromList, which maps an archive file name
// back to the set of download items forming its volume group. Covers:
// multipart RAR (.partN / zero-padded .part0N), old-style .rar + .rNN volumes,
// split ZIP/7z (.NNN suffixes), generic .NNN splits, exact-name and
// case-insensitive matching, stem-based and single-item fallbacks, missing
// targetPath handling, and isolation between distinct archive groups.
describe("resolveArchiveItemsFromList", () => {
  // ── Multipart RAR (.partN.rar) ──
  it("matches multipart .part1.rar archives", () => {
    const items = makeItems([
      "Movie.part1.rar",
      "Movie.part2.rar",
      "Movie.part3.rar",
      "Other.rar",
    ]);
    // NOTE(review): casts past the full item shape — MinimalItem deliberately
    // omits most real fields; the resolver only reads targetPath/fileName.
    const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
    expect(result).toHaveLength(3);
    expect(result.map((i: any) => i.fileName)).toEqual([
      "Movie.part1.rar",
      "Movie.part2.rar",
      "Movie.part3.rar",
    ]);
  });
  it("matches multipart .part01.rar archives (zero-padded)", () => {
    const items = makeItems([
      "Film.part01.rar",
      "Film.part02.rar",
      "Film.part10.rar",
      "Unrelated.zip",
    ]);
    const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
    expect(result).toHaveLength(3);
  });
  // ── Old-style RAR (.rar + .r00, .r01, etc.) ──
  it("matches old-style .rar + .rNN volumes", () => {
    const items = makeItems([
      "Archive.rar",
      "Archive.r00",
      "Archive.r01",
      "Archive.r02",
      "Other.zip",
    ]);
    const result = resolveArchiveItemsFromList("Archive.rar", items as any);
    expect(result).toHaveLength(4);
  });
  // ── Single RAR ──
  it("matches a single .rar file", () => {
    const items = makeItems(["SingleFile.rar", "Other.mkv"]);
    const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
    expect(result).toHaveLength(1);
    expect((result[0] as any).fileName).toBe("SingleFile.rar");
  });
  // ── Split ZIP ──
  it("matches split .zip.NNN files", () => {
    // The base "Data.zip" is part of the volume set alongside the .NNN parts.
    const items = makeItems([
      "Data.zip",
      "Data.zip.001",
      "Data.zip.002",
      "Data.zip.003",
    ]);
    const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
    expect(result).toHaveLength(4);
  });
  // ── Split 7z ──
  it("matches split .7z.NNN files", () => {
    const items = makeItems([
      "Backup.7z.001",
      "Backup.7z.002",
    ]);
    const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
    expect(result).toHaveLength(2);
  });
  // ── Generic .NNN splits ──
  it("matches generic .NNN split files", () => {
    const items = makeItems([
      "video.001",
      "video.002",
      "video.003",
    ]);
    const result = resolveArchiveItemsFromList("video.001", items as any);
    expect(result).toHaveLength(3);
  });
  // ── Exact filename match ──
  it("matches a single .zip by exact name", () => {
    const items = makeItems(["myarchive.zip", "other.rar"]);
    const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
    expect(result).toHaveLength(1);
    expect((result[0] as any).fileName).toBe("myarchive.zip");
  });
  // ── Case insensitivity ──
  it("matches case-insensitively", () => {
    const items = makeItems([
      "MOVIE.PART1.RAR",
      "MOVIE.PART2.RAR",
    ]);
    const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
    expect(result).toHaveLength(2);
  });
  // ── Stem-based fallback ──
  it("uses stem-based fallback when exact patterns fail", () => {
    // Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
    // but the disk file is "Movie.part1.rar"
    const items = makeItems([
      "Movie.rar",
    ]);
    // The archive on disk is "Movie.part1.rar" but there's no item matching the
    // .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
    const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
    // stem fallback: "movie" starts with "movie" and ends with .rar
    expect(result).toHaveLength(1);
  });
  // ── Single item fallback ──
  it("returns single archive item when no pattern matches", () => {
    const items = makeItems(["totally-different-name.rar"]);
    const result = resolveArchiveItemsFromList("Original.rar", items as any);
    // Single item in list with archive extension → return it
    expect(result).toHaveLength(1);
  });
  // ── Empty when no match ──
  it("returns empty when items have no archive extensions", () => {
    const items = makeItems(["video.mkv", "subtitle.srt"]);
    const result = resolveArchiveItemsFromList("Archive.rar", items as any);
    expect(result).toHaveLength(0);
  });
  // ── Items without targetPath ──
  it("falls back to fileName when targetPath is missing", () => {
    // Hand-built items (not makeItems) so that targetPath is absent.
    const items = [
      { fileName: "Movie.part1.rar", id: "1", status: "completed" },
      { fileName: "Movie.part2.rar", id: "2", status: "completed" },
    ];
    const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
    expect(result).toHaveLength(2);
  });
  // ── Multiple archives, should not cross-match ──
  it("does not cross-match different archive groups", () => {
    const items = makeItems([
      "Episode.S01E01.part1.rar",
      "Episode.S01E01.part2.rar",
      "Episode.S01E02.part1.rar",
      "Episode.S01E02.part2.rar",
    ]);
    const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
    expect(result1).toHaveLength(2);
    expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);
    const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
    expect(result2).toHaveLength(2);
    expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
  });
});