Compare commits

..

No commits in common. "main" and "v1.6.47" have entirely different histories.

20 changed files with 252 additions and 1488 deletions

View File

@ -160,7 +160,7 @@ The app stores runtime files in Electron's `userData` directory, including:
## Troubleshooting ## Troubleshooting
- Download does not start: verify token and selected provider in Settings. - Download does not start: verify token and selected provider in Settings.
- Extraction fails: check archive passwords and native extractor installation (7-Zip/WinRAR). Optional JVM extractor can be forced with `RD_EXTRACT_BACKEND=jvm`. - Extraction fails: check archive passwords, JVM runtime (`resources/extractor-jvm`), or force legacy mode with `RD_EXTRACT_BACKEND=legacy`.
- Very slow downloads: check active speed limit and bandwidth schedules. - Very slow downloads: check active speed limit and bandwidth schedules.
- Unexpected interruptions: enable reconnect and fallback providers. - Unexpected interruptions: enable reconnect and fallback providers.
- Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically. - Stalled downloads: the app auto-detects stalls within 10 seconds and retries automatically.
@ -169,29 +169,6 @@ The app stores runtime files in Electron's `userData` directory, including:
Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases). Release history is available on [git.24-music.de Releases](https://git.24-music.de/Administrator/real-debrid-downloader/releases).
### v1.6.60 (2026-03-05)
- Added package-scoped password cache for extraction: once the first archive in a package is solved, following archives in the same package reuse that password first.
- Kept fallback behavior intact (`""` and other candidates are still tested), but moved empty-password probing behind the learned password to reduce per-archive delays.
- Added cache invalidation on real `wrong_password` failures so stale passwords are automatically discarded.
### v1.6.59 (2026-03-05)
- Switched default extraction backend to native tools (`legacy`) for more stable archive-to-archive flow.
- Prioritized 7-Zip as primary native extractor, with WinRAR/UnRAR as fallback.
- JVM extractor remains available as opt-in via `RD_EXTRACT_BACKEND=jvm`.
### v1.6.58 (2026-03-05)
- Fixed extraction progress oscillation (`1% -> 100% -> 1%` loops) during password retries.
- Kept strict archive completion logic, but normalized in-progress archive percent to avoid false visual done states before real completion.
### v1.6.57 (2026-03-05)
- Fixed extraction flow so archives are marked done only on real completion, not on temporary `100%` progress spikes.
- Improved password handling: after the first successful archive, the discovered password is prioritized for subsequent archives.
- Fixed progress parsing for password retries (reset/restart handling), reducing visible and real gaps between archive extractions.
## License ## License
MIT - see `LICENSE`. MIT - see `LICENSE`.

View File

@ -1,6 +1,6 @@
{ {
"name": "real-debrid-downloader", "name": "real-debrid-downloader",
"version": "1.6.60", "version": "1.6.47",
"description": "Desktop downloader", "description": "Desktop downloader",
"main": "build/main/main/main.js", "main": "build/main/main/main.js",
"author": "Sucukdeluxe", "author": "Sucukdeluxe",

View File

@ -3,9 +3,7 @@ package com.sucukdeluxe.extractor;
import net.lingala.zip4j.ZipFile; import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException; import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.FileHeader; import net.lingala.zip4j.model.FileHeader;
import net.sf.sevenzipjbinding.ExtractAskMode;
import net.sf.sevenzipjbinding.ExtractOperationResult; import net.sf.sevenzipjbinding.ExtractOperationResult;
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
import net.sf.sevenzipjbinding.IArchiveOpenCallback; import net.sf.sevenzipjbinding.IArchiveOpenCallback;
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback; import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
import net.sf.sevenzipjbinding.IInArchive; import net.sf.sevenzipjbinding.IInArchive;
@ -53,10 +51,6 @@ public final class JBindExtractorMain {
} }
public static void main(String[] args) { public static void main(String[] args) {
if (args.length == 1 && "--daemon".equals(args[0])) {
runDaemon();
return;
}
int exit = 1; int exit = 1;
try { try {
ExtractionRequest request = parseArgs(args); ExtractionRequest request = parseArgs(args);
@ -71,127 +65,6 @@ public final class JBindExtractorMain {
System.exit(exit); System.exit(exit);
} }
private static void runDaemon() {
System.out.println("RD_DAEMON_READY");
System.out.flush();
java.io.BufferedReader reader = new java.io.BufferedReader(
new java.io.InputStreamReader(System.in, StandardCharsets.UTF_8));
try {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.isEmpty()) {
continue;
}
int exitCode = 1;
try {
ExtractionRequest request = parseDaemonRequest(line);
exitCode = runExtraction(request);
} catch (IllegalArgumentException error) {
emitError("Argumentfehler: " + safeMessage(error));
exitCode = 2;
} catch (Throwable error) {
emitError(safeMessage(error));
exitCode = 1;
}
System.out.println("RD_REQUEST_DONE " + exitCode);
System.out.flush();
}
} catch (IOException ignored) {
// stdin closed parent process exited
}
}
private static ExtractionRequest parseDaemonRequest(String jsonLine) {
// Minimal JSON parsing without external dependencies.
// Expected format: {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
ExtractionRequest request = new ExtractionRequest();
request.archiveFile = new File(extractJsonString(jsonLine, "archive"));
request.targetDir = new File(extractJsonString(jsonLine, "target"));
String conflict = extractJsonString(jsonLine, "conflict");
if (conflict.length() > 0) {
request.conflictMode = ConflictMode.fromValue(conflict);
}
String backend = extractJsonString(jsonLine, "backend");
if (backend.length() > 0) {
request.backend = Backend.fromValue(backend);
}
// Parse passwords array
int pwStart = jsonLine.indexOf("\"passwords\"");
if (pwStart >= 0) {
int arrStart = jsonLine.indexOf('[', pwStart);
int arrEnd = jsonLine.indexOf(']', arrStart);
if (arrStart >= 0 && arrEnd > arrStart) {
String arrContent = jsonLine.substring(arrStart + 1, arrEnd);
int idx = 0;
while (idx < arrContent.length()) {
int qStart = arrContent.indexOf('"', idx);
if (qStart < 0) break;
int qEnd = findClosingQuote(arrContent, qStart + 1);
if (qEnd < 0) break;
request.passwords.add(unescapeJsonString(arrContent.substring(qStart + 1, qEnd)));
idx = qEnd + 1;
}
}
}
if (request.archiveFile == null || !request.archiveFile.exists() || !request.archiveFile.isFile()) {
throw new IllegalArgumentException("Archiv nicht gefunden: " +
(request.archiveFile == null ? "null" : request.archiveFile.getAbsolutePath()));
}
if (request.targetDir == null) {
throw new IllegalArgumentException("--target fehlt");
}
return request;
}
private static String extractJsonString(String json, String key) {
String search = "\"" + key + "\"";
int keyIdx = json.indexOf(search);
if (keyIdx < 0) return "";
int colonIdx = json.indexOf(':', keyIdx + search.length());
if (colonIdx < 0) return "";
int qStart = json.indexOf('"', colonIdx + 1);
if (qStart < 0) return "";
int qEnd = findClosingQuote(json, qStart + 1);
if (qEnd < 0) return "";
return unescapeJsonString(json.substring(qStart + 1, qEnd));
}
private static int findClosingQuote(String s, int from) {
for (int i = from; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '\\') {
i++; // skip escaped character
continue;
}
if (c == '"') return i;
}
return -1;
}
private static String unescapeJsonString(String s) {
if (s.indexOf('\\') < 0) return s;
StringBuilder sb = new StringBuilder(s.length());
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '\\' && i + 1 < s.length()) {
char next = s.charAt(i + 1);
switch (next) {
case '"': sb.append('"'); i++; break;
case '\\': sb.append('\\'); i++; break;
case '/': sb.append('/'); i++; break;
case 'n': sb.append('\n'); i++; break;
case 'r': sb.append('\r'); i++; break;
case 't': sb.append('\t'); i++; break;
default: sb.append(c); break;
}
} else {
sb.append(c);
}
}
return sb.toString();
}
private static int runExtraction(ExtractionRequest request) throws Exception { private static int runExtraction(ExtractionRequest request) throws Exception {
List<String> passwords = normalizePasswords(request.passwords); List<String> passwords = normalizePasswords(request.passwords);
Exception lastError = null; Exception lastError = null;
@ -362,99 +235,110 @@ public final class JBindExtractorMain {
try { try {
context = openSevenZipArchive(request.archiveFile, password); context = openSevenZipArchive(request.archiveFile, password);
IInArchive archive = context.archive; IInArchive archive = context.archive;
int itemCount = archive.getNumberOfItems(); ISimpleInArchive simple = archive.getSimpleInterface();
if (itemCount <= 0) { ISimpleInArchiveItem[] items = simple.getArchiveItems();
if (items == null) {
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath()); throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
} }
// Pre-scan: collect file indices, sizes, output paths, and detect encryption
long totalUnits = 0; long totalUnits = 0;
boolean encrypted = false; boolean encrypted = false;
List<Integer> fileIndices = new ArrayList<Integer>(); for (ISimpleInArchiveItem item : items) {
List<File> outputFiles = new ArrayList<File>(); if (item == null || item.isFolder()) {
List<Long> fileSizes = new ArrayList<Long>(); continue;
}
try {
encrypted = encrypted || item.isEncrypted();
} catch (Throwable ignored) {
// ignore encrypted flag read issues
}
totalUnits += safeSize(item.getSize());
}
ProgressTracker progress = new ProgressTracker(totalUnits);
progress.emitStart();
Set<String> reserved = new HashSet<String>(); Set<String> reserved = new HashSet<String>();
for (ISimpleInArchiveItem item : items) {
if (item == null) {
continue;
}
for (int i = 0; i < itemCount; i++) { String entryName = normalizeEntryName(item.getPath(), "item-" + item.getItemIndex());
Boolean isFolder = (Boolean) archive.getProperty(i, PropID.IS_FOLDER); if (item.isFolder()) {
String entryPath = (String) archive.getProperty(i, PropID.PATH);
String entryName = normalizeEntryName(entryPath, "item-" + i);
if (Boolean.TRUE.equals(isFolder)) {
File dir = resolveDirectory(request.targetDir, entryName); File dir = resolveDirectory(request.targetDir, entryName);
ensureDirectory(dir); ensureDirectory(dir);
reserved.add(pathKey(dir)); reserved.add(pathKey(dir));
continue; continue;
} }
try { long itemUnits = safeSize(item.getSize());
Boolean isEncrypted = (Boolean) archive.getProperty(i, PropID.ENCRYPTED);
encrypted = encrypted || Boolean.TRUE.equals(isEncrypted);
} catch (Throwable ignored) {
// ignore encrypted flag read issues
}
Long rawSize = (Long) archive.getProperty(i, PropID.SIZE);
long itemSize = safeSize(rawSize);
totalUnits += itemSize;
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved); File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
fileIndices.add(i); if (output == null) {
outputFiles.add(output); // null if skipped progress.advance(itemUnits);
fileSizes.add(itemSize); continue;
}
if (fileIndices.isEmpty()) {
// All items are folders or skipped
ProgressTracker progress = new ProgressTracker(1);
progress.emitStart();
progress.emitDone();
return;
}
ProgressTracker progress = new ProgressTracker(totalUnits);
progress.emitStart();
// Build index array for bulk extract
int[] indices = new int[fileIndices.size()];
for (int i = 0; i < fileIndices.size(); i++) {
indices[i] = fileIndices.get(i);
}
// Map from archive index to our position in fileIndices/outputFiles
Map<Integer, Integer> indexToPos = new HashMap<Integer, Integer>();
for (int i = 0; i < fileIndices.size(); i++) {
indexToPos.put(fileIndices.get(i), i);
}
// Bulk extraction state
final boolean encryptedFinal = encrypted;
final String effectivePassword = password == null ? "" : password;
final File[] currentOutput = new File[1];
final FileOutputStream[] currentStream = new FileOutputStream[1];
final boolean[] currentSuccess = new boolean[1];
final long[] currentRemaining = new long[1];
final Throwable[] firstError = new Throwable[1];
final int[] currentPos = new int[] { -1 };
try {
archive.extract(indices, false, new BulkExtractCallback(
archive, indexToPos, fileIndices, outputFiles, fileSizes,
progress, encryptedFinal, effectivePassword, currentOutput,
currentStream, currentSuccess, currentRemaining, currentPos, firstError
));
} catch (SevenZipException error) {
if (looksLikeWrongPassword(error, encryptedFinal)) {
throw new WrongPasswordException(error);
} }
throw error;
}
if (firstError[0] != null) { ensureDirectory(output.getParentFile());
if (firstError[0] instanceof WrongPasswordException) { rejectSymlink(output);
throw (WrongPasswordException) firstError[0]; final FileOutputStream out = new FileOutputStream(output);
final long[] remaining = new long[] { itemUnits };
boolean extractionSuccess = false;
try {
ExtractOperationResult result = item.extractSlow(new ISequentialOutStream() {
@Override
public int write(byte[] data) throws SevenZipException {
if (data == null || data.length == 0) {
return 0;
}
try {
out.write(data);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
}
long accounted = Math.min(remaining[0], (long) data.length);
remaining[0] -= accounted;
progress.advance(accounted);
return data.length;
}
}, password == null ? "" : password);
if (remaining[0] > 0) {
progress.advance(remaining[0]);
}
if (result != ExtractOperationResult.OK) {
if (isPasswordFailure(result, encrypted)) {
throw new WrongPasswordException(new IOException("Falsches Passwort"));
}
throw new IOException("7z-Fehler: " + result.name());
}
extractionSuccess = true;
} catch (SevenZipException error) {
if (looksLikeWrongPassword(error, encrypted)) {
throw new WrongPasswordException(error);
}
throw error;
} finally {
try {
out.close();
} catch (Throwable ignored) {
}
if (!extractionSuccess && output.exists()) {
try {
output.delete();
} catch (Throwable ignored) {
}
}
}
try {
java.util.Date modified = item.getLastWriteTime();
if (modified != null) {
output.setLastModified(modified.getTime());
}
} catch (Throwable ignored) {
// best effort
} }
throw (Exception) firstError[0];
} }
progress.emitDone(); progress.emitDone();
@ -879,176 +763,6 @@ public final class JBindExtractorMain {
private final List<String> passwords = new ArrayList<String>(); private final List<String> passwords = new ArrayList<String>();
} }
/**
* Bulk extraction callback that implements both IArchiveExtractCallback and
* ICryptoGetTextPassword. Using the bulk IInArchive.extract() API instead of
* per-item extractSlow() is critical for performance solid RAR archives
* otherwise re-decode from the beginning for every single item.
*/
private static final class BulkExtractCallback implements IArchiveExtractCallback, ICryptoGetTextPassword {
private final IInArchive archive;
private final Map<Integer, Integer> indexToPos;
private final List<Integer> fileIndices;
private final List<File> outputFiles;
private final List<Long> fileSizes;
private final ProgressTracker progress;
private final boolean encrypted;
private final String password;
private final File[] currentOutput;
private final FileOutputStream[] currentStream;
private final boolean[] currentSuccess;
private final long[] currentRemaining;
private final int[] currentPos;
private final Throwable[] firstError;
BulkExtractCallback(IInArchive archive, Map<Integer, Integer> indexToPos,
List<Integer> fileIndices, List<File> outputFiles, List<Long> fileSizes,
ProgressTracker progress, boolean encrypted, String password,
File[] currentOutput, FileOutputStream[] currentStream,
boolean[] currentSuccess, long[] currentRemaining, int[] currentPos,
Throwable[] firstError) {
this.archive = archive;
this.indexToPos = indexToPos;
this.fileIndices = fileIndices;
this.outputFiles = outputFiles;
this.fileSizes = fileSizes;
this.progress = progress;
this.encrypted = encrypted;
this.password = password;
this.currentOutput = currentOutput;
this.currentStream = currentStream;
this.currentSuccess = currentSuccess;
this.currentRemaining = currentRemaining;
this.currentPos = currentPos;
this.firstError = firstError;
}
@Override
public String cryptoGetTextPassword() {
return password;
}
@Override
public void setTotal(long total) {
// 7z reports total compressed bytes; we track uncompressed via ProgressTracker
}
@Override
public void setCompleted(long complete) {
// Not used we track per-write progress
}
@Override
public ISequentialOutStream getStream(int index, ExtractAskMode extractAskMode) throws SevenZipException {
closeCurrentStream();
Integer pos = indexToPos.get(index);
if (pos == null) {
return null;
}
currentPos[0] = pos;
currentOutput[0] = outputFiles.get(pos);
currentSuccess[0] = false;
currentRemaining[0] = fileSizes.get(pos);
if (extractAskMode != ExtractAskMode.EXTRACT) {
currentOutput[0] = null;
return null;
}
if (currentOutput[0] == null) {
progress.advance(currentRemaining[0]);
return null;
}
try {
ensureDirectory(currentOutput[0].getParentFile());
rejectSymlink(currentOutput[0]);
currentStream[0] = new FileOutputStream(currentOutput[0]);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Erstellen: " + error.getMessage(), error);
}
return new ISequentialOutStream() {
@Override
public int write(byte[] data) throws SevenZipException {
if (data == null || data.length == 0) {
return 0;
}
try {
currentStream[0].write(data);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
}
long accounted = Math.min(currentRemaining[0], (long) data.length);
currentRemaining[0] -= accounted;
progress.advance(accounted);
return data.length;
}
};
}
@Override
public void prepareOperation(ExtractAskMode extractAskMode) {
// no-op
}
@Override
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
if (currentRemaining[0] > 0) {
progress.advance(currentRemaining[0]);
currentRemaining[0] = 0;
}
if (result == ExtractOperationResult.OK) {
currentSuccess[0] = true;
closeCurrentStream();
if (currentPos[0] >= 0 && currentOutput[0] != null) {
try {
int archiveIndex = fileIndices.get(currentPos[0]);
java.util.Date modified = (java.util.Date) archive.getProperty(archiveIndex, PropID.LAST_MODIFICATION_TIME);
if (modified != null) {
currentOutput[0].setLastModified(modified.getTime());
}
} catch (Throwable ignored) {
// best effort
}
}
} else {
closeCurrentStream();
if (currentOutput[0] != null && currentOutput[0].exists()) {
try {
currentOutput[0].delete();
} catch (Throwable ignored) {
}
}
if (firstError[0] == null) {
if (isPasswordFailure(result, encrypted)) {
firstError[0] = new WrongPasswordException(new IOException("Falsches Passwort"));
} else {
firstError[0] = new IOException("7z-Fehler: " + result.name());
}
}
}
}
private void closeCurrentStream() {
if (currentStream[0] != null) {
try {
currentStream[0].close();
} catch (Throwable ignored) {
}
currentStream[0] = null;
}
if (!currentSuccess[0] && currentOutput[0] != null && currentOutput[0].exists()) {
try {
currentOutput[0].delete();
} catch (Throwable ignored) {
}
}
}
}
private static final class WrongPasswordException extends Exception { private static final class WrongPasswordException extends Exception {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;

View File

@ -2,15 +2,7 @@ import fs from "node:fs";
import path from "node:path"; import path from "node:path";
import { spawnSync } from "node:child_process"; import { spawnSync } from "node:child_process";
const NPM_RELEASE_WIN = process.platform === "win32" const NPM_EXECUTABLE = process.platform === "win32" ? "npm.cmd" : "npm";
? {
command: process.env.ComSpec || "cmd.exe",
args: ["/d", "/s", "/c", "npm run release:win"]
}
: {
command: "npm",
args: ["run", "release:win"]
};
function run(command, args, options = {}) { function run(command, args, options = {}) {
const result = spawnSync(command, args, { const result = spawnSync(command, args, {
@ -333,7 +325,7 @@ async function main() {
updatePackageVersion(rootDir, version); updatePackageVersion(rootDir, version);
process.stdout.write(`Building release artifacts for ${tag}...\n`); process.stdout.write(`Building release artifacts for ${tag}...\n`);
run(NPM_RELEASE_WIN.command, NPM_RELEASE_WIN.args); run(NPM_EXECUTABLE, ["run", "release:win"]);
const assets = ensureAssetsExist(rootDir, version); const assets = ensureAssetsExist(rootDir, version);
run("git", ["add", "package.json"]); run("git", ["add", "package.json"]);

View File

@ -751,86 +751,60 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
return null; return null;
} }
export function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] { function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
const entryLower = archiveName.toLowerCase(); const entryLower = archiveName.toLowerCase();
// Helper: get item basename (try targetPath first, then fileName)
const itemBaseName = (item: DownloadItem): string =>
path.basename(item.targetPath || item.fileName || "");
// Try pattern-based matching first (for multipart archives)
let pattern: RegExp | null = null;
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/); const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
if (multipartMatch) { if (multipartMatch) {
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i"); const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
} return items.filter((item) => {
if (!pattern) { const name = path.basename(item.targetPath || item.fileName || "");
const rarMatch = entryLower.match(/^(.*)\.rar$/); return pattern.test(name);
if (rarMatch) {
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
}
}
if (!pattern) {
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
if (zipSplitMatch) {
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
}
}
if (!pattern) {
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
if (sevenSplitMatch) {
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
}
}
if (!pattern && /^(.*)\.001$/.test(entryLower) && !/\.(zip|7z)\.001$/.test(entryLower)) {
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
if (genericSplitMatch) {
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
}
}
// Attempt 1: Pattern match (handles multipart archives)
if (pattern) {
const matched = items.filter((item) => pattern!.test(itemBaseName(item)));
if (matched.length > 0) return matched;
}
// Attempt 2: Exact filename match (case-insensitive)
const exactMatch = items.filter((item) => itemBaseName(item).toLowerCase() === entryLower);
if (exactMatch.length > 0) return exactMatch;
// Attempt 3: Stem-based fuzzy match — strip archive extensions and compare stems.
// Handles cases where debrid services modify filenames slightly.
const archiveStem = entryLower
.replace(/\.part\d+\.rar$/i, "")
.replace(/\.r\d{2,3}$/i, "")
.replace(/\.rar$/i, "")
.replace(/\.(zip|7z)\.\d{3}$/i, "")
.replace(/\.\d{3}$/i, "")
.replace(/\.(zip|7z)$/i, "");
if (archiveStem.length > 3) {
const stemMatch = items.filter((item) => {
const name = itemBaseName(item).toLowerCase();
return name.startsWith(archiveStem) && /\.(rar|r\d{2,3}|zip|7z|\d{3})$/i.test(name);
}); });
if (stemMatch.length > 0) return stemMatch;
} }
const rarMatch = entryLower.match(/^(.*)\.rar$/);
// Attempt 4: If only one item in the list and one archive — return it as a best-effort match. if (rarMatch) {
// This handles single-file packages where the filename may have been modified. const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
if (items.length === 1) { const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
const singleName = itemBaseName(items[0]).toLowerCase(); return items.filter((item) => {
if (/\.(rar|zip|7z|\d{3})$/i.test(singleName)) { const name = path.basename(item.targetPath || item.fileName || "");
return items; return pattern.test(name);
} });
} }
// Split ZIP (e.g., movie.zip.001, movie.zip.002)
return []; const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
if (zipSplitMatch) {
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
// Split 7z (e.g., movie.7z.001, movie.7z.002)
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
if (sevenSplitMatch) {
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
// Generic .NNN splits (e.g., movie.001, movie.002)
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
if (genericSplitMatch && !/\.(zip|7z)\.001$/.test(entryLower)) {
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
return name === entryLower;
});
} }
function retryDelayWithJitter(attempt: number, baseMs: number): number { function retryDelayWithJitter(attempt: number, baseMs: number): number {
@ -1410,10 +1384,6 @@ export class DownloadManager extends EventEmitter {
addedPackages += 1; addedPackages += 1;
} }
if (addedPackages > 0 || addedLinks > 0) {
const pkgNames = packages.filter((p) => p.links.length > 0).map((p) => p.name).join(", ");
logger.info(`Pakete hinzugefügt: ${addedPackages} Paket(e), ${addedLinks} Link(s) [${pkgNames}]`);
}
this.persistSoon(); this.persistSoon();
this.emitState(); this.emitState();
if (unresolvedByLink.size > 0) { if (unresolvedByLink.size > 0) {
@ -3600,16 +3570,14 @@ export class DownloadManager extends EventEmitter {
this.emit("state", this.getSnapshot()); this.emit("state", this.getSnapshot());
return; return;
} }
// Too soon — replace any pending timer with a shorter forced-emit timer // Too soon — schedule deferred forced emit
if (this.stateEmitTimer) { if (!this.stateEmitTimer) {
clearTimeout(this.stateEmitTimer); this.stateEmitTimer = setTimeout(() => {
this.stateEmitTimer = null; this.stateEmitTimer = null;
this.lastStateEmitAt = nowMs();
this.emit("state", this.getSnapshot());
}, MIN_FORCE_GAP_MS - sinceLastEmit);
} }
this.stateEmitTimer = setTimeout(() => {
this.stateEmitTimer = null;
this.lastStateEmitAt = nowMs();
this.emit("state", this.getSnapshot());
}, MIN_FORCE_GAP_MS - sinceLastEmit);
return; return;
} }
if (this.stateEmitTimer) { if (this.stateEmitTimer) {
@ -3847,26 +3815,18 @@ export class DownloadManager extends EventEmitter {
this.packagePostProcessAbortControllers.set(packageId, abortController); this.packagePostProcessAbortControllers.set(packageId, abortController);
const task = (async () => { const task = (async () => {
const slotWaitStart = nowMs();
await this.acquirePostProcessSlot(packageId); await this.acquirePostProcessSlot(packageId);
const slotWaitMs = nowMs() - slotWaitStart;
if (slotWaitMs > 100) {
logger.info(`Post-Process Slot erhalten nach ${(slotWaitMs / 1000).toFixed(1)}s Wartezeit: pkg=${packageId.slice(0, 8)}`);
}
try { try {
let round = 0; // Loop while requeue requests arrive — keep the slot so the same
// package can immediately re-run hybrid extraction without waiting
// behind other packages that may be queued for the slot.
do { do {
round += 1;
const hadRequeue = this.hybridExtractRequeue.has(packageId);
this.hybridExtractRequeue.delete(packageId); this.hybridExtractRequeue.delete(packageId);
const roundStart = nowMs();
try { try {
await this.handlePackagePostProcessing(packageId, abortController.signal); await this.handlePackagePostProcessing(packageId, abortController.signal);
} catch (error) { } catch (error) {
logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`); logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`);
} }
const roundMs = nowMs() - roundStart;
logger.info(`Post-Process Runde ${round} fertig in ${(roundMs / 1000).toFixed(1)}s (requeue=${hadRequeue}, nextRequeue=${this.hybridExtractRequeue.has(packageId)}): pkg=${packageId.slice(0, 8)}`);
this.persistSoon(); this.persistSoon();
this.emitState(); this.emitState();
} while (this.hybridExtractRequeue.has(packageId)); } while (this.hybridExtractRequeue.has(packageId));
@ -4766,7 +4726,6 @@ export class DownloadManager extends EventEmitter {
item.fullStatus = `Starte... (${unrestricted.providerLabel})`; item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
item.updatedAt = nowMs(); item.updatedAt = nowMs();
this.emitState(); this.emitState();
logger.info(`Download Start: ${item.fileName} (${humanSize(unrestricted.fileSize || 0)}) via ${unrestricted.providerLabel}, pkg=${pkg.name}`);
const maxAttempts = maxItemAttempts; const maxAttempts = maxItemAttempts;
let done = false; let done = false;
@ -4875,7 +4834,6 @@ export class DownloadManager extends EventEmitter {
item.updatedAt = nowMs(); item.updatedAt = nowMs();
pkg.updatedAt = nowMs(); pkg.updatedAt = nowMs();
this.recordRunOutcome(item.id, "completed"); this.recordRunOutcome(item.id, "completed");
logger.info(`Download fertig: ${item.fileName} (${humanSize(item.downloadedBytes)}), pkg=${pkg.name}`);
if (this.session.running && !active.abortController.signal.aborted) { if (this.session.running && !active.abortController.signal.aborted) {
void this.runPackagePostProcessing(pkg.id).catch((err) => { void this.runPackagePostProcessing(pkg.id).catch((err) => {
@ -6299,12 +6257,7 @@ export class DownloadManager extends EventEmitter {
} }
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<number> { private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<number> {
const findReadyStart = nowMs();
const readyArchives = await this.findReadyArchiveSets(pkg); const readyArchives = await this.findReadyArchiveSets(pkg);
const findReadyMs = nowMs() - findReadyStart;
if (findReadyMs > 200) {
logger.info(`findReadyArchiveSets dauerte ${(findReadyMs / 1000).toFixed(1)}s: pkg=${pkg.name}, found=${readyArchives.size}`);
}
if (readyArchives.size === 0) { if (readyArchives.size === 0) {
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`); logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
// Relabel completed items that are part of incomplete multi-part archives // Relabel completed items that are part of incomplete multi-part archives
@ -6392,11 +6345,10 @@ export class DownloadManager extends EventEmitter {
const resolveArchiveItems = (archiveName: string): DownloadItem[] => const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
resolveArchiveItemsFromList(archiveName, items); resolveArchiveItemsFromList(archiveName, items);
// Track archives for parallel hybrid extraction progress // Track multiple active archives for parallel hybrid extraction
const hybridResolvedItems = new Map<string, DownloadItem[]>(); const activeHybridArchiveMap = new Map<string, DownloadItem[]>();
const hybridStartTimes = new Map<string, number>(); const hybridArchiveStartTimes = new Map<string, number>();
let hybridLastEmitAt = 0; let hybridLastEmitAt = 0;
let hybridLastProgressCurrent: number | null = null;
// Mark items based on whether their archive is actually ready for extraction. // Mark items based on whether their archive is actually ready for extraction.
// Only items whose archive is in readyArchives get "Ausstehend"; others keep // Only items whose archive is in readyArchives get "Ausstehend"; others keep
@ -6434,7 +6386,7 @@ export class DownloadManager extends EventEmitter {
packageId, packageId,
hybridMode: true, hybridMode: true,
maxParallel: this.settings.maxParallelExtract || 2, maxParallel: this.settings.maxParallelExtract || 2,
extractCpuPriority: "high", extractCpuPriority: this.settings.extractCpuPriority,
onProgress: (progress) => { onProgress: (progress) => {
if (progress.phase === "preparing") { if (progress.phase === "preparing") {
pkg.postProcessLabel = progress.archiveName || "Vorbereiten..."; pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
@ -6442,59 +6394,37 @@ export class DownloadManager extends EventEmitter {
return; return;
} }
if (progress.phase === "done") { if (progress.phase === "done") {
hybridResolvedItems.clear(); // Do NOT mark remaining archives as "Done" here — some may have
hybridStartTimes.clear(); // failed. The post-extraction code (result.failed check) will
hybridLastProgressCurrent = null; // assign the correct label. Only clear the tracking maps.
activeHybridArchiveMap.clear();
hybridArchiveStartTimes.clear();
return; return;
} }
const currentCount = Math.max(0, Number(progress.current ?? 0));
const archiveFinished = progress.archiveDone === true
|| (hybridLastProgressCurrent !== null && currentCount > hybridLastProgressCurrent);
hybridLastProgressCurrent = currentCount;
if (progress.archiveName) { if (progress.archiveName) {
// Resolve items for this archive if not yet tracked // Resolve items for this archive if not yet tracked
if (!hybridResolvedItems.has(progress.archiveName)) { if (!activeHybridArchiveMap.has(progress.archiveName)) {
const resolved = resolveArchiveItems(progress.archiveName); activeHybridArchiveMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
hybridResolvedItems.set(progress.archiveName, resolved); hybridArchiveStartTimes.set(progress.archiveName, nowMs());
hybridStartTimes.set(progress.archiveName, nowMs());
if (resolved.length === 0) {
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
} else {
logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
const initAt = nowMs();
for (const entry of resolved) {
if (!isExtractedLabel(entry.fullStatus)) {
entry.fullStatus = initLabel;
entry.updatedAt = initAt;
}
}
hybridLastEmitAt = initAt;
this.emitState(true);
}
} }
const archItems = hybridResolvedItems.get(progress.archiveName) || []; const archItems = activeHybridArchiveMap.get(progress.archiveName)!;
// Only mark as finished on explicit archive-done signal (or real current increment), // If archive is at 100%, mark its items as done and remove from active
// never on raw 100% archivePercent, because password retries can report 100% mid-run. if (Number(progress.archivePercent ?? 0) >= 100) {
if (archiveFinished) {
const doneAt = nowMs(); const doneAt = nowMs();
const startedAt = hybridStartTimes.get(progress.archiveName) || doneAt; const startedAt = hybridArchiveStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = progress.archiveSuccess === false const doneLabel = formatExtractDone(doneAt - startedAt);
? "Entpacken - Error"
: formatExtractDone(doneAt - startedAt);
for (const entry of archItems) { for (const entry of archItems) {
if (!isExtractedLabel(entry.fullStatus)) { if (!isExtractedLabel(entry.fullStatus)) {
entry.fullStatus = doneLabel; entry.fullStatus = doneLabel;
entry.updatedAt = doneAt; entry.updatedAt = doneAt;
} }
} }
hybridResolvedItems.delete(progress.archiveName); activeHybridArchiveMap.delete(progress.archiveName);
hybridStartTimes.delete(progress.archiveName); hybridArchiveStartTimes.delete(progress.archiveName);
// Show transitional label while next archive initializes // Show transitional label while next archive initializes
const done = currentCount; const done = progress.current + 1;
if (done < progress.total) { if (done < progress.total) {
pkg.postProcessLabel = `Entpacken (${done}/${progress.total}) - Naechstes Archiv...`; pkg.postProcessLabel = `Entpacken (${done}/${progress.total}) - Naechstes Archiv...`;
this.emitState(); this.emitState();
@ -6526,7 +6456,7 @@ export class DownloadManager extends EventEmitter {
} }
// Update package-level label with overall extraction progress // Update package-level label with overall extraction progress
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0; const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive)); const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
if (progress.passwordFound) { if (progress.passwordFound) {
pkg.postProcessLabel = `Passwort gefunden · ${progress.archiveName || ""}`; pkg.postProcessLabel = `Passwort gefunden · ${progress.archiveName || ""}`;
@ -6555,20 +6485,9 @@ export class DownloadManager extends EventEmitter {
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`); logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
if (result.extracted > 0) { if (result.extracted > 0) {
// Fire-and-forget: rename then collect MKVs in background so the void this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg).catch((err) =>
// slot is not blocked and the next archive set can start immediately. logger.warn(`Hybrid Auto-Rename Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`)
void (async () => { );
try {
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
} catch (err) {
logger.warn(`Hybrid Auto-Rename Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
}
try {
await this.collectMkvFilesToLibrary(packageId, pkg);
} catch (err) {
logger.warn(`Hybrid MKV-Collection Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
}
})();
} }
if (result.failed > 0) { if (result.failed > 0) {
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`); logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
@ -6624,7 +6543,6 @@ export class DownloadManager extends EventEmitter {
} }
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> { private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
const handleStart = nowMs();
const pkg = this.session.packages[packageId]; const pkg = this.session.packages[packageId];
if (!pkg || pkg.cancelled) { if (!pkg || pkg.cancelled) {
return; return;
@ -6636,7 +6554,6 @@ export class DownloadManager extends EventEmitter {
// Recover items whose file exists on disk but status was never set to "completed". // Recover items whose file exists on disk but status was never set to "completed".
// Only recover items in idle states (queued/paused), never active ones (downloading/validating). // Only recover items in idle states (queued/paused), never active ones (downloading/validating).
const recoveryStart = nowMs();
for (const item of items) { for (const item of items) {
if (isFinishedStatus(item.status)) { if (isFinishedStatus(item.status)) {
continue; continue;
@ -6676,12 +6593,10 @@ export class DownloadManager extends EventEmitter {
} }
} }
const recoveryMs = nowMs() - recoveryStart;
const success = items.filter((item) => item.status === "completed").length; const success = items.filter((item) => item.status === "completed").length;
const failed = items.filter((item) => item.status === "failed").length; const failed = items.filter((item) => item.status === "failed").length;
const cancelled = items.filter((item) => item.status === "cancelled").length; const cancelled = items.filter((item) => item.status === "cancelled").length;
const setupMs = nowMs() - handleStart; logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}`);
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}, setupMs=${setupMs}, recoveryMs=${recoveryMs}`);
const allDone = success + failed + cancelled >= items.length; const allDone = success + failed + cancelled >= items.length;
@ -6784,10 +6699,9 @@ export class DownloadManager extends EventEmitter {
} }
}, extractTimeoutMs); }, extractTimeoutMs);
try { try {
// Track archives for parallel extraction progress // Track multiple active archives for parallel extraction
const fullResolvedItems = new Map<string, DownloadItem[]>(); const activeArchiveItemsMap = new Map<string, DownloadItem[]>();
const fullStartTimes = new Map<string, number>(); const archiveStartTimes = new Map<string, number>();
let fullLastProgressCurrent: number | null = null;
const result = await extractPackageArchives({ const result = await extractPackageArchives({
packageDir: pkg.outputDir, packageDir: pkg.outputDir,
@ -6801,9 +6715,7 @@ export class DownloadManager extends EventEmitter {
packageId, packageId,
skipPostCleanup: true, skipPostCleanup: true,
maxParallel: this.settings.maxParallelExtract || 2, maxParallel: this.settings.maxParallelExtract || 2,
// All downloads finished — use NORMAL OS priority so extraction runs at extractCpuPriority: this.settings.extractCpuPriority,
// full speed (matching manual 7-Zip/WinRAR speed).
extractCpuPriority: "high",
onProgress: (progress) => { onProgress: (progress) => {
if (progress.phase === "preparing") { if (progress.phase === "preparing") {
pkg.postProcessLabel = progress.archiveName || "Vorbereiten..."; pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
@ -6811,59 +6723,38 @@ export class DownloadManager extends EventEmitter {
return; return;
} }
if (progress.phase === "done") { if (progress.phase === "done") {
fullResolvedItems.clear(); // Do NOT mark remaining archives as "Done" here — some may have
fullStartTimes.clear(); // failed. The post-extraction code (result.failed check) will
fullLastProgressCurrent = null; // assign the correct label. Only clear the tracking maps.
activeArchiveItemsMap.clear();
archiveStartTimes.clear();
emitExtractStatus("Entpacken 100%", true); emitExtractStatus("Entpacken 100%", true);
return; return;
} }
const currentCount = Math.max(0, Number(progress.current ?? 0));
const archiveFinished = progress.archiveDone === true
|| (fullLastProgressCurrent !== null && currentCount > fullLastProgressCurrent);
fullLastProgressCurrent = currentCount;
if (progress.archiveName) { if (progress.archiveName) {
// Resolve items for this archive if not yet tracked // Resolve items for this archive if not yet tracked
if (!fullResolvedItems.has(progress.archiveName)) { if (!activeArchiveItemsMap.has(progress.archiveName)) {
const resolved = resolveArchiveItems(progress.archiveName); activeArchiveItemsMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
fullResolvedItems.set(progress.archiveName, resolved); archiveStartTimes.set(progress.archiveName, nowMs());
fullStartTimes.set(progress.archiveName, nowMs());
if (resolved.length === 0) {
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
} else {
logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
const initAt = nowMs();
for (const entry of resolved) {
if (!isExtractedLabel(entry.fullStatus)) {
entry.fullStatus = initLabel;
entry.updatedAt = initAt;
}
}
emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true);
}
} }
const archiveItems = fullResolvedItems.get(progress.archiveName) || []; const archiveItems = activeArchiveItemsMap.get(progress.archiveName)!;
// Only finalize on explicit archive completion (or real current increment), // If archive is at 100%, mark its items as done and remove from active
// not on plain 100% archivePercent. if (Number(progress.archivePercent ?? 0) >= 100) {
if (archiveFinished) {
const doneAt = nowMs(); const doneAt = nowMs();
const startedAt = fullStartTimes.get(progress.archiveName) || doneAt; const startedAt = archiveStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = progress.archiveSuccess === false const doneLabel = formatExtractDone(doneAt - startedAt);
? "Entpacken - Error"
: formatExtractDone(doneAt - startedAt);
for (const entry of archiveItems) { for (const entry of archiveItems) {
if (!isExtractedLabel(entry.fullStatus)) { if (!isExtractedLabel(entry.fullStatus)) {
entry.fullStatus = doneLabel; entry.fullStatus = doneLabel;
entry.updatedAt = doneAt; entry.updatedAt = doneAt;
} }
} }
fullResolvedItems.delete(progress.archiveName); activeArchiveItemsMap.delete(progress.archiveName);
fullStartTimes.delete(progress.archiveName); archiveStartTimes.delete(progress.archiveName);
// Show transitional label while next archive initializes // Show transitional label while next archive initializes
const done = currentCount; const done = progress.current + 1;
if (done < progress.total) { if (done < progress.total) {
emitExtractStatus(`Entpacken (${done}/${progress.total}) - Naechstes Archiv...`, true); emitExtractStatus(`Entpacken (${done}/${progress.total}) - Naechstes Archiv...`, true);
} }
@ -6898,7 +6789,7 @@ export class DownloadManager extends EventEmitter {
const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000 const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000
? ` · ${Math.floor(progress.elapsedMs / 1000)}s` ? ` · ${Math.floor(progress.elapsedMs / 1000)}s`
: ""; : "";
const activeArchive = !archiveFinished && Number(progress.archivePercent ?? 0) > 0 ? 1 : 0; const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive)); const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
let overallLabel: string; let overallLabel: string;
if (progress.passwordFound) { if (progress.passwordFound) {

View File

@ -1,7 +1,7 @@
import fs from "node:fs"; import fs from "node:fs";
import path from "node:path"; import path from "node:path";
import os from "node:os"; import os from "node:os";
import { spawn, spawnSync, type ChildProcess } from "node:child_process"; import { spawn, spawnSync } from "node:child_process";
import AdmZip from "adm-zip"; import AdmZip from "adm-zip";
import { CleanupMode, ConflictMode } from "../shared/types"; import { CleanupMode, ConflictMode } from "../shared/types";
import { logger } from "./logger"; import { logger } from "./logger";
@ -10,7 +10,7 @@ import { removeDownloadLinkArtifacts, removeSampleArtifacts } from "./cleanup";
import crypto from "node:crypto"; import crypto from "node:crypto";
const DEFAULT_ARCHIVE_PASSWORDS = ["", "serienfans.org", "serienjunkies.org"]; const DEFAULT_ARCHIVE_PASSWORDS = ["", "serienfans.org", "serienjunkies.org"];
const NO_EXTRACTOR_MESSAGE = "Kein nativer Entpacker gefunden (7-Zip/WinRAR). Bitte 7-Zip oder WinRAR installieren."; const NO_EXTRACTOR_MESSAGE = "WinRAR/UnRAR nicht gefunden. Bitte WinRAR installieren.";
const NO_JVM_EXTRACTOR_MESSAGE = "7-Zip-JBinding Runtime nicht gefunden. Bitte resources/extractor-jvm prüfen."; const NO_JVM_EXTRACTOR_MESSAGE = "7-Zip-JBinding Runtime nicht gefunden. Bitte resources/extractor-jvm prüfen.";
const JVM_EXTRACTOR_MAIN_CLASS = "com.sucukdeluxe.extractor.JBindExtractorMain"; const JVM_EXTRACTOR_MAIN_CLASS = "com.sucukdeluxe.extractor.JBindExtractorMain";
const JVM_EXTRACTOR_CLASSES_SUBDIR = "classes"; const JVM_EXTRACTOR_CLASSES_SUBDIR = "classes";
@ -123,8 +123,6 @@ export interface ExtractProgressUpdate {
passwordAttempt?: number; passwordAttempt?: number;
passwordTotal?: number; passwordTotal?: number;
passwordFound?: boolean; passwordFound?: boolean;
archiveDone?: boolean;
archiveSuccess?: boolean;
} }
const MAX_EXTRACT_OUTPUT_BUFFER = 48 * 1024; const MAX_EXTRACT_OUTPUT_BUFFER = 48 * 1024;
@ -135,8 +133,6 @@ const EXTRACT_MAX_TIMEOUT_MS = 120 * 60 * 1000;
const ARCHIVE_SORT_COLLATOR = new Intl.Collator(undefined, { numeric: true, sensitivity: "base" }); const ARCHIVE_SORT_COLLATOR = new Intl.Collator(undefined, { numeric: true, sensitivity: "base" });
const DISK_SPACE_SAFETY_FACTOR = 1.1; const DISK_SPACE_SAFETY_FACTOR = 1.1;
const NESTED_EXTRACT_BLACKLIST_RE = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i; const NESTED_EXTRACT_BLACKLIST_RE = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
const PACKAGE_PASSWORD_CACHE_LIMIT = 256;
const packageLearnedPasswords = new Map<string, string>();
export type ArchiveSignature = "rar" | "7z" | "zip" | "gzip" | "bzip2" | "xz" | null; export type ArchiveSignature = "rar" | "7z" | "zip" | "gzip" | "bzip2" | "xz" | null;
@ -149,54 +145,6 @@ const ARCHIVE_SIGNATURES: { prefix: string; type: ArchiveSignature }[] = [
{ prefix: "fd377a585a00", type: "xz" }, { prefix: "fd377a585a00", type: "xz" },
]; ];
function packagePasswordCacheKey(packageDir: string, packageId?: string): string {
  // Prefer a stable key derived from the package id; fall back to the
  // normalized package directory when no id is available.
  const trimmedId = String(packageId || "").trim();
  return trimmedId
    ? `pkg:${trimmedId}`
    : `dir:${pathSetKey(path.resolve(packageDir))}`;
}
function packagePasswordCacheLabel(packageDir: string, packageId?: string): string {
  // Human-readable tag for log lines; shortens the id to keep logs compact.
  const trimmedId = String(packageId || "").trim();
  if (!trimmedId) {
    return `packageDir=${path.basename(path.resolve(packageDir))}`;
  }
  return `packageId=${trimmedId.slice(0, 8)}`;
}
function readCachedPackagePassword(cacheKey: string): string {
  // Look up a previously learned password for this package; returns "" on miss.
  const learned = packageLearnedPasswords.get(cacheKey) || "";
  if (learned) {
    // Re-insert so Map iteration order treats this entry as most recently
    // used, keeping active package caches alive under LRU eviction.
    packageLearnedPasswords.delete(cacheKey);
    packageLearnedPasswords.set(cacheKey, learned);
  }
  return learned;
}
function writeCachedPackagePassword(cacheKey: string, password: string): void {
  // Remember the password that opened an archive in this package.
  // Empty or whitespace-only passwords are never cached.
  const trimmed = String(password || "").trim();
  if (!trimmed) {
    return;
  }
  // Delete-then-set moves the entry to the most-recently-used end of the Map.
  packageLearnedPasswords.delete(cacheKey);
  packageLearnedPasswords.set(cacheKey, trimmed);
  // Evict the least recently used entry once the cache exceeds its limit.
  while (packageLearnedPasswords.size > PACKAGE_PASSWORD_CACHE_LIMIT) {
    const oldestKey = packageLearnedPasswords.keys().next().value as string | undefined;
    if (!oldestKey) {
      break;
    }
    packageLearnedPasswords.delete(oldestKey);
  }
}
// Drops a learned password from the package-scoped cache, e.g. after a
// confirmed wrong-password failure, so stale entries are not retried first.
function clearCachedPackagePassword(cacheKey: string): void {
  packageLearnedPasswords.delete(cacheKey);
}
export async function detectArchiveSignature(filePath: string): Promise<ArchiveSignature> { export async function detectArchiveSignature(filePath: string): Promise<ArchiveSignature> {
let fd: fs.promises.FileHandle | null = null; let fd: fs.promises.FileHandle | null = null;
try { try {
@ -430,12 +378,6 @@ function parseProgressPercent(chunk: string): number | null {
return latest; return latest;
} }
function nextArchivePercent(previous: number, incoming: number): number {
  // Monotonic percent merge: clamp both values to integer 0..100 and never
  // let the reported progress move backwards.
  const clamp = (value: number): number =>
    Math.min(100, Math.max(0, Math.floor(Number(value) || 0)));
  return Math.max(clamp(previous), clamp(incoming));
}
async function shouldPreferExternalZip(archivePath: string): Promise<boolean> { async function shouldPreferExternalZip(archivePath: string): Promise<boolean> {
if (extractorBackendMode() !== "legacy") { if (extractorBackendMode() !== "legacy") {
return true; return true;
@ -587,63 +529,32 @@ function prioritizePassword(passwords: string[], successful: string): string[] {
return passwords; return passwords;
} }
const index = passwords.findIndex((candidate) => candidate === target); const index = passwords.findIndex((candidate) => candidate === target);
if (index === 0) { if (index <= 0) {
return passwords; return passwords;
} }
if (index < 0) {
return [target, ...passwords.filter((candidate) => candidate !== target)];
}
const next = [...passwords]; const next = [...passwords];
const [value] = next.splice(index, 1); const [value] = next.splice(index, 1);
next.unshift(value); next.unshift(value);
return next; return next;
} }
function nativeExtractorCandidates(): string[] { function winRarCandidates(): string[] {
const programFiles = process.env.ProgramFiles || "C:\\Program Files"; const programFiles = process.env.ProgramFiles || "C:\\Program Files";
const programFilesX86 = process.env["ProgramFiles(x86)"] || "C:\\Program Files (x86)"; const programFilesX86 = process.env["ProgramFiles(x86)"] || "C:\\Program Files (x86)";
const localAppData = process.env.LOCALAPPDATA || ""; const localAppData = process.env.LOCALAPPDATA || "";
const sevenZipInstalled = [ const installed = [
process.env.RD_7Z_BIN || "",
path.join(programFiles, "7-Zip", "7z.exe"),
path.join(programFilesX86, "7-Zip", "7z.exe")
];
if (localAppData) {
sevenZipInstalled.push(path.join(localAppData, "Programs", "7-Zip", "7z.exe"));
}
const winRarInstalled = [
path.join(programFiles, "WinRAR", "UnRAR.exe"), path.join(programFiles, "WinRAR", "UnRAR.exe"),
path.join(programFilesX86, "WinRAR", "UnRAR.exe") path.join(programFilesX86, "WinRAR", "UnRAR.exe")
]; ];
if (localAppData) { if (localAppData) {
winRarInstalled.push(path.join(localAppData, "Programs", "WinRAR", "UnRAR.exe")); installed.push(path.join(localAppData, "Programs", "WinRAR", "UnRAR.exe"));
} }
const ordered = resolvedExtractorCommand const ordered = resolvedExtractorCommand
? [ ? [resolvedExtractorCommand, ...installed, "UnRAR.exe", "unrar"]
resolvedExtractorCommand, : [...installed, "UnRAR.exe", "unrar"];
...sevenZipInstalled,
"7z.exe",
"7z",
"7za.exe",
"7za",
...winRarInstalled,
"UnRAR.exe",
"unrar"
]
: [
...sevenZipInstalled,
"7z.exe",
"7z",
"7za.exe",
"7za",
...winRarInstalled,
"UnRAR.exe",
"unrar"
];
return Array.from(new Set(ordered.filter(Boolean))); return Array.from(new Set(ordered.filter(Boolean)));
} }
@ -689,8 +600,8 @@ function extractCpuBudgetFromPriority(priority?: string): number {
function extractOsPriority(priority?: string): number { function extractOsPriority(priority?: string): number {
switch (priority) { switch (priority) {
case "high": return os.constants.priority.PRIORITY_NORMAL; case "high": return os.constants.priority.PRIORITY_BELOW_NORMAL;
default: return os.constants.priority.PRIORITY_BELOW_NORMAL; default: return os.constants.priority.PRIORITY_LOW;
} }
} }
@ -704,15 +615,10 @@ function extractCpuBudgetPercent(priority?: string): number {
function extractorThreadSwitch(hybridMode = false, priority?: string): string { function extractorThreadSwitch(hybridMode = false, priority?: string): string {
if (hybridMode) { if (hybridMode) {
// Use half the CPU budget during hybrid extraction to leave headroom for // 2 threads during hybrid extraction (download + extract simultaneously).
// concurrent downloads. Falls back to at least 2 threads. // JDownloader 2 uses in-process 7-Zip-JBinding which naturally limits throughput
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN); // to ~16 MB/s write. 2 UnRAR threads produce similar controlled disk load.
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) { return "-mt2";
return `-mt${Math.floor(envValue)}`;
}
const cpuCount = Math.max(1, os.cpus().length || 1);
const hybridThreads = Math.max(2, Math.min(8, Math.floor(cpuCount / 2)));
return `-mt${hybridThreads}`;
} }
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN); const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) { if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
@ -734,8 +640,8 @@ function lowerExtractProcessPriority(childPid: number | undefined, cpuPriority?:
return; return;
} }
try { try {
// Sets CPU scheduling priority for the extraction process. // Lowers CPU scheduling priority so extraction doesn't starve other processes.
// high → NORMAL (full speed), default → BELOW_NORMAL. I/O priority stays Normal. // high → BELOW_NORMAL, middle/low → IDLE. I/O priority stays Normal (like JDownloader 2).
os.setPriority(pid, extractOsPriority(cpuPriority)); os.setPriority(pid, extractOsPriority(cpuPriority));
} catch { } catch {
// ignore: priority lowering is best-effort // ignore: priority lowering is best-effort
@ -938,7 +844,7 @@ type JvmExtractResult = {
}; };
function extractorBackendMode(): ExtractBackendMode { function extractorBackendMode(): ExtractBackendMode {
const defaultMode = "legacy"; const defaultMode = process.env.VITEST ? "legacy" : "jvm";
const raw = String(process.env.RD_EXTRACT_BACKEND || defaultMode).trim().toLowerCase(); const raw = String(process.env.RD_EXTRACT_BACKEND || defaultMode).trim().toLowerCase();
if (raw === "legacy") { if (raw === "legacy") {
return "legacy"; return "legacy";
@ -1050,12 +956,9 @@ function parseJvmLine(
if (trimmed.startsWith("RD_PROGRESS ")) { if (trimmed.startsWith("RD_PROGRESS ")) {
const parsed = parseProgressPercent(trimmed); const parsed = parseProgressPercent(trimmed);
if (parsed !== null) { if (parsed !== null && parsed > state.bestPercent) {
const next = nextArchivePercent(state.bestPercent, parsed); state.bestPercent = parsed;
if (next !== state.bestPercent) { onArchiveProgress?.(parsed);
state.bestPercent = next;
onArchiveProgress?.(next);
}
} }
return; return;
} }
@ -1080,312 +983,7 @@ function parseJvmLine(
} }
} }
// ── Persistent JVM Daemon ── function runJvmExtractCommand(
// Keeps a single JVM process alive across multiple extraction requests,
// eliminating the ~5s JVM boot overhead per archive.
// One in-flight extraction request routed through the persistent JVM daemon.
interface DaemonRequest {
  // Fulfils the promise returned to the caller with the final extract outcome.
  resolve: (result: JvmExtractResult) => void;
  // Receives per-archive percent updates (0-100) parsed from daemon output.
  onArchiveProgress?: (percent: number) => void;
  // Optional external cancellation signal; its listener is detached on finish.
  signal?: AbortSignal;
  // Per-request timeout in ms — presumably arms daemonTimeoutId; confirm in sendDaemonRequest.
  timeoutMs?: number;
  // Mutable scratch state shared with parseJvmLine while output streams in.
  parseState: { bestPercent: number; usedPassword: string; backend: string; reportedError: string };
  // Archive basename, used in completion log lines.
  archiveName: string;
  // Date.now() when the request was dispatched; used to log elapsed time.
  startedAt: number;
  // Number of password candidates sent with this request (logging only).
  passwordCount: number;
}
// Handle of the long-lived JVM daemon child process (null when not running).
let daemonProcess: ChildProcess | null = null;
// True once the daemon printed its RD_DAEMON_READY handshake line.
let daemonReady = false;
// True while a request is in flight; the daemon handles one job at a time.
let daemonBusy = false;
// The request currently being serviced, or null when idle.
let daemonCurrentRequest: DaemonRequest | null = null;
// Partial-line accumulators: stdout/stderr chunks are buffered until a newline.
let daemonStdoutBuffer = "";
let daemonStderrBuffer = "";
// Size-capped capture of all daemon output, used to build error messages.
let daemonOutput = "";
// Pending request-timeout timer; cleared when a request finishes.
let daemonTimeoutId: NodeJS.Timeout | null = null;
// Abort listener registered on the current request's signal, if any.
let daemonAbortHandler: (() => void) | null = null;
// Layout (java command + classpath) the running daemon was started with.
let daemonLayout: JvmExtractorLayout | null = null;
export function shutdownDaemon(): void {
  // Best-effort teardown: close stdin (lets the daemon exit gracefully),
  // then force-kill the whole process tree.
  const child = daemonProcess;
  if (child) {
    daemonProcess = null;
    try {
      child.stdin?.end();
    } catch {
      // ignore: stream may already be closed
    }
    try {
      killProcessTree(child);
    } catch {
      // ignore: process may have exited on its own
    }
  }
  // Reset all module-level daemon state so a fresh daemon can be started.
  daemonReady = false;
  daemonBusy = false;
  daemonCurrentRequest = null;
  daemonStdoutBuffer = "";
  daemonStderrBuffer = "";
  daemonOutput = "";
  if (daemonTimeoutId) {
    clearTimeout(daemonTimeoutId);
    daemonTimeoutId = null;
  }
  daemonAbortHandler = null;
  daemonLayout = null;
}
function finishDaemonRequest(result: JvmExtractResult): void {
  // Settle the currently pending daemon request (if any) and reset all
  // per-request state so the daemon can accept the next job.
  const pending = daemonCurrentRequest;
  if (!pending) {
    return;
  }
  daemonCurrentRequest = null;
  daemonBusy = false;
  daemonStdoutBuffer = "";
  daemonStderrBuffer = "";
  daemonOutput = "";
  if (daemonTimeoutId) {
    clearTimeout(daemonTimeoutId);
    daemonTimeoutId = null;
  }
  // Detach the abort listener that was registered for this request.
  const abortHandler = daemonAbortHandler;
  if (pending.signal && abortHandler) {
    pending.signal.removeEventListener("abort", abortHandler);
    daemonAbortHandler = null;
  }
  pending.resolve(result);
}
function handleDaemonLine(line: string): void {
  // Dispatch one line of daemon output: lifecycle markers are handled first,
  // everything else is forwarded to the progress/status parser.
  const text = String(line || "").trim();
  if (!text) {
    return;
  }
  if (text === "RD_DAEMON_READY") {
    // Boot handshake: the JVM signals it can now accept requests.
    daemonReady = true;
    logger.info("JVM Daemon bereit (persistent)");
    return;
  }
  if (text.startsWith("RD_REQUEST_DONE ")) {
    // Completion marker carries the extractor exit code.
    const exitCode = parseInt(text.slice("RD_REQUEST_DONE ".length).trim(), 10);
    const request = daemonCurrentRequest;
    if (!request) {
      return;
    }
    const elapsedMs = Date.now() - request.startedAt;
    logger.info(
      `JVM Daemon Request Ende: archive=${request.archiveName}, code=${exitCode}, ms=${elapsedMs}, pwCandidates=${request.passwordCount}, ` +
      `bestPercent=${request.parseState.bestPercent}, backend=${request.parseState.backend || "unknown"}, usedPassword=${request.parseState.usedPassword ? "yes" : "no"}`
    );
    if (exitCode === 0) {
      request.onArchiveProgress?.(100);
      finishDaemonRequest({
        ok: true, missingCommand: false, missingRuntime: false,
        aborted: false, timedOut: false, errorText: "",
        usedPassword: request.parseState.usedPassword, backend: request.parseState.backend
      });
    } else {
      const message = cleanErrorText(request.parseState.reportedError || daemonOutput) || `Exit Code ${exitCode}`;
      finishDaemonRequest({
        ok: false, missingCommand: false, missingRuntime: isJvmRuntimeMissingError(message),
        aborted: false, timedOut: false, errorText: message,
        usedPassword: request.parseState.usedPassword, backend: request.parseState.backend
      });
    }
    return;
  }
  // Regular progress/status line — delegate to the shared JVM-output parser.
  if (daemonCurrentRequest) {
    parseJvmLine(text, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
  }
}
// Spawns the persistent JVM extractor daemon if it is not already running.
// Returns true when a daemon is ready or was just spawned, false when one is
// still booting or the spawn failed. Readiness itself is signalled
// asynchronously via the RD_DAEMON_READY line handled in handleDaemonLine.
function startDaemon(layout: JvmExtractorLayout): boolean {
  if (daemonProcess && daemonReady) return true;
  // Don't kill a daemon that's still booting — it will become ready soon
  if (daemonProcess) return false;
  // No process at all: clear any stale module state before spawning.
  shutdownDaemon();
  // Private temp dir per daemon instance; removed in the "close" handler.
  const jvmTmpDir = path.join(os.tmpdir(), `rd-extract-daemon-${crypto.randomUUID()}`);
  fs.mkdirSync(jvmTmpDir, { recursive: true });
  const args = [
    "-Dfile.encoding=UTF-8",
    `-Djava.io.tmpdir=${jvmTmpDir}`,
    "-Xms512m",
    "-Xmx8g",
    "-XX:+UseSerialGC",
    "-cp",
    layout.classPath,
    JVM_EXTRACTOR_MAIN_CLASS,
    "--daemon"
  ];
  try {
    const child = spawn(layout.javaCommand, args, {
      windowsHide: true,
      stdio: ["pipe", "pipe", "pipe"]
    });
    lowerExtractProcessPriority(child.pid, currentExtractCpuPriority);
    daemonProcess = child;
    daemonLayout = layout;
    // stdout: split into complete lines (keeping the trailing partial line
    // buffered) and feed each line to the daemon protocol handler.
    child.stdout!.on("data", (chunk) => {
      const raw = String(chunk || "");
      daemonOutput = appendLimited(daemonOutput, raw);
      daemonStdoutBuffer += raw;
      const lines = daemonStdoutBuffer.split(/\r?\n/);
      daemonStdoutBuffer = lines.pop() || "";
      for (const line of lines) {
        handleDaemonLine(line);
      }
    });
    // stderr: same line splitting, but only parsed for progress/error state
    // while a request is active (no protocol markers expected here).
    child.stderr!.on("data", (chunk) => {
      const raw = String(chunk || "");
      daemonOutput = appendLimited(daemonOutput, raw);
      daemonStderrBuffer += raw;
      const lines = daemonStderrBuffer.split(/\r?\n/);
      daemonStderrBuffer = lines.pop() || "";
      for (const line of lines) {
        if (daemonCurrentRequest) {
          parseJvmLine(line, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
        }
      }
    });
    // Spawn-level failure (e.g. java binary missing): fail any pending
    // request and tear down all daemon state.
    child.on("error", () => {
      if (daemonCurrentRequest) {
        finishDaemonRequest({
          ok: false, missingCommand: true, missingRuntime: true,
          aborted: false, timedOut: false, errorText: "Daemon process error",
          usedPassword: "", backend: ""
        });
      }
      shutdownDaemon();
    });
    // Process exit: fail any in-flight request with the best error text we
    // captured, clean up the temp dir, and mark the daemon as gone.
    child.on("close", () => {
      if (daemonCurrentRequest) {
        const req = daemonCurrentRequest;
        finishDaemonRequest({
          ok: false, missingCommand: false, missingRuntime: false,
          aborted: false, timedOut: false,
          errorText: cleanErrorText(req.parseState.reportedError || daemonOutput) || "Daemon process exited unexpectedly",
          usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
        });
      }
      // Clean up tmp dir
      fs.rm(jvmTmpDir, { recursive: true, force: true }, () => {});
      daemonProcess = null;
      daemonReady = false;
      daemonBusy = false;
      daemonLayout = null;
    });
    logger.info(`JVM Daemon gestartet (PID ${child.pid})`);
    return true;
  } catch (error) {
    logger.warn(`JVM Daemon Start fehlgeschlagen: ${String(error)}`);
    return false;
  }
}
/**
 * Report whether the persistent daemon can take a request right now,
 * kicking off a (re)start when it is not running or not yet booted.
 */
function isDaemonAvailable(layout: JvmExtractorLayout): boolean {
  const upAndReady = Boolean(daemonProcess) && daemonReady;
  if (!upAndReady) {
    startDaemon(layout);
  }
  // Usable only when running, fully booted, and not serving another request.
  return Boolean(daemonProcess) && daemonReady && !daemonBusy;
}
/** Wait for the daemon to become ready (boot phase) or free (busy phase), with timeout. */
function waitForDaemonReady(maxWaitMs: number, signal?: AbortSignal): Promise<boolean> {
  return new Promise((resolve) => {
    const deadline = Date.now() + maxWaitMs;
    // Returns true once a terminal condition was reached and resolved.
    const poll = (): boolean => {
      if (signal?.aborted) { resolve(false); return true; }
      if (daemonProcess && daemonReady && !daemonBusy) { resolve(true); return true; }
      // Daemon died while we were waiting
      if (!daemonProcess) { resolve(false); return true; }
      if (Date.now() >= deadline) { resolve(false); return true; }
      return false;
    };
    if (poll()) {
      return;
    }
    const timer = setInterval(() => {
      if (poll()) {
        clearInterval(timer);
      }
    }, 50);
  });
}
/**
 * Dispatch one extraction request to the running daemon and resolve with its
 * result. Sets the busy flag, arms the per-request timeout and abort wiring,
 * then writes a line-delimited JSON request to the daemon's stdin. The
 * response is parsed elsewhere (handleDaemonLine → finishDaemonRequest).
 *
 * Precondition: a ready, non-busy daemon (see isDaemonAvailable).
 */
function sendDaemonRequest(
  archivePath: string,
  targetDir: string,
  conflictMode: ConflictMode,
  passwordCandidates: string[],
  onArchiveProgress?: (percent: number) => void,
  signal?: AbortSignal,
  timeoutMs?: number
): Promise<JvmExtractResult> {
  return new Promise((resolve) => {
    // Fix: an "abort" listener added to an already-aborted AbortSignal never
    // fires, so a pre-aborted request would hang until the timeout — or
    // forever when no timeout is set. Bail out immediately instead.
    if (signal?.aborted) {
      resolve({
        ok: false, missingCommand: false, missingRuntime: false,
        aborted: true, timedOut: false, errorText: "aborted:extract",
        usedPassword: "", backend: ""
      });
      return;
    }
    const mode = effectiveConflictMode(conflictMode);
    const parseState = { bestPercent: 0, usedPassword: "", backend: "", reportedError: "" };
    const archiveName = path.basename(archivePath);
    daemonBusy = true;
    daemonOutput = "";
    daemonCurrentRequest = {
      resolve,
      onArchiveProgress,
      signal,
      timeoutMs,
      parseState,
      archiveName,
      startedAt: Date.now(),
      passwordCount: passwordCandidates.length
    };
    logger.info(`JVM Daemon Request Start: archive=${archiveName}, pwCandidates=${passwordCandidates.length}, timeoutMs=${timeoutMs || 0}, conflict=${mode}`);
    // Set up timeout
    if (timeoutMs && timeoutMs > 0) {
      daemonTimeoutId = setTimeout(() => {
        // Timeout — kill the daemon and restart fresh for next request
        const req = daemonCurrentRequest;
        if (req) {
          finishDaemonRequest({
            ok: false, missingCommand: false, missingRuntime: false,
            aborted: false, timedOut: true,
            errorText: `Entpacken Timeout nach ${Math.ceil(timeoutMs / 1000)}s`,
            usedPassword: parseState.usedPassword, backend: parseState.backend
          });
        }
        shutdownDaemon();
      }, timeoutMs);
    }
    // Set up abort handler
    if (signal) {
      daemonAbortHandler = () => {
        const req = daemonCurrentRequest;
        if (req) {
          finishDaemonRequest({
            ok: false, missingCommand: false, missingRuntime: false,
            aborted: true, timedOut: false, errorText: "aborted:extract",
            usedPassword: parseState.usedPassword, backend: parseState.backend
          });
        }
        // Kill daemon on abort — cleaner than trying to interrupt mid-extraction
        shutdownDaemon();
      };
      signal.addEventListener("abort", daemonAbortHandler, { once: true });
    }
    // Build and send JSON request (one request per line on the daemon's stdin)
    const jsonRequest = JSON.stringify({
      archive: archivePath,
      target: targetDir,
      conflict: mode,
      backend: "auto",
      passwords: passwordCandidates
    });
    try {
      daemonProcess!.stdin!.write(jsonRequest + "\n");
    } catch (error) {
      // stdin is gone (daemon crashed between availability check and write):
      // fail this request and tear the daemon down for a clean restart.
      finishDaemonRequest({
        ok: false, missingCommand: false, missingRuntime: false,
        aborted: false, timedOut: false,
        errorText: `Daemon stdin write failed: ${String(error)}`,
        usedPassword: "", backend: ""
      });
      shutdownDaemon();
    }
  });
}
async function runJvmExtractCommand(
layout: JvmExtractorLayout, layout: JvmExtractorLayout,
archivePath: string, archivePath: string,
targetDir: string, targetDir: string,
@ -1408,29 +1006,6 @@ async function runJvmExtractCommand(
}); });
} }
// Try persistent daemon first — saves ~5s JVM boot per archive
if (isDaemonAvailable(layout)) {
logger.info(`JVM Daemon: Sofort verfügbar, sende Request für ${path.basename(archivePath)} (pwCandidates=${passwordCandidates.length})`);
return sendDaemonRequest(archivePath, targetDir, conflictMode, passwordCandidates, onArchiveProgress, signal, timeoutMs);
}
// Daemon exists but is still booting or busy — wait up to 15s for it
if (daemonProcess) {
const reason = !daemonReady ? "booting" : "busy";
const waitStartedAt = Date.now();
logger.info(`JVM Daemon: Warte auf ${reason} Daemon für ${path.basename(archivePath)}...`);
const ready = await waitForDaemonReady(15_000, signal);
const waitedMs = Date.now() - waitStartedAt;
if (ready) {
logger.info(`JVM Daemon: Bereit nach ${waitedMs}ms — sende Request für ${path.basename(archivePath)}`);
return sendDaemonRequest(archivePath, targetDir, conflictMode, passwordCandidates, onArchiveProgress, signal, timeoutMs);
}
logger.warn(`JVM Daemon: Timeout nach ${waitedMs}ms beim Warten — Fallback auf neuen Prozess für ${path.basename(archivePath)}`);
}
// Fallback: spawn a new JVM process (daemon not available after waiting)
logger.info(`JVM Spawn: Neuer Prozess für ${path.basename(archivePath)}`);
const mode = effectiveConflictMode(conflictMode); const mode = effectiveConflictMode(conflictMode);
// Each JVM process needs its own temp dir so parallel SevenZipJBinding // Each JVM process needs its own temp dir so parallel SevenZipJBinding
// instances don't fight over the same native DLL file lock. // instances don't fight over the same native DLL file lock.
@ -1439,9 +1014,8 @@ async function runJvmExtractCommand(
const args = [ const args = [
"-Dfile.encoding=UTF-8", "-Dfile.encoding=UTF-8",
`-Djava.io.tmpdir=${jvmTmpDir}`, `-Djava.io.tmpdir=${jvmTmpDir}`,
"-Xms512m", "-Xms32m",
"-Xmx8g", "-Xmx512m",
"-XX:+UseSerialGC",
"-cp", "-cp",
layout.classPath, layout.classPath,
JVM_EXTRACTOR_MAIN_CLASS, JVM_EXTRACTOR_MAIN_CLASS,
@ -1673,7 +1247,7 @@ async function resolveExtractorCommandInternal(): Promise<string> {
resolveFailureAt = 0; resolveFailureAt = 0;
} }
const candidates = nativeExtractorCandidates(); const candidates = winRarCandidates();
for (const command of candidates) { for (const command of candidates) {
if (isAbsoluteCommand(command) && !fs.existsSync(command)) { if (isAbsoluteCommand(command) && !fs.existsSync(command)) {
continue; continue;
@ -1726,11 +1300,7 @@ async function runExternalExtract(
): Promise<string> { ): Promise<string> {
const timeoutMs = await computeExtractTimeoutMs(archivePath); const timeoutMs = await computeExtractTimeoutMs(archivePath);
const backendMode = extractorBackendMode(); const backendMode = extractorBackendMode();
const archiveName = path.basename(archivePath);
const totalStartedAt = Date.now();
let jvmFailureReason = ""; let jvmFailureReason = "";
let fallbackFromJvm = false;
logger.info(`Extract-Backend Start: archive=${archiveName}, mode=${backendMode}, pwCandidates=${passwordCandidates.length}, timeoutMs=${timeoutMs}, hybrid=${hybridMode}`);
await fs.promises.mkdir(targetDir, { recursive: true }); await fs.promises.mkdir(targetDir, { recursive: true });
@ -1751,8 +1321,7 @@ async function runExternalExtract(
logger.warn(`JVM-Extractor nicht verfügbar, nutze Legacy-Extractor: ${path.basename(archivePath)}`); logger.warn(`JVM-Extractor nicht verfügbar, nutze Legacy-Extractor: ${path.basename(archivePath)}`);
} else { } else {
const quotedPasswords = passwordCandidates.map((p) => p === "" ? '""' : `"${p}"`); const quotedPasswords = passwordCandidates.map((p) => p === "" ? '""' : `"${p}"`);
logger.info(`JVM-Extractor aktiv (${layout.rootDir}): ${archiveName}, ${passwordCandidates.length} Passwörter: [${quotedPasswords.join(", ")}]`); logger.info(`JVM-Extractor aktiv (${layout.rootDir}): ${path.basename(archivePath)}, ${passwordCandidates.length} Passwörter: [${quotedPasswords.join(", ")}]`);
const jvmStartedAt = Date.now();
const jvmResult = await runJvmExtractCommand( const jvmResult = await runJvmExtractCommand(
layout, layout,
archivePath, archivePath,
@ -1763,12 +1332,9 @@ async function runExternalExtract(
signal, signal,
timeoutMs timeoutMs
); );
const jvmMs = Date.now() - jvmStartedAt;
logger.info(`JVM-Extractor Ergebnis: archive=${archiveName}, ok=${jvmResult.ok}, ms=${jvmMs}, timedOut=${jvmResult.timedOut}, aborted=${jvmResult.aborted}, backend=${jvmResult.backend || "unknown"}, usedPassword=${jvmResult.usedPassword ? "yes" : "no"}`);
if (jvmResult.ok) { if (jvmResult.ok) {
logger.info(`Entpackt via ${jvmResult.backend || "jvm"}: ${archiveName}`); logger.info(`Entpackt via ${jvmResult.backend || "jvm"}: ${path.basename(archivePath)}`);
logger.info(`Extract-Backend Ende: archive=${archiveName}, backend=${jvmResult.backend || "jvm"}, mode=${backendMode}, ms=${Date.now() - totalStartedAt}, fallbackFromJvm=false, usedPassword=${jvmResult.usedPassword ? "yes" : "no"}`);
return jvmResult.usedPassword; return jvmResult.usedPassword;
} }
if (jvmResult.aborted) { if (jvmResult.aborted) {
@ -1779,7 +1345,6 @@ async function runExternalExtract(
} }
jvmFailureReason = jvmResult.errorText || "JVM-Extractor fehlgeschlagen"; jvmFailureReason = jvmResult.errorText || "JVM-Extractor fehlgeschlagen";
fallbackFromJvm = true;
const jvmFailureLower = jvmFailureReason.toLowerCase(); const jvmFailureLower = jvmFailureReason.toLowerCase();
const isUnsupportedMethod = jvmFailureReason.includes("UNSUPPORTEDMETHOD"); const isUnsupportedMethod = jvmFailureReason.includes("UNSUPPORTEDMETHOD");
const isCodecError = jvmFailureLower.includes("registered codecs") const isCodecError = jvmFailureLower.includes("registered codecs")
@ -1808,7 +1373,6 @@ async function runExternalExtract(
const effectiveTargetDir = subst ? `${subst.drive}:\\` : targetDir; const effectiveTargetDir = subst ? `${subst.drive}:\\` : targetDir;
const command = await resolveExtractorCommand(); const command = await resolveExtractorCommand();
const legacyStartedAt = Date.now();
const password = await runExternalExtractInner( const password = await runExternalExtractInner(
command, command,
archivePath, archivePath,
@ -1821,14 +1385,12 @@ async function runExternalExtract(
hybridMode, hybridMode,
onPasswordAttempt onPasswordAttempt
); );
const legacyMs = Date.now() - legacyStartedAt;
const extractorName = path.basename(command).replace(/\.exe$/i, ""); const extractorName = path.basename(command).replace(/\.exe$/i, "");
if (jvmFailureReason) { if (jvmFailureReason) {
logger.info(`Entpackt via legacy/${extractorName} (nach JVM-Fehler): ${archiveName}`); logger.info(`Entpackt via legacy/${extractorName} (nach JVM-Fehler): ${path.basename(archivePath)}`);
} else { } else {
logger.info(`Entpackt via legacy/${extractorName}: ${archiveName}`); logger.info(`Entpackt via legacy/${extractorName}: ${path.basename(archivePath)}`);
} }
logger.info(`Extract-Backend Ende: archive=${archiveName}, backend=legacy/${extractorName}, mode=${backendMode}, ms=${Date.now() - totalStartedAt}, legacyMs=${legacyMs}, fallbackFromJvm=${fallbackFromJvm}, usedPassword=${password ? "yes" : "no"}`);
return password; return password;
} finally { } finally {
if (subst) removeSubstMapping(subst); if (subst) removeSubstMapping(subst);
@ -1867,7 +1429,6 @@ async function runExternalExtractInner(
onArchiveProgress?.(0); onArchiveProgress?.(0);
} }
passwordAttempt += 1; passwordAttempt += 1;
const attemptStartedAt = Date.now();
const quotedPw = password === "" ? '""' : `"${password}"`; const quotedPw = password === "" ? '""' : `"${password}"`;
logger.info(`Legacy-Passwort-Versuch ${passwordAttempt}/${passwords.length} für ${path.basename(archivePath)}: ${quotedPw}`); logger.info(`Legacy-Passwort-Versuch ${passwordAttempt}/${passwords.length} für ${path.basename(archivePath)}: ${quotedPw}`);
if (passwords.length > 1) { if (passwords.length > 1) {
@ -1876,14 +1437,11 @@ async function runExternalExtractInner(
let args = buildExternalExtractArgs(command, archivePath, targetDir, conflictMode, password, usePerformanceFlags, hybridMode); let args = buildExternalExtractArgs(command, archivePath, targetDir, conflictMode, password, usePerformanceFlags, hybridMode);
let result = await runExtractCommand(command, args, (chunk) => { let result = await runExtractCommand(command, args, (chunk) => {
const parsed = parseProgressPercent(chunk); const parsed = parseProgressPercent(chunk);
if (parsed === null) { if (parsed === null || parsed <= bestPercent) {
return; return;
} }
const next = nextArchivePercent(bestPercent, parsed); bestPercent = parsed;
if (next !== bestPercent) { onArchiveProgress?.(bestPercent);
bestPercent = next;
onArchiveProgress?.(bestPercent);
}
}, signal, timeoutMs); }, signal, timeoutMs);
if (!result.ok && usePerformanceFlags && isUnsupportedExtractorSwitchError(result.errorText)) { if (!result.ok && usePerformanceFlags && isUnsupportedExtractorSwitchError(result.errorText)) {
@ -1893,22 +1451,14 @@ async function runExternalExtractInner(
args = buildExternalExtractArgs(command, archivePath, targetDir, conflictMode, password, false, hybridMode); args = buildExternalExtractArgs(command, archivePath, targetDir, conflictMode, password, false, hybridMode);
result = await runExtractCommand(command, args, (chunk) => { result = await runExtractCommand(command, args, (chunk) => {
const parsed = parseProgressPercent(chunk); const parsed = parseProgressPercent(chunk);
if (parsed === null) { if (parsed === null || parsed <= bestPercent) {
return; return;
} }
const next = nextArchivePercent(bestPercent, parsed); bestPercent = parsed;
if (next !== bestPercent) { onArchiveProgress?.(bestPercent);
bestPercent = next;
onArchiveProgress?.(bestPercent);
}
}, signal, timeoutMs); }, signal, timeoutMs);
} }
logger.info(
`Legacy-Passwort-Versuch Ergebnis: archive=${path.basename(archivePath)}, attempt=${passwordAttempt}/${passwords.length}, ` +
`ms=${Date.now() - attemptStartedAt}, ok=${result.ok}, timedOut=${result.timedOut}, missingCommand=${result.missingCommand}, bestPercent=${bestPercent}`
);
if (result.ok) { if (result.ok) {
onArchiveProgress?.(100); onArchiveProgress?.(100);
return password; return password;
@ -2376,14 +1926,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
if (options.conflictMode === "ask") { if (options.conflictMode === "ask") {
logger.warn("Extract-ConflictMode 'ask' wird ohne Prompt als 'skip' behandelt"); logger.warn("Extract-ConflictMode 'ask' wird ohne Prompt als 'skip' behandelt");
} }
const passwordCacheKey = packagePasswordCacheKey(options.packageDir, options.packageId);
const passwordCacheLabel = packagePasswordCacheLabel(options.packageDir, options.packageId);
let passwordCandidates = archivePasswords(options.passwordList || ""); let passwordCandidates = archivePasswords(options.passwordList || "");
const cachedPackagePassword = readCachedPackagePassword(passwordCacheKey);
if (cachedPackagePassword) {
passwordCandidates = prioritizePassword(passwordCandidates, cachedPackagePassword);
logger.info(`Passwort-Cache Treffer: ${passwordCacheLabel}, bekanntes Passwort wird zuerst getestet`);
}
const resumeCompleted = await readExtractResumeState(options.packageDir, options.packageId); const resumeCompleted = await readExtractResumeState(options.packageDir, options.packageId);
const resumeCompletedAtStart = resumeCompleted.size; const resumeCompletedAtStart = resumeCompleted.size;
const allCandidateNames = new Set(allCandidates.map((archivePath) => archiveNameKey(path.basename(archivePath)))); const allCandidateNames = new Set(allCandidates.map((archivePath) => archiveNameKey(path.basename(archivePath))));
@ -2402,7 +1945,6 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
let extracted = candidates.length - pendingCandidates.length; let extracted = candidates.length - pendingCandidates.length;
let failed = 0; let failed = 0;
let lastError = ""; let lastError = "";
let learnedPassword = cachedPackagePassword;
const extractedArchives = new Set<string>(); const extractedArchives = new Set<string>();
for (const archivePath of candidates) { for (const archivePath of candidates) {
if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) { if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) {
@ -2410,41 +1952,23 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
} }
} }
const rememberLearnedPassword = (password: string): void => {
const normalized = String(password || "").trim();
if (!normalized) {
return;
}
const changed = normalized !== learnedPassword;
learnedPassword = normalized;
passwordCandidates = prioritizePassword(passwordCandidates, normalized);
writeCachedPackagePassword(passwordCacheKey, normalized);
if (changed) {
logger.info(`Passwort-Cache Update: ${passwordCacheLabel}, neues Passwort gelernt`);
}
};
const emitProgress = ( const emitProgress = (
current: number, current: number,
archiveName: string, archiveName: string,
phase: "extracting" | "done", phase: "extracting" | "done",
archivePercent?: number, archivePercent?: number,
elapsedMs?: number, elapsedMs?: number,
pwInfo?: { passwordAttempt?: number; passwordTotal?: number; passwordFound?: boolean }, pwInfo?: { passwordAttempt?: number; passwordTotal?: number; passwordFound?: boolean }
archiveInfo?: { archiveDone?: boolean; archiveSuccess?: boolean }
): void => { ): void => {
if (!options.onProgress) { if (!options.onProgress) {
return; return;
} }
const total = Math.max(1, candidates.length); const total = Math.max(1, candidates.length);
let percent = Math.max(0, Math.min(100, Math.floor((current / total) * 100))); let percent = Math.max(0, Math.min(100, Math.floor((current / total) * 100)));
let normalizedArchivePercent = Math.max(0, Math.min(100, Number(archivePercent ?? 0)));
if (phase !== "done") { if (phase !== "done") {
const boundedCurrent = Math.max(0, Math.min(total, current)); const boundedCurrent = Math.max(0, Math.min(total, current));
if (archiveInfo?.archiveDone !== true && normalizedArchivePercent >= 100) { const boundedArchivePercent = Math.max(0, Math.min(100, Number(archivePercent ?? 0)));
normalizedArchivePercent = 99; percent = Math.max(0, Math.min(100, Math.floor(((boundedCurrent + (boundedArchivePercent / 100)) / total) * 100)));
}
percent = Math.max(0, Math.min(100, Math.floor(((boundedCurrent + (normalizedArchivePercent / 100)) / total) * 100)));
} }
try { try {
options.onProgress({ options.onProgress({
@ -2452,10 +1976,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
total, total,
percent, percent,
archiveName, archiveName,
archivePercent: normalizedArchivePercent, archivePercent,
elapsedMs, elapsedMs,
phase, phase,
...(archiveInfo || {}),
...(pwInfo || {}) ...(pwInfo || {})
}); });
} catch (error) { } catch (error) {
@ -2470,13 +1993,12 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
// rather than leaving them as "Entpacken - Ausstehend" until all extraction finishes. // rather than leaving them as "Entpacken - Ausstehend" until all extraction finishes.
for (const archivePath of candidates) { for (const archivePath of candidates) {
if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) { if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) {
emitProgress(extracted, path.basename(archivePath), "extracting", 100, 0, undefined, { archiveDone: true, archiveSuccess: true }); emitProgress(extracted, path.basename(archivePath), "extracting", 100, 0);
} }
} }
const maxParallel = Math.max(1, options.maxParallel || 1); const maxParallel = Math.max(1, options.maxParallel || 1);
let noExtractorEncountered = false; let noExtractorEncountered = false;
let lastArchiveFinishedAt: number | null = null;
const extractSingleArchive = async (archivePath: string): Promise<void> => { const extractSingleArchive = async (archivePath: string): Promise<void> => {
if (options.signal?.aborted) { if (options.signal?.aborted) {
@ -2488,36 +2010,17 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
const archiveName = path.basename(archivePath); const archiveName = path.basename(archivePath);
const archiveResumeKey = archiveNameKey(archiveName); const archiveResumeKey = archiveNameKey(archiveName);
const archiveStartedAt = Date.now(); const archiveStartedAt = Date.now();
const startedCurrent = extracted + failed;
if (lastArchiveFinishedAt !== null) {
logger.info(`Extract-Trace Gap: before=${archiveName}, prevDoneToStartMs=${archiveStartedAt - lastArchiveFinishedAt}, progress=${startedCurrent}/${candidates.length}`);
}
let archivePercent = 0; let archivePercent = 0;
let reached99At: number | null = null;
let archiveOutcome: "success" | "failed" | "skipped" = "failed";
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, 0); emitProgress(extracted + failed, archiveName, "extracting", archivePercent, 0);
const pulseTimer = setInterval(() => { const pulseTimer = setInterval(() => {
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt); emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, 1100); }, 1100);
const hybrid = Boolean(options.hybridMode); const hybrid = Boolean(options.hybridMode);
// Before the first successful extraction, filename-derived candidates are useful. // Insert archive-filename-derived passwords after "" but before custom passwords
// After a known password is learned, try that first to avoid per-archive delays.
const filenamePasswords = archiveFilenamePasswords(archiveName); const filenamePasswords = archiveFilenamePasswords(archiveName);
const nonEmptyBasePasswords = passwordCandidates.filter((p) => p !== ""); const archivePasswordCandidates = filenamePasswords.length > 0
const orderedNonEmpty = learnedPassword ? Array.from(new Set(["", ...filenamePasswords, ...passwordCandidates.filter((p) => p !== "")]))
? [learnedPassword, ...nonEmptyBasePasswords.filter((p) => p !== learnedPassword), ...filenamePasswords] : passwordCandidates;
: [...filenamePasswords, ...nonEmptyBasePasswords];
const archivePasswordCandidates = learnedPassword
? Array.from(new Set([...orderedNonEmpty, ""]))
: Array.from(new Set(["", ...orderedNonEmpty]));
const reportArchiveProgress = (value: number): void => {
archivePercent = nextArchivePercent(archivePercent, value);
if (reached99At === null && archivePercent >= 99) {
reached99At = Date.now();
logger.info(`Extract-Trace 99%: archive=${archiveName}, elapsedMs=${reached99At - archiveStartedAt}`);
}
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
};
// Validate generic .001 splits via file signature before attempting extraction // Validate generic .001 splits via file signature before attempting extraction
const isGenericSplit = /\.\d{3}$/i.test(archiveName) && !/\.(zip|7z)\.\d{3}$/i.test(archiveName); const isGenericSplit = /\.\d{3}$/i.test(archiveName) && !/\.(zip|7z)\.\d{3}$/i.test(archiveName);
@ -2530,10 +2033,6 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
extractedArchives.add(archivePath); extractedArchives.add(archivePath);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId); await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
clearInterval(pulseTimer); clearInterval(pulseTimer);
archiveOutcome = "skipped";
const skippedAt = Date.now();
lastArchiveFinishedAt = skippedAt;
logger.info(`Extract-Trace Archiv Übersprungen: archive=${archiveName}, ms=${skippedAt - archiveStartedAt}, reason=no-signature`);
return; return;
} }
logger.info(`Generische Split-Datei verifiziert (Signatur: ${sig}): ${archiveName}`); logger.info(`Generische Split-Datei verifiziert (Signatur: ${sig}): ${archiveName}`);
@ -2556,9 +2055,10 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
if (preferExternal) { if (preferExternal) {
try { try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => { const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
reportArchiveProgress(value); archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid, onPwAttempt); }, options.signal, hybrid, onPwAttempt);
rememberLearnedPassword(usedPassword); passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} catch (error) { } catch (error) {
if (isNoExtractorError(String(error))) { if (isNoExtractorError(String(error))) {
await extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal); await extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
@ -2576,9 +2076,10 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
} }
try { try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => { const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
reportArchiveProgress(value); archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid, onPwAttempt); }, options.signal, hybrid, onPwAttempt);
rememberLearnedPassword(usedPassword); passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} catch (externalError) { } catch (externalError) {
if (isNoExtractorError(String(externalError)) || isUnsupportedArchiveFormatError(String(externalError))) { if (isNoExtractorError(String(externalError)) || isUnsupportedArchiveFormatError(String(externalError))) {
throw error; throw error;
@ -2589,25 +2090,21 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
} }
} else { } else {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => { const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
reportArchiveProgress(value); archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid, onPwAttempt); }, options.signal, hybrid, onPwAttempt);
rememberLearnedPassword(usedPassword); passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} }
extracted += 1; extracted += 1;
extractedArchives.add(archivePath); extractedArchives.add(archivePath);
resumeCompleted.add(archiveResumeKey); resumeCompleted.add(archiveResumeKey);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId); await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
logger.info(`Entpacken erfolgreich: ${path.basename(archivePath)}`); logger.info(`Entpacken erfolgreich: ${path.basename(archivePath)}`);
archiveOutcome = "success";
const successAt = Date.now();
const tailAfter99Ms = reached99At ? (successAt - reached99At) : -1;
logger.info(`Extract-Trace Archiv Erfolg: archive=${archiveName}, totalMs=${successAt - archiveStartedAt}, tailAfter99Ms=${tailAfter99Ms >= 0 ? tailAfter99Ms : "n/a"}, pwCandidates=${archivePasswordCandidates.length}`);
lastArchiveFinishedAt = successAt;
archivePercent = 100; archivePercent = 100;
if (hasManyPasswords) { if (hasManyPasswords) {
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, { passwordFound: true }, { archiveDone: true, archiveSuccess: true }); emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, { passwordFound: true });
} else { } else {
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, undefined, { archiveDone: true, archiveSuccess: true }); emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
} }
} catch (error) { } catch (error) {
const errorText = String(error); const errorText = String(error);
@ -2618,25 +2115,12 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
lastError = errorText; lastError = errorText;
const errorCategory = classifyExtractionError(errorText); const errorCategory = classifyExtractionError(errorText);
logger.error(`Entpack-Fehler ${path.basename(archivePath)} [${errorCategory}]: ${errorText}`); logger.error(`Entpack-Fehler ${path.basename(archivePath)} [${errorCategory}]: ${errorText}`);
if (errorCategory === "wrong_password" && learnedPassword) { emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
learnedPassword = "";
clearCachedPackagePassword(passwordCacheKey);
logger.warn(`Passwort-Cache verworfen: ${passwordCacheLabel} (wrong_password)`);
}
const failedAt = Date.now();
const tailAfter99Ms = reached99At ? (failedAt - reached99At) : -1;
logger.warn(`Extract-Trace Archiv Fehler: archive=${archiveName}, totalMs=${failedAt - archiveStartedAt}, tailAfter99Ms=${tailAfter99Ms >= 0 ? tailAfter99Ms : "n/a"}, category=${errorCategory}`);
lastArchiveFinishedAt = failedAt;
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt, undefined, { archiveDone: true, archiveSuccess: false });
if (isNoExtractorError(errorText)) { if (isNoExtractorError(errorText)) {
noExtractorEncountered = true; noExtractorEncountered = true;
} }
} finally { } finally {
clearInterval(pulseTimer); clearInterval(pulseTimer);
if (lastArchiveFinishedAt === null || lastArchiveFinishedAt < archiveStartedAt) {
lastArchiveFinishedAt = Date.now();
}
logger.info(`Extract-Trace Archiv Ende: archive=${archiveName}, outcome=${archiveOutcome}, elapsedMs=${lastArchiveFinishedAt - archiveStartedAt}, percent=${archivePercent}`);
} }
}; };
@ -2761,11 +2245,11 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
} catch (zipErr) { } catch (zipErr) {
if (!shouldFallbackToExternalZip(zipErr)) throw zipErr; if (!shouldFallbackToExternalZip(zipErr)) throw zipErr;
const usedPw = await runExternalExtract(nestedArchive, options.targetDir, options.conflictMode, passwordCandidates, (v) => { nestedPercent = Math.max(nestedPercent, v); }, options.signal, hybrid); const usedPw = await runExternalExtract(nestedArchive, options.targetDir, options.conflictMode, passwordCandidates, (v) => { nestedPercent = Math.max(nestedPercent, v); }, options.signal, hybrid);
rememberLearnedPassword(usedPw); passwordCandidates = prioritizePassword(passwordCandidates, usedPw);
} }
} else { } else {
const usedPw = await runExternalExtract(nestedArchive, options.targetDir, options.conflictMode, passwordCandidates, (v) => { nestedPercent = Math.max(nestedPercent, v); }, options.signal, hybrid); const usedPw = await runExternalExtract(nestedArchive, options.targetDir, options.conflictMode, passwordCandidates, (v) => { nestedPercent = Math.max(nestedPercent, v); }, options.signal, hybrid);
rememberLearnedPassword(usedPw); passwordCandidates = prioritizePassword(passwordCandidates, usedPw);
} }
extracted += 1; extracted += 1;
nestedExtracted += 1; nestedExtracted += 1;

View File

@ -7,7 +7,7 @@ import { IPC_CHANNELS } from "../shared/ipc";
import { getLogFilePath, logger } from "./logger"; import { getLogFilePath, logger } from "./logger";
import { APP_NAME } from "./constants"; import { APP_NAME } from "./constants";
import { extractHttpLinksFromText } from "./utils"; import { extractHttpLinksFromText } from "./utils";
import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor"; import { cleanupStaleSubstDrives } from "./extractor";
/* ── IPC validation helpers ────────────────────────────────────── */ /* ── IPC validation helpers ────────────────────────────────────── */
function validateString(value: unknown, name: string): string { function validateString(value: unknown, name: string): string {
@ -515,7 +515,6 @@ app.on("before-quit", () => {
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; } if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
stopClipboardWatcher(); stopClipboardWatcher();
destroyTray(); destroyTray();
shutdownDaemon();
try { try {
controller.shutdown(); controller.shutdown();
} catch (error) { } catch (error) {

View File

@ -65,111 +65,6 @@ describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm b
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true); expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
}); });
// Verifies that the JVM extraction backend reports per-archive progress:
// the "preparing" and "extracting" phases must both appear, "extracting"
// updates must carry the archive's file name, and the final update for the
// archive must report archivePercent === 100.
it("emits progress callbacks with archiveName and percent", async () => {
// Force the JVM backend for this test run (read by extractPackageArchives).
process.env.RD_EXTRACT_BACKEND = "jvm";
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
const targetDir = path.join(root, "out");
fs.mkdirSync(packageDir, { recursive: true });
// Create a ZIP with some content to trigger progress
const zipPath = path.join(packageDir, "progress-test.zip");
const zip = new AdmZip();
zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
zip.writeZip(zipPath);
// Collected snapshot of every onProgress invocation, in order.
const progressUpdates: Array<{
archiveName: string;
percent: number;
phase: string;
archivePercent?: number;
}> = [];
const result = await extractPackageArchives({
packageDir,
targetDir,
cleanupMode: "none",
conflictMode: "overwrite",
removeLinks: false,
removeSamples: false,
onProgress: (update) => {
progressUpdates.push({
archiveName: update.archiveName,
percent: update.percent,
phase: update.phase,
archivePercent: update.archivePercent,
});
},
});
expect(result.extracted).toBe(1);
expect(result.failed).toBe(0);
// Should have at least preparing, extracting, and done phases
const phases = new Set(progressUpdates.map((u) => u.phase));
expect(phases.has("preparing")).toBe(true);
expect(phases.has("extracting")).toBe(true);
// Extracting phase should include the archive name
const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
expect(extracting.length).toBeGreaterThan(0);
// Should end at 100%
const lastExtracting = extracting[extracting.length - 1];
expect(lastExtracting.archivePercent).toBe(100);
// Files should exist
expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
});
// Verifies that two independent archives in the same package are both
// extracted and that "extracting" progress updates are emitted under each
// archive's own file name (i.e. progress is reported per archive, not
// just once per package).
it("extracts multiple archives sequentially with progress for each", async () => {
// Force the JVM backend for this test run (read by extractPackageArchives).
process.env.RD_EXTRACT_BACKEND = "jvm";
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
const targetDir = path.join(root, "out");
fs.mkdirSync(packageDir, { recursive: true });
// Create two separate ZIP archives
const zip1 = new AdmZip();
zip1.addFile("episode01.txt", Buffer.from("ep1 content"));
zip1.writeZip(path.join(packageDir, "archive1.zip"));
const zip2 = new AdmZip();
zip2.addFile("episode02.txt", Buffer.from("ep2 content"));
zip2.writeZip(path.join(packageDir, "archive2.zip"));
// Distinct archive names observed during the "extracting" phase.
const archiveNames = new Set<string>();
const result = await extractPackageArchives({
packageDir,
targetDir,
cleanupMode: "none",
conflictMode: "overwrite",
removeLinks: false,
removeSamples: false,
onProgress: (update) => {
if (update.phase === "extracting" && update.archiveName) {
archiveNames.add(update.archiveName);
}
},
});
expect(result.extracted).toBe(2);
expect(result.failed).toBe(0);
// Both archive names should have appeared in progress
expect(archiveNames.has("archive1.zip")).toBe(true);
expect(archiveNames.has("archive2.zip")).toBe(true);
// Both files extracted
expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
});
it("respects ask/skip conflict mode in jvm backend", async () => { it("respects ask/skip conflict mode in jvm backend", async () => {
process.env.RD_EXTRACT_BACKEND = "jvm"; process.env.RD_EXTRACT_BACKEND = "jvm";

View File

@ -1,188 +0,0 @@
import { describe, expect, it } from "vitest";
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
// Loose stand-in for a download-manager item as used by these tests.
// Only targetPath and fileName are given explicit types; the index
// signature lets fixtures carry extra fields (id, status, ...) without
// the test needing the full project item type.
// NOTE(review): presumably resolveArchiveItemsFromList reads targetPath
// first and falls back to fileName (see the "falls back to fileName"
// test below) — confirm against the implementation.
type MinimalItem = {
targetPath?: string;
fileName?: string;
[key: string]: unknown;
};
// Builds minimal completed-download fixtures for a fake package folder.
// Every name becomes one item whose targetPath points into a Windows-style
// "C:\Downloads\Package" directory and whose fileName/id echo the name.
function makeItems(names: string[]): MinimalItem[] {
  const items: MinimalItem[] = [];
  for (const name of names) {
    items.push({
      targetPath: `C:\\Downloads\\Package\\${name}`,
      fileName: name,
      id: name,
      status: "completed",
    });
  }
  return items;
}
// Unit tests for resolveArchiveItemsFromList: given the archive name seen
// on disk and the package's item list, it must return every item belonging
// to the same multi-volume archive set. Cases cover the supported naming
// schemes (.partN.rar, .rar/.rNN, split .zip/.7z/.NNN), case-insensitive
// matching, the stem-based and single-item fallbacks, and negative cases.
describe("resolveArchiveItemsFromList", () => {
// ── Multipart RAR (.partN.rar) ──
it("matches multipart .part1.rar archives", () => {
const items = makeItems([
"Movie.part1.rar",
"Movie.part2.rar",
"Movie.part3.rar",
"Other.rar",
]);
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
// "Other.rar" shares the extension but not the part-set stem → excluded.
expect(result).toHaveLength(3);
expect(result.map((i: any) => i.fileName)).toEqual([
"Movie.part1.rar",
"Movie.part2.rar",
"Movie.part3.rar",
]);
});
it("matches multipart .part01.rar archives (zero-padded)", () => {
const items = makeItems([
"Film.part01.rar",
"Film.part02.rar",
"Film.part10.rar",
"Unrelated.zip",
]);
const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
expect(result).toHaveLength(3);
});
// ── Old-style RAR (.rar + .r00, .r01, etc.) ──
it("matches old-style .rar + .rNN volumes", () => {
const items = makeItems([
"Archive.rar",
"Archive.r00",
"Archive.r01",
"Archive.r02",
"Other.zip",
]);
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
expect(result).toHaveLength(4);
});
// ── Single RAR ──
it("matches a single .rar file", () => {
const items = makeItems(["SingleFile.rar", "Other.mkv"]);
const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
expect(result).toHaveLength(1);
expect((result[0] as any).fileName).toBe("SingleFile.rar");
});
// ── Split ZIP ──
it("matches split .zip.NNN files", () => {
const items = makeItems([
"Data.zip",
"Data.zip.001",
"Data.zip.002",
"Data.zip.003",
]);
// The bare .zip is part of the same split set and must be included.
const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
expect(result).toHaveLength(4);
});
// ── Split 7z ──
it("matches split .7z.NNN files", () => {
const items = makeItems([
"Backup.7z.001",
"Backup.7z.002",
]);
const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
expect(result).toHaveLength(2);
});
// ── Generic .NNN splits ──
it("matches generic .NNN split files", () => {
const items = makeItems([
"video.001",
"video.002",
"video.003",
]);
const result = resolveArchiveItemsFromList("video.001", items as any);
expect(result).toHaveLength(3);
});
// ── Exact filename match ──
it("matches a single .zip by exact name", () => {
const items = makeItems(["myarchive.zip", "other.rar"]);
const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
expect(result).toHaveLength(1);
expect((result[0] as any).fileName).toBe("myarchive.zip");
});
// ── Case insensitivity ──
it("matches case-insensitively", () => {
const items = makeItems([
"MOVIE.PART1.RAR",
"MOVIE.PART2.RAR",
]);
const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
expect(result).toHaveLength(2);
});
// ── Stem-based fallback ──
it("uses stem-based fallback when exact patterns fail", () => {
// Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
// but the disk file is "Movie.part1.rar"
const items = makeItems([
"Movie.rar",
]);
// The archive on disk is "Movie.part1.rar" but there's no item matching the
// .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
// stem fallback: "movie" starts with "movie" and ends with .rar
expect(result).toHaveLength(1);
});
// ── Single item fallback ──
it("returns single archive item when no pattern matches", () => {
const items = makeItems(["totally-different-name.rar"]);
const result = resolveArchiveItemsFromList("Original.rar", items as any);
// Single item in list with archive extension → return it
expect(result).toHaveLength(1);
});
// ── Empty when no match ──
it("returns empty when items have no archive extensions", () => {
const items = makeItems(["video.mkv", "subtitle.srt"]);
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
expect(result).toHaveLength(0);
});
// ── Items without targetPath ──
it("falls back to fileName when targetPath is missing", () => {
// Items built inline (not via makeItems) so targetPath is absent.
const items = [
{ fileName: "Movie.part1.rar", id: "1", status: "completed" },
{ fileName: "Movie.part2.rar", id: "2", status: "completed" },
];
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
expect(result).toHaveLength(2);
});
// ── Multiple archives, should not cross-match ──
it("does not cross-match different archive groups", () => {
const items = makeItems([
"Episode.S01E01.part1.rar",
"Episode.S01E01.part2.rar",
"Episode.S01E02.part1.rar",
"Episode.S01E02.part2.rar",
]);
const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
expect(result1).toHaveLength(2);
expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);
const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
expect(result2).toHaveLength(2);
expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
});
});