Compare commits

...

10 Commits

Author SHA1 Message Date
Sucukdeluxe
72642351d0 Release v1.6.55 2026-03-05 06:25:20 +01:00
Sucukdeluxe
51a01ea03f Use bulk IInArchive.extract() for ~8x faster extraction, fix archive item resolution
- Replace extractSlow() per-item extraction with IInArchive.extract() bulk API
  in 7-Zip-JBinding. Solid RAR archives no longer re-decode from the beginning
  for each item, bringing extraction speed close to native WinRAR/7z.exe (~375 MB/s
  instead of ~43 MB/s).

- Add BulkExtractCallback implementing both IArchiveExtractCallback and
  ICryptoGetTextPassword for proper password handling during bulk extraction.

- Fix resolveArchiveItemsFromList with multi-level fallback matching:
  1. Pattern match (multipart RAR, split ZIP/7z, generic splits)
  2. Exact filename match (case-insensitive)
  3. Stem-based fuzzy match (handles debrid service filename modifications)
  4. Single-item archive fallback

- Simplify caching from Set+Array workaround back to simple Map<string, T>
  (the original "caching failure" was caused by resolveArchiveItemsFromList
  returning empty arrays, not by Map/Set/Object data structure bugs).

- Add comprehensive tests for archive item resolution (14 test cases)
  and JVM extraction progress callbacks (2 test cases).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 06:24:12 +01:00
Sucukdeluxe
d9a78ea837 Release v1.6.54 2026-03-05 05:59:50 +01:00
Sucukdeluxe
5b221d5bd5 Add persistent JVM daemon for extraction, fix caching with Set+Array
- JVM extractor now supports --daemon mode: starts once, processes
  multiple archives via stdin JSON protocol, eliminating ~5s JVM boot
  per archive
- TypeScript side: daemon manager starts JVM once, sends requests via
  stdin, falls back to spawning new process if daemon is busy
- Fix extraction progress caching: replaced Object.create(null) + in
  operator with Set<string> + linear Array scan — both Map.has() and
  the in operator mysteriously failed to find keys that were just set
- Daemon auto-shutdown on app quit via shutdownDaemon() in before-quit

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 05:59:13 +01:00
Sucukdeluxe
c36549ca69 Release v1.6.53 2026-03-05 05:48:41 +01:00
Sucukdeluxe
7e79bef8da Increase JVM extractor heap to 8GB max / 512MB initial
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 05:48:02 +01:00
Sucukdeluxe
e3b4a4ba19 Release v1.6.52 2026-03-05 05:42:55 +01:00
Sucukdeluxe
30d216c7ca Fix extraction progress caching and JVM tuning
- Replace Map-based archive item cache with plain Object.create(null)
  to work around mysterious Map.has() returning false despite set()
  being called with the same key — this caused resolveArchiveItems
  to run on every 1.1s pulse instead of being cached, preventing
  extraction progress (Entpacken X%) from ever showing in the UI
- Apply same fix to both hybrid and full extraction paths
- Increase JVM heap from 512MB to 1GB for better extraction throughput
- Use SerialGC for faster JVM startup on short-lived extract processes
- Add download lifecycle logging (package add + item download start)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 05:42:23 +01:00
Sucukdeluxe
d80483adc2 Add download lifecycle logging for better diagnostics
- Log when packages are added (count + names)
- Log when individual item downloads start (filename, size, provider)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 05:32:44 +01:00
Sucukdeluxe
1cda391dfe Fix extraction speed and UI label updates
- Change OS priority from IDLE/BELOW_NORMAL to NORMAL/BELOW_NORMAL so
  extraction runs at full speed (matching manual 7-Zip/WinRAR performance)
- Use "high" priority in both hybrid and full extraction paths
- Increase hybrid extraction threads from hardcoded 2 to dynamic
  calculation (half CPU count, min 2, max 8)
- Fix emitState forced emit being silently dropped when a non-forced
  timer was already pending — forced emits now always replace pending
  timers to ensure immediate UI feedback during extraction transitions

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 05:28:42 +01:00
18 changed files with 1078 additions and 191 deletions

View File

@ -1,6 +1,6 @@
{ {
"name": "real-debrid-downloader", "name": "real-debrid-downloader",
"version": "1.6.50", "version": "1.6.55",
"description": "Desktop downloader", "description": "Desktop downloader",
"main": "build/main/main/main.js", "main": "build/main/main/main.js",
"author": "Sucukdeluxe", "author": "Sucukdeluxe",

View File

@ -3,7 +3,9 @@ package com.sucukdeluxe.extractor;
import net.lingala.zip4j.ZipFile; import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException; import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.FileHeader; import net.lingala.zip4j.model.FileHeader;
import net.sf.sevenzipjbinding.ExtractAskMode;
import net.sf.sevenzipjbinding.ExtractOperationResult; import net.sf.sevenzipjbinding.ExtractOperationResult;
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
import net.sf.sevenzipjbinding.IArchiveOpenCallback; import net.sf.sevenzipjbinding.IArchiveOpenCallback;
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback; import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
import net.sf.sevenzipjbinding.IInArchive; import net.sf.sevenzipjbinding.IInArchive;
@ -51,6 +53,10 @@ public final class JBindExtractorMain {
} }
public static void main(String[] args) { public static void main(String[] args) {
if (args.length == 1 && "--daemon".equals(args[0])) {
runDaemon();
return;
}
int exit = 1; int exit = 1;
try { try {
ExtractionRequest request = parseArgs(args); ExtractionRequest request = parseArgs(args);
@ -65,6 +71,127 @@ public final class JBindExtractorMain {
System.exit(exit); System.exit(exit);
} }
private static void runDaemon() {
System.out.println("RD_DAEMON_READY");
System.out.flush();
java.io.BufferedReader reader = new java.io.BufferedReader(
new java.io.InputStreamReader(System.in, StandardCharsets.UTF_8));
try {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.isEmpty()) {
continue;
}
int exitCode = 1;
try {
ExtractionRequest request = parseDaemonRequest(line);
exitCode = runExtraction(request);
} catch (IllegalArgumentException error) {
emitError("Argumentfehler: " + safeMessage(error));
exitCode = 2;
} catch (Throwable error) {
emitError(safeMessage(error));
exitCode = 1;
}
System.out.println("RD_REQUEST_DONE " + exitCode);
System.out.flush();
}
} catch (IOException ignored) {
// stdin closed — parent process exited
}
}
private static ExtractionRequest parseDaemonRequest(String jsonLine) {
// Minimal JSON parsing without external dependencies.
// Expected format: {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
ExtractionRequest request = new ExtractionRequest();
request.archiveFile = new File(extractJsonString(jsonLine, "archive"));
request.targetDir = new File(extractJsonString(jsonLine, "target"));
String conflict = extractJsonString(jsonLine, "conflict");
if (conflict.length() > 0) {
request.conflictMode = ConflictMode.fromValue(conflict);
}
String backend = extractJsonString(jsonLine, "backend");
if (backend.length() > 0) {
request.backend = Backend.fromValue(backend);
}
// Parse passwords array
int pwStart = jsonLine.indexOf("\"passwords\"");
if (pwStart >= 0) {
int arrStart = jsonLine.indexOf('[', pwStart);
int arrEnd = jsonLine.indexOf(']', arrStart);
if (arrStart >= 0 && arrEnd > arrStart) {
String arrContent = jsonLine.substring(arrStart + 1, arrEnd);
int idx = 0;
while (idx < arrContent.length()) {
int qStart = arrContent.indexOf('"', idx);
if (qStart < 0) break;
int qEnd = findClosingQuote(arrContent, qStart + 1);
if (qEnd < 0) break;
request.passwords.add(unescapeJsonString(arrContent.substring(qStart + 1, qEnd)));
idx = qEnd + 1;
}
}
}
if (request.archiveFile == null || !request.archiveFile.exists() || !request.archiveFile.isFile()) {
throw new IllegalArgumentException("Archiv nicht gefunden: " +
(request.archiveFile == null ? "null" : request.archiveFile.getAbsolutePath()));
}
if (request.targetDir == null) {
throw new IllegalArgumentException("--target fehlt");
}
return request;
}
private static String extractJsonString(String json, String key) {
String search = "\"" + key + "\"";
int keyIdx = json.indexOf(search);
if (keyIdx < 0) return "";
int colonIdx = json.indexOf(':', keyIdx + search.length());
if (colonIdx < 0) return "";
int qStart = json.indexOf('"', colonIdx + 1);
if (qStart < 0) return "";
int qEnd = findClosingQuote(json, qStart + 1);
if (qEnd < 0) return "";
return unescapeJsonString(json.substring(qStart + 1, qEnd));
}
private static int findClosingQuote(String s, int from) {
for (int i = from; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '\\') {
i++; // skip escaped character
continue;
}
if (c == '"') return i;
}
return -1;
}
private static String unescapeJsonString(String s) {
if (s.indexOf('\\') < 0) return s;
StringBuilder sb = new StringBuilder(s.length());
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '\\' && i + 1 < s.length()) {
char next = s.charAt(i + 1);
switch (next) {
case '"': sb.append('"'); i++; break;
case '\\': sb.append('\\'); i++; break;
case '/': sb.append('/'); i++; break;
case 'n': sb.append('\n'); i++; break;
case 'r': sb.append('\r'); i++; break;
case 't': sb.append('\t'); i++; break;
default: sb.append(c); break;
}
} else {
sb.append(c);
}
}
return sb.toString();
}
private static int runExtraction(ExtractionRequest request) throws Exception { private static int runExtraction(ExtractionRequest request) throws Exception {
List<String> passwords = normalizePasswords(request.passwords); List<String> passwords = normalizePasswords(request.passwords);
Exception lastError = null; Exception lastError = null;
@ -235,110 +362,99 @@ public final class JBindExtractorMain {
try { try {
context = openSevenZipArchive(request.archiveFile, password); context = openSevenZipArchive(request.archiveFile, password);
IInArchive archive = context.archive; IInArchive archive = context.archive;
ISimpleInArchive simple = archive.getSimpleInterface(); int itemCount = archive.getNumberOfItems();
ISimpleInArchiveItem[] items = simple.getArchiveItems(); if (itemCount <= 0) {
if (items == null) {
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath()); throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
} }
// Pre-scan: collect file indices, sizes, output paths, and detect encryption
long totalUnits = 0; long totalUnits = 0;
boolean encrypted = false; boolean encrypted = false;
for (ISimpleInArchiveItem item : items) { List<Integer> fileIndices = new ArrayList<Integer>();
if (item == null || item.isFolder()) { List<File> outputFiles = new ArrayList<File>();
continue; List<Long> fileSizes = new ArrayList<Long>();
}
try {
encrypted = encrypted || item.isEncrypted();
} catch (Throwable ignored) {
// ignore encrypted flag read issues
}
totalUnits += safeSize(item.getSize());
}
ProgressTracker progress = new ProgressTracker(totalUnits);
progress.emitStart();
Set<String> reserved = new HashSet<String>(); Set<String> reserved = new HashSet<String>();
for (ISimpleInArchiveItem item : items) {
if (item == null) {
continue;
}
String entryName = normalizeEntryName(item.getPath(), "item-" + item.getItemIndex()); for (int i = 0; i < itemCount; i++) {
if (item.isFolder()) { Boolean isFolder = (Boolean) archive.getProperty(i, PropID.IS_FOLDER);
String entryPath = (String) archive.getProperty(i, PropID.PATH);
String entryName = normalizeEntryName(entryPath, "item-" + i);
if (Boolean.TRUE.equals(isFolder)) {
File dir = resolveDirectory(request.targetDir, entryName); File dir = resolveDirectory(request.targetDir, entryName);
ensureDirectory(dir); ensureDirectory(dir);
reserved.add(pathKey(dir)); reserved.add(pathKey(dir));
continue; continue;
} }
long itemUnits = safeSize(item.getSize()); try {
Boolean isEncrypted = (Boolean) archive.getProperty(i, PropID.ENCRYPTED);
encrypted = encrypted || Boolean.TRUE.equals(isEncrypted);
} catch (Throwable ignored) {
// ignore encrypted flag read issues
}
Long rawSize = (Long) archive.getProperty(i, PropID.SIZE);
long itemSize = safeSize(rawSize);
totalUnits += itemSize;
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved); File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
if (output == null) { fileIndices.add(i);
progress.advance(itemUnits); outputFiles.add(output); // null if skipped
continue; fileSizes.add(itemSize);
} }
ensureDirectory(output.getParentFile()); if (fileIndices.isEmpty()) {
rejectSymlink(output); // All items are folders or skipped
final FileOutputStream out = new FileOutputStream(output); ProgressTracker progress = new ProgressTracker(1);
final long[] remaining = new long[] { itemUnits }; progress.emitStart();
boolean extractionSuccess = false; progress.emitDone();
return;
}
ProgressTracker progress = new ProgressTracker(totalUnits);
progress.emitStart();
// Build index array for bulk extract
int[] indices = new int[fileIndices.size()];
for (int i = 0; i < fileIndices.size(); i++) {
indices[i] = fileIndices.get(i);
}
// Map from archive index to our position in fileIndices/outputFiles
Map<Integer, Integer> indexToPos = new HashMap<Integer, Integer>();
for (int i = 0; i < fileIndices.size(); i++) {
indexToPos.put(fileIndices.get(i), i);
}
// Bulk extraction state
final boolean encryptedFinal = encrypted;
final String effectivePassword = password == null ? "" : password;
final File[] currentOutput = new File[1];
final FileOutputStream[] currentStream = new FileOutputStream[1];
final boolean[] currentSuccess = new boolean[1];
final long[] currentRemaining = new long[1];
final Throwable[] firstError = new Throwable[1];
final int[] currentPos = new int[] { -1 };
try { try {
ExtractOperationResult result = item.extractSlow(new ISequentialOutStream() { archive.extract(indices, false, new BulkExtractCallback(
@Override archive, indexToPos, fileIndices, outputFiles, fileSizes,
public int write(byte[] data) throws SevenZipException { progress, encryptedFinal, effectivePassword, currentOutput,
if (data == null || data.length == 0) { currentStream, currentSuccess, currentRemaining, currentPos, firstError
return 0; ));
}
try {
out.write(data);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
}
long accounted = Math.min(remaining[0], (long) data.length);
remaining[0] -= accounted;
progress.advance(accounted);
return data.length;
}
}, password == null ? "" : password);
if (remaining[0] > 0) {
progress.advance(remaining[0]);
}
if (result != ExtractOperationResult.OK) {
if (isPasswordFailure(result, encrypted)) {
throw new WrongPasswordException(new IOException("Falsches Passwort"));
}
throw new IOException("7z-Fehler: " + result.name());
}
extractionSuccess = true;
} catch (SevenZipException error) { } catch (SevenZipException error) {
if (looksLikeWrongPassword(error, encrypted)) { if (looksLikeWrongPassword(error, encryptedFinal)) {
throw new WrongPasswordException(error); throw new WrongPasswordException(error);
} }
throw error; throw error;
} finally {
try {
out.close();
} catch (Throwable ignored) {
}
if (!extractionSuccess && output.exists()) {
try {
output.delete();
} catch (Throwable ignored) {
}
}
} }
try { if (firstError[0] != null) {
java.util.Date modified = item.getLastWriteTime(); if (firstError[0] instanceof WrongPasswordException) {
if (modified != null) { throw (WrongPasswordException) firstError[0];
output.setLastModified(modified.getTime());
}
} catch (Throwable ignored) {
// best effort
} }
throw (Exception) firstError[0];
} }
progress.emitDone(); progress.emitDone();
@ -763,6 +879,176 @@ public final class JBindExtractorMain {
private final List<String> passwords = new ArrayList<String>(); private final List<String> passwords = new ArrayList<String>();
} }
/**
* Bulk extraction callback that implements both IArchiveExtractCallback and
* ICryptoGetTextPassword. Using the bulk IInArchive.extract() API instead of
 * per-item extractSlow() is critical for performance — solid RAR archives
* otherwise re-decode from the beginning for every single item.
*/
private static final class BulkExtractCallback implements IArchiveExtractCallback, ICryptoGetTextPassword {
private final IInArchive archive;
private final Map<Integer, Integer> indexToPos;
private final List<Integer> fileIndices;
private final List<File> outputFiles;
private final List<Long> fileSizes;
private final ProgressTracker progress;
private final boolean encrypted;
private final String password;
private final File[] currentOutput;
private final FileOutputStream[] currentStream;
private final boolean[] currentSuccess;
private final long[] currentRemaining;
private final int[] currentPos;
private final Throwable[] firstError;
BulkExtractCallback(IInArchive archive, Map<Integer, Integer> indexToPos,
List<Integer> fileIndices, List<File> outputFiles, List<Long> fileSizes,
ProgressTracker progress, boolean encrypted, String password,
File[] currentOutput, FileOutputStream[] currentStream,
boolean[] currentSuccess, long[] currentRemaining, int[] currentPos,
Throwable[] firstError) {
this.archive = archive;
this.indexToPos = indexToPos;
this.fileIndices = fileIndices;
this.outputFiles = outputFiles;
this.fileSizes = fileSizes;
this.progress = progress;
this.encrypted = encrypted;
this.password = password;
this.currentOutput = currentOutput;
this.currentStream = currentStream;
this.currentSuccess = currentSuccess;
this.currentRemaining = currentRemaining;
this.currentPos = currentPos;
this.firstError = firstError;
}
@Override
public String cryptoGetTextPassword() {
return password;
}
@Override
public void setTotal(long total) {
// 7z reports total compressed bytes; we track uncompressed via ProgressTracker
}
@Override
public void setCompleted(long complete) {
// Not used — we track per-write progress
}
@Override
public ISequentialOutStream getStream(int index, ExtractAskMode extractAskMode) throws SevenZipException {
closeCurrentStream();
Integer pos = indexToPos.get(index);
if (pos == null) {
return null;
}
currentPos[0] = pos;
currentOutput[0] = outputFiles.get(pos);
currentSuccess[0] = false;
currentRemaining[0] = fileSizes.get(pos);
if (extractAskMode != ExtractAskMode.EXTRACT) {
currentOutput[0] = null;
return null;
}
if (currentOutput[0] == null) {
progress.advance(currentRemaining[0]);
return null;
}
try {
ensureDirectory(currentOutput[0].getParentFile());
rejectSymlink(currentOutput[0]);
currentStream[0] = new FileOutputStream(currentOutput[0]);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Erstellen: " + error.getMessage(), error);
}
return new ISequentialOutStream() {
@Override
public int write(byte[] data) throws SevenZipException {
if (data == null || data.length == 0) {
return 0;
}
try {
currentStream[0].write(data);
} catch (IOException error) {
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
}
long accounted = Math.min(currentRemaining[0], (long) data.length);
currentRemaining[0] -= accounted;
progress.advance(accounted);
return data.length;
}
};
}
@Override
public void prepareOperation(ExtractAskMode extractAskMode) {
// no-op
}
@Override
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
if (currentRemaining[0] > 0) {
progress.advance(currentRemaining[0]);
currentRemaining[0] = 0;
}
if (result == ExtractOperationResult.OK) {
currentSuccess[0] = true;
closeCurrentStream();
if (currentPos[0] >= 0 && currentOutput[0] != null) {
try {
int archiveIndex = fileIndices.get(currentPos[0]);
java.util.Date modified = (java.util.Date) archive.getProperty(archiveIndex, PropID.LAST_MODIFICATION_TIME);
if (modified != null) {
currentOutput[0].setLastModified(modified.getTime());
}
} catch (Throwable ignored) {
// best effort
}
}
} else {
closeCurrentStream();
if (currentOutput[0] != null && currentOutput[0].exists()) {
try {
currentOutput[0].delete();
} catch (Throwable ignored) {
}
}
if (firstError[0] == null) {
if (isPasswordFailure(result, encrypted)) {
firstError[0] = new WrongPasswordException(new IOException("Falsches Passwort"));
} else {
firstError[0] = new IOException("7z-Fehler: " + result.name());
}
}
}
}
private void closeCurrentStream() {
if (currentStream[0] != null) {
try {
currentStream[0].close();
} catch (Throwable ignored) {
}
currentStream[0] = null;
}
if (!currentSuccess[0] && currentOutput[0] != null && currentOutput[0].exists()) {
try {
currentOutput[0].delete();
} catch (Throwable ignored) {
}
}
}
}
private static final class WrongPasswordException extends Exception { private static final class WrongPasswordException extends Exception {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;

View File

@ -751,60 +751,86 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
return null; return null;
} }
function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] { export function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
const entryLower = archiveName.toLowerCase(); const entryLower = archiveName.toLowerCase();
// Helper: get item basename (try targetPath first, then fileName)
const itemBaseName = (item: DownloadItem): string =>
path.basename(item.targetPath || item.fileName || "");
// Try pattern-based matching first (for multipart archives)
let pattern: RegExp | null = null;
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/); const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
if (multipartMatch) { if (multipartMatch) {
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i"); pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
} }
if (!pattern) {
const rarMatch = entryLower.match(/^(.*)\.rar$/); const rarMatch = entryLower.match(/^(.*)\.rar$/);
if (rarMatch) { if (rarMatch) {
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i"); pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
} }
// Split ZIP (e.g., movie.zip.001, movie.zip.002) }
if (!pattern) {
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/); const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
if (zipSplitMatch) { if (zipSplitMatch) {
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i"); pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
} }
// Split 7z (e.g., movie.7z.001, movie.7z.002) }
if (!pattern) {
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/); const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
if (sevenSplitMatch) { if (sevenSplitMatch) {
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i"); pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
} }
// Generic .NNN splits (e.g., movie.001, movie.002) }
if (!pattern && /^(.*)\.001$/.test(entryLower) && !/\.(zip|7z)\.001$/.test(entryLower)) {
const genericSplitMatch = entryLower.match(/^(.*)\.001$/); const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
if (genericSplitMatch && !/\.(zip|7z)\.001$/.test(entryLower)) { if (genericSplitMatch) {
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i"); pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
return items.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
} }
return items.filter((item) => { }
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
return name === entryLower; // Attempt 1: Pattern match (handles multipart archives)
if (pattern) {
const matched = items.filter((item) => pattern!.test(itemBaseName(item)));
if (matched.length > 0) return matched;
}
// Attempt 2: Exact filename match (case-insensitive)
const exactMatch = items.filter((item) => itemBaseName(item).toLowerCase() === entryLower);
if (exactMatch.length > 0) return exactMatch;
// Attempt 3: Stem-based fuzzy match — strip archive extensions and compare stems.
// Handles cases where debrid services modify filenames slightly.
const archiveStem = entryLower
.replace(/\.part\d+\.rar$/i, "")
.replace(/\.r\d{2,3}$/i, "")
.replace(/\.rar$/i, "")
.replace(/\.(zip|7z)\.\d{3}$/i, "")
.replace(/\.\d{3}$/i, "")
.replace(/\.(zip|7z)$/i, "");
if (archiveStem.length > 3) {
const stemMatch = items.filter((item) => {
const name = itemBaseName(item).toLowerCase();
return name.startsWith(archiveStem) && /\.(rar|r\d{2,3}|zip|7z|\d{3})$/i.test(name);
}); });
if (stemMatch.length > 0) return stemMatch;
}
// Attempt 4: If only one item in the list and one archive — return it as a best-effort match.
// This handles single-file packages where the filename may have been modified.
if (items.length === 1) {
const singleName = itemBaseName(items[0]).toLowerCase();
if (/\.(rar|zip|7z|\d{3})$/i.test(singleName)) {
return items;
}
}
return [];
} }
function retryDelayWithJitter(attempt: number, baseMs: number): number { function retryDelayWithJitter(attempt: number, baseMs: number): number {
@ -1384,6 +1410,10 @@ export class DownloadManager extends EventEmitter {
addedPackages += 1; addedPackages += 1;
} }
if (addedPackages > 0 || addedLinks > 0) {
const pkgNames = packages.filter((p) => p.links.length > 0).map((p) => p.name).join(", ");
logger.info(`Pakete hinzugefügt: ${addedPackages} Paket(e), ${addedLinks} Link(s) [${pkgNames}]`);
}
this.persistSoon(); this.persistSoon();
this.emitState(); this.emitState();
if (unresolvedByLink.size > 0) { if (unresolvedByLink.size > 0) {
@ -3570,14 +3600,16 @@ export class DownloadManager extends EventEmitter {
this.emit("state", this.getSnapshot()); this.emit("state", this.getSnapshot());
return; return;
} }
// Too soon — schedule deferred forced emit // Too soon — replace any pending timer with a shorter forced-emit timer
if (!this.stateEmitTimer) { if (this.stateEmitTimer) {
clearTimeout(this.stateEmitTimer);
this.stateEmitTimer = null;
}
this.stateEmitTimer = setTimeout(() => { this.stateEmitTimer = setTimeout(() => {
this.stateEmitTimer = null; this.stateEmitTimer = null;
this.lastStateEmitAt = nowMs(); this.lastStateEmitAt = nowMs();
this.emit("state", this.getSnapshot()); this.emit("state", this.getSnapshot());
}, MIN_FORCE_GAP_MS - sinceLastEmit); }, MIN_FORCE_GAP_MS - sinceLastEmit);
}
return; return;
} }
if (this.stateEmitTimer) { if (this.stateEmitTimer) {
@ -4734,6 +4766,7 @@ export class DownloadManager extends EventEmitter {
item.fullStatus = `Starte... (${unrestricted.providerLabel})`; item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
item.updatedAt = nowMs(); item.updatedAt = nowMs();
this.emitState(); this.emitState();
logger.info(`Download Start: ${item.fileName} (${humanSize(unrestricted.fileSize || 0)}) via ${unrestricted.providerLabel}, pkg=${pkg.name}`);
const maxAttempts = maxItemAttempts; const maxAttempts = maxItemAttempts;
let done = false; let done = false;
@ -6359,9 +6392,9 @@ export class DownloadManager extends EventEmitter {
const resolveArchiveItems = (archiveName: string): DownloadItem[] => const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
resolveArchiveItemsFromList(archiveName, items); resolveArchiveItemsFromList(archiveName, items);
// Track multiple active archives for parallel hybrid extraction // Track archives for parallel hybrid extraction progress
const activeHybridArchiveMap = new Map<string, DownloadItem[]>(); const hybridResolvedItems = new Map<string, DownloadItem[]>();
const hybridArchiveStartTimes = new Map<string, number>(); const hybridStartTimes = new Map<string, number>();
let hybridLastEmitAt = 0; let hybridLastEmitAt = 0;
// Mark items based on whether their archive is actually ready for extraction. // Mark items based on whether their archive is actually ready for extraction.
@ -6400,7 +6433,7 @@ export class DownloadManager extends EventEmitter {
packageId, packageId,
hybridMode: true, hybridMode: true,
maxParallel: this.settings.maxParallelExtract || 2, maxParallel: this.settings.maxParallelExtract || 2,
extractCpuPriority: this.settings.extractCpuPriority, extractCpuPriority: "high",
onProgress: (progress) => { onProgress: (progress) => {
if (progress.phase === "preparing") { if (progress.phase === "preparing") {
pkg.postProcessLabel = progress.archiveName || "Vorbereiten..."; pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
@ -6408,26 +6441,21 @@ export class DownloadManager extends EventEmitter {
return; return;
} }
if (progress.phase === "done") { if (progress.phase === "done") {
// Do NOT mark remaining archives as "Done" here — some may have hybridResolvedItems.clear();
// failed. The post-extraction code (result.failed check) will hybridStartTimes.clear();
// assign the correct label. Only clear the tracking maps.
activeHybridArchiveMap.clear();
hybridArchiveStartTimes.clear();
return; return;
} }
if (progress.archiveName) { if (progress.archiveName) {
// Resolve items for this archive if not yet tracked // Resolve items for this archive if not yet tracked
if (!activeHybridArchiveMap.has(progress.archiveName)) { if (!hybridResolvedItems.has(progress.archiveName)) {
const resolved = resolveArchiveItems(progress.archiveName); const resolved = resolveArchiveItems(progress.archiveName);
activeHybridArchiveMap.set(progress.archiveName, resolved); hybridResolvedItems.set(progress.archiveName, resolved);
hybridArchiveStartTimes.set(progress.archiveName, nowMs()); hybridStartTimes.set(progress.archiveName, nowMs());
if (resolved.length === 0) { if (resolved.length === 0) {
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.slice(0, 5).map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`); logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
} else { } else {
logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`); logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
// Immediately label the matched items and force emit so the UI
// transitions from "Ausstehend" to the extraction label right away.
const initLabel = `Entpacken 0% · ${progress.archiveName}`; const initLabel = `Entpacken 0% · ${progress.archiveName}`;
const initAt = nowMs(); const initAt = nowMs();
for (const entry of resolved) { for (const entry of resolved) {
@ -6440,12 +6468,12 @@ export class DownloadManager extends EventEmitter {
this.emitState(true); this.emitState(true);
} }
} }
const archItems = activeHybridArchiveMap.get(progress.archiveName)!; const archItems = hybridResolvedItems.get(progress.archiveName) || [];
// If archive is at 100%, mark its items as done and remove from active // If archive is at 100%, mark its items as done and remove from active
if (Number(progress.archivePercent ?? 0) >= 100) { if (Number(progress.archivePercent ?? 0) >= 100) {
const doneAt = nowMs(); const doneAt = nowMs();
const startedAt = hybridArchiveStartTimes.get(progress.archiveName) || doneAt; const startedAt = hybridStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = formatExtractDone(doneAt - startedAt); const doneLabel = formatExtractDone(doneAt - startedAt);
for (const entry of archItems) { for (const entry of archItems) {
if (!isExtractedLabel(entry.fullStatus)) { if (!isExtractedLabel(entry.fullStatus)) {
@ -6453,8 +6481,8 @@ export class DownloadManager extends EventEmitter {
entry.updatedAt = doneAt; entry.updatedAt = doneAt;
} }
} }
activeHybridArchiveMap.delete(progress.archiveName); hybridResolvedItems.delete(progress.archiveName);
hybridArchiveStartTimes.delete(progress.archiveName); hybridStartTimes.delete(progress.archiveName);
// Show transitional label while next archive initializes // Show transitional label while next archive initializes
const done = progress.current + 1; const done = progress.current + 1;
if (done < progress.total) { if (done < progress.total) {
@ -6746,9 +6774,9 @@ export class DownloadManager extends EventEmitter {
} }
}, extractTimeoutMs); }, extractTimeoutMs);
try { try {
// Track multiple active archives for parallel extraction // Track archives for parallel extraction progress
const activeArchiveItemsMap = new Map<string, DownloadItem[]>(); const fullResolvedItems = new Map<string, DownloadItem[]>();
const archiveStartTimes = new Map<string, number>(); const fullStartTimes = new Map<string, number>();
const result = await extractPackageArchives({ const result = await extractPackageArchives({
packageDir: pkg.outputDir, packageDir: pkg.outputDir,
@ -6762,8 +6790,8 @@ export class DownloadManager extends EventEmitter {
packageId, packageId,
skipPostCleanup: true, skipPostCleanup: true,
maxParallel: this.settings.maxParallelExtract || 2, maxParallel: this.settings.maxParallelExtract || 2,
// All downloads finished — use highest configured priority so extraction // All downloads finished — use NORMAL OS priority so extraction runs at
// isn't starved. "high" maps to BELOW_NORMAL instead of the default IDLE. // full speed (matching manual 7-Zip/WinRAR speed).
extractCpuPriority: "high", extractCpuPriority: "high",
onProgress: (progress) => { onProgress: (progress) => {
if (progress.phase === "preparing") { if (progress.phase === "preparing") {
@ -6772,26 +6800,22 @@ export class DownloadManager extends EventEmitter {
return; return;
} }
if (progress.phase === "done") { if (progress.phase === "done") {
// Do NOT mark remaining archives as "Done" here — some may have fullResolvedItems.clear();
// failed. The post-extraction code (result.failed check) will fullStartTimes.clear();
// assign the correct label. Only clear the tracking maps.
activeArchiveItemsMap.clear();
archiveStartTimes.clear();
emitExtractStatus("Entpacken 100%", true); emitExtractStatus("Entpacken 100%", true);
return; return;
} }
if (progress.archiveName) { if (progress.archiveName) {
// Resolve items for this archive if not yet tracked // Resolve items for this archive if not yet tracked
if (!activeArchiveItemsMap.has(progress.archiveName)) { if (!fullResolvedItems.has(progress.archiveName)) {
const resolved = resolveArchiveItems(progress.archiveName); const resolved = resolveArchiveItems(progress.archiveName);
activeArchiveItemsMap.set(progress.archiveName, resolved); fullResolvedItems.set(progress.archiveName, resolved);
archiveStartTimes.set(progress.archiveName, nowMs()); fullStartTimes.set(progress.archiveName, nowMs());
if (resolved.length === 0) { if (resolved.length === 0) {
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.slice(0, 5).map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`); logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
} else { } else {
logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`); logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
// Immediately label items and force emit for instant UI feedback
const initLabel = `Entpacken 0% · ${progress.archiveName}`; const initLabel = `Entpacken 0% · ${progress.archiveName}`;
const initAt = nowMs(); const initAt = nowMs();
for (const entry of resolved) { for (const entry of resolved) {
@ -6803,12 +6827,12 @@ export class DownloadManager extends EventEmitter {
emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true); emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true);
} }
} }
const archiveItems = activeArchiveItemsMap.get(progress.archiveName)!; const archiveItems = fullResolvedItems.get(progress.archiveName) || [];
// If archive is at 100%, mark its items as done and remove from active // If archive is at 100%, mark its items as done and remove from active
if (Number(progress.archivePercent ?? 0) >= 100) { if (Number(progress.archivePercent ?? 0) >= 100) {
const doneAt = nowMs(); const doneAt = nowMs();
const startedAt = archiveStartTimes.get(progress.archiveName) || doneAt; const startedAt = fullStartTimes.get(progress.archiveName) || doneAt;
const doneLabel = formatExtractDone(doneAt - startedAt); const doneLabel = formatExtractDone(doneAt - startedAt);
for (const entry of archiveItems) { for (const entry of archiveItems) {
if (!isExtractedLabel(entry.fullStatus)) { if (!isExtractedLabel(entry.fullStatus)) {
@ -6816,8 +6840,8 @@ export class DownloadManager extends EventEmitter {
entry.updatedAt = doneAt; entry.updatedAt = doneAt;
} }
} }
activeArchiveItemsMap.delete(progress.archiveName); fullResolvedItems.delete(progress.archiveName);
archiveStartTimes.delete(progress.archiveName); fullStartTimes.delete(progress.archiveName);
// Show transitional label while next archive initializes // Show transitional label while next archive initializes
const done = progress.current + 1; const done = progress.current + 1;
if (done < progress.total) { if (done < progress.total) {

View File

@ -1,7 +1,7 @@
import fs from "node:fs"; import fs from "node:fs";
import path from "node:path"; import path from "node:path";
import os from "node:os"; import os from "node:os";
import { spawn, spawnSync } from "node:child_process"; import { spawn, spawnSync, type ChildProcess } from "node:child_process";
import AdmZip from "adm-zip"; import AdmZip from "adm-zip";
import { CleanupMode, ConflictMode } from "../shared/types"; import { CleanupMode, ConflictMode } from "../shared/types";
import { logger } from "./logger"; import { logger } from "./logger";
@ -600,8 +600,8 @@ function extractCpuBudgetFromPriority(priority?: string): number {
function extractOsPriority(priority?: string): number { function extractOsPriority(priority?: string): number {
switch (priority) { switch (priority) {
case "high": return os.constants.priority.PRIORITY_BELOW_NORMAL; case "high": return os.constants.priority.PRIORITY_NORMAL;
default: return os.constants.priority.PRIORITY_LOW; default: return os.constants.priority.PRIORITY_BELOW_NORMAL;
} }
} }
@ -615,10 +615,15 @@ function extractCpuBudgetPercent(priority?: string): number {
function extractorThreadSwitch(hybridMode = false, priority?: string): string { function extractorThreadSwitch(hybridMode = false, priority?: string): string {
if (hybridMode) { if (hybridMode) {
// 2 threads during hybrid extraction (download + extract simultaneously). // Use half the CPU budget during hybrid extraction to leave headroom for
// JDownloader 2 uses in-process 7-Zip-JBinding which naturally limits throughput // concurrent downloads. Falls back to at least 2 threads.
// to ~16 MB/s write. 2 UnRAR threads produce similar controlled disk load. const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
return "-mt2"; if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
return `-mt${Math.floor(envValue)}`;
}
const cpuCount = Math.max(1, os.cpus().length || 1);
const hybridThreads = Math.max(2, Math.min(8, Math.floor(cpuCount / 2)));
return `-mt${hybridThreads}`;
} }
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN); const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) { if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
@ -640,8 +645,8 @@ function lowerExtractProcessPriority(childPid: number | undefined, cpuPriority?:
return; return;
} }
try { try {
// Lowers CPU scheduling priority so extraction doesn't starve other processes. // Sets CPU scheduling priority for the extraction process.
// high → BELOW_NORMAL, middle/low → IDLE. I/O priority stays Normal (like JDownloader 2). // high → NORMAL (full speed), default → BELOW_NORMAL. I/O priority stays Normal.
os.setPriority(pid, extractOsPriority(cpuPriority)); os.setPriority(pid, extractOsPriority(cpuPriority));
} catch { } catch {
// ignore: priority lowering is best-effort // ignore: priority lowering is best-effort
@ -983,6 +988,274 @@ function parseJvmLine(
} }
} }
// ── Persistent JVM Daemon ──
// Keeps a single JVM process alive across multiple extraction requests,
// eliminating the ~5s JVM boot overhead per archive.
/**
 * Bookkeeping for the single request currently processed by the persistent
 * JVM daemon. The stdin protocol is strictly one request at a time
 * (tracked via the module-level `daemonBusy` flag).
 */
interface DaemonRequest {
  // Resolves the caller's promise from sendDaemonRequest(); that promise
  // is resolve-only — failures are encoded in JvmExtractResult, not thrown.
  resolve: (result: JvmExtractResult) => void;
  // Optional per-archive progress callback (percent, driven by parseJvmLine).
  onArchiveProgress?: (percent: number) => void;
  // Caller's abort signal; aborting tears the whole daemon down.
  signal?: AbortSignal;
  // Optional hard timeout for this request, in milliseconds.
  timeoutMs?: number;
  // Mutable parse state updated line-by-line as daemon output streams in.
  parseState: { bestPercent: number; usedPassword: string; backend: string; reportedError: string };
}
// ── Daemon module state (one persistent JVM daemon at a time) ──
// Handle of the running daemon process, or null when none is started.
let daemonProcess: ChildProcess | null = null;
// True once the daemon printed its "RD_DAEMON_READY" handshake on stdout.
let daemonReady = false;
// True while a request is in flight (the protocol is one request at a time).
let daemonBusy = false;
// The request currently being processed, if any.
let daemonCurrentRequest: DaemonRequest | null = null;
// Partial-line buffers: stdout/stderr chunks are split into complete lines,
// the trailing incomplete line stays buffered until the next chunk.
let daemonStdoutBuffer = "";
let daemonStderrBuffer = "";
// Capped capture of recent daemon output (via appendLimited), used to build
// error messages when a request fails.
let daemonOutput = "";
// Timer enforcing the per-request timeout, if one was configured.
let daemonTimeoutId: NodeJS.Timeout | null = null;
// Abort listener registered on the active request's signal, if any.
let daemonAbortHandler: (() => void) | null = null;
// Layout (java command + classpath) the daemon was started with.
let daemonLayout: JvmExtractorLayout | null = null;
export function shutdownDaemon(): void {
if (daemonProcess) {
try { daemonProcess.stdin?.end(); } catch { /* ignore */ }
try { killProcessTree(daemonProcess); } catch { /* ignore */ }
daemonProcess = null;
}
daemonReady = false;
daemonBusy = false;
daemonCurrentRequest = null;
daemonStdoutBuffer = "";
daemonStderrBuffer = "";
daemonOutput = "";
if (daemonTimeoutId) { clearTimeout(daemonTimeoutId); daemonTimeoutId = null; }
if (daemonAbortHandler) { daemonAbortHandler = null; }
daemonLayout = null;
}
/**
 * Completes the in-flight daemon request: clears all per-request state
 * (output buffers, timeout timer, abort listener) and then resolves the
 * caller's promise. No-op when no request is active.
 */
function finishDaemonRequest(result: JvmExtractResult): void {
  const active = daemonCurrentRequest;
  if (active === null) {
    return;
  }
  // Reset request-scoped state before resolving so the daemon is
  // immediately reusable for the next request.
  daemonCurrentRequest = null;
  daemonBusy = false;
  daemonStdoutBuffer = "";
  daemonStderrBuffer = "";
  daemonOutput = "";
  if (daemonTimeoutId !== null) {
    clearTimeout(daemonTimeoutId);
    daemonTimeoutId = null;
  }
  const abortHandler = daemonAbortHandler;
  if (active.signal && abortHandler) {
    active.signal.removeEventListener("abort", abortHandler);
    daemonAbortHandler = null;
  }
  active.resolve(result);
}
/**
 * Routes one complete stdout line from the daemon. Three cases:
 * the one-time readiness handshake, the per-request completion marker
 * ("RD_REQUEST_DONE <exitCode>"), or regular progress/status output that
 * is fed into parseJvmLine for the active request.
 */
function handleDaemonLine(line: string): void {
  const text = String(line || "").trim();
  if (text.length === 0) return;
  // Handshake: printed once after the daemon JVM has booted.
  if (text === "RD_DAEMON_READY") {
    daemonReady = true;
    logger.info("JVM Daemon bereit (persistent)");
    return;
  }
  // Completion marker carries the request's exit code.
  const donePrefix = "RD_REQUEST_DONE ";
  if (text.startsWith(donePrefix)) {
    const req = daemonCurrentRequest;
    if (!req) return;
    const code = parseInt(text.slice(donePrefix.length).trim(), 10);
    if (code === 0) {
      req.onArchiveProgress?.(100);
      finishDaemonRequest({
        ok: true, missingCommand: false, missingRuntime: false,
        aborted: false, timedOut: false, errorText: "",
        usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
      });
      return;
    }
    // Non-zero exit: prefer the error the daemon reported, fall back to the
    // captured output, then to the bare exit code.
    const message = cleanErrorText(req.parseState.reportedError || daemonOutput) || `Exit Code ${code}`;
    finishDaemonRequest({
      ok: false, missingCommand: false, missingRuntime: isJvmRuntimeMissingError(message),
      aborted: false, timedOut: false, errorText: message,
      usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
    });
    return;
  }
  // Everything else is progress/status output for the active request.
  if (daemonCurrentRequest) {
    parseJvmLine(text, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
  }
}
/**
 * Spawns the persistent JVM extraction daemon. Idempotent: returns true
 * immediately when a ready daemon already exists. Readiness is signalled
 * asynchronously ("RD_DAEMON_READY" on stdout), so right after a cold start
 * this returns true while `daemonReady` is still false — callers then fall
 * back to a one-shot JVM and the daemon is warm for subsequent archives.
 *
 * Fix: the "error"/"close" handlers now verify `daemonProcess === child`.
 * Without the guard, the asynchronous close event of a child killed by
 * shutdownDaemon() could arrive AFTER a new daemon was started and would
 * null out the new daemon's state, orphaning the fresh process.
 *
 * @returns true when a daemon process is running (or was just spawned),
 *          false when the spawn itself failed.
 */
function startDaemon(layout: JvmExtractorLayout): boolean {
  if (daemonProcess && daemonReady) return true;
  shutdownDaemon();
  // Each daemon needs its own temp dir so parallel SevenZipJBinding
  // instances don't fight over the same extracted native DLL.
  const jvmTmpDir = path.join(os.tmpdir(), `rd-extract-daemon-${crypto.randomUUID()}`);
  fs.mkdirSync(jvmTmpDir, { recursive: true });
  const args = [
    "-Dfile.encoding=UTF-8",
    `-Djava.io.tmpdir=${jvmTmpDir}`,
    "-Xms512m",
    "-Xmx8g",
    "-XX:+UseSerialGC",
    "-cp",
    layout.classPath,
    JVM_EXTRACTOR_MAIN_CLASS,
    "--daemon"
  ];
  try {
    const child = spawn(layout.javaCommand, args, {
      windowsHide: true,
      stdio: ["pipe", "pipe", "pipe"]
    });
    lowerExtractProcessPriority(child.pid, currentExtractCpuPriority);
    daemonProcess = child;
    daemonLayout = layout;
    child.stdout!.on("data", (chunk) => {
      const raw = String(chunk || "");
      daemonOutput = appendLimited(daemonOutput, raw);
      daemonStdoutBuffer += raw;
      // Process complete lines only; keep the trailing partial line buffered.
      const lines = daemonStdoutBuffer.split(/\r?\n/);
      daemonStdoutBuffer = lines.pop() || "";
      for (const line of lines) {
        handleDaemonLine(line);
      }
    });
    child.stderr!.on("data", (chunk) => {
      const raw = String(chunk || "");
      daemonOutput = appendLimited(daemonOutput, raw);
      daemonStderrBuffer += raw;
      const lines = daemonStderrBuffer.split(/\r?\n/);
      daemonStderrBuffer = lines.pop() || "";
      for (const line of lines) {
        if (daemonCurrentRequest) {
          parseJvmLine(line, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
        }
      }
    });
    child.on("error", () => {
      // Stale event from a previous daemon generation — ignore.
      if (daemonProcess !== child) return;
      if (daemonCurrentRequest) {
        finishDaemonRequest({
          ok: false, missingCommand: true, missingRuntime: true,
          aborted: false, timedOut: false, errorText: "Daemon process error",
          usedPassword: "", backend: ""
        });
      }
      shutdownDaemon();
    });
    child.on("close", () => {
      // Always clean up this child's temp dir, even for a stale close.
      fs.rm(jvmTmpDir, { recursive: true, force: true }, () => {});
      // Guard: after shutdownDaemon() + startDaemon(), the close of the OLD
      // child arrives late and must not clobber the NEW daemon's state.
      if (daemonProcess !== child) return;
      if (daemonCurrentRequest) {
        const req = daemonCurrentRequest;
        finishDaemonRequest({
          ok: false, missingCommand: false, missingRuntime: false,
          aborted: false, timedOut: false,
          errorText: cleanErrorText(req.parseState.reportedError || daemonOutput) || "Daemon process exited unexpectedly",
          usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
        });
      }
      daemonProcess = null;
      daemonReady = false;
      daemonBusy = false;
      daemonLayout = null;
    });
    logger.info(`JVM Daemon gestartet (PID ${child.pid})`);
    return true;
  } catch (error) {
    logger.warn(`JVM Daemon Start fehlgeschlagen: ${String(error)}`);
    return false;
  }
}
function isDaemonAvailable(layout: JvmExtractorLayout): boolean {
// Start daemon if not running yet
if (!daemonProcess || !daemonReady) {
startDaemon(layout);
}
return Boolean(daemonProcess && daemonReady && !daemonBusy);
}
/**
 * Sends one extraction request to the running daemon as a newline-delimited
 * JSON message on stdin, and resolves with the daemon's result. The promise
 * never rejects — every failure mode resolves with an error-shaped
 * JvmExtractResult.
 *
 * Timeout and abort both kill the daemon (shutdownDaemon): interrupting a
 * JVM extraction mid-flight is not attempted; a fresh daemon is started for
 * the next request instead.
 *
 * Fix: if the caller's AbortSignal is ALREADY aborted when this is called,
 * the "abort" event will never fire again, so the old code would hang until
 * the timeout (or daemon exit). We now resolve immediately in that case.
 */
function sendDaemonRequest(
  archivePath: string,
  targetDir: string,
  conflictMode: ConflictMode,
  passwordCandidates: string[],
  onArchiveProgress?: (percent: number) => void,
  signal?: AbortSignal,
  timeoutMs?: number
): Promise<JvmExtractResult> {
  return new Promise((resolve) => {
    const mode = effectiveConflictMode(conflictMode);
    const parseState = { bestPercent: 0, usedPassword: "", backend: "", reportedError: "" };
    // Pre-aborted signal: resolve right away without touching daemon state.
    if (signal?.aborted) {
      resolve({
        ok: false, missingCommand: false, missingRuntime: false,
        aborted: true, timedOut: false, errorText: "aborted:extract",
        usedPassword: "", backend: ""
      });
      return;
    }
    daemonBusy = true;
    daemonOutput = "";
    daemonCurrentRequest = { resolve, onArchiveProgress, signal, timeoutMs, parseState };
    // Set up timeout: on expiry, resolve as timed out, then kill the daemon
    // so the next request starts against a fresh process.
    if (timeoutMs && timeoutMs > 0) {
      daemonTimeoutId = setTimeout(() => {
        const req = daemonCurrentRequest;
        if (req) {
          finishDaemonRequest({
            ok: false, missingCommand: false, missingRuntime: false,
            aborted: false, timedOut: true,
            errorText: `Entpacken Timeout nach ${Math.ceil(timeoutMs / 1000)}s`,
            usedPassword: parseState.usedPassword, backend: parseState.backend
          });
        }
        shutdownDaemon();
      }, timeoutMs);
    }
    // Set up abort handler: resolve the caller first, then tear down.
    if (signal) {
      daemonAbortHandler = () => {
        const req = daemonCurrentRequest;
        if (req) {
          finishDaemonRequest({
            ok: false, missingCommand: false, missingRuntime: false,
            aborted: true, timedOut: false, errorText: "aborted:extract",
            usedPassword: parseState.usedPassword, backend: parseState.backend
          });
        }
        // Killing the daemon on abort is cleaner than interrupting mid-extraction.
        shutdownDaemon();
      };
      signal.addEventListener("abort", daemonAbortHandler, { once: true });
    }
    // Build and send the JSON request (one request per line).
    const jsonRequest = JSON.stringify({
      archive: archivePath,
      target: targetDir,
      conflict: mode,
      backend: "auto",
      passwords: passwordCandidates
    });
    try {
      daemonProcess!.stdin!.write(jsonRequest + "\n");
    } catch (error) {
      finishDaemonRequest({
        ok: false, missingCommand: false, missingRuntime: false,
        aborted: false, timedOut: false,
        errorText: `Daemon stdin write failed: ${String(error)}`,
        usedPassword: "", backend: ""
      });
      shutdownDaemon();
    }
  });
}
function runJvmExtractCommand( function runJvmExtractCommand(
layout: JvmExtractorLayout, layout: JvmExtractorLayout,
archivePath: string, archivePath: string,
@ -1006,6 +1279,15 @@ function runJvmExtractCommand(
}); });
} }
// Try persistent daemon first — saves ~5s JVM boot per archive
if (isDaemonAvailable(layout)) {
logger.info(`JVM Daemon: Sende Request für ${path.basename(archivePath)}`);
return sendDaemonRequest(archivePath, targetDir, conflictMode, passwordCandidates, onArchiveProgress, signal, timeoutMs);
}
// Fallback: spawn a new JVM process (daemon busy or not available)
logger.info(`JVM Spawn: Neuer Prozess für ${path.basename(archivePath)}${daemonBusy ? " (Daemon busy)" : ""}`);
const mode = effectiveConflictMode(conflictMode); const mode = effectiveConflictMode(conflictMode);
// Each JVM process needs its own temp dir so parallel SevenZipJBinding // Each JVM process needs its own temp dir so parallel SevenZipJBinding
// instances don't fight over the same native DLL file lock. // instances don't fight over the same native DLL file lock.
@ -1014,8 +1296,9 @@ function runJvmExtractCommand(
const args = [ const args = [
"-Dfile.encoding=UTF-8", "-Dfile.encoding=UTF-8",
`-Djava.io.tmpdir=${jvmTmpDir}`, `-Djava.io.tmpdir=${jvmTmpDir}`,
"-Xms32m", "-Xms512m",
"-Xmx512m", "-Xmx8g",
"-XX:+UseSerialGC",
"-cp", "-cp",
layout.classPath, layout.classPath,
JVM_EXTRACTOR_MAIN_CLASS, JVM_EXTRACTOR_MAIN_CLASS,

View File

@ -7,7 +7,7 @@ import { IPC_CHANNELS } from "../shared/ipc";
import { getLogFilePath, logger } from "./logger"; import { getLogFilePath, logger } from "./logger";
import { APP_NAME } from "./constants"; import { APP_NAME } from "./constants";
import { extractHttpLinksFromText } from "./utils"; import { extractHttpLinksFromText } from "./utils";
import { cleanupStaleSubstDrives } from "./extractor"; import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor";
/* ── IPC validation helpers ────────────────────────────────────── */ /* ── IPC validation helpers ────────────────────────────────────── */
function validateString(value: unknown, name: string): string { function validateString(value: unknown, name: string): string {
@ -515,6 +515,7 @@ app.on("before-quit", () => {
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; } if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
stopClipboardWatcher(); stopClipboardWatcher();
destroyTray(); destroyTray();
shutdownDaemon();
try { try {
controller.shutdown(); controller.shutdown();
} catch (error) { } catch (error) {

View File

@ -65,6 +65,111 @@ describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm b
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true); expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
}); });
// Verifies the per-archive progress protocol end-to-end: phases are
// reported, the archive name is attached during extraction, and the last
// extracting update reaches archivePercent === 100.
// NOTE(review): requires a local Java runtime — the enclosing suite is
// skipIf-guarded on hasJavaRuntime()/hasJvmExtractorRuntime().
it("emits progress callbacks with archiveName and percent", async () => {
  process.env.RD_EXTRACT_BACKEND = "jvm";
  const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
  tempDirs.push(root);
  const packageDir = path.join(root, "pkg");
  const targetDir = path.join(root, "out");
  fs.mkdirSync(packageDir, { recursive: true });
  // Create a ZIP with some content to trigger progress
  const zipPath = path.join(packageDir, "progress-test.zip");
  const zip = new AdmZip();
  zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
  zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
  zip.writeZip(zipPath);
  // Every onProgress update is recorded for later phase/percent assertions.
  const progressUpdates: Array<{
    archiveName: string;
    percent: number;
    phase: string;
    archivePercent?: number;
  }> = [];
  const result = await extractPackageArchives({
    packageDir,
    targetDir,
    cleanupMode: "none",
    conflictMode: "overwrite",
    removeLinks: false,
    removeSamples: false,
    onProgress: (update) => {
      progressUpdates.push({
        archiveName: update.archiveName,
        percent: update.percent,
        phase: update.phase,
        archivePercent: update.archivePercent,
      });
    },
  });
  expect(result.extracted).toBe(1);
  expect(result.failed).toBe(0);
  // Should have at least preparing, extracting, and done phases
  const phases = new Set(progressUpdates.map((u) => u.phase));
  expect(phases.has("preparing")).toBe(true);
  expect(phases.has("extracting")).toBe(true);
  // Extracting phase should include the archive name
  const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
  expect(extracting.length).toBeGreaterThan(0);
  // Should end at 100%
  const lastExtracting = extracting[extracting.length - 1];
  expect(lastExtracting.archivePercent).toBe(100);
  // Files should exist
  expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
  expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
});
// Two separate archives in one package must each report their own name
// during the extracting phase and both must end up extracted.
it("extracts multiple archives sequentially with progress for each", async () => {
  process.env.RD_EXTRACT_BACKEND = "jvm";
  const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
  tempDirs.push(root);
  const packageDir = path.join(root, "pkg");
  const targetDir = path.join(root, "out");
  fs.mkdirSync(packageDir, { recursive: true });
  // Build two independent single-file ZIP fixtures.
  const fixtures: Array<[string, string, string]> = [
    ["archive1.zip", "episode01.txt", "ep1 content"],
    ["archive2.zip", "episode02.txt", "ep2 content"],
  ];
  for (const [zipName, entryName, content] of fixtures) {
    const archive = new AdmZip();
    archive.addFile(entryName, Buffer.from(content));
    archive.writeZip(path.join(packageDir, zipName));
  }
  // Collect every archive name seen during the extracting phase.
  const archiveNames = new Set<string>();
  const result = await extractPackageArchives({
    packageDir,
    targetDir,
    cleanupMode: "none",
    conflictMode: "overwrite",
    removeLinks: false,
    removeSamples: false,
    onProgress: (update) => {
      if (update.phase === "extracting" && update.archiveName) {
        archiveNames.add(update.archiveName);
      }
    },
  });
  expect(result.extracted).toBe(2);
  expect(result.failed).toBe(0);
  // Progress must have been reported for each archive individually …
  expect(archiveNames.has("archive1.zip")).toBe(true);
  expect(archiveNames.has("archive2.zip")).toBe(true);
  // … and both payload files must exist in the target directory.
  expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
  expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
});
it("respects ask/skip conflict mode in jvm backend", async () => { it("respects ask/skip conflict mode in jvm backend", async () => {
process.env.RD_EXTRACT_BACKEND = "jvm"; process.env.RD_EXTRACT_BACKEND = "jvm";

View File

@ -0,0 +1,188 @@
import { describe, expect, it } from "vitest";
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
// Minimal stand-in for a download item: only the fields the resolver reads,
// plus an open index signature for extras (id, status, …).
type MinimalItem = {
  targetPath?: string;
  fileName?: string;
  [key: string]: unknown;
};

/**
 * Builds completed download items for the given file names, all rooted
 * under a fixed Windows-style package directory.
 */
function makeItems(names: string[]): MinimalItem[] {
  const items: MinimalItem[] = [];
  for (const name of names) {
    items.push({
      targetPath: `C:\\Downloads\\Package\\${name}`,
      fileName: name,
      id: name,
      status: "completed",
    });
  }
  return items;
}
// Exercises the multi-level matching strategy of resolveArchiveItemsFromList:
// explicit multipart/split patterns first, then exact filename match, then a
// stem-based fuzzy fallback, then a single-item fallback — and the negative
// case where nothing qualifies.
describe("resolveArchiveItemsFromList", () => {
  // ── Multipart RAR (.partN.rar) ──
  it("matches multipart .part1.rar archives", () => {
    const items = makeItems([
      "Movie.part1.rar",
      "Movie.part2.rar",
      "Movie.part3.rar",
      "Other.rar",
    ]);
    const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
    // All three volumes of the same set — but not the unrelated "Other.rar".
    expect(result).toHaveLength(3);
    expect(result.map((i: any) => i.fileName)).toEqual([
      "Movie.part1.rar",
      "Movie.part2.rar",
      "Movie.part3.rar",
    ]);
  });
  it("matches multipart .part01.rar archives (zero-padded)", () => {
    const items = makeItems([
      "Film.part01.rar",
      "Film.part02.rar",
      "Film.part10.rar",
      "Unrelated.zip",
    ]);
    const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
    expect(result).toHaveLength(3);
  });
  // ── Old-style RAR (.rar + .r00, .r01, etc.) ──
  it("matches old-style .rar + .rNN volumes", () => {
    const items = makeItems([
      "Archive.rar",
      "Archive.r00",
      "Archive.r01",
      "Archive.r02",
      "Other.zip",
    ]);
    const result = resolveArchiveItemsFromList("Archive.rar", items as any);
    // The .rar head plus all three .rNN continuation volumes.
    expect(result).toHaveLength(4);
  });
  // ── Single RAR ──
  it("matches a single .rar file", () => {
    const items = makeItems(["SingleFile.rar", "Other.mkv"]);
    const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
    expect(result).toHaveLength(1);
    expect((result[0] as any).fileName).toBe("SingleFile.rar");
  });
  // ── Split ZIP ──
  it("matches split .zip.NNN files", () => {
    const items = makeItems([
      "Data.zip",
      "Data.zip.001",
      "Data.zip.002",
      "Data.zip.003",
    ]);
    const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
    expect(result).toHaveLength(4);
  });
  // ── Split 7z ──
  it("matches split .7z.NNN files", () => {
    const items = makeItems([
      "Backup.7z.001",
      "Backup.7z.002",
    ]);
    const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
    expect(result).toHaveLength(2);
  });
  // ── Generic .NNN splits ──
  it("matches generic .NNN split files", () => {
    const items = makeItems([
      "video.001",
      "video.002",
      "video.003",
    ]);
    const result = resolveArchiveItemsFromList("video.001", items as any);
    expect(result).toHaveLength(3);
  });
  // ── Exact filename match ──
  it("matches a single .zip by exact name", () => {
    const items = makeItems(["myarchive.zip", "other.rar"]);
    const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
    expect(result).toHaveLength(1);
    expect((result[0] as any).fileName).toBe("myarchive.zip");
  });
  // ── Case insensitivity ──
  it("matches case-insensitively", () => {
    const items = makeItems([
      "MOVIE.PART1.RAR",
      "MOVIE.PART2.RAR",
    ]);
    const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
    expect(result).toHaveLength(2);
  });
  // ── Stem-based fallback ──
  it("uses stem-based fallback when exact patterns fail", () => {
    // Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
    // but the disk file is "Movie.part1.rar"
    const items = makeItems([
      "Movie.rar",
    ]);
    // The archive on disk is "Movie.part1.rar" but there's no item matching the
    // .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
    const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
    // stem fallback: "movie" starts with "movie" and ends with .rar
    expect(result).toHaveLength(1);
  });
  // ── Single item fallback ──
  it("returns single archive item when no pattern matches", () => {
    const items = makeItems(["totally-different-name.rar"]);
    const result = resolveArchiveItemsFromList("Original.rar", items as any);
    // Single item in list with archive extension → return it
    expect(result).toHaveLength(1);
  });
  // ── Empty when no match ──
  it("returns empty when items have no archive extensions", () => {
    const items = makeItems(["video.mkv", "subtitle.srt"]);
    const result = resolveArchiveItemsFromList("Archive.rar", items as any);
    expect(result).toHaveLength(0);
  });
  // ── Items without targetPath ──
  it("falls back to fileName when targetPath is missing", () => {
    const items = [
      { fileName: "Movie.part1.rar", id: "1", status: "completed" },
      { fileName: "Movie.part2.rar", id: "2", status: "completed" },
    ];
    const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
    expect(result).toHaveLength(2);
  });
  // ── Multiple archives, should not cross-match ──
  it("does not cross-match different archive groups", () => {
    const items = makeItems([
      "Episode.S01E01.part1.rar",
      "Episode.S01E01.part2.rar",
      "Episode.S01E02.part1.rar",
      "Episode.S01E02.part2.rar",
    ]);
    const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
    expect(result1).toHaveLength(2);
    expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);
    const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
    expect(result2).toHaveLength(2);
    expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
  });
});