Compare commits
20 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
72642351d0 | ||
|
|
51a01ea03f | ||
|
|
d9a78ea837 | ||
|
|
5b221d5bd5 | ||
|
|
c36549ca69 | ||
|
|
7e79bef8da | ||
|
|
e3b4a4ba19 | ||
|
|
30d216c7ca | ||
|
|
d80483adc2 | ||
|
|
1cda391dfe | ||
|
|
375ec36781 | ||
|
|
4ad1c05444 | ||
|
|
c88eeb0b12 | ||
|
|
c6261aba6a | ||
|
|
a010b967b9 | ||
|
|
af6547f254 | ||
|
|
ba235b0b93 | ||
|
|
1bfde96e46 | ||
|
|
e1f9b4b6d3 | ||
|
|
95cf4fbed8 |
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "real-debrid-downloader",
|
"name": "real-debrid-downloader",
|
||||||
"version": "1.6.45",
|
"version": "1.6.55",
|
||||||
"description": "Desktop downloader",
|
"description": "Desktop downloader",
|
||||||
"main": "build/main/main/main.js",
|
"main": "build/main/main/main.js",
|
||||||
"author": "Sucukdeluxe",
|
"author": "Sucukdeluxe",
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -3,7 +3,9 @@ package com.sucukdeluxe.extractor;
|
|||||||
import net.lingala.zip4j.ZipFile;
|
import net.lingala.zip4j.ZipFile;
|
||||||
import net.lingala.zip4j.exception.ZipException;
|
import net.lingala.zip4j.exception.ZipException;
|
||||||
import net.lingala.zip4j.model.FileHeader;
|
import net.lingala.zip4j.model.FileHeader;
|
||||||
|
import net.sf.sevenzipjbinding.ExtractAskMode;
|
||||||
import net.sf.sevenzipjbinding.ExtractOperationResult;
|
import net.sf.sevenzipjbinding.ExtractOperationResult;
|
||||||
|
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
|
||||||
import net.sf.sevenzipjbinding.IArchiveOpenCallback;
|
import net.sf.sevenzipjbinding.IArchiveOpenCallback;
|
||||||
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
|
import net.sf.sevenzipjbinding.IArchiveOpenVolumeCallback;
|
||||||
import net.sf.sevenzipjbinding.IInArchive;
|
import net.sf.sevenzipjbinding.IInArchive;
|
||||||
@ -51,6 +53,10 @@ public final class JBindExtractorMain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
if (args.length == 1 && "--daemon".equals(args[0])) {
|
||||||
|
runDaemon();
|
||||||
|
return;
|
||||||
|
}
|
||||||
int exit = 1;
|
int exit = 1;
|
||||||
try {
|
try {
|
||||||
ExtractionRequest request = parseArgs(args);
|
ExtractionRequest request = parseArgs(args);
|
||||||
@ -65,6 +71,127 @@ public final class JBindExtractorMain {
|
|||||||
System.exit(exit);
|
System.exit(exit);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static void runDaemon() {
|
||||||
|
System.out.println("RD_DAEMON_READY");
|
||||||
|
System.out.flush();
|
||||||
|
java.io.BufferedReader reader = new java.io.BufferedReader(
|
||||||
|
new java.io.InputStreamReader(System.in, StandardCharsets.UTF_8));
|
||||||
|
try {
|
||||||
|
String line;
|
||||||
|
while ((line = reader.readLine()) != null) {
|
||||||
|
line = line.trim();
|
||||||
|
if (line.isEmpty()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
int exitCode = 1;
|
||||||
|
try {
|
||||||
|
ExtractionRequest request = parseDaemonRequest(line);
|
||||||
|
exitCode = runExtraction(request);
|
||||||
|
} catch (IllegalArgumentException error) {
|
||||||
|
emitError("Argumentfehler: " + safeMessage(error));
|
||||||
|
exitCode = 2;
|
||||||
|
} catch (Throwable error) {
|
||||||
|
emitError(safeMessage(error));
|
||||||
|
exitCode = 1;
|
||||||
|
}
|
||||||
|
System.out.println("RD_REQUEST_DONE " + exitCode);
|
||||||
|
System.out.flush();
|
||||||
|
}
|
||||||
|
} catch (IOException ignored) {
|
||||||
|
// stdin closed — parent process exited
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static ExtractionRequest parseDaemonRequest(String jsonLine) {
|
||||||
|
// Minimal JSON parsing without external dependencies.
|
||||||
|
// Expected format: {"archive":"...","target":"...","conflict":"...","backend":"...","passwords":["...","..."]}
|
||||||
|
ExtractionRequest request = new ExtractionRequest();
|
||||||
|
request.archiveFile = new File(extractJsonString(jsonLine, "archive"));
|
||||||
|
request.targetDir = new File(extractJsonString(jsonLine, "target"));
|
||||||
|
String conflict = extractJsonString(jsonLine, "conflict");
|
||||||
|
if (conflict.length() > 0) {
|
||||||
|
request.conflictMode = ConflictMode.fromValue(conflict);
|
||||||
|
}
|
||||||
|
String backend = extractJsonString(jsonLine, "backend");
|
||||||
|
if (backend.length() > 0) {
|
||||||
|
request.backend = Backend.fromValue(backend);
|
||||||
|
}
|
||||||
|
// Parse passwords array
|
||||||
|
int pwStart = jsonLine.indexOf("\"passwords\"");
|
||||||
|
if (pwStart >= 0) {
|
||||||
|
int arrStart = jsonLine.indexOf('[', pwStart);
|
||||||
|
int arrEnd = jsonLine.indexOf(']', arrStart);
|
||||||
|
if (arrStart >= 0 && arrEnd > arrStart) {
|
||||||
|
String arrContent = jsonLine.substring(arrStart + 1, arrEnd);
|
||||||
|
int idx = 0;
|
||||||
|
while (idx < arrContent.length()) {
|
||||||
|
int qStart = arrContent.indexOf('"', idx);
|
||||||
|
if (qStart < 0) break;
|
||||||
|
int qEnd = findClosingQuote(arrContent, qStart + 1);
|
||||||
|
if (qEnd < 0) break;
|
||||||
|
request.passwords.add(unescapeJsonString(arrContent.substring(qStart + 1, qEnd)));
|
||||||
|
idx = qEnd + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (request.archiveFile == null || !request.archiveFile.exists() || !request.archiveFile.isFile()) {
|
||||||
|
throw new IllegalArgumentException("Archiv nicht gefunden: " +
|
||||||
|
(request.archiveFile == null ? "null" : request.archiveFile.getAbsolutePath()));
|
||||||
|
}
|
||||||
|
if (request.targetDir == null) {
|
||||||
|
throw new IllegalArgumentException("--target fehlt");
|
||||||
|
}
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String extractJsonString(String json, String key) {
|
||||||
|
String search = "\"" + key + "\"";
|
||||||
|
int keyIdx = json.indexOf(search);
|
||||||
|
if (keyIdx < 0) return "";
|
||||||
|
int colonIdx = json.indexOf(':', keyIdx + search.length());
|
||||||
|
if (colonIdx < 0) return "";
|
||||||
|
int qStart = json.indexOf('"', colonIdx + 1);
|
||||||
|
if (qStart < 0) return "";
|
||||||
|
int qEnd = findClosingQuote(json, qStart + 1);
|
||||||
|
if (qEnd < 0) return "";
|
||||||
|
return unescapeJsonString(json.substring(qStart + 1, qEnd));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static int findClosingQuote(String s, int from) {
|
||||||
|
for (int i = from; i < s.length(); i++) {
|
||||||
|
char c = s.charAt(i);
|
||||||
|
if (c == '\\') {
|
||||||
|
i++; // skip escaped character
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (c == '"') return i;
|
||||||
|
}
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String unescapeJsonString(String s) {
|
||||||
|
if (s.indexOf('\\') < 0) return s;
|
||||||
|
StringBuilder sb = new StringBuilder(s.length());
|
||||||
|
for (int i = 0; i < s.length(); i++) {
|
||||||
|
char c = s.charAt(i);
|
||||||
|
if (c == '\\' && i + 1 < s.length()) {
|
||||||
|
char next = s.charAt(i + 1);
|
||||||
|
switch (next) {
|
||||||
|
case '"': sb.append('"'); i++; break;
|
||||||
|
case '\\': sb.append('\\'); i++; break;
|
||||||
|
case '/': sb.append('/'); i++; break;
|
||||||
|
case 'n': sb.append('\n'); i++; break;
|
||||||
|
case 'r': sb.append('\r'); i++; break;
|
||||||
|
case 't': sb.append('\t'); i++; break;
|
||||||
|
default: sb.append(c); break;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
sb.append(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
private static int runExtraction(ExtractionRequest request) throws Exception {
|
private static int runExtraction(ExtractionRequest request) throws Exception {
|
||||||
List<String> passwords = normalizePasswords(request.passwords);
|
List<String> passwords = normalizePasswords(request.passwords);
|
||||||
Exception lastError = null;
|
Exception lastError = null;
|
||||||
@ -235,110 +362,99 @@ public final class JBindExtractorMain {
|
|||||||
try {
|
try {
|
||||||
context = openSevenZipArchive(request.archiveFile, password);
|
context = openSevenZipArchive(request.archiveFile, password);
|
||||||
IInArchive archive = context.archive;
|
IInArchive archive = context.archive;
|
||||||
ISimpleInArchive simple = archive.getSimpleInterface();
|
int itemCount = archive.getNumberOfItems();
|
||||||
ISimpleInArchiveItem[] items = simple.getArchiveItems();
|
if (itemCount <= 0) {
|
||||||
if (items == null) {
|
|
||||||
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
|
throw new IOException("Archiv enthalt keine Eintrage oder konnte nicht gelesen werden: " + request.archiveFile.getAbsolutePath());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Pre-scan: collect file indices, sizes, output paths, and detect encryption
|
||||||
long totalUnits = 0;
|
long totalUnits = 0;
|
||||||
boolean encrypted = false;
|
boolean encrypted = false;
|
||||||
for (ISimpleInArchiveItem item : items) {
|
List<Integer> fileIndices = new ArrayList<Integer>();
|
||||||
if (item == null || item.isFolder()) {
|
List<File> outputFiles = new ArrayList<File>();
|
||||||
continue;
|
List<Long> fileSizes = new ArrayList<Long>();
|
||||||
}
|
|
||||||
try {
|
|
||||||
encrypted = encrypted || item.isEncrypted();
|
|
||||||
} catch (Throwable ignored) {
|
|
||||||
// ignore encrypted flag read issues
|
|
||||||
}
|
|
||||||
totalUnits += safeSize(item.getSize());
|
|
||||||
}
|
|
||||||
ProgressTracker progress = new ProgressTracker(totalUnits);
|
|
||||||
progress.emitStart();
|
|
||||||
|
|
||||||
Set<String> reserved = new HashSet<String>();
|
Set<String> reserved = new HashSet<String>();
|
||||||
for (ISimpleInArchiveItem item : items) {
|
|
||||||
if (item == null) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
String entryName = normalizeEntryName(item.getPath(), "item-" + item.getItemIndex());
|
for (int i = 0; i < itemCount; i++) {
|
||||||
if (item.isFolder()) {
|
Boolean isFolder = (Boolean) archive.getProperty(i, PropID.IS_FOLDER);
|
||||||
|
String entryPath = (String) archive.getProperty(i, PropID.PATH);
|
||||||
|
String entryName = normalizeEntryName(entryPath, "item-" + i);
|
||||||
|
|
||||||
|
if (Boolean.TRUE.equals(isFolder)) {
|
||||||
File dir = resolveDirectory(request.targetDir, entryName);
|
File dir = resolveDirectory(request.targetDir, entryName);
|
||||||
ensureDirectory(dir);
|
ensureDirectory(dir);
|
||||||
reserved.add(pathKey(dir));
|
reserved.add(pathKey(dir));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
long itemUnits = safeSize(item.getSize());
|
|
||||||
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
|
|
||||||
if (output == null) {
|
|
||||||
progress.advance(itemUnits);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
ensureDirectory(output.getParentFile());
|
|
||||||
rejectSymlink(output);
|
|
||||||
final FileOutputStream out = new FileOutputStream(output);
|
|
||||||
final long[] remaining = new long[] { itemUnits };
|
|
||||||
boolean extractionSuccess = false;
|
|
||||||
try {
|
try {
|
||||||
ExtractOperationResult result = item.extractSlow(new ISequentialOutStream() {
|
Boolean isEncrypted = (Boolean) archive.getProperty(i, PropID.ENCRYPTED);
|
||||||
@Override
|
encrypted = encrypted || Boolean.TRUE.equals(isEncrypted);
|
||||||
public int write(byte[] data) throws SevenZipException {
|
|
||||||
if (data == null || data.length == 0) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
out.write(data);
|
|
||||||
} catch (IOException error) {
|
|
||||||
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
|
|
||||||
}
|
|
||||||
long accounted = Math.min(remaining[0], (long) data.length);
|
|
||||||
remaining[0] -= accounted;
|
|
||||||
progress.advance(accounted);
|
|
||||||
return data.length;
|
|
||||||
}
|
|
||||||
}, password == null ? "" : password);
|
|
||||||
|
|
||||||
if (remaining[0] > 0) {
|
|
||||||
progress.advance(remaining[0]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result != ExtractOperationResult.OK) {
|
|
||||||
if (isPasswordFailure(result, encrypted)) {
|
|
||||||
throw new WrongPasswordException(new IOException("Falsches Passwort"));
|
|
||||||
}
|
|
||||||
throw new IOException("7z-Fehler: " + result.name());
|
|
||||||
}
|
|
||||||
extractionSuccess = true;
|
|
||||||
} catch (SevenZipException error) {
|
|
||||||
if (looksLikeWrongPassword(error, encrypted)) {
|
|
||||||
throw new WrongPasswordException(error);
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
} finally {
|
|
||||||
try {
|
|
||||||
out.close();
|
|
||||||
} catch (Throwable ignored) {
|
|
||||||
}
|
|
||||||
if (!extractionSuccess && output.exists()) {
|
|
||||||
try {
|
|
||||||
output.delete();
|
|
||||||
} catch (Throwable ignored) {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
java.util.Date modified = item.getLastWriteTime();
|
|
||||||
if (modified != null) {
|
|
||||||
output.setLastModified(modified.getTime());
|
|
||||||
}
|
|
||||||
} catch (Throwable ignored) {
|
} catch (Throwable ignored) {
|
||||||
// best effort
|
// ignore encrypted flag read issues
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Long rawSize = (Long) archive.getProperty(i, PropID.SIZE);
|
||||||
|
long itemSize = safeSize(rawSize);
|
||||||
|
totalUnits += itemSize;
|
||||||
|
|
||||||
|
File output = resolveOutputFile(request.targetDir, entryName, request.conflictMode, reserved);
|
||||||
|
fileIndices.add(i);
|
||||||
|
outputFiles.add(output); // null if skipped
|
||||||
|
fileSizes.add(itemSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileIndices.isEmpty()) {
|
||||||
|
// All items are folders or skipped
|
||||||
|
ProgressTracker progress = new ProgressTracker(1);
|
||||||
|
progress.emitStart();
|
||||||
|
progress.emitDone();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
ProgressTracker progress = new ProgressTracker(totalUnits);
|
||||||
|
progress.emitStart();
|
||||||
|
|
||||||
|
// Build index array for bulk extract
|
||||||
|
int[] indices = new int[fileIndices.size()];
|
||||||
|
for (int i = 0; i < fileIndices.size(); i++) {
|
||||||
|
indices[i] = fileIndices.get(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map from archive index to our position in fileIndices/outputFiles
|
||||||
|
Map<Integer, Integer> indexToPos = new HashMap<Integer, Integer>();
|
||||||
|
for (int i = 0; i < fileIndices.size(); i++) {
|
||||||
|
indexToPos.put(fileIndices.get(i), i);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bulk extraction state
|
||||||
|
final boolean encryptedFinal = encrypted;
|
||||||
|
final String effectivePassword = password == null ? "" : password;
|
||||||
|
final File[] currentOutput = new File[1];
|
||||||
|
final FileOutputStream[] currentStream = new FileOutputStream[1];
|
||||||
|
final boolean[] currentSuccess = new boolean[1];
|
||||||
|
final long[] currentRemaining = new long[1];
|
||||||
|
final Throwable[] firstError = new Throwable[1];
|
||||||
|
final int[] currentPos = new int[] { -1 };
|
||||||
|
|
||||||
|
try {
|
||||||
|
archive.extract(indices, false, new BulkExtractCallback(
|
||||||
|
archive, indexToPos, fileIndices, outputFiles, fileSizes,
|
||||||
|
progress, encryptedFinal, effectivePassword, currentOutput,
|
||||||
|
currentStream, currentSuccess, currentRemaining, currentPos, firstError
|
||||||
|
));
|
||||||
|
} catch (SevenZipException error) {
|
||||||
|
if (looksLikeWrongPassword(error, encryptedFinal)) {
|
||||||
|
throw new WrongPasswordException(error);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (firstError[0] != null) {
|
||||||
|
if (firstError[0] instanceof WrongPasswordException) {
|
||||||
|
throw (WrongPasswordException) firstError[0];
|
||||||
|
}
|
||||||
|
throw (Exception) firstError[0];
|
||||||
}
|
}
|
||||||
|
|
||||||
progress.emitDone();
|
progress.emitDone();
|
||||||
@ -763,6 +879,176 @@ public final class JBindExtractorMain {
|
|||||||
private final List<String> passwords = new ArrayList<String>();
|
private final List<String> passwords = new ArrayList<String>();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bulk extraction callback that implements both IArchiveExtractCallback and
|
||||||
|
* ICryptoGetTextPassword. Using the bulk IInArchive.extract() API instead of
|
||||||
|
* per-item extractSlow() is critical for performance — solid RAR archives
|
||||||
|
* otherwise re-decode from the beginning for every single item.
|
||||||
|
*/
|
||||||
|
private static final class BulkExtractCallback implements IArchiveExtractCallback, ICryptoGetTextPassword {
|
||||||
|
private final IInArchive archive;
|
||||||
|
private final Map<Integer, Integer> indexToPos;
|
||||||
|
private final List<Integer> fileIndices;
|
||||||
|
private final List<File> outputFiles;
|
||||||
|
private final List<Long> fileSizes;
|
||||||
|
private final ProgressTracker progress;
|
||||||
|
private final boolean encrypted;
|
||||||
|
private final String password;
|
||||||
|
private final File[] currentOutput;
|
||||||
|
private final FileOutputStream[] currentStream;
|
||||||
|
private final boolean[] currentSuccess;
|
||||||
|
private final long[] currentRemaining;
|
||||||
|
private final int[] currentPos;
|
||||||
|
private final Throwable[] firstError;
|
||||||
|
|
||||||
|
BulkExtractCallback(IInArchive archive, Map<Integer, Integer> indexToPos,
|
||||||
|
List<Integer> fileIndices, List<File> outputFiles, List<Long> fileSizes,
|
||||||
|
ProgressTracker progress, boolean encrypted, String password,
|
||||||
|
File[] currentOutput, FileOutputStream[] currentStream,
|
||||||
|
boolean[] currentSuccess, long[] currentRemaining, int[] currentPos,
|
||||||
|
Throwable[] firstError) {
|
||||||
|
this.archive = archive;
|
||||||
|
this.indexToPos = indexToPos;
|
||||||
|
this.fileIndices = fileIndices;
|
||||||
|
this.outputFiles = outputFiles;
|
||||||
|
this.fileSizes = fileSizes;
|
||||||
|
this.progress = progress;
|
||||||
|
this.encrypted = encrypted;
|
||||||
|
this.password = password;
|
||||||
|
this.currentOutput = currentOutput;
|
||||||
|
this.currentStream = currentStream;
|
||||||
|
this.currentSuccess = currentSuccess;
|
||||||
|
this.currentRemaining = currentRemaining;
|
||||||
|
this.currentPos = currentPos;
|
||||||
|
this.firstError = firstError;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String cryptoGetTextPassword() {
|
||||||
|
return password;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setTotal(long total) {
|
||||||
|
// 7z reports total compressed bytes; we track uncompressed via ProgressTracker
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setCompleted(long complete) {
|
||||||
|
// Not used — we track per-write progress
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ISequentialOutStream getStream(int index, ExtractAskMode extractAskMode) throws SevenZipException {
|
||||||
|
closeCurrentStream();
|
||||||
|
|
||||||
|
Integer pos = indexToPos.get(index);
|
||||||
|
if (pos == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
currentPos[0] = pos;
|
||||||
|
currentOutput[0] = outputFiles.get(pos);
|
||||||
|
currentSuccess[0] = false;
|
||||||
|
currentRemaining[0] = fileSizes.get(pos);
|
||||||
|
|
||||||
|
if (extractAskMode != ExtractAskMode.EXTRACT) {
|
||||||
|
currentOutput[0] = null;
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (currentOutput[0] == null) {
|
||||||
|
progress.advance(currentRemaining[0]);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
ensureDirectory(currentOutput[0].getParentFile());
|
||||||
|
rejectSymlink(currentOutput[0]);
|
||||||
|
currentStream[0] = new FileOutputStream(currentOutput[0]);
|
||||||
|
} catch (IOException error) {
|
||||||
|
throw new SevenZipException("Fehler beim Erstellen: " + error.getMessage(), error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new ISequentialOutStream() {
|
||||||
|
@Override
|
||||||
|
public int write(byte[] data) throws SevenZipException {
|
||||||
|
if (data == null || data.length == 0) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
currentStream[0].write(data);
|
||||||
|
} catch (IOException error) {
|
||||||
|
throw new SevenZipException("Fehler beim Schreiben: " + error.getMessage(), error);
|
||||||
|
}
|
||||||
|
long accounted = Math.min(currentRemaining[0], (long) data.length);
|
||||||
|
currentRemaining[0] -= accounted;
|
||||||
|
progress.advance(accounted);
|
||||||
|
return data.length;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void prepareOperation(ExtractAskMode extractAskMode) {
|
||||||
|
// no-op
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
|
||||||
|
if (currentRemaining[0] > 0) {
|
||||||
|
progress.advance(currentRemaining[0]);
|
||||||
|
currentRemaining[0] = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result == ExtractOperationResult.OK) {
|
||||||
|
currentSuccess[0] = true;
|
||||||
|
closeCurrentStream();
|
||||||
|
if (currentPos[0] >= 0 && currentOutput[0] != null) {
|
||||||
|
try {
|
||||||
|
int archiveIndex = fileIndices.get(currentPos[0]);
|
||||||
|
java.util.Date modified = (java.util.Date) archive.getProperty(archiveIndex, PropID.LAST_MODIFICATION_TIME);
|
||||||
|
if (modified != null) {
|
||||||
|
currentOutput[0].setLastModified(modified.getTime());
|
||||||
|
}
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
// best effort
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
closeCurrentStream();
|
||||||
|
if (currentOutput[0] != null && currentOutput[0].exists()) {
|
||||||
|
try {
|
||||||
|
currentOutput[0].delete();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (firstError[0] == null) {
|
||||||
|
if (isPasswordFailure(result, encrypted)) {
|
||||||
|
firstError[0] = new WrongPasswordException(new IOException("Falsches Passwort"));
|
||||||
|
} else {
|
||||||
|
firstError[0] = new IOException("7z-Fehler: " + result.name());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void closeCurrentStream() {
|
||||||
|
if (currentStream[0] != null) {
|
||||||
|
try {
|
||||||
|
currentStream[0].close();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
currentStream[0] = null;
|
||||||
|
}
|
||||||
|
if (!currentSuccess[0] && currentOutput[0] != null && currentOutput[0].exists()) {
|
||||||
|
try {
|
||||||
|
currentOutput[0].delete();
|
||||||
|
} catch (Throwable ignored) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private static final class WrongPasswordException extends Exception {
|
private static final class WrongPasswordException extends Exception {
|
||||||
private static final long serialVersionUID = 1L;
|
private static final long serialVersionUID = 1L;
|
||||||
|
|
||||||
|
|||||||
@ -37,9 +37,9 @@ function releaseTlsSkip(): void {
|
|||||||
delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
|
delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
import { cleanupCancelledPackageArtifactsAsync } from "./cleanup";
|
import { cleanupCancelledPackageArtifactsAsync, removeDownloadLinkArtifacts, removeSampleArtifacts } from "./cleanup";
|
||||||
import { DebridService, MegaWebUnrestrictor, checkRapidgatorOnline } from "./debrid";
|
import { DebridService, MegaWebUnrestrictor, checkRapidgatorOnline } from "./debrid";
|
||||||
import { clearExtractResumeState, collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates } from "./extractor";
|
import { cleanupArchives, clearExtractResumeState, collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates, hasAnyFilesRecursive, removeEmptyDirectoryTree } from "./extractor";
|
||||||
import { validateFileAgainstManifest } from "./integrity";
|
import { validateFileAgainstManifest } from "./integrity";
|
||||||
import { logger } from "./logger";
|
import { logger } from "./logger";
|
||||||
import { StoragePaths, saveSession, saveSessionAsync, saveSettings, saveSettingsAsync } from "./storage";
|
import { StoragePaths, saveSession, saveSessionAsync, saveSettings, saveSettingsAsync } from "./storage";
|
||||||
@ -751,60 +751,86 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
|
export function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
|
||||||
const entryLower = archiveName.toLowerCase();
|
const entryLower = archiveName.toLowerCase();
|
||||||
|
|
||||||
|
// Helper: get item basename (try targetPath first, then fileName)
|
||||||
|
const itemBaseName = (item: DownloadItem): string =>
|
||||||
|
path.basename(item.targetPath || item.fileName || "");
|
||||||
|
|
||||||
|
// Try pattern-based matching first (for multipart archives)
|
||||||
|
let pattern: RegExp | null = null;
|
||||||
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
|
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
|
||||||
if (multipartMatch) {
|
if (multipartMatch) {
|
||||||
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
|
pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
|
||||||
return items.filter((item) => {
|
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
const rarMatch = entryLower.match(/^(.*)\.rar$/);
|
if (!pattern) {
|
||||||
if (rarMatch) {
|
const rarMatch = entryLower.match(/^(.*)\.rar$/);
|
||||||
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
if (rarMatch) {
|
||||||
const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
|
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
return items.filter((item) => {
|
pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
}
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
// Split ZIP (e.g., movie.zip.001, movie.zip.002)
|
if (!pattern) {
|
||||||
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
|
const zipSplitMatch = entryLower.match(/^(.*)\.zip\.001$/);
|
||||||
if (zipSplitMatch) {
|
if (zipSplitMatch) {
|
||||||
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const stem = zipSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
|
pattern = new RegExp(`^${stem}\\.zip(\\.\\d+)?$`, "i");
|
||||||
return items.filter((item) => {
|
}
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
// Split 7z (e.g., movie.7z.001, movie.7z.002)
|
if (!pattern) {
|
||||||
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
|
const sevenSplitMatch = entryLower.match(/^(.*)\.7z\.001$/);
|
||||||
if (sevenSplitMatch) {
|
if (sevenSplitMatch) {
|
||||||
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const stem = sevenSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
|
pattern = new RegExp(`^${stem}\\.7z(\\.\\d+)?$`, "i");
|
||||||
return items.filter((item) => {
|
}
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
// Generic .NNN splits (e.g., movie.001, movie.002)
|
if (!pattern && /^(.*)\.001$/.test(entryLower) && !/\.(zip|7z)\.001$/.test(entryLower)) {
|
||||||
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
|
const genericSplitMatch = entryLower.match(/^(.*)\.001$/);
|
||||||
if (genericSplitMatch && !/\.(zip|7z)\.001$/.test(entryLower)) {
|
if (genericSplitMatch) {
|
||||||
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const stem = genericSplitMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
const pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
|
pattern = new RegExp(`^${stem}\\.\\d{3}$`, "i");
|
||||||
return items.filter((item) => {
|
}
|
||||||
const name = path.basename(item.targetPath || item.fileName || "");
|
|
||||||
return pattern.test(name);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
return items.filter((item) => {
|
|
||||||
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
|
// Attempt 1: Pattern match (handles multipart archives)
|
||||||
return name === entryLower;
|
if (pattern) {
|
||||||
});
|
const matched = items.filter((item) => pattern!.test(itemBaseName(item)));
|
||||||
|
if (matched.length > 0) return matched;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt 2: Exact filename match (case-insensitive)
|
||||||
|
const exactMatch = items.filter((item) => itemBaseName(item).toLowerCase() === entryLower);
|
||||||
|
if (exactMatch.length > 0) return exactMatch;
|
||||||
|
|
||||||
|
// Attempt 3: Stem-based fuzzy match — strip archive extensions and compare stems.
|
||||||
|
// Handles cases where debrid services modify filenames slightly.
|
||||||
|
const archiveStem = entryLower
|
||||||
|
.replace(/\.part\d+\.rar$/i, "")
|
||||||
|
.replace(/\.r\d{2,3}$/i, "")
|
||||||
|
.replace(/\.rar$/i, "")
|
||||||
|
.replace(/\.(zip|7z)\.\d{3}$/i, "")
|
||||||
|
.replace(/\.\d{3}$/i, "")
|
||||||
|
.replace(/\.(zip|7z)$/i, "");
|
||||||
|
if (archiveStem.length > 3) {
|
||||||
|
const stemMatch = items.filter((item) => {
|
||||||
|
const name = itemBaseName(item).toLowerCase();
|
||||||
|
return name.startsWith(archiveStem) && /\.(rar|r\d{2,3}|zip|7z|\d{3})$/i.test(name);
|
||||||
|
});
|
||||||
|
if (stemMatch.length > 0) return stemMatch;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt 4: If only one item in the list and one archive — return it as a best-effort match.
|
||||||
|
// This handles single-file packages where the filename may have been modified.
|
||||||
|
if (items.length === 1) {
|
||||||
|
const singleName = itemBaseName(items[0]).toLowerCase();
|
||||||
|
if (/\.(rar|zip|7z|\d{3})$/i.test(singleName)) {
|
||||||
|
return items;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
function retryDelayWithJitter(attempt: number, baseMs: number): number {
|
function retryDelayWithJitter(attempt: number, baseMs: number): number {
|
||||||
@ -1384,6 +1410,10 @@ export class DownloadManager extends EventEmitter {
|
|||||||
addedPackages += 1;
|
addedPackages += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (addedPackages > 0 || addedLinks > 0) {
|
||||||
|
const pkgNames = packages.filter((p) => p.links.length > 0).map((p) => p.name).join(", ");
|
||||||
|
logger.info(`Pakete hinzugefügt: ${addedPackages} Paket(e), ${addedLinks} Link(s) [${pkgNames}]`);
|
||||||
|
}
|
||||||
this.persistSoon();
|
this.persistSoon();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
if (unresolvedByLink.size > 0) {
|
if (unresolvedByLink.size > 0) {
|
||||||
@ -3570,14 +3600,16 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.emit("state", this.getSnapshot());
|
this.emit("state", this.getSnapshot());
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// Too soon — schedule deferred forced emit
|
// Too soon — replace any pending timer with a shorter forced-emit timer
|
||||||
if (!this.stateEmitTimer) {
|
if (this.stateEmitTimer) {
|
||||||
this.stateEmitTimer = setTimeout(() => {
|
clearTimeout(this.stateEmitTimer);
|
||||||
this.stateEmitTimer = null;
|
this.stateEmitTimer = null;
|
||||||
this.lastStateEmitAt = nowMs();
|
|
||||||
this.emit("state", this.getSnapshot());
|
|
||||||
}, MIN_FORCE_GAP_MS - sinceLastEmit);
|
|
||||||
}
|
}
|
||||||
|
this.stateEmitTimer = setTimeout(() => {
|
||||||
|
this.stateEmitTimer = null;
|
||||||
|
this.lastStateEmitAt = nowMs();
|
||||||
|
this.emit("state", this.getSnapshot());
|
||||||
|
}, MIN_FORCE_GAP_MS - sinceLastEmit);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (this.stateEmitTimer) {
|
if (this.stateEmitTimer) {
|
||||||
@ -3815,18 +3847,26 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.packagePostProcessAbortControllers.set(packageId, abortController);
|
this.packagePostProcessAbortControllers.set(packageId, abortController);
|
||||||
|
|
||||||
const task = (async () => {
|
const task = (async () => {
|
||||||
|
const slotWaitStart = nowMs();
|
||||||
await this.acquirePostProcessSlot(packageId);
|
await this.acquirePostProcessSlot(packageId);
|
||||||
|
const slotWaitMs = nowMs() - slotWaitStart;
|
||||||
|
if (slotWaitMs > 100) {
|
||||||
|
logger.info(`Post-Process Slot erhalten nach ${(slotWaitMs / 1000).toFixed(1)}s Wartezeit: pkg=${packageId.slice(0, 8)}`);
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
// Loop while requeue requests arrive — keep the slot so the same
|
let round = 0;
|
||||||
// package can immediately re-run hybrid extraction without waiting
|
|
||||||
// behind other packages that may be queued for the slot.
|
|
||||||
do {
|
do {
|
||||||
|
round += 1;
|
||||||
|
const hadRequeue = this.hybridExtractRequeue.has(packageId);
|
||||||
this.hybridExtractRequeue.delete(packageId);
|
this.hybridExtractRequeue.delete(packageId);
|
||||||
|
const roundStart = nowMs();
|
||||||
try {
|
try {
|
||||||
await this.handlePackagePostProcessing(packageId, abortController.signal);
|
await this.handlePackagePostProcessing(packageId, abortController.signal);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`);
|
logger.warn(`Post-Processing für Paket fehlgeschlagen: ${compactErrorText(error)}`);
|
||||||
}
|
}
|
||||||
|
const roundMs = nowMs() - roundStart;
|
||||||
|
logger.info(`Post-Process Runde ${round} fertig in ${(roundMs / 1000).toFixed(1)}s (requeue=${hadRequeue}, nextRequeue=${this.hybridExtractRequeue.has(packageId)}): pkg=${packageId.slice(0, 8)}`);
|
||||||
this.persistSoon();
|
this.persistSoon();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
} while (this.hybridExtractRequeue.has(packageId));
|
} while (this.hybridExtractRequeue.has(packageId));
|
||||||
@ -4726,6 +4766,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
|
item.fullStatus = `Starte... (${unrestricted.providerLabel})`;
|
||||||
item.updatedAt = nowMs();
|
item.updatedAt = nowMs();
|
||||||
this.emitState();
|
this.emitState();
|
||||||
|
logger.info(`Download Start: ${item.fileName} (${humanSize(unrestricted.fileSize || 0)}) via ${unrestricted.providerLabel}, pkg=${pkg.name}`);
|
||||||
|
|
||||||
const maxAttempts = maxItemAttempts;
|
const maxAttempts = maxItemAttempts;
|
||||||
let done = false;
|
let done = false;
|
||||||
@ -4834,6 +4875,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
item.updatedAt = nowMs();
|
item.updatedAt = nowMs();
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
this.recordRunOutcome(item.id, "completed");
|
this.recordRunOutcome(item.id, "completed");
|
||||||
|
logger.info(`Download fertig: ${item.fileName} (${humanSize(item.downloadedBytes)}), pkg=${pkg.name}`);
|
||||||
|
|
||||||
if (this.session.running && !active.abortController.signal.aborted) {
|
if (this.session.running && !active.abortController.signal.aborted) {
|
||||||
void this.runPackagePostProcessing(pkg.id).catch((err) => {
|
void this.runPackagePostProcessing(pkg.id).catch((err) => {
|
||||||
@ -6256,8 +6298,13 @@ export class DownloadManager extends EventEmitter {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<void> {
|
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<number> {
|
||||||
|
const findReadyStart = nowMs();
|
||||||
const readyArchives = await this.findReadyArchiveSets(pkg);
|
const readyArchives = await this.findReadyArchiveSets(pkg);
|
||||||
|
const findReadyMs = nowMs() - findReadyStart;
|
||||||
|
if (findReadyMs > 200) {
|
||||||
|
logger.info(`findReadyArchiveSets dauerte ${(findReadyMs / 1000).toFixed(1)}s: pkg=${pkg.name}, found=${readyArchives.size}`);
|
||||||
|
}
|
||||||
if (readyArchives.size === 0) {
|
if (readyArchives.size === 0) {
|
||||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
|
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
|
||||||
// Relabel completed items that are part of incomplete multi-part archives
|
// Relabel completed items that are part of incomplete multi-part archives
|
||||||
@ -6273,7 +6320,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
this.emitState();
|
this.emitState();
|
||||||
}
|
}
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`Hybrid-Extract Start: pkg=${pkg.name}, readyArchives=${readyArchives.size}`);
|
logger.info(`Hybrid-Extract Start: pkg=${pkg.name}, readyArchives=${readyArchives.size}`);
|
||||||
@ -6313,7 +6360,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
// a previous hybrid round, there is nothing new to extract.
|
// a previous hybrid round, there is nothing new to extract.
|
||||||
if (hybridItems.length > 0 && hybridItems.every((item) => isExtractedLabel(item.fullStatus))) {
|
if (hybridItems.length > 0 && hybridItems.every((item) => isExtractedLabel(item.fullStatus))) {
|
||||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle ${hybridItems.length} Items bereits entpackt, überspringe`);
|
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle ${hybridItems.length} Items bereits entpackt, überspringe`);
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Filter out archives whose items are ALL already extracted so we don't
|
// Filter out archives whose items are ALL already extracted so we don't
|
||||||
@ -6336,7 +6383,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
if (readyArchives.size === 0) {
|
if (readyArchives.size === 0) {
|
||||||
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle fertigen Archive bereits entpackt`);
|
logger.info(`Hybrid-Extract: pkg=${pkg.name}, alle fertigen Archive bereits entpackt`);
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve archive items dynamically from ALL package items (not just
|
// Resolve archive items dynamically from ALL package items (not just
|
||||||
@ -6345,9 +6392,9 @@ export class DownloadManager extends EventEmitter {
|
|||||||
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
|
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
|
||||||
resolveArchiveItemsFromList(archiveName, items);
|
resolveArchiveItemsFromList(archiveName, items);
|
||||||
|
|
||||||
// Track multiple active archives for parallel hybrid extraction
|
// Track archives for parallel hybrid extraction progress
|
||||||
const activeHybridArchiveMap = new Map<string, DownloadItem[]>();
|
const hybridResolvedItems = new Map<string, DownloadItem[]>();
|
||||||
const hybridArchiveStartTimes = new Map<string, number>();
|
const hybridStartTimes = new Map<string, number>();
|
||||||
let hybridLastEmitAt = 0;
|
let hybridLastEmitAt = 0;
|
||||||
|
|
||||||
// Mark items based on whether their archive is actually ready for extraction.
|
// Mark items based on whether their archive is actually ready for extraction.
|
||||||
@ -6386,7 +6433,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
packageId,
|
packageId,
|
||||||
hybridMode: true,
|
hybridMode: true,
|
||||||
maxParallel: this.settings.maxParallelExtract || 2,
|
maxParallel: this.settings.maxParallelExtract || 2,
|
||||||
extractCpuPriority: this.settings.extractCpuPriority,
|
extractCpuPriority: "high",
|
||||||
onProgress: (progress) => {
|
onProgress: (progress) => {
|
||||||
if (progress.phase === "preparing") {
|
if (progress.phase === "preparing") {
|
||||||
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
||||||
@ -6394,26 +6441,39 @@ export class DownloadManager extends EventEmitter {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (progress.phase === "done") {
|
if (progress.phase === "done") {
|
||||||
// Do NOT mark remaining archives as "Done" here — some may have
|
hybridResolvedItems.clear();
|
||||||
// failed. The post-extraction code (result.failed check) will
|
hybridStartTimes.clear();
|
||||||
// assign the correct label. Only clear the tracking maps.
|
|
||||||
activeHybridArchiveMap.clear();
|
|
||||||
hybridArchiveStartTimes.clear();
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (progress.archiveName) {
|
if (progress.archiveName) {
|
||||||
// Resolve items for this archive if not yet tracked
|
// Resolve items for this archive if not yet tracked
|
||||||
if (!activeHybridArchiveMap.has(progress.archiveName)) {
|
if (!hybridResolvedItems.has(progress.archiveName)) {
|
||||||
activeHybridArchiveMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
|
const resolved = resolveArchiveItems(progress.archiveName);
|
||||||
hybridArchiveStartTimes.set(progress.archiveName, nowMs());
|
hybridResolvedItems.set(progress.archiveName, resolved);
|
||||||
|
hybridStartTimes.set(progress.archiveName, nowMs());
|
||||||
|
if (resolved.length === 0) {
|
||||||
|
logger.warn(`resolveArchiveItems (hybrid): KEINE Items gefunden für archiveName="${progress.archiveName}", items.length=${items.length}, itemNames=[${items.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
|
||||||
|
} else {
|
||||||
|
logger.info(`resolveArchiveItems (hybrid): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
|
||||||
|
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
|
||||||
|
const initAt = nowMs();
|
||||||
|
for (const entry of resolved) {
|
||||||
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
|
entry.fullStatus = initLabel;
|
||||||
|
entry.updatedAt = initAt;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hybridLastEmitAt = initAt;
|
||||||
|
this.emitState(true);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const archItems = activeHybridArchiveMap.get(progress.archiveName)!;
|
const archItems = hybridResolvedItems.get(progress.archiveName) || [];
|
||||||
|
|
||||||
// If archive is at 100%, mark its items as done and remove from active
|
// If archive is at 100%, mark its items as done and remove from active
|
||||||
if (Number(progress.archivePercent ?? 0) >= 100) {
|
if (Number(progress.archivePercent ?? 0) >= 100) {
|
||||||
const doneAt = nowMs();
|
const doneAt = nowMs();
|
||||||
const startedAt = hybridArchiveStartTimes.get(progress.archiveName) || doneAt;
|
const startedAt = hybridStartTimes.get(progress.archiveName) || doneAt;
|
||||||
const doneLabel = formatExtractDone(doneAt - startedAt);
|
const doneLabel = formatExtractDone(doneAt - startedAt);
|
||||||
for (const entry of archItems) {
|
for (const entry of archItems) {
|
||||||
if (!isExtractedLabel(entry.fullStatus)) {
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
@ -6421,8 +6481,8 @@ export class DownloadManager extends EventEmitter {
|
|||||||
entry.updatedAt = doneAt;
|
entry.updatedAt = doneAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
activeHybridArchiveMap.delete(progress.archiveName);
|
hybridResolvedItems.delete(progress.archiveName);
|
||||||
hybridArchiveStartTimes.delete(progress.archiveName);
|
hybridStartTimes.delete(progress.archiveName);
|
||||||
// Show transitional label while next archive initializes
|
// Show transitional label while next archive initializes
|
||||||
const done = progress.current + 1;
|
const done = progress.current + 1;
|
||||||
if (done < progress.total) {
|
if (done < progress.total) {
|
||||||
@ -6485,9 +6545,20 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
|
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
|
||||||
if (result.extracted > 0) {
|
if (result.extracted > 0) {
|
||||||
pkg.postProcessLabel = "Renaming...";
|
// Fire-and-forget: rename then collect MKVs in background so the
|
||||||
this.emitState();
|
// slot is not blocked and the next archive set can start immediately.
|
||||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
void (async () => {
|
||||||
|
try {
|
||||||
|
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn(`Hybrid Auto-Rename Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn(`Hybrid MKV-Collection Fehler: pkg=${pkg.name}, reason=${compactErrorText(err)}`);
|
||||||
|
}
|
||||||
|
})();
|
||||||
}
|
}
|
||||||
if (result.failed > 0) {
|
if (result.failed > 0) {
|
||||||
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
|
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
|
||||||
@ -6514,6 +6585,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
entry.updatedAt = updatedAt;
|
entry.updatedAt = updatedAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return result.extracted;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorText = String(error || "");
|
const errorText = String(error || "");
|
||||||
if (errorText.includes("aborted:extract")) {
|
if (errorText.includes("aborted:extract")) {
|
||||||
@ -6526,7 +6598,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
entry.updatedAt = abortAt;
|
entry.updatedAt = abortAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return;
|
return 0;
|
||||||
}
|
}
|
||||||
logger.warn(`Hybrid-Extract Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
logger.warn(`Hybrid-Extract Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
||||||
const errorAt = nowMs();
|
const errorAt = nowMs();
|
||||||
@ -6538,9 +6610,11 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
|
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
|
||||||
|
const handleStart = nowMs();
|
||||||
const pkg = this.session.packages[packageId];
|
const pkg = this.session.packages[packageId];
|
||||||
if (!pkg || pkg.cancelled) {
|
if (!pkg || pkg.cancelled) {
|
||||||
return;
|
return;
|
||||||
@ -6552,6 +6626,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
// Recover items whose file exists on disk but status was never set to "completed".
|
// Recover items whose file exists on disk but status was never set to "completed".
|
||||||
// Only recover items in idle states (queued/paused), never active ones (downloading/validating).
|
// Only recover items in idle states (queued/paused), never active ones (downloading/validating).
|
||||||
|
const recoveryStart = nowMs();
|
||||||
for (const item of items) {
|
for (const item of items) {
|
||||||
if (isFinishedStatus(item.status)) {
|
if (isFinishedStatus(item.status)) {
|
||||||
continue;
|
continue;
|
||||||
@ -6591,17 +6666,19 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const recoveryMs = nowMs() - recoveryStart;
|
||||||
const success = items.filter((item) => item.status === "completed").length;
|
const success = items.filter((item) => item.status === "completed").length;
|
||||||
const failed = items.filter((item) => item.status === "failed").length;
|
const failed = items.filter((item) => item.status === "failed").length;
|
||||||
const cancelled = items.filter((item) => item.status === "cancelled").length;
|
const cancelled = items.filter((item) => item.status === "cancelled").length;
|
||||||
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}`);
|
const setupMs = nowMs() - handleStart;
|
||||||
|
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}, setupMs=${setupMs}, recoveryMs=${recoveryMs}`);
|
||||||
|
|
||||||
const allDone = success + failed + cancelled >= items.length;
|
const allDone = success + failed + cancelled >= items.length;
|
||||||
|
|
||||||
if (!allDone && this.settings.hybridExtract && this.settings.autoExtract && failed === 0 && success > 0) {
|
if (!allDone && this.settings.hybridExtract && this.settings.autoExtract && failed === 0 && success > 0) {
|
||||||
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
||||||
this.emitState();
|
this.emitState();
|
||||||
await this.runHybridExtraction(packageId, pkg, items, signal);
|
const hybridExtracted = await this.runHybridExtraction(packageId, pkg, items, signal);
|
||||||
if (signal?.aborted) {
|
if (signal?.aborted) {
|
||||||
pkg.postProcessLabel = undefined;
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "queued" : "paused";
|
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "queued" : "paused";
|
||||||
@ -6617,6 +6694,12 @@ export class DownloadManager extends EventEmitter {
|
|||||||
if (!this.session.packages[packageId]) {
|
if (!this.session.packages[packageId]) {
|
||||||
return; // Package was fully cleaned up
|
return; // Package was fully cleaned up
|
||||||
}
|
}
|
||||||
|
// Self-requeue if we extracted something — more archive sets may have
|
||||||
|
// become ready while we were extracting (items that completed before
|
||||||
|
// this task started set the requeue flag once, which was already consumed).
|
||||||
|
if (hybridExtracted > 0) {
|
||||||
|
this.hybridExtractRequeue.add(packageId);
|
||||||
|
}
|
||||||
pkg.postProcessLabel = undefined;
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
pkg.status = (pkg.enabled && this.session.running && !this.session.paused) ? "downloading" : "queued";
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
@ -6633,6 +6716,7 @@ export class DownloadManager extends EventEmitter {
|
|||||||
|
|
||||||
const completedItems = items.filter((item) => item.status === "completed");
|
const completedItems = items.filter((item) => item.status === "completed");
|
||||||
const alreadyMarkedExtracted = completedItems.length > 0 && completedItems.every((item) => isExtractedLabel(item.fullStatus));
|
const alreadyMarkedExtracted = completedItems.length > 0 && completedItems.every((item) => isExtractedLabel(item.fullStatus));
|
||||||
|
let extractedCount = 0;
|
||||||
|
|
||||||
if (this.settings.autoExtract && failed === 0 && success > 0 && !alreadyMarkedExtracted) {
|
if (this.settings.autoExtract && failed === 0 && success > 0 && !alreadyMarkedExtracted) {
|
||||||
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
pkg.postProcessLabel = "Entpacken vorbereiten...";
|
||||||
@ -6690,9 +6774,9 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
}, extractTimeoutMs);
|
}, extractTimeoutMs);
|
||||||
try {
|
try {
|
||||||
// Track multiple active archives for parallel extraction
|
// Track archives for parallel extraction progress
|
||||||
const activeArchiveItemsMap = new Map<string, DownloadItem[]>();
|
const fullResolvedItems = new Map<string, DownloadItem[]>();
|
||||||
const archiveStartTimes = new Map<string, number>();
|
const fullStartTimes = new Map<string, number>();
|
||||||
|
|
||||||
const result = await extractPackageArchives({
|
const result = await extractPackageArchives({
|
||||||
packageDir: pkg.outputDir,
|
packageDir: pkg.outputDir,
|
||||||
@ -6704,8 +6788,11 @@ export class DownloadManager extends EventEmitter {
|
|||||||
passwordList: this.settings.archivePasswordList,
|
passwordList: this.settings.archivePasswordList,
|
||||||
signal: extractAbortController.signal,
|
signal: extractAbortController.signal,
|
||||||
packageId,
|
packageId,
|
||||||
|
skipPostCleanup: true,
|
||||||
maxParallel: this.settings.maxParallelExtract || 2,
|
maxParallel: this.settings.maxParallelExtract || 2,
|
||||||
extractCpuPriority: this.settings.extractCpuPriority,
|
// All downloads finished — use NORMAL OS priority so extraction runs at
|
||||||
|
// full speed (matching manual 7-Zip/WinRAR speed).
|
||||||
|
extractCpuPriority: "high",
|
||||||
onProgress: (progress) => {
|
onProgress: (progress) => {
|
||||||
if (progress.phase === "preparing") {
|
if (progress.phase === "preparing") {
|
||||||
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
pkg.postProcessLabel = progress.archiveName || "Vorbereiten...";
|
||||||
@ -6713,27 +6800,39 @@ export class DownloadManager extends EventEmitter {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (progress.phase === "done") {
|
if (progress.phase === "done") {
|
||||||
// Do NOT mark remaining archives as "Done" here — some may have
|
fullResolvedItems.clear();
|
||||||
// failed. The post-extraction code (result.failed check) will
|
fullStartTimes.clear();
|
||||||
// assign the correct label. Only clear the tracking maps.
|
|
||||||
activeArchiveItemsMap.clear();
|
|
||||||
archiveStartTimes.clear();
|
|
||||||
emitExtractStatus("Entpacken 100%", true);
|
emitExtractStatus("Entpacken 100%", true);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (progress.archiveName) {
|
if (progress.archiveName) {
|
||||||
// Resolve items for this archive if not yet tracked
|
// Resolve items for this archive if not yet tracked
|
||||||
if (!activeArchiveItemsMap.has(progress.archiveName)) {
|
if (!fullResolvedItems.has(progress.archiveName)) {
|
||||||
activeArchiveItemsMap.set(progress.archiveName, resolveArchiveItems(progress.archiveName));
|
const resolved = resolveArchiveItems(progress.archiveName);
|
||||||
archiveStartTimes.set(progress.archiveName, nowMs());
|
fullResolvedItems.set(progress.archiveName, resolved);
|
||||||
|
fullStartTimes.set(progress.archiveName, nowMs());
|
||||||
|
if (resolved.length === 0) {
|
||||||
|
logger.warn(`resolveArchiveItems (full): KEINE Items für archiveName="${progress.archiveName}", completedItems=${completedItems.length}, names=[${completedItems.map((i) => path.basename(i.targetPath || i.fileName || "?")).join(", ")}]`);
|
||||||
|
} else {
|
||||||
|
logger.info(`resolveArchiveItems (full): ${resolved.length} Items für archiveName="${progress.archiveName}"`);
|
||||||
|
const initLabel = `Entpacken 0% · ${progress.archiveName}`;
|
||||||
|
const initAt = nowMs();
|
||||||
|
for (const entry of resolved) {
|
||||||
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
|
entry.fullStatus = initLabel;
|
||||||
|
entry.updatedAt = initAt;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
emitExtractStatus(`Entpacken ${progress.percent}% · ${progress.archiveName}`, true);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const archiveItems = activeArchiveItemsMap.get(progress.archiveName)!;
|
const archiveItems = fullResolvedItems.get(progress.archiveName) || [];
|
||||||
|
|
||||||
// If archive is at 100%, mark its items as done and remove from active
|
// If archive is at 100%, mark its items as done and remove from active
|
||||||
if (Number(progress.archivePercent ?? 0) >= 100) {
|
if (Number(progress.archivePercent ?? 0) >= 100) {
|
||||||
const doneAt = nowMs();
|
const doneAt = nowMs();
|
||||||
const startedAt = archiveStartTimes.get(progress.archiveName) || doneAt;
|
const startedAt = fullStartTimes.get(progress.archiveName) || doneAt;
|
||||||
const doneLabel = formatExtractDone(doneAt - startedAt);
|
const doneLabel = formatExtractDone(doneAt - startedAt);
|
||||||
for (const entry of archiveItems) {
|
for (const entry of archiveItems) {
|
||||||
if (!isExtractedLabel(entry.fullStatus)) {
|
if (!isExtractedLabel(entry.fullStatus)) {
|
||||||
@ -6741,8 +6840,8 @@ export class DownloadManager extends EventEmitter {
|
|||||||
entry.updatedAt = doneAt;
|
entry.updatedAt = doneAt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
activeArchiveItemsMap.delete(progress.archiveName);
|
fullResolvedItems.delete(progress.archiveName);
|
||||||
archiveStartTimes.delete(progress.archiveName);
|
fullStartTimes.delete(progress.archiveName);
|
||||||
// Show transitional label while next archive initializes
|
// Show transitional label while next archive initializes
|
||||||
const done = progress.current + 1;
|
const done = progress.current + 1;
|
||||||
if (done < progress.total) {
|
if (done < progress.total) {
|
||||||
@ -6794,13 +6893,10 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
logger.info(`Post-Processing Entpacken Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}, lastError=${result.lastError || ""}`);
|
logger.info(`Post-Processing Entpacken Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}, lastError=${result.lastError || ""}`);
|
||||||
|
extractedCount = result.extracted;
|
||||||
|
|
||||||
// Auto-rename even when some archives failed — successfully extracted files still need renaming
|
// Auto-rename wird in runDeferredPostExtraction ausgeführt (im Hintergrund),
|
||||||
if (result.extracted > 0) {
|
// damit der Slot sofort freigegeben wird.
|
||||||
pkg.postProcessLabel = "Renaming...";
|
|
||||||
this.emitState();
|
|
||||||
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.failed > 0) {
|
if (result.failed > 0) {
|
||||||
const reason = compactErrorText(result.lastError || "Entpacken fehlgeschlagen");
|
const reason = compactErrorText(result.lastError || "Entpacken fehlgeschlagen");
|
||||||
@ -6901,20 +6997,6 @@ export class DownloadManager extends EventEmitter {
|
|||||||
this.recordPackageHistory(packageId, pkg, items);
|
this.recordPackageHistory(packageId, pkg, items);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
|
|
||||||
pkg.postProcessLabel = "Aufräumen...";
|
|
||||||
this.emitState();
|
|
||||||
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
|
|
||||||
if (removedArchives > 0) {
|
|
||||||
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
|
|
||||||
pkg.postProcessLabel = "Verschiebe MKVs...";
|
|
||||||
this.emitState();
|
|
||||||
await this.collectMkvFilesToLibrary(packageId, pkg);
|
|
||||||
}
|
|
||||||
if (this.runPackageIds.has(packageId)) {
|
if (this.runPackageIds.has(packageId)) {
|
||||||
if (pkg.status === "completed" || pkg.status === "failed") {
|
if (pkg.status === "completed" || pkg.status === "failed") {
|
||||||
this.runCompletedPackages.add(packageId);
|
this.runCompletedPackages.add(packageId);
|
||||||
@ -6924,9 +7006,137 @@ export class DownloadManager extends EventEmitter {
|
|||||||
}
|
}
|
||||||
pkg.postProcessLabel = undefined;
|
pkg.postProcessLabel = undefined;
|
||||||
pkg.updatedAt = nowMs();
|
pkg.updatedAt = nowMs();
|
||||||
logger.info(`Post-Processing Ende: pkg=${pkg.name}, status=${pkg.status}`);
|
logger.info(`Post-Processing Ende: pkg=${pkg.name}, status=${pkg.status} (deferred work wird im Hintergrund ausgeführt)`);
|
||||||
|
|
||||||
this.applyPackageDoneCleanup(packageId);
|
// Deferred post-extraction: Rename, MKV-Sammlung, Cleanup laufen im Hintergrund,
|
||||||
|
// damit der Post-Process-Slot sofort freigegeben wird und das nächste Pack
|
||||||
|
// ohne 10–15 Sekunden Pause entpacken kann.
|
||||||
|
void this.runDeferredPostExtraction(packageId, pkg, success, failed, alreadyMarkedExtracted, extractedCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs slow post-extraction work (rename, MKV collection, cleanup) in the background
|
||||||
|
* so the post-process slot is released immediately and the next pack can start unpacking.
|
||||||
|
*/
|
||||||
|
private async runDeferredPostExtraction(
|
||||||
|
packageId: string,
|
||||||
|
pkg: PackageEntry,
|
||||||
|
success: number,
|
||||||
|
failed: number,
|
||||||
|
alreadyMarkedExtracted: boolean,
|
||||||
|
extractedCount: number
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
// ── Nested extraction: extract archives found inside the extracted output ──
|
||||||
|
if (extractedCount > 0 && failed === 0 && this.settings.autoExtract) {
|
||||||
|
const nestedBlacklist = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
|
||||||
|
const nestedCandidates = (await findArchiveCandidates(pkg.extractDir))
|
||||||
|
.filter((p) => !nestedBlacklist.test(p));
|
||||||
|
if (nestedCandidates.length > 0) {
|
||||||
|
pkg.postProcessLabel = "Nested Entpacken...";
|
||||||
|
this.emitState();
|
||||||
|
logger.info(`Deferred Nested-Extraction: ${nestedCandidates.length} Archive in ${pkg.extractDir}`);
|
||||||
|
const nestedResult = await extractPackageArchives({
|
||||||
|
packageDir: pkg.extractDir,
|
||||||
|
targetDir: pkg.extractDir,
|
||||||
|
cleanupMode: this.settings.cleanupMode,
|
||||||
|
conflictMode: this.settings.extractConflictMode,
|
||||||
|
removeLinks: false,
|
||||||
|
removeSamples: false,
|
||||||
|
passwordList: this.settings.archivePasswordList,
|
||||||
|
packageId,
|
||||||
|
onlyArchives: new Set(nestedCandidates.map((p) => process.platform === "win32" ? path.resolve(p).toLowerCase() : path.resolve(p))),
|
||||||
|
maxParallel: this.settings.maxParallelExtract || 2,
|
||||||
|
extractCpuPriority: this.settings.extractCpuPriority,
|
||||||
|
});
|
||||||
|
extractedCount += nestedResult.extracted;
|
||||||
|
logger.info(`Deferred Nested-Extraction Ende: extracted=${nestedResult.extracted}, failed=${nestedResult.failed}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Auto-Rename ──
|
||||||
|
if (extractedCount > 0) {
|
||||||
|
pkg.postProcessLabel = "Renaming...";
|
||||||
|
this.emitState();
|
||||||
|
await this.autoRenameExtractedVideoFiles(pkg.extractDir, pkg);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Archive cleanup (source archives in outputDir) ──
|
||||||
|
if (extractedCount > 0 && failed === 0 && this.settings.cleanupMode !== "none") {
|
||||||
|
pkg.postProcessLabel = "Aufräumen...";
|
||||||
|
this.emitState();
|
||||||
|
const sourceAndTargetEqual = path.resolve(pkg.outputDir).toLowerCase() === path.resolve(pkg.extractDir).toLowerCase();
|
||||||
|
if (!sourceAndTargetEqual) {
|
||||||
|
const candidates = await findArchiveCandidates(pkg.outputDir);
|
||||||
|
if (candidates.length > 0) {
|
||||||
|
const removed = await cleanupArchives(candidates, this.settings.cleanupMode);
|
||||||
|
if (removed > 0) {
|
||||||
|
logger.info(`Deferred Archive-Cleanup: pkg=${pkg.name}, entfernt=${removed}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Hybrid archive cleanup (wenn bereits als extracted markiert) ──
|
||||||
|
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
|
||||||
|
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
|
||||||
|
if (removedArchives > 0) {
|
||||||
|
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Link/Sample artifact removal ──
|
||||||
|
if (extractedCount > 0 && failed === 0) {
|
||||||
|
if (this.settings.removeLinkFilesAfterExtract) {
|
||||||
|
const removedLinks = await removeDownloadLinkArtifacts(pkg.extractDir);
|
||||||
|
if (removedLinks > 0) {
|
||||||
|
logger.info(`Deferred Link-Cleanup: pkg=${pkg.name}, entfernt=${removedLinks}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.settings.removeSamplesAfterExtract) {
|
||||||
|
const removedSamples = await removeSampleArtifacts(pkg.extractDir);
|
||||||
|
if (removedSamples.files > 0 || removedSamples.dirs > 0) {
|
||||||
|
logger.info(`Deferred Sample-Cleanup: pkg=${pkg.name}, files=${removedSamples.files}, dirs=${removedSamples.dirs}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Empty directory tree removal ──
|
||||||
|
if (extractedCount > 0 && failed === 0 && this.settings.cleanupMode === "delete") {
|
||||||
|
if (!(await hasAnyFilesRecursive(pkg.outputDir))) {
|
||||||
|
const removedDirs = await removeEmptyDirectoryTree(pkg.outputDir);
|
||||||
|
if (removedDirs > 0) {
|
||||||
|
logger.info(`Deferred leere Download-Ordner entfernt: pkg=${pkg.name}, dirs=${removedDirs}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Resume state cleanup ──
|
||||||
|
if (extractedCount > 0 && failed === 0) {
|
||||||
|
await clearExtractResumeState(pkg.outputDir, packageId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── MKV collection ──
|
||||||
|
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
|
||||||
|
pkg.postProcessLabel = "Verschiebe MKVs...";
|
||||||
|
this.emitState();
|
||||||
|
await this.collectMkvFilesToLibrary(packageId, pkg);
|
||||||
|
}
|
||||||
|
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
|
pkg.updatedAt = nowMs();
|
||||||
|
this.persistSoon();
|
||||||
|
this.emitState();
|
||||||
|
|
||||||
|
this.applyPackageDoneCleanup(packageId);
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`Deferred Post-Extraction Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
|
||||||
|
} finally {
|
||||||
|
pkg.postProcessLabel = undefined;
|
||||||
|
pkg.updatedAt = nowMs();
|
||||||
|
this.persistSoon();
|
||||||
|
this.emitState();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private applyPackageDoneCleanup(packageId: string): void {
|
private applyPackageDoneCleanup(packageId: string): void {
|
||||||
|
|||||||
@ -1,7 +1,7 @@
|
|||||||
import fs from "node:fs";
|
import fs from "node:fs";
|
||||||
import path from "node:path";
|
import path from "node:path";
|
||||||
import os from "node:os";
|
import os from "node:os";
|
||||||
import { spawn, spawnSync } from "node:child_process";
|
import { spawn, spawnSync, type ChildProcess } from "node:child_process";
|
||||||
import AdmZip from "adm-zip";
|
import AdmZip from "adm-zip";
|
||||||
import { CleanupMode, ConflictMode } from "../shared/types";
|
import { CleanupMode, ConflictMode } from "../shared/types";
|
||||||
import { logger } from "./logger";
|
import { logger } from "./logger";
|
||||||
@ -600,8 +600,8 @@ function extractCpuBudgetFromPriority(priority?: string): number {
|
|||||||
|
|
||||||
function extractOsPriority(priority?: string): number {
|
function extractOsPriority(priority?: string): number {
|
||||||
switch (priority) {
|
switch (priority) {
|
||||||
case "high": return os.constants.priority.PRIORITY_BELOW_NORMAL;
|
case "high": return os.constants.priority.PRIORITY_NORMAL;
|
||||||
default: return os.constants.priority.PRIORITY_LOW;
|
default: return os.constants.priority.PRIORITY_BELOW_NORMAL;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -615,10 +615,15 @@ function extractCpuBudgetPercent(priority?: string): number {
|
|||||||
|
|
||||||
function extractorThreadSwitch(hybridMode = false, priority?: string): string {
|
function extractorThreadSwitch(hybridMode = false, priority?: string): string {
|
||||||
if (hybridMode) {
|
if (hybridMode) {
|
||||||
// 2 threads during hybrid extraction (download + extract simultaneously).
|
// Use half the CPU budget during hybrid extraction to leave headroom for
|
||||||
// JDownloader 2 uses in-process 7-Zip-JBinding which naturally limits throughput
|
// concurrent downloads. Falls back to at least 2 threads.
|
||||||
// to ~16 MB/s write. 2 UnRAR threads produce similar controlled disk load.
|
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
|
||||||
return "-mt2";
|
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
|
||||||
|
return `-mt${Math.floor(envValue)}`;
|
||||||
|
}
|
||||||
|
const cpuCount = Math.max(1, os.cpus().length || 1);
|
||||||
|
const hybridThreads = Math.max(2, Math.min(8, Math.floor(cpuCount / 2)));
|
||||||
|
return `-mt${hybridThreads}`;
|
||||||
}
|
}
|
||||||
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
|
const envValue = Number(process.env.RD_EXTRACT_THREADS ?? NaN);
|
||||||
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
|
if (Number.isFinite(envValue) && envValue >= 1 && envValue <= 32) {
|
||||||
@ -640,8 +645,8 @@ function lowerExtractProcessPriority(childPid: number | undefined, cpuPriority?:
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
// Lowers CPU scheduling priority so extraction doesn't starve other processes.
|
// Sets CPU scheduling priority for the extraction process.
|
||||||
// high → BELOW_NORMAL, middle/low → IDLE. I/O priority stays Normal (like JDownloader 2).
|
// high → NORMAL (full speed), default → BELOW_NORMAL. I/O priority stays Normal.
|
||||||
os.setPriority(pid, extractOsPriority(cpuPriority));
|
os.setPriority(pid, extractOsPriority(cpuPriority));
|
||||||
} catch {
|
} catch {
|
||||||
// ignore: priority lowering is best-effort
|
// ignore: priority lowering is best-effort
|
||||||
@ -983,6 +988,274 @@ function parseJvmLine(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Persistent JVM Daemon ──
|
||||||
|
// Keeps a single JVM process alive across multiple extraction requests,
|
||||||
|
// eliminating the ~5s JVM boot overhead per archive.
|
||||||
|
|
||||||
|
interface DaemonRequest {
|
||||||
|
resolve: (result: JvmExtractResult) => void;
|
||||||
|
onArchiveProgress?: (percent: number) => void;
|
||||||
|
signal?: AbortSignal;
|
||||||
|
timeoutMs?: number;
|
||||||
|
parseState: { bestPercent: number; usedPassword: string; backend: string; reportedError: string };
|
||||||
|
}
|
||||||
|
|
||||||
|
let daemonProcess: ChildProcess | null = null;
|
||||||
|
let daemonReady = false;
|
||||||
|
let daemonBusy = false;
|
||||||
|
let daemonCurrentRequest: DaemonRequest | null = null;
|
||||||
|
let daemonStdoutBuffer = "";
|
||||||
|
let daemonStderrBuffer = "";
|
||||||
|
let daemonOutput = "";
|
||||||
|
let daemonTimeoutId: NodeJS.Timeout | null = null;
|
||||||
|
let daemonAbortHandler: (() => void) | null = null;
|
||||||
|
let daemonLayout: JvmExtractorLayout | null = null;
|
||||||
|
|
||||||
|
export function shutdownDaemon(): void {
|
||||||
|
if (daemonProcess) {
|
||||||
|
try { daemonProcess.stdin?.end(); } catch { /* ignore */ }
|
||||||
|
try { killProcessTree(daemonProcess); } catch { /* ignore */ }
|
||||||
|
daemonProcess = null;
|
||||||
|
}
|
||||||
|
daemonReady = false;
|
||||||
|
daemonBusy = false;
|
||||||
|
daemonCurrentRequest = null;
|
||||||
|
daemonStdoutBuffer = "";
|
||||||
|
daemonStderrBuffer = "";
|
||||||
|
daemonOutput = "";
|
||||||
|
if (daemonTimeoutId) { clearTimeout(daemonTimeoutId); daemonTimeoutId = null; }
|
||||||
|
if (daemonAbortHandler) { daemonAbortHandler = null; }
|
||||||
|
daemonLayout = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function finishDaemonRequest(result: JvmExtractResult): void {
|
||||||
|
const req = daemonCurrentRequest;
|
||||||
|
if (!req) return;
|
||||||
|
daemonCurrentRequest = null;
|
||||||
|
daemonBusy = false;
|
||||||
|
daemonStdoutBuffer = "";
|
||||||
|
daemonStderrBuffer = "";
|
||||||
|
daemonOutput = "";
|
||||||
|
if (daemonTimeoutId) { clearTimeout(daemonTimeoutId); daemonTimeoutId = null; }
|
||||||
|
if (req.signal && daemonAbortHandler) {
|
||||||
|
req.signal.removeEventListener("abort", daemonAbortHandler);
|
||||||
|
daemonAbortHandler = null;
|
||||||
|
}
|
||||||
|
req.resolve(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleDaemonLine(line: string): void {
|
||||||
|
const trimmed = String(line || "").trim();
|
||||||
|
if (!trimmed) return;
|
||||||
|
|
||||||
|
// Check for daemon ready signal
|
||||||
|
if (trimmed === "RD_DAEMON_READY") {
|
||||||
|
daemonReady = true;
|
||||||
|
logger.info("JVM Daemon bereit (persistent)");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for request completion
|
||||||
|
if (trimmed.startsWith("RD_REQUEST_DONE ")) {
|
||||||
|
const code = parseInt(trimmed.slice("RD_REQUEST_DONE ".length).trim(), 10);
|
||||||
|
const req = daemonCurrentRequest;
|
||||||
|
if (!req) return;
|
||||||
|
|
||||||
|
if (code === 0) {
|
||||||
|
req.onArchiveProgress?.(100);
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: true, missingCommand: false, missingRuntime: false,
|
||||||
|
aborted: false, timedOut: false, errorText: "",
|
||||||
|
usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const message = cleanErrorText(req.parseState.reportedError || daemonOutput) || `Exit Code ${code}`;
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: false, missingCommand: false, missingRuntime: isJvmRuntimeMissingError(message),
|
||||||
|
aborted: false, timedOut: false, errorText: message,
|
||||||
|
usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Regular progress/status lines — delegate to parseJvmLine
|
||||||
|
if (daemonCurrentRequest) {
|
||||||
|
parseJvmLine(trimmed, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function startDaemon(layout: JvmExtractorLayout): boolean {
|
||||||
|
if (daemonProcess && daemonReady) return true;
|
||||||
|
shutdownDaemon();
|
||||||
|
|
||||||
|
const jvmTmpDir = path.join(os.tmpdir(), `rd-extract-daemon-${crypto.randomUUID()}`);
|
||||||
|
fs.mkdirSync(jvmTmpDir, { recursive: true });
|
||||||
|
|
||||||
|
const args = [
|
||||||
|
"-Dfile.encoding=UTF-8",
|
||||||
|
`-Djava.io.tmpdir=${jvmTmpDir}`,
|
||||||
|
"-Xms512m",
|
||||||
|
"-Xmx8g",
|
||||||
|
"-XX:+UseSerialGC",
|
||||||
|
"-cp",
|
||||||
|
layout.classPath,
|
||||||
|
JVM_EXTRACTOR_MAIN_CLASS,
|
||||||
|
"--daemon"
|
||||||
|
];
|
||||||
|
|
||||||
|
try {
|
||||||
|
const child = spawn(layout.javaCommand, args, {
|
||||||
|
windowsHide: true,
|
||||||
|
stdio: ["pipe", "pipe", "pipe"]
|
||||||
|
});
|
||||||
|
lowerExtractProcessPriority(child.pid, currentExtractCpuPriority);
|
||||||
|
daemonProcess = child;
|
||||||
|
daemonLayout = layout;
|
||||||
|
|
||||||
|
child.stdout!.on("data", (chunk) => {
|
||||||
|
const raw = String(chunk || "");
|
||||||
|
daemonOutput = appendLimited(daemonOutput, raw);
|
||||||
|
daemonStdoutBuffer += raw;
|
||||||
|
const lines = daemonStdoutBuffer.split(/\r?\n/);
|
||||||
|
daemonStdoutBuffer = lines.pop() || "";
|
||||||
|
for (const line of lines) {
|
||||||
|
handleDaemonLine(line);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
child.stderr!.on("data", (chunk) => {
|
||||||
|
const raw = String(chunk || "");
|
||||||
|
daemonOutput = appendLimited(daemonOutput, raw);
|
||||||
|
daemonStderrBuffer += raw;
|
||||||
|
const lines = daemonStderrBuffer.split(/\r?\n/);
|
||||||
|
daemonStderrBuffer = lines.pop() || "";
|
||||||
|
for (const line of lines) {
|
||||||
|
if (daemonCurrentRequest) {
|
||||||
|
parseJvmLine(line, daemonCurrentRequest.onArchiveProgress, daemonCurrentRequest.parseState);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
child.on("error", () => {
|
||||||
|
if (daemonCurrentRequest) {
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: false, missingCommand: true, missingRuntime: true,
|
||||||
|
aborted: false, timedOut: false, errorText: "Daemon process error",
|
||||||
|
usedPassword: "", backend: ""
|
||||||
|
});
|
||||||
|
}
|
||||||
|
shutdownDaemon();
|
||||||
|
});
|
||||||
|
|
||||||
|
child.on("close", () => {
|
||||||
|
if (daemonCurrentRequest) {
|
||||||
|
const req = daemonCurrentRequest;
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: false, missingCommand: false, missingRuntime: false,
|
||||||
|
aborted: false, timedOut: false,
|
||||||
|
errorText: cleanErrorText(req.parseState.reportedError || daemonOutput) || "Daemon process exited unexpectedly",
|
||||||
|
usedPassword: req.parseState.usedPassword, backend: req.parseState.backend
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Clean up tmp dir
|
||||||
|
fs.rm(jvmTmpDir, { recursive: true, force: true }, () => {});
|
||||||
|
daemonProcess = null;
|
||||||
|
daemonReady = false;
|
||||||
|
daemonBusy = false;
|
||||||
|
daemonLayout = null;
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`JVM Daemon gestartet (PID ${child.pid})`);
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`JVM Daemon Start fehlgeschlagen: ${String(error)}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isDaemonAvailable(layout: JvmExtractorLayout): boolean {
|
||||||
|
// Start daemon if not running yet
|
||||||
|
if (!daemonProcess || !daemonReady) {
|
||||||
|
startDaemon(layout);
|
||||||
|
}
|
||||||
|
return Boolean(daemonProcess && daemonReady && !daemonBusy);
|
||||||
|
}
|
||||||
|
|
||||||
|
function sendDaemonRequest(
|
||||||
|
archivePath: string,
|
||||||
|
targetDir: string,
|
||||||
|
conflictMode: ConflictMode,
|
||||||
|
passwordCandidates: string[],
|
||||||
|
onArchiveProgress?: (percent: number) => void,
|
||||||
|
signal?: AbortSignal,
|
||||||
|
timeoutMs?: number
|
||||||
|
): Promise<JvmExtractResult> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const mode = effectiveConflictMode(conflictMode);
|
||||||
|
const parseState = { bestPercent: 0, usedPassword: "", backend: "", reportedError: "" };
|
||||||
|
|
||||||
|
daemonBusy = true;
|
||||||
|
daemonOutput = "";
|
||||||
|
daemonCurrentRequest = { resolve, onArchiveProgress, signal, timeoutMs, parseState };
|
||||||
|
|
||||||
|
// Set up timeout
|
||||||
|
if (timeoutMs && timeoutMs > 0) {
|
||||||
|
daemonTimeoutId = setTimeout(() => {
|
||||||
|
// Timeout — kill the daemon and restart fresh for next request
|
||||||
|
const req = daemonCurrentRequest;
|
||||||
|
if (req) {
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: false, missingCommand: false, missingRuntime: false,
|
||||||
|
aborted: false, timedOut: true,
|
||||||
|
errorText: `Entpacken Timeout nach ${Math.ceil(timeoutMs / 1000)}s`,
|
||||||
|
usedPassword: parseState.usedPassword, backend: parseState.backend
|
||||||
|
});
|
||||||
|
}
|
||||||
|
shutdownDaemon();
|
||||||
|
}, timeoutMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set up abort handler
|
||||||
|
if (signal) {
|
||||||
|
daemonAbortHandler = () => {
|
||||||
|
const req = daemonCurrentRequest;
|
||||||
|
if (req) {
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: false, missingCommand: false, missingRuntime: false,
|
||||||
|
aborted: true, timedOut: false, errorText: "aborted:extract",
|
||||||
|
usedPassword: parseState.usedPassword, backend: parseState.backend
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Kill daemon on abort — cleaner than trying to interrupt mid-extraction
|
||||||
|
shutdownDaemon();
|
||||||
|
};
|
||||||
|
signal.addEventListener("abort", daemonAbortHandler, { once: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build and send JSON request
|
||||||
|
const jsonRequest = JSON.stringify({
|
||||||
|
archive: archivePath,
|
||||||
|
target: targetDir,
|
||||||
|
conflict: mode,
|
||||||
|
backend: "auto",
|
||||||
|
passwords: passwordCandidates
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
daemonProcess!.stdin!.write(jsonRequest + "\n");
|
||||||
|
} catch (error) {
|
||||||
|
finishDaemonRequest({
|
||||||
|
ok: false, missingCommand: false, missingRuntime: false,
|
||||||
|
aborted: false, timedOut: false,
|
||||||
|
errorText: `Daemon stdin write failed: ${String(error)}`,
|
||||||
|
usedPassword: "", backend: ""
|
||||||
|
});
|
||||||
|
shutdownDaemon();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
function runJvmExtractCommand(
|
function runJvmExtractCommand(
|
||||||
layout: JvmExtractorLayout,
|
layout: JvmExtractorLayout,
|
||||||
archivePath: string,
|
archivePath: string,
|
||||||
@ -1006,6 +1279,15 @@ function runJvmExtractCommand(
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Try persistent daemon first — saves ~5s JVM boot per archive
|
||||||
|
if (isDaemonAvailable(layout)) {
|
||||||
|
logger.info(`JVM Daemon: Sende Request für ${path.basename(archivePath)}`);
|
||||||
|
return sendDaemonRequest(archivePath, targetDir, conflictMode, passwordCandidates, onArchiveProgress, signal, timeoutMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: spawn a new JVM process (daemon busy or not available)
|
||||||
|
logger.info(`JVM Spawn: Neuer Prozess für ${path.basename(archivePath)}${daemonBusy ? " (Daemon busy)" : ""}`);
|
||||||
|
|
||||||
const mode = effectiveConflictMode(conflictMode);
|
const mode = effectiveConflictMode(conflictMode);
|
||||||
// Each JVM process needs its own temp dir so parallel SevenZipJBinding
|
// Each JVM process needs its own temp dir so parallel SevenZipJBinding
|
||||||
// instances don't fight over the same native DLL file lock.
|
// instances don't fight over the same native DLL file lock.
|
||||||
@ -1014,8 +1296,9 @@ function runJvmExtractCommand(
|
|||||||
const args = [
|
const args = [
|
||||||
"-Dfile.encoding=UTF-8",
|
"-Dfile.encoding=UTF-8",
|
||||||
`-Djava.io.tmpdir=${jvmTmpDir}`,
|
`-Djava.io.tmpdir=${jvmTmpDir}`,
|
||||||
"-Xms32m",
|
"-Xms512m",
|
||||||
"-Xmx512m",
|
"-Xmx8g",
|
||||||
|
"-XX:+UseSerialGC",
|
||||||
"-cp",
|
"-cp",
|
||||||
layout.classPath,
|
layout.classPath,
|
||||||
JVM_EXTRACTOR_MAIN_CLASS,
|
JVM_EXTRACTOR_MAIN_CLASS,
|
||||||
@ -1718,7 +2001,7 @@ export function collectArchiveCleanupTargets(sourceArchivePath: string, director
|
|||||||
return Array.from(targets);
|
return Array.from(targets);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): Promise<number> {
|
export async function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): Promise<number> {
|
||||||
if (cleanupMode === "none") {
|
if (cleanupMode === "none") {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
@ -1789,7 +2072,7 @@ async function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode):
|
|||||||
return removed;
|
return removed;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function hasAnyFilesRecursive(rootDir: string): Promise<boolean> {
|
export async function hasAnyFilesRecursive(rootDir: string): Promise<boolean> {
|
||||||
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
|
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
|
||||||
if (!rootExists) {
|
if (!rootExists) {
|
||||||
return false;
|
return false;
|
||||||
@ -1837,7 +2120,7 @@ async function hasAnyEntries(rootDir: string): Promise<boolean> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function removeEmptyDirectoryTree(rootDir: string): Promise<number> {
|
export async function removeEmptyDirectoryTree(rootDir: string): Promise<number> {
|
||||||
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
|
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
|
||||||
if (!rootExists) {
|
if (!rootExists) {
|
||||||
return 0;
|
return 0;
|
||||||
|
|||||||
@ -7,7 +7,7 @@ import { IPC_CHANNELS } from "../shared/ipc";
|
|||||||
import { getLogFilePath, logger } from "./logger";
|
import { getLogFilePath, logger } from "./logger";
|
||||||
import { APP_NAME } from "./constants";
|
import { APP_NAME } from "./constants";
|
||||||
import { extractHttpLinksFromText } from "./utils";
|
import { extractHttpLinksFromText } from "./utils";
|
||||||
import { cleanupStaleSubstDrives } from "./extractor";
|
import { cleanupStaleSubstDrives, shutdownDaemon } from "./extractor";
|
||||||
|
|
||||||
/* ── IPC validation helpers ────────────────────────────────────── */
|
/* ── IPC validation helpers ────────────────────────────────────── */
|
||||||
function validateString(value: unknown, name: string): string {
|
function validateString(value: unknown, name: string): string {
|
||||||
@ -515,6 +515,7 @@ app.on("before-quit", () => {
|
|||||||
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
|
if (updateQuitTimer) { clearTimeout(updateQuitTimer); updateQuitTimer = null; }
|
||||||
stopClipboardWatcher();
|
stopClipboardWatcher();
|
||||||
destroyTray();
|
destroyTray();
|
||||||
|
shutdownDaemon();
|
||||||
try {
|
try {
|
||||||
controller.shutdown();
|
controller.shutdown();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@ -65,6 +65,111 @@ describe.skipIf(!hasJavaRuntime() || !hasJvmExtractorRuntime())("extractor jvm b
|
|||||||
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
|
expect(fs.existsSync(path.join(targetDir, "episode.txt"))).toBe(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("emits progress callbacks with archiveName and percent", async () => {
|
||||||
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
|
|
||||||
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-progress-"));
|
||||||
|
tempDirs.push(root);
|
||||||
|
const packageDir = path.join(root, "pkg");
|
||||||
|
const targetDir = path.join(root, "out");
|
||||||
|
fs.mkdirSync(packageDir, { recursive: true });
|
||||||
|
|
||||||
|
// Create a ZIP with some content to trigger progress
|
||||||
|
const zipPath = path.join(packageDir, "progress-test.zip");
|
||||||
|
const zip = new AdmZip();
|
||||||
|
zip.addFile("file1.txt", Buffer.from("Hello World ".repeat(100)));
|
||||||
|
zip.addFile("file2.txt", Buffer.from("Another file ".repeat(100)));
|
||||||
|
zip.writeZip(zipPath);
|
||||||
|
|
||||||
|
const progressUpdates: Array<{
|
||||||
|
archiveName: string;
|
||||||
|
percent: number;
|
||||||
|
phase: string;
|
||||||
|
archivePercent?: number;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
const result = await extractPackageArchives({
|
||||||
|
packageDir,
|
||||||
|
targetDir,
|
||||||
|
cleanupMode: "none",
|
||||||
|
conflictMode: "overwrite",
|
||||||
|
removeLinks: false,
|
||||||
|
removeSamples: false,
|
||||||
|
onProgress: (update) => {
|
||||||
|
progressUpdates.push({
|
||||||
|
archiveName: update.archiveName,
|
||||||
|
percent: update.percent,
|
||||||
|
phase: update.phase,
|
||||||
|
archivePercent: update.archivePercent,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.extracted).toBe(1);
|
||||||
|
expect(result.failed).toBe(0);
|
||||||
|
|
||||||
|
// Should have at least preparing, extracting, and done phases
|
||||||
|
const phases = new Set(progressUpdates.map((u) => u.phase));
|
||||||
|
expect(phases.has("preparing")).toBe(true);
|
||||||
|
expect(phases.has("extracting")).toBe(true);
|
||||||
|
|
||||||
|
// Extracting phase should include the archive name
|
||||||
|
const extracting = progressUpdates.filter((u) => u.phase === "extracting" && u.archiveName === "progress-test.zip");
|
||||||
|
expect(extracting.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// Should end at 100%
|
||||||
|
const lastExtracting = extracting[extracting.length - 1];
|
||||||
|
expect(lastExtracting.archivePercent).toBe(100);
|
||||||
|
|
||||||
|
// Files should exist
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "file1.txt"))).toBe(true);
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "file2.txt"))).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("extracts multiple archives sequentially with progress for each", async () => {
|
||||||
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
|
|
||||||
|
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-jvm-multi-"));
|
||||||
|
tempDirs.push(root);
|
||||||
|
const packageDir = path.join(root, "pkg");
|
||||||
|
const targetDir = path.join(root, "out");
|
||||||
|
fs.mkdirSync(packageDir, { recursive: true });
|
||||||
|
|
||||||
|
// Create two separate ZIP archives
|
||||||
|
const zip1 = new AdmZip();
|
||||||
|
zip1.addFile("episode01.txt", Buffer.from("ep1 content"));
|
||||||
|
zip1.writeZip(path.join(packageDir, "archive1.zip"));
|
||||||
|
|
||||||
|
const zip2 = new AdmZip();
|
||||||
|
zip2.addFile("episode02.txt", Buffer.from("ep2 content"));
|
||||||
|
zip2.writeZip(path.join(packageDir, "archive2.zip"));
|
||||||
|
|
||||||
|
const archiveNames = new Set<string>();
|
||||||
|
|
||||||
|
const result = await extractPackageArchives({
|
||||||
|
packageDir,
|
||||||
|
targetDir,
|
||||||
|
cleanupMode: "none",
|
||||||
|
conflictMode: "overwrite",
|
||||||
|
removeLinks: false,
|
||||||
|
removeSamples: false,
|
||||||
|
onProgress: (update) => {
|
||||||
|
if (update.phase === "extracting" && update.archiveName) {
|
||||||
|
archiveNames.add(update.archiveName);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.extracted).toBe(2);
|
||||||
|
expect(result.failed).toBe(0);
|
||||||
|
// Both archive names should have appeared in progress
|
||||||
|
expect(archiveNames.has("archive1.zip")).toBe(true);
|
||||||
|
expect(archiveNames.has("archive2.zip")).toBe(true);
|
||||||
|
// Both files extracted
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "episode01.txt"))).toBe(true);
|
||||||
|
expect(fs.existsSync(path.join(targetDir, "episode02.txt"))).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
it("respects ask/skip conflict mode in jvm backend", async () => {
|
it("respects ask/skip conflict mode in jvm backend", async () => {
|
||||||
process.env.RD_EXTRACT_BACKEND = "jvm";
|
process.env.RD_EXTRACT_BACKEND = "jvm";
|
||||||
|
|
||||||
|
|||||||
188
tests/resolve-archive-items.test.ts
Normal file
188
tests/resolve-archive-items.test.ts
Normal file
@ -0,0 +1,188 @@
|
|||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
import { resolveArchiveItemsFromList } from "../src/main/download-manager";
|
||||||
|
|
||||||
|
type MinimalItem = {
|
||||||
|
targetPath?: string;
|
||||||
|
fileName?: string;
|
||||||
|
[key: string]: unknown;
|
||||||
|
};
|
||||||
|
|
||||||
|
function makeItems(names: string[]): MinimalItem[] {
|
||||||
|
return names.map((name) => ({
|
||||||
|
targetPath: `C:\\Downloads\\Package\\${name}`,
|
||||||
|
fileName: name,
|
||||||
|
id: name,
|
||||||
|
status: "completed",
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("resolveArchiveItemsFromList", () => {
|
||||||
|
// ── Multipart RAR (.partN.rar) ──
|
||||||
|
|
||||||
|
it("matches multipart .part1.rar archives", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Movie.part1.rar",
|
||||||
|
"Movie.part2.rar",
|
||||||
|
"Movie.part3.rar",
|
||||||
|
"Other.rar",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||||
|
expect(result).toHaveLength(3);
|
||||||
|
expect(result.map((i: any) => i.fileName)).toEqual([
|
||||||
|
"Movie.part1.rar",
|
||||||
|
"Movie.part2.rar",
|
||||||
|
"Movie.part3.rar",
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("matches multipart .part01.rar archives (zero-padded)", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Film.part01.rar",
|
||||||
|
"Film.part02.rar",
|
||||||
|
"Film.part10.rar",
|
||||||
|
"Unrelated.zip",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Film.part01.rar", items as any);
|
||||||
|
expect(result).toHaveLength(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Old-style RAR (.rar + .r00, .r01, etc.) ──
|
||||||
|
|
||||||
|
it("matches old-style .rar + .rNN volumes", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Archive.rar",
|
||||||
|
"Archive.r00",
|
||||||
|
"Archive.r01",
|
||||||
|
"Archive.r02",
|
||||||
|
"Other.zip",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||||
|
expect(result).toHaveLength(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Single RAR ──
|
||||||
|
|
||||||
|
it("matches a single .rar file", () => {
|
||||||
|
const items = makeItems(["SingleFile.rar", "Other.mkv"]);
|
||||||
|
const result = resolveArchiveItemsFromList("SingleFile.rar", items as any);
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
expect((result[0] as any).fileName).toBe("SingleFile.rar");
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Split ZIP ──
|
||||||
|
|
||||||
|
it("matches split .zip.NNN files", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Data.zip",
|
||||||
|
"Data.zip.001",
|
||||||
|
"Data.zip.002",
|
||||||
|
"Data.zip.003",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Data.zip.001", items as any);
|
||||||
|
expect(result).toHaveLength(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Split 7z ──
|
||||||
|
|
||||||
|
it("matches split .7z.NNN files", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"Backup.7z.001",
|
||||||
|
"Backup.7z.002",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("Backup.7z.001", items as any);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Generic .NNN splits ──
|
||||||
|
|
||||||
|
it("matches generic .NNN split files", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"video.001",
|
||||||
|
"video.002",
|
||||||
|
"video.003",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("video.001", items as any);
|
||||||
|
expect(result).toHaveLength(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Exact filename match ──
|
||||||
|
|
||||||
|
it("matches a single .zip by exact name", () => {
|
||||||
|
const items = makeItems(["myarchive.zip", "other.rar"]);
|
||||||
|
const result = resolveArchiveItemsFromList("myarchive.zip", items as any);
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
expect((result[0] as any).fileName).toBe("myarchive.zip");
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Case insensitivity ──
|
||||||
|
|
||||||
|
it("matches case-insensitively", () => {
|
||||||
|
const items = makeItems([
|
||||||
|
"MOVIE.PART1.RAR",
|
||||||
|
"MOVIE.PART2.RAR",
|
||||||
|
]);
|
||||||
|
const result = resolveArchiveItemsFromList("movie.part1.rar", items as any);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Stem-based fallback ──
|
||||||
|
|
||||||
|
it("uses stem-based fallback when exact patterns fail", () => {
|
||||||
|
// Simulate a debrid service that renames "Movie.part1.rar" to "Movie.part1_dl.rar"
|
||||||
|
// but the disk file is "Movie.part1.rar"
|
||||||
|
const items = makeItems([
|
||||||
|
"Movie.rar",
|
||||||
|
]);
|
||||||
|
// The archive on disk is "Movie.part1.rar" but there's no item matching the
|
||||||
|
// .partN pattern. The stem "movie" should match "Movie.rar" via fallback.
|
||||||
|
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||||
|
// stem fallback: "movie" starts with "movie" and ends with .rar
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Single item fallback ──
|
||||||
|
|
||||||
|
it("returns single archive item when no pattern matches", () => {
|
||||||
|
const items = makeItems(["totally-different-name.rar"]);
|
||||||
|
const result = resolveArchiveItemsFromList("Original.rar", items as any);
|
||||||
|
// Single item in list with archive extension → return it
|
||||||
|
expect(result).toHaveLength(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Empty when no match ──
|
||||||
|
|
||||||
|
it("returns empty when items have no archive extensions", () => {
|
||||||
|
const items = makeItems(["video.mkv", "subtitle.srt"]);
|
||||||
|
const result = resolveArchiveItemsFromList("Archive.rar", items as any);
|
||||||
|
expect(result).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Items without targetPath ──
|
||||||
|
|
||||||
|
it("falls back to fileName when targetPath is missing", () => {
|
||||||
|
const items = [
|
||||||
|
{ fileName: "Movie.part1.rar", id: "1", status: "completed" },
|
||||||
|
{ fileName: "Movie.part2.rar", id: "2", status: "completed" },
|
||||||
|
];
|
||||||
|
const result = resolveArchiveItemsFromList("Movie.part1.rar", items as any);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ── Multiple archives, should not cross-match ──

// Two distinct multi-part sets sharing a common prefix must resolve
// independently: each query returns only its own episode's volumes.
it("does not cross-match different archive groups", () => {
  const items = makeItems([
    "Episode.S01E01.part1.rar",
    "Episode.S01E01.part2.rar",
    "Episode.S01E02.part1.rar",
    "Episode.S01E02.part2.rar",
  ]);

  const result1 = resolveArchiveItemsFromList("Episode.S01E01.part1.rar", items as any);
  expect(result1).toHaveLength(2);
  expect(result1.every((i: any) => i.fileName.includes("S01E01"))).toBe(true);

  const result2 = resolveArchiveItemsFromList("Episode.S01E02.part1.rar", items as any);
  expect(result2).toHaveLength(2);
  expect(result2.every((i: any) => i.fileName.includes("S01E02"))).toBe(true);
});
});