Release v1.5.75

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sucukdeluxe 2026-03-03 22:28:40 +01:00
parent 21dbf46f81
commit 0c058fa162
4 changed files with 142 additions and 32 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "real-debrid-downloader",
"version": "1.5.74",
"version": "1.5.75",
"description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
"main": "build/main/main/main.js",
"author": "Sucukdeluxe",

View File

@@ -4128,7 +4128,7 @@ export class DownloadManager extends EventEmitter {
item.totalBytes = null;
this.dropItemContribution(item.id);
}
let stallDelayMs = retryDelayWithJitter(active.stallRetries, 300);
let stallDelayMs = retryDelayWithJitter(active.stallRetries, 200);
// Respect provider cooldown
if (item.provider) {
const providerCooldown = this.getProviderCooldownRemaining(item.provider);
@@ -4188,7 +4188,7 @@
// ignore
}
this.releaseTargetPath(item.id);
this.queueRetry(item, active, 450, "Netzwerkfehler erkannt, frischer Retry");
this.queueRetry(item, active, 300, "Netzwerkfehler erkannt, frischer Retry");
item.lastError = "";
item.downloadedBytes = 0;
item.totalBytes = null;
@@ -4267,7 +4267,7 @@
if (active.genericErrorRetries < maxGenericErrorRetries) {
active.genericErrorRetries += 1;
item.retries += 1;
const genericDelayMs = retryDelayWithJitter(active.genericErrorRetries, 400);
const genericDelayMs = retryDelayWithJitter(active.genericErrorRetries, 250);
logger.warn(`Generic-Fehler: item=${item.fileName || item.id}, retry=${active.genericErrorRetries}/${retryDisplayLimit}, error=${errorText}, provider=${item.provider || "?"}`);
this.queueRetry(item, active, genericDelayMs, `Fehler erkannt, Auto-Retry ${active.genericErrorRetries}/${retryDisplayLimit}`);
item.lastError = errorText;
@@ -4356,7 +4356,7 @@
item.retries += 1;
item.fullStatus = `Verbindungsfehler, retry ${attempt}/${retryDisplayLimit}`;
this.emitState();
await sleep(retryDelayWithJitter(attempt, 300));
await sleep(retryDelayWithJitter(attempt, 200));
continue;
}
throw error;
@@ -4406,7 +4406,7 @@
this.emitState();
if (attempt < maxAttempts) {
item.retries += 1;
await sleep(retryDelayWithJitter(attempt, 280));
await sleep(retryDelayWithJitter(attempt, 200));
continue;
}
lastError = "HTTP 416";
@@ -4425,7 +4425,7 @@
item.retries += 1;
item.fullStatus = `Serverfehler ${response.status}, retry ${attempt}/${retryDisplayLimit}`;
this.emitState();
await sleep(retryDelayWithJitter(attempt, 350));
await sleep(retryDelayWithJitter(attempt, 250));
continue;
}
throw new Error(lastError);
@@ -4818,7 +4818,7 @@
item.retries += 1;
item.fullStatus = `Downloadfehler, retry ${attempt}/${retryDisplayLimit}`;
this.emitState();
await sleep(retryDelayWithJitter(attempt, 350));
await sleep(retryDelayWithJitter(attempt, 250));
continue;
}
throw new Error(lastError || "Download fehlgeschlagen");

View File

@@ -2031,33 +2031,49 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
await extractSingleArchive(archivePath);
}
} else {
// Parallel extraction pool: N workers pull from a shared queue
const queue = [...pendingCandidates];
let nextIdx = 0;
let abortError: Error | null = null;
const worker = async (): Promise<void> => {
while (nextIdx < queue.length && !abortError && !noExtractorEncountered) {
if (options.signal?.aborted) break;
const idx = nextIdx;
nextIdx += 1;
try {
await extractSingleArchive(queue[idx]);
} catch (error) {
if (isExtractAbortError(String(error))) {
abortError = error instanceof Error ? error : new Error(String(error));
break;
}
// Non-abort errors are already handled inside extractSingleArchive
}
// Password discovery: extract first archive serially to find the correct password,
// then run remaining archives in parallel with the promoted password order.
let parallelQueue = pendingCandidates;
if (passwordCandidates.length > 1 && pendingCandidates.length > 1) {
logger.info(`Passwort-Discovery: Extrahiere erstes Archiv seriell (${passwordCandidates.length} Passwort-Kandidaten)...`);
const first = pendingCandidates[0];
await extractSingleArchive(first);
parallelQueue = pendingCandidates.slice(1);
if (parallelQueue.length > 0) {
logger.info(`Passwort-Discovery abgeschlossen, starte parallele Extraktion für ${parallelQueue.length} verbleibende Archive`);
}
};
}
const workerCount = Math.min(maxParallel, pendingCandidates.length);
logger.info(`Parallele Extraktion: ${workerCount} gleichzeitige Worker für ${pendingCandidates.length} Archive`);
await Promise.all(Array.from({ length: workerCount }, () => worker()));
if (parallelQueue.length > 0 && !options.signal?.aborted && !noExtractorEncountered) {
// Parallel extraction pool: N workers pull from a shared queue
const queue = [...parallelQueue];
let nextIdx = 0;
let abortError: Error | null = null;
const worker = async (): Promise<void> => {
while (nextIdx < queue.length && !abortError && !noExtractorEncountered) {
if (options.signal?.aborted) break;
const idx = nextIdx;
nextIdx += 1;
try {
await extractSingleArchive(queue[idx]);
} catch (error) {
if (isExtractAbortError(String(error))) {
abortError = error instanceof Error ? error : new Error(String(error));
break;
}
// Non-abort errors are already handled inside extractSingleArchive
}
}
};
const workerCount = Math.min(maxParallel, parallelQueue.length);
logger.info(`Parallele Extraktion: ${workerCount} gleichzeitige Worker für ${parallelQueue.length} Archive`);
await Promise.all(Array.from({ length: workerCount }, () => worker()));
if (abortError) throw new Error("aborted:extract");
}
if (abortError) throw new Error("aborted:extract");
if (noExtractorEncountered) {
const remaining = candidates.length - (extracted + failed);
if (remaining > 0) {

View File

@@ -1002,4 +1002,98 @@ describe("extractor", () => {
expect(classifyExtractionError("something weird happened")).toBe("unknown");
});
});
describe("password discovery", () => {
  // Helper: write a small zip archive with a single text entry into `dir`.
  const writeZipArchive = (dir: string, zipName: string, entryName: string, content: string): void => {
    const archive = new AdmZip();
    archive.addFile(entryName, Buffer.from(content));
    archive.writeZip(path.join(dir, zipName));
  };

  it("extracts first archive serially before parallel pool when multiple passwords", async () => {
    const tmpRoot = fs.mkdtempSync(path.join(os.tmpdir(), "rd-pwdisc-"));
    tempDirs.push(tmpRoot);
    const srcDir = path.join(tmpRoot, "pkg");
    const destDir = path.join(tmpRoot, "out");
    fs.mkdirSync(srcDir, { recursive: true });
    // Three archives so both a discovery phase and a parallel phase occur.
    for (const zipName of ["ep01.zip", "ep02.zip", "ep03.zip"]) {
      writeZipArchive(srcDir, zipName, `${zipName}.txt`, zipName);
    }
    // Records each archive name the first time an "extracting" update names it.
    const progressOrder: string[] = [];
    const outcome = await extractPackageArchives({
      packageDir: srcDir,
      targetDir: destDir,
      cleanupMode: "none",
      conflictMode: "overwrite",
      removeLinks: false,
      removeSamples: false,
      maxParallel: 2,
      passwordList: "pw1|pw2|pw3",
      onProgress: (update) => {
        if (update.phase !== "extracting" || !update.archiveName) return;
        const last = progressOrder[progressOrder.length - 1];
        if (last !== update.archiveName) {
          progressOrder.push(update.archiveName);
        }
      }
    });
    expect(outcome.extracted).toBe(3);
    expect(outcome.failed).toBe(0);
    // The discovery phase extracts the naturally-first archive on its own.
    expect(progressOrder[0]).toBe("ep01.zip");
  });

  it("skips discovery when only one password candidate", async () => {
    const tmpRoot = fs.mkdtempSync(path.join(os.tmpdir(), "rd-pwdisc-skip-"));
    tempDirs.push(tmpRoot);
    const srcDir = path.join(tmpRoot, "pkg");
    const destDir = path.join(tmpRoot, "out");
    fs.mkdirSync(srcDir, { recursive: true });
    for (const zipName of ["a.zip", "b.zip"]) {
      writeZipArchive(srcDir, zipName, `${zipName}.txt`, zipName);
    }
    // No passwordList → only empty string → length=1 → no discovery phase
    const outcome = await extractPackageArchives({
      packageDir: srcDir,
      targetDir: destDir,
      cleanupMode: "none",
      conflictMode: "overwrite",
      removeLinks: false,
      removeSamples: false,
      maxParallel: 4
    });
    expect(outcome.extracted).toBe(2);
    expect(outcome.failed).toBe(0);
  });

  it("skips discovery when only one archive", async () => {
    const tmpRoot = fs.mkdtempSync(path.join(os.tmpdir(), "rd-pwdisc-one-"));
    tempDirs.push(tmpRoot);
    const srcDir = path.join(tmpRoot, "pkg");
    const destDir = path.join(tmpRoot, "out");
    fs.mkdirSync(srcDir, { recursive: true });
    writeZipArchive(srcDir, "only.zip", "single.txt", "single");
    const outcome = await extractPackageArchives({
      packageDir: srcDir,
      targetDir: destDir,
      cleanupMode: "none",
      conflictMode: "overwrite",
      removeLinks: false,
      removeSamples: false,
      maxParallel: 4,
      passwordList: "pw1|pw2|pw3"
    });
    expect(outcome.extracted).toBe(1);
    expect(outcome.failed).toBe(0);
  });
});
});