Release v1.4.22 with incremental hybrid extraction (JDownloader-style)

Implements hybrid extraction: when a package has multiple episodes with
multi-part archives, completed archive sets are extracted immediately
while the rest of the package continues downloading. Uses the existing
hybridExtract setting (already in UI/types/storage).

Key changes:
- Export findArchiveCandidates/pathSetKey from extractor.ts
- Add onlyArchives/skipPostCleanup options to ExtractOptions
- Add findReadyArchiveSets to identify complete archive sets
- Add runHybridExtraction for incremental extraction passes
- Requeue logic in runPackagePostProcessing for new completions
- Resume state preserved across hybrid passes (no premature clear)
- Guard against extracting incomplete multi-part archives
- Correct abort/toggle handling during hybrid extraction
- Package toggle now also aborts active post-processing

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sucukdeluxe 2026-02-28 07:25:18 +01:00
parent d7162592e0
commit f70237f13d
3 changed files with 243 additions and 39 deletions

View File

@ -1,6 +1,6 @@
{
"name": "real-debrid-downloader",
"version": "1.4.21",
"version": "1.4.22",
"description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
"main": "build/main/main/main.js",
"author": "Sucukdeluxe",

View File

@ -20,7 +20,7 @@ import {
import { REQUEST_RETRIES, SAMPLE_VIDEO_EXTENSIONS } from "./constants";
import { cleanupCancelledPackageArtifactsAsync } from "./cleanup";
import { DebridService, MegaWebUnrestrictor } from "./debrid";
import { collectArchiveCleanupTargets, extractPackageArchives } from "./extractor";
import { collectArchiveCleanupTargets, extractPackageArchives, findArchiveCandidates } from "./extractor";
import { validateFileAgainstManifest } from "./integrity";
import { logger } from "./logger";
import { StoragePaths, saveSession, saveSessionAsync } from "./storage";
@ -319,6 +319,8 @@ export class DownloadManager extends EventEmitter {
private packagePostProcessAbortControllers = new Map<string, AbortController>();
private hybridExtractRequeue = new Set<string>();
private reservedTargetPaths = new Map<string, string>();
private claimedTargetPathByItem = new Map<string, string>();
@ -526,9 +528,13 @@ export class DownloadManager extends EventEmitter {
pkg.enabled = nextEnabled;
if (!nextEnabled) {
if (pkg.status === "downloading") {
if (pkg.status === "downloading" || pkg.status === "extracting") {
pkg.status = "paused";
}
const postProcessController = this.packagePostProcessAbortControllers.get(packageId);
if (postProcessController && !postProcessController.signal.aborted) {
postProcessController.abort("package_toggle");
}
for (const itemId of pkg.itemIds) {
const item = this.session.items[itemId];
if (!item) {
@ -627,6 +633,7 @@ export class DownloadManager extends EventEmitter {
this.itemContributedBytes.clear();
this.packagePostProcessTasks.clear();
this.packagePostProcessAbortControllers.clear();
this.hybridExtractRequeue.clear();
this.packagePostProcessQueue = Promise.resolve();
this.summary = null;
this.persistNow();
@ -1671,6 +1678,7 @@ export class DownloadManager extends EventEmitter {
private runPackagePostProcessing(packageId: string): Promise<void> {
const existing = this.packagePostProcessTasks.get(packageId);
if (existing) {
this.hybridExtractRequeue.add(packageId);
return existing;
}
@ -1690,6 +1698,11 @@ export class DownloadManager extends EventEmitter {
this.packagePostProcessAbortControllers.delete(packageId);
this.persistSoon();
this.emitState();
if (this.hybridExtractRequeue.delete(packageId)) {
void this.runPackagePostProcessing(packageId).catch((err) =>
logger.warn(`runPackagePostProcessing Fehler (hybridRequeue): ${compactErrorText(err)}`)
);
}
});
this.packagePostProcessTasks.set(packageId, task);
@ -1808,6 +1821,7 @@ export class DownloadManager extends EventEmitter {
this.session.packageOrder = this.session.packageOrder.filter((id) => id !== packageId);
this.runPackageIds.delete(packageId);
this.runCompletedPackages.delete(packageId);
this.hybridExtractRequeue.delete(packageId);
}
private async ensureScheduler(): Promise<void> {
@ -2961,6 +2975,169 @@ export class DownloadManager extends EventEmitter {
}
}
private findReadyArchiveSets(pkg: PackageEntry): Set<string> {
  // Identifies archive sets in the package whose every part on disk belongs to
  // a completed download item, so they can be extracted incrementally ("hybrid"
  // extraction) while the rest of the package is still downloading.
  // Returns the normalized (pathKey) entry-point paths of the ready sets.
  const ready = new Set<string>();
  if (!pkg.outputDir || !fs.existsSync(pkg.outputDir)) {
    return ready;
  }
  // Partition the package's items into completed vs. still-pending target paths.
  const completedPaths = new Set<string>();
  const pendingPaths = new Set<string>();
  for (const itemId of pkg.itemIds) {
    const item = this.session.items[itemId];
    if (!item) {
      continue;
    }
    if (item.status === "completed" && item.targetPath) {
      completedPaths.add(pathKey(item.targetPath));
    } else if (item.targetPath) {
      pendingPaths.add(pathKey(item.targetPath));
    }
  }
  if (completedPaths.size === 0) {
    return ready;
  }
  const candidates = findArchiveCandidates(pkg.outputDir);
  if (candidates.length === 0) {
    return ready;
  }
  let dirFiles: string[] | undefined;
  try {
    dirFiles = fs.readdirSync(pkg.outputDir, { withFileTypes: true })
      .filter((entry) => entry.isFile())
      .map((entry) => entry.name);
  } catch {
    // Directory vanished or is unreadable: report nothing ready instead of failing.
    return ready;
  }
  for (const candidate of candidates) {
    // Every sibling part already present on disk must stem from a completed item.
    const partsOnDisk = collectArchiveCleanupTargets(candidate, dirFiles);
    const allPartsCompleted = partsOnDisk.every((part) => completedPaths.has(pathKey(part)));
    if (!allPartsCompleted) {
      continue;
    }
    // Guard against parts that are scheduled but not yet on disk: extracting
    // now would operate on an incomplete multi-part set.
    // candidateStem is loop-invariant per candidate; hoisted out of the closure.
    const candidateStem = path.basename(candidate).toLowerCase();
    const hasUnstartedParts = [...pendingPaths].some((pendingPath) =>
      this.looksLikeArchivePart(path.basename(pendingPath).toLowerCase(), candidateStem)
    );
    if (hasUnstartedParts) {
      continue;
    }
    ready.add(pathKey(candidate));
  }
  return ready;
}
private looksLikeArchivePart(fileName: string, entryPointName: string): boolean {
  // Reports whether fileName is (or would be) a sibling part of the archive
  // set whose entry point is entryPointName. Both arguments are bare file
  // names; matching is case-insensitive. Unknown layouts return false.
  // Shared helper: escape regex metacharacters in a literal stem/prefix.
  const escapeRegex = (value: string): string => value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  // Modern RAR multi-part naming: name.part1.rar / name.part01.rar → name.partNN.rar
  const multipartMatch = entryPointName.match(/^(.*)\.part0*1\.rar$/i);
  if (multipartMatch) {
    const prefix = multipartMatch[1].toLowerCase();
    return new RegExp(`^${escapeRegex(prefix)}\\.part\\d+\\.rar$`, "i").test(fileName);
  }
  // Legacy RAR naming: name.rar with companion volumes name.r00, name.r01, …
  if (/\.rar$/i.test(entryPointName) && !/\.part\d+\.rar$/i.test(entryPointName)) {
    const stem = entryPointName.replace(/\.rar$/i, "").toLowerCase();
    return new RegExp(`^${escapeRegex(stem)}\\.r(ar|\\d{2})$`, "i").test(fileName);
  }
  // Split ZIP: name.zip.001 with companions name.zip and name.zip.NNN
  if (/\.zip\.001$/i.test(entryPointName)) {
    const stem = entryPointName.replace(/\.zip\.001$/i, "").toLowerCase();
    return new RegExp(`^${escapeRegex(stem)}\\.zip(\\.\\d{3})?$`, "i").test(fileName);
  }
  // Split 7-Zip: name.7z.001 with companions name.7z and name.7z.NNN
  if (/\.7z\.001$/i.test(entryPointName)) {
    const stem = entryPointName.replace(/\.7z\.001$/i, "").toLowerCase();
    return new RegExp(`^${escapeRegex(stem)}\\.7z(\\.\\d{3})?$`, "i").test(fileName);
  }
  return false;
}
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<void> {
  // Performs one incremental ("hybrid", JDownloader-style) extraction pass:
  // only archive sets whose every part has completed are extracted, while the
  // remaining package items keep downloading. Cleanup and resume-state
  // handling are deferred to the final full pass (skipPostCleanup below).
  // Errors other than a deliberate abort are logged and swallowed — the final
  // pass retries. Status restoration after an abort is the caller's job.
  const readyArchives = this.findReadyArchiveSets(pkg);
  if (readyArchives.size === 0) {
    logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
    return;
  }
  logger.info(`Hybrid-Extract Start: pkg=${pkg.name}, readyArchives=${readyArchives.size}`);
  // Surface the pass in the UI: package-level status plus per-item labels.
  pkg.status = "extracting";
  this.emitState();
  const completedItems = items.filter((item) => item.status === "completed");
  // Rewrites the fullStatus label of completed items (progress display).
  const updateExtractingStatus = (text: string): void => {
    const updatedAt = nowMs();
    for (const entry of completedItems) {
      // Items already marked as extracted keep their label.
      if (isExtractedLabel(entry.fullStatus)) {
        continue;
      }
      // Avoid churning updatedAt when the label is unchanged.
      if (entry.fullStatus === text) {
        continue;
      }
      entry.fullStatus = text;
      entry.updatedAt = updatedAt;
    }
  };
  updateExtractingStatus("Entpacken (hybrid) 0%");
  this.emitState();
  try {
    const result = await extractPackageArchives({
      packageDir: pkg.outputDir,
      targetDir: pkg.extractDir,
      cleanupMode: this.settings.cleanupMode,
      conflictMode: this.settings.extractConflictMode,
      removeLinks: false,
      removeSamples: false,
      passwordList: this.settings.archivePasswordList,
      signal,
      // Restrict this pass to the complete sets found above and skip the
      // post-extraction cleanup so the final pass can run it exactly once.
      onlyArchives: readyArchives,
      skipPostCleanup: true,
      onProgress: (progress) => {
        if (progress.phase === "done") {
          return;
        }
        const archive = progress.archiveName ? ` · ${progress.archiveName}` : "";
        const elapsed = progress.elapsedMs && progress.elapsedMs >= 1000
          ? ` · ${Math.floor(progress.elapsedMs / 1000)}s`
          : "";
        // Count the archive currently being unpacked as in-progress in the
        // N/M display, clamped so it never exceeds the total.
        const activeArchive = Number(progress.archivePercent ?? 0) > 0 ? 1 : 0;
        const currentDisplay = Math.max(0, Math.min(progress.total, progress.current + activeArchive));
        const label = `Entpacken (hybrid) ${progress.percent}% (${currentDisplay}/${progress.total})${archive}${elapsed}`;
        updateExtractingStatus(label);
        this.emitState();
      }
    });
    logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
    if (result.extracted > 0) {
      this.autoRenameExtractedVideoFiles(pkg.extractDir);
    }
    if (result.failed > 0) {
      // Non-fatal: failed archives are retried by the final full pass.
      logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
    }
    // Restore the per-item labels that this pass rewrote back to "done".
    const updatedAt = nowMs();
    for (const entry of completedItems) {
      if (/^Entpacken \(hybrid\)/i.test(entry.fullStatus || "")) {
        entry.fullStatus = `Fertig (${humanSize(entry.downloadedBytes)})`;
        entry.updatedAt = updatedAt;
      }
    }
  } catch (error) {
    const errorText = String(error || "");
    // A deliberate abort (pause/package toggle) is expected: log quietly and
    // return; the caller checks signal.aborted and restores pkg.status.
    if (errorText.includes("aborted:extract")) {
      logger.info(`Hybrid-Extract abgebrochen: pkg=${pkg.name}`);
      return;
    }
    logger.warn(`Hybrid-Extract Fehler: pkg=${pkg.name}, reason=${compactErrorText(error)}`);
  }
}
private async handlePackagePostProcessing(packageId: string, signal?: AbortSignal): Promise<void> {
const pkg = this.session.packages[packageId];
if (!pkg || pkg.cancelled) {
@ -2975,8 +3152,23 @@ export class DownloadManager extends EventEmitter {
const cancelled = items.filter((item) => item.status === "cancelled").length;
logger.info(`Post-Processing Start: pkg=${pkg.name}, success=${success}, failed=${failed}, cancelled=${cancelled}, autoExtract=${this.settings.autoExtract}`);
if (success + failed + cancelled < items.length) {
pkg.status = "downloading";
const allDone = success + failed + cancelled >= items.length;
if (!allDone && this.settings.hybridExtract && this.settings.autoExtract && failed === 0 && success > 0) {
await this.runHybridExtraction(packageId, pkg, items, signal);
if (signal?.aborted) {
pkg.status = pkg.enabled ? "queued" : "paused";
pkg.updatedAt = nowMs();
return;
}
pkg.status = pkg.enabled ? "downloading" : "paused";
pkg.updatedAt = nowMs();
this.emitState();
return;
}
if (!allDone) {
pkg.status = pkg.enabled ? "downloading" : "paused";
logger.info(`Post-Processing verschoben: pkg=${pkg.name}, noch offene items`);
return;
}

View File

@ -24,6 +24,8 @@ export interface ExtractOptions {
passwordList?: string;
signal?: AbortSignal;
onProgress?: (update: ExtractProgressUpdate) => void;
onlyArchives?: Set<string>;
skipPostCleanup?: boolean;
}
export interface ExtractProgressUpdate {
@ -43,7 +45,7 @@ const EXTRACT_PER_GIB_TIMEOUT_MS = 4 * 60 * 1000;
const EXTRACT_MAX_TIMEOUT_MS = 120 * 60 * 1000;
const ARCHIVE_SORT_COLLATOR = new Intl.Collator(undefined, { numeric: true, sensitivity: "base" });
function pathSetKey(filePath: string): string {
/**
 * Normalizes a file path for use as a Set/Map key.
 *
 * Windows file systems compare paths case-insensitively, so the path is
 * lower-cased there; on every other platform it is returned verbatim.
 */
export function pathSetKey(filePath: string): string {
  if (process.platform === "win32") {
    return filePath.toLowerCase();
  }
  return filePath;
}
@ -80,7 +82,7 @@ type ExtractResumeState = {
completedArchives: string[];
};
function findArchiveCandidates(packageDir: string): string[] {
export function findArchiveCandidates(packageDir: string): string[] {
if (!packageDir || !fs.existsSync(packageDir)) {
return [];
}
@ -831,9 +833,16 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
throw new Error("aborted:extract");
}
const candidates = findArchiveCandidates(options.packageDir);
logger.info(`Entpacken gestartet: packageDir=${options.packageDir}, targetDir=${options.targetDir}, archives=${candidates.length}, cleanupMode=${options.cleanupMode}, conflictMode=${options.conflictMode}`);
const allCandidates = findArchiveCandidates(options.packageDir);
const candidates = options.onlyArchives
? allCandidates.filter((archivePath) => {
const key = process.platform === "win32" ? path.resolve(archivePath).toLowerCase() : path.resolve(archivePath);
return options.onlyArchives!.has(key);
})
: allCandidates;
logger.info(`Entpacken gestartet: packageDir=${options.packageDir}, targetDir=${options.targetDir}, archives=${candidates.length}${options.onlyArchives ? ` (hybrid, gesamt=${allCandidates.length})` : ""}, cleanupMode=${options.cleanupMode}, conflictMode=${options.conflictMode}`);
if (candidates.length === 0) {
if (!options.onlyArchives) {
const existingResume = readExtractResumeState(options.packageDir);
if (existingResume.size > 0 && hasAnyEntries(options.targetDir)) {
clearExtractResumeState(options.packageDir);
@ -848,6 +857,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
return { extracted: existingResume.size, failed: 0, lastError: "" };
}
clearExtractResumeState(options.packageDir);
}
logger.info(`Entpacken übersprungen (keine Archive gefunden): ${options.packageDir}`);
return { extracted: 0, failed: 0, lastError: "" };
}
@ -856,9 +866,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
let passwordCandidates = archivePasswords(options.passwordList || "");
const resumeCompleted = readExtractResumeState(options.packageDir);
const resumeCompletedAtStart = resumeCompleted.size;
const candidateNames = new Set(candidates.map((archivePath) => path.basename(archivePath)));
const allCandidateNames = new Set(allCandidates.map((archivePath) => path.basename(archivePath)));
for (const archiveName of Array.from(resumeCompleted.values())) {
if (!candidateNames.has(archiveName)) {
if (!allCandidateNames.has(archiveName)) {
resumeCompleted.delete(archiveName);
}
}
@ -869,7 +879,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
const pendingCandidates = candidates.filter((archivePath) => !resumeCompleted.has(path.basename(archivePath)));
let extracted = resumeCompleted.size;
let extracted = candidates.length - pendingCandidates.length;
let failed = 0;
let lastError = "";
const extractedArchives = new Set<string>();
@ -996,6 +1006,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
extracted = 0;
logger.error(`Entpacken ohne neue Ausgabe erkannt: ${options.targetDir}. Cleanup wird NICHT ausgeführt.`);
} else {
if (!options.skipPostCleanup) {
const cleanupSources = failed === 0 ? candidates : Array.from(extractedArchives.values());
const removedArchives = cleanupArchives(cleanupSources, options.cleanupMode);
if (options.cleanupMode !== "none") {
@ -1009,19 +1020,20 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
const removedSamples = removeSampleArtifacts(options.targetDir);
logger.info(`Sample-Cleanup: ${removedSamples.files} Datei(en), ${removedSamples.dirs} Ordner entfernt`);
}
}
if (failed === 0 && resumeCompleted.size >= candidates.length) {
if (failed === 0 && resumeCompleted.size >= allCandidates.length) {
clearExtractResumeState(options.packageDir);
}
if (options.cleanupMode === "delete" && !hasAnyFilesRecursive(options.packageDir)) {
if (!options.skipPostCleanup && options.cleanupMode === "delete" && !hasAnyFilesRecursive(options.packageDir)) {
const removedDirs = removeEmptyDirectoryTree(options.packageDir);
if (removedDirs > 0) {
logger.info(`Leere Download-Ordner entfernt: ${removedDirs} (root=${options.packageDir})`);
}
}
}
} else {
} else if (!options.skipPostCleanup) {
try {
if (fs.existsSync(options.targetDir) && fs.readdirSync(options.targetDir).length === 0) {
fs.rmSync(options.targetDir, { recursive: true, force: true });