Async FS optimizations, exponential backoff, cleanup dedup and release v1.4.72
Some checks are pending
Build and Release / build (push) Waiting to run

- Convert all sync FS ops (existsSync, readdirSync, statSync, writeFileSync,
  rmSync, renameSync) to async equivalents across download-manager, extractor,
  cleanup, storage, and logger to prevent UI freezes
- Replace linear retry delays with exponential backoff + jitter to prevent
  retry storms with many parallel downloads
- Deduplicate resolveArchiveItems into single shared function
- Replace Array.shift() O(N) in bandwidth chart with slice-based trimming
- Make logger rotation async in the async flush path

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sucukdeluxe 2026-03-01 21:53:07 +01:00
parent 520ef91d2d
commit e485cf734b
8 changed files with 299 additions and 239 deletions

View File

@ -1,6 +1,6 @@
{
"name": "real-debrid-downloader",
"version": "1.4.71",
"version": "1.4.72",
"description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
"main": "build/main/main/main.js",
"author": "Sucukdeluxe",

View File

@ -68,13 +68,17 @@ export class AppController {
if (this.settings.autoResumeOnStart) {
const snapshot = this.manager.getSnapshot();
const hasPending = Object.values(snapshot.session.items).some((item) => item.status === "queued" || item.status === "reconnect_wait");
const hasConflicts = this.manager.getStartConflicts().length > 0;
if (hasPending && this.hasAnyProviderToken(this.settings) && !hasConflicts) {
if (hasPending) {
void this.manager.getStartConflicts().then((conflicts) => {
const hasConflicts = conflicts.length > 0;
if (this.hasAnyProviderToken(this.settings) && !hasConflicts) {
this.autoResumePending = true;
logger.info("Auto-Resume beim Start vorgemerkt");
} else if (hasPending && hasConflicts) {
} else if (hasConflicts) {
logger.info("Auto-Resume übersprungen: Start-Konflikte erkannt");
}
}).catch((err) => logger.warn(`getStartConflicts Fehler (constructor): ${String(err)}`));
}
}
}
@ -97,7 +101,7 @@ export class AppController {
handler(this.manager.getSnapshot());
if (this.autoResumePending) {
this.autoResumePending = false;
this.manager.start();
void this.manager.start().catch((err) => logger.warn(`Auto-Resume Start Fehler: ${String(err)}`));
logger.info("Auto-Resume beim Start aktiviert");
}
}
@ -174,7 +178,7 @@ export class AppController {
return result;
}
public getStartConflicts(): StartConflictEntry[] {
public async getStartConflicts(): Promise<StartConflictEntry[]> {
return this.manager.getStartConflicts();
}
@ -186,8 +190,8 @@ export class AppController {
this.manager.clearAll();
}
public start(): void {
this.manager.start();
public async start(): Promise<void> {
await this.manager.start();
}
public stop(): void {

View File

@ -88,8 +88,10 @@ export async function cleanupCancelledPackageArtifactsAsync(packageDir: string):
return removed;
}
export function removeDownloadLinkArtifacts(extractDir: string): number {
if (!fs.existsSync(extractDir)) {
export async function removeDownloadLinkArtifacts(extractDir: string): Promise<number> {
try {
await fs.promises.access(extractDir);
} catch {
return 0;
}
let removed = 0;
@ -97,7 +99,7 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
while (stack.length > 0) {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try { entries = fs.readdirSync(current, { withFileTypes: true }); } catch { continue; }
try { entries = await fs.promises.readdir(current, { withFileTypes: true }); } catch { continue; }
for (const entry of entries) {
const full = path.join(current, entry.name);
if (entry.isDirectory() && !entry.isSymbolicLink()) {
@ -114,9 +116,9 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
if (!shouldDelete && [".txt", ".html", ".htm", ".nfo"].includes(ext)) {
if (/[._\- ](links?|downloads?|urls?|dlc)([._\- ]|$)/i.test(name)) {
try {
const stat = fs.statSync(full);
const stat = await fs.promises.stat(full);
if (stat.size <= MAX_LINK_ARTIFACT_BYTES) {
const text = fs.readFileSync(full, "utf8");
const text = await fs.promises.readFile(full, "utf8");
shouldDelete = /https?:\/\//i.test(text);
}
} catch {
@ -127,7 +129,7 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
if (shouldDelete) {
try {
fs.rmSync(full, { force: true });
await fs.promises.rm(full, { force: true });
removed += 1;
} catch {
// ignore
@ -138,8 +140,10 @@ export function removeDownloadLinkArtifacts(extractDir: string): number {
return removed;
}
export function removeSampleArtifacts(extractDir: string): { files: number; dirs: number } {
if (!fs.existsSync(extractDir)) {
export async function removeSampleArtifacts(extractDir: string): Promise<{ files: number; dirs: number }> {
try {
await fs.promises.access(extractDir);
} catch {
return { files: 0, dirs: 0 };
}
@ -148,14 +152,14 @@ export function removeSampleArtifacts(extractDir: string): { files: number; dirs
const sampleDirs: string[] = [];
const stack = [extractDir];
const countFilesRecursive = (rootDir: string): number => {
const countFilesRecursive = async (rootDir: string): Promise<number> => {
let count = 0;
const dirs = [rootDir];
while (dirs.length > 0) {
const current = dirs.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -163,7 +167,7 @@ export function removeSampleArtifacts(extractDir: string): { files: number; dirs
const full = path.join(current, entry.name);
if (entry.isDirectory()) {
try {
const stat = fs.lstatSync(full);
const stat = await fs.promises.lstat(full);
if (stat.isSymbolicLink()) {
continue;
}
@ -182,7 +186,7 @@ export function removeSampleArtifacts(extractDir: string): { files: number; dirs
while (stack.length > 0) {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try { entries = fs.readdirSync(current, { withFileTypes: true }); } catch { continue; }
try { entries = await fs.promises.readdir(current, { withFileTypes: true }); } catch { continue; }
for (const entry of entries) {
const full = path.join(current, entry.name);
if (entry.isDirectory() || entry.isSymbolicLink()) {
@ -206,7 +210,7 @@ export function removeSampleArtifacts(extractDir: string): { files: number; dirs
if (isSampleVideo) {
try {
fs.rmSync(full, { force: true });
await fs.promises.rm(full, { force: true });
removedFiles += 1;
} catch {
// ignore
@ -218,14 +222,14 @@ export function removeSampleArtifacts(extractDir: string): { files: number; dirs
sampleDirs.sort((a, b) => b.length - a.length);
for (const dir of sampleDirs) {
try {
const stat = fs.lstatSync(dir);
const stat = await fs.promises.lstat(dir);
if (stat.isSymbolicLink()) {
fs.rmSync(dir, { force: true });
await fs.promises.rm(dir, { force: true });
removedDirs += 1;
continue;
}
const filesInDir = countFilesRecursive(dir);
fs.rmSync(dir, { recursive: true, force: true });
const filesInDir = await countFilesRecursive(dir);
await fs.promises.rm(dir, { recursive: true, force: true });
removedFiles += filesInDir;
removedDirs += 1;
} catch {

View File

@ -653,6 +653,39 @@ export function buildAutoRenameBaseNameFromFoldersWithOptions(
return null;
}
// Resolves which download items belong to a given archive entry point by
// comparing file basenames (not full paths), which sidesteps pathKey
// mismatches between queued items and on-disk archive names.
// - "foo.part1.rar" / "foo.part01.rar" selects every "foo.partN.rar" volume.
// - "foo.rar" selects "foo.rar" plus classic "foo.rNN" continuation volumes.
// - Anything else matches only the exact (case-insensitive) filename.
function resolveArchiveItemsFromList(archiveName: string, items: DownloadItem[]): DownloadItem[] {
  const escapeRegExp = (value: string): string => value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const basenameOf = (item: DownloadItem): string => path.basename(item.targetPath || item.fileName || "");
  const filterByPattern = (pattern: RegExp): DownloadItem[] =>
    items.filter((item) => pattern.test(basenameOf(item)));

  const lowered = archiveName.toLowerCase();

  // Multipart RAR set: the ".part0*1.rar" entry point covers all sibling parts.
  const multipart = /^(.*)\.part0*1\.rar$/.exec(lowered);
  if (multipart) {
    return filterByPattern(new RegExp(`^${escapeRegExp(multipart[1])}\\.part\\d+\\.rar$`, "i"));
  }

  // Classic RAR volume set: ".rar" plus numbered ".rNN"/".rNNN" volumes.
  const classicRar = /^(.*)\.rar$/.exec(lowered);
  if (classicRar) {
    return filterByPattern(new RegExp(`^${escapeRegExp(classicRar[1])}\\.r(ar|\\d{2,3})$`, "i"));
  }

  // Single-file archive: only the exact filename belongs to this entry.
  return items.filter((item) => basenameOf(item).toLowerCase() === lowered);
}
// Computes a retry delay using exponential backoff with multiplicative jitter,
// so many parallel downloads do not retry in lockstep (prevents retry storms).
// Growth factor is 1.5 with the exponent capped at 8 and the raw delay capped
// at 30s; the jitter keeps the result in [0.5, 1.0) of the capped delay.
function retryDelayWithJitter(attempt: number, baseMs: number): number {
  const MAX_DELAY_MS = 30000;
  const GROWTH_FACTOR = 1.5;
  const MAX_EXPONENT = 8;
  const exponent = Math.min(attempt - 1, MAX_EXPONENT);
  const uncapped = baseMs * GROWTH_FACTOR ** exponent;
  const ceiling = Math.min(uncapped, MAX_DELAY_MS);
  return Math.floor(ceiling * (0.5 + Math.random() / 2));
}
export class DownloadManager extends EventEmitter {
private settings: AppSettings;
@ -738,17 +771,17 @@ export class DownloadManager extends EventEmitter {
this.debridService = new DebridService(settings, { megaWebUnrestrict: options.megaWebUnrestrict });
this.applyOnStartCleanupPolicy();
this.normalizeSessionStatuses();
this.recoverRetryableItems("startup");
void this.recoverRetryableItems("startup").catch((err) => logger.warn(`recoverRetryableItems Fehler (startup): ${compactErrorText(err)}`));
this.recoverPostProcessingOnStartup();
this.resolveExistingQueuedOpaqueFilenames();
this.cleanupExistingExtractedArchives();
void this.cleanupExistingExtractedArchives().catch((err) => logger.warn(`cleanupExistingExtractedArchives Fehler (constructor): ${compactErrorText(err)}`));
}
public setSettings(next: AppSettings): void {
this.settings = next;
this.debridService.setSettings(next);
this.resolveExistingQueuedOpaqueFilenames();
this.cleanupExistingExtractedArchives();
void this.cleanupExistingExtractedArchives().catch((err) => logger.warn(`cleanupExistingExtractedArchives Fehler (setSettings): ${compactErrorText(err)}`));
this.emitState();
}
@ -1174,7 +1207,7 @@ export class DownloadManager extends EventEmitter {
return { addedPackages, addedLinks };
}
public getStartConflicts(): StartConflictEntry[] {
public async getStartConflicts(): Promise<StartConflictEntry[]> {
const hasFilesByExtractDir = new Map<string, boolean>();
const conflicts: StartConflictEntry[] = [];
for (const packageId of this.session.packageOrder) {
@ -1201,7 +1234,7 @@ export class DownloadManager extends EventEmitter {
const extractDirKey = pathKey(pkg.extractDir);
const hasExtractedFiles = hasFilesByExtractDir.has(extractDirKey)
? Boolean(hasFilesByExtractDir.get(extractDirKey))
: this.directoryHasAnyFiles(pkg.extractDir);
: await this.directoryHasAnyFiles(pkg.extractDir);
if (!hasFilesByExtractDir.has(extractDirKey)) {
hasFilesByExtractDir.set(extractDirKey, hasExtractedFiles);
}
@ -1446,7 +1479,7 @@ export class DownloadManager extends EventEmitter {
}
}
private cleanupExistingExtractedArchives(): void {
private async cleanupExistingExtractedArchives(): Promise<void> {
if (this.settings.cleanupMode === "none") {
return;
}
@ -1481,7 +1514,7 @@ export class DownloadManager extends EventEmitter {
const hasExtractMarker = items.some((item) => isExtractedLabel(item.fullStatus));
const extractDirIsUnique = (extractDirUsage.get(pathKey(pkg.extractDir)) || 0) === 1;
const hasExtractedOutput = extractDirIsUnique && this.directoryHasAnyFiles(pkg.extractDir);
const hasExtractedOutput = extractDirIsUnique && await this.directoryHasAnyFiles(pkg.extractDir);
if (!hasExtractMarker && !hasExtractedOutput) {
continue;
}
@ -1498,7 +1531,7 @@ export class DownloadManager extends EventEmitter {
let filesInDir = dirFilesCache.get(dir);
if (!filesInDir) {
try {
filesInDir = fs.readdirSync(dir, { withFileTypes: true })
filesInDir = (await fs.promises.readdir(dir, { withFileTypes: true }))
.filter((entry) => entry.isFile())
.map((entry) => entry.name);
} catch {
@ -1532,7 +1565,7 @@ export class DownloadManager extends EventEmitter {
let removed = 0;
for (const targetPath of targets) {
if (!fs.existsSync(targetPath)) {
if (!await this.existsAsync(targetPath)) {
continue;
}
try {
@ -1545,8 +1578,8 @@ export class DownloadManager extends EventEmitter {
if (removed > 0) {
logger.info(`Nachträgliches Archive-Cleanup für ${pkg.name}: ${removed} Datei(en) gelöscht`);
if (!this.directoryHasAnyFiles(pkg.outputDir)) {
const removedDirs = this.removeEmptyDirectoryTree(pkg.outputDir);
if (!await this.directoryHasAnyFiles(pkg.outputDir)) {
const removedDirs = await this.removeEmptyDirectoryTree(pkg.outputDir);
if (removedDirs > 0) {
logger.info(`Nachträgliches Cleanup entfernte leere Download-Ordner für ${pkg.name}: ${removedDirs}`);
}
@ -1561,8 +1594,13 @@ export class DownloadManager extends EventEmitter {
});
}
private directoryHasAnyFiles(rootDir: string): boolean {
if (!rootDir || !fs.existsSync(rootDir)) {
private async directoryHasAnyFiles(rootDir: string): Promise<boolean> {
if (!rootDir) {
return false;
}
try {
await fs.promises.access(rootDir);
} catch {
return false;
}
const deadline = nowMs() + 55;
@ -1576,7 +1614,7 @@ export class DownloadManager extends EventEmitter {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -1593,8 +1631,13 @@ export class DownloadManager extends EventEmitter {
return false;
}
private removeEmptyDirectoryTree(rootDir: string): number {
if (!rootDir || !fs.existsSync(rootDir)) {
private async removeEmptyDirectoryTree(rootDir: string): Promise<number> {
if (!rootDir) {
return 0;
}
try {
await fs.promises.access(rootDir);
} catch {
return 0;
}
@ -1604,7 +1647,7 @@ export class DownloadManager extends EventEmitter {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -1621,21 +1664,21 @@ export class DownloadManager extends EventEmitter {
let removed = 0;
for (const dirPath of dirs) {
try {
let entries = fs.readdirSync(dirPath, { withFileTypes: true });
let entries = await fs.promises.readdir(dirPath, { withFileTypes: true });
for (const entry of entries) {
if (!entry.isFile() || !isIgnorableEmptyDirFileName(entry.name)) {
continue;
}
try {
fs.rmSync(path.join(dirPath, entry.name), { force: true });
await fs.promises.rm(path.join(dirPath, entry.name), { force: true });
} catch {
// ignore and keep directory untouched
}
}
entries = fs.readdirSync(dirPath, { withFileTypes: true });
entries = await fs.promises.readdir(dirPath, { withFileTypes: true });
if (entries.length === 0) {
fs.rmdirSync(dirPath);
await fs.promises.rmdir(dirPath);
removed += 1;
}
} catch {
@ -1645,8 +1688,13 @@ export class DownloadManager extends EventEmitter {
return removed;
}
private collectFilesByExtensions(rootDir: string, extensions: Set<string>): string[] {
if (!rootDir || !fs.existsSync(rootDir) || extensions.size === 0) {
private async collectFilesByExtensions(rootDir: string, extensions: Set<string>): Promise<string[]> {
if (!rootDir || extensions.size === 0) {
return [];
}
try {
await fs.promises.access(rootDir);
} catch {
return [];
}
@ -1667,7 +1715,7 @@ export class DownloadManager extends EventEmitter {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -1692,23 +1740,24 @@ export class DownloadManager extends EventEmitter {
return files;
}
private collectVideoFiles(rootDir: string): string[] {
return this.collectFilesByExtensions(rootDir, SAMPLE_VIDEO_EXTENSIONS);
private async collectVideoFiles(rootDir: string): Promise<string[]> {
return await this.collectFilesByExtensions(rootDir, SAMPLE_VIDEO_EXTENSIONS);
}
private existsSyncSafe(filePath: string): boolean {
private async existsAsync(filePath: string): Promise<boolean> {
try {
return fs.existsSync(toWindowsLongPathIfNeeded(filePath));
await fs.promises.access(toWindowsLongPathIfNeeded(filePath));
return true;
} catch {
return false;
}
}
private renamePathWithExdevFallback(sourcePath: string, targetPath: string): void {
private async renamePathWithExdevFallback(sourcePath: string, targetPath: string): Promise<void> {
const sourceFsPath = toWindowsLongPathIfNeeded(sourcePath);
const targetFsPath = toWindowsLongPathIfNeeded(targetPath);
try {
fs.renameSync(sourceFsPath, targetFsPath);
await fs.promises.rename(sourceFsPath, targetFsPath);
return;
} catch (error) {
const code = error && typeof error === "object" && "code" in error
@ -1719,8 +1768,8 @@ export class DownloadManager extends EventEmitter {
}
}
fs.copyFileSync(sourceFsPath, targetFsPath);
fs.rmSync(sourceFsPath, { force: true });
await fs.promises.copyFile(sourceFsPath, targetFsPath);
await fs.promises.rm(sourceFsPath, { force: true });
}
private isPathLengthRenameError(error: unknown): boolean {
@ -1827,12 +1876,12 @@ export class DownloadManager extends EventEmitter {
return next;
}
private autoRenameExtractedVideoFiles(extractDir: string): number {
private async autoRenameExtractedVideoFiles(extractDir: string): Promise<number> {
if (!this.settings.autoRename4sf4sj) {
return 0;
}
const videoFiles = this.collectVideoFiles(extractDir);
const videoFiles = await this.collectVideoFiles(extractDir);
let renamed = 0;
for (const sourcePath of videoFiles) {
@ -1882,13 +1931,13 @@ export class DownloadManager extends EventEmitter {
if (pathKey(targetPath) === pathKey(sourcePath)) {
continue;
}
if (this.existsSyncSafe(targetPath)) {
if (await this.existsAsync(targetPath)) {
logger.warn(`Auto-Rename übersprungen (Ziel existiert): ${targetPath}`);
continue;
}
try {
this.renamePathWithExdevFallback(sourcePath, targetPath);
await this.renamePathWithExdevFallback(sourcePath, targetPath);
renamed += 1;
} catch (error) {
if (this.isPathLengthRenameError(error)) {
@ -1902,11 +1951,11 @@ export class DownloadManager extends EventEmitter {
if (!fallbackPath || pathKey(fallbackPath) === pathKey(sourcePath)) {
continue;
}
if (this.existsSyncSafe(fallbackPath)) {
if (await this.existsAsync(fallbackPath)) {
continue;
}
try {
this.renamePathWithExdevFallback(sourcePath, fallbackPath);
await this.renamePathWithExdevFallback(sourcePath, fallbackPath);
logger.warn(`Auto-Rename Fallback wegen Pfadlänge: ${sourceName} -> ${path.basename(fallbackPath)}`);
renamed += 1;
fallbackRenamed = true;
@ -1929,12 +1978,12 @@ export class DownloadManager extends EventEmitter {
return renamed;
}
private moveFileWithExdevFallback(sourcePath: string, targetPath: string): void {
this.renamePathWithExdevFallback(sourcePath, targetPath);
private async moveFileWithExdevFallback(sourcePath: string, targetPath: string): Promise<void> {
await this.renamePathWithExdevFallback(sourcePath, targetPath);
}
private cleanupNonMkvResidualFiles(rootDir: string, targetDir: string): number {
if (!rootDir || !this.existsSyncSafe(rootDir)) {
private async cleanupNonMkvResidualFiles(rootDir: string, targetDir: string): Promise<number> {
if (!rootDir || !await this.existsAsync(rootDir)) {
return 0;
}
@ -1944,7 +1993,7 @@ export class DownloadManager extends EventEmitter {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -1966,7 +2015,7 @@ export class DownloadManager extends EventEmitter {
continue;
}
try {
fs.rmSync(toWindowsLongPathIfNeeded(fullPath), { force: true });
await fs.promises.rm(toWindowsLongPathIfNeeded(fullPath), { force: true });
removed += 1;
} catch {
// ignore and keep file
@ -1977,11 +2026,11 @@ export class DownloadManager extends EventEmitter {
return removed;
}
private cleanupRemainingArchiveArtifacts(packageDir: string): number {
private async cleanupRemainingArchiveArtifacts(packageDir: string): Promise<number> {
if (this.settings.cleanupMode === "none") {
return 0;
}
const candidates = findArchiveCandidates(packageDir);
const candidates = await findArchiveCandidates(packageDir);
if (candidates.length === 0) {
return 0;
}
@ -1994,7 +2043,7 @@ export class DownloadManager extends EventEmitter {
let filesInDir = dirFilesCache.get(dir);
if (!filesInDir) {
try {
filesInDir = fs.readdirSync(dir, { withFileTypes: true })
filesInDir = (await fs.promises.readdir(dir, { withFileTypes: true }))
.filter((entry) => entry.isFile())
.map((entry) => entry.name);
} catch {
@ -2009,21 +2058,21 @@ export class DownloadManager extends EventEmitter {
for (const targetPath of targets) {
try {
if (!this.existsSyncSafe(targetPath)) {
if (!await this.existsAsync(targetPath)) {
continue;
}
if (this.settings.cleanupMode === "trash") {
const parsed = path.parse(targetPath);
const trashDir = path.join(parsed.dir, ".rd-trash");
fs.mkdirSync(trashDir, { recursive: true });
await fs.promises.mkdir(trashDir, { recursive: true });
let moved = false;
for (let index = 0; index <= 1000; index += 1) {
const suffix = index === 0 ? "" : `-${index}`;
const candidate = path.join(trashDir, `${parsed.base}.${Date.now()}${suffix}`);
if (this.existsSyncSafe(candidate)) {
if (await this.existsAsync(candidate)) {
continue;
}
this.renamePathWithExdevFallback(targetPath, candidate);
await this.renamePathWithExdevFallback(targetPath, candidate);
moved = true;
break;
}
@ -2032,7 +2081,7 @@ export class DownloadManager extends EventEmitter {
}
continue;
}
fs.rmSync(toWindowsLongPathIfNeeded(targetPath), { force: true });
await fs.promises.rm(toWindowsLongPathIfNeeded(targetPath), { force: true });
removed += 1;
} catch {
// ignore
@ -2042,7 +2091,7 @@ export class DownloadManager extends EventEmitter {
return removed;
}
private buildUniqueFlattenTargetPath(targetDir: string, sourcePath: string, reserved: Set<string>): string {
private async buildUniqueFlattenTargetPath(targetDir: string, sourcePath: string, reserved: Set<string>): Promise<string> {
const parsed = path.parse(path.basename(sourcePath));
const extension = parsed.ext || ".mkv";
const baseName = sanitizeFilename(parsed.name || "video");
@ -2058,7 +2107,7 @@ export class DownloadManager extends EventEmitter {
index += 1;
continue;
}
if (!fs.existsSync(candidatePath)) {
if (!await this.existsAsync(candidatePath)) {
reserved.add(candidateKey);
return candidatePath;
}
@ -2066,7 +2115,7 @@ export class DownloadManager extends EventEmitter {
}
}
private collectMkvFilesToLibrary(packageId: string, pkg: PackageEntry): void {
private async collectMkvFilesToLibrary(packageId: string, pkg: PackageEntry): Promise<void> {
if (!this.settings.collectMkvToLibrary) {
return;
}
@ -2078,19 +2127,19 @@ export class DownloadManager extends EventEmitter {
return;
}
const targetDir = path.resolve(targetDirRaw);
if (!fs.existsSync(sourceDir)) {
if (!await this.existsAsync(sourceDir)) {
logger.info(`MKV-Sammelordner: pkg=${pkg.name}, Quelle fehlt (${sourceDir})`);
return;
}
try {
fs.mkdirSync(targetDir, { recursive: true });
await fs.promises.mkdir(targetDir, { recursive: true });
} catch (error) {
logger.warn(`MKV-Sammelordner konnte nicht erstellt werden: pkg=${pkg.name}, dir=${targetDir}, reason=${compactErrorText(error)}`);
return;
}
const mkvFiles = this.collectFilesByExtensions(sourceDir, new Set([".mkv"]));
const mkvFiles = await this.collectFilesByExtensions(sourceDir, new Set([".mkv"]));
if (mkvFiles.length === 0) {
logger.info(`MKV-Sammelordner: pkg=${pkg.name}, keine MKV gefunden`);
return;
@ -2106,14 +2155,14 @@ export class DownloadManager extends EventEmitter {
skipped += 1;
continue;
}
const targetPath = this.buildUniqueFlattenTargetPath(targetDir, sourcePath, reservedTargets);
const targetPath = await this.buildUniqueFlattenTargetPath(targetDir, sourcePath, reservedTargets);
if (pathKey(sourcePath) === pathKey(targetPath)) {
skipped += 1;
continue;
}
try {
this.moveFileWithExdevFallback(sourcePath, targetPath);
await this.moveFileWithExdevFallback(sourcePath, targetPath);
moved += 1;
} catch (error) {
failed += 1;
@ -2121,12 +2170,12 @@ export class DownloadManager extends EventEmitter {
}
}
if (moved > 0 && fs.existsSync(sourceDir)) {
const removedResidual = this.cleanupNonMkvResidualFiles(sourceDir, targetDir);
if (moved > 0 && await this.existsAsync(sourceDir)) {
const removedResidual = await this.cleanupNonMkvResidualFiles(sourceDir, targetDir);
if (removedResidual > 0) {
logger.info(`MKV-Sammelordner entfernte Restdateien: pkg=${pkg.name}, entfernt=${removedResidual}`);
}
const removedDirs = this.removeEmptyDirectoryTree(sourceDir);
const removedDirs = await this.removeEmptyDirectoryTree(sourceDir);
if (removedDirs > 0) {
logger.info(`MKV-Sammelordner entfernte leere Ordner: pkg=${pkg.name}, entfernt=${removedDirs}`);
}
@ -2178,12 +2227,12 @@ export class DownloadManager extends EventEmitter {
});
}
public start(): void {
public async start(): Promise<void> {
if (this.session.running) {
return;
}
const recoveredItems = this.recoverRetryableItems("start");
const recoveredItems = await this.recoverRetryableItems("start");
let recoveredStoppedItems = 0;
for (const item of Object.values(this.session.items)) {
@ -3615,7 +3664,7 @@ export class DownloadManager extends EventEmitter {
item.retries += 1;
item.fullStatus = `Verbindungsfehler, retry ${attempt}/${retryDisplayLimit}`;
this.emitState();
await sleep(300 * attempt);
await sleep(retryDelayWithJitter(attempt, 300));
continue;
}
throw error;
@ -3640,7 +3689,7 @@ export class DownloadManager extends EventEmitter {
}
try {
fs.rmSync(effectiveTargetPath, { force: true });
await fs.promises.rm(effectiveTargetPath, { force: true });
} catch {
// ignore
}
@ -3654,7 +3703,7 @@ export class DownloadManager extends EventEmitter {
this.emitState();
if (attempt < maxAttempts) {
item.retries += 1;
await sleep(280 * attempt);
await sleep(retryDelayWithJitter(attempt, 280));
continue;
}
lastError = "HTTP 416";
@ -3673,7 +3722,7 @@ export class DownloadManager extends EventEmitter {
item.retries += 1;
item.fullStatus = `Serverfehler ${response.status}, retry ${attempt}/${retryDisplayLimit}`;
this.emitState();
await sleep(350 * attempt);
await sleep(retryDelayWithJitter(attempt, 350));
continue;
}
throw new Error(lastError);
@ -3720,11 +3769,11 @@ export class DownloadManager extends EventEmitter {
this.itemContributedBytes.set(active.itemId, 0);
}
if (existingBytes > 0) {
fs.rmSync(effectiveTargetPath, { force: true });
await fs.promises.rm(effectiveTargetPath, { force: true });
}
}
fs.mkdirSync(path.dirname(effectiveTargetPath), { recursive: true });
await fs.promises.mkdir(path.dirname(effectiveTargetPath), { recursive: true });
const stream = fs.createWriteStream(effectiveTargetPath, { flags: writeMode });
let written = writeMode === "a" ? existingBytes : 0;
let windowBytes = 0;
@ -4004,7 +4053,7 @@ export class DownloadManager extends EventEmitter {
item.retries += 1;
item.fullStatus = `Downloadfehler, retry ${attempt}/${retryDisplayLimit}`;
this.emitState();
await sleep(350 * attempt);
await sleep(retryDelayWithJitter(attempt, 350));
continue;
}
throw new Error(lastError || "Download fehlgeschlagen");
@ -4014,7 +4063,7 @@ export class DownloadManager extends EventEmitter {
throw new Error(lastError || "Download fehlgeschlagen");
}
private recoverRetryableItems(trigger: "startup" | "start"): number {
private async recoverRetryableItems(trigger: "startup" | "start"): Promise<number> {
let recovered = 0;
const touchedPackages = new Set<string>();
const configuredRetryLimit = normalizeRetryLimit(this.settings.retryLimit);
@ -4033,7 +4082,7 @@ export class DownloadManager extends EventEmitter {
}
const is416Failure = this.isHttp416Failure(item);
const hasZeroByteArchive = this.hasZeroByteArchiveArtifact(item);
const hasZeroByteArchive = await this.hasZeroByteArchiveArtifact(item);
if (item.status === "failed") {
if (!is416Failure && !hasZeroByteArchive && item.retries >= maxAutoRetryFailures) {
@ -4112,18 +4161,19 @@ export class DownloadManager extends EventEmitter {
return /(^|\D)416(\D|$)/.test(text);
}
private hasZeroByteArchiveArtifact(item: DownloadItem): boolean {
private async hasZeroByteArchiveArtifact(item: DownloadItem): Promise<boolean> {
const targetPath = String(item.targetPath || "").trim();
const archiveCandidate = isArchiveLikePath(targetPath || item.fileName);
if (!archiveCandidate) {
return false;
}
if (targetPath && fs.existsSync(targetPath)) {
if (targetPath) {
try {
return fs.statSync(targetPath).size <= 0;
const stat = await fs.promises.stat(targetPath);
return stat.size <= 0;
} catch {
return false;
// file does not exist
}
}
@ -4319,9 +4369,14 @@ export class DownloadManager extends EventEmitter {
await this.applyGlobalSpeedLimit(chunkBytes, bytesPerSecond, signal);
}
private findReadyArchiveSets(pkg: PackageEntry): Set<string> {
private async findReadyArchiveSets(pkg: PackageEntry): Promise<Set<string>> {
const ready = new Set<string>();
if (!pkg.outputDir || !fs.existsSync(pkg.outputDir)) {
if (!pkg.outputDir) {
return ready;
}
try {
await fs.promises.access(pkg.outputDir);
} catch {
return ready;
}
@ -4342,14 +4397,14 @@ export class DownloadManager extends EventEmitter {
return ready;
}
const candidates = findArchiveCandidates(pkg.outputDir);
const candidates = await findArchiveCandidates(pkg.outputDir);
if (candidates.length === 0) {
return ready;
}
let dirFiles: string[] | undefined;
try {
dirFiles = fs.readdirSync(pkg.outputDir, { withFileTypes: true })
dirFiles = (await fs.promises.readdir(pkg.outputDir, { withFileTypes: true }))
.filter((entry) => entry.isFile())
.map((entry) => entry.name);
} catch {
@ -4401,7 +4456,7 @@ export class DownloadManager extends EventEmitter {
}
private async runHybridExtraction(packageId: string, pkg: PackageEntry, items: DownloadItem[], signal?: AbortSignal): Promise<void> {
const readyArchives = this.findReadyArchiveSets(pkg);
const readyArchives = await this.findReadyArchiveSets(pkg);
if (readyArchives.size === 0) {
logger.info(`Hybrid-Extract: pkg=${pkg.name}, keine fertigen Archive-Sets`);
return;
@ -4417,7 +4472,7 @@ export class DownloadManager extends EventEmitter {
const hybridItemPaths = new Set<string>();
let dirFiles: string[] | undefined;
try {
dirFiles = fs.readdirSync(pkg.outputDir, { withFileTypes: true })
dirFiles = (await fs.promises.readdir(pkg.outputDir, { withFileTypes: true }))
.filter((entry) => entry.isFile())
.map((entry) => entry.name);
} catch { /* ignore */ }
@ -4432,25 +4487,8 @@ export class DownloadManager extends EventEmitter {
item.targetPath && hybridItemPaths.has(pathKey(item.targetPath))
);
// Resolve items belonging to a specific archive entry point by filename pattern matching.
// This avoids pathKey mismatches by comparing basenames directly.
const resolveArchiveItems = (archiveName: string): DownloadItem[] => {
const entryLower = archiveName.toLowerCase();
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
if (multipartMatch) {
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
return hybridItems.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
// Single-file archive: match only that exact file
return hybridItems.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
return name === entryLower;
});
};
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
resolveArchiveItemsFromList(archiveName, hybridItems);
let currentArchiveItems: DownloadItem[] = hybridItems;
const updateExtractingStatus = (text: string): void => {
@ -4532,7 +4570,7 @@ export class DownloadManager extends EventEmitter {
logger.info(`Hybrid-Extract Ende: pkg=${pkg.name}, extracted=${result.extracted}, failed=${result.failed}`);
if (result.extracted > 0) {
this.autoRenameExtractedVideoFiles(pkg.extractDir);
await this.autoRenameExtractedVideoFiles(pkg.extractDir);
}
if (result.failed > 0) {
logger.warn(`Hybrid-Extract: ${result.failed} Archive fehlgeschlagen, wird beim finalen Durchlauf erneut versucht`);
@ -4608,33 +4646,8 @@ export class DownloadManager extends EventEmitter {
pkg.status = "extracting";
this.emitState();
// Resolve items belonging to a specific archive entry point by filename pattern matching
const resolveArchiveItems = (archiveName: string): DownloadItem[] => {
const entryLower = archiveName.toLowerCase();
const multipartMatch = entryLower.match(/^(.*)\.part0*1\.rar$/);
if (multipartMatch) {
const prefix = multipartMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i");
return completedItems.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
// Single-file archive or non-multipart RAR: match based on archive stem
const rarMatch = entryLower.match(/^(.*)\.rar$/);
if (rarMatch) {
const stem = rarMatch[1].replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(`^${stem}\\.r(ar|\\d{2,3})$`, "i");
return completedItems.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "");
return pattern.test(name);
});
}
return completedItems.filter((item) => {
const name = path.basename(item.targetPath || item.fileName || "").toLowerCase();
return name === entryLower;
});
};
const resolveArchiveItems = (archiveName: string): DownloadItem[] =>
resolveArchiveItemsFromList(archiveName, completedItems);
let currentArchiveItems: DownloadItem[] = completedItems;
const updateExtractingStatus = (text: string): void => {
@ -4747,11 +4760,11 @@ export class DownloadManager extends EventEmitter {
}
pkg.status = "failed";
} else {
const hasExtractedOutput = this.directoryHasAnyFiles(pkg.extractDir);
const hasExtractedOutput = await this.directoryHasAnyFiles(pkg.extractDir);
if (result.extracted > 0 || hasExtractedOutput) {
this.autoRenameExtractedVideoFiles(pkg.extractDir);
await this.autoRenameExtractedVideoFiles(pkg.extractDir);
}
const sourceExists = fs.existsSync(pkg.outputDir);
const sourceExists = await this.existsAsync(pkg.outputDir);
let finalStatusText = "";
if (result.extracted > 0 || hasExtractedOutput) {
@ -4821,14 +4834,14 @@ export class DownloadManager extends EventEmitter {
}
if (this.settings.autoExtract && alreadyMarkedExtracted && failed === 0 && success > 0 && this.settings.cleanupMode !== "none") {
const removedArchives = this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
const removedArchives = await this.cleanupRemainingArchiveArtifacts(pkg.outputDir);
if (removedArchives > 0) {
logger.info(`Hybrid-Post-Cleanup entfernte Archive: pkg=${pkg.name}, entfernt=${removedArchives}`);
}
}
if (success > 0 && (pkg.status === "completed" || pkg.status === "failed")) {
this.collectMkvFilesToLibrary(packageId, pkg);
await this.collectMkvFilesToLibrary(packageId, pkg);
}
if (this.runPackageIds.has(packageId)) {
if (pkg.status === "completed") {

View File

@ -102,14 +102,19 @@ type ExtractResumeState = {
completedArchives: string[];
};
export function findArchiveCandidates(packageDir: string): string[] {
if (!packageDir || !fs.existsSync(packageDir)) {
export async function findArchiveCandidates(packageDir: string): Promise<string[]> {
if (!packageDir) {
return [];
}
try {
await fs.promises.access(packageDir);
} catch {
return [];
}
let files: string[] = [];
try {
files = fs.readdirSync(packageDir, { withFileTypes: true })
files = (await fs.promises.readdir(packageDir, { withFileTypes: true }))
.filter((entry) => entry.isFile())
.map((entry) => path.join(packageDir, entry.name));
} catch {
@ -204,28 +209,28 @@ function parseProgressPercent(chunk: string): number | null {
return latest;
}
function shouldPreferExternalZip(archivePath: string): boolean {
async function shouldPreferExternalZip(archivePath: string): Promise<boolean> {
try {
const stat = fs.statSync(archivePath);
const stat = await fs.promises.stat(archivePath);
return stat.size >= 64 * 1024 * 1024;
} catch {
return true;
}
}
function computeExtractTimeoutMs(archivePath: string): number {
async function computeExtractTimeoutMs(archivePath: string): Promise<number> {
try {
const relatedFiles = collectArchiveCleanupTargets(archivePath);
let totalBytes = 0;
for (const filePath of relatedFiles) {
try {
totalBytes += fs.statSync(filePath).size;
totalBytes += (await fs.promises.stat(filePath)).size;
} catch {
// ignore missing parts
}
}
if (totalBytes <= 0) {
totalBytes = fs.statSync(archivePath).size;
totalBytes = (await fs.promises.stat(archivePath)).size;
}
const gib = totalBytes / (1024 * 1024 * 1024);
const dynamicMs = EXTRACT_BASE_TIMEOUT_MS + Math.floor(gib * EXTRACT_PER_GIB_TIMEOUT_MS);
@ -242,13 +247,15 @@ function extractProgressFilePath(packageDir: string, packageId?: string): string
return path.join(packageDir, EXTRACT_PROGRESS_FILE);
}
function readExtractResumeState(packageDir: string, packageId?: string): Set<string> {
async function readExtractResumeState(packageDir: string, packageId?: string): Promise<Set<string>> {
const progressPath = extractProgressFilePath(packageDir, packageId);
if (!fs.existsSync(progressPath)) {
try {
await fs.promises.access(progressPath);
} catch {
return new Set<string>();
}
try {
const payload = JSON.parse(fs.readFileSync(progressPath, "utf8")) as Partial<ExtractResumeState>;
const payload = JSON.parse(await fs.promises.readFile(progressPath, "utf8")) as Partial<ExtractResumeState>;
const names = Array.isArray(payload.completedArchives) ? payload.completedArchives : [];
return new Set(names.map((value) => archiveNameKey(String(value || "").trim())).filter(Boolean));
} catch {
@ -256,24 +263,24 @@ function readExtractResumeState(packageDir: string, packageId?: string): Set<str
}
}
function writeExtractResumeState(packageDir: string, completedArchives: Set<string>, packageId?: string): void {
async function writeExtractResumeState(packageDir: string, completedArchives: Set<string>, packageId?: string): Promise<void> {
try {
fs.mkdirSync(packageDir, { recursive: true });
await fs.promises.mkdir(packageDir, { recursive: true });
const progressPath = extractProgressFilePath(packageDir, packageId);
const payload: ExtractResumeState = {
completedArchives: Array.from(completedArchives)
.map((name) => archiveNameKey(name))
.sort((a, b) => a.localeCompare(b))
};
fs.writeFileSync(progressPath, JSON.stringify(payload, null, 2), "utf8");
await fs.promises.writeFile(progressPath, JSON.stringify(payload, null, 2), "utf8");
} catch (error) {
logger.warn(`ExtractResumeState schreiben fehlgeschlagen: ${String(error)}`);
}
}
function clearExtractResumeState(packageDir: string, packageId?: string): void {
async function clearExtractResumeState(packageDir: string, packageId?: string): Promise<void> {
try {
fs.rmSync(extractProgressFilePath(packageDir, packageId), { force: true });
await fs.promises.rm(extractProgressFilePath(packageDir, packageId), { force: true });
} catch {
// ignore
}
@ -670,9 +677,9 @@ async function runExternalExtract(
const command = await resolveExtractorCommand();
const passwords = passwordCandidates;
let lastError = "";
const timeoutMs = computeExtractTimeoutMs(archivePath);
const timeoutMs = await computeExtractTimeoutMs(archivePath);
fs.mkdirSync(targetDir, { recursive: true });
await fs.promises.mkdir(targetDir, { recursive: true });
let announcedStart = false;
let bestPercent = 0;
@ -766,7 +773,7 @@ function shouldFallbackToExternalZip(error: unknown): boolean {
return true;
}
function extractZipArchive(archivePath: string, targetDir: string, conflictMode: ConflictMode, signal?: AbortSignal): void {
async function extractZipArchive(archivePath: string, targetDir: string, conflictMode: ConflictMode, signal?: AbortSignal): Promise<void> {
const mode = effectiveConflictMode(conflictMode);
const memoryLimitBytes = zipEntryMemoryLimitBytes();
const zip = new AdmZip(archivePath);
@ -785,7 +792,7 @@ function extractZipArchive(archivePath: string, targetDir: string, conflictMode:
continue;
}
if (entry.isDirectory) {
fs.mkdirSync(baseOutputPath, { recursive: true });
await fs.promises.mkdir(baseOutputPath, { recursive: true });
continue;
}
@ -825,11 +832,12 @@ function extractZipArchive(archivePath: string, targetDir: string, conflictMode:
let outputPath = baseOutputPath;
let outputKey = pathSetKey(outputPath);
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
// TOCTOU note: There is a small race between existsSync and writeFileSync below.
await fs.promises.mkdir(path.dirname(outputPath), { recursive: true });
// TOCTOU note: There is a small race between access and writeFile below.
// This is acceptable here because zip extraction is single-threaded and we need
// the exists check to implement skip/rename conflict resolution semantics.
if (usedOutputs.has(outputKey) || fs.existsSync(outputPath)) {
const outputExists = usedOutputs.has(outputKey) || await fs.promises.access(outputPath).then(() => true, () => false);
if (outputExists) {
if (mode === "skip") {
continue;
}
@ -842,7 +850,7 @@ function extractZipArchive(archivePath: string, targetDir: string, conflictMode:
while (n <= 10000) {
candidate = path.join(parsed.dir, `${parsed.name} (${n})${parsed.ext}`);
candidateKey = pathSetKey(candidate);
if (!usedOutputs.has(candidateKey) && !fs.existsSync(candidate)) {
if (!usedOutputs.has(candidateKey) && !(await fs.promises.access(candidate).then(() => true, () => false))) {
break;
}
n += 1;
@ -871,7 +879,7 @@ function extractZipArchive(archivePath: string, targetDir: string, conflictMode:
if (data.length > Math.max(uncompressedSize, compressedSize) * 20) {
throw new Error(`ZIP-Eintrag verdächtig groß nach Entpacken (${entry.entryName})`);
}
fs.writeFileSync(outputPath, data);
await fs.promises.writeFile(outputPath, data);
usedOutputs.add(outputKey);
}
}
@ -951,7 +959,7 @@ export function collectArchiveCleanupTargets(sourceArchivePath: string, director
return Array.from(targets);
}
function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): number {
async function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): Promise<number> {
if (cleanupMode === "none") {
return 0;
}
@ -963,7 +971,7 @@ function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): numbe
let filesInDir = dirFilesCache.get(dir);
if (!filesInDir) {
try {
filesInDir = fs.readdirSync(dir, { withFileTypes: true })
filesInDir = (await fs.promises.readdir(dir, { withFileTypes: true }))
.filter((entry) => entry.isFile())
.map((entry) => entry.name);
} catch {
@ -979,17 +987,18 @@ function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): numbe
let removed = 0;
const moveToTrashLike = (filePath: string): boolean => {
const moveToTrashLike = async (filePath: string): Promise<boolean> => {
try {
const parsed = path.parse(filePath);
const trashDir = path.join(parsed.dir, ".rd-trash");
fs.mkdirSync(trashDir, { recursive: true });
await fs.promises.mkdir(trashDir, { recursive: true });
let index = 0;
while (index <= 10000) {
const suffix = index === 0 ? "" : `-${index}`;
const candidate = path.join(trashDir, `${parsed.base}.${Date.now()}${suffix}`);
if (!fs.existsSync(candidate)) {
fs.renameSync(filePath, candidate);
const candidateExists = await fs.promises.access(candidate).then(() => true, () => false);
if (!candidateExists) {
await fs.promises.rename(filePath, candidate);
return true;
}
index += 1;
@ -1002,16 +1011,17 @@ function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): numbe
for (const filePath of targets) {
try {
if (!fs.existsSync(filePath)) {
const fileExists = await fs.promises.access(filePath).then(() => true, () => false);
if (!fileExists) {
continue;
}
if (cleanupMode === "trash") {
if (moveToTrashLike(filePath)) {
if (await moveToTrashLike(filePath)) {
removed += 1;
}
continue;
}
fs.rmSync(filePath, { force: true });
await fs.promises.rm(filePath, { force: true });
removed += 1;
} catch {
// ignore
@ -1020,8 +1030,9 @@ function cleanupArchives(sourceFiles: string[], cleanupMode: CleanupMode): numbe
return removed;
}
function hasAnyFilesRecursive(rootDir: string): boolean {
if (!fs.existsSync(rootDir)) {
async function hasAnyFilesRecursive(rootDir: string): Promise<boolean> {
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
if (!rootExists) {
return false;
}
const deadline = Date.now() + 220;
@ -1035,7 +1046,7 @@ function hasAnyFilesRecursive(rootDir: string): boolean {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -1052,19 +1063,24 @@ function hasAnyFilesRecursive(rootDir: string): boolean {
return false;
}
function hasAnyEntries(rootDir: string): boolean {
if (!rootDir || !fs.existsSync(rootDir)) {
async function hasAnyEntries(rootDir: string): Promise<boolean> {
if (!rootDir) {
return false;
}
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
if (!rootExists) {
return false;
}
try {
return fs.readdirSync(rootDir).length > 0;
return (await fs.promises.readdir(rootDir)).length > 0;
} catch {
return false;
}
}
function removeEmptyDirectoryTree(rootDir: string): number {
if (!fs.existsSync(rootDir)) {
async function removeEmptyDirectoryTree(rootDir: string): Promise<number> {
const rootExists = await fs.promises.access(rootDir).then(() => true, () => false);
if (!rootExists) {
return 0;
}
@ -1074,7 +1090,7 @@ function removeEmptyDirectoryTree(rootDir: string): number {
const current = stack.pop() as string;
let entries: fs.Dirent[] = [];
try {
entries = fs.readdirSync(current, { withFileTypes: true });
entries = await fs.promises.readdir(current, { withFileTypes: true });
} catch {
continue;
}
@ -1091,9 +1107,9 @@ function removeEmptyDirectoryTree(rootDir: string): number {
let removed = 0;
for (const dirPath of dirs) {
try {
const entries = fs.readdirSync(dirPath);
const entries = await fs.promises.readdir(dirPath);
if (entries.length === 0) {
fs.rmdirSync(dirPath);
await fs.promises.rmdir(dirPath);
removed += 1;
}
} catch {
@ -1108,7 +1124,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
throw new Error("aborted:extract");
}
const allCandidates = findArchiveCandidates(options.packageDir);
const allCandidates = await findArchiveCandidates(options.packageDir);
const candidates = options.onlyArchives
? allCandidates.filter((archivePath) => {
const key = process.platform === "win32" ? path.resolve(archivePath).toLowerCase() : path.resolve(archivePath);
@ -1118,9 +1134,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
logger.info(`Entpacken gestartet: packageDir=${options.packageDir}, targetDir=${options.targetDir}, archives=${candidates.length}${options.onlyArchives ? ` (hybrid, gesamt=${allCandidates.length})` : ""}, cleanupMode=${options.cleanupMode}, conflictMode=${options.conflictMode}`);
if (candidates.length === 0) {
if (!options.onlyArchives) {
const existingResume = readExtractResumeState(options.packageDir, options.packageId);
if (existingResume.size > 0 && hasAnyEntries(options.targetDir)) {
clearExtractResumeState(options.packageDir, options.packageId);
const existingResume = await readExtractResumeState(options.packageDir, options.packageId);
if (existingResume.size > 0 && await hasAnyEntries(options.targetDir)) {
await clearExtractResumeState(options.packageDir, options.packageId);
logger.info(`Entpacken übersprungen (Archive bereinigt, Ziel hat Dateien): ${options.packageDir}`);
options.onProgress?.({
current: existingResume.size,
@ -1131,7 +1147,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
});
return { extracted: existingResume.size, failed: 0, lastError: "" };
}
clearExtractResumeState(options.packageDir, options.packageId);
await clearExtractResumeState(options.packageDir, options.packageId);
}
logger.info(`Entpacken übersprungen (keine Archive gefunden): ${options.packageDir}`);
return { extracted: 0, failed: 0, lastError: "" };
@ -1142,7 +1158,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
logger.warn("Extract-ConflictMode 'ask' wird ohne Prompt als 'skip' behandelt");
}
let passwordCandidates = archivePasswords(options.passwordList || "");
const resumeCompleted = readExtractResumeState(options.packageDir, options.packageId);
const resumeCompleted = await readExtractResumeState(options.packageDir, options.packageId);
const resumeCompletedAtStart = resumeCompleted.size;
const allCandidateNames = new Set(allCandidates.map((archivePath) => archiveNameKey(path.basename(archivePath))));
for (const archiveName of Array.from(resumeCompleted.values())) {
@ -1151,9 +1167,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
}
if (resumeCompleted.size > 0) {
writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
} else {
clearExtractResumeState(options.packageDir, options.packageId);
await clearExtractResumeState(options.packageDir, options.packageId);
}
const pendingCandidates = candidates.filter((archivePath) => !resumeCompleted.has(archiveNameKey(path.basename(archivePath))));
@ -1217,7 +1233,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
try {
const ext = path.extname(archivePath).toLowerCase();
if (ext === ".zip") {
const preferExternal = shouldPreferExternalZip(archivePath);
const preferExternal = await shouldPreferExternalZip(archivePath);
if (preferExternal) {
try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, passwordCandidates, (value) => {
@ -1227,14 +1243,14 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} catch (error) {
if (isNoExtractorError(String(error))) {
extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
await extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
} else {
throw error;
}
}
} else {
try {
extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
await extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
archivePercent = 100;
} catch (error) {
if (!shouldFallbackToExternalZip(error)) {
@ -1264,7 +1280,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
extracted += 1;
extractedArchives.add(archivePath);
resumeCompleted.add(archiveResumeKey);
writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
logger.info(`Entpacken erfolgreich: ${path.basename(archivePath)}`);
archivePercent = 100;
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
@ -1291,7 +1307,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
if (extracted > 0) {
const hasOutputAfter = hasAnyEntries(options.targetDir);
const hasOutputAfter = await hasAnyEntries(options.targetDir);
const hadResumeProgress = resumeCompletedAtStart > 0;
if (!hasOutputAfter && conflictMode !== "skip" && !hadResumeProgress) {
lastError = "Keine entpackten Dateien erkannt";
@ -1304,7 +1320,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
const sourceAndTargetEqual = pathSetKey(path.resolve(options.packageDir)) === pathSetKey(path.resolve(options.targetDir));
const removedArchives = sourceAndTargetEqual
? 0
: cleanupArchives(cleanupSources, options.cleanupMode);
: await cleanupArchives(cleanupSources, options.cleanupMode);
if (sourceAndTargetEqual && options.cleanupMode !== "none") {
logger.warn(`Archive-Cleanup übersprungen (Quelle=Ziel): ${options.packageDir}`);
}
@ -1312,21 +1328,21 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
logger.info(`Archive-Cleanup abgeschlossen: ${removedArchives} Datei(en) entfernt`);
}
if (options.removeLinks) {
const removedLinks = removeDownloadLinkArtifacts(options.targetDir);
const removedLinks = await removeDownloadLinkArtifacts(options.targetDir);
logger.info(`Link-Artefakt-Cleanup: ${removedLinks} Datei(en) entfernt`);
}
if (options.removeSamples) {
const removedSamples = removeSampleArtifacts(options.targetDir);
const removedSamples = await removeSampleArtifacts(options.targetDir);
logger.info(`Sample-Cleanup: ${removedSamples.files} Datei(en), ${removedSamples.dirs} Ordner entfernt`);
}
}
if (failed === 0 && resumeCompleted.size >= allCandidates.length && !options.skipPostCleanup) {
clearExtractResumeState(options.packageDir, options.packageId);
await clearExtractResumeState(options.packageDir, options.packageId);
}
if (!options.skipPostCleanup && options.cleanupMode === "delete" && !hasAnyFilesRecursive(options.packageDir)) {
const removedDirs = removeEmptyDirectoryTree(options.packageDir);
if (!options.skipPostCleanup && options.cleanupMode === "delete" && !(await hasAnyFilesRecursive(options.packageDir))) {
const removedDirs = await removeEmptyDirectoryTree(options.packageDir);
if (removedDirs > 0) {
logger.info(`Leere Download-Ordner entfernt: ${removedDirs} (root=${options.packageDir})`);
}
@ -1334,8 +1350,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
} else if (!options.skipPostCleanup) {
try {
if (fs.existsSync(options.targetDir) && fs.readdirSync(options.targetDir).length === 0) {
fs.rmSync(options.targetDir, { recursive: true, force: true });
const targetExists = await fs.promises.access(options.targetDir).then(() => true, () => false);
if (targetExists && (await fs.promises.readdir(options.targetDir)).length === 0) {
await fs.promises.rm(options.targetDir, { recursive: true, force: true });
}
} catch {
// ignore
@ -1344,9 +1361,9 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
if (failed > 0) {
if (resumeCompleted.size > 0) {
writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
} else {
clearExtractResumeState(options.packageDir, options.packageId);
await clearExtractResumeState(options.packageDir, options.packageId);
}
}

View File

@ -118,6 +118,26 @@ function rotateIfNeeded(filePath: string): void {
}
}
/**
 * Async counterpart of rotateIfNeeded: rotates the log file to `<path>.old`
 * once it grows past LOG_MAX_FILE_BYTES. To keep the hot flush path cheap,
 * the stat-based size check is throttled to at most once per minute per file
 * via the shared rotateCheckAtByFile map.
 */
async function rotateIfNeededAsync(filePath: string): Promise<void> {
  try {
    const checkedAt = rotateCheckAtByFile.get(filePath) || 0;
    const now = Date.now();
    // Throttle: skip the stat entirely if we checked this file recently.
    if (now - checkedAt < 60_000) {
      return;
    }
    rotateCheckAtByFile.set(filePath, now);
    const { size } = await fs.promises.stat(filePath);
    if (size < LOG_MAX_FILE_BYTES) {
      return;
    }
    // Replace any previous backup, then rotate the current file into its place.
    const backupPath = `${filePath}.old`;
    await fs.promises.rm(backupPath, { force: true }).catch(() => {});
    await fs.promises.rename(filePath, backupPath);
  } catch {
    // ignore - file may not exist yet
  }
}
async function flushAsync(): Promise<void> {
if (flushInFlight || pendingLines.length === 0) {
return;
@ -128,11 +148,11 @@ async function flushAsync(): Promise<void> {
const chunk = linesSnapshot.join("");
try {
rotateIfNeeded(logFilePath);
await rotateIfNeededAsync(logFilePath);
const primary = await appendChunk(logFilePath, chunk);
let wroteAny = primary.ok;
if (fallbackLogFilePath) {
rotateIfNeeded(fallbackLogFilePath);
await rotateIfNeededAsync(fallbackLogFilePath);
const fallback = await appendChunk(fallbackLogFilePath, chunk);
wroteAny = wroteAny || fallback.ok;
if (!primary.ok && !fallback.ok) {

View File

@ -477,9 +477,7 @@ let asyncSaveQueued: { paths: StoragePaths; payload: string } | null = null;
async function writeSessionPayload(paths: StoragePaths, payload: string): Promise<void> {
await fs.promises.mkdir(paths.baseDir, { recursive: true });
if (fs.existsSync(paths.sessionFile)) {
await fsp.copyFile(paths.sessionFile, sessionBackupPath(paths.sessionFile)).catch(() => {});
}
const tempPath = sessionTempPath(paths.sessionFile, "async");
await fsp.writeFile(tempPath, payload, "utf8");
try {

View File

@ -239,8 +239,12 @@ const BandwidthChart = memo(function BandwidthChart({ items, running, paused }:
history.push({ time: now, speed: paused ? 0 : totalSpeed });
const cutoff = now - 60000;
while (history.length > 0 && history[0].time < cutoff) {
history.shift();
let trimIndex = 0;
while (trimIndex < history.length && history[trimIndex].time < cutoff) {
trimIndex += 1;
}
if (trimIndex > 0) {
speedHistoryRef.current = history.slice(trimIndex);
}
lastUpdateRef.current = now;