diff --git a/package.json b/package.json index 18da2f1..a3e627a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "real-debrid-downloader", - "version": "1.4.19", + "version": "1.4.20", "description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)", "main": "build/main/main/main.js", "author": "Sucukdeluxe", diff --git a/src/main/cleanup.ts b/src/main/cleanup.ts index 067e418..e129026 100644 --- a/src/main/cleanup.ts +++ b/src/main/cleanup.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import path from "node:path"; -import { ARCHIVE_TEMP_EXTENSIONS, LINK_ARTIFACT_EXTENSIONS, RAR_SPLIT_RE, SAMPLE_DIR_NAMES, SAMPLE_TOKEN_RE, SAMPLE_VIDEO_EXTENSIONS } from "./constants"; +import { ARCHIVE_TEMP_EXTENSIONS, LINK_ARTIFACT_EXTENSIONS, MAX_LINK_ARTIFACT_BYTES, RAR_SPLIT_RE, SAMPLE_DIR_NAMES, SAMPLE_TOKEN_RE, SAMPLE_VIDEO_EXTENSIONS } from "./constants"; async function yieldToLoop(): Promise<void> { await new Promise((resolve) => { @@ -111,7 +111,7 @@ export function removeDownloadLinkArtifacts(extractDir: string): number { if (/[._\- ](links?|downloads?|urls?|dlc)([._\- ]|$)/i.test(name)) { try { const stat = fs.statSync(full); - if (stat.size <= 256 * 1024) { + if (stat.size <= MAX_LINK_ARTIFACT_BYTES) { const text = fs.readFileSync(full, "utf8"); shouldDelete = /https?:\/\//i.test(text); } diff --git a/src/main/constants.ts b/src/main/constants.ts index 3e277cc..9271304 100644 --- a/src/main/constants.ts +++ b/src/main/constants.ts @@ -20,9 +20,14 @@ export const SAMPLE_VIDEO_EXTENSIONS = new Set([".mkv", ".mp4", ".avi", ".mov", export const LINK_ARTIFACT_EXTENSIONS = new Set([".url", ".webloc", ".dlc", ".rsdf", ".ccf"]); export const SAMPLE_TOKEN_RE = /(^|[._\-\s])sample([._\-\s]|$)/i; -export const ARCHIVE_TEMP_EXTENSIONS = new Set([".rar", ".zip", ".7z", ".tmp", ".part"]); +export const ARCHIVE_TEMP_EXTENSIONS = new Set([".rar", ".zip", ".7z", ".tmp", ".part", ".tar", ".gz", ".bz2", ".xz"]); export const RAR_SPLIT_RE = /\.r\d{2}$/i;
+export const MAX_MANIFEST_FILE_BYTES = 5 * 1024 * 1024; +export const MAX_LINK_ARTIFACT_BYTES = 256 * 1024; +export const SPEED_WINDOW_SECONDS = 3; +export const CLIPBOARD_POLL_INTERVAL_MS = 2000; + export const DEFAULT_UPDATE_REPO = "Sucukdeluxe/real-debrid-downloader"; export function defaultSettings(): AppSettings { diff --git a/src/main/debrid.ts b/src/main/debrid.ts index 6fdf5f1..0483e56 100644 --- a/src/main/debrid.ts +++ b/src/main/debrid.ts @@ -160,7 +160,7 @@ function looksLikeFileName(value: string): boolean { return /\.(?:part\d+\.rar|r\d{2}|rar|zip|7z|tar|gz|bz2|xz|iso|mkv|mp4|avi|mov|wmv|m4v|m2ts|ts|webm|mp3|flac|aac|srt|ass|sub)$/i.test(value); } -function normalizeResolvedFilename(value: string): string { +export function normalizeResolvedFilename(value: string): string { const candidate = decodeHtmlEntities(String(value || "")) .replace(/<[^>]*>/g, " ") .replace(/\s+/g, " ") @@ -174,7 +174,7 @@ return candidate; } -function filenameFromRapidgatorUrlPath(link: string): string { +export function filenameFromRapidgatorUrlPath(link: string): string { try { const parsed = new URL(link); const pathParts = parsed.pathname.split("/").filter(Boolean); @@ -191,10 +191,10 @@ } } -function extractRapidgatorFilenameFromHtml(html: string): string { +export function extractRapidgatorFilenameFromHtml(html: string): string { const patterns = [ - /<meta[^>]+property=["']og:title["'][^>]+content=["']([^"']+)["']/i, - /<meta[^>]+name=["']title["'][^>]+content=["']([^"']+)["']/i, + /<meta[^>]+(?:property=["']og:title["'][^>]+content=["']([^"']+)["']|content=["']([^"']+)["'][^>]+property=["']og:title["'])/i, + /<meta[^>]+(?:name=["']title["'][^>]+content=["']([^"']+)["']|content=["']([^"']+)["'][^>]+name=["']title["'])/i, /<title>([^<]+)<\/title>/i, /(?:Dateiname|File\s*name)\s*[:\-]\s*<[^>]*>\s*([^<]+)\s*</i, /(?:Dateiname|File\s*name)\s*[:\-]\s*([^<\r\n]+)/i, @@ -204,7 +204,10 @@
function extractRapidgatorFilenameFromHtml(html: string): string { for (const pattern of patterns) { const match = html.match(pattern); - const normalized = normalizeResolvedFilename(match?.[1] || ""); + // Some patterns have multiple capture groups for attribute-order independence; + // pick the first non-empty group. + const raw = match?.[1] || match?.[2] || ""; + const normalized = normalizeResolvedFilename(raw); if (normalized) { return normalized; } diff --git a/src/main/download-manager.ts b/src/main/download-manager.ts index 77a0cd8..d620917 100644 --- a/src/main/download-manager.ts +++ b/src/main/download-manager.ts @@ -326,6 +326,8 @@ export class DownloadManager extends EventEmitter { private claimedTargetPathByItem = new Map<string, string>(); + private itemContributedBytes = new Map<string, number>(); + private runItemIds = new Set<string>(); private runPackageIds = new Set<string>(); @@ -338,6 +340,8 @@ export class DownloadManager extends EventEmitter { private lastReconnectMarkAt = 0; + private consecutiveReconnects = 0; + private lastGlobalProgressBytes = 0; private lastGlobalProgressAt = 0; @@ -562,7 +566,7 @@ export class DownloadManager extends EventEmitter { } } if (this.session.running) { - void this.ensureScheduler(); + void this.ensureScheduler().catch((err) => logger.warn(`ensureScheduler Fehler (togglePackage): ${compactErrorText(err)}`)); } } @@ -618,6 +622,7 @@ export class DownloadManager extends EventEmitter { this.runCompletedPackages.clear(); this.reservedTargetPaths.clear(); this.claimedTargetPathByItem.clear(); + this.itemContributedBytes.clear(); this.packagePostProcessTasks.clear(); this.packagePostProcessAbortControllers.clear(); this.packagePostProcessQueue = Promise.resolve(); @@ -697,7 +702,7 @@ export class DownloadManager extends EventEmitter { this.persistSoon(); this.emitState(); if (unresolvedByLink.size > 0) { - void this.resolveQueuedFilenames(unresolvedByLink); + void 
this.resolveQueuedFilenames(unresolvedByLink).catch((err) => logger.warn(`resolveQueuedFilenames Fehler (addPackages): ${compactErrorText(err)}`)); } return { addedPackages, addedLinks }; } @@ -913,7 +918,7 @@ export class DownloadManager extends EventEmitter { } if (unresolvedByLink.size > 0) { - void this.resolveQueuedFilenames(unresolvedByLink); + void this.resolveQueuedFilenames(unresolvedByLink).catch((err) => logger.warn(`resolveQueuedFilenames Fehler (resolveExisting): ${compactErrorText(err)}`)); } } @@ -1268,12 +1273,15 @@ export class DownloadManager extends EventEmitter { this.session.running = true; this.session.paused = false; + // By design: runStartedAt and totalDownloadedBytes reset on each start/resume so that + // duration, average speed, and ETA are calculated relative to the current run, not cumulative. this.session.runStartedAt = nowMs(); this.session.totalDownloadedBytes = 0; this.session.summaryText = ""; this.session.reconnectUntil = 0; this.session.reconnectReason = ""; this.lastReconnectMarkAt = 0; + this.consecutiveReconnects = 0; this.speedEvents = []; this.speedBytesLastWindow = 0; this.lastGlobalProgressBytes = 0; @@ -1501,7 +1509,7 @@ export class DownloadManager extends EventEmitter { private persistNow(): void { this.lastPersistAt = nowMs(); if (this.session.running) { - void saveSessionAsync(this.storagePaths, this.session); + void saveSessionAsync(this.storagePaths, this.session).catch((err) => logger.warn(`saveSessionAsync Fehler: ${compactErrorText(err)}`)); } else { saveSession(this.storagePaths, this.session); } @@ -1715,7 +1723,7 @@ export class DownloadManager extends EventEmitter { } } changed = true; - void this.runPackagePostProcessing(packageId); + void this.runPackagePostProcessing(packageId).catch((err) => logger.warn(`runPackagePostProcessing Fehler (recoverPostProcessing): ${compactErrorText(err)}`)); } else if (pkg.status !== "completed") { pkg.status = "completed"; pkg.updatedAt = nowMs(); @@ -1775,7 +1783,7 @@ 
export class DownloadManager extends EventEmitter { } } logger.info(`Entpacken via Start ausgelöst: pkg=${pkg.name}`); - void this.runPackagePostProcessing(packageId); + void this.runPackagePostProcessing(packageId).catch((err) => logger.warn(`runPackagePostProcessing Fehler (triggerPending): ${compactErrorText(err)}`)); } } @@ -1847,7 +1855,17 @@ export class DownloadManager extends EventEmitter { } private reconnectActive(): boolean { - return this.session.reconnectUntil > nowMs(); + if (this.session.reconnectUntil <= 0) { + return false; + } + const now = nowMs(); + // Safety: if reconnectUntil is unreasonably far in the future (clock regression), + // clamp it to reconnectWaitSeconds * 2 from now + const maxWaitMs = this.settings.reconnectWaitSeconds * 2 * 1000; + if (this.session.reconnectUntil - now > maxWaitMs) { + this.session.reconnectUntil = now + maxWaitMs; + } + return this.session.reconnectUntil > now; } private runGlobalStallWatchdog(now: number): void { @@ -1900,8 +1918,18 @@ export class DownloadManager extends EventEmitter { return; } - const until = nowMs() + this.settings.reconnectWaitSeconds * 1000; + this.consecutiveReconnects += 1; + const backoffMultiplier = Math.min(this.consecutiveReconnects, 5); + const waitMs = this.settings.reconnectWaitSeconds * 1000 * backoffMultiplier; + const maxWaitMs = this.settings.reconnectWaitSeconds * 2 * 1000; + const cappedWaitMs = Math.min(waitMs, maxWaitMs); + const until = nowMs() + cappedWaitMs; this.session.reconnectUntil = Math.max(this.session.reconnectUntil, until); + // Safety cap: never let reconnectUntil exceed reconnectWaitSeconds * 2 from now + const absoluteMax = nowMs() + maxWaitMs; + if (this.session.reconnectUntil > absoluteMax) { + this.session.reconnectUntil = absoluteMax; + } this.session.reconnectReason = reason; this.lastReconnectMarkAt = 0; @@ -1912,7 +1940,7 @@ export class DownloadManager extends EventEmitter { } } - logger.warn(`Reconnect angefordert: ${reason}`); + 
logger.warn(`Reconnect angefordert: ${reason} (consecutive=${this.consecutiveReconnects}, wait=${Math.ceil(cappedWaitMs / 1000)}s)`); this.emitState(); } @@ -2022,7 +2050,9 @@ export class DownloadManager extends EventEmitter { this.activeTasks.set(itemId, active); this.emitState(); - void this.processItem(active).finally(() => { + void this.processItem(active).catch((err) => { + logger.warn(`processItem unbehandelt (${itemId}): ${compactErrorText(err)}`); + }).finally(() => { this.releaseTargetPath(item.id); if (active.nonResumableCounted) { this.nonResumableActive = Math.max(0, this.nonResumableActive - 1); @@ -2140,7 +2170,9 @@ export class DownloadManager extends EventEmitter { pkg.updatedAt = nowMs(); this.recordRunOutcome(item.id, "completed"); - void this.runPackagePostProcessing(pkg.id).finally(() => { + void this.runPackagePostProcessing(pkg.id).catch((err) => { + logger.warn(`runPackagePostProcessing Fehler (processItem): ${compactErrorText(err)}`); + }).finally(() => { this.applyCompletedCleanupPolicy(pkg.id, item.id); this.persistSoon(); this.emitState(); @@ -2429,7 +2461,8 @@ export class DownloadManager extends EventEmitter { const resumable = response.status === 206 || acceptRanges; active.resumable = resumable; - const contentLength = Number(response.headers.get("content-length") || 0); + const rawContentLength = Number(response.headers.get("content-length") || 0); + const contentLength = Number.isFinite(rawContentLength) && rawContentLength > 0 ? rawContentLength : 0; const totalFromRange = parseContentRangeTotal(response.headers.get("content-range")); if (knownTotal && knownTotal > 0) { item.totalBytes = knownTotal; @@ -2440,10 +2473,19 @@ export class DownloadManager extends EventEmitter { } const writeMode = existingBytes > 0 && response.status === 206 ? 
"a" : "w"; - if (writeMode === "w" && existingBytes > 0) { - fs.rmSync(effectiveTargetPath, { force: true }); + if (writeMode === "w") { + // Starting fresh: subtract any previously counted bytes for this item to avoid double-counting on retry + const previouslyContributed = this.itemContributedBytes.get(active.itemId) || 0; + if (previouslyContributed > 0) { + this.session.totalDownloadedBytes = Math.max(0, this.session.totalDownloadedBytes - previouslyContributed); + this.itemContributedBytes.set(active.itemId, 0); + } + if (existingBytes > 0) { + fs.rmSync(effectiveTargetPath, { force: true }); + } } + fs.mkdirSync(path.dirname(effectiveTargetPath), { recursive: true }); const stream = fs.createWriteStream(effectiveTargetPath, { flags: writeMode }); let written = writeMode === "a" ? existingBytes : 0; let windowBytes = 0; @@ -2623,9 +2665,10 @@ export class DownloadManager extends EventEmitter { written += buffer.length; windowBytes += buffer.length; this.session.totalDownloadedBytes += buffer.length; + this.itemContributedBytes.set(active.itemId, (this.itemContributedBytes.get(active.itemId) || 0) + buffer.length); this.recordSpeed(buffer.length); - const elapsed = Math.max((nowMs() - windowStarted) / 1000, 0.1); + const elapsed = Math.max((nowMs() - windowStarted) / 1000, 0.5); const speed = windowBytes / elapsed; if (elapsed >= 1.2) { windowStarted = nowMs(); @@ -3136,6 +3179,7 @@ export class DownloadManager extends EventEmitter { this.runCompletedPackages.clear(); this.reservedTargetPaths.clear(); this.claimedTargetPathByItem.clear(); + this.itemContributedBytes.clear(); this.lastGlobalProgressBytes = this.session.totalDownloadedBytes; this.lastGlobalProgressAt = nowMs(); this.persistNow(); diff --git a/src/main/extractor.ts b/src/main/extractor.ts index cf44385..5fdf8df 100644 --- a/src/main/extractor.ts +++ b/src/main/extractor.ts @@ -466,6 +466,10 @@ export function buildExternalExtractArgs( const lower = command.toLowerCase(); if 
(lower.includes("unrar") || lower.includes("winrar")) { const overwrite = mode === "overwrite" ? "-o+" : mode === "rename" ? "-or" : "-o-"; + // NOTE: The password is passed as a CLI argument (-p<password>), which means it may be + // visible via process listing tools (e.g. `ps aux` on Unix). This is unavoidable because + // WinRAR/UnRAR CLI does not support password input via stdin or environment variables. + // On Windows (the target platform) this is less of a concern than on shared Unix systems. const pass = password ? `-p${password}` : "-p-"; const perfArgs = usePerformanceFlags && shouldUseExtractorPerformanceFlags() ? ["-idc", extractorThreadSwitch()] @@ -474,6 +478,7 @@ export function buildExternalExtractArgs( } const overwrite = mode === "overwrite" ? "-aoa" : mode === "rename" ? "-aou" : "-aos"; + // NOTE: Same password-in-args limitation as above applies to 7z as well. const pass = password ? `-p${password}` : "-p"; return ["x", "-y", overwrite, pass, archivePath, `-o${targetDir}`]; } @@ -599,6 +604,9 @@ function extractZipArchive(archivePath: string, targetDir: string, conflictMode: continue; } fs.mkdirSync(path.dirname(outputPath), { recursive: true }); + // TOCTOU note: There is a small race between existsSync and writeFileSync below. + // This is acceptable here because zip extraction is single-threaded and we need + // the exists check to implement skip/rename conflict resolution semantics. 
if (fs.existsSync(outputPath)) { if (mode === "skip") { continue; diff --git a/src/main/integrity.ts b/src/main/integrity.ts index 697925a..5943405 100644 --- a/src/main/integrity.ts +++ b/src/main/integrity.ts @@ -2,6 +2,7 @@ import fs from "node:fs"; import path from "node:path"; import crypto from "node:crypto"; import { ParsedHashEntry } from "../shared/types"; +import { MAX_MANIFEST_FILE_BYTES } from "./constants"; export function parseHashLine(line: string): ParsedHashEntry | null { const text = String(line || "").trim(); @@ -53,7 +54,7 @@ export function readHashManifest(packageDir: string): Map<string, ParsedHashEntr let lines: string[]; try { const stat = fs.statSync(filePath); - if (stat.size > 5 * 1024 * 1024) { + if (stat.size > MAX_MANIFEST_FILE_BYTES) { continue; } lines = fs.readFileSync(filePath, "utf8").split(/\r?\n/); diff --git a/src/main/main.ts b/src/main/main.ts index 06e3042..8619c2d 100644 --- a/src/main/main.ts +++ b/src/main/main.ts @@ -6,6 +6,20 @@ import { IPC_CHANNELS } from "../shared/ipc"; import { logger } from "./logger"; import { APP_NAME } from "./constants"; +/* ── IPC validation helpers ────────────────────────────────────── */ +function validateString(value: unknown, name: string): string { + if (typeof value !== "string") { + throw new Error(`${name} muss ein String sein`); + } + return value; +} +function validateStringArray(value: unknown, name: string): string[] { + if (!Array.isArray(value) || !value.every(v => typeof v === "string")) { + throw new Error(`${name} muss ein String-Array sein`); + } + return value as string[]; +} + /* ── Single Instance Lock ───────────────────────────────────────── */ const gotLock = app.requestSingleInstanceLock(); if (!gotLock) { @@ -45,6 +59,19 @@ function createWindow(): BrowserWindow { } }); + if (!isDevMode()) { + window.webContents.session.webRequest.onHeadersReceived((details, callback) => { + callback({ + responseHeaders: { + ...details.responseHeaders, + "Content-Security-Policy": 
[ + "default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; connect-src 'self' https://api.real-debrid.com https://api.github.com https://bestdebrid.com https://api.alldebrid.com https://www.mega-debrid.eu" + ] + } + }); + }); + } + if (isDevMode()) { void window.loadURL("http://localhost:5173"); } else { @@ -96,13 +123,13 @@ function startClipboardWatcher(): void { if (clipboardTimer) { return; } - lastClipboardText = clipboard.readText(); + lastClipboardText = clipboard.readText().slice(0, 50000); clipboardTimer = setInterval(() => { const text = clipboard.readText(); if (text === lastClipboardText || !text.trim()) { return; } - lastClipboardText = text; + lastClipboardText = text.slice(0, 50000); const links = extractLinksFromText(text); if (links.length > 0 && mainWindow && !mainWindow.isDestroyed()) { mainWindow.webContents.send(IPC_CHANNELS.CLIPBOARD_DETECTED, links); @@ -144,7 +171,7 @@ function registerIpcHandlers(): void { if (result.started) { setTimeout(() => { app.quit(); - }, 350); + }, 800); } return result; }); @@ -166,22 +193,50 @@ function registerIpcHandlers(): void { updateTray(); return result; }); - ipcMain.handle(IPC_CHANNELS.ADD_LINKS, (_event: IpcMainInvokeEvent, payload: AddLinksPayload) => controller.addLinks(payload)); + ipcMain.handle(IPC_CHANNELS.ADD_LINKS, (_event: IpcMainInvokeEvent, payload: AddLinksPayload) => { + validateString(payload?.rawText, "rawText"); + return controller.addLinks(payload); + }); ipcMain.handle(IPC_CHANNELS.ADD_CONTAINERS, async (_event: IpcMainInvokeEvent, filePaths: string[]) => controller.addContainers(filePaths ?? 
[])); ipcMain.handle(IPC_CHANNELS.GET_START_CONFLICTS, () => controller.getStartConflicts()); - ipcMain.handle(IPC_CHANNELS.RESOLVE_START_CONFLICT, (_event: IpcMainInvokeEvent, packageId: string, policy: "keep" | "skip" | "overwrite") => - controller.resolveStartConflict(packageId, policy)); + ipcMain.handle(IPC_CHANNELS.RESOLVE_START_CONFLICT, (_event: IpcMainInvokeEvent, packageId: string, policy: "keep" | "skip" | "overwrite") => { + validateString(packageId, "packageId"); + validateString(policy, "policy"); + if (policy !== "keep" && policy !== "skip" && policy !== "overwrite") { + throw new Error("policy muss 'keep', 'skip' oder 'overwrite' sein"); + } + return controller.resolveStartConflict(packageId, policy); + }); ipcMain.handle(IPC_CHANNELS.CLEAR_ALL, () => controller.clearAll()); ipcMain.handle(IPC_CHANNELS.START, () => controller.start()); ipcMain.handle(IPC_CHANNELS.STOP, () => controller.stop()); ipcMain.handle(IPC_CHANNELS.TOGGLE_PAUSE, () => controller.togglePause()); - ipcMain.handle(IPC_CHANNELS.CANCEL_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => controller.cancelPackage(packageId)); - ipcMain.handle(IPC_CHANNELS.RENAME_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string, newName: string) => controller.renamePackage(packageId, newName)); - ipcMain.handle(IPC_CHANNELS.REORDER_PACKAGES, (_event: IpcMainInvokeEvent, packageIds: string[]) => controller.reorderPackages(packageIds)); - ipcMain.handle(IPC_CHANNELS.REMOVE_ITEM, (_event: IpcMainInvokeEvent, itemId: string) => controller.removeItem(itemId)); - ipcMain.handle(IPC_CHANNELS.TOGGLE_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => controller.togglePackage(packageId)); + ipcMain.handle(IPC_CHANNELS.CANCEL_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => { + validateString(packageId, "packageId"); + return controller.cancelPackage(packageId); + }); + ipcMain.handle(IPC_CHANNELS.RENAME_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string, newName: string) 
=> { + validateString(packageId, "packageId"); + validateString(newName, "newName"); + return controller.renamePackage(packageId, newName); + }); + ipcMain.handle(IPC_CHANNELS.REORDER_PACKAGES, (_event: IpcMainInvokeEvent, packageIds: string[]) => { + validateStringArray(packageIds, "packageIds"); + return controller.reorderPackages(packageIds); + }); + ipcMain.handle(IPC_CHANNELS.REMOVE_ITEM, (_event: IpcMainInvokeEvent, itemId: string) => { + validateString(itemId, "itemId"); + return controller.removeItem(itemId); + }); + ipcMain.handle(IPC_CHANNELS.TOGGLE_PACKAGE, (_event: IpcMainInvokeEvent, packageId: string) => { + validateString(packageId, "packageId"); + return controller.togglePackage(packageId); + }); ipcMain.handle(IPC_CHANNELS.EXPORT_QUEUE, () => controller.exportQueue()); - ipcMain.handle(IPC_CHANNELS.IMPORT_QUEUE, (_event: IpcMainInvokeEvent, json: string) => controller.importQueue(json)); + ipcMain.handle(IPC_CHANNELS.IMPORT_QUEUE, (_event: IpcMainInvokeEvent, json: string) => { + validateString(json, "json"); + return controller.importQueue(json); + }); ipcMain.handle(IPC_CHANNELS.TOGGLE_CLIPBOARD, () => { const settings = controller.getSettings(); const next = !settings.clipboardWatch; diff --git a/src/main/storage.ts b/src/main/storage.ts index 8f206a5..f62d3b2 100644 --- a/src/main/storage.ts +++ b/src/main/storage.ts @@ -147,6 +147,8 @@ export function loadSettings(paths: StoragePaths): AppSettings { return defaultSettings(); } try { + // Safe: parsed is spread into a fresh object with defaults first, and normalizeSettings + // validates every field, so prototype pollution via __proto__ / constructor is not a concern. 
const parsed = JSON.parse(fs.readFileSync(paths.configFile, "utf8")) as AppSettings; const merged = normalizeSettings({ ...defaultSettings(), @@ -163,7 +165,7 @@ function syncRenameWithExdevFallback(tempPath: string, targetPath: string): void try { fs.renameSync(tempPath, targetPath); } catch (renameError: unknown) { - if ((renameError as NodeJS.ErrnoException).code === "EXDEV") { + if (renameError && typeof renameError === "object" && "code" in renameError && (renameError as NodeJS.ErrnoException).code === "EXDEV") { fs.copyFileSync(tempPath, targetPath); try { fs.rmSync(tempPath, { force: true }); } catch {} } else { @@ -174,6 +176,14 @@ function syncRenameWithExdevFallback(tempPath: string, targetPath: string): void export function saveSettings(paths: StoragePaths, settings: AppSettings): void { ensureBaseDir(paths.baseDir); + // Create a backup of the existing config before overwriting + if (fs.existsSync(paths.configFile)) { + try { + fs.copyFileSync(paths.configFile, `${paths.configFile}.bak`); + } catch { + // Best-effort backup; proceed even if it fails + } + } const persisted = sanitizeCredentialPersistence(normalizeSettings(settings)); const payload = JSON.stringify(persisted, null, 2); const tempPath = `${paths.configFile}.tmp`; @@ -205,13 +215,26 @@ export function loadSession(paths: StoragePaths): SessionState { } try { const parsed = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as Partial<SessionState>; - return { + const session: SessionState = { ...emptySession(), ...parsed, packages: parsed.packages ?? {}, items: parsed.items ?? {}, packageOrder: parsed.packageOrder ?? 
[] }; + + // Reset transient fields that may be stale from a previous crash + const ACTIVE_STATUSES = new Set(["downloading", "validating", "extracting", "integrity_check", "paused", "reconnect_wait"]); + for (const item of Object.values(session.items)) { + if (ACTIVE_STATUSES.has(item.status)) { + item.status = "queued"; + item.lastError = ""; + } + // Always clear stale speed values + item.speedBps = 0; + } + + return session; } catch (error) { logger.error(`Session konnte nicht geladen werden: ${String(error)}`); return emptySession(); @@ -243,7 +266,7 @@ export async function saveSessionAsync(paths: StoragePaths, session: SessionStat try { await fsp.rename(tempPath, paths.sessionFile); } catch (renameError: unknown) { - if ((renameError as NodeJS.ErrnoException).code === "EXDEV") { + if (renameError && typeof renameError === "object" && "code" in renameError && (renameError as NodeJS.ErrnoException).code === "EXDEV") { await fsp.copyFile(tempPath, paths.sessionFile); await fsp.rm(tempPath, { force: true }).catch(() => {}); } else { diff --git a/src/main/update.ts b/src/main/update.ts index 92388c8..894eb94 100644 --- a/src/main/update.ts +++ b/src/main/update.ts @@ -69,12 +69,12 @@ function timeoutController(ms: number): { signal: AbortSignal; clear: () => void }; } -function parseVersionParts(version: string): number[] { +export function parseVersionParts(version: string): number[] { const cleaned = version.replace(/^v/i, "").trim(); return cleaned.split(".").map((part) => Number(part.replace(/[^0-9].*$/, "") || "0")); } -function isRemoteNewer(currentVersion: string, latestVersion: string): boolean { +export function isRemoteNewer(currentVersion: string, latestVersion: string): boolean { const current = parseVersionParts(currentVersion); const latest = parseVersionParts(latestVersion); const maxLen = Math.max(current.length, latest.length); diff --git a/tests/cleanup.test.ts b/tests/cleanup.test.ts index c31f39f..ce0f905 100644 --- a/tests/cleanup.test.ts +++ 
b/tests/cleanup.test.ts @@ -37,4 +37,56 @@ describe("cleanup", () => { expect(links).toBeGreaterThan(0); expect(samples.files + samples.dirs).toBeGreaterThan(0); }); + + it("cleans up archive files in nested directories", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-")); + tempDirs.push(dir); + + // Create nested directory structure with archive files + const sub1 = path.join(dir, "season1"); + const sub2 = path.join(dir, "season1", "extras"); + fs.mkdirSync(sub2, { recursive: true }); + + fs.writeFileSync(path.join(sub1, "episode.part1.rar"), "x"); + fs.writeFileSync(path.join(sub1, "episode.part2.rar"), "x"); + fs.writeFileSync(path.join(sub2, "bonus.zip"), "x"); + fs.writeFileSync(path.join(sub2, "bonus.7z"), "x"); + // Non-archive files should be kept + fs.writeFileSync(path.join(sub1, "video.mkv"), "real content"); + fs.writeFileSync(path.join(sub2, "subtitle.srt"), "subtitle content"); + + const removed = cleanupCancelledPackageArtifacts(dir); + expect(removed).toBe(4); // 2 rar parts + zip + 7z + expect(fs.existsSync(path.join(sub1, "episode.part1.rar"))).toBe(false); + expect(fs.existsSync(path.join(sub1, "episode.part2.rar"))).toBe(false); + expect(fs.existsSync(path.join(sub2, "bonus.zip"))).toBe(false); + expect(fs.existsSync(path.join(sub2, "bonus.7z"))).toBe(false); + // Non-archives kept + expect(fs.existsSync(path.join(sub1, "video.mkv"))).toBe(true); + expect(fs.existsSync(path.join(sub2, "subtitle.srt"))).toBe(true); + }); + + it("detects link artifacts by URL content in text files", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-")); + tempDirs.push(dir); + + // File with link-like name containing URLs should be removed + fs.writeFileSync(path.join(dir, "download_links.txt"), "https://rapidgator.net/file/abc123\nhttps://uploaded.net/file/def456\n"); + // File with link-like name but no URLs should be kept + fs.writeFileSync(path.join(dir, "my_downloads.txt"), "Just some random text without URLs"); + 
// Regular text file that doesn't match the link pattern should be kept + fs.writeFileSync(path.join(dir, "readme.txt"), "https://example.com"); + // .url files should always be removed + fs.writeFileSync(path.join(dir, "bookmark.url"), "[InternetShortcut]\nURL=https://example.com"); + // .dlc files should always be removed + fs.writeFileSync(path.join(dir, "container.dlc"), "encrypted-data"); + + const removed = removeDownloadLinkArtifacts(dir); + expect(removed).toBeGreaterThanOrEqual(3); // download_links.txt + bookmark.url + container.dlc + expect(fs.existsSync(path.join(dir, "download_links.txt"))).toBe(false); + expect(fs.existsSync(path.join(dir, "bookmark.url"))).toBe(false); + expect(fs.existsSync(path.join(dir, "container.dlc"))).toBe(false); + // Non-matching files should be kept + expect(fs.existsSync(path.join(dir, "readme.txt"))).toBe(true); + }); }); diff --git a/tests/debrid.test.ts b/tests/debrid.test.ts index e4f8d63..fae088e 100644 --- a/tests/debrid.test.ts +++ b/tests/debrid.test.ts @@ -1,6 +1,6 @@ import { afterEach, describe, expect, it, vi } from "vitest"; import { defaultSettings } from "../src/main/constants"; -import { DebridService } from "../src/main/debrid"; +import { DebridService, extractRapidgatorFilenameFromHtml, filenameFromRapidgatorUrlPath, normalizeResolvedFilename } from "../src/main/debrid"; const originalFetch = globalThis.fetch; @@ -368,3 +368,102 @@ describe("debrid service", () => { ])); }); }); + +describe("normalizeResolvedFilename", () => { + it("strips HTML entities", () => { + expect(normalizeResolvedFilename("Show.S01E01.German.DL.720p.part01.rar")).toBe("Show.S01E01.German.DL.720p.part01.rar"); + expect(normalizeResolvedFilename("File&Name.part1.rar")).toBe("File&Name.part1.rar"); + expect(normalizeResolvedFilename("File"Name".part1.rar")).toBe('File"Name".part1.rar'); + }); + + it("strips HTML tags and collapses whitespace", () => { + // Tags are replaced by spaces, then multiple spaces collapsed + const result = 
normalizeResolvedFilename("<b>Show.S01E01</b>.part01.rar"); + expect(result).toBe("Show.S01E01 .part01.rar"); + + // Entity decoding happens before tag removal, so <...> becomes <...> then gets stripped + const entityTagResult = normalizeResolvedFilename("File<Tag>.part1.rar"); + expect(entityTagResult).toBe("File .part1.rar"); + }); + + it("strips 'download file' prefix", () => { + expect(normalizeResolvedFilename("Download file Show.S01E01.part01.rar")).toBe("Show.S01E01.part01.rar"); + expect(normalizeResolvedFilename("download file Movie.2024.mkv")).toBe("Movie.2024.mkv"); + }); + + it("strips Rapidgator suffix", () => { + expect(normalizeResolvedFilename("Show.S01E01.part01.rar - Rapidgator")).toBe("Show.S01E01.part01.rar"); + expect(normalizeResolvedFilename("Movie.mkv | Rapidgator.net")).toBe("Movie.mkv"); + }); + + it("returns empty for opaque or non-filename values", () => { + expect(normalizeResolvedFilename("")).toBe(""); + expect(normalizeResolvedFilename("just some text")).toBe(""); + expect(normalizeResolvedFilename("e51f6809bb6ca615601f5ac5db433737")).toBe(""); + expect(normalizeResolvedFilename("download.bin")).toBe(""); + }); + + it("handles combined transforms", () => { + // "Download file" prefix stripped, & decoded to &, "- Rapidgator" suffix stripped + expect(normalizeResolvedFilename("Download file Show.S01E01.part01.rar - Rapidgator")) + .toBe("Show.S01E01.part01.rar"); + }); +}); + +describe("filenameFromRapidgatorUrlPath", () => { + it("extracts filename from standard rapidgator URL", () => { + expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/abc123/Show.S01E01.part01.rar.html")) + .toBe("Show.S01E01.part01.rar"); + }); + + it("extracts filename without .html suffix", () => { + expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/abc123/Movie.2024.mkv")) + .toBe("Movie.2024.mkv"); + }); + + it("returns empty for hash-only URL paths", () => { + 
expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/e51f6809bb6ca615601f5ac5db433737")) + .toBe(""); + }); + + it("returns empty for invalid URLs", () => { + expect(filenameFromRapidgatorUrlPath("not-a-url")).toBe(""); + expect(filenameFromRapidgatorUrlPath("")).toBe(""); + }); + + it("handles URL-encoded path segments", () => { + expect(filenameFromRapidgatorUrlPath("https://rapidgator.net/file/id/Show%20Name.S01E01.part01.rar.html")) + .toBe("Show Name.S01E01.part01.rar"); + }); +}); + +describe("extractRapidgatorFilenameFromHtml", () => { + it("extracts filename from title tag", () => { + const html = "<html><head><title>Download file Show.S01E01.German.DL.720p.part01.rar - Rapidgator"; + expect(extractRapidgatorFilenameFromHtml(html)).toBe("Show.S01E01.German.DL.720p.part01.rar"); + }); + + it("extracts filename from og:title meta tag", () => { + const html = ''; + expect(extractRapidgatorFilenameFromHtml(html)).toBe("Movie.2024.German.DL.1080p.mkv"); + }); + + it("extracts filename from reversed og:title attribute order", () => { + const html = ''; + expect(extractRapidgatorFilenameFromHtml(html)).toBe("Movie.2024.German.DL.1080p.mkv"); + }); + + it("returns empty for HTML without recognizable filenames", () => { + const html = "Rapidgator: Fast, Pair and UnlimitedNo file here"; + expect(extractRapidgatorFilenameFromHtml(html)).toBe(""); + }); + + it("returns empty for empty HTML", () => { + expect(extractRapidgatorFilenameFromHtml("")).toBe(""); + }); + + it("extracts from File name label in page body", () => { + const html = 'File name: Show.S02E03.720p.part01.rar'; + expect(extractRapidgatorFilenameFromHtml(html)).toBe("Show.S02E03.720p.part01.rar"); + }); +}); diff --git a/tests/extractor.test.ts b/tests/extractor.test.ts index b54748d..00ad25d 100644 --- a/tests/extractor.test.ts +++ b/tests/extractor.test.ts @@ -447,4 +447,111 @@ describe("extractor", () => { expect(fs.existsSync(path.join(targetDir, "safe.txt"))).toBe(true); 
expect(fs.existsSync(path.join(root, "escaped.txt"))).toBe(false); }); + + it("builds external extract args for 7z-style extractor", () => { + const args7z = buildExternalExtractArgs("7z.exe", "archive.7z", "C:\\target", "overwrite"); + expect(args7z[0]).toBe("x"); + expect(args7z).toContain("-y"); + expect(args7z).toContain("-aoa"); + expect(args7z).toContain("-p"); + expect(args7z).toContain("archive.7z"); + expect(args7z).toContain("-oC:\\target"); + }); + + it("builds 7z args with skip conflict mode", () => { + const args = buildExternalExtractArgs("7z", "archive.zip", "/out", "skip"); + expect(args).toContain("-aos"); + }); + + it("builds 7z args with rename conflict mode", () => { + const args = buildExternalExtractArgs("7z", "archive.zip", "/out", "rename"); + expect(args).toContain("-aou"); + }); + + it("builds 7z args with password", () => { + const args = buildExternalExtractArgs("7z", "archive.7z", "/out", "overwrite", "secretpass"); + expect(args).toContain("-psecretpass"); + }); + + it("builds WinRAR args with empty password uses -p-", () => { + const args = buildExternalExtractArgs("WinRAR.exe", "archive.rar", "/out", "overwrite", ""); + expect(args).toContain("-p-"); + }); + + it("builds WinRAR args with skip conflict mode uses -o-", () => { + const args = buildExternalExtractArgs("WinRAR.exe", "archive.rar", "/out", "skip"); + expect(args[1]).toBe("-o-"); + }); + + it("collects split zip companion parts for cleanup", () => { + const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-")); + tempDirs.push(root); + const packageDir = path.join(root, "pkg"); + fs.mkdirSync(packageDir, { recursive: true }); + + const mainZip = path.join(packageDir, "release.zip"); + const z01 = path.join(packageDir, "release.z01"); + const z02 = path.join(packageDir, "release.z02"); + const otherZip = path.join(packageDir, "other.zip"); + + fs.writeFileSync(mainZip, "a", "utf8"); + fs.writeFileSync(z01, "b", "utf8"); + fs.writeFileSync(z02, "c", "utf8"); + 
fs.writeFileSync(otherZip, "x", "utf8"); + + const targets = new Set(collectArchiveCleanupTargets(mainZip)); + expect(targets.has(mainZip)).toBe(true); + expect(targets.has(z01)).toBe(true); + expect(targets.has(z02)).toBe(true); + expect(targets.has(otherZip)).toBe(false); + }); + + it("collects numbered split zip parts (.zip.001, .zip.002) for cleanup", () => { + const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-")); + tempDirs.push(root); + const packageDir = path.join(root, "pkg"); + fs.mkdirSync(packageDir, { recursive: true }); + + const part1 = path.join(packageDir, "movie.zip.001"); + const part2 = path.join(packageDir, "movie.zip.002"); + const part3 = path.join(packageDir, "movie.zip.003"); + const mainZip = path.join(packageDir, "movie.zip"); + const other = path.join(packageDir, "other.zip.001"); + + fs.writeFileSync(part1, "a", "utf8"); + fs.writeFileSync(part2, "b", "utf8"); + fs.writeFileSync(part3, "c", "utf8"); + fs.writeFileSync(mainZip, "d", "utf8"); + fs.writeFileSync(other, "x", "utf8"); + + const targets = new Set(collectArchiveCleanupTargets(part1)); + expect(targets.has(part1)).toBe(true); + expect(targets.has(part2)).toBe(true); + expect(targets.has(part3)).toBe(true); + expect(targets.has(mainZip)).toBe(true); + expect(targets.has(other)).toBe(false); + }); + + it("collects old-style rar split parts (.r00, .r01) for cleanup", () => { + const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-")); + tempDirs.push(root); + const packageDir = path.join(root, "pkg"); + fs.mkdirSync(packageDir, { recursive: true }); + + const mainRar = path.join(packageDir, "show.rar"); + const r00 = path.join(packageDir, "show.r00"); + const r01 = path.join(packageDir, "show.r01"); + const r02 = path.join(packageDir, "show.r02"); + + fs.writeFileSync(mainRar, "a", "utf8"); + fs.writeFileSync(r00, "b", "utf8"); + fs.writeFileSync(r01, "c", "utf8"); + fs.writeFileSync(r02, "d", "utf8"); + + const targets = new 
Set(collectArchiveCleanupTargets(mainRar)); + expect(targets.has(mainRar)).toBe(true); + expect(targets.has(r00)).toBe(true); + expect(targets.has(r01)).toBe(true); + expect(targets.has(r02)).toBe(true); + }); }); diff --git a/tests/integrity.test.ts b/tests/integrity.test.ts index ece4669..12a87f2 100644 --- a/tests/integrity.test.ts +++ b/tests/integrity.test.ts @@ -2,7 +2,7 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; import { afterEach, describe, expect, it } from "vitest"; -import { parseHashLine, validateFileAgainstManifest } from "../src/main/integrity"; +import { parseHashLine, readHashManifest, validateFileAgainstManifest } from "../src/main/integrity"; const tempDirs: string[] = []; @@ -29,4 +29,45 @@ describe("integrity", () => { const result = await validateFileAgainstManifest(filePath, dir); expect(result.ok).toBe(true); }); + + it("skips manifest files larger than 5MB", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-int-")); + tempDirs.push(dir); + + // Create a .md5 manifest that exceeds the 5MB limit + const largeContent = "d41d8cd98f00b204e9800998ecf8427e sample.bin\n".repeat(200000); + const manifestPath = path.join(dir, "hashes.md5"); + fs.writeFileSync(manifestPath, largeContent, "utf8"); + + // Verify the file is actually > 5MB + const stat = fs.statSync(manifestPath); + expect(stat.size).toBeGreaterThan(5 * 1024 * 1024); + + // readHashManifest should skip the oversized file + const manifest = readHashManifest(dir); + expect(manifest.size).toBe(0); + }); + + it("does not parse SHA256 (64-char hex) as valid hash", () => { + // SHA256 is 64 chars - parseHashLine only supports 32 (MD5) and 40 (SHA1) + const sha256Line = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 emptyfile.bin"; + const result = parseHashLine(sha256Line); + // 64-char hex should not match the MD5 (32) or SHA1 (40) pattern + expect(result).toBeNull(); + }); + + it("parses SHA1 hash lines correctly", () 
=> { + const sha1Line = "da39a3ee5e6b4b0d3255bfef95601890afd80709 emptyfile.bin"; + const result = parseHashLine(sha1Line); + expect(result).not.toBeNull(); + expect(result?.algorithm).toBe("sha1"); + expect(result?.digest).toBe("da39a3ee5e6b4b0d3255bfef95601890afd80709"); + expect(result?.fileName).toBe("emptyfile.bin"); + }); + + it("ignores comment lines in hash manifests", () => { + expect(parseHashLine("; This is a comment")).toBeNull(); + expect(parseHashLine("")).toBeNull(); + expect(parseHashLine(" ")).toBeNull(); + }); }); diff --git a/tests/storage.test.ts b/tests/storage.test.ts index 375a32e..5a5ebc6 100644 --- a/tests/storage.test.ts +++ b/tests/storage.test.ts @@ -4,7 +4,7 @@ import path from "node:path"; import { afterEach, describe, expect, it } from "vitest"; import { AppSettings } from "../src/shared/types"; import { defaultSettings } from "../src/main/constants"; -import { createStoragePaths, loadSettings, normalizeSettings, saveSettings } from "../src/main/storage"; +import { createStoragePaths, emptySession, loadSession, loadSettings, normalizeSettings, saveSession, saveSettings } from "../src/main/storage"; const tempDirs: string[] = []; @@ -148,4 +148,187 @@ describe("settings storage", () => { expect(normalized.archivePasswordList).toBe("one\ntwo\nthree"); }); + + it("resets stale active statuses to queued on session load", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-")); + tempDirs.push(dir); + const paths = createStoragePaths(dir); + + const session = emptySession(); + session.packages["pkg1"] = { + id: "pkg1", + name: "Test Package", + outputDir: "/tmp/out", + extractDir: "/tmp/extract", + status: "downloading", + itemIds: ["item1", "item2", "item3", "item4"], + cancelled: false, + enabled: true, + createdAt: Date.now(), + updatedAt: Date.now() + }; + session.items["item1"] = { + id: "item1", + packageId: "pkg1", + url: "https://example.com/file1.rar", + provider: null, + status: "downloading", + retries: 0, + 
speedBps: 1024, + downloadedBytes: 5000, + totalBytes: 10000, + progressPercent: 50, + fileName: "file1.rar", + targetPath: "/tmp/out/file1.rar", + resumable: true, + attempts: 1, + lastError: "some error", + fullStatus: "", + createdAt: Date.now(), + updatedAt: Date.now() + }; + session.items["item2"] = { + id: "item2", + packageId: "pkg1", + url: "https://example.com/file2.rar", + provider: null, + status: "paused", + retries: 0, + speedBps: 0, + downloadedBytes: 0, + totalBytes: null, + progressPercent: 0, + fileName: "file2.rar", + targetPath: "/tmp/out/file2.rar", + resumable: false, + attempts: 0, + lastError: "", + fullStatus: "", + createdAt: Date.now(), + updatedAt: Date.now() + }; + session.items["item3"] = { + id: "item3", + packageId: "pkg1", + url: "https://example.com/file3.rar", + provider: null, + status: "completed", + retries: 0, + speedBps: 0, + downloadedBytes: 10000, + totalBytes: 10000, + progressPercent: 100, + fileName: "file3.rar", + targetPath: "/tmp/out/file3.rar", + resumable: false, + attempts: 1, + lastError: "", + fullStatus: "", + createdAt: Date.now(), + updatedAt: Date.now() + }; + session.items["item4"] = { + id: "item4", + packageId: "pkg1", + url: "https://example.com/file4.rar", + provider: null, + status: "queued", + retries: 0, + speedBps: 0, + downloadedBytes: 0, + totalBytes: null, + progressPercent: 0, + fileName: "file4.rar", + targetPath: "/tmp/out/file4.rar", + resumable: false, + attempts: 0, + lastError: "", + fullStatus: "", + createdAt: Date.now(), + updatedAt: Date.now() + }; + + saveSession(paths, session); + const loaded = loadSession(paths); + + // Active statuses (downloading, paused) should be reset to "queued" + expect(loaded.items["item1"].status).toBe("queued"); + expect(loaded.items["item2"].status).toBe("queued"); + // Speed should be cleared + expect(loaded.items["item1"].speedBps).toBe(0); + // lastError should be cleared for reset items + expect(loaded.items["item1"].lastError).toBe(""); + // Completed 
and queued statuses should be preserved + expect(loaded.items["item3"].status).toBe("completed"); + expect(loaded.items["item4"].status).toBe("queued"); + // Downloaded bytes should be preserved + expect(loaded.items["item1"].downloadedBytes).toBe(5000); + // Package data should be preserved + expect(loaded.packages["pkg1"].name).toBe("Test Package"); + }); + + it("returns empty session when session file contains invalid JSON", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-")); + tempDirs.push(dir); + const paths = createStoragePaths(dir); + + fs.writeFileSync(paths.sessionFile, "{{{corrupted json!!!", "utf8"); + + const loaded = loadSession(paths); + const empty = emptySession(); + expect(loaded.packages).toEqual(empty.packages); + expect(loaded.items).toEqual(empty.items); + expect(loaded.packageOrder).toEqual(empty.packageOrder); + }); + + it("returns defaults when config file contains invalid JSON", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-")); + tempDirs.push(dir); + const paths = createStoragePaths(dir); + + // Write invalid JSON to the config file + fs.writeFileSync(paths.configFile, "{{{{not valid json!!!}", "utf8"); + + const loaded = loadSettings(paths); + const defaults = defaultSettings(); + expect(loaded.providerPrimary).toBe(defaults.providerPrimary); + expect(loaded.maxParallel).toBe(defaults.maxParallel); + expect(loaded.outputDir).toBe(defaults.outputDir); + expect(loaded.cleanupMode).toBe(defaults.cleanupMode); + }); + + it("applies defaults for missing fields when loading old config", () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-")); + tempDirs.push(dir); + const paths = createStoragePaths(dir); + + // Write a minimal config that simulates an old version missing newer fields + fs.writeFileSync( + paths.configFile, + JSON.stringify({ + token: "my-token", + rememberToken: true, + outputDir: "/custom/output" + }), + "utf8" + ); + + const loaded = loadSettings(paths); + 
const defaults = defaultSettings(); + + // Old fields should be preserved + expect(loaded.token).toBe("my-token"); + expect(loaded.outputDir).toBe("/custom/output"); + + // Missing new fields should get default values + expect(loaded.autoProviderFallback).toBe(defaults.autoProviderFallback); + expect(loaded.hybridExtract).toBe(defaults.hybridExtract); + expect(loaded.completedCleanupPolicy).toBe(defaults.completedCleanupPolicy); + expect(loaded.speedLimitMode).toBe(defaults.speedLimitMode); + expect(loaded.clipboardWatch).toBe(defaults.clipboardWatch); + expect(loaded.minimizeToTray).toBe(defaults.minimizeToTray); + expect(loaded.theme).toBe(defaults.theme); + expect(loaded.bandwidthSchedules).toEqual(defaults.bandwidthSchedules); + expect(loaded.updateRepo).toBe(defaults.updateRepo); + }); }); diff --git a/tests/update.test.ts b/tests/update.test.ts index ef3d110..4e81180 100644 --- a/tests/update.test.ts +++ b/tests/update.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import { afterEach, describe, expect, it, vi } from "vitest"; -import { checkGitHubUpdate, installLatestUpdate, normalizeUpdateRepo } from "../src/main/update"; +import { checkGitHubUpdate, installLatestUpdate, isRemoteNewer, normalizeUpdateRepo, parseVersionParts } from "../src/main/update"; import { APP_VERSION } from "../src/main/constants"; import { UpdateCheckResult } from "../src/shared/types"; @@ -108,3 +108,91 @@ describe("update", () => { expect(requestedUrls.some((url) => url.includes("/releases/latest/download/"))).toBe(true); }); }); + +describe("normalizeUpdateRepo extended", () => { + it("handles trailing slashes and extra path segments", () => { + expect(normalizeUpdateRepo("owner/repo/")).toBe("owner/repo"); + expect(normalizeUpdateRepo("/owner/repo/")).toBe("owner/repo"); + expect(normalizeUpdateRepo("https://github.com/owner/repo/tree/main/src")).toBe("owner/repo"); + }); + + it("handles ssh-style git URLs", () => { + 
expect(normalizeUpdateRepo("git@github.com:user/project.git")).toBe("user/project"); + }); + + it("returns default for malformed inputs", () => { + expect(normalizeUpdateRepo("just-one-part")).toBe("Sucukdeluxe/real-debrid-downloader"); + expect(normalizeUpdateRepo(" ")).toBe("Sucukdeluxe/real-debrid-downloader"); + }); + + it("handles www prefix", () => { + expect(normalizeUpdateRepo("https://www.github.com/owner/repo")).toBe("owner/repo"); + expect(normalizeUpdateRepo("www.github.com/owner/repo")).toBe("owner/repo"); + }); +}); + +describe("isRemoteNewer", () => { + it("detects newer major version", () => { + expect(isRemoteNewer("1.0.0", "2.0.0")).toBe(true); + }); + + it("detects newer minor version", () => { + expect(isRemoteNewer("1.2.0", "1.3.0")).toBe(true); + }); + + it("detects newer patch version", () => { + expect(isRemoteNewer("1.2.3", "1.2.4")).toBe(true); + }); + + it("returns false for same version", () => { + expect(isRemoteNewer("1.2.3", "1.2.3")).toBe(false); + }); + + it("returns false for older version", () => { + expect(isRemoteNewer("2.0.0", "1.0.0")).toBe(false); + expect(isRemoteNewer("1.3.0", "1.2.0")).toBe(false); + expect(isRemoteNewer("1.2.4", "1.2.3")).toBe(false); + }); + + it("handles versions with different segment counts", () => { + expect(isRemoteNewer("1.2", "1.2.1")).toBe(true); + expect(isRemoteNewer("1.2.1", "1.2")).toBe(false); + expect(isRemoteNewer("1", "1.0.1")).toBe(true); + }); + + it("handles v-prefix in version strings", () => { + expect(isRemoteNewer("v1.0.0", "v2.0.0")).toBe(true); + expect(isRemoteNewer("v1.0.0", "v1.0.0")).toBe(false); + }); +}); + +describe("parseVersionParts", () => { + it("parses standard version strings", () => { + expect(parseVersionParts("1.2.3")).toEqual([1, 2, 3]); + expect(parseVersionParts("10.20.30")).toEqual([10, 20, 30]); + }); + + it("strips v prefix", () => { + expect(parseVersionParts("v1.2.3")).toEqual([1, 2, 3]); + expect(parseVersionParts("V1.2.3")).toEqual([1, 2, 3]); + }); + + 
it("handles single segment", () => { + expect(parseVersionParts("5")).toEqual([5]); + }); + + it("handles version with pre-release suffix", () => { + // Non-numeric suffixes are stripped per part + expect(parseVersionParts("1.2.3-beta")).toEqual([1, 2, 3]); + expect(parseVersionParts("1.2.3rc1")).toEqual([1, 2, 3]); + }); + + it("handles empty and whitespace", () => { + expect(parseVersionParts("")).toEqual([0]); + expect(parseVersionParts(" ")).toEqual([0]); + }); + + it("handles versions with extra dots", () => { + expect(parseVersionParts("1.2.3.4")).toEqual([1, 2, 3, 4]); + }); +}); diff --git a/tests/utils.test.ts b/tests/utils.test.ts index dbeb437..b0c1312 100644 --- a/tests/utils.test.ts +++ b/tests/utils.test.ts @@ -42,4 +42,62 @@ describe("utils", () => { expect(looksLikeOpaqueFilename("e51f6809bb6ca615601f5ac5db433737")).toBe(true); expect(looksLikeOpaqueFilename("movie.part1.rar")).toBe(false); }); + + it("preserves unicode filenames", () => { + expect(sanitizeFilename("日本語ファイル.txt")).toBe("日本語ファイル.txt"); + expect(sanitizeFilename("Ünïcödé Tëst.mkv")).toBe("Ünïcödé Tëst.mkv"); + expect(sanitizeFilename("파일이름.rar")).toBe("파일이름.rar"); + expect(sanitizeFilename("файл.zip")).toBe("файл.zip"); + }); + + it("handles very long filenames", () => { + const longName = "a".repeat(300); + const result = sanitizeFilename(longName); + expect(typeof result).toBe("string"); + expect(result.length).toBeGreaterThan(0); + // The function should return a non-empty string and not crash + expect(result).toBe(longName); + }); + + it("formats eta with very large values without crashing", () => { + const result = formatEta(999999); + expect(typeof result).toBe("string"); + expect(result.length).toBeGreaterThan(0); + // 999999 seconds = 277h 46m 39s + expect(result).toBe("277:46:39"); + }); + + it("formats eta with edge cases", () => { + expect(formatEta(0)).toBe("00:00"); + expect(formatEta(NaN)).toBe("--"); + expect(formatEta(Infinity)).toBe("--"); + 
expect(formatEta(Number.MAX_SAFE_INTEGER)).toMatch(/^\d+:\d{2}:\d{2}$/); + }); + + it("extracts filenames from URLs with encoded characters", () => { + expect(filenameFromUrl("https://example.com/file%20with%20spaces.rar")).toBe("file with spaces.rar"); + // %C3%A9 decodes to e-acute (UTF-8), which is preserved + expect(filenameFromUrl("https://example.com/t%C3%A9st%20file.zip")).toBe("t\u00e9st file.zip"); + expect(filenameFromUrl("https://example.com/dl?filename=Movie%20Name%20S01E01.mkv")).toBe("Movie Name S01E01.mkv"); + // Malformed percent-encoding should not crash + const result = filenameFromUrl("https://example.com/%ZZ%invalid"); + expect(typeof result).toBe("string"); + expect(result.length).toBeGreaterThan(0); + }); + + it("handles looksLikeOpaqueFilename edge cases", () => { + // Empty string -> sanitizeFilename returns "Paket" which is not opaque + expect(looksLikeOpaqueFilename("")).toBe(false); + expect(looksLikeOpaqueFilename("a")).toBe(false); + expect(looksLikeOpaqueFilename("ab")).toBe(false); + expect(looksLikeOpaqueFilename("abc")).toBe(false); + expect(looksLikeOpaqueFilename("download.bin")).toBe(true); + // 24-char hex string is opaque (matches /^[a-f0-9]{24,}$/) + expect(looksLikeOpaqueFilename("abcdef123456789012345678")).toBe(true); + expect(looksLikeOpaqueFilename("abcdef1234567890abcdef12")).toBe(true); + // Short hex strings (< 24 chars) are NOT considered opaque + expect(looksLikeOpaqueFilename("abcdef12345")).toBe(false); + // Real filename with extension + expect(looksLikeOpaqueFilename("Show.S01E01.720p.mkv")).toBe(false); + }); });