Release v1.4.27 with bug audit hardening fixes

This commit is contained in:
Sucukdeluxe 2026-02-28 14:12:16 +01:00
parent cbc423e4b7
commit 8700db4a37
27 changed files with 1322 additions and 119 deletions

4
package-lock.json generated
View File

@ -1,12 +1,12 @@
{ {
"name": "real-debrid-downloader", "name": "real-debrid-downloader",
"version": "1.4.23", "version": "1.4.27",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "real-debrid-downloader", "name": "real-debrid-downloader",
"version": "1.4.23", "version": "1.4.27",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",

View File

@ -1,6 +1,6 @@
{ {
"name": "real-debrid-downloader", "name": "real-debrid-downloader",
"version": "1.4.26", "version": "1.4.27",
"description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)", "description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
"main": "build/main/main/main.js", "main": "build/main/main/main.js",
"author": "Sucukdeluxe", "author": "Sucukdeluxe",

View File

@ -8,7 +8,7 @@ export const APP_VERSION: string = packageJson.version;
export const API_BASE_URL = "https://api.real-debrid.com/rest/1.0"; export const API_BASE_URL = "https://api.real-debrid.com/rest/1.0";
export const DCRYPT_UPLOAD_URL = "https://dcrypt.it/decrypt/upload"; export const DCRYPT_UPLOAD_URL = "https://dcrypt.it/decrypt/upload";
export const DLC_SERVICE_URL = "http://service.jdownloader.org/dlcrypt/service.php?srcType=dlc&destType=pylo&data={KEY}"; export const DLC_SERVICE_URL = "https://service.jdownloader.org/dlcrypt/service.php?srcType=dlc&destType=pylo&data={KEY}";
export const DLC_AES_KEY = Buffer.from("cb99b5cbc24db398", "utf8"); export const DLC_AES_KEY = Buffer.from("cb99b5cbc24db398", "utf8");
export const DLC_AES_IV = Buffer.from("9bc24cb995cb8db3", "utf8"); export const DLC_AES_IV = Buffer.from("9bc24cb995cb8db3", "utf8");

View File

@ -5,6 +5,8 @@ import { DCRYPT_UPLOAD_URL, DLC_AES_IV, DLC_AES_KEY, DLC_SERVICE_URL } from "./c
import { compactErrorText, inferPackageNameFromLinks, isHttpLink, sanitizeFilename, uniquePreserveOrder } from "./utils"; import { compactErrorText, inferPackageNameFromLinks, isHttpLink, sanitizeFilename, uniquePreserveOrder } from "./utils";
import { ParsedPackageInput } from "../shared/types"; import { ParsedPackageInput } from "../shared/types";
const MAX_DLC_FILE_BYTES = 8 * 1024 * 1024;
function decodeDcryptPayload(responseText: string): unknown { function decodeDcryptPayload(responseText: string): unknown {
let text = String(responseText || "").trim(); let text = String(responseText || "").trim();
const m = text.match(/<textarea[^>]*>([\s\S]*?)<\/textarea>/i); const m = text.match(/<textarea[^>]*>([\s\S]*?)<\/textarea>/i);
@ -62,6 +64,14 @@ function decryptRcPayload(base64Rc: string): Buffer {
return Buffer.concat([decipher.update(rcBytes), decipher.final()]); return Buffer.concat([decipher.update(rcBytes), decipher.final()]);
} }
function readDlcFileWithLimit(filePath: string): Buffer {
const stat = fs.statSync(filePath);
if (stat.size <= 0 || stat.size > MAX_DLC_FILE_BYTES) {
throw new Error(`DLC-Datei ungültig oder zu groß (${Math.floor(stat.size)} B)`);
}
return fs.readFileSync(filePath);
}
function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] { function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] {
const packages: ParsedPackageInput[] = []; const packages: ParsedPackageInput[] = [];
const packageRegex = /<package\s+[^>]*name="([^"]*)"[^>]*>([\s\S]*?)<\/package>/gi; const packageRegex = /<package\s+[^>]*name="([^"]*)"[^>]*>([\s\S]*?)<\/package>/gi;
@ -104,7 +114,7 @@ function parsePackagesFromDlcXml(xml: string): ParsedPackageInput[] {
} }
async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]> { async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]> {
const content = fs.readFileSync(filePath, "ascii").trim(); const content = readDlcFileWithLimit(filePath).toString("ascii").trim();
if (content.length < 89) { if (content.length < 89) {
return []; return [];
} }
@ -129,10 +139,19 @@ async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]>
decipher.setAutoPadding(false); decipher.setAutoPadding(false);
let decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]); let decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
const pad = decrypted[decrypted.length - 1]; if (decrypted.length === 0) {
if (pad > 0 && pad <= 16) { throw new Error("DLC-Entschlüsselung lieferte keine Daten");
decrypted = decrypted.subarray(0, decrypted.length - pad);
} }
const pad = decrypted[decrypted.length - 1];
if (pad <= 0 || pad > 16 || pad > decrypted.length) {
throw new Error("Ungültiges DLC-Padding");
}
for (let index = 1; index <= pad; index += 1) {
if (decrypted[decrypted.length - index] !== pad) {
throw new Error("Ungültiges DLC-Padding");
}
}
decrypted = decrypted.subarray(0, decrypted.length - pad);
const xmlData = Buffer.from(decrypted.toString("utf8"), "base64").toString("utf8"); const xmlData = Buffer.from(decrypted.toString("utf8"), "base64").toString("utf8");
return parsePackagesFromDlcXml(xmlData); return parsePackagesFromDlcXml(xmlData);
@ -140,7 +159,7 @@ async function decryptDlcLocal(filePath: string): Promise<ParsedPackageInput[]>
async function decryptDlcViaDcrypt(filePath: string): Promise<ParsedPackageInput[]> { async function decryptDlcViaDcrypt(filePath: string): Promise<ParsedPackageInput[]> {
const fileName = path.basename(filePath); const fileName = path.basename(filePath);
const blob = new Blob([fs.readFileSync(filePath)]); const blob = new Blob([new Uint8Array(readDlcFileWithLimit(filePath))]);
const form = new FormData(); const form = new FormData();
form.set("dlcfile", blob, fileName); form.set("dlcfile", blob, fileName);

View File

@ -50,6 +50,27 @@ function retryDelay(attempt: number): number {
return Math.min(5000, 400 * 2 ** attempt); return Math.min(5000, 400 * 2 ** attempt);
} }
function readHttpStatusFromErrorText(text: string): number {
const match = String(text || "").match(/HTTP\s+(\d{3})/i);
return match ? Number(match[1]) : 0;
}
function isRetryableErrorText(text: string): boolean {
const status = readHttpStatusFromErrorText(text);
if (status === 429 || status >= 500) {
return true;
}
const lower = String(text || "").toLowerCase();
return lower.includes("timeout")
|| lower.includes("network")
|| lower.includes("fetch failed")
|| lower.includes("aborted")
|| lower.includes("econnreset")
|| lower.includes("enotfound")
|| lower.includes("etimedout")
|| lower.includes("html statt json");
}
function asRecord(value: unknown): Record<string, unknown> | null { function asRecord(value: unknown): Record<string, unknown> | null {
if (!value || typeof value !== "object" || Array.isArray(value)) { if (!value || typeof value !== "object" || Array.isArray(value)) {
return null; return null;
@ -286,15 +307,12 @@ async function resolveRapidgatorFilename(link: string): Promise<string> {
function buildBestDebridRequests(link: string, token: string): BestDebridRequest[] { function buildBestDebridRequests(link: string, token: string): BestDebridRequest[] {
const linkParam = encodeURIComponent(link); const linkParam = encodeURIComponent(link);
const authParam = encodeURIComponent(token); const safeToken = String(token || "").trim();
const useAuthHeader = Boolean(safeToken);
return [ return [
{ {
url: `${BEST_DEBRID_API_BASE}/generateLink?link=${linkParam}`, url: `${BEST_DEBRID_API_BASE}/generateLink?link=${linkParam}`,
useAuthHeader: true useAuthHeader
},
{
url: `${BEST_DEBRID_API_BASE}/generateLink?auth=${authParam}&link=${linkParam}`,
useAuthHeader: false
} }
]; ];
} }
@ -402,7 +420,7 @@ class BestDebridClient {
throw new Error("BestDebrid Antwort ohne Download-Link"); throw new Error("BestDebrid Antwort ohne Download-Link");
} catch (error) { } catch (error) {
lastError = compactErrorText(error); lastError = compactErrorText(error);
if (attempt >= REQUEST_RETRIES) { if (attempt >= REQUEST_RETRIES || !isRetryableErrorText(lastError)) {
break; break;
} }
await sleep(retryDelay(attempt)); await sleep(retryDelay(attempt));
@ -490,7 +508,8 @@ class AllDebridClient {
chunkResolved = true; chunkResolved = true;
break; break;
} catch (error) { } catch (error) {
if (attempt >= REQUEST_RETRIES) { const errorText = compactErrorText(error);
if (attempt >= REQUEST_RETRIES || !isRetryableErrorText(errorText)) {
throw error; throw error;
} }
await sleep(retryDelay(attempt)); await sleep(retryDelay(attempt));
@ -579,7 +598,7 @@ class AllDebridClient {
}; };
} catch (error) { } catch (error) {
lastError = compactErrorText(error); lastError = compactErrorText(error);
if (attempt >= REQUEST_RETRIES) { if (attempt >= REQUEST_RETRIES || !isRetryableErrorText(lastError)) {
break; break;
} }
await sleep(retryDelay(attempt)); await sleep(retryDelay(attempt));
@ -738,7 +757,7 @@ export class DebridService {
return Boolean(this.settings.token.trim()); return Boolean(this.settings.token.trim());
} }
if (provider === "megadebrid") { if (provider === "megadebrid") {
return Boolean(this.settings.megaLogin.trim() && this.settings.megaPassword.trim()); return Boolean(this.settings.megaLogin.trim() && this.settings.megaPassword.trim() && this.options.megaWebUnrestrict);
} }
if (provider === "alldebrid") { if (provider === "alldebrid") {
return Boolean(this.settings.allDebridToken.trim()); return Boolean(this.settings.allDebridToken.trim());

View File

@ -655,6 +655,9 @@ export class DownloadManager extends EventEmitter {
this.reservedTargetPaths.clear(); this.reservedTargetPaths.clear();
this.claimedTargetPathByItem.clear(); this.claimedTargetPathByItem.clear();
this.itemContributedBytes.clear(); this.itemContributedBytes.clear();
this.speedEvents = [];
this.speedEventsHead = 0;
this.speedBytesLastWindow = 0;
this.packagePostProcessTasks.clear(); this.packagePostProcessTasks.clear();
this.packagePostProcessAbortControllers.clear(); this.packagePostProcessAbortControllers.clear();
this.hybridExtractRequeue.clear(); this.hybridExtractRequeue.clear();
@ -798,11 +801,17 @@ export class DownloadManager extends EventEmitter {
active.abortController.abort("cancel"); active.abortController.abort("cancel");
} }
this.releaseTargetPath(itemId); this.releaseTargetPath(itemId);
this.runItemIds.delete(itemId);
this.runOutcomes.delete(itemId);
this.itemContributedBytes.delete(itemId);
delete this.session.items[itemId]; delete this.session.items[itemId];
this.itemCount = Math.max(0, this.itemCount - 1); this.itemCount = Math.max(0, this.itemCount - 1);
} }
delete this.session.packages[packageId]; delete this.session.packages[packageId];
this.session.packageOrder = this.session.packageOrder.filter((id) => id !== packageId); this.session.packageOrder = this.session.packageOrder.filter((id) => id !== packageId);
this.runPackageIds.delete(packageId);
this.runCompletedPackages.delete(packageId);
this.hybridExtractRequeue.delete(packageId);
this.persistSoon(); this.persistSoon();
this.emitState(true); this.emitState(true);
return { skipped: true, overwritten: false }; return { skipped: true, overwritten: false };
@ -846,6 +855,11 @@ export class DownloadManager extends EventEmitter {
item.fullStatus = "Wartet"; item.fullStatus = "Wartet";
item.updatedAt = nowMs(); item.updatedAt = nowMs();
item.targetPath = path.join(pkg.outputDir, sanitizeFilename(item.fileName || filenameFromUrl(item.url))); item.targetPath = path.join(pkg.outputDir, sanitizeFilename(item.fileName || filenameFromUrl(item.url)));
this.runOutcomes.delete(itemId);
this.itemContributedBytes.delete(itemId);
if (this.session.running) {
this.runItemIds.add(itemId);
}
} }
pkg.status = "queued"; pkg.status = "queued";
pkg.updatedAt = nowMs(); pkg.updatedAt = nowMs();
@ -1294,6 +1308,7 @@ export class DownloadManager extends EventEmitter {
this.session.reconnectReason = ""; this.session.reconnectReason = "";
this.speedEvents = []; this.speedEvents = [];
this.speedBytesLastWindow = 0; this.speedBytesLastWindow = 0;
this.speedEventsHead = 0;
this.lastGlobalProgressBytes = 0; this.lastGlobalProgressBytes = 0;
this.lastGlobalProgressAt = nowMs(); this.lastGlobalProgressAt = nowMs();
this.summary = null; this.summary = null;
@ -1319,6 +1334,7 @@ export class DownloadManager extends EventEmitter {
this.consecutiveReconnects = 0; this.consecutiveReconnects = 0;
this.speedEvents = []; this.speedEvents = [];
this.speedBytesLastWindow = 0; this.speedBytesLastWindow = 0;
this.speedEventsHead = 0;
this.lastGlobalProgressBytes = 0; this.lastGlobalProgressBytes = 0;
this.lastGlobalProgressAt = nowMs(); this.lastGlobalProgressAt = nowMs();
this.globalSpeedLimitQueue = Promise.resolve(); this.globalSpeedLimitQueue = Promise.resolve();
@ -1326,7 +1342,13 @@ export class DownloadManager extends EventEmitter {
this.summary = null; this.summary = null;
this.persistSoon(); this.persistSoon();
this.emitState(true); this.emitState(true);
this.ensureScheduler(); void this.ensureScheduler().catch((error) => {
logger.error(`Scheduler abgestürzt: ${compactErrorText(error)}`);
this.session.running = false;
this.session.paused = false;
this.persistSoon();
this.emitState(true);
});
} }
public stop(): void { public stop(): void {
@ -1396,6 +1418,7 @@ export class DownloadManager extends EventEmitter {
this.speedEvents = []; this.speedEvents = [];
this.speedBytesLastWindow = 0; this.speedBytesLastWindow = 0;
this.speedEventsHead = 0;
this.runItemIds.clear(); this.runItemIds.clear();
this.runPackageIds.clear(); this.runPackageIds.clear();
this.runOutcomes.clear(); this.runOutcomes.clear();
@ -1599,6 +1622,9 @@ export class DownloadManager extends EventEmitter {
private recordSpeed(bytes: number): void { private recordSpeed(bytes: number): void {
const now = nowMs(); const now = nowMs();
if (bytes > 0 && this.consecutiveReconnects > 0) {
this.consecutiveReconnects = 0;
}
const bucket = now - (now % 120); const bucket = now - (now % 120);
const last = this.speedEvents[this.speedEvents.length - 1]; const last = this.speedEvents[this.speedEvents.length - 1];
if (last && last.at === bucket) { if (last && last.at === bucket) {
@ -3363,7 +3389,9 @@ export class DownloadManager extends EventEmitter {
} }
} catch (error) { } catch (error) {
const reasonRaw = String(error || ""); const reasonRaw = String(error || "");
if (reasonRaw.includes("aborted:extract") || reasonRaw.includes("extract_timeout")) { const isExtractAbort = reasonRaw.includes("aborted:extract") || reasonRaw.includes("extract_timeout");
let timeoutHandled = false;
if (isExtractAbort) {
if (timedOut) { if (timedOut) {
const timeoutReason = `Entpacken Timeout nach ${Math.ceil(extractTimeoutMs / 1000)}s`; const timeoutReason = `Entpacken Timeout nach ${Math.ceil(extractTimeoutMs / 1000)}s`;
logger.error(`Post-Processing Entpacken Timeout: pkg=${pkg.name}`); logger.error(`Post-Processing Entpacken Timeout: pkg=${pkg.name}`);
@ -3373,6 +3401,7 @@ export class DownloadManager extends EventEmitter {
} }
pkg.status = "failed"; pkg.status = "failed";
pkg.updatedAt = nowMs(); pkg.updatedAt = nowMs();
timeoutHandled = true;
} else { } else {
for (const entry of completedItems) { for (const entry of completedItems) {
if (/^Entpacken/i.test(entry.fullStatus || "")) { if (/^Entpacken/i.test(entry.fullStatus || "")) {
@ -3386,6 +3415,7 @@ export class DownloadManager extends EventEmitter {
return; return;
} }
} }
if (!timeoutHandled) {
const reason = compactErrorText(error); const reason = compactErrorText(error);
logger.error(`Post-Processing Entpacken Exception: pkg=${pkg.name}, reason=${reason}`); logger.error(`Post-Processing Entpacken Exception: pkg=${pkg.name}, reason=${reason}`);
for (const entry of completedItems) { for (const entry of completedItems) {
@ -3393,6 +3423,7 @@ export class DownloadManager extends EventEmitter {
entry.updatedAt = nowMs(); entry.updatedAt = nowMs();
} }
pkg.status = "failed"; pkg.status = "failed";
}
} finally { } finally {
clearTimeout(extractDeadline); clearTimeout(extractDeadline);
if (signal) { if (signal) {
@ -3500,6 +3531,9 @@ export class DownloadManager extends EventEmitter {
this.reservedTargetPaths.clear(); this.reservedTargetPaths.clear();
this.claimedTargetPathByItem.clear(); this.claimedTargetPathByItem.clear();
this.itemContributedBytes.clear(); this.itemContributedBytes.clear();
this.speedEvents = [];
this.speedEventsHead = 0;
this.speedBytesLastWindow = 0;
this.globalSpeedLimitQueue = Promise.resolve(); this.globalSpeedLimitQueue = Promise.resolve();
this.globalSpeedLimitNextAt = 0; this.globalSpeedLimitNextAt = 0;
this.lastGlobalProgressBytes = this.session.totalDownloadedBytes; this.lastGlobalProgressBytes = this.session.totalDownloadedBytes;

View File

@ -18,6 +18,7 @@ let resolveExtractorCommandInFlight: Promise<string> | null = null;
const EXTRACTOR_RETRY_AFTER_MS = 30_000; const EXTRACTOR_RETRY_AFTER_MS = 30_000;
const DEFAULT_ZIP_ENTRY_MEMORY_LIMIT_MB = 256; const DEFAULT_ZIP_ENTRY_MEMORY_LIMIT_MB = 256;
const EXTRACTOR_PROBE_TIMEOUT_MS = 8_000;
export interface ExtractOptions { export interface ExtractOptions {
packageDir: string; packageDir: string;
@ -63,6 +64,10 @@ export function pathSetKey(filePath: string): string {
return process.platform === "win32" ? filePath.toLowerCase() : filePath; return process.platform === "win32" ? filePath.toLowerCase() : filePath;
} }
function archiveNameKey(fileName: string): string {
return process.platform === "win32" ? String(fileName || "").toLowerCase() : String(fileName || "");
}
function archiveSortKey(filePath: string): string { function archiveSortKey(filePath: string): string {
const fileName = path.basename(filePath).toLowerCase(); const fileName = path.basename(filePath).toLowerCase();
return fileName return fileName
@ -244,7 +249,7 @@ function readExtractResumeState(packageDir: string, packageId?: string): Set<str
try { try {
const payload = JSON.parse(fs.readFileSync(progressPath, "utf8")) as Partial<ExtractResumeState>; const payload = JSON.parse(fs.readFileSync(progressPath, "utf8")) as Partial<ExtractResumeState>;
const names = Array.isArray(payload.completedArchives) ? payload.completedArchives : []; const names = Array.isArray(payload.completedArchives) ? payload.completedArchives : [];
return new Set(names.map((value) => String(value || "").trim()).filter(Boolean)); return new Set(names.map((value) => archiveNameKey(String(value || "").trim())).filter(Boolean));
} catch { } catch {
return new Set<string>(); return new Set<string>();
} }
@ -255,7 +260,9 @@ function writeExtractResumeState(packageDir: string, completedArchives: Set<stri
fs.mkdirSync(packageDir, { recursive: true }); fs.mkdirSync(packageDir, { recursive: true });
const progressPath = extractProgressFilePath(packageDir, packageId); const progressPath = extractProgressFilePath(packageDir, packageId);
const payload: ExtractResumeState = { const payload: ExtractResumeState = {
completedArchives: Array.from(completedArchives).sort((a, b) => a.localeCompare(b)) completedArchives: Array.from(completedArchives)
.map((name) => archiveNameKey(name))
.sort((a, b) => a.localeCompare(b))
}; };
fs.writeFileSync(progressPath, JSON.stringify(payload, null, 2), "utf8"); fs.writeFileSync(progressPath, JSON.stringify(payload, null, 2), "utf8");
} catch (error) { } catch (error) {
@ -457,10 +464,24 @@ function runExtractCommand(
}); });
child.on("close", (code) => { child.on("close", (code) => {
if (code === 0 || code === 1) { if (code === 0) {
finish({ ok: true, missingCommand: false, aborted: false, timedOut: false, errorText: "" }); finish({ ok: true, missingCommand: false, aborted: false, timedOut: false, errorText: "" });
return; return;
} }
if (code === 1) {
const lowered = output.toLowerCase();
const warningOnly = !lowered.includes("crc failed")
&& !lowered.includes("checksum error")
&& !lowered.includes("wrong password")
&& !lowered.includes("cannot open")
&& !lowered.includes("fatal error")
&& !lowered.includes("unexpected end of archive")
&& !lowered.includes("error:");
if (warningOnly) {
finish({ ok: true, missingCommand: false, aborted: false, timedOut: false, errorText: "" });
return;
}
}
const cleaned = cleanErrorText(output); const cleaned = cleanErrorText(output);
finish({ finish({
ok: false, ok: false,
@ -521,7 +542,7 @@ async function resolveExtractorCommandInternal(): Promise<string> {
continue; continue;
} }
const probeArgs = command.toLowerCase().includes("winrar") ? ["-?"] : ["?"]; const probeArgs = command.toLowerCase().includes("winrar") ? ["-?"] : ["?"];
const probe = await runExtractCommand(command, probeArgs); const probe = await runExtractCommand(command, probeArgs, undefined, undefined, EXTRACTOR_PROBE_TIMEOUT_MS);
if (!probe.missingCommand) { if (!probe.missingCommand) {
resolvedExtractorCommand = command; resolvedExtractorCommand = command;
resolveFailureReason = ""; resolveFailureReason = "";
@ -634,13 +655,35 @@ async function runExternalExtract(
throw new Error(lastError || "Entpacken fehlgeschlagen"); throw new Error(lastError || "Entpacken fehlgeschlagen");
} }
function extractZipArchive(archivePath: string, targetDir: string, conflictMode: ConflictMode): void { function isZipSafetyGuardError(error: unknown): boolean {
const text = String(error || "").toLowerCase();
return text.includes("zip-eintrag zu groß")
|| text.includes("zip-eintrag komprimiert zu groß")
|| text.includes("zip-eintrag ohne sichere groessenangabe")
|| text.includes("path traversal");
}
function shouldFallbackToExternalZip(error: unknown): boolean {
if (isZipSafetyGuardError(error)) {
return false;
}
const text = String(error || "").toLowerCase();
if (text.includes("aborted:extract") || text.includes("extract_aborted")) {
return false;
}
return true;
}
function extractZipArchive(archivePath: string, targetDir: string, conflictMode: ConflictMode, signal?: AbortSignal): void {
const mode = effectiveConflictMode(conflictMode); const mode = effectiveConflictMode(conflictMode);
const memoryLimitBytes = zipEntryMemoryLimitBytes(); const memoryLimitBytes = zipEntryMemoryLimitBytes();
const zip = new AdmZip(archivePath); const zip = new AdmZip(archivePath);
const entries = zip.getEntries(); const entries = zip.getEntries();
const resolvedTarget = path.resolve(targetDir); const resolvedTarget = path.resolve(targetDir);
for (const entry of entries) { for (const entry of entries) {
if (signal?.aborted) {
throw new Error("aborted:extract");
}
const outputPath = path.resolve(targetDir, entry.entryName); const outputPath = path.resolve(targetDir, entry.entryName);
if (!outputPath.startsWith(resolvedTarget + path.sep) && outputPath !== resolvedTarget) { if (!outputPath.startsWith(resolvedTarget + path.sep) && outputPath !== resolvedTarget) {
logger.warn(`ZIP-Eintrag übersprungen (Path Traversal): ${entry.entryName}`); logger.warn(`ZIP-Eintrag übersprungen (Path Traversal): ${entry.entryName}`);
@ -700,10 +743,16 @@ function extractZipArchive(archivePath: string, targetDir: string, conflictMode:
candidate = path.join(parsed.dir, `${parsed.name} (${n})${parsed.ext}`); candidate = path.join(parsed.dir, `${parsed.name} (${n})${parsed.ext}`);
n += 1; n += 1;
} }
if (signal?.aborted) {
throw new Error("aborted:extract");
}
fs.writeFileSync(candidate, entry.getData()); fs.writeFileSync(candidate, entry.getData());
continue; continue;
} }
} }
if (signal?.aborted) {
throw new Error("aborted:extract");
}
fs.writeFileSync(outputPath, entry.getData()); fs.writeFileSync(outputPath, entry.getData());
} }
} }
@ -945,7 +994,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
let passwordCandidates = archivePasswords(options.passwordList || ""); let passwordCandidates = archivePasswords(options.passwordList || "");
const resumeCompleted = readExtractResumeState(options.packageDir, options.packageId); const resumeCompleted = readExtractResumeState(options.packageDir, options.packageId);
const resumeCompletedAtStart = resumeCompleted.size; const resumeCompletedAtStart = resumeCompleted.size;
const allCandidateNames = new Set(allCandidates.map((archivePath) => path.basename(archivePath))); const allCandidateNames = new Set(allCandidates.map((archivePath) => archiveNameKey(path.basename(archivePath))));
for (const archiveName of Array.from(resumeCompleted.values())) { for (const archiveName of Array.from(resumeCompleted.values())) {
if (!allCandidateNames.has(archiveName)) { if (!allCandidateNames.has(archiveName)) {
resumeCompleted.delete(archiveName); resumeCompleted.delete(archiveName);
@ -957,13 +1006,13 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
clearExtractResumeState(options.packageDir, options.packageId); clearExtractResumeState(options.packageDir, options.packageId);
} }
const pendingCandidates = candidates.filter((archivePath) => !resumeCompleted.has(path.basename(archivePath))); const pendingCandidates = candidates.filter((archivePath) => !resumeCompleted.has(archiveNameKey(path.basename(archivePath))));
let extracted = candidates.length - pendingCandidates.length; let extracted = candidates.length - pendingCandidates.length;
let failed = 0; let failed = 0;
let lastError = ""; let lastError = "";
const extractedArchives = new Set<string>(); const extractedArchives = new Set<string>();
for (const archivePath of candidates) { for (const archivePath of candidates) {
if (resumeCompleted.has(path.basename(archivePath))) { if (resumeCompleted.has(archiveNameKey(path.basename(archivePath)))) {
extractedArchives.add(archivePath); extractedArchives.add(archivePath);
} }
} }
@ -1003,6 +1052,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
throw new Error("aborted:extract"); throw new Error("aborted:extract");
} }
const archiveName = path.basename(archivePath); const archiveName = path.basename(archivePath);
const archiveResumeKey = archiveNameKey(archiveName);
const archiveStartedAt = Date.now(); const archiveStartedAt = Date.now();
let archivePercent = 0; let archivePercent = 0;
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, 0); emitProgress(extracted + failed, archiveName, "extracting", archivePercent, 0);
@ -1023,16 +1073,19 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
passwordCandidates = prioritizePassword(passwordCandidates, usedPassword); passwordCandidates = prioritizePassword(passwordCandidates, usedPassword);
} catch (error) { } catch (error) {
if (isNoExtractorError(String(error))) { if (isNoExtractorError(String(error))) {
extractZipArchive(archivePath, options.targetDir, options.conflictMode); extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
} else { } else {
throw error; throw error;
} }
} }
} else { } else {
try { try {
extractZipArchive(archivePath, options.targetDir, options.conflictMode); extractZipArchive(archivePath, options.targetDir, options.conflictMode, options.signal);
archivePercent = 100; archivePercent = 100;
} catch { } catch (error) {
if (!shouldFallbackToExternalZip(error)) {
throw error;
}
const usedPassword = await runExternalExtract(archivePath, options.targetDir, "overwrite", passwordCandidates, (value) => { const usedPassword = await runExternalExtract(archivePath, options.targetDir, "overwrite", passwordCandidates, (value) => {
archivePercent = Math.max(archivePercent, value); archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt); emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
@ -1049,7 +1102,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
} }
extracted += 1; extracted += 1;
extractedArchives.add(archivePath); extractedArchives.add(archivePath);
resumeCompleted.add(archiveName); resumeCompleted.add(archiveResumeKey);
writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId); writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
logger.info(`Entpacken erfolgreich: ${path.basename(archivePath)}`); logger.info(`Entpacken erfolgreich: ${path.basename(archivePath)}`);
archivePercent = 100; archivePercent = 100;

View File

@ -41,7 +41,17 @@ export function readHashManifest(packageDir: string): Map<string, ParsedHashEntr
return map; return map;
} }
for (const entry of fs.readdirSync(packageDir, { withFileTypes: true })) { const manifestFiles = fs.readdirSync(packageDir, { withFileTypes: true })
.filter((entry) => {
if (!entry.isFile()) {
return false;
}
const ext = path.extname(entry.name).toLowerCase();
return patterns.some(([pattern]) => pattern === ext);
})
.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true, sensitivity: "base" }));
for (const entry of manifestFiles) {
if (!entry.isFile()) { if (!entry.isFile()) {
continue; continue;
} }
@ -70,7 +80,11 @@ export function readHashManifest(packageDir: string): Map<string, ParsedHashEntr
...parsed, ...parsed,
algorithm: hit[1] algorithm: hit[1]
}; };
map.set(parsed.fileName.toLowerCase(), normalized); const key = parsed.fileName.toLowerCase();
if (map.has(key)) {
continue;
}
map.set(key, normalized);
} }
} }
return map; return map;

View File

@ -5,6 +5,7 @@ import { AppController } from "./app-controller";
import { IPC_CHANNELS } from "../shared/ipc"; import { IPC_CHANNELS } from "../shared/ipc";
import { logger } from "./logger"; import { logger } from "./logger";
import { APP_NAME } from "./constants"; import { APP_NAME } from "./constants";
import { extractHttpLinksFromText } from "./utils";
/* ── IPC validation helpers ────────────────────────────────────── */ /* ── IPC validation helpers ────────────────────────────────────── */
function validateString(value: unknown, name: string): string { function validateString(value: unknown, name: string): string {
@ -39,6 +40,7 @@ let tray: Tray | null = null;
let clipboardTimer: ReturnType<typeof setInterval> | null = null; let clipboardTimer: ReturnType<typeof setInterval> | null = null;
let lastClipboardText = ""; let lastClipboardText = "";
const controller = new AppController(); const controller = new AppController();
const CLIPBOARD_MAX_TEXT_CHARS = 50_000;
function isDevMode(): boolean { function isDevMode(): boolean {
return process.env.NODE_ENV === "development"; return process.env.NODE_ENV === "development";
@ -115,21 +117,24 @@ function destroyTray(): void {
} }
function extractLinksFromText(text: string): string[] { function extractLinksFromText(text: string): string[] {
const matches = text.match(/https?:\/\/[^\s<>"']+/gi); return extractHttpLinksFromText(text);
return matches ? Array.from(new Set(matches)) : []; }
function normalizeClipboardText(text: string): string {
return String(text || "").slice(0, CLIPBOARD_MAX_TEXT_CHARS);
} }
function startClipboardWatcher(): void { function startClipboardWatcher(): void {
if (clipboardTimer) { if (clipboardTimer) {
return; return;
} }
lastClipboardText = clipboard.readText().slice(0, 50000); lastClipboardText = normalizeClipboardText(clipboard.readText());
clipboardTimer = setInterval(() => { clipboardTimer = setInterval(() => {
const text = clipboard.readText(); const text = normalizeClipboardText(clipboard.readText());
if (text === lastClipboardText || !text.trim()) { if (text === lastClipboardText || !text.trim()) {
return; return;
} }
lastClipboardText = text.slice(0, 50000); lastClipboardText = text;
const links = extractLinksFromText(text); const links = extractLinksFromText(text);
if (links.length > 0 && mainWindow && !mainWindow.isDestroyed()) { if (links.length > 0 && mainWindow && !mainWindow.isDestroyed()) {
mainWindow.webContents.send(IPC_CHANNELS.CLIPBOARD_DETECTED, links); mainWindow.webContents.send(IPC_CHANNELS.CLIPBOARD_DETECTED, links);

View File

@ -16,7 +16,18 @@ function retryDelay(attempt: number): number {
return Math.min(5000, 400 * 2 ** attempt); return Math.min(5000, 400 * 2 ** attempt);
} }
function parseErrorBody(status: number, body: string): string { function looksLikeHtmlResponse(contentType: string, body: string): boolean {
const type = String(contentType || "").toLowerCase();
if (type.includes("text/html") || type.includes("application/xhtml+xml")) {
return true;
}
return /^\s*<(!doctype\s+html|html\b)/i.test(String(body || ""));
}
function parseErrorBody(status: number, body: string, contentType: string): string {
if (looksLikeHtmlResponse(contentType, body)) {
return `Real-Debrid lieferte HTML statt JSON (HTTP ${status})`;
}
const clean = compactErrorText(body); const clean = compactErrorText(body);
return clean || `HTTP ${status}`; return clean || `HTTP ${status}`;
} }
@ -45,8 +56,9 @@ export class RealDebridClient {
}); });
const text = await response.text(); const text = await response.text();
const contentType = String(response.headers.get("content-type") || "");
if (!response.ok) { if (!response.ok) {
const parsed = parseErrorBody(response.status, text); const parsed = parseErrorBody(response.status, text, contentType);
if (shouldRetryStatus(response.status) && attempt < REQUEST_RETRIES) { if (shouldRetryStatus(response.status) && attempt < REQUEST_RETRIES) {
await sleep(retryDelay(attempt)); await sleep(retryDelay(attempt));
continue; continue;
@ -54,11 +66,15 @@ export class RealDebridClient {
throw new Error(parsed); throw new Error(parsed);
} }
if (looksLikeHtmlResponse(contentType, text)) {
throw new Error("Real-Debrid lieferte HTML statt JSON");
}
let payload: Record<string, unknown>; let payload: Record<string, unknown>;
try { try {
payload = JSON.parse(text) as Record<string, unknown>; payload = JSON.parse(text) as Record<string, unknown>;
} catch { } catch {
throw new Error(`Ungültige JSON-Antwort: ${text.slice(0, 120)}`); throw new Error("Ungültige JSON-Antwort von Real-Debrid");
} }
const directUrl = String(payload.download || payload.link || "").trim(); const directUrl = String(payload.download || payload.link || "").trim();
if (!directUrl) { if (!directUrl) {

View File

@ -1,7 +1,7 @@
import fs from "node:fs"; import fs from "node:fs";
import fsp from "node:fs/promises"; import fsp from "node:fs/promises";
import path from "node:path"; import path from "node:path";
import { AppSettings, BandwidthScheduleEntry, SessionState } from "../shared/types"; import { AppSettings, BandwidthScheduleEntry, DebridProvider, DownloadItem, DownloadStatus, PackageEntry, SessionState } from "../shared/types";
import { defaultSettings } from "./constants"; import { defaultSettings } from "./constants";
import { logger } from "./logger"; import { logger } from "./logger";
@ -12,6 +12,10 @@ const VALID_CONFLICT_MODES = new Set(["overwrite", "skip", "rename", "ask"]);
const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]); const VALID_FINISHED_POLICIES = new Set(["never", "immediate", "on_start", "package_done"]);
const VALID_SPEED_MODES = new Set(["global", "per_download"]); const VALID_SPEED_MODES = new Set(["global", "per_download"]);
const VALID_THEMES = new Set(["dark", "light"]); const VALID_THEMES = new Set(["dark", "light"]);
// Whitelist of persistable download states; anything else read from disk is
// reset to "queued" during session normalization.
const VALID_DOWNLOAD_STATUSES = new Set<DownloadStatus>([
"queued", "validating", "downloading", "paused", "reconnect_wait", "extracting", "integrity_check", "completed", "failed", "cancelled"
]);
// Known debrid providers; unrecognized values are dropped (provider -> null).
const VALID_ITEM_PROVIDERS = new Set<DebridProvider>(["realdebrid", "megadebrid", "bestdebrid", "alldebrid"]);
function asText(value: unknown): string { function asText(value: unknown): string {
return String(value ?? "").trim(); return String(value ?? "").trim();
@ -148,24 +152,171 @@ function ensureBaseDir(baseDir: string): void {
fs.mkdirSync(baseDir, { recursive: true }); fs.mkdirSync(baseDir, { recursive: true });
} }
export function loadSettings(paths: StoragePaths): AppSettings { function asRecord(value: unknown): Record<string, unknown> | null {
ensureBaseDir(paths.baseDir); if (!value || typeof value !== "object" || Array.isArray(value)) {
if (!fs.existsSync(paths.configFile)) { return null;
return defaultSettings();
} }
return value as Record<string, unknown>;
}
function readSettingsFile(filePath: string): AppSettings | null {
try { try {
// Safe: parsed is spread into a fresh object with defaults first, and normalizeSettings const parsed = JSON.parse(fs.readFileSync(filePath, "utf8")) as AppSettings;
// validates every field, so prototype pollution via __proto__ / constructor is not a concern.
const parsed = JSON.parse(fs.readFileSync(paths.configFile, "utf8")) as AppSettings;
const merged = normalizeSettings({ const merged = normalizeSettings({
...defaultSettings(), ...defaultSettings(),
...parsed ...parsed
}); });
return sanitizeCredentialPersistence(merged); return sanitizeCredentialPersistence(merged);
} catch (error) { } catch {
logger.error(`Konfiguration konnte nicht geladen werden: ${String(error)}`); return null;
}
}
/**
 * Rebuilds a trustworthy SessionState from untrusted JSON read off disk.
 *
 * Every field is re-validated: unknown statuses/providers fall back to safe
 * defaults, numbers are clamped, orphaned items (no parent package) are
 * dropped, package item lists are pruned to existing items, and the package
 * order is repaired to cover exactly the surviving packages. Any
 * structurally invalid input yields an empty session.
 */
function normalizeLoadedSession(raw: unknown): SessionState {
  const fallback = emptySession();
  const parsed = asRecord(raw);
  if (!parsed) {
    return fallback;
  }
  const now = Date.now();
  // Pass 1: sanitize items. Entries missing id/packageId/url are unusable.
  const itemsById: Record<string, DownloadItem> = {};
  const rawItems = asRecord(parsed.items) ?? {};
  for (const [entryId, rawItem] of Object.entries(rawItems)) {
    const item = asRecord(rawItem);
    if (!item) {
      continue;
    }
    // The map key doubles as a fallback id when the stored record lacks one.
    const id = asText(item.id) || entryId;
    const packageId = asText(item.packageId);
    const url = asText(item.url);
    if (!id || !packageId || !url) {
      continue;
    }
    const statusRaw = asText(item.status) as DownloadStatus;
    const status: DownloadStatus = VALID_DOWNLOAD_STATUSES.has(statusRaw) ? statusRaw : "queued";
    const providerRaw = asText(item.provider) as DebridProvider;
    itemsById[id] = {
      id,
      packageId,
      url,
      provider: VALID_ITEM_PROVIDERS.has(providerRaw) ? providerRaw : null,
      status,
      // Clamp all counters/sizes to sane non-negative ranges.
      retries: clampNumber(item.retries, 0, 0, 1_000_000),
      speedBps: clampNumber(item.speedBps, 0, 0, 10_000_000_000),
      downloadedBytes: clampNumber(item.downloadedBytes, 0, 0, 10_000_000_000_000),
      // totalBytes stays null when unknown (size not reported yet).
      totalBytes: item.totalBytes == null ? null : clampNumber(item.totalBytes, 0, 0, 10_000_000_000_000),
      progressPercent: clampNumber(item.progressPercent, 0, 0, 100),
      fileName: asText(item.fileName) || "download.bin",
      targetPath: asText(item.targetPath),
      // Missing flag defaults to resumable; an explicit value is coerced.
      resumable: item.resumable === undefined ? true : Boolean(item.resumable),
      attempts: clampNumber(item.attempts, 0, 0, 10_000),
      lastError: asText(item.lastError),
      fullStatus: asText(item.fullStatus),
      createdAt: clampNumber(item.createdAt, now, 0, Number.MAX_SAFE_INTEGER),
      updatedAt: clampNumber(item.updatedAt, now, 0, Number.MAX_SAFE_INTEGER)
    };
  }
  // Pass 2: sanitize packages with the same defensive defaults.
  const packagesById: Record<string, PackageEntry> = {};
  const rawPackages = asRecord(parsed.packages) ?? {};
  for (const [entryId, rawPkg] of Object.entries(rawPackages)) {
    const pkg = asRecord(rawPkg);
    if (!pkg) {
      continue;
    }
    const id = asText(pkg.id) || entryId;
    if (!id) {
      continue;
    }
    const statusRaw = asText(pkg.status) as DownloadStatus;
    const status: DownloadStatus = VALID_DOWNLOAD_STATUSES.has(statusRaw) ? statusRaw : "queued";
    const rawItemIds = Array.isArray(pkg.itemIds) ? pkg.itemIds : [];
    packagesById[id] = {
      id,
      name: asText(pkg.name) || "Paket",
      outputDir: asText(pkg.outputDir),
      extractDir: asText(pkg.extractDir),
      status,
      // Keep only non-empty string ids; cross-checked against items below.
      itemIds: rawItemIds
        .map((value) => asText(value))
        .filter((value) => value.length > 0),
      cancelled: Boolean(pkg.cancelled),
      enabled: pkg.enabled === undefined ? true : Boolean(pkg.enabled),
      createdAt: clampNumber(pkg.createdAt, now, 0, Number.MAX_SAFE_INTEGER),
      updatedAt: clampNumber(pkg.updatedAt, now, 0, Number.MAX_SAFE_INTEGER)
    };
  }
  // Referential integrity: drop items whose parent package no longer exists…
  for (const [itemId, item] of Object.entries(itemsById)) {
    if (!packagesById[item.packageId]) {
      delete itemsById[itemId];
    }
  }
  // …and prune package item lists to items that exist AND point back at them.
  for (const pkg of Object.values(packagesById)) {
    pkg.itemIds = pkg.itemIds.filter((itemId) => {
      const item = itemsById[itemId];
      return Boolean(item) && item.packageId === pkg.id;
    });
  }
  // Rebuild the display order: keep stored order for known packages, then
  // append any packages the stored order missed.
  const rawOrder = Array.isArray(parsed.packageOrder) ? parsed.packageOrder : [];
  const packageOrder = rawOrder
    .map((entry) => asText(entry))
    .filter((id) => id in packagesById);
  for (const packageId of Object.keys(packagesById)) {
    if (!packageOrder.includes(packageId)) {
      packageOrder.push(packageId);
    }
  }
  return {
    ...fallback,
    version: clampNumber(parsed.version, fallback.version, 1, 10),
    packageOrder,
    packages: packagesById,
    items: itemsById,
    runStartedAt: clampNumber(parsed.runStartedAt, 0, 0, Number.MAX_SAFE_INTEGER),
    totalDownloadedBytes: clampNumber(parsed.totalDownloadedBytes, 0, 0, Number.MAX_SAFE_INTEGER),
    summaryText: asText(parsed.summaryText),
    reconnectUntil: clampNumber(parsed.reconnectUntil, 0, 0, Number.MAX_SAFE_INTEGER),
    reconnectReason: asText(parsed.reconnectReason),
    paused: Boolean(parsed.paused),
    running: Boolean(parsed.running),
    updatedAt: clampNumber(parsed.updatedAt, now, 0, Number.MAX_SAFE_INTEGER)
  };
}
export function loadSettings(paths: StoragePaths): AppSettings {
ensureBaseDir(paths.baseDir);
if (!fs.existsSync(paths.configFile)) {
return defaultSettings(); return defaultSettings();
} }
const loaded = readSettingsFile(paths.configFile);
if (loaded) {
return loaded;
}
const backupFile = `${paths.configFile}.bak`;
const backupLoaded = fs.existsSync(backupFile) ? readSettingsFile(backupFile) : null;
if (backupLoaded) {
logger.warn("Konfiguration defekt, Backup-Datei wird verwendet");
try {
const payload = JSON.stringify(backupLoaded, null, 2);
const tempPath = `${paths.configFile}.tmp`;
fs.writeFileSync(tempPath, payload, "utf8");
syncRenameWithExdevFallback(tempPath, paths.configFile);
} catch {
// ignore restore write failure
}
return backupLoaded;
}
logger.error("Konfiguration konnte nicht geladen werden (auch Backup fehlgeschlagen)");
return defaultSettings();
} }
function syncRenameWithExdevFallback(tempPath: string, targetPath: string): void { function syncRenameWithExdevFallback(tempPath: string, targetPath: string): void {
@ -221,14 +372,8 @@ export function loadSession(paths: StoragePaths): SessionState {
return emptySession(); return emptySession();
} }
try { try {
const parsed = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as Partial<SessionState>; const parsed = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as unknown;
const session: SessionState = { const session = normalizeLoadedSession(parsed);
...emptySession(),
...parsed,
packages: parsed.packages ?? {},
items: parsed.items ?? {},
packageOrder: parsed.packageOrder ?? []
};
// Reset transient fields that may be stale from a previous crash // Reset transient fields that may be stale from a previous crash
const ACTIVE_STATUSES = new Set(["downloading", "validating", "extracting", "integrity_check", "paused", "reconnect_wait"]); const ACTIVE_STATUSES = new Set(["downloading", "validating", "extracting", "integrity_check", "paused", "reconnect_wait"]);
@ -257,17 +402,10 @@ export function saveSession(paths: StoragePaths, session: SessionState): void {
} }
let asyncSaveRunning = false; let asyncSaveRunning = false;
let asyncSaveQueued: { paths: StoragePaths; session: SessionState } | null = null; let asyncSaveQueued: { paths: StoragePaths; payload: string } | null = null;
export async function saveSessionAsync(paths: StoragePaths, session: SessionState): Promise<void> { async function writeSessionPayload(paths: StoragePaths, payload: string): Promise<void> {
if (asyncSaveRunning) {
asyncSaveQueued = { paths, session };
return;
}
asyncSaveRunning = true;
try {
await fs.promises.mkdir(paths.baseDir, { recursive: true }); await fs.promises.mkdir(paths.baseDir, { recursive: true });
const payload = JSON.stringify({ ...session, updatedAt: Date.now() });
const tempPath = `${paths.sessionFile}.tmp`; const tempPath = `${paths.sessionFile}.tmp`;
await fsp.writeFile(tempPath, payload, "utf8"); await fsp.writeFile(tempPath, payload, "utf8");
try { try {
@ -280,6 +418,16 @@ export async function saveSessionAsync(paths: StoragePaths, session: SessionStat
throw renameError; throw renameError;
} }
} }
}
async function saveSessionPayloadAsync(paths: StoragePaths, payload: string): Promise<void> {
if (asyncSaveRunning) {
asyncSaveQueued = { paths, payload };
return;
}
asyncSaveRunning = true;
try {
await writeSessionPayload(paths, payload);
} catch (error) { } catch (error) {
logger.error(`Async Session-Save fehlgeschlagen: ${String(error)}`); logger.error(`Async Session-Save fehlgeschlagen: ${String(error)}`);
} finally { } finally {
@ -287,7 +435,12 @@ export async function saveSessionAsync(paths: StoragePaths, session: SessionStat
if (asyncSaveQueued) { if (asyncSaveQueued) {
const queued = asyncSaveQueued; const queued = asyncSaveQueued;
asyncSaveQueued = null; asyncSaveQueued = null;
void saveSessionAsync(queued.paths, queued.session); void saveSessionPayloadAsync(queued.paths, queued.payload);
} }
} }
} }
/**
 * Serializes the session (stamping a fresh updatedAt) and forwards the JSON
 * payload to the coalescing async writer, which deduplicates overlapping
 * save requests.
 */
export async function saveSessionAsync(paths: StoragePaths, session: SessionState): Promise<void> {
  const stamped = { ...session, updatedAt: Date.now() };
  await saveSessionPayloadAsync(paths, JSON.stringify(stamped));
}

View File

@ -1,6 +1,7 @@
import fs from "node:fs"; import fs from "node:fs";
import os from "node:os"; import os from "node:os";
import path from "node:path"; import path from "node:path";
import crypto from "node:crypto";
import { spawn } from "node:child_process"; import { spawn } from "node:child_process";
import { Readable } from "node:stream"; import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises"; import { pipeline } from "node:stream/promises";
@ -20,6 +21,7 @@ const UPDATE_USER_AGENT = `RD-Node-Downloader/${APP_VERSION}`;
type ReleaseAsset = { type ReleaseAsset = {
name: string; name: string;
browser_download_url: string; browser_download_url: string;
digest: string;
}; };
export function normalizeUpdateRepo(repo: string): string { export function normalizeUpdateRepo(repo: string): string {
@ -28,6 +30,17 @@ export function normalizeUpdateRepo(repo: string): string {
return DEFAULT_UPDATE_REPO; return DEFAULT_UPDATE_REPO;
} }
// Validates one path segment of an "owner/repo" slug: must start with an
// alphanumeric, use only [A-Za-z0-9._-], be at most 100 chars, and must not
// be "."/".." or contain ".." (path-traversal guard).
const isValidRepoPart = (value: string): boolean => {
  const part = String(value || "").trim();
  const isReservedName = part === "." || part === "..";
  if (part.length === 0 || isReservedName || part.includes("..")) {
    return false;
  }
  return /^[A-Za-z0-9][A-Za-z0-9._-]{0,99}$/.test(part);
};
const normalizeParts = (input: string): string => { const normalizeParts = (input: string): string => {
const cleaned = input const cleaned = input
.replace(/^https?:\/\/(?:www\.)?github\.com\//i, "") .replace(/^https?:\/\/(?:www\.)?github\.com\//i, "")
@ -37,7 +50,11 @@ export function normalizeUpdateRepo(repo: string): string {
.replace(/^\/+|\/+$/g, ""); .replace(/^\/+|\/+$/g, "");
const parts = cleaned.split("/").filter(Boolean); const parts = cleaned.split("/").filter(Boolean);
if (parts.length >= 2) { if (parts.length >= 2) {
return `${parts[0]}/${parts[1]}`; const owner = parts[0];
const repository = parts[1];
if (isValidRepoPart(owner) && isValidRepoPart(repository)) {
return `${owner}/${repository}`;
}
} }
return ""; return "";
}; };
@ -70,6 +87,31 @@ function timeoutController(ms: number): { signal: AbortSignal; clear: () => void
}; };
} }
/**
 * Reads a fetch Response body as JSON with a hard timeout.
 *
 * Returns the parsed payload only when it is a plain (non-array) object;
 * parse failures yield null. On timeout the body stream is cancelled and the
 * returned promise rejects with `timeout:<ms>`. The timer is always cleared.
 */
async function readJsonWithTimeout(response: Response, timeoutMs: number): Promise<Record<string, unknown> | null> {
  let pendingTimer: NodeJS.Timeout | null = null;
  const expiry = new Promise<never>((_ignored, fail) => {
    pendingTimer = setTimeout(() => {
      // Abort the half-read body so the connection is released.
      void response.body?.cancel().catch(() => undefined);
      fail(new Error(`timeout:${timeoutMs}`));
    }, timeoutMs);
  });
  try {
    // json() is mapped to null on parse errors so only the timeout rejects.
    const parsed: unknown = await Promise.race([response.json().catch(() => null), expiry]);
    const isPlainObject = Boolean(parsed) && typeof parsed === "object" && !Array.isArray(parsed);
    return isPlainObject ? (parsed as Record<string, unknown>) : null;
  } finally {
    if (pendingTimer !== null) {
      clearTimeout(pendingTimer);
    }
  }
}
function getDownloadBodyIdleTimeoutMs(): number { function getDownloadBodyIdleTimeoutMs(): number {
const fromEnv = Number(process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS ?? NaN); const fromEnv = Number(process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS ?? NaN);
if (Number.isFinite(fromEnv) && fromEnv >= 1000 && fromEnv <= 30 * 60 * 1000) { if (Number.isFinite(fromEnv) && fromEnv >= 1000 && fromEnv <= 30 * 60 * 1000) {
@ -116,7 +158,8 @@ function readReleaseAssets(payload: Record<string, unknown>): ReleaseAsset[] {
return assets return assets
.map((asset) => ({ .map((asset) => ({
name: String(asset.name || ""), name: String(asset.name || ""),
browser_download_url: String(asset.browser_download_url || "") browser_download_url: String(asset.browser_download_url || ""),
digest: String(asset.digest || "").trim()
})) }))
.filter((asset) => asset.name && asset.browser_download_url); .filter((asset) => asset.name && asset.browser_download_url);
} }
@ -145,10 +188,15 @@ function parseReleasePayload(payload: Record<string, unknown>, fallback: UpdateC
latestTag, latestTag,
releaseUrl, releaseUrl,
setupAssetUrl: setup?.browser_download_url || "", setupAssetUrl: setup?.browser_download_url || "",
setupAssetName: setup?.name || "" setupAssetName: setup?.name || "",
setupAssetDigest: setup?.digest || ""
}; };
} }
// True when a GitHub release payload is flagged as a draft or a prerelease;
// such releases are skipped when resolving the setup asset.
function isDraftOrPrereleaseRelease(payload: Record<string, unknown>): boolean {
  const { draft, prerelease } = payload;
  return [draft, prerelease].some(Boolean);
}
async function fetchReleasePayload(safeRepo: string, endpoint: string): Promise<{ ok: boolean; status: number; payload: Record<string, unknown> | null }> { async function fetchReleasePayload(safeRepo: string, endpoint: string): Promise<{ ok: boolean; status: number; payload: Record<string, unknown> | null }> {
const timeout = timeoutController(RELEASE_FETCH_TIMEOUT_MS); const timeout = timeoutController(RELEASE_FETCH_TIMEOUT_MS);
let response: Response; let response: Response;
@ -164,7 +212,7 @@ async function fetchReleasePayload(safeRepo: string, endpoint: string): Promise<
timeout.clear(); timeout.clear();
} }
const payload = await response.json().catch(() => null) as Record<string, unknown> | null; const payload = await readJsonWithTimeout(response, RELEASE_FETCH_TIMEOUT_MS);
return { return {
ok: response.ok, ok: response.ok,
status: response.status, status: response.status,
@ -245,7 +293,40 @@ function deriveUpdateFileName(check: UpdateCheckResult, url: string): string {
} }
} }
async function resolveSetupAssetFromApi(safeRepo: string, tagHint: string): Promise<{ setupAssetUrl: string; setupAssetName: string } | null> { function normalizeSha256Digest(raw: string): string {
const text = String(raw || "").trim();
const prefixed = text.match(/^sha256:([a-fA-F0-9]{64})$/i);
if (prefixed) {
return prefixed[1].toLowerCase();
}
const plain = text.match(/^([a-fA-F0-9]{64})$/);
return plain ? plain[1].toLowerCase() : "";
}
/**
 * Streams a file through SHA-256 and resolves with the lowercase hex digest.
 * Reads in 1 MiB chunks so large installers never load fully into memory;
 * stream errors (missing file, I/O failure) reject the promise.
 */
async function sha256File(filePath: string): Promise<string> {
  const digest = crypto.createHash("sha256");
  const reader = fs.createReadStream(filePath, { highWaterMark: 1024 * 1024 });
  for await (const chunk of reader) {
    digest.update(typeof chunk === "string" ? Buffer.from(chunk) : (chunk as Buffer));
  }
  return digest.digest("hex").toLowerCase();
}
/**
 * Verifies a downloaded installer against the release asset's SHA-256 digest.
 *
 * Throws when the expected digest is missing/malformed or when the file's
 * actual hash differs — in both cases the update must not be executed.
 */
async function verifyDownloadedInstaller(targetPath: string, expectedDigestRaw: string): Promise<void> {
  const expected = normalizeSha256Digest(expectedDigestRaw);
  if (expected.length === 0) {
    throw new Error("Update-Asset ohne gültigen SHA256-Digest");
  }
  if ((await sha256File(targetPath)) !== expected) {
    throw new Error("Update-Integritätsprüfung fehlgeschlagen (SHA256 mismatch)");
  }
}
async function resolveSetupAssetFromApi(safeRepo: string, tagHint: string): Promise<{ setupAssetUrl: string; setupAssetName: string; setupAssetDigest: string } | null> {
const endpointCandidates = uniqueStrings([ const endpointCandidates = uniqueStrings([
tagHint ? `releases/tags/${encodeURIComponent(tagHint)}` : "", tagHint ? `releases/tags/${encodeURIComponent(tagHint)}` : "",
"releases/latest" "releases/latest"
@ -257,13 +338,17 @@ async function resolveSetupAssetFromApi(safeRepo: string, tagHint: string): Prom
if (!release.ok || !release.payload) { if (!release.ok || !release.payload) {
continue; continue;
} }
if (isDraftOrPrereleaseRelease(release.payload)) {
continue;
}
const setup = pickSetupAsset(readReleaseAssets(release.payload)); const setup = pickSetupAsset(readReleaseAssets(release.payload));
if (!setup) { if (!setup) {
continue; continue;
} }
return { return {
setupAssetUrl: setup.browser_download_url, setupAssetUrl: setup.browser_download_url,
setupAssetName: setup.name setupAssetName: setup.name,
setupAssetDigest: setup.digest
}; };
} catch { } catch {
// ignore and continue with next endpoint candidate // ignore and continue with next endpoint candidate
@ -433,16 +518,18 @@ export async function installLatestUpdate(repo: string, prechecked?: UpdateCheck
let effectiveCheck: UpdateCheckResult = { let effectiveCheck: UpdateCheckResult = {
...check, ...check,
setupAssetUrl: String(check.setupAssetUrl || ""), setupAssetUrl: String(check.setupAssetUrl || ""),
setupAssetName: String(check.setupAssetName || "") setupAssetName: String(check.setupAssetName || ""),
setupAssetDigest: String(check.setupAssetDigest || "")
}; };
if (!effectiveCheck.setupAssetUrl) { if (!effectiveCheck.setupAssetUrl || !effectiveCheck.setupAssetDigest) {
const refreshed = await resolveSetupAssetFromApi(safeRepo, effectiveCheck.latestTag); const refreshed = await resolveSetupAssetFromApi(safeRepo, effectiveCheck.latestTag);
if (refreshed) { if (refreshed) {
effectiveCheck = { effectiveCheck = {
...effectiveCheck, ...effectiveCheck,
setupAssetUrl: refreshed.setupAssetUrl, setupAssetUrl: refreshed.setupAssetUrl,
setupAssetName: refreshed.setupAssetName setupAssetName: refreshed.setupAssetName,
setupAssetDigest: refreshed.setupAssetDigest
}; };
} }
} }
@ -457,6 +544,7 @@ export async function installLatestUpdate(repo: string, prechecked?: UpdateCheck
try { try {
await downloadFromCandidates(candidates, targetPath); await downloadFromCandidates(candidates, targetPath);
await verifyDownloadedInstaller(targetPath, String(effectiveCheck.setupAssetDigest || ""));
const child = spawn(targetPath, [], { const child = spawn(targetPath, [], {
detached: true, detached: true,
stdio: "ignore" stdio: "ignore"

View File

@ -63,6 +63,33 @@ export function isHttpLink(value: string): boolean {
} }
} }
/**
 * Extracts unique, validated http(s) URLs from arbitrary text.
 *
 * Candidate URLs are matched greedily, then trailing punctuation
 * ( ) ] , . ! ? ; : ) is stripped — except a trailing ")" that is balanced by
 * a "(" inside the URL (e.g. Wikipedia-style links), which is kept. Results
 * are filtered through isHttpLink and deduplicated in first-seen order.
 */
export function extractHttpLinksFromText(text: string): string[] {
  // Strips sentence punctuation from the end of a candidate URL while
  // keeping parentheses that belong to the URL itself.
  const trimTrailingPunctuation = (raw: string): string => {
    let candidate = raw;
    while (candidate.length > 0 && /[)\],.!?;:]+$/.test(candidate)) {
      if (candidate.endsWith(")")) {
        const opens = (candidate.match(/\(/g) || []).length;
        const closes = (candidate.match(/\)/g) || []).length;
        if (closes <= opens) {
          break;
        }
      }
      candidate = candidate.slice(0, -1);
    }
    return candidate;
  };
  const found = String(text || "").match(/https?:\/\/[^\s<>"']+/gi) ?? [];
  const unique = new Set<string>();
  for (const raw of found) {
    const candidate = trimTrailingPunctuation(String(raw || "").trim());
    if (candidate && isHttpLink(candidate) && !unique.has(candidate)) {
      unique.add(candidate);
    }
  }
  // Set iteration preserves insertion order, so this matches discovery order.
  return [...unique];
}
export function humanSize(bytes: number): string { export function humanSize(bytes: number): string {
const value = Number(bytes); const value = Number(bytes);
if (!Number.isFinite(value) || value < 0) { if (!Number.isFinite(value) || value < 0) {
@ -84,6 +111,9 @@ export function humanSize(bytes: number): string {
export function filenameFromUrl(url: string): string { export function filenameFromUrl(url: string): string {
try { try {
const parsed = new URL(url); const parsed = new URL(url);
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
return "download.bin";
}
const queryName = parsed.searchParams.get("filename") const queryName = parsed.searchParams.get("filename")
|| parsed.searchParams.get("file") || parsed.searchParams.get("file")
|| parsed.searchParams.get("name") || parsed.searchParams.get("name")

View File

@ -128,6 +128,7 @@ export function App(): ReactElement {
const [activeCollectorTab, setActiveCollectorTab] = useState(collectorTabs[0].id); const [activeCollectorTab, setActiveCollectorTab] = useState(collectorTabs[0].id);
const activeCollectorTabRef = useRef(activeCollectorTab); const activeCollectorTabRef = useRef(activeCollectorTab);
const activeTabRef = useRef<Tab>(tab); const activeTabRef = useRef<Tab>(tab);
const packageOrderRef = useRef<string[]>([]);
const draggedPackageIdRef = useRef<string | null>(null); const draggedPackageIdRef = useRef<string | null>(null);
const [collapsedPackages, setCollapsedPackages] = useState<Record<string, boolean>>({}); const [collapsedPackages, setCollapsedPackages] = useState<Record<string, boolean>>({});
const [downloadSearch, setDownloadSearch] = useState(""); const [downloadSearch, setDownloadSearch] = useState("");
@ -150,6 +151,10 @@ export function App(): ReactElement {
activeTabRef.current = tab; activeTabRef.current = tab;
}, [tab]); }, [tab]);
useEffect(() => {
packageOrderRef.current = snapshot.session.packageOrder;
}, [snapshot.session.packageOrder]);
const showToast = (message: string, timeoutMs = 2200): void => { const showToast = (message: string, timeoutMs = 2200): void => {
setStatusToast(message); setStatusToast(message);
if (toastTimerRef.current) { clearTimeout(toastTimerRef.current); } if (toastTimerRef.current) { clearTimeout(toastTimerRef.current); }
@ -647,24 +652,28 @@ export function App(): ReactElement {
}; };
const movePackage = useCallback((packageId: string, direction: "up" | "down") => { const movePackage = useCallback((packageId: string, direction: "up" | "down") => {
const order = [...snapshot.session.packageOrder]; const currentOrder = packageOrderRef.current;
const order = [...currentOrder];
const idx = order.indexOf(packageId); const idx = order.indexOf(packageId);
if (idx < 0) { return; } if (idx < 0) { return; }
const target = direction === "up" ? idx - 1 : idx + 1; const target = direction === "up" ? idx - 1 : idx + 1;
if (target < 0 || target >= order.length) { return; } if (target < 0 || target >= order.length) { return; }
[order[idx], order[target]] = [order[target], order[idx]]; [order[idx], order[target]] = [order[target], order[idx]];
packageOrderRef.current = order;
void window.rd.reorderPackages(order); void window.rd.reorderPackages(order);
}, [snapshot.session.packageOrder]); }, []);
const reorderPackagesByDrop = useCallback((draggedPackageId: string, targetPackageId: string) => { const reorderPackagesByDrop = useCallback((draggedPackageId: string, targetPackageId: string) => {
const nextOrder = reorderPackageOrderByDrop(snapshot.session.packageOrder, draggedPackageId, targetPackageId); const currentOrder = packageOrderRef.current;
const unchanged = nextOrder.length === snapshot.session.packageOrder.length const nextOrder = reorderPackageOrderByDrop(currentOrder, draggedPackageId, targetPackageId);
&& nextOrder.every((id, index) => id === snapshot.session.packageOrder[index]); const unchanged = nextOrder.length === currentOrder.length
&& nextOrder.every((id, index) => id === currentOrder[index]);
if (unchanged) { if (unchanged) {
return; return;
} }
packageOrderRef.current = nextOrder;
void window.rd.reorderPackages(nextOrder); void window.rd.reorderPackages(nextOrder);
}, [snapshot.session.packageOrder]); }, []);
const addCollectorTab = (): void => { const addCollectorTab = (): void => {
const id = `tab-${nextCollectorId++}`; const id = `tab-${nextCollectorId++}`;
@ -888,7 +897,9 @@ export function App(): ReactElement {
onClick={() => { onClick={() => {
const nextDescending = !downloadsSortDescending; const nextDescending = !downloadsSortDescending;
setDownloadsSortDescending(nextDescending); setDownloadsSortDescending(nextDescending);
const sorted = sortPackageOrderByName(snapshot.session.packageOrder, snapshot.session.packages, nextDescending); const baseOrder = packageOrderRef.current.length > 0 ? packageOrderRef.current : snapshot.session.packageOrder;
const sorted = sortPackageOrderByName(baseOrder, snapshot.session.packages, nextDescending);
packageOrderRef.current = sorted;
void window.rd.reorderPackages(sorted); void window.rd.reorderPackages(sorted);
}} }}
> >
@ -1021,6 +1032,7 @@ export function App(): ReactElement {
</div> </div>
<label className="toggle-line"><input type="checkbox" checked={settingsDraft.autoExtract} onChange={(e) => setBool("autoExtract", e.target.checked)} /> Auto-Extract</label> <label className="toggle-line"><input type="checkbox" checked={settingsDraft.autoExtract} onChange={(e) => setBool("autoExtract", e.target.checked)} /> Auto-Extract</label>
<label className="toggle-line"><input type="checkbox" checked={settingsDraft.autoRename4sf4sj} onChange={(e) => setBool("autoRename4sf4sj", e.target.checked)} /> Auto-Rename (4SF/4SJ)</label> <label className="toggle-line"><input type="checkbox" checked={settingsDraft.autoRename4sf4sj} onChange={(e) => setBool("autoRename4sf4sj", e.target.checked)} /> Auto-Rename (4SF/4SJ)</label>
<label className="toggle-line"><input type="checkbox" checked={settingsDraft.createExtractSubfolder} onChange={(e) => setBool("createExtractSubfolder", e.target.checked)} /> Entpackte Dateien in Paket-Unterordner speichern</label>
<label className="toggle-line"><input type="checkbox" checked={settingsDraft.hybridExtract} onChange={(e) => setBool("hybridExtract", e.target.checked)} /> Hybrid-Extract</label> <label className="toggle-line"><input type="checkbox" checked={settingsDraft.hybridExtract} onChange={(e) => setBool("hybridExtract", e.target.checked)} /> Hybrid-Extract</label>
<label>Passwortliste (eine Zeile pro Passwort)</label> <label>Passwortliste (eine Zeile pro Passwort)</label>
<textarea <textarea

View File

@ -278,12 +278,22 @@ body,
.downloads-toolbar { .downloads-toolbar {
display: flex; display: flex;
justify-content: space-between;
align-items: center; align-items: center;
gap: 10px; gap: 10px;
flex-wrap: wrap; flex-wrap: wrap;
} }
.downloads-toolbar-actions {
display: flex;
align-items: center;
gap: 8px;
flex-wrap: wrap;
}
.downloads-toolbar .search-input {
margin-left: auto;
}
.search-input { .search-input {
width: min(360px, 100%); width: min(360px, 100%);
background: var(--field); background: var(--field);
@ -729,6 +739,11 @@ td {
align-items: flex-start; align-items: flex-start;
} }
.downloads-toolbar .search-input {
width: 100%;
margin-left: 0;
}
.settings-toolbar-actions { .settings-toolbar-actions {
width: 100%; width: 100%;
} }

View File

@ -19,7 +19,7 @@ export type DebridFallbackProvider = DebridProvider | "none";
export type AppTheme = "dark" | "light"; export type AppTheme = "dark" | "light";
export interface BandwidthScheduleEntry { export interface BandwidthScheduleEntry {
id?: string; id: string;
startHour: number; startHour: number;
endHour: number; endHour: number;
speedLimitKbps: number; speedLimitKbps: number;
@ -200,6 +200,7 @@ export interface UpdateCheckResult {
releaseUrl: string; releaseUrl: string;
setupAssetUrl?: string; setupAssetUrl?: string;
setupAssetName?: string; setupAssetName?: string;
setupAssetDigest?: string;
error?: string; error?: string;
} }

View File

@ -1,5 +1,5 @@
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "vitest";
import { reorderPackageOrderByDrop } from "../src/renderer/App"; import { reorderPackageOrderByDrop, sortPackageOrderByName } from "../src/renderer/App";
describe("reorderPackageOrderByDrop", () => { describe("reorderPackageOrderByDrop", () => {
it("moves adjacent package down by one on drop", () => { it("moves adjacent package down by one on drop", () => {
@ -19,3 +19,31 @@ describe("reorderPackageOrderByDrop", () => {
expect(reorderPackageOrderByDrop(order, "a", "a")).toEqual(order); expect(reorderPackageOrderByDrop(order, "a", "a")).toEqual(order);
}); });
}); });
// Covers sortPackageOrderByName: reordering of package IDs by their display name.
describe("sortPackageOrderByName", () => {
it("sorts package IDs alphabetically ascending", () => {
// Mixed-case fixture names ("Alpha", "beta", "Gamma"): the expected order
// places "beta" between "Alpha" and "Gamma", so the comparison ignores case.
const sorted = sortPackageOrderByName(
["pkg3", "pkg1", "pkg2"],
{
pkg1: { id: "pkg1", name: "Alpha", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, createdAt: 0, updatedAt: 0 },
pkg2: { id: "pkg2", name: "beta", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, createdAt: 0, updatedAt: 0 },
pkg3: { id: "pkg3", name: "Gamma", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, createdAt: 0, updatedAt: 0 }
},
false
);
expect(sorted).toEqual(["pkg1", "pkg2", "pkg3"]);
});
it("sorts package IDs alphabetically descending", () => {
// Same fixture with the descending flag set — expects the exact reverse order.
const sorted = sortPackageOrderByName(
["pkg1", "pkg2", "pkg3"],
{
pkg1: { id: "pkg1", name: "Alpha", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, createdAt: 0, updatedAt: 0 },
pkg2: { id: "pkg2", name: "beta", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, createdAt: 0, updatedAt: 0 },
pkg3: { id: "pkg3", name: "Gamma", outputDir: "", extractDir: "", status: "queued", itemIds: [], cancelled: false, enabled: true, createdAt: 0, updatedAt: 0 }
},
true
);
expect(sorted).toEqual(["pkg3", "pkg2", "pkg1"]);
});
});

View File

@ -89,4 +89,21 @@ describe("cleanup", () => {
// Non-matching files should be kept // Non-matching files should be kept
expect(fs.existsSync(path.join(dir, "readme.txt"))).toBe(true); expect(fs.existsSync(path.join(dir, "readme.txt"))).toBe(true);
}); });
// Regression guard: sample cleanup must not follow a link named "sample" and
// delete files that live outside the package directory.
it("does not recurse into sample symlink or junction targets", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-"));
const external = fs.mkdtempSync(path.join(os.tmpdir(), "rd-clean-ext-"));
tempDirs.push(dir, external);
// A .mkv in the EXTERNAL dir that would match sample cleanup if the link were followed.
const outsideFile = path.join(external, "outside-sample.mkv");
fs.writeFileSync(outsideFile, "keep", "utf8");
// Place a link called "sample" inside the package dir, pointing at the external dir.
const linkedSampleDir = path.join(dir, "sample");
// Junction on Windows, plain directory symlink elsewhere.
const linkType: fs.symlink.Type = process.platform === "win32" ? "junction" : "dir";
fs.symlinkSync(external, linkedSampleDir, linkType);
const result = removeSampleArtifacts(dir);
// Nothing may be counted as removed, and the external file must survive.
expect(result.files).toBe(0);
expect(fs.existsSync(outsideFile)).toBe(true);
});
}); });

31
tests/container.test.ts Normal file
View File

@ -0,0 +1,31 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it, vi } from "vitest";
import { importDlcContainers } from "../src/main/container";

// Temp directories created during a test; emptied again in afterEach.
const tempDirs: string[] = [];
const originalFetch = globalThis.fetch;

afterEach(() => {
  globalThis.fetch = originalFetch;
  vi.restoreAllMocks();
  while (tempDirs.length > 0) {
    const dir = tempDirs.pop();
    if (dir) {
      fs.rmSync(dir, { recursive: true, force: true });
    }
  }
});

describe("container", () => {
  it("rejects oversized DLC files before network access", async () => {
    const workDir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dlc-"));
    tempDirs.push(workDir);
    const dlcPath = path.join(workDir, "oversized.dlc");
    // One byte past 8 MiB — importDlcContainers is expected to reject this locally.
    const oversizedLength = 8 * 1024 * 1024 + 1;
    fs.writeFileSync(dlcPath, Buffer.alloc(oversizedLength, 1));
    const fetchSpy = vi.fn(async () => new Response("should-not-run", { status: 500 }));
    globalThis.fetch = fetchSpy as unknown as typeof fetch;
    await expect(importDlcContainers([dlcPath])).rejects.toThrow(/zu groß/i);
    // The size guard must fire before any network request is attempted.
    expect(fetchSpy).toHaveBeenCalledTimes(0);
  });
});

View File

@ -1,5 +1,5 @@
import { afterEach, describe, expect, it, vi } from "vitest"; import { afterEach, describe, expect, it, vi } from "vitest";
import { defaultSettings } from "../src/main/constants"; import { defaultSettings, REQUEST_RETRIES } from "../src/main/constants";
import { DebridService, extractRapidgatorFilenameFromHtml, filenameFromRapidgatorUrlPath, normalizeResolvedFilename } from "../src/main/debrid"; import { DebridService, extractRapidgatorFilenameFromHtml, filenameFromRapidgatorUrlPath, normalizeResolvedFilename } from "../src/main/debrid";
const originalFetch = globalThis.fetch; const originalFetch = globalThis.fetch;
@ -80,7 +80,7 @@ describe("debrid service", () => {
expect(megaWeb).toHaveBeenCalledTimes(0); expect(megaWeb).toHaveBeenCalledTimes(0);
}); });
it("supports BestDebrid auth query fallback", async () => { it("uses BestDebrid auth header without token query fallback", async () => {
const settings = { const settings = {
...defaultSettings(), ...defaultSettings(),
token: "", token: "",
@ -91,15 +91,11 @@ describe("debrid service", () => {
autoProviderFallback: true autoProviderFallback: true
}; };
const calledUrls: string[] = [];
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => { globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url; const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
calledUrls.push(url);
if (url.includes("/api/v1/generateLink?link=")) { if (url.includes("/api/v1/generateLink?link=")) {
return new Response(JSON.stringify({ message: "Bad token, expired, or invalid" }), {
status: 200,
headers: { "Content-Type": "application/json" }
});
}
if (url.includes("/api/v1/generateLink?auth=")) {
return new Response(JSON.stringify({ download: "https://best.example/file.bin", filename: "file.bin", filesize: 2048 }), { return new Response(JSON.stringify({ download: "https://best.example/file.bin", filename: "file.bin", filesize: 2048 }), {
status: 200, status: 200,
headers: { "Content-Type": "application/json" } headers: { "Content-Type": "application/json" }
@ -112,6 +108,7 @@ describe("debrid service", () => {
const result = await service.unrestrictLink("https://rapidgator.net/file/example.part3.rar.html"); const result = await service.unrestrictLink("https://rapidgator.net/file/example.part3.rar.html");
expect(result.provider).toBe("bestdebrid"); expect(result.provider).toBe("bestdebrid");
expect(result.fileSize).toBe(2048); expect(result.fileSize).toBe(2048);
expect(calledUrls.some((url) => url.includes("auth="))).toBe(false);
}); });
it("sends Bearer auth header to BestDebrid", async () => { it("sends Bearer auth header to BestDebrid", async () => {
@ -152,6 +149,63 @@ describe("debrid service", () => {
expect(authHeader).toBe("Bearer best-token"); expect(authHeader).toBe("Bearer best-token");
}); });
// Auth failures are terminal: the service must issue exactly one request
// and surface the error instead of burning retries on a bad token.
it("does not retry BestDebrid auth failures (401)", async () => {
const settings = {
...defaultSettings(),
token: "",
bestToken: "best-token",
providerPrimary: "bestdebrid" as const,
providerSecondary: "none" as const,
providerTertiary: "none" as const,
autoProviderFallback: true
};
let calls = 0;
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
if (url.includes("/api/v1/generateLink?link=")) {
calls += 1;
// Simulated hard auth rejection from BestDebrid.
return new Response(JSON.stringify({ message: "Unauthorized" }), {
status: 401,
headers: { "Content-Type": "application/json" }
});
}
return new Response("not-found", { status: 404 });
}) as typeof fetch;
const service = new DebridService(settings);
await expect(service.unrestrictLink("https://hoster.example/file/no-retry")).rejects.toThrow();
// Exactly one call: no retry loop on a 401.
expect(calls).toBe(1);
});
// Same contract for AllDebrid: a 403 must not be retried.
it("does not retry AllDebrid auth failures (403)", async () => {
const settings = {
...defaultSettings(),
allDebridToken: "ad-token",
providerPrimary: "alldebrid" as const,
providerSecondary: "none" as const,
providerTertiary: "none" as const,
autoProviderFallback: true
};
let calls = 0;
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
if (url.includes("api.alldebrid.com/v4/link/unlock")) {
calls += 1;
return new Response(JSON.stringify({ status: "error", error: { message: "forbidden" } }), {
status: 403,
headers: { "Content-Type": "application/json" }
});
}
return new Response("not-found", { status: 404 });
}) as typeof fetch;
const service = new DebridService(settings);
await expect(service.unrestrictLink("https://hoster.example/file/no-retry-ad")).rejects.toThrow();
expect(calls).toBe(1);
});
it("supports AllDebrid unlock", async () => { it("supports AllDebrid unlock", async () => {
const settings = { const settings = {
...defaultSettings(), ...defaultSettings(),
@ -189,6 +243,21 @@ describe("debrid service", () => {
expect(result.fileSize).toBe(4096); expect(result.fileSize).toBe(4096);
}); });
// MegaDebrid credentials alone are not enough: without the web-fallback callback
// the provider must report itself as not configured ("nicht konfiguriert").
it("treats MegaDebrid as not configured when web fallback callback is unavailable", async () => {
const settings = {
...defaultSettings(),
megaLogin: "user",
megaPassword: "pass",
providerPrimary: "megadebrid" as const,
providerSecondary: "none" as const,
providerTertiary: "none" as const,
autoProviderFallback: false
};
// Note: no web-fallback callback is passed to the service here — that is the point.
const service = new DebridService(settings);
await expect(service.unrestrictLink("https://rapidgator.net/file/missing-mega-web")).rejects.toThrow(/nicht konfiguriert/i);
});
it("uses Mega web path exclusively", async () => { it("uses Mega web path exclusively", async () => {
const settings = { const settings = {
...defaultSettings(), ...defaultSettings(),
@ -505,6 +574,75 @@ describe("debrid service", () => {
const resolved = await service.resolveFilenames([linkA, linkB]); const resolved = await service.resolveFilenames([linkA, linkB]);
expect(resolved.size).toBe(0); expect(resolved.size).toBe(0);
}); });
// Transient 5xx errors on /link/infos must be retried; the second attempt succeeds.
it("retries AllDebrid filename infos after transient server error", async () => {
const settings = {
...defaultSettings(),
allDebridToken: "ad-token"
};
const link = "https://rapidgator.net/file/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let infoCalls = 0;
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
if (url.includes("api.alldebrid.com/v4/link/infos")) {
infoCalls += 1;
// First call fails with a retryable server error, second call succeeds.
if (infoCalls === 1) {
return new Response("temporary error", { status: 500 });
}
return new Response(JSON.stringify({
status: "success",
data: {
infos: [
{ link, filename: "resolved-from-infos.mkv" }
]
}
}), {
status: 200,
headers: { "Content-Type": "application/json" }
});
}
return new Response("not-found", { status: 404 });
}) as typeof fetch;
const service = new DebridService(settings);
const resolved = await service.resolveFilenames([link]);
expect(resolved.get(link)).toBe("resolved-from-infos.mkv");
// Exactly two calls: one failure plus one successful retry.
expect(infoCalls).toBe(2);
});
// An HTML (challenge) body on a 200 response counts as a failed attempt too:
// the API is retried up to REQUEST_RETRIES times, then a single hoster-page
// fallback fetch happens and the link stays unresolved.
it("retries AllDebrid filename infos when HTML challenge is returned", async () => {
const settings = {
...defaultSettings(),
allDebridToken: "ad-token"
};
const link = "https://rapidgator.net/file/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
let infoCalls = 0;
let pageCalls = 0;
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
if (url.includes("api.alldebrid.com/v4/link/infos")) {
infoCalls += 1;
return new Response("<html><title>cf challenge</title></html>", {
status: 200,
headers: { "Content-Type": "text/html" }
});
}
if (url === link) {
pageCalls += 1;
}
return new Response("not-found", { status: 404 });
}) as typeof fetch;
const service = new DebridService(settings);
const resolved = await service.resolveFilenames([link]);
expect(resolved.size).toBe(0);
expect(infoCalls).toBe(REQUEST_RETRIES);
expect(pageCalls).toBe(1);
});
}); });
describe("normalizeResolvedFilename", () => { describe("normalizeResolvedFilename", () => {

View File

@ -3900,4 +3900,191 @@ describe("download manager", () => {
expect(fs.existsSync(originalExtractedPath)).toBe(true); expect(fs.existsSync(originalExtractedPath)).toBe(true);
expect(fs.existsSync(path.join(extractDir, unexpectedName))).toBe(false); expect(fs.existsSync(path.join(extractDir, unexpectedName))).toBe(false);
}); });
// Malformed queue import JSON must produce a controlled, user-readable error.
// NOTE(review): the regex uses ASCII "Ungultige" — confirm it actually matches the
// app's error text (a German "Ungültige" with umlaut would not match).
it("throws a controlled error for invalid queue import JSON", () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dm-"));
tempDirs.push(root);
const manager = new DownloadManager(
{
...defaultSettings(),
token: "rd-token",
outputDir: path.join(root, "downloads"),
extractDir: path.join(root, "extract")
},
emptySession(),
createStoragePaths(path.join(root, "state"))
);
expect(() => manager.importQueue("{not-json")).toThrow(/Ungultige Queue-Datei/i);
});
// In "global" limit mode applySpeedLimit must schedule the next allowed send time.
it("applies global speed limit path when global mode is enabled", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dm-"));
tempDirs.push(root);
const manager = new DownloadManager(
{
...defaultSettings(),
token: "rd-token",
outputDir: path.join(root, "downloads"),
extractDir: path.join(root, "extract"),
speedLimitEnabled: true,
speedLimitMode: "global",
speedLimitKbps: 512
},
emptySession(),
createStoragePaths(path.join(root, "state"))
);
// Reach into private state via a structural cast (test-only escape hatch).
const internal = manager as unknown as {
applySpeedLimit: (chunkBytes: number, localWindowBytes: number, localWindowStarted: number) => Promise<void>;
globalSpeedLimitNextAt: number;
};
const start = Date.now();
await internal.applySpeedLimit(1024, 0, start);
// The global pacing deadline must have moved into the future.
expect(internal.globalSpeedLimitNextAt).toBeGreaterThan(start);
});
// start() with nothing runnable must still reset the speed-measurement window.
it("resets speed window head when start finds no runnable items", () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dm-"));
tempDirs.push(root);
const manager = new DownloadManager(
{
...defaultSettings(),
token: "rd-token",
outputDir: path.join(root, "downloads"),
extractDir: path.join(root, "extract")
},
emptySession(),
createStoragePaths(path.join(root, "state"))
);
const internal = manager as unknown as {
speedEvents: Array<{ at: number; bytes: number }>;
speedEventsHead: number;
speedBytesLastWindow: number;
};
// Seed stale speed-window state that a fresh start must wipe.
internal.speedEvents = [{ at: Date.now() - 10_000, bytes: 999 }];
internal.speedEventsHead = 5;
internal.speedBytesLastWindow = 999;
manager.start();
expect(internal.speedEventsHead).toBe(0);
expect(internal.speedEvents.length).toBe(0);
expect(internal.speedBytesLastWindow).toBe(0);
});
// Resolving a start conflict with "skip" must also drop all run-tracking entries.
it("cleans run tracking when start conflict is skipped", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dm-"));
tempDirs.push(root);
const manager = new DownloadManager(
{
...defaultSettings(),
token: "rd-token",
outputDir: path.join(root, "downloads"),
extractDir: path.join(root, "extract")
},
emptySession(),
createStoragePaths(path.join(root, "state"))
);
manager.addPackages([{ name: "conflict-skip", links: ["https://dummy/skip"] }]);
const snapshot = manager.getSnapshot();
const packageId = snapshot.session.packageOrder[0];
const itemId = snapshot.session.packages[packageId]?.itemIds[0] || "";
const internal = manager as unknown as {
runItemIds: Set<string>;
runPackageIds: Set<string>;
runOutcomes: Map<string, "completed" | "failed" | "cancelled">;
};
// Pre-populate run tracking as if the item had already been scheduled.
internal.runItemIds.add(itemId);
internal.runPackageIds.add(packageId);
internal.runOutcomes.set(itemId, "completed");
const result = await manager.resolveStartConflict(packageId, "skip");
expect(result.skipped).toBe(true);
// Every tracking structure must be clean afterwards.
expect(internal.runItemIds.has(itemId)).toBe(false);
expect(internal.runPackageIds.has(packageId)).toBe(false);
expect(internal.runOutcomes.has(itemId)).toBe(false);
});
// "overwrite" resolution must clear stale outcomes and re-queue the item.
it("clears stale run outcomes on overwrite conflict resolution", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dm-"));
tempDirs.push(root);
const manager = new DownloadManager(
{
...defaultSettings(),
token: "rd-token",
outputDir: path.join(root, "downloads"),
extractDir: path.join(root, "extract")
},
emptySession(),
createStoragePaths(path.join(root, "state"))
);
manager.addPackages([{ name: "conflict-overwrite", links: ["https://dummy/overwrite"] }]);
const snapshot = manager.getSnapshot();
const packageId = snapshot.session.packageOrder[0];
const itemId = snapshot.session.packages[packageId]?.itemIds[0] || "";
const internal = manager as unknown as {
runOutcomes: Map<string, "completed" | "failed" | "cancelled">;
};
internal.runOutcomes.set(itemId, "failed");
const result = await manager.resolveStartConflict(packageId, "overwrite");
expect(result.overwritten).toBe(true);
expect(internal.runOutcomes.has(itemId)).toBe(false);
expect(manager.getSnapshot().session.items[itemId]?.status).toBe("queued");
});
// finishRun must empty the speed-display buffers so a later run starts from zero.
it("clears speed display buffers when run finishes", () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dm-"));
tempDirs.push(root);
const manager = new DownloadManager(
{
...defaultSettings(),
token: "rd-token",
outputDir: path.join(root, "downloads"),
extractDir: path.join(root, "extract")
},
emptySession(),
createStoragePaths(path.join(root, "state"))
);
const internal = manager as unknown as {
runItemIds: Set<string>;
runOutcomes: Map<string, "completed" | "failed" | "cancelled">;
runCompletedPackages: Set<string>;
session: { runStartedAt: number; totalDownloadedBytes: number; running: boolean; paused: boolean };
speedEvents: Array<{ at: number; bytes: number }>;
speedEventsHead: number;
speedBytesLastWindow: number;
finishRun: () => void;
};
// Fabricate an in-flight run with one completed item and non-empty speed buffers.
internal.session.running = true;
internal.session.paused = false;
internal.session.runStartedAt = Date.now() - 2000;
internal.session.totalDownloadedBytes = 4096;
internal.runItemIds = new Set(["x"]);
internal.runOutcomes = new Map([["x", "completed"]]);
internal.runCompletedPackages = new Set();
internal.speedEvents = [{ at: Date.now(), bytes: 4096 }];
internal.speedEventsHead = 1;
internal.speedBytesLastWindow = 4096;
internal.finishRun();
expect(internal.speedEvents.length).toBe(0);
expect(internal.speedEventsHead).toBe(0);
expect(internal.speedBytesLastWindow).toBe(0);
});
}); });

View File

@ -554,4 +554,68 @@ describe("extractor", () => {
expect(targets.has(r01)).toBe(true); expect(targets.has(r01)).toBe(true);
expect(targets.has(r02)).toBe(true); expect(targets.has(r02)).toBe(true);
}); });
// A ZIP entry exceeding the in-memory safety limit must fail the archive outright,
// not silently fall back to an external extractor.
it("does not fallback to external extractor when ZIP safety guard triggers", async () => {
// Save and override the env-configured limit; restored in finally.
const previousLimit = process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = "8";
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
const targetDir = path.join(root, "out");
fs.mkdirSync(packageDir, { recursive: true });
const zipPath = path.join(packageDir, "too-large.zip");
const zip = new AdmZip();
// 9 MiB entry vs. 8 MiB limit — must trip the guard.
zip.addFile("large.bin", Buffer.alloc(9 * 1024 * 1024, 7));
zip.writeZip(zipPath);
try {
const result = await extractPackageArchives({
packageDir,
targetDir,
cleanupMode: "none",
conflictMode: "overwrite",
removeLinks: false,
removeSamples: false
});
expect(result.extracted).toBe(0);
expect(result.failed).toBe(1);
expect(String(result.lastError)).toMatch(/ZIP-Eintrag.*groß/i);
} finally {
// Restore the env var exactly as found (unset stays unset).
if (previousLimit === undefined) {
delete process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB;
} else {
process.env.RD_ZIP_ENTRY_MEMORY_LIMIT_MB = previousLimit;
}
}
});
// Windows-only: resume-state bookkeeping must compare archive names case-insensitively,
// so "EPISODE.ZIP" in the progress file skips extracting "episode.zip".
it("matches resume-state archive names case-insensitively on Windows", async () => {
if (process.platform !== "win32") {
return;
}
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-extract-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
const targetDir = path.join(root, "out");
fs.mkdirSync(packageDir, { recursive: true });
// Deliberately invalid ZIP content: extraction would fail if it were attempted.
const archivePath = path.join(packageDir, "episode.zip");
fs.writeFileSync(archivePath, "not-a-zip", "utf8");
fs.writeFileSync(path.join(packageDir, ".rd_extract_progress.json"), JSON.stringify({ completedArchives: ["EPISODE.ZIP"] }), "utf8");
const result = await extractPackageArchives({
packageDir,
targetDir,
cleanupMode: "none",
conflictMode: "overwrite",
removeLinks: false,
removeSamples: false
});
// Counted as already extracted via the resume state; nothing fails.
expect(result.extracted).toBe(1);
expect(result.failed).toBe(0);
});
}); });

View File

@ -70,4 +70,15 @@ describe("integrity", () => {
expect(parseHashLine("")).toBeNull(); expect(parseHashLine("")).toBeNull();
expect(parseHashLine(" ")).toBeNull(); expect(parseHashLine(" ")).toBeNull();
}); });
// When two manifests list the same filename, the first entry read wins.
it("keeps first hash entry when duplicate filename appears across manifests", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-int-"));
tempDirs.push(dir);
// Two .md5 manifests with conflicting digests for the same file.
fs.writeFileSync(path.join(dir, "disc1.md5"), "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa movie.mkv\n", "utf8");
fs.writeFileSync(path.join(dir, "disc2.md5"), "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb movie.mkv\n", "utf8");
const manifest = readHashManifest(dir);
// disc1.md5 sorts/reads first, so its digest must be kept.
expect(manifest.get("movie.mkv")?.digest).toBe("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
});
}); });

42
tests/realdebrid.test.ts Normal file
View File

@ -0,0 +1,42 @@
import { afterEach, describe, expect, it } from "vitest";
import { RealDebridClient } from "../src/main/realdebrid";

const originalFetch = globalThis.fetch;

afterEach(() => {
  globalThis.fetch = originalFetch;
});

// Installs a fetch stub that always answers HTTP 200 with the given body and content type.
function stubFetch(body: string, contentType: string): void {
  globalThis.fetch = (async (): Promise<Response> =>
    new Response(body, { status: 200, headers: { "Content-Type": contentType } })) as typeof fetch;
}

describe("realdebrid client", () => {
  it("returns a clear error when HTML is returned instead of JSON", async () => {
    stubFetch("<html><title>Cloudflare</title></html>", "text/html");
    const client = new RealDebridClient("rd-token");
    await expect(client.unrestrictLink("https://hoster.example/file/html")).rejects.toThrow(/html/i);
  });

  it("does not leak raw response body on JSON parse errors", async () => {
    stubFetch("<html>token=secret-should-not-leak</html>", "application/json");
    const client = new RealDebridClient("rd-token");
    // FIX: the previous version threw its "expected unrestrict to fail" sentinel inside
    // the try block, so its own catch swallowed it and an accidental success surfaced
    // as a misleading "json" assertion failure. Capture the rejection explicitly instead.
    let thrown: unknown;
    try {
      await client.unrestrictLink("https://hoster.example/file/invalid-json");
    } catch (error) {
      thrown = error;
    }
    expect(thrown).toBeDefined();
    const text = String(thrown || "").toLowerCase();
    // The error must mention JSON parsing but never echo the raw (possibly secret) body.
    expect(text).toContain("json");
    expect(text).not.toContain("secret-should-not-leak");
    expect(text).not.toContain("<html>");
  });
});

View File

@ -4,7 +4,7 @@ import path from "node:path";
import { afterEach, describe, expect, it } from "vitest"; import { afterEach, describe, expect, it } from "vitest";
import { AppSettings } from "../src/shared/types"; import { AppSettings } from "../src/shared/types";
import { defaultSettings } from "../src/main/constants"; import { defaultSettings } from "../src/main/constants";
import { createStoragePaths, emptySession, loadSession, loadSettings, normalizeSettings, saveSession, saveSettings } from "../src/main/storage"; import { createStoragePaths, emptySession, loadSession, loadSettings, normalizeSettings, saveSession, saveSessionAsync, saveSettings } from "../src/main/storage";
const tempDirs: string[] = []; const tempDirs: string[] = [];
@ -152,7 +152,7 @@ describe("settings storage", () => {
it("assigns and preserves bandwidth schedule ids", () => { it("assigns and preserves bandwidth schedule ids", () => {
const normalized = normalizeSettings({ const normalized = normalizeSettings({
...defaultSettings(), ...defaultSettings(),
bandwidthSchedules: [{ startHour: 1, endHour: 6, speedLimitKbps: 1024, enabled: true }] bandwidthSchedules: [{ id: "", startHour: 1, endHour: 6, speedLimitKbps: 1024, enabled: true }]
}); });
const generatedId = normalized.bandwidthSchedules[0]?.id; const generatedId = normalized.bandwidthSchedules[0]?.id;
@ -314,6 +314,80 @@ describe("settings storage", () => {
expect(loaded.cleanupMode).toBe(defaults.cleanupMode); expect(loaded.cleanupMode).toBe(defaults.cleanupMode);
}); });
// A corrupted primary config must transparently fall back to the .bak file.
it("loads backup config when primary config is corrupted", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
tempDirs.push(dir);
const paths = createStoragePaths(dir);
const backupSettings = {
...defaultSettings(),
outputDir: path.join(dir, "backup-output"),
packageName: "from-backup"
};
// Valid backup next to a deliberately broken primary file.
fs.writeFileSync(`${paths.configFile}.bak`, JSON.stringify(backupSettings, null, 2), "utf8");
fs.writeFileSync(paths.configFile, "{broken-json", "utf8");
const loaded = loadSettings(paths);
expect(loaded.outputDir).toBe(backupSettings.outputDir);
expect(loaded.packageName).toBe("from-backup");
});
// loadSession must drop structurally invalid packages/items/order entries
// and keep only the well-formed ones.
it("sanitizes malformed persisted session structures", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
tempDirs.push(dir);
const paths = createStoragePaths(dir);
// Session JSON mixing valid and invalid shapes (wrong types, stray ids).
fs.writeFileSync(paths.sessionFile, JSON.stringify({
version: "invalid",
packageOrder: [123, "pkg-valid"],
packages: {
"1": "bad-entry",
"pkg-valid": {
id: "pkg-valid",
name: "Valid Package",
outputDir: "C:/tmp/out",
extractDir: "C:/tmp/extract",
status: "downloading",
itemIds: ["item-valid", 123],
cancelled: false,
enabled: true
}
},
items: {
"item-valid": {
id: "item-valid",
packageId: "pkg-valid",
url: "https://example.com/file",
status: "queued",
fileName: "file.bin",
targetPath: "C:/tmp/out/file.bin"
},
"item-bad": "broken"
}
}), "utf8");
const loaded = loadSession(paths);
// Only the fully valid package, item, and order entry survive.
expect(Object.keys(loaded.packages)).toEqual(["pkg-valid"]);
expect(Object.keys(loaded.items)).toEqual(["item-valid"]);
expect(loaded.packageOrder).toEqual(["pkg-valid"]);
});
// saveSessionAsync must snapshot the session at call time so later mutations
// of the same object do not leak into the persisted file.
it("captures async session save payload before later mutations", async () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
tempDirs.push(dir);
const paths = createStoragePaths(dir);
const session = emptySession();
session.summaryText = "before-mutation";
const pending = saveSessionAsync(paths, session);
// Mutate after kicking off the save but before awaiting it.
session.summaryText = "after-mutation";
await pending;
const persisted = JSON.parse(fs.readFileSync(paths.sessionFile, "utf8")) as { summaryText: string };
expect(persisted.summaryText).toBe("before-mutation");
});
it("applies defaults for missing fields when loading old config", () => { it("applies defaults for missing fields when loading old config", () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-")); const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rd-store-"));
tempDirs.push(dir); tempDirs.push(dir);

View File

@ -1,4 +1,5 @@
import fs from "node:fs"; import fs from "node:fs";
import crypto from "node:crypto";
import { afterEach, describe, expect, it, vi } from "vitest"; import { afterEach, describe, expect, it, vi } from "vitest";
import { checkGitHubUpdate, installLatestUpdate, isRemoteNewer, normalizeUpdateRepo, parseVersionParts } from "../src/main/update"; import { checkGitHubUpdate, installLatestUpdate, isRemoteNewer, normalizeUpdateRepo, parseVersionParts } from "../src/main/update";
import { APP_VERSION } from "../src/main/constants"; import { APP_VERSION } from "../src/main/constants";
@ -6,6 +7,10 @@ import { UpdateCheckResult } from "../src/shared/types";
const originalFetch = globalThis.fetch; const originalFetch = globalThis.fetch;
/** Hex-encoded SHA-256 digest of the given buffer (lowercase, 64 chars). */
function sha256Hex(buffer: Buffer): string {
  const hasher = crypto.createHash("sha256");
  hasher.update(buffer);
  return hasher.digest("hex");
}
afterEach(() => { afterEach(() => {
globalThis.fetch = originalFetch; globalThis.fetch = originalFetch;
vi.restoreAllMocks(); vi.restoreAllMocks();
@ -58,7 +63,8 @@ describe("update", () => {
}, },
{ {
name: "Real-Debrid-Downloader Setup 9.9.9.exe", name: "Real-Debrid-Downloader Setup 9.9.9.exe",
browser_download_url: "https://example.invalid/setup.exe" browser_download_url: "https://example.invalid/setup.exe",
digest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
} }
] ]
}), }),
@ -76,6 +82,7 @@ describe("update", () => {
it("falls back to alternate download URL when setup asset URL returns 404", async () => { it("falls back to alternate download URL when setup asset URL returns 404", async () => {
const executablePayload = fs.readFileSync(process.execPath); const executablePayload = fs.readFileSync(process.execPath);
const executableDigest = sha256Hex(executablePayload);
const requestedUrls: string[] = []; const requestedUrls: string[] = [];
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => { globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url; const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
@ -100,7 +107,8 @@ describe("update", () => {
latestTag: "v9.9.9", latestTag: "v9.9.9",
releaseUrl: "https://github.com/owner/repo/releases/tag/v9.9.9", releaseUrl: "https://github.com/owner/repo/releases/tag/v9.9.9",
setupAssetUrl: "https://example.invalid/stale-setup.exe", setupAssetUrl: "https://example.invalid/stale-setup.exe",
setupAssetName: "Real-Debrid-Downloader Setup 9.9.9.exe" setupAssetName: "Real-Debrid-Downloader Setup 9.9.9.exe",
setupAssetDigest: `sha256:${executableDigest}`
}; };
const result = await installLatestUpdate("owner/repo", prechecked); const result = await installLatestUpdate("owner/repo", prechecked);
@ -109,6 +117,103 @@ describe("update", () => {
expect(requestedUrls.filter((url) => url.includes("stale-setup.exe"))).toHaveLength(1); expect(requestedUrls.filter((url) => url.includes("stale-setup.exe"))).toHaveLength(1);
}); });
// A draft release on the tag endpoint must be ignored; the installer falls back
// to /releases/latest and uses the stable asset with a matching digest.
it("skips draft tag payload and resolves setup asset from stable latest release", async () => {
const executablePayload = fs.readFileSync(process.execPath);
const requestedUrls: string[] = [];
globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
requestedUrls.push(url);
// Tag endpoint answers with a DRAFT release — must not be used.
if (url.endsWith("/releases/tags/v9.9.9")) {
return new Response(JSON.stringify({
tag_name: "v9.9.9",
draft: true,
prerelease: false,
assets: [
{
name: "Draft Setup 9.9.9.exe",
browser_download_url: "https://example.invalid/draft-setup.exe"
}
]
}), {
status: 200,
headers: { "Content-Type": "application/json" }
});
}
// Stable /releases/latest carries the real asset plus a sha256 digest.
if (url.endsWith("/releases/latest")) {
const stableDigest = sha256Hex(executablePayload);
return new Response(JSON.stringify({
tag_name: "v9.9.9",
draft: false,
prerelease: false,
assets: [
{
name: "Stable Setup 9.9.9.exe",
browser_download_url: "https://example.invalid/stable-setup.exe",
digest: `sha256:${stableDigest}`
}
]
}), {
status: 200,
headers: { "Content-Type": "application/json" }
});
}
// Serving the current executable as the installer keeps the digest check valid.
if (url.includes("stable-setup.exe")) {
return new Response(executablePayload, {
status: 200,
headers: { "Content-Type": "application/octet-stream" }
});
}
return new Response("missing", { status: 404 });
}) as typeof fetch;
const prechecked: UpdateCheckResult = {
updateAvailable: true,
currentVersion: APP_VERSION,
latestVersion: "9.9.9",
latestTag: "v9.9.9",
releaseUrl: "https://github.com/owner/repo/releases/tag/v9.9.9",
setupAssetUrl: "",
setupAssetName: ""
};
const result = await installLatestUpdate("owner/repo", prechecked);
expect(result.started).toBe(true);
// Both endpoints were consulted; only the stable asset was downloaded.
expect(requestedUrls.some((url) => url.endsWith("/releases/tags/v9.9.9"))).toBe(true);
expect(requestedUrls.some((url) => url.endsWith("/releases/latest"))).toBe(true);
expect(requestedUrls.some((url) => url.includes("stable-setup.exe"))).toBe(true);
expect(requestedUrls.some((url) => url.includes("draft-setup.exe"))).toBe(false);
});
// A JSON body read that never resolves must be aborted with a timeout error
// and the underlying body stream must be cancelled.
it("times out hanging release JSON body reads", async () => {
vi.useFakeTimers();
try {
const cancelSpy = vi.fn(async () => undefined);
// Fake Response whose json() promise never settles.
globalThis.fetch = (async (): Promise<Response> => ({
ok: true,
status: 200,
headers: new Headers({ "Content-Type": "application/json" }),
json: () => new Promise(() => undefined),
body: {
cancel: cancelSpy
}
} as unknown as Response)) as typeof fetch;
const pending = checkGitHubUpdate("owner/repo");
// Advance past the internal body-read timeout (13 s covers it).
await vi.advanceTimersByTimeAsync(13000);
const result = await pending;
expect(result.updateAvailable).toBe(false);
expect(String(result.error || "")).toMatch(/timeout/i);
expect(cancelSpy).toHaveBeenCalledTimes(1);
} finally {
vi.useRealTimers();
}
});
it("aborts hanging update body downloads on idle timeout", async () => { it("aborts hanging update body downloads on idle timeout", async () => {
const previousTimeout = process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS; const previousTimeout = process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS;
process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS = "1000"; process.env.RD_UPDATE_BODY_IDLE_TIMEOUT_MS = "1000";
@ -137,7 +242,8 @@ describe("update", () => {
latestTag: "v9.9.9", latestTag: "v9.9.9",
releaseUrl: "https://github.com/owner/repo/releases/tag/v9.9.9", releaseUrl: "https://github.com/owner/repo/releases/tag/v9.9.9",
setupAssetUrl: "https://example.invalid/hang-setup.exe", setupAssetUrl: "https://example.invalid/hang-setup.exe",
setupAssetName: "" setupAssetName: "",
setupAssetDigest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}; };
const result = await installLatestUpdate("owner/repo", prechecked); const result = await installLatestUpdate("owner/repo", prechecked);
@ -151,6 +257,35 @@ describe("update", () => {
} }
} }
}, 20000); }, 20000);
it("blocks installer start when SHA256 digest mismatches", async () => {
  // Any real local binary works as download payload; its true hash will not
  // match the deliberately wrong digest advertised below.
  const installerBytes = fs.readFileSync(process.execPath);
  globalThis.fetch = (async (input: RequestInfo | URL): Promise<Response> => {
    const requestUrl =
      typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
    if (!requestUrl.includes("mismatch-setup.exe")) {
      return new Response("missing", { status: 404 });
    }
    return new Response(installerBytes, {
      status: 200,
      headers: { "Content-Type": "application/octet-stream" }
    });
  }) as typeof fetch;
  // Precomputed check result carrying a digest that cannot match the payload.
  const checkResult: UpdateCheckResult = {
    updateAvailable: true,
    currentVersion: APP_VERSION,
    latestVersion: "9.9.9",
    latestTag: "v9.9.9",
    releaseUrl: "https://github.com/owner/repo/releases/tag/v9.9.9",
    setupAssetUrl: "https://example.invalid/mismatch-setup.exe",
    setupAssetName: "setup.exe",
    setupAssetDigest: "sha256:1111111111111111111111111111111111111111111111111111111111111111"
  };
  const outcome = await installLatestUpdate("owner/repo", checkResult);
  expect(outcome.started).toBe(false);
  expect(outcome.message).toMatch(/integrit|sha256|mismatch/i);
});
}); });
describe("normalizeUpdateRepo extended", () => { describe("normalizeUpdateRepo extended", () => {
@ -169,6 +304,12 @@ describe("normalizeUpdateRepo extended", () => {
expect(normalizeUpdateRepo(" ")).toBe("Sucukdeluxe/real-debrid-downloader"); expect(normalizeUpdateRepo(" ")).toBe("Sucukdeluxe/real-debrid-downloader");
}); });
it("rejects traversal-like owner or repo segments", () => {
  // Any "owner/repo" input containing path traversal must fall back to the
  // hard-coded default repository.
  const fallbackRepo = "Sucukdeluxe/real-debrid-downloader";
  const traversalInputs = [
    "../owner/repo",
    "owner/../repo",
    "https://github.com/owner/../../repo"
  ];
  for (const candidate of traversalInputs) {
    expect(normalizeUpdateRepo(candidate)).toBe(fallbackRepo);
  }
});
it("handles www prefix", () => { it("handles www prefix", () => {
expect(normalizeUpdateRepo("https://www.github.com/owner/repo")).toBe("owner/repo"); expect(normalizeUpdateRepo("https://www.github.com/owner/repo")).toBe("owner/repo");
expect(normalizeUpdateRepo("www.github.com/owner/repo")).toBe("owner/repo"); expect(normalizeUpdateRepo("www.github.com/owner/repo")).toBe("owner/repo");

View File

@ -1,5 +1,5 @@
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "vitest";
import { parsePackagesFromLinksText, isHttpLink, sanitizeFilename, formatEta, filenameFromUrl, looksLikeOpaqueFilename } from "../src/main/utils"; import { extractHttpLinksFromText, parsePackagesFromLinksText, isHttpLink, sanitizeFilename, formatEta, filenameFromUrl, looksLikeOpaqueFilename } from "../src/main/utils";
describe("utils", () => { describe("utils", () => {
it("validates http links", () => { it("validates http links", () => {
@ -9,6 +9,15 @@ describe("utils", () => {
expect(isHttpLink("foo bar")).toBe(false); expect(isHttpLink("foo bar")).toBe(false);
}); });
it("extracts links from text and trims trailing punctuation", () => {
  // Links wrapped in parentheses or followed by punctuation must come back
  // clean, while dots that belong to the path itself are kept.
  const sampleText =
    "See (https://example.com/test) and https://rapidgator.net/file/abc123, plus https://example.com/a.b.";
  const extracted = extractHttpLinksFromText(sampleText);
  expect(extracted).toEqual([
    "https://example.com/test",
    "https://rapidgator.net/file/abc123",
    "https://example.com/a.b"
  ]);
});
it("sanitizes filenames", () => { it("sanitizes filenames", () => {
expect(sanitizeFilename("foo/bar:baz*")).toBe("foo bar baz"); expect(sanitizeFilename("foo/bar:baz*")).toBe("foo bar baz");
expect(sanitizeFilename(" ")).toBe("Paket"); expect(sanitizeFilename(" ")).toBe("Paket");
@ -42,6 +51,8 @@ describe("utils", () => {
expect(filenameFromUrl("https://debrid.example/dl/abc?filename=Movie.S01E01.mkv")).toBe("Movie.S01E01.mkv"); expect(filenameFromUrl("https://debrid.example/dl/abc?filename=Movie.S01E01.mkv")).toBe("Movie.S01E01.mkv");
expect(filenameFromUrl("https://debrid.example/dl/%E0%A4%A")).toBe("%E0%A4%A"); expect(filenameFromUrl("https://debrid.example/dl/%E0%A4%A")).toBe("%E0%A4%A");
expect(filenameFromUrl("https://debrid.example/dl/e51f6809bb6ca615601f5ac5db433737")).toBe("e51f6809bb6ca615601f5ac5db433737"); expect(filenameFromUrl("https://debrid.example/dl/e51f6809bb6ca615601f5ac5db433737")).toBe("e51f6809bb6ca615601f5ac5db433737");
expect(filenameFromUrl("data:text/plain;base64,SGVsbG8=")).toBe("download.bin");
expect(filenameFromUrl("blob:https://example.com/12345678-1234-1234-1234-1234567890ab")).toBe("download.bin");
expect(looksLikeOpaqueFilename("download.bin")).toBe(true); expect(looksLikeOpaqueFilename("download.bin")).toBe(true);
expect(looksLikeOpaqueFilename("e51f6809bb6ca615601f5ac5db433737")).toBe(true); expect(looksLikeOpaqueFilename("e51f6809bb6ca615601f5ac5db433737")).toBe(true);
expect(looksLikeOpaqueFilename("movie.part1.rar")).toBe(false); expect(looksLikeOpaqueFilename("movie.part1.rar")).toBe(false);