Replace streaming download with chunked buffer download to fix corruption
Some checks are pending
Build and Release / build (push) Waiting to run

- Replace Readable.fromWeb() + pipeline with ReadableStream.getReader() loop
- Collect chunks in memory, verify size, then write to disk in one shot
- Add Accept-Encoding: identity to prevent content encoding issues
- Eliminates stream conversion bugs that caused file corruption on some servers

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sucukdeluxe 2026-03-01 17:20:02 +01:00
parent 98425764d3
commit e384199c6e
2 changed files with 37 additions and 56 deletions

View File

@@ -1,6 +1,6 @@
{ {
"name": "real-debrid-downloader", "name": "real-debrid-downloader",
"version": "1.4.62", "version": "1.4.63",
"description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)", "description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
"main": "build/main/main/main.js", "main": "build/main/main/main.js",
"author": "Sucukdeluxe", "author": "Sucukdeluxe",

View File

@@ -3,9 +3,6 @@ import os from "node:os";
import path from "node:path"; import path from "node:path";
import crypto from "node:crypto"; import crypto from "node:crypto";
import { spawn } from "node:child_process"; import { spawn } from "node:child_process";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import { ReadableStream as NodeReadableStream } from "node:stream/web";
import { APP_VERSION, DEFAULT_UPDATE_REPO } from "./constants"; import { APP_VERSION, DEFAULT_UPDATE_REPO } from "./constants";
import { UpdateCheckResult, UpdateInstallProgress, UpdateInstallResult } from "../shared/types"; import { UpdateCheckResult, UpdateInstallProgress, UpdateInstallResult } from "../shared/types";
import { compactErrorText, humanSize } from "./utils"; import { compactErrorText, humanSize } from "./utils";
@@ -700,7 +697,8 @@ async function downloadFile(url: string, targetPath: string, onProgress?: Update
try { try {
response = await fetch(url, { response = await fetch(url, {
headers: { headers: {
"User-Agent": UPDATE_USER_AGENT "User-Agent": UPDATE_USER_AGENT,
"Accept-Encoding": "identity"
}, },
redirect: "follow", redirect: "follow",
signal: combineSignals(timeout.signal, shutdownSignal) signal: combineSignals(timeout.signal, shutdownSignal)
@@ -741,81 +739,64 @@
emitDownloadProgress(true); emitDownloadProgress(true);
await fs.promises.mkdir(path.dirname(targetPath), { recursive: true }); await fs.promises.mkdir(path.dirname(targetPath), { recursive: true });
const source = Readable.fromWeb(response.body as unknown as NodeReadableStream<Uint8Array>);
const target = fs.createWriteStream(targetPath);
const idleTimeoutMs = getDownloadBodyIdleTimeoutMs(); const idleTimeoutMs = getDownloadBodyIdleTimeoutMs();
let idleTimer: NodeJS.Timeout | null = null; let idleTimer: NodeJS.Timeout | null = null;
let idleTimedOut = false;
const clearIdleTimer = (): void => { const clearIdleTimer = (): void => {
if (idleTimer) { if (idleTimer) {
clearTimeout(idleTimer); clearTimeout(idleTimer);
idleTimer = null; idleTimer = null;
} }
}; };
const onIdleTimeout = (): void => {
const timeoutError = new Error(`Update Download Body Timeout nach ${Math.ceil(idleTimeoutMs / 1000)}s`);
source.destroy(timeoutError);
target.destroy(timeoutError);
};
const resetIdleTimer = (): void => { const resetIdleTimer = (): void => {
if (idleTimeoutMs <= 0) { if (idleTimeoutMs <= 0) {
return; return;
} }
clearIdleTimer(); clearIdleTimer();
idleTimer = setTimeout(onIdleTimeout, idleTimeoutMs); idleTimer = setTimeout(() => {
idleTimedOut = true;
reader.cancel().catch(() => undefined);
}, idleTimeoutMs);
}; };
const onSourceData = (chunk: string | Buffer): void => { const reader = response.body.getReader();
downloadedBytes += typeof chunk === "string" ? Buffer.byteLength(chunk) : chunk.byteLength; const chunks: Buffer[] = [];
try {
resetIdleTimer();
for (;;) {
if (shutdownSignal?.aborted) {
await reader.cancel().catch(() => undefined);
throw new Error("aborted:update_shutdown");
}
const { done, value } = await reader.read();
if (done) {
break;
}
const buf = Buffer.from(value.buffer, value.byteOffset, value.byteLength);
chunks.push(buf);
downloadedBytes += buf.byteLength;
resetIdleTimer(); resetIdleTimer();
emitDownloadProgress(false); emitDownloadProgress(false);
};
const onSourceDone = (): void => {
clearIdleTimer();
};
if (idleTimeoutMs > 0) {
source.on("data", onSourceData);
source.on("end", onSourceDone);
source.on("close", onSourceDone);
source.on("error", onSourceDone);
target.on("close", onSourceDone);
target.on("error", onSourceDone);
resetIdleTimer();
} }
try {
await pipeline(source, target);
} catch (error) {
try {
source.destroy();
} catch {
// ignore
}
try {
target.destroy();
} catch {
// ignore
}
throw error;
} finally { } finally {
clearIdleTimer(); clearIdleTimer();
source.off("data", onSourceData);
source.off("end", onSourceDone);
source.off("close", onSourceDone);
source.off("error", onSourceDone);
target.off("close", onSourceDone);
target.off("error", onSourceDone);
} }
emitDownloadProgress(true);
logger.info(`Update-Download abgeschlossen: ${targetPath}`);
if (totalBytes) { if (idleTimedOut) {
const actualSize = (await fs.promises.stat(targetPath)).size; throw new Error(`Update Download Body Timeout nach ${Math.ceil(idleTimeoutMs / 1000)}s`);
if (actualSize !== totalBytes) {
throw new Error(`Update Download unvollständig (${actualSize} / ${totalBytes} Bytes)`);
} }
const fileBuffer = Buffer.concat(chunks);
if (totalBytes && fileBuffer.byteLength !== totalBytes) {
throw new Error(`Update Download unvollständig (${fileBuffer.byteLength} / ${totalBytes} Bytes)`);
} }
await fs.promises.writeFile(targetPath, fileBuffer);
emitDownloadProgress(true);
logger.info(`Update-Download abgeschlossen: ${targetPath} (${fileBuffer.byteLength} Bytes)`);
return { expectedBytes: totalBytes }; return { expectedBytes: totalBytes };
} }