Release v1.5.68: Extractor optimizations inspired by JDownloader

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sucukdeluxe 2026-03-03 21:25:02 +01:00
parent 1956be0c71
commit 3dbb94d298
4 changed files with 390 additions and 10 deletions

View File

@ -1,6 +1,6 @@
{
"name": "real-debrid-downloader",
"version": "1.5.67",
"version": "1.5.68",
"description": "Real-Debrid Downloader Desktop (Electron + React + TypeScript)",
"main": "build/main/main/main.js",
"author": "Sucukdeluxe",

View File

@ -21,7 +21,7 @@ export const SAMPLE_VIDEO_EXTENSIONS = new Set([".mkv", ".mp4", ".avi", ".mov",
export const LINK_ARTIFACT_EXTENSIONS = new Set([".url", ".webloc", ".dlc", ".rsdf", ".ccf"]);
export const SAMPLE_TOKEN_RE = /(^|[._\-\s])sample([._\-\s]|$)/i;
export const ARCHIVE_TEMP_EXTENSIONS = new Set([".rar", ".zip", ".7z", ".tmp", ".part", ".tar", ".gz", ".bz2", ".xz"]);
export const ARCHIVE_TEMP_EXTENSIONS = new Set([".rar", ".zip", ".7z", ".tmp", ".part", ".tar", ".gz", ".bz2", ".xz", ".rev"]);
export const RAR_SPLIT_RE = /\.r\d{2,3}$/i;
export const MAX_MANIFEST_FILE_BYTES = 5 * 1024 * 1024;

View File

@ -106,7 +106,37 @@ const EXTRACT_PER_GIB_TIMEOUT_MS = 4 * 60 * 1000;
const EXTRACT_MAX_TIMEOUT_MS = 120 * 60 * 1000;
const ARCHIVE_SORT_COLLATOR = new Intl.Collator(undefined, { numeric: true, sensitivity: "base" });
const DISK_SPACE_SAFETY_FACTOR = 1.1;
const NESTED_EXTRACT_BLACKLIST_RE = /\.(iso|img|bin|dmg)$/i;
const NESTED_EXTRACT_BLACKLIST_RE = /\.(iso|img|bin|dmg|vhd|vhdx|vmdk|wim)$/i;
export type ArchiveSignature = "rar" | "7z" | "zip" | "gzip" | "bzip2" | "xz" | null;
// Magic-byte prefixes (lowercase hex) for each supported archive container.
// Insertion order is preserved by Object.entries (non-numeric string keys),
// and the detector returns the first prefix match — keep the original order.
const SIGNATURE_PREFIX_TABLE = {
  "526172211a07": "rar",   // "Rar!\x1a\x07" — shared lead-in of RAR4/RAR5 headers
  "377abcaf271c": "7z",    // 7-Zip container
  "504b0304": "zip",       // "PK\x03\x04" local file header
  "1f8b08": "gzip",        // gzip magic plus deflate method byte
  "425a68": "bzip2",       // "BZh"
  "fd377a585a00": "xz",    // xz stream header
} as const;
const ARCHIVE_SIGNATURES: { prefix: string; type: ArchiveSignature }[] = Object.entries(
  SIGNATURE_PREFIX_TABLE,
).map(([prefix, type]) => ({ prefix, type }));
/**
 * Sniffs the first bytes of a file and matches them against the known
 * archive magic-byte prefixes in ARCHIVE_SIGNATURES.
 *
 * @param filePath absolute or relative path of the file to inspect
 * @returns the detected archive type, or null when the file is unreadable,
 *          shorter than the smallest signature, or matches no known prefix.
 */
export async function detectArchiveSignature(filePath: string): Promise<ArchiveSignature> {
  let fd: fs.promises.FileHandle | null = null;
  try {
    fd = await fs.promises.open(filePath, "r");
    const header = Buffer.alloc(8);
    const { bytesRead } = await fd.read(header, 0, 8, 0);
    // Shortest known signature is 3 bytes (gzip/bzip2); less can never match.
    if (bytesRead < 3) return null;
    const hex = header.subarray(0, bytesRead).toString("hex");
    for (const sig of ARCHIVE_SIGNATURES) {
      if (hex.startsWith(sig.prefix)) return sig.type;
    }
    return null;
  } catch {
    // Unreadable/missing file: by contract this is reported as "no signature".
    return null;
  } finally {
    // Swallow close() failures: an exception thrown inside `finally` would
    // otherwise replace the function's result with a rejection, breaking the
    // "never throws, returns null on error" contract above.
    try {
      await fd?.close();
    } catch {
      // ignore
    }
  }
}
async function estimateArchivesTotalBytes(candidates: string[]): Promise<number> {
let total = 0;
@ -172,6 +202,8 @@ function archiveSortKey(filePath: string): string {
.replace(/\.part0*1\.rar$/i, "")
.replace(/\.zip\.\d{3}$/i, "")
.replace(/\.7z\.\d{3}$/i, "")
.replace(/\.\d{3}$/i, "")
.replace(/\.tar\.(gz|bz2|xz)$/i, "")
.replace(/\.rar$/i, "")
.replace(/\.zip$/i, "")
.replace(/\.7z$/i, "")
@ -192,6 +224,12 @@ function archiveTypeRank(filePath: string): number {
if (/\.7z(?:\.\d{3})?$/i.test(fileName)) {
return 3;
}
if (/\.tar\.(gz|bz2|xz)$/i.test(fileName)) {
return 4;
}
if (/\.\d{3}$/i.test(fileName)) {
return 5;
}
return 9;
}
@ -237,10 +275,18 @@ export async function findArchiveCandidates(packageDir: string): Promise<string[
}
return !fileNamesLower.has(`${fileName}.001`.toLowerCase());
});
const tarCompressed = files.filter((filePath) => /\.tar\.(gz|bz2|xz)$/i.test(filePath));
// Generic .001 splits (HJSplit etc.) — exclude already-recognized .zip.001 and .7z.001
const genericSplit = files.filter((filePath) => {
const fileName = path.basename(filePath).toLowerCase();
if (!/\.001$/.test(fileName)) return false;
if (/\.zip\.001$/.test(fileName) || /\.7z\.001$/.test(fileName)) return false;
return true;
});
const unique: string[] = [];
const seen = new Set<string>();
for (const candidate of [...multipartRar, ...singleRar, ...zipSplit, ...zip, ...sevenSplit, ...seven]) {
for (const candidate of [...multipartRar, ...singleRar, ...zipSplit, ...zip, ...sevenSplit, ...seven, ...tarCompressed, ...genericSplit]) {
const key = pathSetKey(candidate);
if (seen.has(key)) {
continue;
@ -388,11 +434,52 @@ async function clearExtractResumeState(packageDir: string, packageId?: string):
}
}
/** Coarse buckets for extractor failures, used for logging/diagnostics. */
export type ExtractErrorCategory =
  | "crc_error"
  | "wrong_password"
  | "missing_parts"
  | "unsupported_format"
  | "disk_full"
  | "timeout"
  | "aborted"
  | "no_extractor"
  | "unknown";
// Ordered rule table: the first category whose needle occurs in the lowercased
// message wins, so specific markers must stay ahead of generic ones (e.g.
// "missing volume" is checked before the very broad "not found").
const EXTRACT_ERROR_RULES: Array<[Exclude<ExtractErrorCategory, "unknown">, string[]]> = [
  ["aborted", ["aborted:extract", "extract_aborted"]],
  ["timeout", ["timeout"]],
  ["wrong_password", ["wrong password", "falsches passwort", "incorrect password"]],
  ["crc_error", ["crc failed", "checksum error", "crc error"]],
  ["missing_parts", ["missing volume", "next volume", "unexpected end of archive", "missing parts"]],
  ["no_extractor", ["nicht gefunden", "not found", "no extractor"]],
  ["unsupported_format", ["kein rar-archiv", "not a rar archive", "unsupported", "unsupportedmethod"]],
  ["disk_full", ["disk full", "speicherplatz", "no space left", "not enough space"]],
];
/**
 * Maps a raw extractor error message (English or German tool output) to a
 * coarse ExtractErrorCategory; unrecognized text yields "unknown".
 */
export function classifyExtractionError(errorText: string): ExtractErrorCategory {
  const text = String(errorText || "").toLowerCase();
  for (const [category, needles] of EXTRACT_ERROR_RULES) {
    if (needles.some((needle) => text.includes(needle))) return category;
  }
  return "unknown";
}
// True when the error text carries one of the internal markers used to signal
// a deliberately aborted extraction (as opposed to a real failure).
function isExtractAbortError(errorText: string): boolean {
  const normalized = String(errorText || "").toLowerCase();
  return ["aborted:extract", "extract_aborted"].some((marker) => normalized.includes(marker));
}
/**
 * Derives password candidates from an archive's own file name: the bare stem
 * (archive/split suffixes stripped) plus, when it differs, a variant with
 * dots and underscores replaced by spaces. Returns [] when no stem remains.
 */
export function archiveFilenamePasswords(archiveName: string): string[] {
  const raw = String(archiveName || "");
  if (!raw) return [];
  // Stripped in sequence; order mirrors the suffix handling elsewhere in this
  // module (multipart rar first, then split/tarball suffixes, then plain ones).
  const suffixPatterns: RegExp[] = [
    /\.part\d+\.rar$/i,
    /\.zip\.\d{3}$/i,
    /\.7z\.\d{3}$/i,
    /\.\d{3}$/i,
    /\.tar\.(gz|bz2|xz)$/i,
    /\.(rar|zip|7z|tar|gz|bz2|xz)$/i,
  ];
  const stem = suffixPatterns.reduce((name, pattern) => name.replace(pattern, ""), raw);
  if (!stem) return [];
  const candidates = [stem];
  const spaced = stem.replace(/[._]/g, " ");
  if (spaced !== stem) candidates.push(spaced);
  return candidates;
}
function archivePasswords(listInput: string): string[] {
const custom = String(listInput || "")
.split(/\r?\n/g)
@ -1491,6 +1578,7 @@ export function collectArchiveCleanupTargets(sourceArchivePath: string, director
if (multipartRar) {
const prefix = escapeRegex(multipartRar[1]);
addMatching(new RegExp(`^${prefix}\\.part\\d+\\.rar$`, "i"));
addMatching(new RegExp(`^${prefix}\\.rev$`, "i"));
return Array.from(targets);
}
@ -1498,6 +1586,7 @@ export function collectArchiveCleanupTargets(sourceArchivePath: string, director
const stem = escapeRegex(fileName.replace(/\.rar$/i, ""));
addMatching(new RegExp(`^${stem}\\.rar$`, "i"));
addMatching(new RegExp(`^${stem}\\.r\\d{2,3}$`, "i"));
addMatching(new RegExp(`^${stem}\\.rev$`, "i"));
return Array.from(targets);
}
@ -1531,6 +1620,14 @@ export function collectArchiveCleanupTargets(sourceArchivePath: string, director
return Array.from(targets);
}
// Generic .NNN split files (HJSplit etc.)
const genericSplit = fileName.match(/^(.*)\.(\d{3})$/i);
if (genericSplit) {
const stem = escapeRegex(genericSplit[1]);
addMatching(new RegExp(`^${stem}\\.\\d{3}$`, "i"));
return Array.from(targets);
}
return Array.from(targets);
}
@ -1814,6 +1911,23 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, 1100);
const hybrid = Boolean(options.hybridMode);
// Insert archive-filename-derived passwords after "" but before custom passwords
const filenamePasswords = archiveFilenamePasswords(archiveName);
const archivePasswordCandidates = filenamePasswords.length > 0
? Array.from(new Set(["", ...filenamePasswords, ...passwordCandidates.filter((p) => p !== "")]))
: passwordCandidates;
// Validate generic .001 splits via file signature before attempting extraction
const isGenericSplit = /\.\d{3}$/i.test(archiveName) && !/\.(zip|7z)\.\d{3}$/i.test(archiveName);
if (isGenericSplit) {
const sig = await detectArchiveSignature(archivePath);
if (!sig) {
logger.info(`Generische Split-Datei übersprungen (keine Archiv-Signatur): ${archiveName}`);
continue;
}
logger.info(`Generische Split-Datei verifiziert (Signatur: ${sig}): ${archiveName}`);
}
logger.info(`Entpacke Archiv: ${path.basename(archivePath)} -> ${options.targetDir}${hybrid ? " (hybrid, reduced threads, low I/O)" : ""}`);
try {
const ext = path.extname(archivePath).toLowerCase();
@ -1821,7 +1935,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
const preferExternal = await shouldPreferExternalZip(archivePath);
if (preferExternal) {
try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, passwordCandidates, (value) => {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid);
@ -1842,7 +1956,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
throw error;
}
try {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, passwordCandidates, (value) => {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid);
@ -1856,7 +1970,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
}
}
} else {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, passwordCandidates, (value) => {
const usedPassword = await runExternalExtract(archivePath, options.targetDir, options.conflictMode, archivePasswordCandidates, (value) => {
archivePercent = Math.max(archivePercent, value);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
}, options.signal, hybrid);
@ -1876,7 +1990,8 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
throw new Error("aborted:extract");
}
lastError = errorText;
logger.error(`Entpack-Fehler ${path.basename(archivePath)}: ${errorText}`);
const errorCategory = classifyExtractionError(errorText);
logger.error(`Entpack-Fehler ${path.basename(archivePath)} [${errorCategory}]: ${errorText}`);
emitProgress(extracted + failed, archiveName, "extracting", archivePercent, Date.now() - archiveStartedAt);
if (isNoExtractorError(errorText)) {
const remaining = candidates.length - (extracted + failed);
@ -1898,6 +2013,8 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
.filter((p) => !NESTED_EXTRACT_BLACKLIST_RE.test(p));
if (nestedCandidates.length > 0) {
logger.info(`Nested-Extraction: ${nestedCandidates.length} Archive im Output gefunden`);
let nestedExtracted = 0;
let nestedFailed = 0;
try {
await checkDiskSpaceForExtraction(options.targetDir, nestedCandidates);
} catch (spaceError) {
@ -1936,6 +2053,7 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
passwordCandidates = prioritizePassword(passwordCandidates, usedPw);
}
extracted += 1;
nestedExtracted += 1;
extractedArchives.add(nestedArchive);
resumeCompleted.add(nestedKey);
await writeExtractResumeState(options.packageDir, resumeCompleted, options.packageId);
@ -1953,12 +2071,15 @@ export async function extractPackageArchives(options: ExtractOptions): Promise<{
break;
}
failed += 1;
nestedFailed += 1;
lastError = errText;
logger.error(`Nested-Entpack-Fehler ${nestedName}: ${errText}`);
const nestedCategory = classifyExtractionError(errText);
logger.error(`Nested-Entpack-Fehler ${nestedName} [${nestedCategory}]: ${errText}`);
} finally {
clearInterval(nestedPulse);
}
}
logger.info(`Nested-Extraction abgeschlossen: ${nestedExtracted} entpackt, ${nestedFailed} fehlgeschlagen von ${nestedCandidates.length} Kandidaten`);
}
} catch (nestedError) {
const errText = String(nestedError);

View File

@ -3,7 +3,15 @@ import os from "node:os";
import path from "node:path";
import AdmZip from "adm-zip";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { buildExternalExtractArgs, collectArchiveCleanupTargets, extractPackageArchives } from "../src/main/extractor";
import {
buildExternalExtractArgs,
collectArchiveCleanupTargets,
extractPackageArchives,
archiveFilenamePasswords,
detectArchiveSignature,
classifyExtractionError,
findArchiveCandidates,
} from "../src/main/extractor";
const tempDirs: string[] = [];
const originalExtractBackend = process.env.RD_EXTRACT_BACKEND;
@ -743,4 +751,255 @@ describe("extractor", () => {
expect(fs.existsSync(path.join(targetDir, "readme.txt"))).toBe(true);
});
});
// Password candidates derived from archive file names: the raw stem plus a
// space-separated variant when the stem contains dots or underscores.
describe("archiveFilenamePasswords", () => {
  it("extracts stem and spaced variant from archive name", () => {
    const candidates = archiveFilenamePasswords("MyRelease.S01E01.rar");
    expect(candidates).toContain("MyRelease.S01E01");
    expect(candidates).toContain("MyRelease S01E01");
  });
  it("strips multipart rar suffix", () => {
    const candidates = archiveFilenamePasswords("Show.S02E03.part01.rar");
    expect(candidates).toContain("Show.S02E03");
    expect(candidates).toContain("Show S02E03");
  });
  it("strips .zip.001 suffix", () => {
    expect(archiveFilenamePasswords("Movie.2024.zip.001")).toContain("Movie.2024");
  });
  it("strips .tar.gz suffix", () => {
    expect(archiveFilenamePasswords("backup.tar.gz")).toContain("backup");
  });
  it("returns empty array for empty input", () => {
    expect(archiveFilenamePasswords("")).toEqual([]);
  });
  it("returns single entry when no dots/underscores", () => {
    expect(archiveFilenamePasswords("simple.zip")).toEqual(["simple"]);
  });
  it("replaces underscores with spaces", () => {
    const candidates = archiveFilenamePasswords("my_archive_name.7z");
    expect(candidates).toContain("my_archive_name");
    expect(candidates).toContain("my archive name");
  });
});
// Cleanup must also collect .rev companion files (RAR recovery volumes) that
// share the archive's stem, for both single and multipart RAR sets.
describe(".rev cleanup", () => {
it("collects .rev files for single RAR cleanup", () => {
// Fixture: a single .rar plus its .r00 continuation and a .rev recovery file.
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-rev-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
const mainRar = path.join(packageDir, "show.rar");
const rev = path.join(packageDir, "show.rev");
const r00 = path.join(packageDir, "show.r00");
// Contents are irrelevant — cleanup target matching is name/regex based.
fs.writeFileSync(mainRar, "a", "utf8");
fs.writeFileSync(rev, "b", "utf8");
fs.writeFileSync(r00, "c", "utf8");
const targets = new Set(collectArchiveCleanupTargets(mainRar));
expect(targets.has(mainRar)).toBe(true);
expect(targets.has(rev)).toBe(true);
expect(targets.has(r00)).toBe(true);
});
it("collects .rev files for multipart RAR cleanup", () => {
// Fixture: part01/part02 of a multipart set plus the shared-stem .rev file.
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-rev-mp-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
const part1 = path.join(packageDir, "show.part01.rar");
const part2 = path.join(packageDir, "show.part02.rar");
const rev = path.join(packageDir, "show.rev");
fs.writeFileSync(part1, "a", "utf8");
fs.writeFileSync(part2, "b", "utf8");
fs.writeFileSync(rev, "c", "utf8");
const targets = new Set(collectArchiveCleanupTargets(part1));
expect(targets.has(part1)).toBe(true);
expect(targets.has(part2)).toBe(true);
expect(targets.has(rev)).toBe(true);
});
});
// Generic HJSplit-style .NNN splits: cleaning up the .001 entry point must
// also sweep sibling numbered parts with the same stem — but never other stems.
describe("generic .001 split cleanup", () => {
it("collects all numbered parts for generic splits", () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-split-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
const p001 = path.join(packageDir, "movie.001");
const p002 = path.join(packageDir, "movie.002");
const p003 = path.join(packageDir, "movie.003");
// Same directory, different stem — must NOT be collected.
const other = path.join(packageDir, "other.001");
fs.writeFileSync(p001, "a", "utf8");
fs.writeFileSync(p002, "b", "utf8");
fs.writeFileSync(p003, "c", "utf8");
fs.writeFileSync(other, "x", "utf8");
const targets = new Set(collectArchiveCleanupTargets(p001));
expect(targets.has(p001)).toBe(true);
expect(targets.has(p002)).toBe(true);
expect(targets.has(p003)).toBe(true);
expect(targets.has(other)).toBe(false);
});
});
// Magic-byte sniffing: each fixture file gets just enough leading bytes to
// match (or miss) one of the known archive signatures.
describe("detectArchiveSignature", () => {
  // Writes `hex` as raw bytes into a fresh tracked temp dir; returns the path.
  const writeHexFixture = (fileName: string, hex: string): string => {
    const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-sig-"));
    tempDirs.push(root);
    const filePath = path.join(root, fileName);
    fs.writeFileSync(filePath, Buffer.from(hex, "hex"));
    return filePath;
  };
  it("detects RAR signature", async () => {
    // RAR5 signature: 52 61 72 21 1A 07
    const filePath = writeHexFixture("test.rar", "526172211a0700");
    expect(await detectArchiveSignature(filePath)).toBe("rar");
  });
  it("detects ZIP signature", async () => {
    const filePath = writeHexFixture("test.zip", "504b030414000000");
    expect(await detectArchiveSignature(filePath)).toBe("zip");
  });
  it("detects 7z signature", async () => {
    const filePath = writeHexFixture("test.7z", "377abcaf271c0004");
    expect(await detectArchiveSignature(filePath)).toBe("7z");
  });
  it("returns null for non-archive files", async () => {
    const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-sig-"));
    tempDirs.push(root);
    const filePath = path.join(root, "test.txt");
    fs.writeFileSync(filePath, "Hello World", "utf8");
    expect(await detectArchiveSignature(filePath)).toBeNull();
  });
  it("returns null for non-existent file", async () => {
    expect(await detectArchiveSignature("/nonexistent/file.rar")).toBeNull();
  });
});
// Coverage for the newly recognized candidate formats: compressed tarballs
// and generic .001 split entry points, including de-duplication of .zip.001.
describe("findArchiveCandidates extended formats", () => {
it("finds .tar.gz files", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-tar-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
fs.writeFileSync(path.join(packageDir, "backup.tar.gz"), "data", "utf8");
// Non-archive neighbor must be ignored by candidate discovery.
fs.writeFileSync(path.join(packageDir, "readme.txt"), "info", "utf8");
const candidates = await findArchiveCandidates(packageDir);
expect(candidates.map((c) => path.basename(c))).toContain("backup.tar.gz");
});
it("finds .tar.bz2 files", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-tar-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
fs.writeFileSync(path.join(packageDir, "archive.tar.bz2"), "data", "utf8");
const candidates = await findArchiveCandidates(packageDir);
expect(candidates.map((c) => path.basename(c))).toContain("archive.tar.bz2");
});
it("finds generic .001 split files", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-split-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
fs.writeFileSync(path.join(packageDir, "movie.001"), "data", "utf8");
fs.writeFileSync(path.join(packageDir, "movie.002"), "data", "utf8");
const candidates = await findArchiveCandidates(packageDir);
const names = candidates.map((c) => path.basename(c));
expect(names).toContain("movie.001");
// .002 should NOT be in candidates (only .001 is the entry point)
expect(names).not.toContain("movie.002");
});
it("does not duplicate .zip.001 as generic split", async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), "rd-dedup-"));
tempDirs.push(root);
const packageDir = path.join(root, "pkg");
fs.mkdirSync(packageDir, { recursive: true });
fs.writeFileSync(path.join(packageDir, "movie.zip.001"), "data", "utf8");
fs.writeFileSync(path.join(packageDir, "movie.zip.002"), "data", "utf8");
const candidates = await findArchiveCandidates(packageDir);
const names = candidates.map((c) => path.basename(c));
// .zip.001 should appear once from zipSplit detection, not duplicated by genericSplit
expect(names.filter((n) => n === "movie.zip.001")).toHaveLength(1);
});
});
// Table-style checks: each raw extractor message (English or German tool
// output) must map to its expected error category.
describe("classifyExtractionError", () => {
  // Small helper so every case reads as (message, expected-category).
  const expectCategory = (message: string, category: ReturnType<typeof classifyExtractionError>) => {
    expect(classifyExtractionError(message)).toBe(category);
  };
  it("classifies CRC errors", () => {
    expectCategory("CRC failed for file.txt", "crc_error");
    expectCategory("Checksum error in data", "crc_error");
  });
  it("classifies wrong password", () => {
    expectCategory("Wrong password", "wrong_password");
    expectCategory("Falsches Passwort", "wrong_password");
  });
  it("classifies missing parts", () => {
    expectCategory("Missing volume: part2.rar", "missing_parts");
    expectCategory("Unexpected end of archive", "missing_parts");
  });
  it("classifies unsupported format", () => {
    expectCategory("kein RAR-Archiv", "unsupported_format");
    expectCategory("UNSUPPORTEDMETHOD", "unsupported_format");
  });
  it("classifies disk full", () => {
    expectCategory("Nicht genug Speicherplatz", "disk_full");
    expectCategory("No space left on device", "disk_full");
  });
  it("classifies timeout", () => {
    expectCategory("Entpacken Timeout nach 360s", "timeout");
  });
  it("classifies abort", () => {
    expectCategory("aborted:extract", "aborted");
  });
  it("classifies no extractor", () => {
    expectCategory("WinRAR/UnRAR nicht gefunden", "no_extractor");
  });
  it("returns unknown for unrecognized errors", () => {
    expectCategory("something weird happened", "unknown");
  });
});
});