feat: stream hashing and archive-entry import support

- Añade `computeHashesFromStream` para hashing desde streams
- Adapta `importDirectory` para procesar entradas internas usando `streamArchiveEntry`
- Añade tests unitarios para hashing por stream e import de entradas de archive
This commit is contained in:
2026-02-09 19:36:18 +01:00
parent 97a7f74685
commit 7ca465fb73
6 changed files with 183 additions and 3 deletions

View File

@@ -72,4 +72,37 @@ export async function computeHashes(filePath: string): Promise<{
});
}
/**
 * Computes size, MD5, SHA-1 and CRC32 digests of a readable stream in a
 * single pass, without buffering the whole payload in memory.
 *
 * @param rs - Stream to consume; it is read to completion.
 * @returns Byte count plus lowercase hex digests; `crc32` is zero-padded
 *          to 8 hex chars. The promise rejects if the stream emits 'error'.
 */
export async function computeHashesFromStream(rs: NodeJS.ReadableStream): Promise<{
  size: number;
  md5: string;
  sha1: string;
  crc32: string;
}> {
  return new Promise((resolve, reject) => {
    const md5 = createHash('md5');
    const sha1 = createHash('sha1');
    let size = 0;
    // Running CRC32 state (reflected algorithm): starts fully inverted.
    let crc = 0xffffffff >>> 0;
    // Pass reject directly instead of an untyped `(err: any)` wrapper.
    rs.on('error', reject);
    rs.on('data', (chunk: Buffer | string) => {
      // Streams emit strings when an encoding was set on them; normalize
      // to Buffer so `size` counts bytes and updateCrc sees binary data.
      const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
      md5.update(buf);
      sha1.update(buf);
      size += buf.length;
      crc = updateCrc(crc, buf);
    });
    rs.on('end', () => {
      // Final XOR; `>>> 0` forces an unsigned value before hex formatting.
      // toString(16) already yields lowercase, so no .toLowerCase() needed.
      const final = (crc ^ 0xffffffff) >>> 0;
      resolve({
        size,
        md5: md5.digest('hex'),
        sha1: sha1.digest('hex'),
        crc32: final.toString(16).padStart(8, '0'),
      });
    });
  });
}
// Re-export the file-path-based hasher as the module default.
export default computeHashes;

View File

@@ -12,7 +12,8 @@
import path from 'path';
import { promises as fsPromises } from 'fs';
import { scanDirectory } from './fsScanner';
import { computeHashes } from './checksumService';
import { computeHashes, computeHashesFromStream } from './checksumService';
import { streamArchiveEntry } from './archiveReader';
import prisma from '../plugins/prisma';
/**
@@ -66,7 +67,22 @@ export async function importDirectory(
processed++;
try {
const hashes = await computeHashes(file.path);
let hashes: { size: number; md5: string; sha1: string; crc32: string };
if (file.isArchiveEntry) {
const stream = await streamArchiveEntry(file.containerPath, file.entryPath, logger);
if (!stream) {
logger.warn?.(
{ file },
'importDirectory: no se pudo extraer entrada del archive, saltando'
);
continue;
}
hashes = await computeHashesFromStream(stream as any);
} else {
hashes = await computeHashes(file.path);
}
const checksum = hashes.md5;
const size = hashes.size;
@@ -100,7 +116,10 @@ export async function importDirectory(
upserted++;
}
} catch (err) {
logger.warn?.({ err, file }, 'importDirectory: error procesando fichero, se continúa con el siguiente');
logger.warn?.(
{ err, file },
'importDirectory: error procesando fichero, se continúa con el siguiente'
);
continue;
}
}