feat: stream hashing and archive-entry import support
- Añade `computeHashesFromStream` para hashing desde streams
- Adapta `importDirectory` para procesar entradas internas usando `streamArchiveEntry`
- Añade tests unitarios para hashing por stream e import de entradas de archive
This commit is contained in:
@@ -72,4 +72,37 @@ export async function computeHashes(filePath: string): Promise<{
|
||||
});
|
||||
}
|
||||
|
||||
export async function computeHashesFromStream(rs: NodeJS.ReadableStream): Promise<{
|
||||
size: number;
|
||||
md5: string;
|
||||
sha1: string;
|
||||
crc32: string;
|
||||
}> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const md5 = createHash('md5');
|
||||
const sha1 = createHash('sha1');
|
||||
|
||||
let size = 0;
|
||||
let crc = 0xffffffff >>> 0;
|
||||
|
||||
rs.on('error', (err: any) => reject(err));
|
||||
|
||||
rs.on('data', (chunk: Buffer) => {
|
||||
md5.update(chunk);
|
||||
sha1.update(chunk);
|
||||
size += chunk.length;
|
||||
crc = updateCrc(crc, chunk);
|
||||
});
|
||||
|
||||
rs.on('end', () => {
|
||||
const md5sum = md5.digest('hex');
|
||||
const sha1sum = sha1.digest('hex');
|
||||
const final = (crc ^ 0xffffffff) >>> 0;
|
||||
const crcHex = final.toString(16).padStart(8, '0').toLowerCase();
|
||||
|
||||
resolve({ size, md5: md5sum, sha1: sha1sum, crc32: crcHex });
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export default computeHashes;
|
||||
|
||||
@@ -12,7 +12,8 @@
|
||||
import path from 'path';
|
||||
import { promises as fsPromises } from 'fs';
|
||||
import { scanDirectory } from './fsScanner';
|
||||
import { computeHashes } from './checksumService';
|
||||
import { computeHashes, computeHashesFromStream } from './checksumService';
|
||||
import { streamArchiveEntry } from './archiveReader';
|
||||
import prisma from '../plugins/prisma';
|
||||
|
||||
/**
|
||||
@@ -66,7 +67,22 @@ export async function importDirectory(
|
||||
processed++;
|
||||
|
||||
try {
|
||||
const hashes = await computeHashes(file.path);
|
||||
let hashes: { size: number; md5: string; sha1: string; crc32: string };
|
||||
|
||||
if (file.isArchiveEntry) {
|
||||
const stream = await streamArchiveEntry(file.containerPath, file.entryPath, logger);
|
||||
if (!stream) {
|
||||
logger.warn?.(
|
||||
{ file },
|
||||
'importDirectory: no se pudo extraer entrada del archive, saltando'
|
||||
);
|
||||
continue;
|
||||
}
|
||||
hashes = await computeHashesFromStream(stream as any);
|
||||
} else {
|
||||
hashes = await computeHashes(file.path);
|
||||
}
|
||||
|
||||
const checksum = hashes.md5;
|
||||
const size = hashes.size;
|
||||
|
||||
@@ -100,7 +116,10 @@ export async function importDirectory(
|
||||
upserted++;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn?.({ err, file }, 'importDirectory: error procesando fichero, se continúa con el siguiente');
|
||||
logger.warn?.(
|
||||
{ err, file },
|
||||
'importDirectory: error procesando fichero, se continúa con el siguiente'
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user