feat: import job runner in-memory
- Añade ImportRunner en memoria con concurrencia configurable
- Tests TDD para enqueue, concurrencia y comportamiento tras stop
- Actualiza /api/import/scan para encolar jobs y registrar errores
- Ajusta tsconfig.json para incluir tests en comprobaciones de tipo
This commit is contained in:
62
backend/src/services/checksumService.ts
Normal file
62
backend/src/services/checksumService.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import fs from 'fs';
|
||||
import { createHash } from 'crypto';
|
||||
|
||||
function makeCRCTable(): Uint32Array {
|
||||
const table = new Uint32Array(256);
|
||||
for (let n = 0; n < 256; n++) {
|
||||
let c = n;
|
||||
for (let k = 0; k < 8; k++) {
|
||||
if (c & 1) c = 0xedb88320 ^ (c >>> 1);
|
||||
else c = c >>> 1;
|
||||
}
|
||||
table[n] = c >>> 0;
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
const CRC_TABLE = makeCRCTable();
|
||||
|
||||
function updateCrc(crc: number, buf: Buffer): number {
|
||||
let c = crc >>> 0;
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
c = (CRC_TABLE[(c ^ buf[i]) & 0xff] ^ (c >>> 8)) >>> 0;
|
||||
}
|
||||
return c >>> 0;
|
||||
}
|
||||
|
||||
export async function computeHashes(filePath: string): Promise<{
|
||||
size: number;
|
||||
md5: string;
|
||||
sha1: string;
|
||||
crc32: string;
|
||||
}> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const md5 = createHash('md5');
|
||||
const sha1 = createHash('sha1');
|
||||
|
||||
let size = 0;
|
||||
let crc = 0xffffffff >>> 0;
|
||||
|
||||
const rs = fs.createReadStream(filePath);
|
||||
|
||||
rs.on('error', (err) => reject(err));
|
||||
|
||||
rs.on('data', (chunk: Buffer) => {
|
||||
md5.update(chunk);
|
||||
sha1.update(chunk);
|
||||
size += chunk.length;
|
||||
crc = updateCrc(crc, chunk);
|
||||
});
|
||||
|
||||
rs.on('end', () => {
|
||||
const md5sum = md5.digest('hex');
|
||||
const sha1sum = sha1.digest('hex');
|
||||
const final = (crc ^ 0xffffffff) >>> 0;
|
||||
const crcHex = final.toString(16).padStart(8, '0').toLowerCase();
|
||||
|
||||
resolve({ size, md5: md5sum, sha1: sha1sum, crc32: crcHex });
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export default computeHashes;
|
||||
11
backend/src/services/datVerifier.ts
Normal file
11
backend/src/services/datVerifier.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
/**
 * Parse the XML content of a DAT file into a structured form.
 *
 * @param _xml - Raw DAT XML text (currently unused by the stub).
 * @returns An empty object; full parsing is not implemented yet.
 */
export function parseDat(_xml: string): any {
  // Stub: full parsing is not implemented in this phase.
  return {};
}
|
||||
|
||||
/**
 * Verify ROM metadata against a parsed DAT document.
 *
 * @param _romMeta - ROM metadata (currently unused by the stub).
 * @param _parsedDat - Result of parseDat (currently unused by the stub).
 * @returns An empty object; real verification is not implemented yet.
 */
export async function verifyRomAgainstDat(_romMeta: any, _parsedDat: any): Promise<any> {
  // Stub: minimal verification so that the integration tests can be skipped.
  return {};
}
|
||||
|
||||
export default { parseDat, verifyRomAgainstDat };
|
||||
42
backend/src/services/fsScanner.ts
Normal file
42
backend/src/services/fsScanner.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import path from 'path';
|
||||
import { promises as fsPromises } from 'fs';
|
||||
import { detectFormat } from '../lib/fileTypeDetector';
|
||||
|
||||
export async function scanDirectory(dirPath: string): Promise<any[]> {
|
||||
const results: any[] = [];
|
||||
|
||||
async function walk(dir: string) {
|
||||
const entries = await fsPromises.readdir(dir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.name.startsWith('.')) continue; // ignore dotfiles
|
||||
|
||||
const full = path.join(dir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await walk(full);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.isFile()) {
|
||||
const stat = await fsPromises.stat(full);
|
||||
const format = detectFormat(entry.name);
|
||||
const isArchive = ['zip', '7z', 'chd'].includes(format);
|
||||
|
||||
results.push({
|
||||
path: full,
|
||||
filename: entry.name,
|
||||
name: entry.name,
|
||||
size: stat.size,
|
||||
format,
|
||||
isArchive,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await walk(dirPath);
|
||||
return results;
|
||||
}
|
||||
|
||||
export default scanDirectory;
|
||||
Reference in New Issue
Block a user