627 lines
20 KiB
JavaScript
627 lines
20 KiB
JavaScript
const fs = require('fs');
|
|
const path = require('path');
|
|
const zlib = require('zlib');
|
|
|
|
/**
|
|
* Entry types in PAK files
|
|
*/
|
|
const EntryType = {
|
|
Directory: 0,
|
|
File: 1
|
|
};
|
|
|
|
/**
|
|
* Compression types for PAK entries
|
|
*/
|
|
const CompressionType = {
|
|
None: 0,
|
|
Zlib: 0x106
|
|
};
|
|
|
|
/**
|
|
* Represents a file entry in a PAK archive
|
|
*/
|
|
class PakEntryFile {
|
|
constructor(entryName, reader) {
|
|
this.name = entryName;
|
|
this.offset = reader.readInt32LE();
|
|
this.size = reader.readInt32LE();
|
|
this.originalSize = reader.readInt32LE();
|
|
reader.skip(4);
|
|
this.compression = reader.readInt32BE();
|
|
this.unknown = reader.readBytes(4);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Binary reader for PAK files with helper methods
|
|
*/
|
|
class PakReader {
|
|
constructor(buffer) {
|
|
this.buffer = buffer;
|
|
this.position = 0;
|
|
}
|
|
|
|
canRead(length) {
|
|
return length >= 0 && this.position + length <= this.buffer.length;
|
|
}
|
|
|
|
readBytes(count) {
|
|
if (!this.canRead(count)) {
|
|
throw new Error(`Cannot read ${count} bytes at position ${this.position}`);
|
|
}
|
|
const bytes = this.buffer.slice(this.position, this.position + count);
|
|
this.position += count;
|
|
return bytes;
|
|
}
|
|
|
|
readByte() {
|
|
return this.readBytes(1)[0];
|
|
}
|
|
|
|
readInt32LE() {
|
|
if (!this.canRead(4)) {
|
|
console.warn('Warning: Not enough data to read 4 bytes for an Int32.');
|
|
return 0;
|
|
}
|
|
const value = this.buffer.readInt32LE(this.position);
|
|
this.position += 4;
|
|
return value;
|
|
}
|
|
|
|
readInt32BE() {
|
|
if (!this.canRead(4)) {
|
|
console.warn('Warning: Not enough data to read 4 bytes for an Int32.');
|
|
return 0;
|
|
}
|
|
const value = this.buffer.readInt32BE(this.position);
|
|
this.position += 4;
|
|
return value;
|
|
}
|
|
|
|
readStringUtf8(length) {
|
|
if (length <= 0) {
|
|
console.warn('Warning: Attempted to read a string with non-positive length.');
|
|
return '';
|
|
}
|
|
|
|
if (!this.canRead(length)) {
|
|
console.warn(`Warning: Not enough data to read a string of length ${length}.`);
|
|
return '';
|
|
}
|
|
|
|
try {
|
|
const bytes = this.readBytes(length);
|
|
return bytes.toString('utf8');
|
|
} catch (ex) {
|
|
console.error(`Error while reading the string: ${ex.message}`);
|
|
return 'InvalidEntry';
|
|
}
|
|
}
|
|
|
|
skip(count) {
|
|
if (this.canRead(count)) {
|
|
this.position += count;
|
|
} else {
|
|
console.warn(`Warning: Attempt to skip ${count} bytes, but not enough data remains.`);
|
|
this.position = this.buffer.length;
|
|
}
|
|
}
|
|
|
|
skipSignature() {
|
|
if (this.canRead(4)) {
|
|
this.position += 4;
|
|
} else {
|
|
console.warn('Warning: Not enough data to skip the signature.');
|
|
this.position = this.buffer.length;
|
|
}
|
|
}
|
|
|
|
pos() {
|
|
return this.position;
|
|
}
|
|
|
|
seek(position) {
|
|
if (position >= 0 && position <= this.buffer.length) {
|
|
this.position = position;
|
|
} else {
|
|
throw new Error(`Invalid seek position: ${position}`);
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Main PAK unpacker class for Arma Reforger .pak files
|
|
*/
|
|
class PakUnpacker {
|
|
constructor(filePath) {
|
|
this.filePath = filePath;
|
|
this.formSize = 0;
|
|
this.dataSize = 0;
|
|
this.entriesSize = 0;
|
|
this.entries = [];
|
|
this.buffer = null;
|
|
}
|
|
|
|
/**
|
|
* Load and parse the PAK file
|
|
* @returns {Promise<void>}
|
|
*/
|
|
async load() {
|
|
this.buffer = await fs.promises.readFile(this.filePath);
|
|
const reader = new PakReader(this.buffer);
|
|
|
|
this.readForm(reader);
|
|
this.readHead(reader);
|
|
this.readData(reader);
|
|
this.readEntries(reader);
|
|
}
|
|
|
|
/**
|
|
* Load PAK file synchronously
|
|
*/
|
|
loadSync() {
|
|
this.buffer = fs.readFileSync(this.filePath);
|
|
const reader = new PakReader(this.buffer);
|
|
|
|
this.readForm(reader);
|
|
this.readHead(reader);
|
|
this.readData(reader);
|
|
this.readEntries(reader);
|
|
}
|
|
|
|
readForm(reader) {
|
|
reader.skipSignature();
|
|
this.formSize = reader.readInt32BE();
|
|
reader.skipSignature();
|
|
}
|
|
|
|
readHead(reader) {
|
|
reader.skipSignature();
|
|
const headBytes = reader.readBytes(32);
|
|
console.log('Head of the Pak File:', headBytes.toString('base64'));
|
|
}
|
|
|
|
readData(reader) {
|
|
reader.skipSignature();
|
|
this.dataSize = reader.readInt32BE();
|
|
reader.skip(this.dataSize);
|
|
}
|
|
|
|
readEntries(reader) {
|
|
reader.skipSignature();
|
|
this.entriesSize = reader.readInt32BE();
|
|
|
|
try {
|
|
reader.skip(2);
|
|
reader.skip(4);
|
|
|
|
const posEntries = reader.pos();
|
|
while (reader.pos() - posEntries < this.entriesSize) {
|
|
const previousPos = reader.pos();
|
|
|
|
try {
|
|
if (!reader.canRead(2)) break;
|
|
|
|
const entryType = reader.readByte();
|
|
const entryNameLength = reader.readByte();
|
|
|
|
if (!reader.canRead(entryNameLength)) break;
|
|
|
|
const entryName = reader.readStringUtf8(entryNameLength);
|
|
|
|
if (entryType === EntryType.Directory) {
|
|
this.readEntriesFromDirectory(entryName, reader);
|
|
} else {
|
|
this.entries.push(new PakEntryFile(entryName, reader));
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Error while reading an entry: ${ex.message}`);
|
|
reader.seek(previousPos);
|
|
break;
|
|
}
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Error while processing the entries: ${ex.message}`);
|
|
}
|
|
}
|
|
|
|
readEntriesFromDirectory(dirName, reader) {
|
|
if (!reader.canRead(4)) {
|
|
console.warn("Warning: Can't read directory entry as there is not enough data.");
|
|
return;
|
|
}
|
|
|
|
const childCount = reader.readInt32LE();
|
|
|
|
for (let i = 0; i < childCount; i++) {
|
|
const previousPos = reader.pos();
|
|
|
|
try {
|
|
if (!reader.canRead(2)) break;
|
|
|
|
const entryType = reader.readByte();
|
|
const entryNameLength = reader.readByte();
|
|
|
|
if (!reader.canRead(entryNameLength)) break;
|
|
|
|
const entryName = path.join(dirName, reader.readStringUtf8(entryNameLength));
|
|
|
|
if (entryType === EntryType.File) {
|
|
this.entries.push(new PakEntryFile(entryName, reader));
|
|
} else if (entryType === EntryType.Directory) {
|
|
this.readEntriesFromDirectory(entryName, reader);
|
|
} else {
|
|
console.warn('Unknown entry type.');
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Error while reading a directory entry: ${ex.message}`);
|
|
reader.seek(previousPos);
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Extract all files to the destination directory
|
|
* @param {string} dstDir - Destination directory
|
|
* @returns {Promise<void>}
|
|
*/
|
|
async extract(dstDir) {
|
|
await fs.promises.mkdir(dstDir, { recursive: true });
|
|
|
|
for (const entry of this.entries) {
|
|
try {
|
|
const destPath = path.join(dstDir, entry.name);
|
|
const destDirPath = path.dirname(destPath);
|
|
|
|
await fs.promises.mkdir(destDirPath, { recursive: true });
|
|
|
|
if (entry.offset + entry.size > this.buffer.length) {
|
|
console.warn(`Warning: Entry '${entry.name}' exceeds file size. Skipping.`);
|
|
continue;
|
|
}
|
|
|
|
const fileData = this.buffer.slice(entry.offset, entry.offset + entry.size);
|
|
|
|
if (entry.compression === CompressionType.Zlib) {
|
|
try {
|
|
const decompressed = await this.decompressZlib(fileData, entry.originalSize);
|
|
await fs.promises.writeFile(destPath, decompressed);
|
|
console.log(`Extracted (Zlib): ${entry.name}`);
|
|
} catch (ex) {
|
|
console.error(`Error during Zlib decompression: ${entry.name}`, ex.message);
|
|
await fs.promises.writeFile(destPath + '_Error', fileData);
|
|
}
|
|
} else {
|
|
await fs.promises.writeFile(destPath, fileData);
|
|
console.log(`Extracted: ${entry.name}`);
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Error while extracting a file (${entry.name}): ${ex.message}`);
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Extract all files synchronously
|
|
* @param {string} dstDir - Destination directory
|
|
*/
|
|
extractSync(dstDir) {
|
|
fs.mkdirSync(dstDir, { recursive: true });
|
|
|
|
for (const entry of this.entries) {
|
|
try {
|
|
const destPath = path.join(dstDir, entry.name);
|
|
const destDirPath = path.dirname(destPath);
|
|
|
|
fs.mkdirSync(destDirPath, { recursive: true });
|
|
|
|
if (entry.offset + entry.size > this.buffer.length) {
|
|
console.warn(`Warning: Entry '${entry.name}' exceeds file size. Skipping.`);
|
|
continue;
|
|
}
|
|
|
|
const fileData = this.buffer.slice(entry.offset, entry.offset + entry.size);
|
|
|
|
if (entry.compression === CompressionType.Zlib) {
|
|
try {
|
|
const decompressed = this.decompressZlibSync(fileData, entry.originalSize);
|
|
fs.writeFileSync(destPath, decompressed);
|
|
console.log(`Extracted (Zlib): ${entry.name}`);
|
|
} catch (ex) {
|
|
console.error(`Error during Zlib decompression: ${entry.name}`, ex.message);
|
|
fs.writeFileSync(destPath + '_Error', fileData);
|
|
}
|
|
} else {
|
|
fs.writeFileSync(destPath, fileData);
|
|
console.log(`Extracted: ${entry.name}`);
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Error while extracting a file (${entry.name}): ${ex.message}`);
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Decompress Zlib data asynchronously
|
|
* @param {Buffer} data - Compressed data
|
|
* @param {number} expectedSize - Expected size after decompression
|
|
* @returns {Promise<Buffer>}
|
|
*/
|
|
decompressZlib(data, expectedSize) {
|
|
return new Promise((resolve, reject) => {
|
|
if (data.length < 2) {
|
|
return reject(new Error('The input data is too short for decompression.'));
|
|
}
|
|
|
|
if (data[0] !== 0x78) {
|
|
return reject(new Error('Invalid Zlib header.'));
|
|
}
|
|
|
|
// Skip the 2-byte Zlib header and use raw deflate
|
|
const deflateData = data.slice(2);
|
|
|
|
zlib.inflateRaw(deflateData, (err, result) => {
|
|
if (err) {
|
|
return reject(err);
|
|
}
|
|
|
|
if (result.length < expectedSize) {
|
|
console.warn(`Warning: Decompressed size (${result.length} bytes) is smaller than expected (${expectedSize} bytes).`);
|
|
}
|
|
|
|
resolve(result);
|
|
});
|
|
});
|
|
}
|
|
|
|
/**
|
|
* Decompress Zlib data synchronously
|
|
* @param {Buffer} data - Compressed data
|
|
* @param {number} expectedSize - Expected size after decompression
|
|
* @returns {Buffer}
|
|
*/
|
|
decompressZlibSync(data, expectedSize) {
|
|
if (data.length < 2) {
|
|
throw new Error('The input data is too short for decompression.');
|
|
}
|
|
|
|
if (data[0] !== 0x78) {
|
|
throw new Error('Invalid Zlib header.');
|
|
}
|
|
|
|
// Skip the 2-byte Zlib header and use raw deflate
|
|
const deflateData = data.slice(2);
|
|
const result = zlib.inflateRawSync(deflateData);
|
|
|
|
if (result.length < expectedSize) {
|
|
console.warn(`Warning: Decompressed size (${result.length} bytes) is smaller than expected (${expectedSize} bytes).`);
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
/**
|
|
* Get a specific file's data by name
|
|
* @param {string} fileName - Name of the file to extract
|
|
* @returns {Promise<Buffer|null>} The file data or null if not found
|
|
*/
|
|
async getFile(fileName) {
|
|
const entry = this.entries.find(e => e.name === fileName || e.name.endsWith(fileName));
|
|
|
|
if (!entry) {
|
|
return null;
|
|
}
|
|
|
|
if (entry.offset + entry.size > this.buffer.length) {
|
|
throw new Error(`Entry '${entry.name}' exceeds file size.`);
|
|
}
|
|
|
|
const fileData = this.buffer.slice(entry.offset, entry.offset + entry.size);
|
|
|
|
if (entry.compression === CompressionType.Zlib) {
|
|
return await this.decompressZlib(fileData, entry.originalSize);
|
|
} else {
|
|
return fileData;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Get a specific file's data by name synchronously
|
|
* @param {string} fileName - Name of the file to extract
|
|
* @returns {Buffer|null} The file data or null if not found
|
|
*/
|
|
getFileSync(fileName) {
|
|
const entry = this.entries.find(e => e.name === fileName || e.name.endsWith(fileName));
|
|
|
|
if (!entry) {
|
|
return null;
|
|
}
|
|
|
|
if (entry.offset + entry.size > this.buffer.length) {
|
|
throw new Error(`Entry '${entry.name}' exceeds file size.`);
|
|
}
|
|
|
|
const fileData = this.buffer.slice(entry.offset, entry.offset + entry.size);
|
|
|
|
if (entry.compression === CompressionType.Zlib) {
|
|
return this.decompressZlibSync(fileData, entry.originalSize);
|
|
} else {
|
|
return fileData;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Extract a single file to a specific output path
|
|
* @param {string} fileName - Name of the file to extract from the archive
|
|
* @param {string} outputPath - Full path where the file should be saved
|
|
* @returns {Promise<boolean>} True if successful, false if file not found
|
|
*/
|
|
async extractFile(fileName, outputPath) {
|
|
const entry = this.entries.find(e => e.name === fileName || e.name.endsWith(fileName));
|
|
|
|
if (!entry) {
|
|
console.warn(`File not found in archive: ${fileName}`);
|
|
return false;
|
|
}
|
|
|
|
try {
|
|
// Create directory if it doesn't exist
|
|
const destDirPath = path.dirname(outputPath);
|
|
await fs.promises.mkdir(destDirPath, { recursive: true });
|
|
|
|
// Get file data
|
|
const fileData = await this.getFile(fileName);
|
|
|
|
if (fileData) {
|
|
await fs.promises.writeFile(outputPath, fileData);
|
|
console.log(`Extracted: ${entry.name} -> ${outputPath}`);
|
|
return true;
|
|
}
|
|
|
|
return false;
|
|
} catch (ex) {
|
|
console.error(`Error while extracting file (${fileName}): ${ex.message}`);
|
|
throw ex;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Extract a single file to a specific output path synchronously
|
|
* @param {string} fileName - Name of the file to extract from the archive
|
|
* @param {string} outputPath - Full path where the file should be saved
|
|
* @returns {boolean} True if successful, false if file not found
|
|
*/
|
|
extractFileSync(fileName, outputPath) {
|
|
const entry = this.entries.find(e => e.name === fileName || e.name.endsWith(fileName));
|
|
|
|
if (!entry) {
|
|
console.warn(`File not found in archive: ${fileName}`);
|
|
return false;
|
|
}
|
|
|
|
try {
|
|
// Create directory if it doesn't exist
|
|
const destDirPath = path.dirname(outputPath);
|
|
fs.mkdirSync(destDirPath, { recursive: true });
|
|
|
|
// Get file data
|
|
const fileData = this.getFileSync(fileName);
|
|
|
|
if (fileData) {
|
|
fs.writeFileSync(outputPath, fileData);
|
|
console.log(`Extracted: ${entry.name} -> ${outputPath}`);
|
|
return true;
|
|
}
|
|
|
|
return false;
|
|
} catch (ex) {
|
|
console.error(`Error while extracting file (${fileName}): ${ex.message}`);
|
|
throw ex;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Extract files matching a pattern (e.g., "*.conf", "*.json", "models/*.xml")
|
|
* @param {string} pattern - Glob-like pattern to match files (supports * wildcard)
|
|
* @param {string} outputDir - Directory where files should be extracted
|
|
* @returns {Promise<Array<string>>} Array of extracted file paths
|
|
*/
|
|
async extractByPattern(pattern, outputDir) {
|
|
const matchedFiles = this.findFilesByPattern(pattern);
|
|
const extractedFiles = [];
|
|
|
|
if (matchedFiles.length === 0) {
|
|
console.warn(`No files found matching pattern: ${pattern}`);
|
|
return extractedFiles;
|
|
}
|
|
|
|
console.log(`Found ${matchedFiles.length} file(s) matching pattern: ${pattern}`);
|
|
|
|
for (const fileName of matchedFiles) {
|
|
try {
|
|
const outputPath = path.join(outputDir, fileName);
|
|
const success = await this.extractFile(fileName, outputPath);
|
|
|
|
if (success) {
|
|
extractedFiles.push(outputPath);
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Failed to extract ${fileName}: ${ex.message}`);
|
|
}
|
|
}
|
|
|
|
return extractedFiles;
|
|
}
|
|
|
|
/**
|
|
* Extract files matching a pattern synchronously
|
|
* @param {string} pattern - Glob-like pattern to match files (supports * wildcard)
|
|
* @param {string} outputDir - Directory where files should be extracted
|
|
* @returns {Array<string>} Array of extracted file paths
|
|
*/
|
|
extractByPatternSync(pattern, outputDir) {
|
|
const matchedFiles = this.findFilesByPattern(pattern);
|
|
const extractedFiles = [];
|
|
|
|
if (matchedFiles.length === 0) {
|
|
console.warn(`No files found matching pattern: ${pattern}`);
|
|
return extractedFiles;
|
|
}
|
|
|
|
console.log(`Found ${matchedFiles.length} file(s) matching pattern: ${pattern}`);
|
|
|
|
for (const fileName of matchedFiles) {
|
|
try {
|
|
const outputPath = path.join(outputDir, fileName);
|
|
const success = this.extractFileSync(fileName, outputPath);
|
|
|
|
if (success) {
|
|
extractedFiles.push(outputPath);
|
|
}
|
|
} catch (ex) {
|
|
console.error(`Failed to extract ${fileName}: ${ex.message}`);
|
|
}
|
|
}
|
|
|
|
return extractedFiles;
|
|
}
|
|
|
|
/**
|
|
* Find files matching a pattern
|
|
* @param {string} pattern - Pattern to match (e.g., "*.conf", "config/*.json", "**test.xml")
|
|
* @returns {Array<string>} Array of matching file names
|
|
*/
|
|
findFilesByPattern(pattern) {
|
|
// Convert simple glob pattern to regex
|
|
const regexPattern = pattern
|
|
.replace(/\./g, '\\.') // Escape dots
|
|
.replace(/\*\*/g, '§GLOBSTAR§') // Temporarily replace **
|
|
.replace(/\*/g, '[^/\\\\]*') // * matches anything except path separators
|
|
.replace(/§GLOBSTAR§/g, '.*') // ** matches anything including path separators
|
|
.replace(/\?/g, '[^/\\\\]'); // ? matches single character except path separator
|
|
|
|
const regex = new RegExp(`^${regexPattern}$`, 'i'); // Case-insensitive
|
|
|
|
return this.entries
|
|
.map(entry => entry.name)
|
|
.filter(name => regex.test(name.replace(/\\/g, '/'))); // Normalize path separators
|
|
}
|
|
|
|
/**
|
|
* List all files in the PAK archive
|
|
* @returns {Array<{name: string, size: number, originalSize: number, compressed: boolean}>}
|
|
*/
|
|
listFiles() {
|
|
return this.entries.map(entry => ({
|
|
name: entry.name,
|
|
size: entry.size,
|
|
originalSize: entry.originalSize,
|
|
compressed: entry.compression === CompressionType.Zlib
|
|
}));
|
|
}
|
|
}
|
|
|
|
module.exports = { PakUnpacker, EntryType, CompressionType };
|