If you need extremely high performance or custom ZIP compression formats, you can use the highly-extensible ZIP streams. They take streams as both input and output. You can even use custom compression/decompression algorithms from other libraries, as long as they are defined in the ZIP spec (see section 4.4.5). If you'd like more info on using custom compressors, feel free to ask.
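In the example below, `chunk1`, `chunk2`, and `pngData` are assumed to be `Uint8Array` values you already have. A minimal sketch of how they might be produced (the PNG read and path are placeholders, not part of the fflate API):

// Illustrative inputs for the example that follows
const chunk1 = fflate.strToU8('Hello ');
const chunk2 = fflate.strToU8('world!');
// e.g. in Node.js; the path is a placeholder
const pngData = new Uint8Array(require('fs').readFileSync('test.png'));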
// ZIP object
// Can also specify zip.ondata outside of the constructor
const zip = new fflate.Zip((err, dat, final) => {
  if (!err) {
    // output of the streams
    console.log(dat, final);
  }
});
const helloTxt = new fflate.ZipDeflate('hello.txt', {
  level: 9
});
// Always add streams to ZIP archives before pushing to those streams
zip.add(helloTxt);
helloTxt.push(chunk1);
// Last chunk
helloTxt.push(chunk2, true);
// ZipPassThrough is like ZipDeflate with level 0, but allows for tree shaking
const nonStreamingFile = new fflate.ZipPassThrough('test.png');
zip.add(nonStreamingFile);
// If you have data already loaded, just .push(data, true)
nonStreamingFile.push(pngData, true);
// You need to call .end() after finishing
// This ensures the ZIP is valid
zip.end();
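
// A minimal sketch (not part of the fflate API) of turning the streamed
// output into a complete ZIP file: buffer each `dat` chunk in the callback
// and combine the chunks once `final` is true. The names `chunks`,
// `bufferedZip`, and `zipBlob` are illustrative only.
const chunks = [];
const bufferedZip = new fflate.Zip((err, dat, final) => {
  if (err) throw err;
  chunks.push(dat);
  if (final) {
    // e.g. in the browser, build a Blob from the collected chunks
    const zipBlob = new Blob(chunks, { type: 'application/zip' });
    console.log('ZIP complete:', zipBlob.size, 'bytes');
  }
});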
// Unzip object
const unzipper = new fflate.Unzip();
// This will almost always have to be called. register() adds support for a
// decompression method: UnzipInflate handles DEFLATE, which nearly all
// compressed ZIP files use. Decompressors for other methods (such as BZIP2
// or LZMA) can be registered the same way.
// If your ZIP files are not compressed, this line is not needed.
unzipper.register(fflate.UnzipInflate);
const neededFiles = ['file1.txt', 'example.json'];
// Can specify handler in constructor too
unzipper.onfile = file => {
  // file.name is a string, file is a stream
  if (neededFiles.includes(file.name)) {
    file.ondata = (err, dat, final) => {
      // Stream output here
      console.log(dat, final);
    };
    console.log('Reading:', file.name);
    // File sizes are sometimes not set if the ZIP file did not encode
    // them, so you may want to check that file.size != undefined
    console.log('Compressed size', file.size);
    console.log('Decompressed size', file.originalSize);
    // Only start the stream if you plan to use its contents; this avoids
    // wasted work. ondata is only called after the stream is started.
    // This method will throw if the compression method hasn't been registered
    file.start();
  }
};
// Try to keep under 5,000 files per chunk to avoid stack limit errors
// For example, if all files are a few kB, multi-megabyte chunks are OK
// If files are mostly under 100 bytes, 64kB chunks are the limit
unzipper.push(zipChunk1);
unzipper.push(zipChunk2);
unzipper.push(zipChunk3, true);
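
// As an alternative to pushing pre-split chunks, a ZIP can be streamed
// straight into an Unzip instance. A hedged sketch (the helper name and URL
// are placeholders, not part of the fflate API):
const streamZipInto = async (unzip, url) => {
  const reader = (await fetch(url)).body.getReader();
  let pending = null;
  while (true) {
    const { done, value } = await reader.read();
    if (done) {
      // The last chunk read is pushed with final = true
      if (pending) unzip.push(pending, true);
      break;
    }
    if (pending) unzip.push(pending);
    pending = value;
  }
};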