From 2b87c3505898aaaeb34babbf8a4275b09ceeae50 Mon Sep 17 00:00:00 2001
From: Waldemar Reusch
Date: Fri, 3 Feb 2023 14:05:02 +0100
Subject: [PATCH] introduce utils for creating a jar archive

---
 src/bin/tools/crc32.ts   |  54 +++++++++
 src/bin/tools/deflate.ts |  61 ++++++++++
 src/bin/tools/jar.ts     | 100 ++++++++++++++++
 src/bin/tools/tee.ts     |  37 ++++++
 src/bin/tools/walk.ts    |  19 +++
 src/bin/tools/zip.ts     | 245 +++++++++++++++++++++++++++++++++++++++
 6 files changed, 516 insertions(+)
 create mode 100644 src/bin/tools/crc32.ts
 create mode 100644 src/bin/tools/deflate.ts
 create mode 100644 src/bin/tools/jar.ts
 create mode 100644 src/bin/tools/tee.ts
 create mode 100644 src/bin/tools/walk.ts
 create mode 100644 src/bin/tools/zip.ts

diff --git a/src/bin/tools/crc32.ts b/src/bin/tools/crc32.ts
new file mode 100644
index 00000000..c2211611
--- /dev/null
+++ b/src/bin/tools/crc32.ts
@@ -0,0 +1,54 @@
+import { Readable } from "stream";
+
+const crc32tab = [
+    0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
+    0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
+    0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
+    0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
+    0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
+    0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
+    0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
+    0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
+    0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
+    0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
+    0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
+    0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
+    0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
+    0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
+    0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
+    0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
+    0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
+    0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
+    0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
+    0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
+    0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
+    0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
+];
+
+/**
+ * @param input either a byte stream, a string or a buffer you want the checksum for;
+ *   strings are hashed per UTF-16 char code, so only single-byte content comes out right
+ * @returns a promise for a checksum (uint32)
+ */
+export function crc32(input: Readable | string | Buffer): Promise<number> {
+    if (typeof input === "string") {
+        let crc = ~0;
+        for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input.charCodeAt(i)) & 0xff];
+        return Promise.resolve((crc ^ -1) >>> 0);
+    } else if (input instanceof Buffer) {
+        let crc = ~0;
+        for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input[i]) & 0xff];
+        return Promise.resolve((crc ^ -1) >>> 0);
+    } else if (input instanceof Readable) {
+        return new Promise((resolve, reject) => {
+            let crc = ~0;
+            input.on("end", () => resolve((crc ^ -1) >>> 0));
+            input.on("error", e => reject(e));
+            input.on("data", (chunk: Buffer) => {
+                for (let i = 0; i < chunk.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ chunk[i]) & 0xff];
+            });
+        });
+    } else {
+        throw new Error("Unsupported input " + typeof input);
+    }
+}
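// A hypothetical usage sketch for crc32(); the file path is a placeholder.
// All three accepted input kinds yield the same uint32 for the same bytes.
import { createReadStream } from "fs";
import { crc32 } from "./crc32";

async function crc32Demo() {
    const fromString = await crc32("hello");
    const fromBuffer = await crc32(Buffer.from("hello", "utf-8"));
    console.log(fromString === fromBuffer, fromString.toString(16)); // true 3610a686
    // streams are hashed chunk by chunk, so this also works for large files
    console.log(await crc32(createReadStream("./some-large-file")));
}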
diff --git a/src/bin/tools/deflate.ts b/src/bin/tools/deflate.ts
new file mode 100644
index 00000000..85834b74
--- /dev/null
+++ b/src/bin/tools/deflate.ts
@@ -0,0 +1,61 @@
+import { PassThrough, Readable, TransformCallback, Writable } from "stream";
+import { pipeline } from "stream/promises";
+import { deflateRaw as deflateRawCb, createDeflateRaw } from "zlib";
+import { promisify } from "util";
+
+import { crc32 } from "./crc32";
+import tee from "./tee";
+
+const deflateRaw = promisify(deflateRawCb);
+
+/**
+ * A stream transformer that records the number of bytes
+ * passed in its `size` property.
+ */
+class ByteCounter extends PassThrough {
+    size: number = 0;
+    _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback) {
+        if ("length" in chunk) this.size += chunk.length;
+        super._transform(chunk, encoding, callback);
+    }
+}
+
+/**
+ * @param data buffer containing the data to be compressed
+ * @returns a buffer containing the compressed/deflated data and the crc32 checksum
+ *   of the source data
+ */
+export async function deflateBuffer(data: Buffer) {
+    const [deflated, checksum] = await Promise.all([deflateRaw(data), crc32(data)]);
+    return { deflated, crc32: checksum };
+}
+
+/**
+ * @param input a byte stream containing data to be compressed
+ * @param sink a function that accepts chunks of compressed data; we don't pass
+ *   a Writable here, since we don't want the writable stream to be closed after
+ *   a single file
+ * @returns a promise which resolves with the crc32 checksum and the
+ *   compressed size
+ */
+export async function deflateStream(input: Readable, sink: (chunk: Buffer) => void) {
+    const deflateWriter = new Writable({
+        write(chunk, _, callback) {
+            sink(chunk);
+            callback();
+        }
+    });
+
+    // tee the input stream, so we can compress and calculate crc32 in parallel
+    const [rs1, rs2] = tee(input);
+    const byteCounter = new ByteCounter();
+    const [_, crc] = await Promise.all([
+        // pipe input through the deflate compressor, count the bytes
+        // returned and pass the compressed data to the sink
+        pipeline(rs1, createDeflateRaw(), byteCounter, deflateWriter),
+        // calculate the checksum
+        crc32(rs2)
+    ]);
+
+    return { crc32: crc, compressedSize: byteCounter.size };
+}
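// A hypothetical round-trip sketch for the helpers above: deflateBuffer emits
// raw-deflate data, so zlib's inflateRawSync restores the original bytes, and
// deflateStream reports the same compressed size as the chunks it hands to the sink.
import { inflateRawSync } from "zlib";
import { Readable } from "stream";
import { deflateBuffer, deflateStream } from "./deflate";

async function deflateDemo() {
    const data = Buffer.from("hello hello hello", "utf-8");

    const { deflated, crc32: checksum } = await deflateBuffer(data);
    console.log(inflateRawSync(deflated).equals(data), checksum.toString(16)); // true <hex checksum>

    // note the single-element array: Readable.from(data) would iterate the
    // buffer byte by byte instead of yielding one Buffer chunk
    const chunks: Buffer[] = [];
    const { compressedSize } = await deflateStream(Readable.from([data]), chunk => chunks.push(chunk));
    console.log(Buffer.concat(chunks).length === compressedSize); // true
}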
diff --git a/src/bin/tools/jar.ts b/src/bin/tools/jar.ts
new file mode 100644
index 00000000..76fbba57
--- /dev/null
+++ b/src/bin/tools/jar.ts
@@ -0,0 +1,100 @@
+import { Readable, Transform } from "stream";
+import { pipeline } from "stream/promises";
+import { relative, sep } from "path";
+import { createWriteStream } from "fs";
+
+import walk from "./walk";
+import type { ZipSource } from "./zip";
+import zip from "./zip";
+
+/** Trim leading whitespace from every line */
+const trimIndent = (s: string) => s.replace(/(\n)\s+/g, "$1");
+
+type JarArgs = {
+    rootPath: string;
+    targetPath: string;
+    groupId: string;
+    artifactId: string;
+    version: string;
+};
+
+/**
+ * Create a jar archive, using the resources found at `rootPath` (a directory) and write the
+ * archive to `targetPath` (a file). Use `groupId`, `artifactId` and `version` to define
+ * the contents of the pom.properties file which is going to be added to the archive.
+ */
+export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
+    const manifest: ZipSource = {
+        path: "META-INF/MANIFEST.MF",
+        data: Buffer.from(
+            trimIndent(
+                `Manifest-Version: 1.0
+                 Archiver-Version: Plexus Archiver
+                 Created-By: Keycloakify
+                 Built-By: unknown
+                 Build-Jdk: 19.0.0`
+            )
+        )
+    };
+
+    const pomProps: ZipSource = {
+        path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
+        data: Buffer.from(
+            trimIndent(
+                `# Generated by keycloakify
+                 # ${new Date()}
+                 artifactId=${artifactId}
+                 groupId=${groupId}
+                 version=${version}`
+            )
+        )
+    };
+
+    /**
+     * Convert every path entry to a ZipSource record, and when all records are
+     * processed, append records for MANIFEST.MF and pom.properties
+     */
+    const pathToRecord = () =>
+        new Transform({
+            objectMode: true,
+            transform: function (fsPath, _, cb) {
+                const path = relative(rootPath, fsPath).split(sep).join("/");
+                this.push({ path, fsPath });
+                cb();
+            },
+            flush: function (cb) {
+                this.push(manifest);
+                this.push(pomProps);
+                cb();
+            }
+        });
+
+    /**
+     * Create an async pipeline, wait until everything is fully processed
+     */
+    await pipeline(
+        // walk all files and directories in `rootPath` recursively
+        Readable.from(walk(rootPath)),
+        // transform every path into a ZipSource object
+        pathToRecord(),
+        // let the zip lib convert all ZipSource objects into a byte stream
+        zip(),
+        // write that byte stream to targetPath
+        createWriteStream(targetPath, { encoding: "binary" })
+    );
+}
+
+/**
+ * Standalone usage, call e.g. `ts-node jar.ts dirWithSources some-jar.jar`
+ */
+if (require.main === module) {
+    const main = () =>
+        jar({
+            rootPath: process.argv[2],
+            targetPath: process.argv[3],
+            artifactId: process.env.ARTIFACT_ID ?? "artifact",
+            groupId: process.env.GROUP_ID ?? "group",
+            version: process.env.VERSION ?? "1.0.0"
+        });
+    main().catch(e => console.error(e));
+}
diff --git a/src/bin/tools/tee.ts b/src/bin/tools/tee.ts
new file mode 100644
index 00000000..5c53cb42
--- /dev/null
+++ b/src/bin/tools/tee.ts
@@ -0,0 +1,37 @@
+import { PassThrough, Readable } from "stream";
+
+export default function tee(input: Readable) {
+    const a = new PassThrough();
+    const b = new PassThrough();
+
+    let aFull = false;
+    let bFull = false;
+
+    a.on("drain", () => {
+        aFull = false;
+        if (!bFull) input.resume();
+    });
+    b.on("drain", () => {
+        bFull = false;
+        if (!aFull) input.resume();
+    });
+
+    input.on("error", e => {
+        a.emit("error", e);
+        b.emit("error", e);
+    });
+
+    input.on("data", chunk => {
+        aFull = !a.write(chunk);
+        bFull = !b.write(chunk);
+
+        if (aFull || bFull) input.pause();
+    });
+
+    input.on("end", () => {
+        a.end();
+        b.end();
+    });
+
+    return [a, b] as const;
+}
diff --git a/src/bin/tools/walk.ts b/src/bin/tools/walk.ts
new file mode 100644
index 00000000..d22ef1bf
--- /dev/null
+++ b/src/bin/tools/walk.ts
@@ -0,0 +1,19 @@
+import { readdir } from "fs/promises";
+import { resolve } from "path";
+
+/**
+ * Asynchronously and recursively walk a directory tree, yielding every file and directory
+ * found
+ *
+ * @param root the starting directory
+ * @returns AsyncGenerator
+ */
+export default async function* walk(root: string): AsyncGenerator<string> {
+    for (const entry of await readdir(root, { withFileTypes: true })) {
+        const absolutePath = resolve(root, entry.name);
+        if (entry.isDirectory()) {
+            yield absolutePath;
+            yield* walk(absolutePath);
+        } else yield absolutePath;
+    }
+}
({ "deflate": 8 } as const)[compression] : 0, 8); + buf.writeUInt16LE(0, 10); // modified time + buf.writeUInt16LE(0, 12); // modified date + buf.writeUInt32LE(0, 14); // crc unknown + buf.writeUInt32LE(0, 18); // compressed size unknown + buf.writeUInt32LE(uncompressedSize, 22); + buf.writeUInt16LE(filenameSize, 26); + buf.writeUInt16LE(0, 28); // extra field length + buf.write(path, 30, "utf-8"); + + return buf; +} + +/** + * @param record + * @returns a buffer representing a Zip central header + * @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header + */ +function centralHeader(record: ZipRecord) { + const { path, compression, crc32, compressedSize, uncompressedSize, offset } = record; + const filenameSize = utf8size(path); + const buf = Buffer.alloc(46 + filenameSize); + const isFile = !path.endsWith("/"); + + if (typeof offset === "undefined") throw new Error("Illegal argument"); + + // we don't want to deal with possibly messed up file or directory + // permissions, so we ignore the original permissions + const externalAttr = isFile ? 0x81a40000 : 0x41ed0000; + + buf.writeUInt32LE(0x0201_4b50, 0); // central header signature + buf.writeUInt16LE(10, 4); // version + buf.writeUInt16LE(10, 6); // min version + buf.writeUInt16LE(0, 8); // general purpose bit flag + buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 10); + buf.writeUInt16LE(0, 12); // modified time + buf.writeUInt16LE(0, 14); // modified date + buf.writeUInt32LE(crc32 || 0, 16); + buf.writeUInt32LE(compressedSize || 0, 20); + buf.writeUInt32LE(uncompressedSize, 24); + buf.writeUInt16LE(filenameSize, 28); + buf.writeUInt16LE(0, 30); // extra field length + buf.writeUInt16LE(0, 32); // comment field length + buf.writeUInt16LE(0, 34); // disk number + buf.writeUInt16LE(0, 36); // internal + buf.writeUInt32LE(externalAttr, 38); // external + buf.writeUInt32LE(offset, 42); // offset where file starts + buf.write(path, 46, "utf-8"); + + return buf; +} + +/** + * @returns a buffer representing an Zip End-Of-Central-Directory block + * @link https://en.wikipedia.org/wiki/ZIP_(file_format)#End_of_central_directory_record_(EOCD) + */ +function eocd({ offset, cdSize, nRecords }: { offset: number; cdSize: number; nRecords: number }) { + const buf = Buffer.alloc(22); + buf.writeUint32LE(0x06054b50, 0); // eocd signature + buf.writeUInt16LE(0, 4); // disc number + buf.writeUint16LE(0, 6); // disc where central directory starts + buf.writeUint16LE(nRecords, 8); // records on this disc + buf.writeUInt16LE(nRecords, 10); // records total + buf.writeUInt32LE(cdSize, 12); // byte size of cd + buf.writeUInt32LE(offset, 16); // cd offset + buf.writeUint16LE(0, 20); // comment length + + return buf; +} + +/** + * @returns a stream Transform, which reads a stream of ZipRecords and + * writes a bytestream + */ +export default function zip() { + /** + * This is called when the input stream of ZipSource items is finished. + * Will write central directory and end-of-central-direcotry blocks. 
+/**
+ * @returns a stream Transform, which reads a stream of ZipSource objects and
+ * writes a bytestream
+ */
+export default function zip() {
+    /**
+     * This is called when the input stream of ZipSource items is finished.
+     * Will write the central directory and end-of-central-directory blocks.
+     */
+    const final = (cb: (error?: Error | null) => void) => {
+        // write central directory
+        let cdSize = 0;
+        for (const record of records) {
+            const head = centralHeader(record);
+            zipTransform.push(head);
+            cdSize += head.length;
+        }
+
+        // write end-of-central-directory
+        zipTransform.push(eocd({ offset, cdSize, nRecords: records.length }));
+        // signal stream end
+        zipTransform.push(null);
+        cb();
+    };
+
+    /**
+     * Write a directory entry to the archive
+     * @param path
+     */
+    const writeDir = async (path: string) => {
+        const record: ZipRecord = {
+            path: path + "/",
+            offset,
+            compression: undefined,
+            uncompressedSize: 0
+        };
+        const head = localHeader(record);
+        zipTransform.push(head);
+        records.push(record);
+        offset += head.length;
+    };
+
+    /**
+     * Write a file entry to the archive
+     * @param archivePath path of the file in the archive
+     * @param fsPath path to the file on the filesystem
+     * @param size size of the actual, uncompressed, file
+     */
+    const writeFile = async (archivePath: string, fsPath: string, size: number) => {
+        const record: ZipRecord = {
+            path: archivePath,
+            offset,
+            compression: "deflate",
+            uncompressedSize: size
+        };
+        const head = localHeader(record);
+        zipTransform.push(head);
+
+        const { crc32, compressedSize } = await deflateStream(createReadStream(fsPath), chunk => zipTransform.push(chunk));
+
+        record.crc32 = crc32;
+        record.compressedSize = compressedSize;
+        records.push(record);
+        offset += head.length + compressedSize;
+    };
+
+    /**
+     * Write an archive record based on a filesystem file or directory
+     * @param archivePath path of the item in the archive
+     * @param fsPath path to the item on the filesystem
+     */
+    const writeFromPath = async (archivePath: string, fsPath: string) => {
+        const fileStats = await stat(fsPath);
+        if (fileStats.isDirectory()) await writeDir(archivePath);
+        else await writeFile(archivePath, fsPath, fileStats.size);
+    };
+
+    /**
+     * Write an archive record based on data in a buffer
+     * @param path
+     * @param data
+     */
+    const writeFromBuffer = async (path: string, data: Buffer) => {
+        const { deflated, crc32 } = await deflateBuffer(data);
+        const record: ZipRecord = {
+            path,
+            compression: "deflate",
+            crc32,
+            uncompressedSize: data.length,
+            compressedSize: deflated.length,
+            offset
+        };
+        const head = localHeader(record);
+        zipTransform.push(head);
+        zipTransform.push(deflated);
+        records.push(record);
+        offset += head.length + deflated.length;
+    };
+
+    /**
+     * Write an archive record
+     * @param source
+     */
+    const writeRecord = async (source: ZipSource) => {
+        if ("fsPath" in source) await writeFromPath(source.path, source.fsPath);
+        else if ("data" in source) await writeFromBuffer(source.path, source.data);
+        else throw new Error("Illegal argument " + typeof source + " " + source);
+    };
+
+    /**
+     * The actual stream transform function
+     * @param source
+     * @param _ encoding, ignored
+     * @param cb
+     */
+    const transform: TransformOptions["transform"] = (source: ZipSource, _, cb) => {
+        // propagate errors from writeRecord to the stream instead of swallowing them
+        writeRecord(source).then(() => cb(), cb);
+    };
+
+    /** offset and records keep local state during processing */
+    let offset = 0;
+    const records: ZipRecord[] = [];
+
+    const zipTransform = new Transform({
+        readableObjectMode: false,
+        writableObjectMode: true,
+        transform,
+        final
+    });
+
+    return zipTransform;
+}
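// An end-to-end sketch of zip() used directly (jar.ts above wires it up the same
// way): feed ZipSource objects in object mode, collect archive bytes out. The
// target file name is a placeholder.
import { Readable } from "stream";
import { pipeline } from "stream/promises";
import { createWriteStream } from "fs";
import type { ZipSource } from "./zip";
import zip from "./zip";

async function zipDemo() {
    const sources: ZipSource[] = [
        // an in-memory entry takes the writeFromBuffer path ...
        { path: "hello.txt", data: Buffer.from("hello zip\n", "utf-8") }
        // ... while a { path, fsPath } entry would take the writeFromPath path
    ];
    await pipeline(Readable.from(sources), zip(), createWriteStream("hello.zip"));
}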