refactor(jar): introduce yazl for creating jars

* introduce yazl
* remove old zip code
* refactor jar code to make it better testable
* introduce unit test for jar creation
Waldemar Reusch 2023-04-02 22:47:42 +02:00
parent 0b16df7731
commit dcfefad17f
6 changed files with 156 additions and 340 deletions
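For context, a minimal sketch of the yazl ZipFile API that the new jar code builds on; the entry names and paths here are illustrative, not taken from this commit:

import { createWriteStream } from "fs";
import { ZipFile } from "yazl";

const zipFile = new ZipFile();
// entries can be added from an in-memory buffer or from a file on disk
zipFile.addBuffer(Buffer.from("hello"), "docs/hello.txt");
zipFile.addFile("local/file.txt", "docs/file.txt");
// end() finalizes the archive; outputStream then emits the zip bytes
zipFile.end();
zipFile.outputStream.pipe(createWriteStream("example.zip"));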

View File

@@ -86,6 +86,7 @@
},
"dependencies": {
"@octokit/rest": "^18.12.0",
"@types/yazl": "^2.4.2",
"cheerio": "^1.0.0-rc.5",
"cli-select": "^1.1.2",
"evt": "^2.4.18",
@@ -97,6 +98,7 @@
"rfc4648": "^1.5.2",
"tsafe": "^1.6.0",
"yauzl": "^2.10.0",
"yazl": "^2.5.1",
"zod": "^3.17.10"
}
}
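For reference: the lockfile changes at the end of this diff suggest these entries were added with yarn, e.g. yarn add yazl @types/yazl; note that @types/yazl lands in dependencies rather than devDependencies here, as the hunk above shows.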

View File

@@ -1,94 +1,88 @@
import { Readable, Transform } from "stream";
import { dirname, relative, sep } from "path";
import { createWriteStream } from "fs";
import walk from "./walk";
import zip, { type ZipSource } from "./zip";
import { ZipFile } from "yazl";
import { mkdir } from "fs/promises";
import trimIndent from "./trimIndent";
type JarArgs = {
rootPath: string;
targetPath: string;
export type ZipEntry = { zipPath: string } & ({ fsPath: string } | { buffer: Buffer })
export type ZipEntryGenerator = AsyncGenerator<ZipEntry, void, unknown>
type CommonJarArgs = {
groupId: string;
artifactId: string;
version: string;
}
export type JarStreamArgs = CommonJarArgs & {
asyncPathGeneratorFn(): ZipEntryGenerator
}
export type JarArgs = CommonJarArgs & {
targetPath: string;
rootPath: string;
};
export async function jarStream({ groupId, artifactId, version, asyncPathGeneratorFn }: JarStreamArgs) {
const manifestPath = "META-INF/MANIFEST.MF"
const manifestData = Buffer.from(trimIndent`
Manifest-Version: 1.0
Archiver-Version: Plexus Archiver
Created-By: Keycloakify
Built-By: unknown
Build-Jdk: 19.0.0
`)
const pomPropsPath = `META-INF/maven/${groupId}/${artifactId}/pom.properties`
const pomPropsData = Buffer.from(trimIndent`
# Generated by keycloakify
# ${new Date()}
artifactId=${artifactId}
groupId=${groupId}
version=${version}
`)
const zipFile = new ZipFile()
for await (const entry of asyncPathGeneratorFn()) {
if ("buffer" in entry) {
zipFile.addBuffer(entry.buffer, entry.zipPath)
} else if ("fsPath" in entry) {
zipFile.addFile(entry.fsPath, entry.zipPath)
}
}
zipFile.addBuffer(manifestData, manifestPath)
zipFile.addBuffer(pomPropsData, pomPropsPath)
zipFile.end()
return zipFile
}
/**
* Create a jar archive, using the resources found at `rootPath` (a directory) and write the
* archive to `targetPath` (a file). Use `groupId`, `artifactId` and `version` to define
* the contents of the pom.properties file which is going to be added to the archive.
*/
export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
const manifest: ZipSource = {
path: "META-INF/MANIFEST.MF",
data: Buffer.from(trimIndent`
Manifest-Version: 1.0
Archiver-Version: Plexus Archiver
Created-By: Keycloakify
Built-By: unknown
Build-Jdk: 19.0.0
`)
};
const pomProps: ZipSource = {
path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
data: Buffer.from(trimIndent`# Generated by keycloakify
# ${new Date().toString()}
artifactId=${artifactId}
groupId=${groupId}
version=${version}
`)
};
/**
* Convert every path entry to a ZipSource record, and when all records are
* processed, append records for MANIFEST.MF and pom.properties
*/
const pathToRecord = () =>
new Transform({
objectMode: true,
transform: function (fsPath, _, cb) {
const path = relative(rootPath, fsPath).split(sep).join("/");
this.push({ path, fsPath });
cb();
},
final: function () {
this.push(manifest);
this.push(pomProps);
this.push(null);
}
});
await mkdir(dirname(targetPath), { recursive: true });
// Create an async pipeline, wait until everything is fully processed
await new Promise<void>((resolve, reject) => {
// walk all files in `rootPath` recursively
Readable.from(walk(rootPath))
// transform every path into a ZipSource object
.pipe(pathToRecord())
// let the zip lib convert all ZipSource objects into a byte stream
.pipe(zip())
// write that byte stream to targetPath
.pipe(createWriteStream(targetPath, { encoding: "binary" }))
.on("finish", () => resolve())
.on("error", e => reject(e));
const asyncPathGeneratorFn = (async function* (): ZipEntryGenerator {
for await (const fsPath of walk(rootPath)) {
const zipPath = relative(rootPath, fsPath).split(sep).join("/");
yield ({ fsPath, zipPath })
}
})
const zipFile = await jarStream({ groupId, artifactId, version, asyncPathGeneratorFn })
await new Promise<void>(async (resolve, reject) => {
zipFile.outputStream.pipe(createWriteStream(targetPath, { encoding: "binary" }))
.on("close", () => resolve())
.on("error", e => reject(e))
});
}
/**
* Standalone usage, call e.g. `ts-node jar.ts dirWithSources some-jar.jar`
*/
if (require.main === module) {
const main = () =>
jar({
rootPath: process.argv[2],
targetPath: process.argv[3],
artifactId: process.env.ARTIFACT_ID ?? "artifact",
groupId: process.env.GROUP_ID ?? "group",
version: process.env.VERSION ?? "1.0.0"
});
main();
}
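Because jarStream returns the yazl ZipFile without touching the filesystem, callers can consume outputStream any way they like, which is what makes the new code unit-testable. A minimal sketch of that idea (the helper name jarToBuffer and the import path are illustrative; the test below does essentially the same):

import { jarStream } from "./jar";

async function jarToBuffer(): Promise<Buffer> {
    const zipFile = await jarStream({
        groupId: "com.example",
        artifactId: "demo",
        version: "0.0.1",
        asyncPathGeneratorFn: async function* () {
            yield { zipPath: "hello.txt", buffer: Buffer.from("hello") };
        }
    });
    // Node readable streams are async iterable, so the archive bytes can be
    // collected in memory instead of being piped into a file
    const chunks: Buffer[] = [];
    for await (const chunk of zipFile.outputStream) chunks.push(chunk as Buffer);
    return Buffer.concat(chunks);
}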

View File

@@ -8,7 +8,7 @@ import { resolve } from "path";
* @param root the starting directory
* @returns AsyncGenerator
*/
export default async function* walk(root: string): AsyncGenerator<string, void, void> {
export default async function* walk(root: string): AsyncGenerator<string, void, unknown> {
for (const entry of await readdir(root, { withFileTypes: true })) {
const absolutePath = resolve(root, entry.name);
if (entry.isDirectory()) {

View File

@@ -1,246 +0,0 @@
import { Transform, TransformOptions } from "stream";
import { createReadStream } from "fs";
import { stat } from "fs/promises";
import { Blob } from "buffer";
import { deflateBuffer, deflateStream } from "./deflate";
/**
* Zip source
* @property path the name of the entry in the archive
* @property fsPath path of the source file, if the source is an actual file
* @property data the actual data buffer, if the source is constructed in-memory
*/
export type ZipSource = { path: string } & ({ fsPath: string } | { data: Buffer });
export type ZipRecord = {
path: string;
compression: "deflate" | undefined;
uncompressedSize: number;
compressedSize?: number;
crc32?: number;
offset?: number;
};
/**
* @returns the actual byte size of a string
*/
function utf8size(s: string) {
return new Blob([s]).size;
}
/**
* @param record
* @returns a buffer representing a Zip local header
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Local_file_header
*/
function localHeader(record: ZipRecord) {
const { path, compression, uncompressedSize } = record;
const filenameSize = utf8size(path);
const buf = Buffer.alloc(30 + filenameSize);
buf.writeUInt32LE(0x04_03_4b_50, 0); // local header signature
buf.writeUInt16LE(10, 4); // min version
// we write 0x08 because crc and compressed size are unknown at this point
buf.writeUInt16LE(0x08, 6); // general purpose bit flag
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 8);
buf.writeUInt16LE(0, 10); // modified time
buf.writeUInt16LE(0, 12); // modified date
buf.writeUInt32LE(0, 14); // crc unknown
buf.writeUInt32LE(0, 18); // compressed size unknown
buf.writeUInt32LE(uncompressedSize, 22);
buf.writeUInt16LE(filenameSize, 26);
buf.writeUInt16LE(0, 28); // extra field length
buf.write(path, 30, "utf-8");
return buf;
}
/**
* @param record
* @returns a buffer representing a Zip central header
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header
*/
function centralHeader(record: ZipRecord) {
const { path, compression, crc32, compressedSize, uncompressedSize, offset } = record;
const filenameSize = utf8size(path);
const buf = Buffer.alloc(46 + filenameSize);
const isFile = !path.endsWith("/");
if (typeof offset === "undefined") throw new Error("Illegal argument");
// we don't want to deal with possibly messed up file or directory
// permissions, so we ignore the original permissions
const externalAttr = isFile ? 0x81a40000 : 0x41ed0000;
buf.writeUInt32LE(0x0201_4b50, 0); // central header signature
buf.writeUInt16LE(10, 4); // version
buf.writeUInt16LE(10, 6); // min version
buf.writeUInt16LE(0, 8); // general purpose bit flag
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 10);
buf.writeUInt16LE(0, 12); // modified time
buf.writeUInt16LE(0, 14); // modified date
buf.writeUInt32LE(crc32 || 0, 16);
buf.writeUInt32LE(compressedSize || 0, 20);
buf.writeUInt32LE(uncompressedSize, 24);
buf.writeUInt16LE(filenameSize, 28);
buf.writeUInt16LE(0, 30); // extra field length
buf.writeUInt16LE(0, 32); // comment field length
buf.writeUInt16LE(0, 34); // disk number
buf.writeUInt16LE(0, 36); // internal
buf.writeUInt32LE(externalAttr, 38); // external
buf.writeUInt32LE(offset, 42); // offset where file starts
buf.write(path, 46, "utf-8");
return buf;
}
/**
* @returns a buffer representing a Zip End-Of-Central-Directory block
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#End_of_central_directory_record_(EOCD)
*/
function eocd({ offset, cdSize, nRecords }: { offset: number; cdSize: number; nRecords: number }) {
const buf = Buffer.alloc(22);
buf.writeUint32LE(0x06054b50, 0); // eocd signature
buf.writeUInt16LE(0, 4); // disc number
buf.writeUint16LE(0, 6); // disc where central directory starts
buf.writeUint16LE(nRecords, 8); // records on this disc
buf.writeUInt16LE(nRecords, 10); // records total
buf.writeUInt32LE(cdSize, 12); // byte size of cd
buf.writeUInt32LE(offset, 16); // cd offset
buf.writeUint16LE(0, 20); // comment length
return buf;
}
/**
* @returns a stream Transform, which reads a stream of ZipRecords and
* writes a bytestream
*/
export default function zip() {
/**
* This is called when the input stream of ZipSource items is finished.
* Will write central directory and end-of-central-directory blocks.
*/
const final = () => {
// write central directory
let cdSize = 0;
for (const record of records) {
const head = centralHeader(record);
zipTransform.push(head);
cdSize += head.length;
}
// write end-of-central-directory
zipTransform.push(eocd({ offset, cdSize, nRecords: records.length }));
// signal stream end
zipTransform.push(null);
};
/**
* Write a directory entry to the archive
* @param path
*/
const writeDir = async (path: string) => {
const record: ZipRecord = {
path: path + "/",
offset,
compression: undefined,
uncompressedSize: 0
};
const head = localHeader(record);
zipTransform.push(head);
records.push(record);
offset += head.length;
};
/**
* Write a file entry to the archive
* @param archivePath path of the file in archive
* @param fsPath path to file on filesystem
* @param size of the actual, uncompressed, file
*/
const writeFile = async (archivePath: string, fsPath: string, size: number) => {
const record: ZipRecord = {
path: archivePath,
offset,
compression: "deflate",
uncompressedSize: size
};
const head = localHeader(record);
zipTransform.push(head);
const { crc32, compressedSize } = await deflateStream(createReadStream(fsPath), chunk => zipTransform.push(chunk));
record.crc32 = crc32;
record.compressedSize = compressedSize;
records.push(record);
offset += head.length + compressedSize;
};
/**
* Write archive record based on filesystem file or directory
* @param archivePath path of item in archive
* @param fsPath path to item on filesystem
*/
const writeFromPath = async (archivePath: string, fsPath: string) => {
const fileStats = await stat(fsPath);
fileStats.isDirectory() ? await writeDir(archivePath) /**/ : await writeFile(archivePath, fsPath, fileStats.size) /**/;
};
/**
* Write archive record based on data in a buffer
* @param path
* @param data
*/
const writeFromBuffer = async (path: string, data: Buffer) => {
const { deflated, crc32 } = await deflateBuffer(data);
const record: ZipRecord = {
path,
compression: "deflate",
crc32,
uncompressedSize: data.length,
compressedSize: deflated.length,
offset
};
const head = localHeader(record);
zipTransform.push(head);
zipTransform.push(deflated);
records.push(record);
offset += head.length + deflated.length;
};
/**
* Write an archive record
* @param source
*/
const writeRecord = async (source: ZipSource) => {
if ("fsPath" in source) await writeFromPath(source.path, source.fsPath);
else if ("data" in source) await writeFromBuffer(source.path, source.data);
else throw new Error("Illegal argument " + typeof source + " " + JSON.stringify(source));
};
/**
* The actual stream transform function
* @param source
* @param _ encoding, ignored
* @param cb
*/
const transform: TransformOptions["transform"] = async (source: ZipSource, _, cb) => {
await writeRecord(source);
cb();
};
/** offset and records keep local state during processing */
let offset = 0;
const records: ZipRecord[] = [];
const zipTransform = new Transform({
readableObjectMode: false,
writableObjectMode: true,
transform,
final
});
return zipTransform;
}

View File

@@ -1,26 +1,78 @@
import jar from "keycloakify/bin/tools/jar";
import { it, describe, vi } from "vitest";
import { jarStream, type ZipEntryGenerator } from "keycloakify/bin/tools/jar";
import { fromBuffer, Entry, ZipFile } from "yauzl";
import { it, describe, assert } from "vitest";
import { Readable } from "stream";
vi.mock("fs", () => ({ promises: { mkdir: () => {} }, createWriteStream: () => {} }));
vi.mock("stream", async () => {
const readableMock = () => {
const mockDecorators = {
on: () => mockDecorators,
pipe: () => mockDecorators
};
return {
from: () => mockDecorators
};
};
type AsyncIterable<T> = {
[Symbol.asyncIterator](): AsyncIterableIterator<T>;
}
async function arrayFromAsync<T>(asyncIterable: AsyncIterable<T>) {
const chunks: T[] = []
for await (const chunk of asyncIterable) chunks.push(chunk)
return chunks
}
async function readToBuffer(stream: NodeJS.ReadableStream) {
return Buffer.concat(await arrayFromAsync(stream as AsyncIterable<Buffer>))
}
function unzip(buffer: Buffer) {
return new Promise<ZipFile>((resolve, reject) =>
fromBuffer(buffer, { lazyEntries: true }, (err, zipFile) => {
if (err !== null) { reject(err) } else { resolve(zipFile) }
}))
}
function readEntry(zipFile: ZipFile, entry: Entry): Promise<Readable> {
return new Promise<Readable>((resolve, reject) => {
zipFile.openReadStream(entry, (err, stream) => {
if (err !== null) { reject(err) } else { resolve(stream) }
})
})
}
function readAll(zipFile: ZipFile): Promise<Map<string, Buffer>> {
return new Promise<Map<string, Buffer>>((resolve, reject) => {
const entries1: Map<string, Buffer> = new Map()
zipFile.on("entry", async (entry: Entry) => {
const stream = await readEntry(zipFile, entry)
const buffer = await readToBuffer(stream)
entries1.set(entry.fileName, buffer)
zipFile.readEntry()
})
zipFile.on("end", () => resolve(entries1))
zipFile.on("error", e => reject(e))
zipFile.readEntry()
})
}
return {
// @ts-ignore
...(await vi.importActual("stream")),
Readable: readableMock()
};
});
describe("jar", () => {
it("creates jar artifacts without error", () => {
jar({ artifactId: "artifactId", groupId: "groupId", rootPath: "rootPath", targetPath: "targetPath", version: "1.0.0" });
it("creates jar artifacts without error", async () => {
async function* mockFiles(): ZipEntryGenerator {
yield { zipPath: "foo", buffer: Buffer.from("foo") }
}
const opts = { artifactId: "someArtifactId", groupId: "someGroupId", version: "1.2.3", asyncPathGeneratorFn: mockFiles }
const zipped = await jarStream(opts);
const buffered = await readToBuffer(zipped.outputStream)
const unzipped = await unzip(buffered)
const entries = await readAll(unzipped)
assert.equal(entries.size, 3)
assert.isOk(entries.has("foo"))
assert.isOk(entries.has("META-INF/MANIFEST.MF"))
assert.isOk(entries.has("META-INF/maven/someGroupId/someArtifactId/pom.properties"))
assert.equal("foo", entries.get("foo")?.toString("utf-8"))
const manifest = entries.get("META-INF/MANIFEST.MF")?.toString("utf-8")
const pomProperties = entries.get("META-INF/maven/someGroupId/someArtifactId/pom.properties")?.toString("utf-8")
assert.isOk(manifest?.includes("Created-By: Keycloakify"))
assert.isOk(pomProperties?.includes("1.2.3"))
assert.isOk(pomProperties?.includes("someGroupId"))
assert.isOk(pomProperties?.includes("someArtifactId"))
});
});

View File

@@ -604,6 +604,13 @@
dependencies:
"@types/node" "*"
"@types/yazl@^2.4.2":
version "2.4.2"
resolved "https://registry.yarnpkg.com/@types/yazl/-/yazl-2.4.2.tgz#d5f8a4752261badbf1a36e8b49e042dc18ec84bc"
integrity sha512-T+9JH8O2guEjXNxqmybzQ92mJUh2oCwDDMSSimZSe1P+pceZiFROZLYmcbqkzV5EUwz6VwcKXCO2S2yUpra6XQ==
dependencies:
"@types/node" "*"
"@vitest/expect@0.29.8":
version "0.29.8"
resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-0.29.8.tgz#6ecdd031b4ea8414717d10b65ccd800908384612"
@@ -3213,6 +3220,13 @@ yauzl@^2.10.0:
buffer-crc32 "~0.2.3"
fd-slicer "~1.1.0"
yazl@^2.5.1:
version "2.5.1"
resolved "https://registry.yarnpkg.com/yazl/-/yazl-2.5.1.tgz#a3d65d3dd659a5b0937850e8609f22fffa2b5c35"
integrity sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==
dependencies:
buffer-crc32 "~0.2.3"
yn@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"