Compare commits

...

34 Commits

SHA1 Message Date
aca8d3f4b7 fix(deps): update dependency powerhooks to ^0.26.1 2023-02-08 16:38:28 +00:00
b5b3af4659 Bump version 2023-02-07 01:32:36 +01:00
6cd231426d Import Blob from node builtins 2023-02-07 01:32:20 +01:00
0c7cd1cd75 Bump version 2023-02-07 01:21:17 +01:00
2425704ead Merge branch 'main' of https://github.com/InseeFrLab/keycloakify 2023-02-07 01:20:33 +01:00
4e22159206 Bump version 2023-02-07 01:20:26 +01:00
52cf1ba02c Fix tsafe related warnings 2023-02-07 01:20:12 +01:00
516e84182f fix(deps): update dependency powerhooks to ^0.26.0 2023-02-05 15:10:42 +00:00
a3a9853e18 bump version 2023-02-05 14:58:53 +01:00
08e26600fd Use keycloakify as bundler by default 2023-02-05 14:58:38 +01:00
7793c2c6ba Update package.json 2023-02-05 14:41:32 +01:00
9e826d16dd Merge pull request #241 from lordvlad/mvn-begone
Mvn begone addendum
2023-02-05 14:41:13 +01:00
80618bbd9c Merge branch 'main' into mvn-begone 2023-02-05 13:36:52 +01:00
38ad47ea75 use hand-crafted promise, pipeline does not resolve properly 2023-02-05 13:32:24 +01:00
45ed359bef fix keycloak theme source path for internal bundler 2023-02-05 13:31:34 +01:00
fcc26c3e7a now that main is a promise, we shuold catch errors 2023-02-05 13:31:03 +01:00
d4ff6b1f40 fix: bundler fix missing directory 2023-02-05 12:59:05 +01:00
557de34eea fix: bundler fix missing change 2023-02-05 12:56:01 +01:00
e034dc4d90 Merge branch 'mvn-begone' of github.com:lordvlad/keycloakify into mvn-begone
* 'mvn-begone' of github.com:lordvlad/keycloakify:
  fix(deps): update garronej_modules_update
  Update README.md
  Rollback via update
  Bump version
  keycloak test script: use env to launch bash
  fix(deps): update dependency powerhooks to ^0.22.0
  Update dependency powerhooks to ^0.21.0
  Relase candidate
  fmt
  Update README.md
  Bump version
  Update src/bin/tools/downloadAndUnzip.ts
  Bump version
  #232
  Bump version
  keycloak test script: use env to launch bash
  fix(deps): update dependency powerhooks to ^0.22.0
  Update dependency powerhooks to ^0.21.0
2023-02-05 12:35:15 +01:00
cfbd1e5e4b fix(bundler): fix type mismatch introduced in last-minute 'fixes' 2023-02-05 12:34:48 +01:00
0df661819f Bump version 2023-02-04 20:51:06 +01:00
1a9f6d10d4 Actually run the top level await 2023-02-04 20:50:53 +01:00
a787215c95 Bump version 2023-02-04 20:39:38 +01:00
64ab400af5 Temporarly restore mvn as default bundler 2023-02-04 20:38:50 +01:00
a463878bf2 Bump version 2023-02-04 20:22:45 +01:00
9f72024c61 Merge pull request #240 from InseeFrLab/mvn-begone
Mvn begone
2023-02-04 19:47:36 +01:00
243fbd4dc9 Set the artifactId name in the build option 2023-02-04 19:36:42 +01:00
4e6a290693 Make new node based bundler the default 2023-02-04 18:02:39 +01:00
ac05d529ca Minor fixes 2023-02-04 17:44:02 +01:00
b38d79004a Merge branch 'main' into mvn-begone 2023-02-03 14:42:05 +01:00
f4a547df11 introduce options to choose a bundle strategy
Pick from 'none', 'keycloakify' or 'mvn', default to 'mvn'. 'none' will
not create a jar, 'keycloakify' will create a jar file using only tools
available to native nodejs, no additional  system library required.
Choosing 'mvn' will behave as before, starting maven in a subprocess.

The bundler can be chosen in `package.json` or via `KEYCLOAKIFY_BUNDLER`
env var.

This commit also adds `KEYCLOAKIFY_GROUP_ID` and
`KEYCLOAKIFY_ARTIFACT_ID` env vars, which will be used to
define group id and artifact id in pom.xml and pom.properties, if given.
2023-02-03 14:28:06 +01:00
2b87c35058 introduce utils for creating a jar archive 2023-02-03 14:06:24 +01:00
b11833e450 fix typo 2023-02-03 13:48:32 +01:00
fa8e119514 fix(deps): update garronej_modules_update 2023-02-01 15:16:03 +00:00
15 changed files with 606 additions and 31 deletions


@@ -49,6 +49,12 @@
# Changelog highlights
## 6.11.4
- You no longer need to have Maven installed to build the theme. Thanks to @lordvlad, [see PR](https://github.com/InseeFrLab/keycloakify/pull/239).
- Feature new build options: [`bundler`](https://docs.keycloakify.dev/build-options#keycloakify.bundler), [`groupId`](https://docs.keycloakify.dev/build-options#keycloakify.groupid), [`artifactId`](https://docs.keycloakify.dev/build-options#keycloakify.artifactid), [`version`](https://docs.keycloakify.dev/build-options#version).
These options can be used to customize the output name of the .jar. You can use environment variables to override the values read in the package.json. Thanks to @lordvlad.
## 6.10.0
- Windows compat (thanks to @lordvlad, [see PR](https://github.com/InseeFrLab/keycloakify/pull/226)). WSL is no longer required 🎉


@@ -1,6 +1,6 @@
{
"name": "keycloakify",
"version": "6.10.0",
"version": "6.11.8",
"description": "Keycloak theme generator for Reacts app",
"repository": {
"type": "git",
@@ -82,11 +82,11 @@
"minimal-polyfills": "^2.2.2",
"minimist": "^1.2.6",
"path-browserify": "^1.0.1",
"powerhooks": "^0.22.0",
"powerhooks": "^0.26.1",
"react-markdown": "^5.0.3",
"rfc4648": "^1.5.2",
"scripting-tools": "^0.19.13",
"tsafe": "^1.4.1",
"tsafe": "^1.4.3",
"tss-react": "4.4.1-rc.0",
"zod": "^3.17.10"
}


@@ -3,7 +3,11 @@ import { assert } from "tsafe/assert";
import type { Equals } from "tsafe";
import { id } from "tsafe/id";
import { parse as urlParse } from "url";
import { typeGuard } from "tsafe/typeGuard";
import { symToStr } from "tsafe/symToStr";
const bundlers = ["mvn", "keycloakify", "none"] as const;
type Bundler = typeof bundlers[number];
type ParsedPackageJson = {
name: string;
version: string;
@@ -12,6 +16,9 @@ type ParsedPackageJson = {
extraPages?: string[];
extraThemeProperties?: string[];
areAppAndKeycloakServerSharingSameDomain?: boolean;
artifactId?: string;
groupId?: string;
bundler?: Bundler;
};
};
@@ -23,7 +30,10 @@ const zParsedPackageJson = z.object({
.object({
"extraPages": z.array(z.string()).optional(),
"extraThemeProperties": z.array(z.string()).optional(),
"areAppAndKeycloakServerSharingSameDomain": z.boolean().optional()
"areAppAndKeycloakServerSharingSameDomain": z.boolean().optional(),
"artifactId": z.string().optional(),
"groupId": z.string().optional(),
"bundler": z.enum(bundlers).optional()
})
.optional()
});
@@ -40,8 +50,9 @@ export namespace BuildOptions {
themeName: string;
extraPages?: string[];
extraThemeProperties?: string[];
//NOTE: Only for the pom.xml file, questionable utility...
groupId: string;
artifactId: string;
bundler: Bundler;
};
export type Standalone = Common & {
@@ -108,7 +119,7 @@ export function readBuildOptions(params: {
const common: BuildOptions.Common = (() => {
const { name, keycloakify = {}, version, homepage } = parsedPackageJson;
const { extraPages, extraThemeProperties } = keycloakify ?? {};
const { extraPages, extraThemeProperties, groupId, artifactId, bundler } = keycloakify ?? {};
const themeName = name
.replace(/^@(.*)/, "$1")
@@ -117,10 +128,26 @@ export function readBuildOptions(params: {
return {
themeName,
"bundler": (() => {
const { KEYCLOAKIFY_BUNDLER } = process.env;
assert(
typeGuard<Bundler | undefined>(
KEYCLOAKIFY_BUNDLER,
[undefined, ...id<readonly string[]>(bundlers)].includes(KEYCLOAKIFY_BUNDLER)
),
`${symToStr({ KEYCLOAKIFY_BUNDLER })} should be one of ${bundlers.join(", ")}`
);
return KEYCLOAKIFY_BUNDLER ?? bundler ?? "keycloakify";
})(),
"artifactId": process.env.KEYCLOAKIFY_ARTIFACT_ID ?? artifactId ?? `${themeName}-keycloak-theme`,
"groupId": (() => {
const fallbackGroupId = `${themeName}.keycloak`;
return (
process.env.KEYCLOAKIFY_GROUP_ID ??
groupId ??
(!homepage
? fallbackGroupId
: urlParse(homepage)
@@ -130,7 +157,7 @@ export function readBuildOptions(params: {
.join(".") ?? fallbackGroupId) + ".keycloak"
);
})(),
"version": version,
"version": process.env.KEYCLOAKIFY_VERSION ?? version,
extraPages,
extraThemeProperties,
isSilent
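
Taken together, the schema and resolution logic above give each new option a three-level precedence: environment variable first, then the keycloakify field of package.json, then a computed default ("keycloakify" for bundler, `${themeName}-keycloak-theme` for artifactId, and a groupId derived from homepage or falling back to `${themeName}.keycloak`). A hypothetical package.json exercising the new fields (all values illustrative):

{
    "name": "my-theme",
    "keycloakify": {
        "bundler": "keycloakify",
        "groupId": "com.example.keycloak",
        "artifactId": "my-theme-keycloak-theme"
    }
}

At build time, KEYCLOAKIFY_BUNDLER, KEYCLOAKIFY_GROUP_ID, KEYCLOAKIFY_ARTIFACT_ID and KEYCLOAKIFY_VERSION override the corresponding values.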


@@ -7,6 +7,8 @@ import type { BuildOptions } from "./BuildOptions";
export type BuildOptionsLike = {
themeName: string;
groupId: string;
artifactId?: string;
version: string;
};
{
@@ -16,7 +18,6 @@ export type BuildOptionsLike = {
}
export function generateJavaStackFiles(params: {
version: string;
keycloakThemeBuildingDirPath: string;
doBundlesEmailTemplate: boolean;
buildOptions: BuildOptionsLike;
@@ -24,14 +25,11 @@
jarFilePath: string;
} {
const {
version,
buildOptions: { groupId, themeName },
buildOptions: { groupId, themeName, version, artifactId },
keycloakThemeBuildingDirPath,
doBundlesEmailTemplate
} = params;
const artefactId = `${themeName}-keycloak-theme`;
{
const { pomFileCode } = (function generatePomFileCode(): {
pomFileCode: string;
@@ -43,9 +41,9 @@ export function generateJavaStackFiles(params: {
` xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">`,
` <modelVersion>4.0.0</modelVersion>`,
` <groupId>${groupId}</groupId>`,
` <artifactId>${artefactId}</artifactId>`,
` <artifactId>${artifactId}</artifactId>`,
` <version>${version}</version>`,
` <name>${artefactId}</name>`,
` <name>${artifactId}</name>`,
` <description />`,
`</project>`
].join("\n");
@@ -84,6 +82,6 @@ export function generateJavaStackFiles(params: {
}
return {
"jarFilePath": pathJoin(keycloakThemeBuildingDirPath, "target", `${artefactId}-${version}.jar`)
"jarFilePath": pathJoin(keycloakThemeBuildingDirPath, "target", `${artifactId}-${version}.jar`)
};
}
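
Worked example: with groupId com.example.keycloak, artifactId my-theme-keycloak-theme and version 1.0.0 (illustrative values), the generated pom.xml carries exactly those coordinates and jarFilePath resolves to <keycloakThemeBuildingDirPath>/target/my-theme-keycloak-theme-1.0.0.jar.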


@@ -4,5 +4,5 @@ export * from "./keycloakify";
import { main } from "./keycloakify";
if (require.main === module) {
main();
main().catch(e => console.error(e));
}


@@ -7,6 +7,9 @@ import * as fs from "fs";
import { readBuildOptions } from "./BuildOptions";
import { getLogger } from "../tools/logger";
import { getCliOptions } from "../tools/cliOptions";
import jar from "../tools/jar";
import { assert } from "tsafe/assert";
import type { Equals } from "tsafe";
const reactProjectDirPath = process.cwd();
@@ -45,17 +48,34 @@ export async function main() {
});
const { jarFilePath } = generateJavaStackFiles({
"version": buildOptions.version,
keycloakThemeBuildingDirPath,
doBundlesEmailTemplate,
buildOptions
});
child_process.execSync("mvn package", {
"cwd": keycloakThemeBuildingDirPath
});
switch (buildOptions.bundler) {
case "none":
logger.log("😱 Skipping bundling step, there will be no jar");
break;
case "keycloakify":
logger.log("🫶 Let keycloakify do its thang");
await jar({
"rootPath": pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources"),
"version": buildOptions.version,
"groupId": buildOptions.groupId,
"artifactId": buildOptions.artifactId,
"targetPath": jarFilePath
});
break;
case "mvn":
logger.log("🫙 Run maven to deliver a jar");
child_process.execSync("mvn package", { "cwd": keycloakThemeBuildingDirPath });
break;
default:
assert<Equals<typeof buildOptions.bundler, never>>(false);
}
//We want, however, to test in a container running the latest Keycloak version
// We want, however, to test in a container running the latest Keycloak version
const containerKeycloakVersion = "20.0.1";
generateStartKeycloakTestingContainer({
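
The bundler selected by readBuildOptions drives this switch, so a single build can be redirected without editing package.json; a hypothetical one-off invocation forcing the maven path (assuming the keycloakify bin is wired into the project as usual):

KEYCLOAKIFY_BUNDLER=mvn npx keycloakify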

src/bin/tools/crc32.ts Normal file

@@ -0,0 +1,54 @@
import { Readable } from "stream";
const crc32tab = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
];
/**
*
* @param input the input you want the checksum of: either a byte stream, a string or a buffer
* @returns a promise for the checksum (uint32)
*/
export function crc32(input: Readable | string | Buffer): Promise<number> {
if (typeof input === "string") {
let crc = ~0;
for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input.charCodeAt(i)) & 0xff];
return Promise.resolve((crc ^ -1) >>> 0);
} else if (input instanceof Buffer) {
let crc = ~0;
for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input[i]) & 0xff];
return Promise.resolve((crc ^ -1) >>> 0);
} else if (input instanceof Readable) {
return new Promise<number>((resolve, reject) => {
let crc = ~0;
input.on("end", () => resolve((crc ^ -1) >>> 0));
input.on("error", e => reject(e));
input.on("data", (chunk: Buffer) => {
for (let i = 0; i < chunk.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ chunk[i]) & 0xff];
});
});
} else {
throw new Error("Unsupported input " + typeof input);
}
}
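
A minimal usage sketch for the three accepted input kinds; the same bytes yield the same uint32 whichever overload is used (hello.txt is a hypothetical file holding the same text):

import { createReadStream } from "fs";
import { crc32 } from "./crc32";

(async () => {
    console.log(await crc32("hello"));                       // string input
    console.log(await crc32(Buffer.from("hello")));          // Buffer input
    console.log(await crc32(createReadStream("hello.txt"))); // Readable input
})();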

src/bin/tools/deflate.ts Normal file

@@ -0,0 +1,61 @@
import { PassThrough, Readable, TransformCallback, Writable } from "stream";
import { pipeline } from "stream/promises";
import { deflateRaw as deflateRawCb, createDeflateRaw } from "zlib";
import { promisify } from "util";
import { crc32 } from "./crc32";
import tee from "./tee";
const deflateRaw = promisify(deflateRawCb);
/**
* A stream transformer that records the number of bytes
* passed in its `size` property.
*/
class ByteCounter extends PassThrough {
size: number = 0;
_transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback) {
if ("length" in chunk) this.size += chunk.length;
super._transform(chunk, encoding, callback);
}
}
/**
* @param data buffer containing the data to be compressed
* @returns a buffer containing the compressed/deflated data and the crc32 checksum
* of the source data
*/
export async function deflateBuffer(data: Buffer) {
const [deflated, checksum] = await Promise.all([deflateRaw(data), crc32(data)]);
return { deflated, crc32: checksum };
}
/**
* @param input a byte stream, containing data to be compressed
* @param sink a method that will accept chunks of compressed data; we don't pass
* a writable here, since we don't want the writable stream to be closed after
* a single file
* @returns a promise, which will resolve with the crc32 checksum and the
* compressed size
*/
export async function deflateStream(input: Readable, sink: (chunk: Buffer) => void) {
const deflateWriter = new Writable({
write(chunk, _, callback) {
sink(chunk);
callback();
}
});
// tee the input stream, so we can compress and calc crc32 in parallel
const [rs1, rs2] = tee(input);
const byteCounter = new ByteCounter();
const [_, crc] = await Promise.all([
// pipe input into zip compressor, count the bytes
// returned and pass compressed data to the sink
pipeline(rs1, createDeflateRaw(), byteCounter, deflateWriter),
// calc checksum
crc32(rs2)
]);
return { crc32: crc, compressedSize: byteCounter.size };
}
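
A minimal usage sketch for both helpers (big-file.bin is a hypothetical input file):

import { createReadStream } from "fs";
import { deflateBuffer, deflateStream } from "./deflate";

(async () => {
    // one-shot: compress an in-memory buffer and get its crc32 in the same call
    const { deflated, crc32 } = await deflateBuffer(Buffer.from("hello world"));
    console.log(deflated.length, crc32);

    // streaming: compressed chunks are handed to the sink as they are produced
    const chunks: Buffer[] = [];
    const { crc32: sum, compressedSize } = await deflateStream(createReadStream("big-file.bin"), chunk => chunks.push(chunk));
    console.log(sum, compressedSize === Buffer.concat(chunks).length);
})();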


@@ -243,7 +243,7 @@ async function* iterateZipArchive(zipFile: string): ZipRecordGenerator {
const filenameLength = chunk.readUint16LE(i + 28);
const extraLength = chunk.readUint16LE(i + 30);
const commentLength = chunk.readUint16LE(i + 32);
// Start of thea actual content byte stream is after the 'local' record header,
// Start of the actual content byte stream is after the 'local' record header,
// which is 30 bytes long plus filename and extra field
const start = chunk.readUint32LE(i + 42) + 30 + filenameLength + extraLength;
const end = start + compressedFileSize;


@@ -3,10 +3,10 @@ import { join as pathJoin } from "path";
import { constants } from "fs";
import { chmod, stat } from "fs/promises";
async () => {
var { bin } = await import(pathJoin(getProjectRoot(), "package.json"));
(async () => {
const { bin } = await import(pathJoin(getProjectRoot(), "package.json"));
var promises = Object.values<string>(bin).map(async scriptPath => {
const promises = Object.values<string>(bin).map(async scriptPath => {
const fullPath = pathJoin(getProjectRoot(), scriptPath);
const oldMode = (await stat(fullPath)).mode;
const newMode = oldMode | constants.S_IXUSR | constants.S_IXGRP | constants.S_IXOTH;
@@ -14,4 +14,4 @@ async () => {
});
await Promise.all(promises);
};
})();

src/bin/tools/jar.ts Normal file

@@ -0,0 +1,102 @@
import { Readable, Transform } from "stream";
import { dirname, relative, sep } from "path";
import { createWriteStream } from "fs";
import walk from "./walk";
import type { ZipSource } from "./zip";
import zip from "./zip";
import { mkdir } from "fs/promises";
/** Trim leading whitespace from every line */
const trimIndent = (s: string) => s.replace(/(\n)\s+/g, "$1");
type JarArgs = {
rootPath: string;
targetPath: string;
groupId: string;
artifactId: string;
version: string;
};
/**
* Create a jar archive, using the resources found at `rootPath` (a directory) and write the
* archive to `targetPath` (a file). Use `groupId`, `artifactId` and `version` to define
* the contents of the pom.properties file which is going to be added to the archive.
*/
export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
const manifest: ZipSource = {
path: "META-INF/MANIFEST.MF",
data: Buffer.from(
trimIndent(
`Manifest-Version: 1.0
Archiver-Version: Plexus Archiver
Created-By: Keycloakify
Built-By: unknown
Build-Jdk: 19.0.0`
)
)
};
const pomProps: ZipSource = {
path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
data: Buffer.from(
trimIndent(
`# Generated by keycloakify
# ${new Date()}
artifactId=${artifactId}
groupId=${groupId}
version=${version}`
)
)
};
/**
* Convert every path entry to a ZipSource record, and when all records are
* processed, append records for MANIFEST.MF and pom.properties
*/
const pathToRecord = () =>
new Transform({
objectMode: true,
transform: function (fsPath, _, cb) {
const path = relative(rootPath, fsPath).split(sep).join("/");
this.push({ path, fsPath });
cb();
},
final: function () {
this.push(manifest);
this.push(pomProps);
this.push(null);
}
});
await mkdir(dirname(targetPath), { recursive: true });
// Create an async pipeline, wait until everything is fully processed
await new Promise<void>((resolve, reject) => {
// walk all files in `rootPath` recursively
Readable.from(walk(rootPath))
// transform every path into a ZipSource object
.pipe(pathToRecord())
// let the zip lib convert all ZipSource objects into a byte stream
.pipe(zip())
// write that byte stream to targetPath
.pipe(createWriteStream(targetPath, { encoding: "binary" }))
.on("finish", () => resolve())
.on("error", e => reject(e));
});
}
/**
* Standalone usage, call e.g. `ts-node jar.ts dirWithSources some-jar.jar`
*/
if (require.main === module) {
const main = () =>
jar({
rootPath: process.argv[2],
targetPath: process.argv[3],
artifactId: process.env.ARTIFACT_ID ?? "artifact",
groupId: process.env.GROUP_ID ?? "group",
version: process.env.VERSION ?? "1.0.0"
});
main().catch(e => console.error(e));
}

src/bin/tools/tee.ts Normal file

@@ -0,0 +1,37 @@
import { PassThrough, Readable } from "stream";
export default function tee(input: Readable) {
const a = new PassThrough();
const b = new PassThrough();
let aFull = false;
let bFull = false;
a.on("drain", () => {
aFull = false;
if (!aFull && !bFull) input.resume();
});
b.on("drain", () => {
bFull = false;
if (!aFull && !bFull) input.resume();
});
input.on("error", e => {
a.emit("error", e);
b.emit("error", e);
});
input.on("data", chunk => {
aFull = !a.write(chunk);
bFull = !b.write(chunk);
if (aFull || bFull) input.pause();
});
input.on("end", () => {
a.end();
b.end();
});
return [a, b] as const;
}
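
This is the same pattern deflateStream relies on: one source consumed by two independent readers. A usage sketch (some-file.bin is hypothetical):

import { createReadStream } from "fs";
import { crc32 } from "./crc32";
import tee from "./tee";

(async () => {
    // checksum one copy of the stream while counting bytes on the other
    const [forCrc, forSize] = tee(createReadStream("some-file.bin"));
    const size = new Promise<number>(resolve => {
        let n = 0;
        forSize.on("data", chunk => (n += chunk.length));
        forSize.on("end", () => resolve(n));
    });
    console.log(await crc32(forCrc), await size);
})();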

src/bin/tools/walk.ts Normal file

@@ -0,0 +1,19 @@
import { readdir } from "fs/promises";
import { resolve } from "path";
/**
* Asynchronously and recursively walk a directory tree, yielding every file and directory
* found
*
* @param root the starting directory
* @returns AsyncGenerator
*/
export default async function* walk(root: string): AsyncGenerator<string, void, void> {
for (const entry of await readdir(root, { withFileTypes: true })) {
const absolutePath = resolve(root, entry.name);
if (entry.isDirectory()) {
yield absolutePath;
yield* walk(absolutePath);
} else yield absolutePath;
}
}
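
Because walk is an async generator it composes directly with for await and with Readable.from, which is exactly how jar.ts consumes it. A standalone sketch:

import walk from "./walk";

(async () => {
    // print every file and directory below the current working directory
    for await (const entry of walk(process.cwd())) {
        console.log(entry);
    }
})();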

src/bin/tools/zip.ts Normal file

@@ -0,0 +1,246 @@
import { Transform, TransformOptions } from "stream";
import { createReadStream } from "fs";
import { stat } from "fs/promises";
import { Blob } from "buffer";
import { deflateBuffer, deflateStream } from "./deflate";
/**
* Zip source
* @property path the name of the entry in the archive
* @property fsPath path of the source file, if the source is an actual file
* @property data the actual data buffer, if the source is constructed in-memory
*/
export type ZipSource = { path: string } & ({ fsPath: string } | { data: Buffer });
export type ZipRecord = {
path: string;
compression: "deflate" | undefined;
uncompressedSize: number;
compressedSize?: number;
crc32?: number;
offset?: number;
};
/**
* @returns the actual byte size of a string
*/
function utf8size(s: string) {
return new Blob([s]).size;
}
/**
* @param record
* @returns a buffer representing a Zip local header
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Local_file_header
*/
function localHeader(record: ZipRecord) {
const { path, compression, uncompressedSize } = record;
const filenameSize = utf8size(path);
const buf = Buffer.alloc(30 + filenameSize);
buf.writeUInt32LE(0x04_03_4b_50, 0); // local header signature
buf.writeUInt16LE(10, 4); // min version
// we write 0x08 (bit 3 set) because crc and compressed size are unknown at this point
buf.writeUInt16LE(0x08, 6); // general purpose bit flag
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 8);
buf.writeUInt16LE(0, 10); // modified time
buf.writeUInt16LE(0, 12); // modified date
buf.writeUInt32LE(0, 14); // crc unknown
buf.writeUInt32LE(0, 18); // compressed size unknown
buf.writeUInt32LE(uncompressedSize, 22);
buf.writeUInt16LE(filenameSize, 26);
buf.writeUInt16LE(0, 28); // extra field length
buf.write(path, 30, "utf-8");
return buf;
}
/**
* @param record
* @returns a buffer representing a Zip central header
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header
*/
function centralHeader(record: ZipRecord) {
const { path, compression, crc32, compressedSize, uncompressedSize, offset } = record;
const filenameSize = utf8size(path);
const buf = Buffer.alloc(46 + filenameSize);
const isFile = !path.endsWith("/");
if (typeof offset === "undefined") throw new Error("Illegal argument");
// we don't want to deal with possibly messed up file or directory
// permissions, so we ignore the original permissions
const externalAttr = isFile ? 0x81a40000 : 0x41ed0000;
buf.writeUInt32LE(0x0201_4b50, 0); // central header signature
buf.writeUInt16LE(10, 4); // version
buf.writeUInt16LE(10, 6); // min version
buf.writeUInt16LE(0, 8); // general purpose bit flag
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 10);
buf.writeUInt16LE(0, 12); // modified time
buf.writeUInt16LE(0, 14); // modified date
buf.writeUInt32LE(crc32 || 0, 16);
buf.writeUInt32LE(compressedSize || 0, 20);
buf.writeUInt32LE(uncompressedSize, 24);
buf.writeUInt16LE(filenameSize, 28);
buf.writeUInt16LE(0, 30); // extra field length
buf.writeUInt16LE(0, 32); // comment field length
buf.writeUInt16LE(0, 34); // disk number
buf.writeUInt16LE(0, 36); // internal
buf.writeUInt32LE(externalAttr, 38); // external
buf.writeUInt32LE(offset, 42); // offset where file starts
buf.write(path, 46, "utf-8");
return buf;
}
/**
* @returns a buffer representing a Zip End-Of-Central-Directory block
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#End_of_central_directory_record_(EOCD)
*/
function eocd({ offset, cdSize, nRecords }: { offset: number; cdSize: number; nRecords: number }) {
const buf = Buffer.alloc(22);
buf.writeUint32LE(0x06054b50, 0); // eocd signature
buf.writeUInt16LE(0, 4); // disc number
buf.writeUint16LE(0, 6); // disc where central directory starts
buf.writeUint16LE(nRecords, 8); // records on this disc
buf.writeUInt16LE(nRecords, 10); // records total
buf.writeUInt32LE(cdSize, 12); // byte size of cd
buf.writeUInt32LE(offset, 16); // cd offset
buf.writeUint16LE(0, 20); // comment length
return buf;
}
/**
* @returns a stream Transform, which reads a stream of ZipRecords and
* writes a bytestream
*/
export default function zip() {
/**
* This is called when the input stream of ZipSource items is finished.
* Will write the central directory and end-of-central-directory blocks.
*/
const final = () => {
// write central directory
let cdSize = 0;
for (const record of records) {
const head = centralHeader(record);
zipTransform.push(head);
cdSize += head.length;
}
// write end-of-central-directory
zipTransform.push(eocd({ offset, cdSize, nRecords: records.length }));
// signal stream end
zipTransform.push(null);
};
/**
* Write a directory entry to the archive
* @param path
*/
const writeDir = async (path: string) => {
const record: ZipRecord = {
path: path + "/",
offset,
compression: undefined,
uncompressedSize: 0
};
const head = localHeader(record);
zipTransform.push(head);
records.push(record);
offset += head.length;
};
/**
* Write a file entry to the archive
* @param archivePath path of the file in archive
* @param fsPath path to file on filesystem
* @param size of the actual, uncompressed, file
*/
const writeFile = async (archivePath: string, fsPath: string, size: number) => {
const record: ZipRecord = {
path: archivePath,
offset,
compression: "deflate",
uncompressedSize: size
};
const head = localHeader(record);
zipTransform.push(head);
const { crc32, compressedSize } = await deflateStream(createReadStream(fsPath), chunk => zipTransform.push(chunk));
record.crc32 = crc32;
record.compressedSize = compressedSize;
records.push(record);
offset += head.length + compressedSize;
};
/**
* Write archive record based on filesystem file or directory
* @param archivePath path of item in archive
* @param fsPath path to item on filesystem
*/
const writeFromPath = async (archivePath: string, fsPath: string) => {
const fileStats = await stat(fsPath);
fileStats.isDirectory() ? await writeDir(archivePath) /**/ : await writeFile(archivePath, fsPath, fileStats.size) /**/;
};
/**
* Write archive record based on data in a buffer
* @param path
* @param data
*/
const writeFromBuffer = async (path: string, data: Buffer) => {
const { deflated, crc32 } = await deflateBuffer(data);
const record: ZipRecord = {
path,
compression: "deflate",
crc32,
uncompressedSize: data.length,
compressedSize: deflated.length,
offset
};
const head = localHeader(record);
zipTransform.push(head);
zipTransform.push(deflated);
records.push(record);
offset += head.length + deflated.length;
};
/**
* Write an archive record
* @param source
*/
const writeRecord = async (source: ZipSource) => {
if ("fsPath" in source) await writeFromPath(source.path, source.fsPath);
else if ("data" in source) await writeFromBuffer(source.path, source.data);
else throw new Error("Illegal argument " + typeof source + " " + JSON.stringify(source));
};
/**
* The actual stream transform function
* @param source
* @param _ encoding, ignored
* @param cb
*/
const transform: TransformOptions["transform"] = async (source: ZipSource, _, cb) => {
await writeRecord(source);
cb();
};
/** offset and records keep local state during processing */
let offset = 0;
const records: ZipRecord[] = [];
const zipTransform = new Transform({
readableObjectMode: false,
writableObjectMode: true,
transform,
final
});
return zipTransform;
}
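
The transform takes ZipSource objects in (object mode) and emits raw archive bytes, so it can also be driven without jar.ts; a minimal sketch mixing an in-memory entry with one read from disk (paths are illustrative):

import { Readable } from "stream";
import { createWriteStream } from "fs";
import zip from "./zip";
import type { ZipSource } from "./zip";

const sources: ZipSource[] = [
    { path: "hello.txt", data: Buffer.from("hello zip") },
    { path: "src/index.ts", fsPath: "./src/index.ts" }
];

Readable.from(sources)
    .pipe(zip())
    .pipe(createWriteStream("out.zip", { encoding: "binary" }))
    .on("finish", () => console.log("archive written"));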


@@ -1639,15 +1639,15 @@ please-upgrade-node@^3.2.0:
dependencies:
semver-compare "^1.0.0"
powerhooks@^0.22.0:
version "0.22.0"
resolved "https://registry.yarnpkg.com/powerhooks/-/powerhooks-0.22.0.tgz#501cbdd28fd5427dd54e1796cd1089a02a101543"
integrity sha512-3HTG7MA9i8rE9Pk25TTO9bXAY12WWi+bypO5ZWGUN03MlqM1gWQDtxrTPJjx2zR3e6DbO+0jaunTdHv22fYNbg==
powerhooks@^0.26.1:
version "0.26.1"
resolved "https://registry.yarnpkg.com/powerhooks/-/powerhooks-0.26.1.tgz#4497cb570af7de68a2120c6afc8326524a4a64a0"
integrity sha512-jaT1lx6zjB3NkBQDco/aKC0pxm5au0qi1pgWD9Gu1b3f8pwMclP7ToyGyhrPp/B6zFGNIpGDbE9WiXqEQ8ER7g==
dependencies:
evt "^2.4.13"
memoizee "^0.4.15"
resize-observer-polyfill "^1.5.1"
tsafe "^1.4.1"
tsafe "^1.4.3"
prettier@^2.3.0:
version "2.7.1"
@@ -2002,6 +2002,11 @@ tsafe@^1.4.1:
resolved "https://registry.yarnpkg.com/tsafe/-/tsafe-1.4.1.tgz#59cdad8ac41babf88e56dcd1a683ae2fb145d059"
integrity sha512-3IDBalvf6SyvHFS14UiwCWzqdSdo+Q0k2J7DZyJYaHW/iraW9DJpaBKDJpry3yQs3o/t/A+oGaRW3iVt2lKxzA==
tsafe@^1.4.3:
version "1.4.3"
resolved "https://registry.yarnpkg.com/tsafe/-/tsafe-1.4.3.tgz#a98ce83616f0d9c01e3c6167a2ead45ba455b2ae"
integrity sha512-KjCdgjIqsbKW9oeJGSMVC23jhWm/VXJwkaZ7jffo/WaTioLGTHJqliHe9dECEVzIACNVNs/fwtKwU8wWK4jY4g==
tslib@^2.1.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"