Compare commits
62 Commits
SHA1 |
---|
aca8d3f4b7 |
b5b3af4659 |
6cd231426d |
0c7cd1cd75 |
2425704ead |
4e22159206 |
52cf1ba02c |
516e84182f |
a3a9853e18 |
08e26600fd |
7793c2c6ba |
9e826d16dd |
80618bbd9c |
38ad47ea75 |
45ed359bef |
fcc26c3e7a |
d4ff6b1f40 |
557de34eea |
e034dc4d90 |
cfbd1e5e4b |
0df661819f |
1a9f6d10d4 |
a787215c95 |
64ab400af5 |
a463878bf2 |
9f72024c61 |
243fbd4dc9 |
4e6a290693 |
ac05d529ca |
b38d79004a |
f4a547df11 |
2b87c35058 |
b11833e450 |
fa8e119514 |
677cb5c330 |
6e74c79bfe |
54474f5908 |
99cc0f519b |
92a01f89ef |
fd83a0c743 |
988e46c875 |
f081c2fc20 |
b4b376a1a5 |
0db4179d47 |
795b7c6234 |
091b9a57f5 |
564e1422ac |
8ed4ed3fc4 |
29fe4566a7 |
ae3bfb28ed |
14aab97d8a |
52d7a47cd7 |
f338dcbeed |
dcec058a22 |
2bdc6b156b |
84ca9e6b81 |
11cb0fd2db |
3f620ffb6f |
1a0e05d073 |
85cecc9811 |
9899f742a8 |
b5484740b7 |
10 README.md

@@ -49,6 +49,16 @@

# Changelog highlights

## 6.11.4

- You no longer need to have Maven installed to build the theme. Thanks to @lordvlad, [see PR](https://github.com/InseeFrLab/keycloakify/pull/239).
- Feature new build options: [`bundler`](https://docs.keycloakify.dev/build-options#keycloakify.bundler), [`groupId`](https://docs.keycloakify.dev/build-options#keycloakify.groupid), [`artifactId`](https://docs.keycloakify.dev/build-options#keycloakify.artifactid), [`version`](https://docs.keycloakify.dev/build-options#version).
  These options can be used to customize the output name of the .jar. You can use environment variables to override the values read from the package.json. Thanks to @lordvlad.

## 6.10.0

- Windows compat (thanks to @lordvlad, [see PR](https://github.com/InseeFrLab/keycloakify/pull/226)). WSL is no longer required 🎉

## 6.8.4

- `@emotion/react` is no longer a peer dependency of Keycloakify.
6 package.json (Executable file → Normal file)

@@ -1,6 +1,6 @@
 {
     "name": "keycloakify",
-    "version": "6.8.13",
+    "version": "6.11.8",
     "description": "Keycloak theme generator for Reacts app",
     "repository": {
         "type": "git",
@@ -82,11 +82,11 @@
         "minimal-polyfills": "^2.2.2",
         "minimist": "^1.2.6",
         "path-browserify": "^1.0.1",
-        "powerhooks": "^0.21.0",
+        "powerhooks": "^0.26.1",
         "react-markdown": "^5.0.3",
         "rfc4648": "^1.5.2",
         "scripting-tools": "^0.19.13",
-        "tsafe": "^1.4.1",
+        "tsafe": "^1.4.3",
         "tss-react": "4.4.1-rc.0",
         "zod": "^3.17.10"
     }
@@ -7,11 +7,11 @@ import { promptKeycloakVersion } from "./promptKeycloakVersion";
 import { getCliOptions } from "./tools/cliOptions";
 import { getLogger } from "./tools/logger";

-export function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
+export async function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
     const { keycloakVersion, destDirPath, isSilent } = params;

     for (const ext of ["", "-community"]) {
-        downloadAndUnzip({
+        await downloadAndUnzip({
             "destDirPath": destDirPath,
             "url": `https://github.com/keycloak/keycloak/archive/refs/tags/${keycloakVersion}.zip`,
             "pathOfDirToExtractInArchive": `keycloak-${keycloakVersion}/themes/src/main/resources${ext}/theme`,
@@ -31,7 +31,7 @@ if (require.main === module) {

     logger.log(`Downloading builtins theme of Keycloak ${keycloakVersion} here ${destDirPath}`);

-    downloadBuiltinKeycloakTheme({
+    await downloadBuiltinKeycloakTheme({
         keycloakVersion,
         destDirPath,
         isSilent
@ -3,7 +3,11 @@ import { assert } from "tsafe/assert";
|
||||
import type { Equals } from "tsafe";
|
||||
import { id } from "tsafe/id";
|
||||
import { parse as urlParse } from "url";
|
||||
import { typeGuard } from "tsafe/typeGuard";
|
||||
import { symToStr } from "tsafe/symToStr";
|
||||
|
||||
const bundlers = ["mvn", "keycloakify", "none"] as const;
|
||||
type Bundler = typeof bundlers[number];
|
||||
type ParsedPackageJson = {
|
||||
name: string;
|
||||
version: string;
|
||||
@ -12,6 +16,9 @@ type ParsedPackageJson = {
|
||||
extraPages?: string[];
|
||||
extraThemeProperties?: string[];
|
||||
areAppAndKeycloakServerSharingSameDomain?: boolean;
|
||||
artifactId?: string;
|
||||
groupId?: string;
|
||||
bundler?: Bundler;
|
||||
};
|
||||
};
|
||||
|
||||
@ -23,7 +30,10 @@ const zParsedPackageJson = z.object({
|
||||
.object({
|
||||
"extraPages": z.array(z.string()).optional(),
|
||||
"extraThemeProperties": z.array(z.string()).optional(),
|
||||
"areAppAndKeycloakServerSharingSameDomain": z.boolean().optional()
|
||||
"areAppAndKeycloakServerSharingSameDomain": z.boolean().optional(),
|
||||
"artifactId": z.string().optional(),
|
||||
"groupId": z.string().optional(),
|
||||
"bundler": z.enum(bundlers).optional()
|
||||
})
|
||||
.optional()
|
||||
});
|
||||
@ -40,8 +50,9 @@ export namespace BuildOptions {
|
||||
themeName: string;
|
||||
extraPages?: string[];
|
||||
extraThemeProperties?: string[];
|
||||
//NOTE: Only for the pom.xml file, questionable utility...
|
||||
groupId: string;
|
||||
artifactId: string;
|
||||
bundler: Bundler;
|
||||
};
|
||||
|
||||
export type Standalone = Common & {
|
||||
@ -108,7 +119,7 @@ export function readBuildOptions(params: {
|
||||
const common: BuildOptions.Common = (() => {
|
||||
const { name, keycloakify = {}, version, homepage } = parsedPackageJson;
|
||||
|
||||
const { extraPages, extraThemeProperties } = keycloakify ?? {};
|
||||
const { extraPages, extraThemeProperties, groupId, artifactId, bundler } = keycloakify ?? {};
|
||||
|
||||
const themeName = name
|
||||
.replace(/^@(.*)/, "$1")
|
||||
@ -117,10 +128,26 @@ export function readBuildOptions(params: {
|
||||
|
||||
return {
|
||||
themeName,
|
||||
"bundler": (() => {
|
||||
const { KEYCLOAKIFY_BUNDLER } = process.env;
|
||||
|
||||
assert(
|
||||
typeGuard<Bundler | undefined>(
|
||||
KEYCLOAKIFY_BUNDLER,
|
||||
[undefined, ...id<readonly string[]>(bundlers)].includes(KEYCLOAKIFY_BUNDLER)
|
||||
),
|
||||
`${symToStr({ KEYCLOAKIFY_BUNDLER })} should be one of ${bundlers.join(", ")}`
|
||||
);
|
||||
|
||||
return KEYCLOAKIFY_BUNDLER ?? bundler ?? "keycloakify";
|
||||
})(),
|
||||
"artifactId": process.env.KEYCLOAKIFY_ARTIFACT_ID ?? artifactId ?? `${themeName}-keycloak-theme`,
|
||||
"groupId": (() => {
|
||||
const fallbackGroupId = `${themeName}.keycloak`;
|
||||
|
||||
return (
|
||||
process.env.KEYCLOAKIFY_GROUP_ID ??
|
||||
groupId ??
|
||||
(!homepage
|
||||
? fallbackGroupId
|
||||
: urlParse(homepage)
|
||||
@ -130,7 +157,7 @@ export function readBuildOptions(params: {
|
||||
.join(".") ?? fallbackGroupId) + ".keycloak"
|
||||
);
|
||||
})(),
|
||||
"version": version,
|
||||
"version": process.env.KEYCLOAKIFY_VERSION ?? version,
|
||||
extraPages,
|
||||
extraThemeProperties,
|
||||
isSilent
|
||||
|
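To make the new options concrete, here is a minimal sketch, assuming nothing beyond the schema shown above, of a `keycloakify` configuration expressed as a TypeScript literal; every value is made up for illustration:

```ts
// A hypothetical package.json "keycloakify" field that the zod schema above would accept;
// all values below are illustrative, not taken from the diff.
const keycloakifyOptions = {
    "extraPages": ["my-extra-page-1.ftl"],
    "extraThemeProperties": ["env=demo"],
    "bundler": "keycloakify", // or "mvn" or "none"
    "groupId": "dev.example.keycloak",
    "artifactId": "my-theme-keycloak-theme"
} as const;

// At build time, KEYCLOAKIFY_BUNDLER, KEYCLOAKIFY_GROUP_ID, KEYCLOAKIFY_ARTIFACT_ID and
// KEYCLOAKIFY_VERSION take precedence over these values, which in turn take precedence
// over the defaults derived from the package name and homepage.
```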
@ -7,6 +7,8 @@ import type { BuildOptions } from "./BuildOptions";
|
||||
export type BuildOptionsLike = {
|
||||
themeName: string;
|
||||
groupId: string;
|
||||
artifactId?: string;
|
||||
version: string;
|
||||
};
|
||||
|
||||
{
|
||||
@ -16,7 +18,6 @@ export type BuildOptionsLike = {
|
||||
}
|
||||
|
||||
export function generateJavaStackFiles(params: {
|
||||
version: string;
|
||||
keycloakThemeBuildingDirPath: string;
|
||||
doBundlesEmailTemplate: boolean;
|
||||
buildOptions: BuildOptionsLike;
|
||||
@ -24,8 +25,7 @@ export function generateJavaStackFiles(params: {
|
||||
jarFilePath: string;
|
||||
} {
|
||||
const {
|
||||
version,
|
||||
buildOptions: { groupId, themeName },
|
||||
buildOptions: { groupId, themeName, version, artifactId },
|
||||
keycloakThemeBuildingDirPath,
|
||||
doBundlesEmailTemplate
|
||||
} = params;
|
||||
@ -34,8 +34,6 @@ export function generateJavaStackFiles(params: {
|
||||
const { pomFileCode } = (function generatePomFileCode(): {
|
||||
pomFileCode: string;
|
||||
} {
|
||||
const artefactId = `${themeName}-keycloak-theme`;
|
||||
|
||||
const pomFileCode = [
|
||||
`<?xml version="1.0"?>`,
|
||||
`<project xmlns="http://maven.apache.org/POM/4.0.0"`,
|
||||
@ -43,9 +41,9 @@ export function generateJavaStackFiles(params: {
|
||||
` xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">`,
|
||||
` <modelVersion>4.0.0</modelVersion>`,
|
||||
` <groupId>${groupId}</groupId>`,
|
||||
` <artifactId>${artefactId}</artifactId>`,
|
||||
` <artifactId>${artifactId}</artifactId>`,
|
||||
` <version>${version}</version>`,
|
||||
` <name>${artefactId}</name>`,
|
||||
` <name>${artifactId}</name>`,
|
||||
` <description />`,
|
||||
`</project>`
|
||||
].join("\n");
|
||||
@ -84,6 +82,6 @@ export function generateJavaStackFiles(params: {
|
||||
}
|
||||
|
||||
return {
|
||||
"jarFilePath": pathJoin(keycloakThemeBuildingDirPath, "target", `${themeName}-${version}.jar`)
|
||||
"jarFilePath": pathJoin(keycloakThemeBuildingDirPath, "target", `${artifactId}-${version}.jar`)
|
||||
};
|
||||
}
|
||||
|
@ -5,7 +5,6 @@ import { replaceImportsFromStaticInJsCode } from "./replacers/replaceImportsFrom
|
||||
import { replaceImportsInCssCode } from "./replacers/replaceImportsInCssCode";
|
||||
import { generateFtlFilesCodeFactory, pageIds } from "./generateFtl";
|
||||
import { downloadBuiltinKeycloakTheme } from "../download-builtin-keycloak-theme";
|
||||
import * as child_process from "child_process";
|
||||
import { mockTestingResourcesCommonPath, mockTestingResourcesPath, mockTestingSubDirOfPublicDirBasename } from "../mockTestingResourcesPath";
|
||||
import { isInside } from "../tools/isInside";
|
||||
import type { BuildOptions } from "./BuildOptions";
|
||||
@ -53,13 +52,13 @@ export namespace BuildOptionsLike {
|
||||
assert<typeof buildOptions extends BuildOptionsLike ? true : false>();
|
||||
}
|
||||
|
||||
export function generateKeycloakThemeResources(params: {
|
||||
export async function generateKeycloakThemeResources(params: {
|
||||
reactAppBuildDirPath: string;
|
||||
keycloakThemeBuildingDirPath: string;
|
||||
keycloakThemeEmailDirPath: string;
|
||||
keycloakVersion: string;
|
||||
buildOptions: BuildOptionsLike;
|
||||
}): { doBundlesEmailTemplate: boolean } {
|
||||
}): Promise<{ doBundlesEmailTemplate: boolean }> {
|
||||
const { reactAppBuildDirPath, keycloakThemeBuildingDirPath, keycloakThemeEmailDirPath, keycloakVersion, buildOptions } = params;
|
||||
|
||||
const logger = getLogger({ isSilent: buildOptions.isSilent });
|
||||
@ -155,7 +154,7 @@ export function generateKeycloakThemeResources(params: {
|
||||
{
|
||||
const tmpDirPath = pathJoin(themeDirPath, "..", "tmp_xxKdLpdIdLd");
|
||||
|
||||
downloadBuiltinKeycloakTheme({
|
||||
await downloadBuiltinKeycloakTheme({
|
||||
keycloakVersion,
|
||||
"destDirPath": tmpDirPath,
|
||||
isSilent: buildOptions.isSilent
|
||||
@ -190,8 +189,7 @@ export function generateKeycloakThemeResources(params: {
|
||||
);
|
||||
|
||||
fs.writeFileSync(pathJoin(keycloakResourcesWithinPublicDirPath, ".gitignore"), Buffer.from("*", "utf8"));
|
||||
|
||||
child_process.execSync(`rm -r ${tmpDirPath}`);
|
||||
fs.rmSync(tmpDirPath, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
fs.writeFileSync(
|
||||
|
@@ -34,7 +34,7 @@ export function generateStartKeycloakTestingContainer(params: {
         pathJoin(keycloakThemeBuildingDirPath, generateStartKeycloakTestingContainer.basename),
         Buffer.from(
             [
-                "#!/bin/bash",
+                "#!/usr/bin/env bash",
                 "",
                 `docker rm ${containerName} || true`,
                 "",
@@ -4,5 +4,5 @@ export * from "./keycloakify";
 import { main } from "./keycloakify";

 if (require.main === module) {
-    main();
+    main().catch(e => console.error(e));
 }
@ -7,13 +7,16 @@ import * as fs from "fs";
|
||||
import { readBuildOptions } from "./BuildOptions";
|
||||
import { getLogger } from "../tools/logger";
|
||||
import { getCliOptions } from "../tools/cliOptions";
|
||||
import jar from "../tools/jar";
|
||||
import { assert } from "tsafe/assert";
|
||||
import type { Equals } from "tsafe";
|
||||
|
||||
const reactProjectDirPath = process.cwd();
|
||||
|
||||
export const keycloakThemeBuildingDirPath = pathJoin(reactProjectDirPath, "build_keycloak");
|
||||
export const keycloakThemeEmailDirPath = pathJoin(keycloakThemeBuildingDirPath, "..", "keycloak_email");
|
||||
|
||||
export function main() {
|
||||
export async function main() {
|
||||
const { isSilent, hasExternalAssets } = getCliOptions(process.argv.slice(2));
|
||||
const logger = getLogger({ isSilent });
|
||||
logger.log("🔏 Building the keycloak theme...⌚");
|
||||
@ -33,7 +36,7 @@ export function main() {
|
||||
"isSilent": isSilent
|
||||
});
|
||||
|
||||
const { doBundlesEmailTemplate } = generateKeycloakThemeResources({
|
||||
const { doBundlesEmailTemplate } = await generateKeycloakThemeResources({
|
||||
keycloakThemeBuildingDirPath,
|
||||
keycloakThemeEmailDirPath,
|
||||
"reactAppBuildDirPath": pathJoin(reactProjectDirPath, "build"),
|
||||
@ -45,17 +48,34 @@ export function main() {
|
||||
});
|
||||
|
||||
const { jarFilePath } = generateJavaStackFiles({
|
||||
"version": buildOptions.version,
|
||||
keycloakThemeBuildingDirPath,
|
||||
doBundlesEmailTemplate,
|
||||
buildOptions
|
||||
});
|
||||
|
||||
child_process.execSync("mvn package", {
|
||||
"cwd": keycloakThemeBuildingDirPath
|
||||
});
|
||||
switch (buildOptions.bundler) {
|
||||
case "none":
|
||||
logger.log("😱 Skipping bundling step, there will be no jar");
|
||||
break;
|
||||
case "keycloakify":
|
||||
logger.log("🫶 Let keycloakify do its thang");
|
||||
await jar({
|
||||
"rootPath": pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources"),
|
||||
"version": buildOptions.version,
|
||||
"groupId": buildOptions.groupId,
|
||||
"artifactId": buildOptions.artifactId,
|
||||
"targetPath": jarFilePath
|
||||
});
|
||||
break;
|
||||
case "mvn":
|
||||
logger.log("🫙 Run maven to deliver a jar");
|
||||
child_process.execSync("mvn package", { "cwd": keycloakThemeBuildingDirPath });
|
||||
break;
|
||||
default:
|
||||
assert<Equals<typeof buildOptions.bundler, never>>(false);
|
||||
}
|
||||
|
||||
//We want, however, to test in a container running the latest Keycloak version
|
||||
// We want, however, to test in a container running the latest Keycloak version
|
||||
const containerKeycloakVersion = "20.0.1";
|
||||
|
||||
generateStartKeycloakTestingContainer({
|
||||
|
@@ -15,7 +15,8 @@ fs.writeFileSync(
             return {
                 ...packageJsonParsed,
                 "main": packageJsonParsed["main"].replace(/^dist\//, ""),
-                "types": packageJsonParsed["types"].replace(/^dist\//, "")
+                "types": packageJsonParsed["types"].replace(/^dist\//, ""),
+                "bin": Object.fromEntries(Object.entries<string>(packageJsonParsed["bin"]).map(([k, v]) => [k, v.replace(/^dist\//, "")]))
             };
         })(),
         null,
54 src/bin/tools/crc32.ts (Normal file)
@ -0,0 +1,54 @@
|
||||
import { Readable } from "stream";
|
||||
|
||||
const crc32tab = [
|
||||
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
|
||||
0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
|
||||
0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
|
||||
0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
|
||||
0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
|
||||
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
|
||||
0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
|
||||
0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
|
||||
0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
|
||||
0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
|
||||
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
|
||||
0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
|
||||
0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
|
||||
0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
|
||||
0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
|
||||
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
|
||||
0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
|
||||
0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
|
||||
0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
|
||||
0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
|
||||
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
|
||||
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
|
||||
];
|
||||
|
||||
/**
|
||||
*
|
||||
* @param input either a byte stream, a string or a buffer, you want to have the checksum for
|
||||
* @returns a promise for a checksum (uint32)
|
||||
*/
|
||||
export function crc32(input: Readable | String | Buffer): Promise<number> {
|
||||
if (typeof input === "string") {
|
||||
let crc = ~0;
|
||||
for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input.charCodeAt(i)) & 0xff];
|
||||
return Promise.resolve((crc ^ -1) >>> 0);
|
||||
} else if (input instanceof Buffer) {
|
||||
let crc = ~0;
|
||||
for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input[i]) & 0xff];
|
||||
return Promise.resolve((crc ^ -1) >>> 0);
|
||||
} else if (input instanceof Readable) {
|
||||
return new Promise<number>((resolve, reject) => {
|
||||
let crc = ~0;
|
||||
input.on("end", () => resolve((crc ^ -1) >>> 0));
|
||||
input.on("error", e => reject(e));
|
||||
input.on("data", (chunk: Buffer) => {
|
||||
for (let i = 0; i < chunk.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ chunk[i]) & 0xff];
|
||||
});
|
||||
});
|
||||
} else {
|
||||
throw new Error("Unsupported input " + typeof input);
|
||||
}
|
||||
}
|
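A short usage sketch for the new `crc32` helper (not part of the diff); it feeds the same bytes through the three accepted input kinds:

```ts
import { Readable } from "stream";
import { crc32 } from "./crc32";

(async () => {
    const text = "hello world";

    // String, Buffer and Readable inputs over the same bytes resolve to the same uint32 checksum.
    const fromString = await crc32(text);
    const fromBuffer = await crc32(Buffer.from(text, "utf-8"));
    const fromStream = await crc32(Readable.from([Buffer.from(text, "utf-8")]));

    console.log(fromString.toString(16), fromBuffer.toString(16), fromStream.toString(16));
})().catch(e => console.error(e));
```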
61 src/bin/tools/deflate.ts (Normal file)
@ -0,0 +1,61 @@
|
||||
import { PassThrough, Readable, TransformCallback, Writable } from "stream";
|
||||
import { pipeline } from "stream/promises";
|
||||
import { deflateRaw as deflateRawCb, createDeflateRaw } from "zlib";
|
||||
import { promisify } from "util";
|
||||
|
||||
import { crc32 } from "./crc32";
|
||||
import tee from "./tee";
|
||||
|
||||
const deflateRaw = promisify(deflateRawCb);
|
||||
|
||||
/**
|
||||
* A stream transformer that records the number of bytes
|
||||
* passed in its `size` property.
|
||||
*/
|
||||
class ByteCounter extends PassThrough {
|
||||
size: number = 0;
|
||||
_transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback) {
|
||||
if ("length" in chunk) this.size += chunk.length;
|
||||
super._transform(chunk, encoding, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param data buffer containing the data to be compressed
|
||||
* @returns a buffer containing the compressed/deflated data and the crc32 checksum
|
||||
* of the source data
|
||||
*/
|
||||
export async function deflateBuffer(data: Buffer) {
|
||||
const [deflated, checksum] = await Promise.all([deflateRaw(data), crc32(data)]);
|
||||
return { deflated, crc32: checksum };
|
||||
}
|
||||
|
||||
/**
|
||||
* @param input a byte stream, containing data to be compressed
|
||||
* @param sink a method that will accept chunks of compressed data; We don't pass
|
||||
* a writable here, since we don't want the writable stream to be closed after
|
||||
* a single file
|
||||
* @returns a promise, which will resolve with the crc32 checksum and the
|
||||
* compressed size
|
||||
*/
|
||||
export async function deflateStream(input: Readable, sink: (chunk: Buffer) => void) {
|
||||
const deflateWriter = new Writable({
|
||||
write(chunk, _, callback) {
|
||||
sink(chunk);
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
// tee the input stream, so we can compress and calc crc32 in parallel
|
||||
const [rs1, rs2] = tee(input);
|
||||
const byteCounter = new ByteCounter();
|
||||
const [_, crc] = await Promise.all([
|
||||
// pipe input into zip compressor, count the bytes
|
||||
// returned and pass compressed data to the sink
|
||||
pipeline(rs1, createDeflateRaw(), byteCounter, deflateWriter),
|
||||
// calc checksum
|
||||
crc32(rs2)
|
||||
]);
|
||||
|
||||
return { crc32: crc, compressedSize: byteCounter.size };
|
||||
}
|
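A usage sketch for `deflateBuffer` and `deflateStream` (not part of the diff); the input is built in memory so the example is self-contained:

```ts
import { Readable } from "stream";
import { deflateBuffer, deflateStream } from "./deflate";

(async () => {
    const data = Buffer.from("some text to compress", "utf-8");

    // In-memory variant: returns the deflated bytes plus the crc32 of the *uncompressed* data.
    const { deflated, crc32 } = await deflateBuffer(data);
    console.log(deflated.length, crc32);

    // Streaming variant: compressed chunks are handed to the sink callback.
    const chunks: Buffer[] = [];
    const { crc32: streamCrc32, compressedSize } = await deflateStream(Readable.from([data]), chunk => chunks.push(chunk));
    console.log(compressedSize, streamCrc32, Buffer.concat(chunks).length);
})().catch(e => console.error(e));
```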
@ -1,80 +1,289 @@
|
||||
import { basename as pathBasename, join as pathJoin } from "path";
|
||||
import { execSync } from "child_process";
|
||||
import * as fs from "fs";
|
||||
import { dirname as pathDirname, basename as pathBasename, join as pathJoin } from "path";
|
||||
import { createReadStream, createWriteStream, unlinkSync } from "fs";
|
||||
import { stat, mkdir, unlink, readFile, writeFile } from "fs/promises";
|
||||
import { transformCodebase } from "./transformCodebase";
|
||||
import * as crypto from "crypto";
|
||||
import { createHash } from "crypto";
|
||||
import http from "http";
|
||||
import https from "https";
|
||||
import { createInflateRaw } from "zlib";
|
||||
|
||||
/** assert url ends with .zip */
|
||||
export function downloadAndUnzip(params: {
|
||||
import type { Readable } from "stream";
|
||||
|
||||
function hash(s: string) {
|
||||
return createHash("sha256").update(s).digest("hex");
|
||||
}
|
||||
|
||||
async function maybeReadFile(path: string) {
|
||||
try {
|
||||
return await readFile(path, "utf-8");
|
||||
} catch (error) {
|
||||
if ((error as Error & { code: string }).code === "ENOENT") return undefined;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function maybeStat(path: string) {
|
||||
try {
|
||||
return await stat(path);
|
||||
} catch (error) {
|
||||
if ((error as Error & { code: string }).code === "ENOENT") return undefined;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Download a file from `url` to `dir`. Will try to avoid downloading existing
|
||||
* files by using an `{hash(url)}.etag` file. If this file exists, we add an
|
||||
* etag header, so the server can tell us if the file changed and we should re-download
|
||||
* or if our file is up-to-date.
|
||||
*
|
||||
* Warning, this method assumes that the target filename can be extracted from
|
||||
* url, content-disposition headers are ignored.
|
||||
*
|
||||
* If the target directory does not exist, it will be created.
|
||||
*
|
||||
* If the target file exists and is out of date, it will be overwritten.
|
||||
* If the target file exists and there is no etag file, the target file will
|
||||
* be overwritten.
|
||||
*
|
||||
* @param url download url
|
||||
* @param dir target directory
|
||||
* @returns promise for the full path of the downloaded file
|
||||
*/
|
||||
async function download(url: string, dir: string): Promise<string> {
|
||||
await mkdir(dir, { recursive: true });
|
||||
const filename = pathBasename(url);
|
||||
const filepath = pathJoin(dir, filename);
|
||||
// If downloaded file exists already and has an `.etag` companion file,
|
||||
// read the etag from that file. This will avoid re-downloading the file
|
||||
// if it is up to date.
|
||||
const exists = await maybeStat(filepath);
|
||||
const etagFilepath = pathJoin(dir, "_" + hash(url).substring(0, 15) + ".etag");
|
||||
const etag = !exists ? undefined : await maybeReadFile(etagFilepath);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// use inner method to allow following redirects
|
||||
function request(url1: URL) {
|
||||
const headers: Record<string, string> = {};
|
||||
if (etag) headers["If-None-Match"] = etag;
|
||||
(url1.protocol === "https:" ? https : http).get(url1, { headers }, response => {
|
||||
if (response.statusCode === 301 || response.statusCode === 302) {
|
||||
// follow redirects
|
||||
request(new URL(response.headers.location!!));
|
||||
} else if (response.statusCode === 304) {
|
||||
// up-to-date, resolve now
|
||||
resolve(filepath);
|
||||
} else if (response.statusCode !== 200) {
|
||||
reject(new Error(`Request to ${url1} returned status ${response.statusCode}.`));
|
||||
} else {
|
||||
const fp = createWriteStream(filepath, { autoClose: true });
|
||||
fp.on("err", e => {
|
||||
fp.close();
|
||||
unlinkSync(filepath);
|
||||
reject(e);
|
||||
});
|
||||
fp.on("finish", async () => {
|
||||
// when targetfile has been written, write etag file so that
|
||||
// next time around we don't need to re-download
|
||||
const responseEtag = response.headers.etag;
|
||||
if (responseEtag) await writeFile(etagFilepath, responseEtag, "utf-8");
|
||||
resolve(filepath);
|
||||
});
|
||||
response.pipe(fp);
|
||||
}
|
||||
});
|
||||
}
|
||||
request(new URL(url));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef
|
||||
* @type MultiError = Error & { cause: Error[] }
|
||||
*/
|
||||
|
||||
/**
|
||||
* Extract the archive `zipFile` into the directory `dir`. If `archiveDir` is given,
|
||||
* only that directory will be extracted, stripping the given path components.
|
||||
*
|
||||
* If dir does not exist, it will be created.
|
||||
*
|
||||
* If any archive file exists, it will be overwritten.
|
||||
*
|
||||
* Will unzip using all available nodejs worker threads.
|
||||
*
|
||||
* Will try to clean up extracted files on failure.
|
||||
*
|
||||
* If unpacking fails, will either throw an regular error, or
|
||||
* possibly an `MultiError`, which contains a `cause` field with
|
||||
* a number of root cause errors.
|
||||
*
|
||||
* Warning this method is not optimized for continuous reading of the zip
|
||||
* archive, but is a trade-off between simplicity and allowing extraction
|
||||
* of a single directory from the archive.
|
||||
*
|
||||
* @param zipFile the file to unzip
|
||||
* @param dir the target directory
|
||||
* @param archiveDir if given, unpack only files from this archive directory
|
||||
* @throws {MultiError} error
|
||||
* @returns Promise for a list of full file paths pointing to actually extracted files
|
||||
*/
|
||||
async function unzip(zipFile: string, dir: string, archiveDir?: string): Promise<string[]> {
|
||||
await mkdir(dir, { recursive: true });
|
||||
const promises: Promise<string>[] = [];
|
||||
|
||||
// Iterate over all files in the zip, skip files which are not in archiveDir,
|
||||
// if given.
|
||||
for await (const record of iterateZipArchive(zipFile)) {
|
||||
const { path: recordPath, createReadStream: createRecordReadStream } = record;
|
||||
const filePath = pathJoin(dir, recordPath);
|
||||
const parent = pathDirname(filePath);
|
||||
if (archiveDir && !recordPath.startsWith(archiveDir)) continue;
|
||||
promises.push(
|
||||
new Promise<string>(async (resolve, reject) => {
|
||||
await mkdir(parent, { recursive: true });
|
||||
// Pull the file out of the archive, write it to the target directory
|
||||
const input = createRecordReadStream();
|
||||
const output = createWriteStream(filePath);
|
||||
output.on("error", e => reject(Object.assign(e, { filePath })));
|
||||
output.on("finish", () => resolve(filePath));
|
||||
input.pipe(output);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Wait until _all_ files are either extracted or failed
|
||||
const results = await Promise.allSettled(promises);
|
||||
const success = results.filter(r => r.status === "fulfilled").map(r => (r as PromiseFulfilledResult<string>).value);
|
||||
const failure = results.filter(r => r.status === "rejected").map(r => (r as PromiseRejectedResult).reason);
|
||||
|
||||
// If any extraction failed, try to clean up, then throw a MultiError,
|
||||
// which has a `cause` field, containing a list of root cause errors.
|
||||
if (failure.length) {
|
||||
await Promise.all(success.map(path => unlink(path)));
|
||||
await Promise.all(failure.map(e => e && e.path && unlink(e.path as string)));
|
||||
const e = new Error("Failed to extract: " + failure.map(e => e.message).join(";"));
|
||||
(e as any).cause = failure;
|
||||
throw e;
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param file file to read
|
||||
* @param start first byte to read
|
||||
* @param end last byte to read
|
||||
* @returns Promise of a buffer of read bytes
|
||||
*/
|
||||
async function readFileChunk(file: string, start: number, end: number): Promise<Buffer> {
|
||||
const chunks: Buffer[] = [];
|
||||
return new Promise((resolve, reject) => {
|
||||
const stream = createReadStream(file, { start, end });
|
||||
stream.on("error", e => reject(e));
|
||||
stream.on("end", () => resolve(Buffer.concat(chunks)));
|
||||
stream.on("data", chunk => chunks.push(chunk as Buffer));
|
||||
});
|
||||
}
|
||||
|
||||
type ZipRecord = {
|
||||
path: string;
|
||||
createReadStream: () => Readable;
|
||||
compressionMethod: "deflate" | undefined;
|
||||
};
|
||||
|
||||
type ZipRecordGenerator = AsyncGenerator<ZipRecord, void, unknown>;
|
||||
|
||||
/**
|
||||
* Iterate over all records of a zipfile, and yield a ZipRecord.
|
||||
* Use `record.createReadStream()` to actually read the file.
|
||||
*
|
||||
* Warning this method will only work with single-disk zip files.
|
||||
* Warning this method may fail if the zip archive has a crazy amount
|
||||
* of files and the central directory is not fully contained within the
|
||||
* last 65k bytes of the zip file.
|
||||
*
|
||||
* @param zipFile
|
||||
* @returns AsyncGenerator which will yield ZipRecords
|
||||
*/
|
||||
async function* iterateZipArchive(zipFile: string): ZipRecordGenerator {
|
||||
// Need to know zip file size before we can do anything else
|
||||
const { size } = await stat(zipFile);
|
||||
const chunkSize = 65_535 + 22 + 1; // max comment size + end header size + wiggle
|
||||
// Read last ~65k bytes. Zip files have a comment of up to 65_535 bytes at the very end,
|
||||
// before that comes the zip central directory end header.
|
||||
let chunk = await readFileChunk(zipFile, size - chunkSize, size);
|
||||
const unread = size - chunk.length;
|
||||
let i = chunk.length - 4;
|
||||
let found = false;
|
||||
// Find central directory end header, reading backwards from the end
|
||||
while (!found && i-- > 0) if (chunk[i] === 0x50 && chunk.readUInt32LE(i) === 0x06054b50) found = true;
|
||||
if (!found) throw new Error("Not a zip file");
|
||||
// This method will fail on a multi-disk zip, so bail early.
|
||||
if (chunk.readUInt16LE(i + 4) !== 0) throw new Error("Multi-disk zip not supported");
|
||||
let nFiles = chunk.readUint16LE(i + 10);
|
||||
// Get the position of the central directory
|
||||
const directorySize = chunk.readUint32LE(i + 12);
|
||||
const directoryOffset = chunk.readUint32LE(i + 16);
|
||||
if (directoryOffset === 0xffff_ffff) throw new Error("zip64 not supported");
|
||||
if (directoryOffset > size) throw new Error(`Central directory offset ${directoryOffset} is outside file`);
|
||||
i = directoryOffset - unread;
|
||||
// If i < 0, it means that the central directory is not contained within `chunk`
|
||||
if (i < 0) {
|
||||
chunk = await readFileChunk(zipFile, directoryOffset, directoryOffset + directorySize);
|
||||
i = 0;
|
||||
}
|
||||
// Now iterate the central directory records, yield an `ZipRecord` for every entry
|
||||
while (nFiles-- > 0) {
|
||||
// Check for marker bytes
|
||||
if (chunk.readUInt32LE(i) !== 0x02014b50) throw new Error("No central directory record at position " + (unread + i));
|
||||
const compressionMethod = ({ 8: "deflate" } as const)[chunk.readUint16LE(i + 10)];
|
||||
const compressedFileSize = chunk.readUint32LE(i + 20);
|
||||
const filenameLength = chunk.readUint16LE(i + 28);
|
||||
const extraLength = chunk.readUint16LE(i + 30);
|
||||
const commentLength = chunk.readUint16LE(i + 32);
|
||||
// Start of the actual content byte stream is after the 'local' record header,
|
||||
// which is 30 bytes long plus filename and extra field
|
||||
const start = chunk.readUint32LE(i + 42) + 30 + filenameLength + extraLength;
|
||||
const end = start + compressedFileSize;
|
||||
const filename = chunk.slice(i + 46, i + 46 + filenameLength).toString("utf-8");
|
||||
const createRecordReadStream = () => {
|
||||
const input = createReadStream(zipFile, { start, end });
|
||||
if (compressionMethod === "deflate") {
|
||||
const inflate = createInflateRaw();
|
||||
input.pipe(inflate);
|
||||
return inflate;
|
||||
}
|
||||
return input;
|
||||
};
|
||||
if (end > start) yield { path: filename, createReadStream: createRecordReadStream, compressionMethod };
|
||||
// advance pointer to next central directory entry
|
||||
i += 46 + filenameLength + extraLength + commentLength;
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadAndUnzip({
|
||||
url,
|
||||
destDirPath,
|
||||
pathOfDirToExtractInArchive,
|
||||
cacheDirPath
|
||||
}: {
|
||||
isSilent: boolean;
|
||||
url: string;
|
||||
destDirPath: string;
|
||||
pathOfDirToExtractInArchive?: string;
|
||||
cacheDirPath: string;
|
||||
}) {
|
||||
const { url, destDirPath, pathOfDirToExtractInArchive, cacheDirPath } = params;
|
||||
const downloadHash = hash(JSON.stringify({ url, pathOfDirToExtractInArchive })).substring(0, 15);
|
||||
const extractDirPath = pathJoin(cacheDirPath, `_${downloadHash}`);
|
||||
|
||||
const extractDirPath = pathJoin(
|
||||
cacheDirPath,
|
||||
`_${crypto.createHash("sha256").update(JSON.stringify({ url, pathOfDirToExtractInArchive })).digest("hex").substring(0, 15)}`
|
||||
);
|
||||
const zipFilepath = await download(url, cacheDirPath);
|
||||
const zipMtime = (await stat(zipFilepath)).mtimeMs;
|
||||
const unzipMtime = (await maybeStat(extractDirPath))?.mtimeMs;
|
||||
|
||||
fs.mkdirSync(cacheDirPath, { "recursive": true });
|
||||
if (!unzipMtime || zipMtime > unzipMtime) await unzip(zipFilepath, extractDirPath, pathOfDirToExtractInArchive);
|
||||
|
||||
const { readIsSuccessByExtractDirPath, writeIsSuccessByExtractDirPath } = (() => {
|
||||
const filePath = pathJoin(cacheDirPath, "isSuccessByExtractDirPath.json");
|
||||
|
||||
type IsSuccessByExtractDirPath = Record<string, boolean | undefined>;
|
||||
|
||||
function readIsSuccessByExtractDirPath(): IsSuccessByExtractDirPath {
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return {};
|
||||
}
|
||||
|
||||
return JSON.parse(fs.readFileSync(filePath).toString("utf8"));
|
||||
}
|
||||
|
||||
function writeIsSuccessByExtractDirPath(isSuccessByExtractDirPath: IsSuccessByExtractDirPath): void {
|
||||
fs.writeFileSync(filePath, Buffer.from(JSON.stringify(isSuccessByExtractDirPath, null, 2), "utf8"));
|
||||
}
|
||||
|
||||
return { readIsSuccessByExtractDirPath, writeIsSuccessByExtractDirPath };
|
||||
})();
|
||||
|
||||
downloadAndUnzip: {
|
||||
const isSuccessByExtractDirPath = readIsSuccessByExtractDirPath();
|
||||
|
||||
if (isSuccessByExtractDirPath[extractDirPath]) {
|
||||
break downloadAndUnzip;
|
||||
}
|
||||
|
||||
writeIsSuccessByExtractDirPath({
|
||||
...isSuccessByExtractDirPath,
|
||||
[extractDirPath]: false
|
||||
});
|
||||
|
||||
fs.rmSync(extractDirPath, { "recursive": true, "force": true });
|
||||
|
||||
fs.mkdirSync(extractDirPath);
|
||||
|
||||
const zipFileBasename = pathBasename(url);
|
||||
|
||||
execSync(`curl -L ${url} -o ${zipFileBasename} ${params.isSilent ? "-s" : ""}`, { "cwd": extractDirPath });
|
||||
|
||||
execSync(`unzip -o ${zipFileBasename}${pathOfDirToExtractInArchive === undefined ? "" : ` "${pathOfDirToExtractInArchive}/**/*"`}`, {
|
||||
"cwd": extractDirPath
|
||||
});
|
||||
|
||||
fs.rmSync(pathJoin(extractDirPath, zipFileBasename), { "recursive": true, "force": true });
|
||||
|
||||
writeIsSuccessByExtractDirPath({
|
||||
...isSuccessByExtractDirPath,
|
||||
[extractDirPath]: true
|
||||
});
|
||||
}
|
||||
|
||||
transformCodebase({
|
||||
"srcDirPath": pathOfDirToExtractInArchive === undefined ? extractDirPath : pathJoin(extractDirPath, pathOfDirToExtractInArchive),
|
||||
destDirPath
|
||||
});
|
||||
const srcDirPath = pathOfDirToExtractInArchive === undefined ? extractDirPath : pathJoin(extractDirPath, pathOfDirToExtractInArchive);
|
||||
transformCodebase({ srcDirPath, destDirPath });
|
||||
}
|
||||
|
@@ -1,10 +1,17 @@
 import { getProjectRoot } from "./getProjectRoot";
 import { join as pathJoin } from "path";
-import * as child_process from "child_process";
-import * as fs from "fs";
+import { constants } from "fs";
+import { chmod, stat } from "fs/promises";

-Object.entries<string>(JSON.parse(fs.readFileSync(pathJoin(getProjectRoot(), "package.json")).toString("utf8"))["bin"]).forEach(([, scriptPath]) =>
-    child_process.execSync(`chmod +x ${scriptPath}`, {
-        "cwd": getProjectRoot()
-    })
-);
+(async () => {
+    const { bin } = await import(pathJoin(getProjectRoot(), "package.json"));
+
+    const promises = Object.values<string>(bin).map(async scriptPath => {
+        const fullPath = pathJoin(getProjectRoot(), scriptPath);
+        const oldMode = (await stat(fullPath)).mode;
+        const newMode = oldMode | constants.S_IXUSR | constants.S_IXGRP | constants.S_IXOTH;
+        await chmod(fullPath, newMode);
+    });
+
+    await Promise.all(promises);
+})();
102 src/bin/tools/jar.ts (Normal file)
@ -0,0 +1,102 @@
|
||||
import { Readable, Transform } from "stream";
|
||||
import { dirname, relative, sep } from "path";
|
||||
import { createWriteStream } from "fs";
|
||||
|
||||
import walk from "./walk";
|
||||
import type { ZipSource } from "./zip";
|
||||
import zip from "./zip";
|
||||
import { mkdir } from "fs/promises";
|
||||
|
||||
/** Trim leading whitespace from every line */
|
||||
const trimIndent = (s: string) => s.replace(/(\n)\s+/g, "$1");
|
||||
|
||||
type JarArgs = {
|
||||
rootPath: string;
|
||||
targetPath: string;
|
||||
groupId: string;
|
||||
artifactId: string;
|
||||
version: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a jar archive, using the resources found at `rootPath` (a directory) and write the
|
||||
* archive to `targetPath` (a file). Use `groupId`, `artifactId` and `version` to define
|
||||
* the contents of the pom.properties file which is going to be added to the archive.
|
||||
*/
|
||||
export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
|
||||
const manifest: ZipSource = {
|
||||
path: "META-INF/MANIFEST.MF",
|
||||
data: Buffer.from(
|
||||
trimIndent(
|
||||
`Manifest-Version: 1.0
|
||||
Archiver-Version: Plexus Archiver
|
||||
Created-By: Keycloakify
|
||||
Built-By: unknown
|
||||
Build-Jdk: 19.0.0`
|
||||
)
|
||||
)
|
||||
};
|
||||
|
||||
const pomProps: ZipSource = {
|
||||
path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
|
||||
data: Buffer.from(
|
||||
trimIndent(
|
||||
`# Generated by keycloakify
|
||||
# ${new Date()}
|
||||
artifactId=${artifactId}
|
||||
groupId=${groupId}
|
||||
version=${version}`
|
||||
)
|
||||
)
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert every path entry to a ZipSource record, and when all records are
|
||||
* processed, append records for MANIFEST.MF and pom.properties
|
||||
*/
|
||||
const pathToRecord = () =>
|
||||
new Transform({
|
||||
objectMode: true,
|
||||
transform: function (fsPath, _, cb) {
|
||||
const path = relative(rootPath, fsPath).split(sep).join("/");
|
||||
this.push({ path, fsPath });
|
||||
cb();
|
||||
},
|
||||
final: function () {
|
||||
this.push(manifest);
|
||||
this.push(pomProps);
|
||||
this.push(null);
|
||||
}
|
||||
});
|
||||
|
||||
await mkdir(dirname(targetPath), { recursive: true });
|
||||
|
||||
// Create an async pipeline, wait until everything is fully processed
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
// walk all files in `rootPath` recursively
|
||||
Readable.from(walk(rootPath))
|
||||
// transform every path into a ZipSource object
|
||||
.pipe(pathToRecord())
|
||||
// let the zip lib convert all ZipSource objects into a byte stream
|
||||
.pipe(zip())
|
||||
// write that byte stream to targetPath
|
||||
.pipe(createWriteStream(targetPath, { encoding: "binary" }))
|
||||
.on("finish", () => resolve())
|
||||
.on("error", e => reject(e));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Standalone usage, call e.g. `ts-node jar.ts dirWithSources some-jar.jar`
|
||||
*/
|
||||
if (require.main === module) {
|
||||
const main = () =>
|
||||
jar({
|
||||
rootPath: process.argv[2],
|
||||
targetPath: process.argv[3],
|
||||
artifactId: process.env.ARTIFACT_ID ?? "artifact",
|
||||
groupId: process.env.GROUP_ID ?? "group",
|
||||
version: process.env.VERSION ?? "1.0.0"
|
||||
});
|
||||
main().catch(e => console.error(e));
|
||||
}
|
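Besides the standalone CLI usage shown above, the function can be called directly. A hedged sketch (paths and Maven coordinates are illustrative, not taken from the diff):

```ts
import jar from "./jar";

jar({
    rootPath: "build_keycloak/src/main/resources",                          // directory to pack (illustrative)
    targetPath: "build_keycloak/target/my-theme-keycloak-theme-1.0.0.jar",  // output .jar (illustrative)
    groupId: "dev.example.keycloak",
    artifactId: "my-theme-keycloak-theme",
    version: "1.0.0"
}).catch(e => console.error(e));
```

The groupId/artifactId/version only end up in the generated `META-INF/maven/.../pom.properties` entry; the archive itself is built from the files found under `rootPath` plus the generated MANIFEST.MF.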
37 src/bin/tools/tee.ts (Normal file)

@@ -0,0 +1,37 @@
import { PassThrough, Readable } from "stream";

export default function tee(input: Readable) {
    const a = new PassThrough();
    const b = new PassThrough();

    let aFull = false;
    let bFull = false;

    a.on("drain", () => {
        aFull = false;
        if (!aFull && !bFull) input.resume();
    });
    b.on("drain", () => {
        bFull = false;
        if (!aFull && !bFull) input.resume();
    });

    input.on("error", e => {
        a.emit("error", e);
        b.emit("error", e);
    });

    input.on("data", chunk => {
        aFull = !a.write(chunk);
        bFull = !b.write(chunk);

        if (aFull || bFull) input.pause();
    });

    input.on("end", () => {
        a.end();
        b.end();
    });

    return [a, b] as const;
}
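A self-contained sketch of how `tee` is meant to be used (this mirrors what `deflateStream` does: one branch is measured, the other is checksummed); not part of the diff:

```ts
import { Readable } from "stream";
import tee from "./tee";
import { crc32 } from "./crc32";

(async () => {
    // Duplicate one source stream so two consumers can read the same bytes independently.
    const [a, b] = tee(Readable.from([Buffer.from("some example payload", "utf-8")]));

    const [byteCount, checksum] = await Promise.all([
        (async () => {
            let n = 0;
            for await (const chunk of a) n += (chunk as Buffer).length;
            return n;
        })(),
        crc32(b)
    ]);

    console.log(byteCount, checksum);
})().catch(e => console.error(e));
```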
19 src/bin/tools/walk.ts (Normal file)

@@ -0,0 +1,19 @@
import { readdir } from "fs/promises";
import { resolve } from "path";

/**
 * Asynchronously and recursively walk a directory tree, yielding every file and directory
 * found
 *
 * @param root the starting directory
 * @returns AsyncGenerator
 */
export default async function* walk(root: string): AsyncGenerator<string, void, void> {
    for (const entry of await readdir(root, { withFileTypes: true })) {
        const absolutePath = resolve(root, entry.name);
        if (entry.isDirectory()) {
            yield absolutePath;
            yield* walk(absolutePath);
        } else yield absolutePath;
    }
}
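A usage sketch (not part of the diff): listing everything under the current working directory.

```ts
import walk from "./walk";

(async () => {
    // walk() yields directories as well as files, depth-first.
    for await (const path of walk(process.cwd())) {
        console.log(path);
    }
})().catch(e => console.error(e));
```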
246 src/bin/tools/zip.ts (Normal file)
@ -0,0 +1,246 @@
|
||||
import { Transform, TransformOptions } from "stream";
|
||||
import { createReadStream } from "fs";
|
||||
import { stat } from "fs/promises";
|
||||
import { Blob } from "buffer";
|
||||
|
||||
import { deflateBuffer, deflateStream } from "./deflate";
|
||||
|
||||
/**
|
||||
* Zip source
|
||||
* @property filename the name of the entry in the archive
|
||||
* @property path of the source file, if the source is an actual file
|
||||
* @property data the actual data buffer, if the source is constructed in-memory
|
||||
*/
|
||||
export type ZipSource = { path: string } & ({ fsPath: string } | { data: Buffer });
|
||||
|
||||
export type ZipRecord = {
|
||||
path: string;
|
||||
compression: "deflate" | undefined;
|
||||
uncompressedSize: number;
|
||||
compressedSize?: number;
|
||||
crc32?: number;
|
||||
offset?: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* @returns the actual byte size of a string
|
||||
*/
|
||||
function utf8size(s: string) {
|
||||
return new Blob([s]).size;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param record
|
||||
* @returns a buffer representing a Zip local header
|
||||
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Local_file_header
|
||||
*/
|
||||
function localHeader(record: ZipRecord) {
|
||||
const { path, compression, uncompressedSize } = record;
|
||||
const filenameSize = utf8size(path);
|
||||
const buf = Buffer.alloc(30 + filenameSize);
|
||||
|
||||
buf.writeUInt32LE(0x04_03_4b_50, 0); // local header signature
|
||||
buf.writeUInt16LE(10, 4); // min version
|
||||
// we write 0x08 because crc and compressed size are unknown at this point
|
||||
buf.writeUInt16LE(0x08, 6); // general purpose bit flag
|
||||
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 8);
|
||||
buf.writeUInt16LE(0, 10); // modified time
|
||||
buf.writeUInt16LE(0, 12); // modified date
|
||||
buf.writeUInt32LE(0, 14); // crc unknown
|
||||
buf.writeUInt32LE(0, 18); // compressed size unknown
|
||||
buf.writeUInt32LE(uncompressedSize, 22);
|
||||
buf.writeUInt16LE(filenameSize, 26);
|
||||
buf.writeUInt16LE(0, 28); // extra field length
|
||||
buf.write(path, 30, "utf-8");
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param record
|
||||
* @returns a buffer representing a Zip central header
|
||||
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header
|
||||
*/
|
||||
function centralHeader(record: ZipRecord) {
|
||||
const { path, compression, crc32, compressedSize, uncompressedSize, offset } = record;
|
||||
const filenameSize = utf8size(path);
|
||||
const buf = Buffer.alloc(46 + filenameSize);
|
||||
const isFile = !path.endsWith("/");
|
||||
|
||||
if (typeof offset === "undefined") throw new Error("Illegal argument");
|
||||
|
||||
// we don't want to deal with possibly messed up file or directory
|
||||
// permissions, so we ignore the original permissions
|
||||
const externalAttr = isFile ? 0x81a40000 : 0x41ed0000;
|
||||
|
||||
buf.writeUInt32LE(0x0201_4b50, 0); // central header signature
|
||||
buf.writeUInt16LE(10, 4); // version
|
||||
buf.writeUInt16LE(10, 6); // min version
|
||||
buf.writeUInt16LE(0, 8); // general purpose bit flag
|
||||
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 10);
|
||||
buf.writeUInt16LE(0, 12); // modified time
|
||||
buf.writeUInt16LE(0, 14); // modified date
|
||||
buf.writeUInt32LE(crc32 || 0, 16);
|
||||
buf.writeUInt32LE(compressedSize || 0, 20);
|
||||
buf.writeUInt32LE(uncompressedSize, 24);
|
||||
buf.writeUInt16LE(filenameSize, 28);
|
||||
buf.writeUInt16LE(0, 30); // extra field length
|
||||
buf.writeUInt16LE(0, 32); // comment field length
|
||||
buf.writeUInt16LE(0, 34); // disk number
|
||||
buf.writeUInt16LE(0, 36); // internal
|
||||
buf.writeUInt32LE(externalAttr, 38); // external
|
||||
buf.writeUInt32LE(offset, 42); // offset where file starts
|
||||
buf.write(path, 46, "utf-8");
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns a buffer representing an Zip End-Of-Central-Directory block
|
||||
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#End_of_central_directory_record_(EOCD)
|
||||
*/
|
||||
function eocd({ offset, cdSize, nRecords }: { offset: number; cdSize: number; nRecords: number }) {
|
||||
const buf = Buffer.alloc(22);
|
||||
buf.writeUint32LE(0x06054b50, 0); // eocd signature
|
||||
buf.writeUInt16LE(0, 4); // disc number
|
||||
buf.writeUint16LE(0, 6); // disc where central directory starts
|
||||
buf.writeUint16LE(nRecords, 8); // records on this disc
|
||||
buf.writeUInt16LE(nRecords, 10); // records total
|
||||
buf.writeUInt32LE(cdSize, 12); // byte size of cd
|
||||
buf.writeUInt32LE(offset, 16); // cd offset
|
||||
buf.writeUint16LE(0, 20); // comment length
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns a stream Transform, which reads a stream of ZipRecords and
|
||||
* writes a bytestream
|
||||
*/
|
||||
export default function zip() {
|
||||
/**
|
||||
* This is called when the input stream of ZipSource items is finished.
|
||||
* Will write central directory and end-of-central-directory blocks.
|
||||
*/
|
||||
const final = () => {
|
||||
// write central directory
|
||||
let cdSize = 0;
|
||||
for (const record of records) {
|
||||
const head = centralHeader(record);
|
||||
zipTransform.push(head);
|
||||
cdSize += head.length;
|
||||
}
|
||||
|
||||
// write end-of-central-directory
|
||||
zipTransform.push(eocd({ offset, cdSize, nRecords: records.length }));
|
||||
// signal stream end
|
||||
zipTransform.push(null);
|
||||
};
|
||||
|
||||
/**
|
||||
* Write a directory entry to the archive
|
||||
* @param path
|
||||
*/
|
||||
const writeDir = async (path: string) => {
|
||||
const record: ZipRecord = {
|
||||
path: path + "/",
|
||||
offset,
|
||||
compression: undefined,
|
||||
uncompressedSize: 0
|
||||
};
|
||||
const head = localHeader(record);
|
||||
zipTransform.push(head);
|
||||
records.push(record);
|
||||
offset += head.length;
|
||||
};
|
||||
|
||||
/**
|
||||
* Write a file entry to the archive
|
||||
* @param archivePath path of the file in archive
|
||||
* @param fsPath path to file on filesystem
|
||||
* @param size of the actual, uncompressed, file
|
||||
*/
|
||||
const writeFile = async (archivePath: string, fsPath: string, size: number) => {
|
||||
const record: ZipRecord = {
|
||||
path: archivePath,
|
||||
offset,
|
||||
compression: "deflate",
|
||||
uncompressedSize: size
|
||||
};
|
||||
const head = localHeader(record);
|
||||
zipTransform.push(head);
|
||||
|
||||
const { crc32, compressedSize } = await deflateStream(createReadStream(fsPath), chunk => zipTransform.push(chunk));
|
||||
|
||||
record.crc32 = crc32;
|
||||
record.compressedSize = compressedSize;
|
||||
records.push(record);
|
||||
offset += head.length + compressedSize;
|
||||
};
|
||||
|
||||
/**
|
||||
* Write archive record based on filesystem file or directory
|
||||
* @param archivePath path of item in archive
|
||||
* @param fsPath path to item on filesystem
|
||||
*/
|
||||
const writeFromPath = async (archivePath: string, fsPath: string) => {
|
||||
const fileStats = await stat(fsPath);
|
||||
fileStats.isDirectory() ? await writeDir(archivePath) /**/ : await writeFile(archivePath, fsPath, fileStats.size) /**/;
|
||||
};
|
||||
|
||||
/**
|
||||
* Write archive record based on data in a buffer
|
||||
* @param path
|
||||
* @param data
|
||||
*/
|
||||
const writeFromBuffer = async (path: string, data: Buffer) => {
|
||||
const { deflated, crc32 } = await deflateBuffer(data);
|
||||
const record: ZipRecord = {
|
||||
path,
|
||||
compression: "deflate",
|
||||
crc32,
|
||||
uncompressedSize: data.length,
|
||||
compressedSize: deflated.length,
|
||||
offset
|
||||
};
|
||||
const head = localHeader(record);
|
||||
zipTransform.push(head);
|
||||
zipTransform.push(deflated);
|
||||
records.push(record);
|
||||
offset += head.length + deflated.length;
|
||||
};
|
||||
|
||||
/**
|
||||
* Write an archive record
|
||||
* @param source
|
||||
*/
|
||||
const writeRecord = async (source: ZipSource) => {
|
||||
if ("fsPath" in source) await writeFromPath(source.path, source.fsPath);
|
||||
else if ("data" in source) await writeFromBuffer(source.path, source.data);
|
||||
else throw new Error("Illegal argument " + typeof source + " " + JSON.stringify(source));
|
||||
};
|
||||
|
||||
/**
|
||||
* The actual stream transform function
|
||||
* @param source
|
||||
* @param _ encoding, ignored
|
||||
* @param cb
|
||||
*/
|
||||
const transform: TransformOptions["transform"] = async (source: ZipSource, _, cb) => {
|
||||
await writeRecord(source);
|
||||
cb();
|
||||
};
|
||||
|
||||
/** offset and records keep local state during processing */
|
||||
let offset = 0;
|
||||
const records: ZipRecord[] = [];
|
||||
|
||||
const zipTransform = new Transform({
|
||||
readableObjectMode: false,
|
||||
writableObjectMode: true,
|
||||
transform,
|
||||
final
|
||||
});
|
||||
|
||||
return zipTransform;
|
||||
}
|
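A self-contained sketch of the `zip()` transform (not part of the diff): ZipSource objects go in, a zip byte stream comes out, mirroring how `jar.ts` wires it up.

```ts
import { Readable } from "stream";
import { createWriteStream } from "fs";
import zip, { ZipSource } from "./zip";

// Two in-memory entries; a file on disk would instead be passed as
// { path: "name-in-archive.bin", fsPath: "/path/on/disk.bin" }.
const sources: ZipSource[] = [
    { path: "hello.txt", data: Buffer.from("hello", "utf-8") },
    { path: "nested/readme.txt", data: Buffer.from("a nested entry", "utf-8") }
];

Readable.from(sources)
    .pipe(zip()) // consumes ZipSource objects, emits the archive as bytes
    .pipe(createWriteStream("example.zip", { encoding: "binary" }))
    .on("finish", () => console.log("wrote example.zip"))
    .on("error", e => console.error(e));
```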
15 yarn.lock
@ -1639,15 +1639,15 @@ please-upgrade-node@^3.2.0:
|
||||
dependencies:
|
||||
semver-compare "^1.0.0"
|
||||
|
||||
powerhooks@^0.21.0:
|
||||
version "0.21.0"
|
||||
resolved "https://registry.yarnpkg.com/powerhooks/-/powerhooks-0.21.0.tgz#bafe67c2ce8c4c38cd26dec08de27fdf0d81da70"
|
||||
integrity sha512-O9JG79YKv3yJ6Tkcx+eNWUC/rUKPCFUBkDIDbnoKM29P0+8VAx9pjp/C9eE2qnKNXr5azgnGtEoyLzGB/UKorQ==
|
||||
powerhooks@^0.26.1:
|
||||
version "0.26.1"
|
||||
resolved "https://registry.yarnpkg.com/powerhooks/-/powerhooks-0.26.1.tgz#4497cb570af7de68a2120c6afc8326524a4a64a0"
|
||||
integrity sha512-jaT1lx6zjB3NkBQDco/aKC0pxm5au0qi1pgWD9Gu1b3f8pwMclP7ToyGyhrPp/B6zFGNIpGDbE9WiXqEQ8ER7g==
|
||||
dependencies:
|
||||
evt "^2.4.13"
|
||||
memoizee "^0.4.15"
|
||||
resize-observer-polyfill "^1.5.1"
|
||||
tsafe "^1.4.1"
|
||||
tsafe "^1.4.3"
|
||||
|
||||
prettier@^2.3.0:
|
||||
version "2.7.1"
|
||||
@ -2002,6 +2002,11 @@ tsafe@^1.4.1:
|
||||
resolved "https://registry.yarnpkg.com/tsafe/-/tsafe-1.4.1.tgz#59cdad8ac41babf88e56dcd1a683ae2fb145d059"
|
||||
integrity sha512-3IDBalvf6SyvHFS14UiwCWzqdSdo+Q0k2J7DZyJYaHW/iraW9DJpaBKDJpry3yQs3o/t/A+oGaRW3iVt2lKxzA==
|
||||
|
||||
tsafe@^1.4.3:
|
||||
version "1.4.3"
|
||||
resolved "https://registry.yarnpkg.com/tsafe/-/tsafe-1.4.3.tgz#a98ce83616f0d9c01e3c6167a2ead45ba455b2ae"
|
||||
integrity sha512-KjCdgjIqsbKW9oeJGSMVC23jhWm/VXJwkaZ7jffo/WaTioLGTHJqliHe9dECEVzIACNVNs/fwtKwU8wWK4jY4g==
|
||||
|
||||
tslib@^2.1.0:
|
||||
version "2.4.0"
|
||||
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
|
||||
|