Compare commits
174 Commits
v6.8.2
...
v7.0.0-rc.
Author | SHA1 | Date | |
---|---|---|---|
2b6c991190 | |||
26020ba8bb | |||
b573bc20b5 | |||
210dbfa265 | |||
b37cac93ff | |||
eea953efb6 | |||
7ad9d7b291 | |||
20937c4f72 | |||
dbbfa07639 | |||
9e1a4cad5c | |||
02bbedcfca | |||
cd70d90914 | |||
819f297de8 | |||
0608adde89 | |||
ad7bcf4669 | |||
2eccc86e83 | |||
16d18f23a1 | |||
5631ae1b6c | |||
5fb29992f6 | |||
910d633ac2 | |||
32f8380e56 | |||
43e4dd6bb6 | |||
4f0b1688db | |||
9e75ee09bb | |||
9ae8822e00 | |||
babffd1fe6 | |||
5615d62032 | |||
4b89d15c1e | |||
815f510d5f | |||
199ba193be | |||
4ae9bd3f9a | |||
1c9cf639ea | |||
0040464ca1 | |||
79997efbb6 | |||
0e42009798 | |||
93fdcb8739 | |||
aca926e202 | |||
9941027b10 | |||
9104de4290 | |||
5dc692809c | |||
8dc1d1bd21 | |||
fe588485a9 | |||
19ef1d7025 | |||
62523a8662 | |||
6e97665e2e | |||
4988680353 | |||
c5de5c20c7 | |||
1a0fee1aa2 | |||
06a44603cd | |||
e48459762e | |||
235ebeae97 | |||
dfe909606e | |||
6fd0c7726c | |||
819e045811 | |||
1ba780598d | |||
aeb0cb3110 | |||
88923838c5 | |||
df9f6fd7fd | |||
98e46d6ac9 | |||
daff614fb4 | |||
5ea324c7f2 | |||
23fedbf94a | |||
593d66d8d6 | |||
851dcd5bf7 | |||
2e919681ae | |||
5da68cd48c | |||
27fdaeff46 | |||
53c0079656 | |||
93780b77e0 | |||
b712ed0421 | |||
ee96f1b345 | |||
d13464df3d | |||
6bde2e4d96 | |||
0a4953c020 | |||
96c488880c | |||
7e0adf3f66 | |||
09f716440a | |||
2251c84171 | |||
5cfe78dcd1 | |||
6a48325132 | |||
294be0a79a | |||
c94b264b44 | |||
7220c4e3e3 | |||
5aadeba2ec | |||
0f47a5b6ba | |||
36f32d28f2 | |||
6d69ccf229 | |||
37073b42be | |||
837501c948 | |||
b300966fa8 | |||
730eb06c84 | |||
aca8d3f4b7 | |||
b5b3af4659 | |||
6cd231426d | |||
0c7cd1cd75 | |||
2425704ead | |||
4e22159206 | |||
52cf1ba02c | |||
516e84182f | |||
a3a9853e18 | |||
08e26600fd | |||
7793c2c6ba | |||
9e826d16dd | |||
80618bbd9c | |||
38ad47ea75 | |||
45ed359bef | |||
fcc26c3e7a | |||
d4ff6b1f40 | |||
557de34eea | |||
e034dc4d90 | |||
cfbd1e5e4b | |||
0df661819f | |||
1a9f6d10d4 | |||
a787215c95 | |||
64ab400af5 | |||
a463878bf2 | |||
9f72024c61 | |||
243fbd4dc9 | |||
4e6a290693 | |||
ac05d529ca | |||
b38d79004a | |||
f4a547df11 | |||
2b87c35058 | |||
b11833e450 | |||
fa8e119514 | |||
677cb5c330 | |||
6e74c79bfe | |||
54474f5908 | |||
99cc0f519b | |||
92a01f89ef | |||
fd83a0c743 | |||
988e46c875 | |||
f081c2fc20 | |||
b4b376a1a5 | |||
0db4179d47 | |||
795b7c6234 | |||
091b9a57f5 | |||
564e1422ac | |||
8ed4ed3fc4 | |||
29fe4566a7 | |||
ae3bfb28ed | |||
14aab97d8a | |||
52d7a47cd7 | |||
f338dcbeed | |||
dcec058a22 | |||
2bdc6b156b | |||
84ca9e6b81 | |||
11cb0fd2db | |||
3f620ffb6f | |||
1a0e05d073 | |||
a4d2de23a1 | |||
85cecc9811 | |||
9899f742a8 | |||
b5484740b7 | |||
016b15b437 | |||
6fb936798e | |||
a692b87843 | |||
19663885a4 | |||
49b87777f9 | |||
d4523bb1e6 | |||
e3200899e2 | |||
36c7a1ab9e | |||
c54fbd5eca | |||
bbe828071e | |||
23f6c7db00 | |||
b1ea9e7a71 | |||
fb71d0e272 | |||
fa72a29999 | |||
af77b31d54 | |||
8280dace26 | |||
ecaf1c7b7c | |||
8702ec29a8 | |||
d8206434bc | |||
c71c2a8710 |
.github/workflows/ci.yaml (vendored, 4 changed lines)

@@ -28,7 +28,7 @@ jobs:
needs: test_lint
strategy:
matrix:
node: [ '14', '15' ,'16', '17' ]
node: [ '14','16' ]
os: [ windows-latest, ubuntu-latest ]
name: Test with Node v${{ matrix.node }} on ${{ matrix.os }}
steps:

@@ -136,4 +136,4 @@ jobs:
env:
NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
VERSION: ${{ needs.check_if_version_upgraded.outputs.to_version }}
IS_PRE_RELEASE: ${{ needs.check_if_version_upgraded.outputs.is_pre_release }}
IS_PRE_RELEASE: ${{ needs.check_if_version_upgraded.outputs.is_pre_release }}
@@ -6,4 +6,6 @@ node_modules/
/src/test/apps/
/src/tools/types/
/sample_react_project
/build_keycloak/
/build_keycloak/
/src/i18n/generated_messages/
/.vscode/
README.md (38 changed lines)

@@ -8,9 +8,6 @@
<a href="https://github.com/garronej/keycloakify/actions">
<img src="https://github.com/garronej/keycloakify/workflows/ci/badge.svg?branch=main">
</a>
<a href="https://bundlephobia.com/package/keycloakify">
<img src="https://img.shields.io/bundlephobia/minzip/keycloakify">
</a>
<a href="https://www.npmjs.com/package/keycloakify">
<img src="https://img.shields.io/npm/dm/keycloakify">
</a>

@@ -27,15 +24,9 @@
<a href="https://www.keycloakify.dev">Home</a>
-
<a href="https://docs.keycloakify.dev">Documentation</a>
</p>
<p align="center"> ---- Project starter / Demo setup ---- </p>
<p align="center">
<a href="https://github.com/garronej/keycloakify-starter">CSS Level customization</a>
-
<a href="https://github.com/garronej/keycloakify-advanced-starter">Component Level customization</a>
<a href="https://github.com/codegouvfr/keycloakify-starter">Starter project</a>
</p>
<p align="center"> ---- </p>

</p>

<p align="center">

@@ -49,6 +40,33 @@

# Changelog highlights

## 6.13

- Build now works behind corporate proxies, [see issue](https://github.com/InseeFrLab/keycloakify/issues/257).

## 6.12

Massive improvement in the developer experience:

- There is now only one starter repo: https://github.com/codegouvfr/keycloakify-starter
- A lot of comments have been added in the code of the starter to make it easier to get started.
- The doc has been updated: https://docs.keycloakify.dev
- A lot of improvements in the type system.

## 6.11.4

- You no longer need to have Maven installed to build the theme. Thanks to @lordvlad, [see PR](https://github.com/InseeFrLab/keycloakify/pull/239).
- Feature new build options: [`bundler`](https://docs.keycloakify.dev/build-options#keycloakify.bundler), [`groupId`](https://docs.keycloakify.dev/build-options#keycloakify.groupid), [`artifactId`](https://docs.keycloakify.dev/build-options#keycloakify.artifactid), [`version`](https://docs.keycloakify.dev/build-options#version).
  These options can be used to customize the output name of the .jar. You can use environment variables to override the values read from the package.json, as sketched below. Thanks to @lordvlad.

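The sketch below illustrates the precedence this entry describes for the `.jar` naming options, mirroring the `readBuildOptions` changes further down in this compare: environment variable first, then the `keycloakify` field of `package.json`, then a derived default. The `packageJson` object is a stand-in for the parsed file, not a Keycloakify export.

```ts
// Minimal sketch of the override precedence described above (an illustration,
// not the actual Keycloakify implementation).
const packageJson = {
    "name": "my-app",
    "version": "1.2.3",
    "keycloakify": {} as { artifactId?: string; groupId?: string; bundler?: "mvn" | "keycloakify" | "none" }
};

const themeName = packageJson.name; // the real build derives this from the npm package name

// Environment variable wins, then the package.json field, then a derived default.
const artifactId = process.env.KEYCLOAKIFY_ARTIFACT_ID ?? packageJson.keycloakify.artifactId ?? `${themeName}-keycloak-theme`;
const groupId = process.env.KEYCLOAKIFY_GROUP_ID ?? packageJson.keycloakify.groupId ?? `${themeName}.keycloak`;
const version = process.env.KEYCLOAKIFY_VERSION ?? packageJson.version;

console.log(`Expected output: ${artifactId}-${version}.jar (groupId: ${groupId})`);
```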
## 6.10.0

- Windows compat (thanks to @lordvlad, [see PR](https://github.com/InseeFrLab/keycloakify/pull/226)). WSL is no longer required 🎉

## 6.8.4

- `@emotion/react` is no longer a peer dependency of Keycloakify.

## 6.8.0

- It is now possible to pass a custom `<Template />` component as a prop to `<KcApp />` and every
package.json (56 changed lines; Executable file → Normal file)

@@ -1,30 +1,34 @@
{
"name": "keycloakify",
"version": "6.8.2",
"description": "Keycloak theme generator for Reacts app",
"version": "7.0.0-rc.3",
"description": "Create Keycloak themes using React",
"repository": {
"type": "git",
"url": "git://github.com/garronej/keycloakify.git"
"url": "git://github.com/inseefrlab/keycloakify.git"
},
"main": "dist/lib/index.js",
"types": "dist/lib/index.d.ts",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "rimraf dist/ && tsc -p src/bin && tsc -p src/lib && yarn grant-exec-perms && yarn copy-files dist/",
"build:test": "rimraf dist_test/ && tsc -p src/test && yarn copy-files dist_test/",
"build": "rimraf dist/ && tsc -p src/bin && tsc -p src/tsconfig.json && tsc-alias -p src/tsconfig.json && yarn grant-exec-perms && yarn copy-files dist/",
"build:watch": "tsc -p src/tsconfig.json && (concurrently \"tsc -p src/tsconfig.json -w\" \"tsc-alias -p src/tsconfig.json\")",
"build:test": "rimraf dist_test/ && tsc -p test/tsconfig.json && tsc-alias -p test/tsconfig.json && yarn copy-files dist_test/src",
"grant-exec-perms": "node dist/bin/tools/grant-exec-perms.js",
"copy-files": "copyfiles -u 1 src/**/*.ftl",
"pretest": "yarn build:test",
"test": "node dist_test/test/bin && node dist_test/test/lib",
"generate-messages": "node dist/bin/generate-i18n-messages.js",
"link_in_test_app": "node dist/bin/link_in_test_app.js",
"test": "yarn build:test && node dist_test/test/bin && node dist_test/test/lib",
"test:sample-app": "yarn build:test && node dist_test/test/bin/main.js",
"_format": "prettier '**/*.{ts,tsx,json,md}'",
"format": "yarn _format --write",
"format:check": "yarn _format --list-different"
"format:check": "yarn _format --list-different",
"generate-messages": "ts-node --skipProject scripts/generate-i18n-messages.ts",
"link-in-app": "ts-node --skipProject scripts/link-in-app.ts",
"link-in-starter": "yarn link-in-app keycloakify-starter",
"tsc-watch": "tsc -p src/bin -w & tsc -p src/lib -w "
},
"bin": {
"keycloakify": "dist/bin/keycloakify/index.js",
"create-keycloak-email-directory": "dist/bin/create-keycloak-email-directory.js",
"download-builtin-keycloak-theme": "dist/bin/download-builtin-keycloak-theme.js"
"download-builtin-keycloak-theme": "dist/bin/download-builtin-keycloak-theme.js",
"eject-keycloak-page": "dist/bin/eject-keycloak-page.js"
},
"lint-staged": {
"*.{ts,tsx,json,md}": [

@@ -41,7 +45,8 @@
"files": [
"src/",
"dist/",
"!dist/tsconfig.tsbuildinfo"
"!dist/tsconfig.tsbuildinfo",
"!dist/bin/tsconfig.tsbuildinfo"
],
"keywords": [
"bluehats",

@@ -53,18 +58,17 @@
"login",
"register"
],
"homepage": "https://github.com/garronej/keycloakify",
"homepage": "https://www.keycloakify.dev",
"peerDependencies": {
"@emotion/react": "^11.4.1",
"react": "^16.8.0 || ^17.0.0 || ^18.0.0"
},
"devDependencies": {
"@babel/core": "^7.0.0",
"@emotion/react": "^11.4.1",
"@types/memoizee": "^0.4.7",
"@types/make-fetch-happen": "^10.0.1",
"@types/minimist": "^1.2.2",
"@types/node": "^17.0.25",
"@types/node": "^18.15.3",
"@types/react": "18.0.9",
"concurrently": "^7.6.0",
"copyfiles": "^2.4.1",
"husky": "^4.3.8",
"lint-staged": "^11.0.0",

@@ -72,23 +76,23 @@
"properties-parser": "^0.3.1",
"react": "18.1.0",
"rimraf": "^3.0.2",
"typescript": "^4.2.3"
"scripting-tools": "^0.19.13",
"ts-node": "^10.9.1",
"tsc-alias": "^1.8.3",
"typescript": "^5.0.1-rc"
},
"dependencies": {
"@octokit/rest": "^18.12.0",
"cheerio": "^1.0.0-rc.5",
"cli-select": "^1.1.2",
"evt": "^2.4.6",
"memoizee": "^0.4.15",
"evt": "^2.4.18",
"make-fetch-happen": "^11.0.3",
"minimal-polyfills": "^2.2.2",
"minimist": "^1.2.6",
"path-browserify": "^1.0.1",
"powerhooks": "^0.20.23",
"react-markdown": "^5.0.3",
"rfc4648": "^1.5.2",
"scripting-tools": "^0.19.13",
"tsafe": "^1.1.3",
"tss-react": "^4.3.4",
"tsafe": "^1.6.0",
"zod": "^3.17.10"
}
}

@@ -13,11 +13,11 @@
"packageRules": [
{
"packagePatterns": ["*"],
"excludePackagePatterns": ["tss-react", "powerhooks", "tsafe", "evt"],
"excludePackagePatterns": ["tsafe", "evt"],
"enabled": false
},
{
"packagePatterns": ["tss-react", "powerhooks", "tsafe", "evt"],
"packagePatterns": ["tsafe", "evt"],
"matchUpdateTypes": ["minor", "patch"],
"automerge": true,
"automergeType": "branch",

@@ -1,12 +1,11 @@
import "minimal-polyfills/Object.fromEntries";
import * as fs from "fs";
import { join as pathJoin, relative as pathRelative, dirname as pathDirname } from "path";
import { crawl } from "./tools/crawl";
import { downloadBuiltinKeycloakTheme } from "./download-builtin-keycloak-theme";
import { getProjectRoot } from "./tools/getProjectRoot";
import { rm_rf, rm_r } from "./tools/rm";
import { getCliOptions } from "./tools/cliOptions";
import { getLogger } from "./tools/logger";
import { crawl } from "../src/bin/tools/crawl";
import { downloadBuiltinKeycloakTheme } from "../src/bin/download-builtin-keycloak-theme";
import { getProjectRoot } from "../src/bin/tools/getProjectRoot";
import { getCliOptions } from "../src/bin/tools/cliOptions";
import { getLogger } from "../src/bin/tools/logger";

//NOTE: To run without argument when we want to generate src/i18n/generated_kcMessages files,
// update the version array for generating for newer version.

@@ -22,7 +21,7 @@ for (const keycloakVersion of ["11.0.3", "15.0.2", "18.0.1"]) {

const tmpDirPath = pathJoin(getProjectRoot(), "tmp_xImOef9dOd44");

rm_rf(tmpDirPath);
fs.rmSync(tmpDirPath, { "recursive": true, "force": true });

downloadBuiltinKeycloakTheme({
keycloakVersion,

@@ -54,7 +53,7 @@ for (const keycloakVersion of ["11.0.3", "15.0.2", "18.0.1"]) {
});
}

rm_r(tmpDirPath);
fs.rmSync(tmpDirPath, { recursive: true, force: true });

Object.keys(record).forEach(pageType => {
const recordForPageType = record[pageType];
scripts/link-in-app.ts (new file, 143 lines)
@ -0,0 +1,143 @@
|
||||
import { execSync } from "child_process";
|
||||
import { join as pathJoin, relative as pathRelative } from "path";
|
||||
import { getProjectRoot } from "../src/bin/tools/getProjectRoot";
|
||||
import * as fs from "fs";
|
||||
|
||||
const singletonDependencies: string[] = ["react", "@types/react"];
|
||||
|
||||
const rootDirPath = getProjectRoot();
|
||||
|
||||
//NOTE: This is only required because of: https://github.com/garronej/ts-ci/blob/c0e207b9677523d4ec97fe672ddd72ccbb3c1cc4/README.md?plain=1#L54-L58
|
||||
fs.writeFileSync(
|
||||
pathJoin(rootDirPath, "dist", "package.json"),
|
||||
Buffer.from(
|
||||
JSON.stringify(
|
||||
(() => {
|
||||
const packageJsonParsed = JSON.parse(fs.readFileSync(pathJoin(rootDirPath, "package.json")).toString("utf8"));
|
||||
|
||||
return {
|
||||
...packageJsonParsed,
|
||||
"main": packageJsonParsed["main"]?.replace(/^dist\//, ""),
|
||||
"types": packageJsonParsed["types"]?.replace(/^dist\//, ""),
|
||||
"module": packageJsonParsed["module"]?.replace(/^dist\//, ""),
|
||||
"exports": !("exports" in packageJsonParsed)
|
||||
? undefined
|
||||
: Object.fromEntries(
|
||||
Object.entries(packageJsonParsed["exports"]).map(([key, value]) => [
|
||||
key,
|
||||
(value as string).replace(/^\.\/dist\//, "./")
|
||||
])
|
||||
)
|
||||
};
|
||||
})(),
|
||||
null,
|
||||
2
|
||||
),
|
||||
"utf8"
|
||||
)
|
||||
);
|
||||
|
||||
fs.cpSync(pathJoin(rootDirPath, "src"), pathJoin(rootDirPath, "dist", "src"), { "recursive": true });
|
||||
|
||||
const commonThirdPartyDeps = (() => {
|
||||
// For example [ "@emotion" ] it's more convenient than
|
||||
// having to list every sub emotion packages (@emotion/css @emotion/utils ...)
|
||||
// in singletonDependencies
|
||||
const namespaceSingletonDependencies: string[] = [];
|
||||
|
||||
return [
|
||||
...namespaceSingletonDependencies
|
||||
.map(namespaceModuleName =>
|
||||
fs
|
||||
.readdirSync(pathJoin(rootDirPath, "node_modules", namespaceModuleName))
|
||||
.map(submoduleName => `${namespaceModuleName}/${submoduleName}`)
|
||||
)
|
||||
.reduce((prev, curr) => [...prev, ...curr], []),
|
||||
...singletonDependencies
|
||||
];
|
||||
})();
|
||||
|
||||
const yarnGlobalDirPath = pathJoin(rootDirPath, ".yarn_home");
|
||||
|
||||
fs.rmSync(yarnGlobalDirPath, { "recursive": true, "force": true });
|
||||
fs.mkdirSync(yarnGlobalDirPath);
|
||||
|
||||
const execYarnLink = (params: { targetModuleName?: string; cwd: string }) => {
|
||||
const { targetModuleName, cwd } = params;
|
||||
|
||||
const cmd = ["yarn", "link", ...(targetModuleName !== undefined ? [targetModuleName] : ["--no-bin-links"])].join(" ");
|
||||
|
||||
console.log(`$ cd ${pathRelative(rootDirPath, cwd) || "."} && ${cmd}`);
|
||||
|
||||
execSync(cmd, {
|
||||
cwd,
|
||||
"env": {
|
||||
...process.env,
|
||||
"HOME": yarnGlobalDirPath
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const testAppPaths = (() => {
|
||||
const [, , ...testAppNames] = process.argv;
|
||||
|
||||
return testAppNames
|
||||
.map(testAppName => {
|
||||
const testAppPath = pathJoin(rootDirPath, "..", testAppName);
|
||||
|
||||
if (fs.existsSync(testAppPath)) {
|
||||
return testAppPath;
|
||||
}
|
||||
|
||||
console.warn(`Skipping ${testAppName} since it cant be found here: ${testAppPath}`);
|
||||
|
||||
return undefined;
|
||||
})
|
||||
.filter((path): path is string => path !== undefined);
|
||||
})();
|
||||
|
||||
if (testAppPaths.length === 0) {
|
||||
console.error("No test app to link into!");
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
testAppPaths.forEach(testAppPath => execSync("yarn install", { "cwd": testAppPath }));
|
||||
|
||||
console.log("=== Linking common dependencies ===");
|
||||
|
||||
const total = commonThirdPartyDeps.length;
|
||||
let current = 0;
|
||||
|
||||
commonThirdPartyDeps.forEach(commonThirdPartyDep => {
|
||||
current++;
|
||||
|
||||
console.log(`${current}/${total} ${commonThirdPartyDep}`);
|
||||
|
||||
const localInstallPath = pathJoin(
|
||||
...[rootDirPath, "node_modules", ...(commonThirdPartyDep.startsWith("@") ? commonThirdPartyDep.split("/") : [commonThirdPartyDep])]
|
||||
);
|
||||
|
||||
execYarnLink({ "cwd": localInstallPath });
|
||||
});
|
||||
|
||||
commonThirdPartyDeps.forEach(commonThirdPartyDep =>
|
||||
testAppPaths.forEach(testAppPath =>
|
||||
execYarnLink({
|
||||
"cwd": testAppPath,
|
||||
"targetModuleName": commonThirdPartyDep
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
console.log("=== Linking in house dependencies ===");
|
||||
|
||||
execYarnLink({ "cwd": pathJoin(rootDirPath, "dist") });
|
||||
|
||||
testAppPaths.forEach(testAppPath =>
|
||||
execYarnLink({
|
||||
"cwd": testAppPath,
|
||||
"targetModuleName": JSON.parse(fs.readFileSync(pathJoin(rootDirPath, "package.json")).toString("utf8"))["name"]
|
||||
})
|
||||
);
|
||||
|
||||
export {};
|
src/Fallback.tsx (new file, 81 lines)
@ -0,0 +1,81 @@
|
||||
import { lazy, Suspense } from "react";
|
||||
import type { PageProps } from "keycloakify/pages/PageProps";
|
||||
import type { I18n } from "keycloakify/i18n";
|
||||
import type { KcContext } from "./kcContext";
|
||||
|
||||
const Login = lazy(() => import("keycloakify/pages/Login"));
|
||||
const Register = lazy(() => import("keycloakify/pages/Register"));
|
||||
const RegisterUserProfile = lazy(() => import("keycloakify/pages/RegisterUserProfile"));
|
||||
const Info = lazy(() => import("keycloakify/pages/Info"));
|
||||
const Error = lazy(() => import("keycloakify/pages/Error"));
|
||||
const LoginResetPassword = lazy(() => import("keycloakify/pages/LoginResetPassword"));
|
||||
const LoginVerifyEmail = lazy(() => import("keycloakify/pages/LoginVerifyEmail"));
|
||||
const Terms = lazy(() => import("keycloakify/pages/Terms"));
|
||||
const LoginOtp = lazy(() => import("keycloakify/pages/LoginOtp"));
|
||||
const LoginPassword = lazy(() => import("keycloakify/pages/LoginPassword"));
|
||||
const LoginUsername = lazy(() => import("keycloakify/pages/LoginUsername"));
|
||||
const WebauthnAuthenticate = lazy(() => import("keycloakify/pages/WebauthnAuthenticate"));
|
||||
const LoginUpdatePassword = lazy(() => import("keycloakify/pages/LoginUpdatePassword"));
|
||||
const LoginUpdateProfile = lazy(() => import("keycloakify/pages/LoginUpdateProfile"));
|
||||
const LoginIdpLinkConfirm = lazy(() => import("keycloakify/pages/LoginIdpLinkConfirm"));
|
||||
const LoginPageExpired = lazy(() => import("keycloakify/pages/LoginPageExpired"));
|
||||
const LoginIdpLinkEmail = lazy(() => import("keycloakify/pages/LoginIdpLinkEmail"));
|
||||
const LoginConfigTotp = lazy(() => import("keycloakify/pages/LoginConfigTotp"));
|
||||
const LogoutConfirm = lazy(() => import("keycloakify/pages/LogoutConfirm"));
|
||||
const UpdateUserProfile = lazy(() => import("keycloakify/pages/UpdateUserProfile"));
|
||||
const IdpReviewUserProfile = lazy(() => import("keycloakify/pages/IdpReviewUserProfile"));
|
||||
|
||||
export default function Fallback(props: PageProps<KcContext, I18n>) {
|
||||
const { kcContext, ...rest } = props;
|
||||
|
||||
return (
|
||||
<Suspense>
|
||||
{(() => {
|
||||
switch (kcContext.pageId) {
|
||||
case "login.ftl":
|
||||
return <Login kcContext={kcContext} {...rest} />;
|
||||
case "register.ftl":
|
||||
return <Register kcContext={kcContext} {...rest} />;
|
||||
case "register-user-profile.ftl":
|
||||
return <RegisterUserProfile kcContext={kcContext} {...rest} />;
|
||||
case "info.ftl":
|
||||
return <Info kcContext={kcContext} {...rest} />;
|
||||
case "error.ftl":
|
||||
return <Error kcContext={kcContext} {...rest} />;
|
||||
case "login-reset-password.ftl":
|
||||
return <LoginResetPassword kcContext={kcContext} {...rest} />;
|
||||
case "login-verify-email.ftl":
|
||||
return <LoginVerifyEmail kcContext={kcContext} {...rest} />;
|
||||
case "terms.ftl":
|
||||
return <Terms kcContext={kcContext} {...rest} />;
|
||||
case "login-otp.ftl":
|
||||
return <LoginOtp kcContext={kcContext} {...rest} />;
|
||||
case "login-username.ftl":
|
||||
return <LoginUsername kcContext={kcContext} {...rest} />;
|
||||
case "login-password.ftl":
|
||||
return <LoginPassword kcContext={kcContext} {...rest} />;
|
||||
case "webauthn-authenticate.ftl":
|
||||
return <WebauthnAuthenticate kcContext={kcContext} {...rest} />;
|
||||
case "login-update-password.ftl":
|
||||
return <LoginUpdatePassword kcContext={kcContext} {...rest} />;
|
||||
case "login-update-profile.ftl":
|
||||
return <LoginUpdateProfile kcContext={kcContext} {...rest} />;
|
||||
case "login-idp-link-confirm.ftl":
|
||||
return <LoginIdpLinkConfirm kcContext={kcContext} {...rest} />;
|
||||
case "login-idp-link-email.ftl":
|
||||
return <LoginIdpLinkEmail kcContext={kcContext} {...rest} />;
|
||||
case "login-page-expired.ftl":
|
||||
return <LoginPageExpired kcContext={kcContext} {...rest} />;
|
||||
case "login-config-totp.ftl":
|
||||
return <LoginConfigTotp kcContext={kcContext} {...rest} />;
|
||||
case "logout-confirm.ftl":
|
||||
return <LogoutConfirm kcContext={kcContext} {...rest} />;
|
||||
case "update-user-profile.ftl":
|
||||
return <UpdateUserProfile kcContext={kcContext} {...rest} />;
|
||||
case "idp-review-user-profile.ftl":
|
||||
return <IdpReviewUserProfile kcContext={kcContext} {...rest} />;
|
||||
}
|
||||
})()}
|
||||
</Suspense>
|
||||
);
|
||||
}
|
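src/Fallback.tsx above is the page dispatcher of the new API: every default page is lazy-loaded and selected on `kcContext.pageId`. A consumer typically handles only the pages it has customized and delegates the rest to `Fallback`. The sketch below assumes `keycloakify/Fallback` and `keycloakify/kcContext` as publication paths of the new files and a hypothetical `./pages/MyLogin` component; none of these are confirmed by this diff.

```tsx
// Illustrative consumer-side switch: override one page, let Fallback handle the rest.
import { lazy, Suspense } from "react";
import Fallback from "keycloakify/Fallback"; // assumed publication path of src/Fallback.tsx
import type { PageProps } from "keycloakify/pages/PageProps";
import type { I18n } from "keycloakify/i18n";
import type { KcContext } from "keycloakify/kcContext"; // assumed path

// Hypothetical customized page living in the consumer's project.
const MyLogin = lazy(() => import("./pages/MyLogin"));

export default function KcApp(props: PageProps<KcContext, I18n>) {
    const { kcContext, ...rest } = props;

    return (
        <Suspense>
            {kcContext.pageId === "login.ftl" ? (
                <MyLogin kcContext={kcContext} {...rest} />
            ) : (
                <Fallback kcContext={kcContext} {...rest} />
            )}
        </Suspense>
    );
}
```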
@ -1,32 +1,12 @@
|
||||
import React, { useReducer, useEffect, memo } from "react";
|
||||
import type { ReactNode } from "react";
|
||||
import type { KcContextBase } from "../getKcContext/KcContextBase";
|
||||
import { assert } from "../tools/assert";
|
||||
import { useCallbackFactory } from "powerhooks/useCallbackFactory";
|
||||
import { headInsert } from "../tools/headInsert";
|
||||
import { pathJoin } from "../../bin/tools/pathJoin";
|
||||
import { useConstCallback } from "powerhooks/useConstCallback";
|
||||
import type { KcTemplateProps } from "./KcProps";
|
||||
import { useCssAndCx } from "../tools/useCssAndCx";
|
||||
import type { I18n } from "../i18n";
|
||||
import { assert } from "keycloakify/tools/assert";
|
||||
import { clsx } from "keycloakify/tools/clsx";
|
||||
import { usePrepareTemplate } from "keycloakify/lib/usePrepareTemplate";
|
||||
import { type TemplateProps, defaultTemplateClasses } from "keycloakify/TemplateProps";
|
||||
import { useGetClassName } from "keycloakify/lib/useGetClassName";
|
||||
import type { KcContext } from "./kcContext";
|
||||
import type { I18n } from "./i18n";
|
||||
|
||||
export type TemplateProps = {
|
||||
displayInfo?: boolean;
|
||||
displayMessage?: boolean;
|
||||
displayRequiredFields?: boolean;
|
||||
displayWide?: boolean;
|
||||
showAnotherWayIfPresent?: boolean;
|
||||
headerNode: ReactNode;
|
||||
showUsernameNode?: ReactNode;
|
||||
formNode: ReactNode;
|
||||
infoNode?: ReactNode;
|
||||
/** If you write your own page you probably want
|
||||
* to avoid pulling the default theme assets.
|
||||
*/
|
||||
doFetchDefaultThemeResources: boolean;
|
||||
} & { kcContext: KcContextBase; i18n: I18n } & KcTemplateProps;
|
||||
|
||||
const Template = memo((props: TemplateProps) => {
|
||||
export default function Template(props: TemplateProps<KcContext, I18n>) {
|
||||
const {
|
||||
displayInfo = false,
|
||||
displayMessage = true,
|
||||
@ -39,102 +19,58 @@ const Template = memo((props: TemplateProps) => {
|
||||
infoNode = null,
|
||||
kcContext,
|
||||
i18n,
|
||||
doFetchDefaultThemeResources
|
||||
doUseDefaultCss,
|
||||
classes
|
||||
} = props;
|
||||
|
||||
const { cx } = useCssAndCx();
|
||||
const { getClassName } = useGetClassName({
|
||||
"defaultClasses": !doUseDefaultCss ? undefined : defaultTemplateClasses,
|
||||
classes
|
||||
});
|
||||
|
||||
const { msg, changeLocale, labelBySupportedLanguageTag, currentLanguageTag } = i18n;
|
||||
|
||||
const onChangeLanguageClickFactory = useCallbackFactory(([kcLanguageTag]: [string]) => changeLocale(kcLanguageTag));
|
||||
|
||||
const onTryAnotherWayClick = useConstCallback(() => (document.forms["kc-select-try-another-way-form" as never].submit(), false));
|
||||
|
||||
const { realm, locale, auth, url, message, isAppInitiatedAction } = kcContext;
|
||||
|
||||
const [isExtraCssLoaded, setExtraCssLoaded] = useReducer(() => true, false);
|
||||
const { isReady } = usePrepareTemplate({
|
||||
"doFetchDefaultThemeResources": doUseDefaultCss,
|
||||
url,
|
||||
"stylesCommon": [
|
||||
"node_modules/patternfly/dist/css/patternfly.min.css",
|
||||
"node_modules/patternfly/dist/css/patternfly-additions.min.css",
|
||||
"lib/zocial/zocial.css"
|
||||
],
|
||||
"styles": ["css/login.css"],
|
||||
"htmlClassName": getClassName("kcHtmlClass")
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (!doFetchDefaultThemeResources) {
|
||||
setExtraCssLoaded();
|
||||
return;
|
||||
}
|
||||
|
||||
let isUnmounted = false;
|
||||
const cleanups: (() => void)[] = [];
|
||||
|
||||
const toArr = (x: string | readonly string[] | undefined) => (typeof x === "string" ? x.split(" ") : x ?? []);
|
||||
|
||||
Promise.all(
|
||||
[
|
||||
...toArr(props.stylesCommon).map(relativePath => pathJoin(url.resourcesCommonPath, relativePath)),
|
||||
...toArr(props.styles).map(relativePath => pathJoin(url.resourcesPath, relativePath))
|
||||
]
|
||||
.reverse()
|
||||
.map(href =>
|
||||
headInsert({
|
||||
"type": "css",
|
||||
href,
|
||||
"position": "prepend"
|
||||
})
|
||||
)
|
||||
).then(() => {
|
||||
if (isUnmounted) {
|
||||
return;
|
||||
}
|
||||
|
||||
setExtraCssLoaded();
|
||||
});
|
||||
|
||||
toArr(props.scripts).forEach(relativePath =>
|
||||
headInsert({
|
||||
"type": "javascript",
|
||||
"src": pathJoin(url.resourcesPath, relativePath)
|
||||
})
|
||||
);
|
||||
|
||||
if (props.kcHtmlClass !== undefined) {
|
||||
const htmlClassList = document.getElementsByTagName("html")[0].classList;
|
||||
|
||||
const tokens = cx(props.kcHtmlClass).split(" ");
|
||||
|
||||
htmlClassList.add(...tokens);
|
||||
|
||||
cleanups.push(() => htmlClassList.remove(...tokens));
|
||||
}
|
||||
|
||||
return () => {
|
||||
isUnmounted = true;
|
||||
|
||||
cleanups.forEach(f => f());
|
||||
};
|
||||
}, [props.kcHtmlClass]);
|
||||
|
||||
if (!isExtraCssLoaded) {
|
||||
if (!isReady) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cx(props.kcLoginClass)}>
|
||||
<div id="kc-header" className={cx(props.kcHeaderClass)}>
|
||||
<div id="kc-header-wrapper" className={cx(props.kcHeaderWrapperClass)}>
|
||||
<div className={getClassName("kcLoginClass")}>
|
||||
<div id="kc-header" className={getClassName("kcHeaderClass")}>
|
||||
<div id="kc-header-wrapper" className={getClassName("kcHeaderWrapperClass")}>
|
||||
{msg("loginTitleHtml", realm.displayNameHtml)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className={cx(props.kcFormCardClass, displayWide && props.kcFormCardAccountClass)}>
|
||||
<header className={cx(props.kcFormHeaderClass)}>
|
||||
<div className={clsx(getClassName("kcFormCardClass"), displayWide && getClassName("kcFormCardAccountClass"))}>
|
||||
<header className={getClassName("kcFormHeaderClass")}>
|
||||
{realm.internationalizationEnabled && (assert(locale !== undefined), true) && locale.supported.length > 1 && (
|
||||
<div id="kc-locale">
|
||||
<div id="kc-locale-wrapper" className={cx(props.kcLocaleWrapperClass)}>
|
||||
<div id="kc-locale-wrapper" className={getClassName("kcLocaleWrapperClass")}>
|
||||
<div className="kc-dropdown" id="kc-locale-dropdown">
|
||||
{/* eslint-disable-next-line jsx-a11y/anchor-is-valid */}
|
||||
<a href="#" id="kc-current-locale-link">
|
||||
{labelBySupportedLanguageTag[currentLanguageTag]}
|
||||
</a>
|
||||
<ul>
|
||||
{locale.supported.map(({ languageTag }) => (
|
||||
<li key={languageTag} className="kc-dropdown-item">
|
||||
<a href="#" onClick={onChangeLanguageClickFactory(languageTag)}>
|
||||
{/* eslint-disable-next-line jsx-a11y/anchor-is-valid */}
|
||||
<a href="#" onClick={() => changeLocale(languageTag)}>
|
||||
{labelBySupportedLanguageTag[languageTag]}
|
||||
</a>
|
||||
</li>
|
||||
@ -146,8 +82,8 @@ const Template = memo((props: TemplateProps) => {
|
||||
)}
|
||||
{!(auth !== undefined && auth.showUsername && !auth.showResetCredentials) ? (
|
||||
displayRequiredFields ? (
|
||||
<div className={cx(props.kcContentWrapperClass)}>
|
||||
<div className={cx(props.kcLabelWrapperClass, "subtitle")}>
|
||||
<div className={getClassName("kcContentWrapperClass")}>
|
||||
<div className={clsx(getClassName("kcLabelWrapperClass"), "subtitle")}>
|
||||
<span className="subtitle">
|
||||
<span className="required">*</span>
|
||||
{msg("requiredFields")}
|
||||
@ -161,20 +97,20 @@ const Template = memo((props: TemplateProps) => {
|
||||
<h1 id="kc-page-title">{headerNode}</h1>
|
||||
)
|
||||
) : displayRequiredFields ? (
|
||||
<div className={cx(props.kcContentWrapperClass)}>
|
||||
<div className={cx(props.kcLabelWrapperClass, "subtitle")}>
|
||||
<div className={getClassName("kcContentWrapperClass")}>
|
||||
<div className={clsx(getClassName("kcLabelWrapperClass"), "subtitle")}>
|
||||
<span className="subtitle">
|
||||
<span className="required">*</span> {msg("requiredFields")}
|
||||
</span>
|
||||
</div>
|
||||
<div className="col-md-10">
|
||||
{showUsernameNode}
|
||||
<div className={cx(props.kcFormGroupClass)}>
|
||||
<div className={getClassName("kcFormGroupClass")}>
|
||||
<div id="kc-username">
|
||||
<label id="kc-attempted-username">{auth?.attemptedUsername}</label>
|
||||
<a id="reset-login" href={url.loginRestartFlowUrl}>
|
||||
<div className="kc-login-tooltip">
|
||||
<i className={cx(props.kcResetFlowIcon)}></i>
|
||||
<i className={getClassName("kcResetFlowIcon")}></i>
|
||||
<span className="kc-tooltip-text">{msg("restartLoginTooltip")}</span>
|
||||
</div>
|
||||
</a>
|
||||
@ -185,12 +121,12 @@ const Template = memo((props: TemplateProps) => {
|
||||
) : (
|
||||
<>
|
||||
{showUsernameNode}
|
||||
<div className={cx(props.kcFormGroupClass)}>
|
||||
<div className={getClassName("kcFormGroupClass")}>
|
||||
<div id="kc-username">
|
||||
<label id="kc-attempted-username">{auth?.attemptedUsername}</label>
|
||||
<a id="reset-login" href={url.loginRestartFlowUrl}>
|
||||
<div className="kc-login-tooltip">
|
||||
<i className={cx(props.kcResetFlowIcon)}></i>
|
||||
<i className={getClassName("kcResetFlowIcon")}></i>
|
||||
<span className="kc-tooltip-text">{msg("restartLoginTooltip")}</span>
|
||||
</div>
|
||||
</a>
|
||||
@ -203,11 +139,11 @@ const Template = memo((props: TemplateProps) => {
|
||||
<div id="kc-content-wrapper">
|
||||
{/* App-initiated actions should not see warning messages about the need to complete the action during login. */}
|
||||
{displayMessage && message !== undefined && (message.type !== "warning" || !isAppInitiatedAction) && (
|
||||
<div className={cx("alert", `alert-${message.type}`)}>
|
||||
{message.type === "success" && <span className={cx(props.kcFeedbackSuccessIcon)}></span>}
|
||||
{message.type === "warning" && <span className={cx(props.kcFeedbackWarningIcon)}></span>}
|
||||
{message.type === "error" && <span className={cx(props.kcFeedbackErrorIcon)}></span>}
|
||||
{message.type === "info" && <span className={cx(props.kcFeedbackInfoIcon)}></span>}
|
||||
<div className={clsx("alert", `alert-${message.type}`)}>
|
||||
{message.type === "success" && <span className={getClassName("kcFeedbackSuccessIcon")}></span>}
|
||||
{message.type === "warning" && <span className={getClassName("kcFeedbackWarningIcon")}></span>}
|
||||
{message.type === "error" && <span className={getClassName("kcFeedbackErrorIcon")}></span>}
|
||||
{message.type === "info" && <span className={getClassName("kcFeedbackInfoIcon")}></span>}
|
||||
<span
|
||||
className="kc-feedback-text"
|
||||
dangerouslySetInnerHTML={{
|
||||
@ -222,12 +158,24 @@ const Template = memo((props: TemplateProps) => {
|
||||
id="kc-select-try-another-way-form"
|
||||
action={url.loginAction}
|
||||
method="post"
|
||||
className={cx(displayWide && props.kcContentWrapperClass)}
|
||||
className={clsx(displayWide && getClassName("kcContentWrapperClass"))}
|
||||
>
|
||||
<div className={cx(displayWide && [props.kcFormSocialAccountContentClass, props.kcFormSocialAccountClass])}>
|
||||
<div className={cx(props.kcFormGroupClass)}>
|
||||
<div
|
||||
className={clsx(
|
||||
displayWide && [getClassName("kcFormSocialAccountContentClass"), getClassName("kcFormSocialAccountClass")]
|
||||
)}
|
||||
>
|
||||
<div className={getClassName("kcFormGroupClass")}>
|
||||
<input type="hidden" name="tryAnotherWay" value="on" />
|
||||
<a href="#" id="try-another-way" onClick={onTryAnotherWayClick}>
|
||||
{/* eslint-disable-next-line jsx-a11y/anchor-is-valid */}
|
||||
<a
|
||||
href="#"
|
||||
id="try-another-way"
|
||||
onClick={() => {
|
||||
document.forms["kc-select-try-another-way-form" as never].submit();
|
||||
return false;
|
||||
}}
|
||||
>
|
||||
{msg("doTryAnotherWay")}
|
||||
</a>
|
||||
</div>
|
||||
@ -235,8 +183,8 @@ const Template = memo((props: TemplateProps) => {
|
||||
</form>
|
||||
)}
|
||||
{displayInfo && (
|
||||
<div id="kc-info" className={cx(props.kcSignUpClass)}>
|
||||
<div id="kc-info-wrapper" className={cx(props.kcInfoAreaWrapperClass)}>
|
||||
<div id="kc-info" className={getClassName("kcSignUpClass")}>
|
||||
<div id="kc-info-wrapper" className={getClassName("kcInfoAreaWrapperClass")}>
|
||||
{infoNode}
|
||||
</div>
|
||||
</div>
|
||||
@ -246,6 +194,4 @@ const Template = memo((props: TemplateProps) => {
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
export default Template;
|
||||
}
|
src/TemplateProps.ts (new file, 65 lines)
@ -0,0 +1,65 @@
|
||||
import type { ReactNode } from "react";
|
||||
import type { KcContext } from "./kcContext";
|
||||
import type { I18n } from "./i18n";
|
||||
|
||||
export type TemplateProps<KcContext extends KcContext.Common, I18nExtended extends I18n> = {
|
||||
kcContext: KcContext;
|
||||
i18n: I18nExtended;
|
||||
doUseDefaultCss: boolean;
|
||||
classes?: Partial<Record<TemplateClassKey, string>>;
|
||||
|
||||
formNode: ReactNode;
|
||||
displayInfo?: boolean;
|
||||
displayMessage?: boolean;
|
||||
displayRequiredFields?: boolean;
|
||||
displayWide?: boolean;
|
||||
showAnotherWayIfPresent?: boolean;
|
||||
headerNode: ReactNode;
|
||||
showUsernameNode?: ReactNode;
|
||||
infoNode?: ReactNode;
|
||||
};
|
||||
|
||||
export type TemplateClassKey =
|
||||
| "kcHtmlClass"
|
||||
| "kcLoginClass"
|
||||
| "kcHeaderClass"
|
||||
| "kcHeaderWrapperClass"
|
||||
| "kcFormCardClass"
|
||||
| "kcFormCardAccountClass"
|
||||
| "kcFormHeaderClass"
|
||||
| "kcLocaleWrapperClass"
|
||||
| "kcContentWrapperClass"
|
||||
| "kcLabelWrapperClass"
|
||||
| "kcFormGroupClass"
|
||||
| "kcResetFlowIcon"
|
||||
| "kcFeedbackSuccessIcon"
|
||||
| "kcFeedbackWarningIcon"
|
||||
| "kcFeedbackErrorIcon"
|
||||
| "kcFeedbackInfoIcon"
|
||||
| "kcFormSocialAccountContentClass"
|
||||
| "kcFormSocialAccountClass"
|
||||
| "kcSignUpClass"
|
||||
| "kcInfoAreaWrapperClass";
|
||||
|
||||
export const defaultTemplateClasses: Record<TemplateClassKey, string | undefined> = {
|
||||
"kcHtmlClass": "login-pf",
|
||||
"kcLoginClass": "login-pf-page",
|
||||
"kcContentWrapperClass": "row",
|
||||
"kcHeaderClass": "login-pf-page-header",
|
||||
"kcHeaderWrapperClass": undefined,
|
||||
"kcFormCardClass": "card-pf",
|
||||
"kcFormCardAccountClass": "login-pf-accounts",
|
||||
"kcFormSocialAccountClass": "login-pf-social-section",
|
||||
"kcFormSocialAccountContentClass": "col-xs-12 col-sm-6",
|
||||
"kcFormHeaderClass": "login-pf-header",
|
||||
"kcLocaleWrapperClass": undefined,
|
||||
"kcFeedbackErrorIcon": "pficon pficon-error-circle-o",
|
||||
"kcFeedbackWarningIcon": "pficon pficon-warning-triangle-o",
|
||||
"kcFeedbackSuccessIcon": "pficon pficon-ok",
|
||||
"kcFeedbackInfoIcon": "pficon pficon-info",
|
||||
"kcResetFlowIcon": "pficon pficon-arrow fa-2x",
|
||||
"kcFormGroupClass": "form-group",
|
||||
"kcLabelWrapperClass": "col-xs-12 col-sm-12 col-md-12 col-lg-12",
|
||||
"kcSignUpClass": "login-pf-signup",
|
||||
"kcInfoAreaWrapperClass": undefined
|
||||
};
|
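src/TemplateProps.ts is where the new `classes` mechanism lives: the template (and pages) accept a `Partial<Record<TemplateClassKey, string>>`, and `useGetClassName` (imported in Template.tsx above) resolves each key against `defaultTemplateClasses` when `doUseDefaultCss` is set. Below is a consumer-side sketch; whether a custom value is appended to or replaces the default is decided inside `useGetClassName`, which is not part of this diff, and the `keycloakify/Template` import path is an assumption.

```tsx
// Sketch: overriding a couple of template classes from the consumer side.
import Template from "keycloakify/Template"; // assumed publication path of src/Template.tsx
import type { TemplateProps } from "keycloakify/TemplateProps";
import type { KcContext } from "keycloakify/kcContext"; // assumed path
import type { I18n } from "keycloakify/i18n";

export function MyTemplate(props: TemplateProps<KcContext, I18n>) {
    return (
        <Template
            {...props}
            doUseDefaultCss={true}
            classes={{
                "kcHtmlClass": "my-root-class",
                "kcFormCardClass": "my-card-class"
            }}
        />
    );
}
```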
@@ -7,11 +7,11 @@ import { promptKeycloakVersion } from "./promptKeycloakVersion";
import { getCliOptions } from "./tools/cliOptions";
import { getLogger } from "./tools/logger";

export function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
export async function downloadBuiltinKeycloakTheme(params: { keycloakVersion: string; destDirPath: string; isSilent: boolean }) {
const { keycloakVersion, destDirPath, isSilent } = params;

for (const ext of ["", "-community"]) {
downloadAndUnzip({
await downloadAndUnzip({
"destDirPath": destDirPath,
"url": `https://github.com/keycloak/keycloak/archive/refs/tags/${keycloakVersion}.zip`,
"pathOfDirToExtractInArchive": `keycloak-${keycloakVersion}/themes/src/main/resources${ext}/theme`,

@@ -31,7 +31,7 @@ if (require.main === module) {

logger.log(`Downloading builtins theme of Keycloak ${keycloakVersion} here ${destDirPath}`);

downloadBuiltinKeycloakTheme({
await downloadBuiltinKeycloakTheme({
keycloakVersion,
destDirPath,
isSilent
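`downloadBuiltinKeycloakTheme` (and `downloadAndUnzip` under it) now returns a `Promise`, so callers that used to invoke it synchronously have to await it, as `generateKeycloakThemeResources.ts` does further down in this compare. A minimal caller sketch; the relative import path is illustrative.

```ts
// The function is now async and must be awaited by its callers.
import { downloadBuiltinKeycloakTheme } from "./download-builtin-keycloak-theme"; // illustrative path

(async () => {
    await downloadBuiltinKeycloakTheme({
        "keycloakVersion": "18.0.1",
        "destDirPath": "./tmp_keycloak_theme",
        "isSilent": false
    });

    console.log("Built-in Keycloak theme downloaded");
})();
```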
src/bin/eject-keycloak-page.ts (new file, 38 lines)
@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { getProjectRoot } from "./tools/getProjectRoot";
|
||||
import cliSelect from "cli-select";
|
||||
import { loginThemePageIds, type PageId } from "./keycloakify/generateFtl/generateFtl";
|
||||
import { capitalize } from "tsafe/capitalize";
|
||||
import { readFile, writeFile } from "fs/promises";
|
||||
import { existsSync } from "fs";
|
||||
import { join as pathJoin, relative as pathRelative } from "path";
|
||||
import { kebabCaseToCamelCase } from "./tools/kebabCaseToSnakeCase";
|
||||
|
||||
(async () => {
|
||||
const projectRootDir = getProjectRoot();
|
||||
|
||||
const { value: pageId } = await cliSelect<PageId>({
|
||||
"values": [...loginThemePageIds]
|
||||
}).catch(() => {
|
||||
console.log("Aborting");
|
||||
|
||||
process.exit(-1);
|
||||
});
|
||||
|
||||
const pageBasename = capitalize(kebabCaseToCamelCase(pageId)).replace(/ftl$/, "tsx");
|
||||
|
||||
console.log(pageId);
|
||||
|
||||
const targetFilePath = pathJoin(process.cwd(), "src", "keycloak-theme", "pages", pageBasename);
|
||||
|
||||
if (existsSync(targetFilePath)) {
|
||||
console.log(`${pageId} is already ejected, ${pathRelative(process.cwd(), targetFilePath)} already exists`);
|
||||
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
writeFile(targetFilePath, await readFile(pathJoin(projectRootDir, "src", "pages", pageBasename)));
|
||||
|
||||
console.log(`${pathRelative(process.cwd(), targetFilePath)} created`);
|
||||
})();
|
@ -3,15 +3,25 @@ import { assert } from "tsafe/assert";
|
||||
import type { Equals } from "tsafe";
|
||||
import { id } from "tsafe/id";
|
||||
import { parse as urlParse } from "url";
|
||||
import { typeGuard } from "tsafe/typeGuard";
|
||||
import { symToStr } from "tsafe/symToStr";
|
||||
|
||||
const bundlers = ["mvn", "keycloakify", "none"] as const;
|
||||
type Bundler = (typeof bundlers)[number];
|
||||
type ParsedPackageJson = {
|
||||
name: string;
|
||||
version: string;
|
||||
homepage?: string;
|
||||
keycloakify?: {
|
||||
/** @deprecated: use extraLoginPages instead */
|
||||
extraPages?: string[];
|
||||
extraLoginPages?: string[];
|
||||
extraAccountPages?: string[];
|
||||
extraThemeProperties?: string[];
|
||||
areAppAndKeycloakServerSharingSameDomain?: boolean;
|
||||
artifactId?: string;
|
||||
groupId?: string;
|
||||
bundler?: Bundler;
|
||||
};
|
||||
};
|
||||
|
||||
@ -22,13 +32,18 @@ const zParsedPackageJson = z.object({
|
||||
"keycloakify": z
|
||||
.object({
|
||||
"extraPages": z.array(z.string()).optional(),
|
||||
"extraLoginPages": z.array(z.string()).optional(),
|
||||
"extraAccountPages": z.array(z.string()).optional(),
|
||||
"extraThemeProperties": z.array(z.string()).optional(),
|
||||
"areAppAndKeycloakServerSharingSameDomain": z.boolean().optional()
|
||||
"areAppAndKeycloakServerSharingSameDomain": z.boolean().optional(),
|
||||
"artifactId": z.string().optional(),
|
||||
"groupId": z.string().optional(),
|
||||
"bundler": z.enum(bundlers).optional()
|
||||
})
|
||||
.optional()
|
||||
});
|
||||
|
||||
assert<Equals<ReturnType<typeof zParsedPackageJson["parse"]>, ParsedPackageJson>>();
|
||||
assert<Equals<ReturnType<(typeof zParsedPackageJson)["parse"]>, ParsedPackageJson>>();
|
||||
|
||||
/** Consolidated build options gathered from CLI arguments and the config in package.json */
|
||||
export type BuildOptions = BuildOptions.Standalone | BuildOptions.ExternalAssets;
|
||||
@ -38,10 +53,12 @@ export namespace BuildOptions {
|
||||
isSilent: boolean;
|
||||
version: string;
|
||||
themeName: string;
|
||||
extraPages?: string[];
|
||||
extraLoginPages: string[] | undefined;
|
||||
extraAccountPages: string[] | undefined;
|
||||
extraThemeProperties?: string[];
|
||||
//NOTE: Only for the pom.xml file, questionable utility...
|
||||
groupId: string;
|
||||
artifactId: string;
|
||||
bundler: Bundler;
|
||||
};
|
||||
|
||||
export type Standalone = Common & {
|
||||
@ -108,7 +125,7 @@ export function readBuildOptions(params: {
|
||||
const common: BuildOptions.Common = (() => {
|
||||
const { name, keycloakify = {}, version, homepage } = parsedPackageJson;
|
||||
|
||||
const { extraPages, extraThemeProperties } = keycloakify ?? {};
|
||||
const { extraPages, extraLoginPages, extraAccountPages, extraThemeProperties, groupId, artifactId, bundler } = keycloakify ?? {};
|
||||
|
||||
const themeName = name
|
||||
.replace(/^@(.*)/, "$1")
|
||||
@ -117,10 +134,26 @@ export function readBuildOptions(params: {
|
||||
|
||||
return {
|
||||
themeName,
|
||||
"bundler": (() => {
|
||||
const { KEYCLOAKIFY_BUNDLER } = process.env;
|
||||
|
||||
assert(
|
||||
typeGuard<Bundler | undefined>(
|
||||
KEYCLOAKIFY_BUNDLER,
|
||||
[undefined, ...id<readonly string[]>(bundlers)].includes(KEYCLOAKIFY_BUNDLER)
|
||||
),
|
||||
`${symToStr({ KEYCLOAKIFY_BUNDLER })} should be one of ${bundlers.join(", ")}`
|
||||
);
|
||||
|
||||
return KEYCLOAKIFY_BUNDLER ?? bundler ?? "keycloakify";
|
||||
})(),
|
||||
"artifactId": process.env.KEYCLOAKIFY_ARTIFACT_ID ?? artifactId ?? `${themeName}-keycloak-theme`,
|
||||
"groupId": (() => {
|
||||
const fallbackGroupId = `${themeName}.keycloak`;
|
||||
|
||||
return (
|
||||
process.env.KEYCLOAKIFY_GROUP_ID ??
|
||||
groupId ??
|
||||
(!homepage
|
||||
? fallbackGroupId
|
||||
: urlParse(homepage)
|
||||
@ -130,8 +163,9 @@ export function readBuildOptions(params: {
|
||||
.join(".") ?? fallbackGroupId) + ".keycloak"
|
||||
);
|
||||
})(),
|
||||
"version": version,
|
||||
extraPages,
|
||||
"version": process.env.KEYCLOAKIFY_VERSION ?? version,
|
||||
"extraLoginPages": [...(extraPages ?? []), ...(extraLoginPages ?? [])],
|
||||
extraAccountPages,
|
||||
extraThemeProperties,
|
||||
isSilent
|
||||
};
|
||||
|
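In `readBuildOptions` above, the `KEYCLOAKIFY_BUNDLER` environment variable is checked against the `bundlers` allow-list with tsafe's `typeGuard` before being used, so a typo fails the build with a clear message instead of silently selecting the wrong bundler. The helper below distills that idiom; `readEnumEnvVar` is a hypothetical name, not a Keycloakify export.

```ts
// Hypothetical helper distilling the env-var validation idiom used above.
import { assert } from "tsafe/assert";
import { typeGuard } from "tsafe/typeGuard";

function readEnumEnvVar<T extends string>(name: string, allowed: readonly T[]): T | undefined {
    const value = process.env[name];

    // Accept "unset" or any member of the allow-list, reject everything else,
    // and let TypeScript narrow the result to `T | undefined`.
    assert(
        typeGuard<T | undefined>(value, value === undefined || (allowed as readonly string[]).includes(value)),
        `${name} should be one of ${allowed.join(", ")}`
    );

    return value;
}

// Usage: KEYCLOAKIFY_BUNDLER wins over the default (the real code also consults package.json).
const bundlers = ["mvn", "keycloakify", "none"] as const;
const bundler = readEnumEnvVar("KEYCLOAKIFY_BUNDLER", bundlers) ?? "keycloakify";

console.log({ bundler });
```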
@ -36,7 +36,11 @@ ${ftl_object_to_js_code_declaring_an_object(.data_model, [])?no_esc};
|
||||
<#list fieldNames as fieldName>
|
||||
if(fieldName === "${fieldName}" ){
|
||||
<#attempt>
|
||||
return "${messagesPerField.printIfExists(fieldName,'1')}" ? x : undefined;
|
||||
<#if '${fieldName}' == 'username' || '${fieldName}' == 'password'>
|
||||
return <#if messagesPerField.existsError('username', 'password')>x<#else>undefined</#if>;
|
||||
<#else>
|
||||
return <#if messagesPerField.existsError('${fieldName}')>x<#else>undefined</#if>;
|
||||
</#if>
|
||||
<#recover>
|
||||
</#attempt>
|
||||
}
|
||||
@ -51,7 +55,11 @@ ${ftl_object_to_js_code_declaring_an_object(.data_model, [])?no_esc};
|
||||
<#list fieldNames as fieldName>
|
||||
if(fieldName === "${fieldName}" ){
|
||||
<#attempt>
|
||||
return <#if messagesPerField.existsError('${fieldName}')>true<#else>false</#if>;
|
||||
<#if '${fieldName}' == 'username' || '${fieldName}' == 'password'>
|
||||
return <#if messagesPerField.existsError('username', 'password')>true<#else>false</#if>;
|
||||
<#else>
|
||||
return <#if messagesPerField.existsError('${fieldName}')>true<#else>false</#if>;
|
||||
</#if>
|
||||
<#recover>
|
||||
</#attempt>
|
||||
}
|
||||
@ -66,8 +74,14 @@ ${ftl_object_to_js_code_declaring_an_object(.data_model, [])?no_esc};
|
||||
<#list fieldNames as fieldName>
|
||||
if(fieldName === "${fieldName}" ){
|
||||
<#attempt>
|
||||
<#if messagesPerField.existsError('${fieldName}')>
|
||||
return "${messagesPerField.get('${fieldName}')?no_esc}";
|
||||
<#if '${fieldName}' == 'username' || '${fieldName}' == 'password'>
|
||||
<#if messagesPerField.existsError('username', 'password')>
|
||||
return 'Invalid username or password.';
|
||||
</#if>
|
||||
<#else>
|
||||
<#if messagesPerField.existsError('${fieldName}')>
|
||||
return "${messagesPerField.get('${fieldName}')?no_esc}";
|
||||
</#if>
|
||||
</#if>
|
||||
<#recover>
|
||||
</#attempt>
|
||||
@ -83,7 +97,11 @@ ${ftl_object_to_js_code_declaring_an_object(.data_model, [])?no_esc};
|
||||
<#list fieldNames as fieldName>
|
||||
if(fieldName === "${fieldName}" ){
|
||||
<#attempt>
|
||||
return <#if messagesPerField.exists('${fieldName}')>true<#else>false</#if>;
|
||||
<#if '${fieldName}' == 'username' || '${fieldName}' == 'password'>
|
||||
return <#if messagesPerField.exists('username') || messagesPerField.exists('password')>true<#else>false</#if>;
|
||||
<#else>
|
||||
return <#if messagesPerField.exists('${fieldName}')>true<#else>false</#if>;
|
||||
</#if>
|
||||
<#recover>
|
||||
</#attempt>
|
||||
}
|
||||
@ -156,6 +174,10 @@ ${ftl_object_to_js_code_declaring_an_object(.data_model, [])?no_esc};
|
||||
) || (
|
||||
["masterAdminClient", "delegateForUpdate", "defaultRole"]?seq_contains(key) &&
|
||||
are_same_path(path, ["realm"])
|
||||
) || (
|
||||
"error.ftl" == pageId &&
|
||||
are_same_path(path, ["realm"]) &&
|
||||
!["name", "displayName", "displayNameHtml", "internationalizationEnabled", "registrationEmailAsUsername" ]?seq_contains(key)
|
||||
)
|
||||
>
|
||||
<#local out_seq += ["/*If you need '" + key + "' on " + pageId + ", please submit an issue to the Keycloakify repo*/"]>
|
||||
|
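The template change above makes the generated `kcContext.messagesPerField` treat `username` and `password` as a single group on the login page: an error raised for either field is reported for both, and `get("username")` returns a generic "Invalid username or password." message. Below is a fragment of a hypothetical custom login page consuming that behaviour; the local `MessagesPerField` type is inferred from the helpers above, not imported from Keycloakify.

```tsx
// Hypothetical page fragment relying on the grouped username/password errors.
type MessagesPerField = {
    existsError: (...fieldNames: string[]) => boolean;
    get: (fieldName: string) => string;
};

export function UsernameField(props: { messagesPerField: MessagesPerField }) {
    const { messagesPerField } = props;

    // With the grouping above, a wrong password also flags the username input.
    const hasCredentialError = messagesPerField.existsError("username", "password");

    return (
        <div className="form-group">
            <input name="username" aria-invalid={hasCredentialError} />
            {hasCredentialError && <span className="error">{messagesPerField.get("username")}</span>}
        </div>
    );
}
```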
@ -10,8 +10,11 @@ import type { BuildOptions } from "../BuildOptions";
|
||||
import { assert } from "tsafe/assert";
|
||||
import { Reflect } from "tsafe/Reflect";
|
||||
|
||||
// https://github.com/keycloak/keycloak/blob/main/services/src/main/java/org/keycloak/forms/login/freemarker/Templates.java
|
||||
export const pageIds = [
|
||||
export const themeTypes = ["login", "account"] as const;
|
||||
|
||||
export type ThemeType = (typeof themeTypes)[number];
|
||||
|
||||
export const loginThemePageIds = [
|
||||
"login.ftl",
|
||||
"login-username.ftl",
|
||||
"login-password.ftl",
|
||||
@ -35,6 +38,11 @@ export const pageIds = [
|
||||
"idp-review-user-profile.ftl"
|
||||
] as const;
|
||||
|
||||
export const accountThemePageIds = ["password.ftl"] as const;
|
||||
|
||||
export type PageId = (typeof loginThemePageIds)[number];
|
||||
export type AccountThemePageId = (typeof accountThemePageIds)[number];
|
||||
|
||||
export type BuildOptionsLike = BuildOptionsLike.Standalone | BuildOptionsLike.ExternalAssets;
|
||||
|
||||
export namespace BuildOptionsLike {
|
||||
@ -68,8 +76,6 @@ export namespace BuildOptionsLike {
|
||||
assert<typeof buildOptions extends BuildOptionsLike ? true : false>();
|
||||
}
|
||||
|
||||
export type PageId = typeof pageIds[number];
|
||||
|
||||
export function generateFtlFilesCodeFactory(params: {
|
||||
indexHtmlCode: string;
|
||||
//NOTE: Expected to be an empty object if external assets mode is enabled.
|
||||
|
@ -1,5 +1,6 @@
|
||||
import * as fs from "fs";
|
||||
import { join as pathJoin, dirname as pathDirname } from "path";
|
||||
import { themeTypes } from "./generateFtl/generateFtl";
|
||||
import { assert } from "tsafe/assert";
|
||||
import { Reflect } from "tsafe/Reflect";
|
||||
import type { BuildOptions } from "./BuildOptions";
|
||||
@ -7,6 +8,8 @@ import type { BuildOptions } from "./BuildOptions";
|
||||
export type BuildOptionsLike = {
|
||||
themeName: string;
|
||||
groupId: string;
|
||||
artifactId?: string;
|
||||
version: string;
|
||||
};
|
||||
|
||||
{
|
||||
@ -16,7 +19,6 @@ export type BuildOptionsLike = {
|
||||
}
|
||||
|
||||
export function generateJavaStackFiles(params: {
|
||||
version: string;
|
||||
keycloakThemeBuildingDirPath: string;
|
||||
doBundlesEmailTemplate: boolean;
|
||||
buildOptions: BuildOptionsLike;
|
||||
@ -24,8 +26,7 @@ export function generateJavaStackFiles(params: {
|
||||
jarFilePath: string;
|
||||
} {
|
||||
const {
|
||||
version,
|
||||
buildOptions: { groupId, themeName },
|
||||
buildOptions: { groupId, themeName, version, artifactId },
|
||||
keycloakThemeBuildingDirPath,
|
||||
doBundlesEmailTemplate
|
||||
} = params;
|
||||
@ -34,8 +35,6 @@ export function generateJavaStackFiles(params: {
|
||||
const { pomFileCode } = (function generatePomFileCode(): {
|
||||
pomFileCode: string;
|
||||
} {
|
||||
const artefactId = `${themeName}-keycloak-theme`;
|
||||
|
||||
const pomFileCode = [
|
||||
`<?xml version="1.0"?>`,
|
||||
`<project xmlns="http://maven.apache.org/POM/4.0.0"`,
|
||||
@ -43,9 +42,9 @@ export function generateJavaStackFiles(params: {
|
||||
` xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">`,
|
||||
` <modelVersion>4.0.0</modelVersion>`,
|
||||
` <groupId>${groupId}</groupId>`,
|
||||
` <artifactId>${artefactId}</artifactId>`,
|
||||
` <artifactId>${artifactId}</artifactId>`,
|
||||
` <version>${version}</version>`,
|
||||
` <name>${artefactId}</name>`,
|
||||
` <name>${artifactId}</name>`,
|
||||
` <description />`,
|
||||
`</project>`
|
||||
].join("\n");
|
||||
@ -71,7 +70,7 @@ export function generateJavaStackFiles(params: {
|
||||
"themes": [
|
||||
{
|
||||
"name": themeName,
|
||||
"types": ["login", ...(doBundlesEmailTemplate ? ["email"] : [])]
|
||||
"types": [...themeTypes, ...(doBundlesEmailTemplate ? ["email"] : [])]
|
||||
}
|
||||
]
|
||||
},
|
||||
@ -84,6 +83,6 @@ export function generateJavaStackFiles(params: {
|
||||
}
|
||||
|
||||
return {
|
||||
"jarFilePath": pathJoin(keycloakThemeBuildingDirPath, "target", `${themeName}-${version}.jar`)
|
||||
"jarFilePath": pathJoin(keycloakThemeBuildingDirPath, "target", `${artifactId}-${version}.jar`)
|
||||
};
|
||||
}
|
||||
|
@ -3,9 +3,8 @@ import * as fs from "fs";
|
||||
import { join as pathJoin, basename as pathBasename } from "path";
|
||||
import { replaceImportsFromStaticInJsCode } from "./replacers/replaceImportsFromStaticInJsCode";
|
||||
import { replaceImportsInCssCode } from "./replacers/replaceImportsInCssCode";
|
||||
import { generateFtlFilesCodeFactory, pageIds } from "./generateFtl";
|
||||
import { generateFtlFilesCodeFactory, loginThemePageIds, themeTypes, ThemeType } from "./generateFtl";
|
||||
import { downloadBuiltinKeycloakTheme } from "../download-builtin-keycloak-theme";
|
||||
import * as child_process from "child_process";
|
||||
import { mockTestingResourcesCommonPath, mockTestingResourcesPath, mockTestingSubDirOfPublicDirBasename } from "../mockTestingResourcesPath";
|
||||
import { isInside } from "../tools/isInside";
|
||||
import type { BuildOptions } from "./BuildOptions";
|
||||
@ -18,7 +17,8 @@ export type BuildOptionsLike = BuildOptionsLike.Standalone | BuildOptionsLike.Ex
|
||||
export namespace BuildOptionsLike {
|
||||
export type Common = {
|
||||
themeName: string;
|
||||
extraPages?: string[];
|
||||
extraLoginPages?: string[];
|
||||
extraAccountPages?: string[];
|
||||
extraThemeProperties?: string[];
|
||||
isSilent: boolean;
|
||||
};
|
||||
@ -53,68 +53,164 @@ export namespace BuildOptionsLike {
assert<typeof buildOptions extends BuildOptionsLike ? true : false>();
}
export function generateKeycloakThemeResources(params: {
export async function generateKeycloakThemeResources(params: {
reactAppBuildDirPath: string;
keycloakThemeBuildingDirPath: string;
keycloakThemeEmailDirPath: string;
keycloakVersion: string;
buildOptions: BuildOptionsLike;
}): { doBundlesEmailTemplate: boolean } {
}): Promise<{ doBundlesEmailTemplate: boolean }> {
const { reactAppBuildDirPath, keycloakThemeBuildingDirPath, keycloakThemeEmailDirPath, keycloakVersion, buildOptions } = params;
const logger = getLogger({ isSilent: buildOptions.isSilent });
const themeDirPath = pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources", "theme", buildOptions.themeName, "login");
const getThemeDirPath = (themeType: ThemeType | "email") =>
pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources", "theme", buildOptions.themeName, themeType);
let allCssGlobalsToDefine: Record<string, string> = {};
transformCodebase({
"destDirPath": buildOptions.isStandalone ? pathJoin(themeDirPath, "resources", "build") : reactAppBuildDirPath,
"srcDirPath": reactAppBuildDirPath,
"transformSourceCode": ({ filePath, sourceCode }) => {
//NOTE: Prevent cycles, excludes the folder we generated for debug in public/
if (
buildOptions.isStandalone &&
isInside({
"dirPath": pathJoin(reactAppBuildDirPath, mockTestingSubDirOfPublicDirBasename),
filePath
})
) {
return undefined;
}
let generateFtlFilesCode_glob: ReturnType<typeof generateFtlFilesCodeFactory>["generateFtlFilesCode"] | undefined = undefined;
if (/\.css?$/i.test(filePath)) {
if (!buildOptions.isStandalone) {
return undefined;
}
const { cssGlobalsToDefine, fixedCssCode } = replaceImportsInCssCode({
"cssCode": sourceCode.toString("utf8")
});
allCssGlobalsToDefine = {
...allCssGlobalsToDefine,
...cssGlobalsToDefine
};
return { "modifiedSourceCode": Buffer.from(fixedCssCode, "utf8") };
}
if (/\.js?$/i.test(filePath)) {
if (!buildOptions.isStandalone && buildOptions.areAppAndKeycloakServerSharingSameDomain) {
return undefined;
}
const { fixedJsCode } = replaceImportsFromStaticInJsCode({
"jsCode": sourceCode.toString("utf8"),
buildOptions
});
return { "modifiedSourceCode": Buffer.from(fixedJsCode, "utf8") };
}
return buildOptions.isStandalone ? { "modifiedSourceCode": sourceCode } : undefined;
for (const themeType of themeTypes) {
if (themeType === "account") {
continue;
}
});
const themeDirPath = getThemeDirPath(themeType);
copy_app_resources_to_theme_path: {
const isFirstPass = themeType.indexOf(themeType) === 0;
if (!isFirstPass && !buildOptions.isStandalone) {
break copy_app_resources_to_theme_path;
}
transformCodebase({
"destDirPath": buildOptions.isStandalone ? pathJoin(themeDirPath, "resources", "build") : reactAppBuildDirPath,
"srcDirPath": reactAppBuildDirPath,
"transformSourceCode": ({ filePath, sourceCode }) => {
//NOTE: Prevent cycles, excludes the folder we generated for debug in public/
if (
buildOptions.isStandalone &&
isInside({
"dirPath": pathJoin(reactAppBuildDirPath, mockTestingSubDirOfPublicDirBasename),
filePath
})
) {
return undefined;
}
if (/\.css?$/i.test(filePath)) {
if (!buildOptions.isStandalone) {
return undefined;
}
const { cssGlobalsToDefine, fixedCssCode } = replaceImportsInCssCode({
"cssCode": sourceCode.toString("utf8")
});
register_css_variables: {
if (!isFirstPass) {
break register_css_variables;
}
allCssGlobalsToDefine = {
...allCssGlobalsToDefine,
...cssGlobalsToDefine
};
}
return { "modifiedSourceCode": Buffer.from(fixedCssCode, "utf8") };
}
if (/\.js?$/i.test(filePath)) {
if (!buildOptions.isStandalone && buildOptions.areAppAndKeycloakServerSharingSameDomain) {
return undefined;
}
const { fixedJsCode } = replaceImportsFromStaticInJsCode({
"jsCode": sourceCode.toString("utf8"),
buildOptions
});
return { "modifiedSourceCode": Buffer.from(fixedJsCode, "utf8") };
}
return buildOptions.isStandalone ? { "modifiedSourceCode": sourceCode } : undefined;
}
});
}
const generateFtlFilesCode = (() => {
if (generateFtlFilesCode_glob !== undefined) {
return generateFtlFilesCode_glob;
}
const { generateFtlFilesCode } = generateFtlFilesCodeFactory({
"indexHtmlCode": fs.readFileSync(pathJoin(reactAppBuildDirPath, "index.html")).toString("utf8"),
"cssGlobalsToDefine": allCssGlobalsToDefine,
"buildOptions": buildOptions
});
return generateFtlFilesCode;
})();
[...loginThemePageIds, ...(buildOptions.extraLoginPages ?? [])].forEach(pageId => {
const { ftlCode } = generateFtlFilesCode({ pageId });
fs.mkdirSync(themeDirPath, { "recursive": true });
fs.writeFileSync(pathJoin(themeDirPath, pageId), Buffer.from(ftlCode, "utf8"));
});
{
const tmpDirPath = pathJoin(themeDirPath, "..", "tmp_xxKdLpdIdLd");
await downloadBuiltinKeycloakTheme({
keycloakVersion,
"destDirPath": tmpDirPath,
isSilent: buildOptions.isSilent
});
const themeResourcesDirPath = pathJoin(themeDirPath, "resources");
transformCodebase({
"srcDirPath": pathJoin(tmpDirPath, "keycloak", "login", "resources"),
"destDirPath": themeResourcesDirPath
});
const reactAppPublicDirPath = pathJoin(reactAppBuildDirPath, "..", "public");
transformCodebase({
"srcDirPath": pathJoin(tmpDirPath, "keycloak", "common", "resources"),
"destDirPath": pathJoin(themeResourcesDirPath, pathBasename(mockTestingResourcesCommonPath))
});
transformCodebase({
"srcDirPath": themeResourcesDirPath,
"destDirPath": pathJoin(reactAppPublicDirPath, mockTestingResourcesPath)
});
const keycloakResourcesWithinPublicDirPath = pathJoin(reactAppPublicDirPath, mockTestingSubDirOfPublicDirBasename);
fs.writeFileSync(
pathJoin(keycloakResourcesWithinPublicDirPath, "README.txt"),
Buffer.from(
["This is just a test folder that helps develop", "the login and register page without having to run a Keycloak container"].join(
" "
)
)
);
fs.writeFileSync(pathJoin(keycloakResourcesWithinPublicDirPath, ".gitignore"), Buffer.from("*", "utf8"));
fs.rmSync(tmpDirPath, { recursive: true, force: true });
}
fs.writeFileSync(
pathJoin(themeDirPath, "theme.properties"),
Buffer.from(["parent=keycloak", ...(buildOptions.extraThemeProperties ?? [])].join("\n\n"), "utf8")
);
}
let doBundlesEmailTemplate: boolean;
@ -134,70 +230,9 @@ export function generateKeycloakThemeResources(params: {
transformCodebase({
"srcDirPath": keycloakThemeEmailDirPath,
"destDirPath": pathJoin(themeDirPath, "..", "email")
"destDirPath": getThemeDirPath("email")
});
}
const { generateFtlFilesCode } = generateFtlFilesCodeFactory({
"indexHtmlCode": fs.readFileSync(pathJoin(reactAppBuildDirPath, "index.html")).toString("utf8"),
"cssGlobalsToDefine": allCssGlobalsToDefine,
"buildOptions": buildOptions
});
[...pageIds, ...(buildOptions.extraPages ?? [])].forEach(pageId => {
const { ftlCode } = generateFtlFilesCode({ pageId });
fs.mkdirSync(themeDirPath, { "recursive": true });
fs.writeFileSync(pathJoin(themeDirPath, pageId), Buffer.from(ftlCode, "utf8"));
});
{
const tmpDirPath = pathJoin(themeDirPath, "..", "tmp_xxKdLpdIdLd");
downloadBuiltinKeycloakTheme({
keycloakVersion,
"destDirPath": tmpDirPath,
isSilent: buildOptions.isSilent
});
const themeResourcesDirPath = pathJoin(themeDirPath, "resources");
transformCodebase({
"srcDirPath": pathJoin(tmpDirPath, "keycloak", "login", "resources"),
"destDirPath": themeResourcesDirPath
});
const reactAppPublicDirPath = pathJoin(reactAppBuildDirPath, "..", "public");
transformCodebase({
"srcDirPath": pathJoin(tmpDirPath, "keycloak", "common", "resources"),
"destDirPath": pathJoin(themeResourcesDirPath, pathBasename(mockTestingResourcesCommonPath))
});
transformCodebase({
"srcDirPath": themeResourcesDirPath,
"destDirPath": pathJoin(reactAppPublicDirPath, mockTestingResourcesPath)
});
const keycloakResourcesWithinPublicDirPath = pathJoin(reactAppPublicDirPath, mockTestingSubDirOfPublicDirBasename);
fs.writeFileSync(
pathJoin(keycloakResourcesWithinPublicDirPath, "README.txt"),
Buffer.from(
["This is just a test folder that helps develop", "the login and register page without having to run a Keycloak container"].join(" ")
)
);
fs.writeFileSync(pathJoin(keycloakResourcesWithinPublicDirPath, ".gitignore"), Buffer.from("*", "utf8"));
child_process.execSync(`rm -r ${tmpDirPath}`);
}
fs.writeFileSync(
pathJoin(themeDirPath, "theme.properties"),
Buffer.from(["parent=keycloak", ...(buildOptions.extraThemeProperties ?? [])].join("\n\n"), "utf8")
);
return { doBundlesEmailTemplate };
}
@ -30,15 +30,18 @@ export function generateStartKeycloakTestingContainer(params: {
buildOptions: { themeName }
} = params;
const keycloakThemePath = pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources", "theme", themeName).replace(/\\/g, "/");
fs.writeFileSync(
pathJoin(keycloakThemeBuildingDirPath, generateStartKeycloakTestingContainer.basename),
Buffer.from(
[
"#!/bin/bash",
"#!/usr/bin/env bash",
"",
`docker rm ${containerName} || true`,
"",
`cd ${keycloakThemeBuildingDirPath}`,
`cd "${keycloakThemeBuildingDirPath.replace(/\\/g, "/")}"`,
"",
"docker run \\",
" -p 8080:8080 \\",
@ -46,14 +49,7 @@ export function generateStartKeycloakTestingContainer(params: {
" -e KEYCLOAK_ADMIN=admin \\",
" -e KEYCLOAK_ADMIN_PASSWORD=admin \\",
" -e JAVA_OPTS=-Dkeycloak.profile=preview \\",
` -v ${pathJoin(
keycloakThemeBuildingDirPath,
"src",
"main",
"resources",
"theme",
themeName
)}:/opt/keycloak/themes/${themeName}:rw \\`,
` -v "${keycloakThemePath}":"/opt/keycloak/themes/${themeName}":rw \\`,
` -it quay.io/keycloak/keycloak:${keycloakVersion} \\`,
` start-dev`,
""
@ -4,5 +4,5 @@ export * from "./keycloakify";
import { main } from "./keycloakify";
if (require.main === module) {
main();
main().catch(e => console.error(e));
}
@ -7,13 +7,16 @@ import * as fs from "fs";
import { readBuildOptions } from "./BuildOptions";
import { getLogger } from "../tools/logger";
import { getCliOptions } from "../tools/cliOptions";
import jar from "../tools/jar";
import { assert } from "tsafe/assert";
import type { Equals } from "tsafe";
const reactProjectDirPath = process.cwd();
export const keycloakThemeBuildingDirPath = pathJoin(reactProjectDirPath, "build_keycloak");
export const keycloakThemeEmailDirPath = pathJoin(keycloakThemeBuildingDirPath, "..", "keycloak_email");
export function main() {
export async function main() {
const { isSilent, hasExternalAssets } = getCliOptions(process.argv.slice(2));
const logger = getLogger({ isSilent });
logger.log("🔏 Building the keycloak theme...⌚");
@ -33,7 +36,7 @@ export function main() {
"isSilent": isSilent
});
const { doBundlesEmailTemplate } = generateKeycloakThemeResources({
const { doBundlesEmailTemplate } = await generateKeycloakThemeResources({
keycloakThemeBuildingDirPath,
keycloakThemeEmailDirPath,
"reactAppBuildDirPath": pathJoin(reactProjectDirPath, "build"),
@ -45,18 +48,35 @@ export function main() {
});
const { jarFilePath } = generateJavaStackFiles({
"version": buildOptions.version,
keycloakThemeBuildingDirPath,
doBundlesEmailTemplate,
buildOptions
});
child_process.execSync("mvn package", {
"cwd": keycloakThemeBuildingDirPath
});
switch (buildOptions.bundler) {
case "none":
logger.log("😱 Skipping bundling step, there will be no jar");
break;
case "keycloakify":
logger.log("🫶 Let keycloakify do its thang");
await jar({
"rootPath": pathJoin(keycloakThemeBuildingDirPath, "src", "main", "resources"),
"version": buildOptions.version,
"groupId": buildOptions.groupId,
"artifactId": buildOptions.artifactId,
"targetPath": jarFilePath
});
break;
case "mvn":
logger.log("🫙 Run maven to deliver a jar");
child_process.execSync("mvn package", { "cwd": keycloakThemeBuildingDirPath });
break;
default:
assert<Equals<typeof buildOptions.bundler, never>>(false);
}
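Side note (not part of the diff): the default branch above uses tsafe for compile-time exhaustiveness checking. A minimal standalone sketch of the same pattern, with an illustrative Bundler union:

    import { assert } from "tsafe/assert";
    import type { Equals } from "tsafe";

    type Bundler = "none" | "keycloakify" | "mvn"; // illustrative copy of the union

    function logBundler(bundler: Bundler): void {
        switch (bundler) {
            case "none":
                console.log("no jar will be produced");
                break;
            case "keycloakify":
                console.log("jar built by keycloakify itself");
                break;
            case "mvn":
                console.log("jar built by mvn package");
                break;
            default:
                // If a new Bundler value is ever added, `bundler` is no longer `never`
                // here and this line stops compiling.
                assert<Equals<typeof bundler, never>>(false);
        }
    }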
//We want, however, to test in a container running the latest Keycloak version
const containerKeycloakVersion = "19.0.1";
// We want, however, to test in a container running the latest Keycloak version
const containerKeycloakVersion = "20.0.1";
generateStartKeycloakTestingContainer({
keycloakThemeBuildingDirPath,
@ -1,126 +0,0 @@
import { execSync } from "child_process";
import { join as pathJoin, relative as pathRelative } from "path";
import { exclude } from "tsafe/exclude";
import * as fs from "fs";
const keycloakifyDirPath = pathJoin(__dirname, "..", "..");
fs.writeFileSync(
pathJoin(keycloakifyDirPath, "dist", "package.json"),
Buffer.from(
JSON.stringify(
(() => {
const packageJsonParsed = JSON.parse(fs.readFileSync(pathJoin(keycloakifyDirPath, "package.json")).toString("utf8"));
return {
...packageJsonParsed,
"main": packageJsonParsed["main"].replace(/^dist\//, ""),
"types": packageJsonParsed["types"].replace(/^dist\//, "")
};
})(),
null,
2
),
"utf8"
)
);
const commonThirdPartyDeps = (() => {
const namespaceModuleNames = ["@emotion"];
const standaloneModuleNames = ["react", "@types/react", "powerhooks", "tss-react", "evt"];
return [
...namespaceModuleNames
.map(namespaceModuleName =>
fs
.readdirSync(pathJoin(keycloakifyDirPath, "node_modules", namespaceModuleName))
.map(submoduleName => `${namespaceModuleName}/${submoduleName}`)
)
.reduce((prev, curr) => [...prev, ...curr], []),
...standaloneModuleNames
];
})();
const yarnHomeDirPath = pathJoin(keycloakifyDirPath, ".yarn_home");
execSync(["rm -rf", "mkdir"].map(cmd => `${cmd} ${yarnHomeDirPath}`).join(" && "));
const execYarnLink = (params: { targetModuleName?: string; cwd: string }) => {
const { targetModuleName, cwd } = params;
const cmd = ["yarn", "link", ...(targetModuleName !== undefined ? [targetModuleName] : [])].join(" ");
console.log(`$ cd ${pathRelative(keycloakifyDirPath, cwd) || "."} && ${cmd}`);
execSync(cmd, {
cwd,
"env": {
...process.env,
"HOME": yarnHomeDirPath
}
});
};
const testAppPaths = (() => {
const arg = process.argv[2];
const testAppNames = arg !== undefined ? [arg] : ["keycloakify-starter", "keycloakify-advanced-starter"];
return testAppNames
.map(testAppName => {
const testAppPath = pathJoin(keycloakifyDirPath, "..", testAppName);
if (fs.existsSync(testAppPath)) {
return testAppPath;
}
console.warn(`Skipping ${testAppName} since it cant be found here: ${testAppPath}`);
return undefined;
})
.filter(exclude(undefined));
})();
if (testAppPaths.length === 0) {
console.error("No test app to link into!");
process.exit(-1);
}
testAppPaths.forEach(testAppPath => execSync("yarn install", { "cwd": testAppPath }));
console.log("=== Linking common dependencies ===");
const total = commonThirdPartyDeps.length;
let current = 0;
commonThirdPartyDeps.forEach(commonThirdPartyDep => {
current++;
console.log(`${current}/${total} ${commonThirdPartyDep}`);
const localInstallPath = pathJoin(
...[keycloakifyDirPath, "node_modules", ...(commonThirdPartyDep.startsWith("@") ? commonThirdPartyDep.split("/") : [commonThirdPartyDep])]
);
execYarnLink({ "cwd": localInstallPath });
});
commonThirdPartyDeps.forEach(commonThirdPartyDep =>
testAppPaths.forEach(testAppPath =>
execYarnLink({
"cwd": testAppPath,
"targetModuleName": commonThirdPartyDep
})
)
);
console.log("=== Linking in house dependencies ===");
execYarnLink({ "cwd": pathJoin(keycloakifyDirPath, "dist") });
testAppPaths.forEach(testAppPath =>
execYarnLink({
"cwd": testAppPath,
"targetModuleName": "keycloakify"
})
);
54 src/bin/tools/crc32.ts Normal file
@ -0,0 +1,54 @@
import { Readable } from "stream";
const crc32tab = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
];
/**
*
* @param input either a byte stream, a string or a buffer, you want to have the checksum for
* @returns a promise for a checksum (uint32)
*/
export function crc32(input: Readable | String | Buffer): Promise<number> {
if (typeof input === "string") {
let crc = ~0;
for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input.charCodeAt(i)) & 0xff];
return Promise.resolve((crc ^ -1) >>> 0);
} else if (input instanceof Buffer) {
let crc = ~0;
for (let i = 0; i < input.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ input[i]) & 0xff];
return Promise.resolve((crc ^ -1) >>> 0);
} else if (input instanceof Readable) {
return new Promise<number>((resolve, reject) => {
let crc = ~0;
input.on("end", () => resolve((crc ^ -1) >>> 0));
input.on("error", e => reject(e));
input.on("data", (chunk: Buffer) => {
for (let i = 0; i < chunk.length; i++) crc = (crc >>> 8) ^ crc32tab[(crc ^ chunk[i]) & 0xff];
});
});
} else {
throw new Error("Unsupported input " + typeof input);
}
}
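Usage sketch for the helper above (not part of the diff; the file path is hypothetical):

    import { createReadStream } from "fs";
    import { crc32 } from "./crc32";

    (async () => {
        console.log(await crc32("hello")); // checksum of a string
        console.log(await crc32(Buffer.from("hello", "utf8"))); // checksum of a buffer
        console.log(await crc32(createReadStream("some-file.bin"))); // checksum of a stream
    })().catch(e => console.error(e));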
61 src/bin/tools/deflate.ts Normal file
@ -0,0 +1,61 @@
import { PassThrough, Readable, TransformCallback, Writable } from "stream";
import { pipeline } from "stream/promises";
import { deflateRaw as deflateRawCb, createDeflateRaw } from "zlib";
import { promisify } from "util";
import { crc32 } from "./crc32";
import tee from "./tee";
const deflateRaw = promisify(deflateRawCb);
/**
* A stream transformer that records the number of bytes
* passed in its `size` property.
*/
class ByteCounter extends PassThrough {
size: number = 0;
_transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback) {
if ("length" in chunk) this.size += chunk.length;
super._transform(chunk, encoding, callback);
}
}
/**
* @param data buffer containing the data to be compressed
* @returns a buffer containing the compressed/deflated data and the crc32 checksum
* of the source data
*/
export async function deflateBuffer(data: Buffer) {
const [deflated, checksum] = await Promise.all([deflateRaw(data), crc32(data)]);
return { deflated, crc32: checksum };
}
/**
* @param input a byte stream, containing data to be compressed
* @param sink a method that will accept chunks of compressed data; We don't pass
* a writable here, since we don't want the writablestream to be closed after
* a single file
* @returns a promise, which will resolve with the crc32 checksum and the
* compressed size
*/
export async function deflateStream(input: Readable, sink: (chunk: Buffer) => void) {
const deflateWriter = new Writable({
write(chunk, _, callback) {
sink(chunk);
callback();
}
});
// tee the input stream, so we can compress and calc crc32 in parallel
const [rs1, rs2] = tee(input);
const byteCounter = new ByteCounter();
const [_, crc] = await Promise.all([
// pipe input into zip compressor, count the bytes
// returned and pass compressed data to the sink
pipeline(rs1, createDeflateRaw(), byteCounter, deflateWriter),
// calc checksum
crc32(rs2)
]);
return { crc32: crc, compressedSize: byteCounter.size };
}
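Usage sketch (not part of the diff; path and data are made up):

    import { createReadStream } from "fs";
    import { deflateBuffer, deflateStream } from "./deflate";

    (async () => {
        // In-memory data: deflated bytes plus the crc32 of the original data.
        const { deflated, crc32 } = await deflateBuffer(Buffer.from("hello world", "utf8"));
        console.log(deflated.length, crc32);

        // Streamed data: compressed chunks go to the sink, checksum and size come back.
        const chunks: Buffer[] = [];
        const { compressedSize } = await deflateStream(createReadStream("big-file.bin"), chunk => chunks.push(chunk));
        console.log(compressedSize === Buffer.concat(chunks).length); // true
    })().catch(e => console.error(e));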
@ -1,81 +1,277 @@
import { basename as pathBasename, join as pathJoin } from "path";
import { execSync } from "child_process";
import * as fs from "fs";
import { dirname as pathDirname, basename as pathBasename, join as pathJoin, join } from "path";
import { createReadStream, createWriteStream } from "fs";
import { stat, mkdir, unlink, writeFile } from "fs/promises";
import { transformCodebase } from "./transformCodebase";
import { rm, rm_rf } from "./rm";
import * as crypto from "crypto";
import { createHash } from "crypto";
import fetch from "make-fetch-happen";
import { createInflateRaw } from "zlib";
import type { Readable } from "stream";
import { homedir } from "os";
import { FetchOptions } from "make-fetch-happen";
import { exec as execCallback } from "child_process";
import { promisify } from "util";
/** assert url ends with .zip */
export function downloadAndUnzip(params: {
const exec = promisify(execCallback);
function hash(s: string) {
return createHash("sha256").update(s).digest("hex");
}
async function maybeStat(path: string) {
try {
return await stat(path);
} catch (error) {
if ((error as Error & { code: string }).code === "ENOENT") return undefined;
throw error;
}
}
/**
* Get an npm configuration value as string, undefined if not set.
*
* @param key
* @returns string or undefined
*/
async function getNmpConfig(key: string): Promise<string | undefined> {
const { stdout } = await exec(`npm config get ${key}`);
const value = stdout.trim();
return value && value !== "null" ? value : undefined;
}
/**
* Get proxy configuration from npm config files. Note that we don't care about
* proxy config in env vars, because make-fetch-happen will do that for us.
*
* @returns proxy configuration
*/
async function getNpmProxyConfig(): Promise<Pick<FetchOptions, "proxy" | "noProxy">> {
const proxy = (await getNmpConfig("https-proxy")) ?? (await getNmpConfig("proxy"));
const noProxy = (await getNmpConfig("noproxy")) ?? (await getNmpConfig("no-proxy"));
return { proxy, noProxy };
}
/**
* Download a file from `url` to `dir`. Will try to avoid downloading existing
* files by using the cache directory ~/.keycloakify/cache
*
* If the target directory does not exist, it will be created.
*
* If the target file exists, it will be overwritten.
*
* We use make-fetch-happen's internal file cache here, so we don't need to
* worry about redownloading the same file over and over. Unfortunately, that
* cache does not have a single file per entry, but bundles and indexes them,
* so we still need to write the contents to the target directory (possibly
* over and over), cause the current unzip implementation wants random access.
*
* @param url download url
* @param dir target directory
* @param filename target filename
* @returns promise for the full path of the downloaded file
*/
async function download(url: string, dir: string, filename: string): Promise<string> {
const proxyOpts = await getNpmProxyConfig();
const cacheRoot = process.env.XDG_CACHE_HOME ?? homedir();
const cachePath = join(cacheRoot, ".keycloakify/cache");
const opts: FetchOptions = { cachePath, ...proxyOpts };
const response = await fetch(url, opts);
const filepath = pathJoin(dir, filename);
await mkdir(dir, { recursive: true });
await writeFile(filepath, response.body);
return filepath;
}
/**
* @typedef
* @type MultiError = Error & { cause: Error[] }
*/
/**
* Extract the archive `zipFile` into the directory `dir`. If `archiveDir` is given,
* only that directory will be extracted, stripping the given path components.
*
* If dir does not exist, it will be created.
*
* If any archive file exists, it will be overwritten.
*
* Will unzip using all available nodejs worker threads.
*
* Will try to clean up extracted files on failure.
*
* If unpacking fails, will either throw an regular error, or
* possibly an `MultiError`, which contains a `cause` field with
* a number of root cause errors.
*
* Warning this method is not optimized for continuous reading of the zip
* archive, but is a trade-off between simplicity and allowing extraction
* of a single directory from the archive.
*
* @param zipFile the file to unzip
* @param dir the target directory
* @param archiveDir if given, unpack only files from this archive directory
* @throws {MultiError} error
* @returns Promise for a list of full file paths pointing to actually extracted files
*/
async function unzip(zipFile: string, dir: string, archiveDir?: string): Promise<string[]> {
await mkdir(dir, { recursive: true });
const promises: Promise<string>[] = [];
// Iterate over all files in the zip, skip files which are not in archiveDir,
// if given.
for await (const record of iterateZipArchive(zipFile)) {
const { path: recordPath, createReadStream: createRecordReadStream } = record;
const filePath = pathJoin(dir, recordPath);
const parent = pathDirname(filePath);
if (archiveDir && !recordPath.startsWith(archiveDir)) continue;
promises.push(
new Promise<string>(async (resolve, reject) => {
await mkdir(parent, { recursive: true });
// Pull the file out of the archive, write it to the target directory
const input = createRecordReadStream();
const output = createWriteStream(filePath);
output.setMaxListeners(Infinity);
output.on("error", e => reject(Object.assign(e, { filePath })));
output.on("finish", () => resolve(filePath));
input.pipe(output);
})
);
}
// Wait until _all_ files are either extracted or failed
const results = await Promise.allSettled(promises);
const success = results.filter(r => r.status === "fulfilled").map(r => (r as PromiseFulfilledResult<string>).value);
const failure = results.filter(r => r.status === "rejected").map(r => (r as PromiseRejectedResult).reason);
// If any extraction failed, try to clean up, then throw a MultiError,
// which has a `cause` field, containing a list of root cause errors.
if (failure.length) {
await Promise.all(success.map(path => unlink(path)));
await Promise.all(failure.map(e => e && e.path && unlink(e.path as string)));
const e = new Error("Failed to extract: " + failure.map(e => e.message).join(";"));
(e as any).cause = failure;
throw e;
}
return success;
}
/**
*
* @param file file to read
* @param start first byte to read
* @param end last byte to read
* @returns Promise of a buffer of read bytes
*/
async function readFileChunk(file: string, start: number, end: number): Promise<Buffer> {
const chunks: Buffer[] = [];
return new Promise((resolve, reject) => {
const stream = createReadStream(file, { start, end });
stream.setMaxListeners(Infinity);
stream.on("error", e => reject(e));
stream.on("end", () => resolve(Buffer.concat(chunks)));
stream.on("data", chunk => chunks.push(chunk as Buffer));
});
}
type ZipRecord = {
path: string;
createReadStream: () => Readable;
compressionMethod: "deflate" | undefined;
};
type ZipRecordGenerator = AsyncGenerator<ZipRecord, void, unknown>;
/**
* Iterate over all records of a zipfile, and yield a ZipRecord.
* Use `record.createReadStream()` to actually read the file.
*
* Warning this method will only work with single-disk zip files.
* Warning this method may fail if the zip archive has an crazy amount
* of files and the central directory is not fully contained within the
* last 65k bytes of the zip file.
*
* @param zipFile
* @returns AsyncGenerator which will yield ZipRecords
*/
async function* iterateZipArchive(zipFile: string): ZipRecordGenerator {
// Need to know zip file size before we can do anything else
const { size } = await stat(zipFile);
const chunkSize = 65_535 + 22 + 1; // max comment size + end header size + wiggle
// Read last ~65k bytes. Zip files have an comment up to 65_535 bytes at the very end,
// before that comes the zip central directory end header.
let chunk = await readFileChunk(zipFile, size - chunkSize, size);
const unread = size - chunk.length;
let i = chunk.length - 4;
let found = false;
// Find central directory end header, reading backwards from the end
while (!found && i-- > 0) if (chunk[i] === 0x50 && chunk.readUInt32LE(i) === 0x06054b50) found = true;
if (!found) throw new Error("Not a zip file");
// This method will fail on a multi-disk zip, so bail early.
if (chunk.readUInt16LE(i + 4) !== 0) throw new Error("Multi-disk zip not supported");
let nFiles = chunk.readUint16LE(i + 10);
// Get the position of the central directory
const directorySize = chunk.readUint32LE(i + 12);
const directoryOffset = chunk.readUint32LE(i + 16);
if (directoryOffset === 0xffff_ffff) throw new Error("zip64 not supported");
if (directoryOffset > size) throw new Error(`Central directory offset ${directoryOffset} is outside file`);
i = directoryOffset - unread;
// If i < 0, it means that the central directory is not contained within `chunk`
if (i < 0) {
chunk = await readFileChunk(zipFile, directoryOffset, directoryOffset + directorySize);
i = 0;
}
// Now iterate the central directory records, yield an `ZipRecord` for every entry
while (nFiles-- > 0) {
// Check for marker bytes
if (chunk.readUInt32LE(i) !== 0x02014b50) throw new Error("No central directory record at position " + (unread + i));
const compressionMethod = ({ 8: "deflate" } as const)[chunk.readUint16LE(i + 10)];
const compressedFileSize = chunk.readUint32LE(i + 20);
const filenameLength = chunk.readUint16LE(i + 28);
const extraLength = chunk.readUint16LE(i + 30);
const commentLength = chunk.readUint16LE(i + 32);
// Start of the actual content byte stream is after the 'local' record header,
// which is 30 bytes long plus filename and extra field
const start = chunk.readUint32LE(i + 42) + 30 + filenameLength + extraLength;
const end = start + compressedFileSize;
const filename = chunk.slice(i + 46, i + 46 + filenameLength).toString("utf-8");
const createRecordReadStream = () => {
const input = createReadStream(zipFile, { start, end });
if (compressionMethod === "deflate") {
const inflate = createInflateRaw();
input.pipe(inflate);
return inflate;
}
return input;
};
if (end > start) yield { path: filename, createReadStream: createRecordReadStream, compressionMethod };
// advance pointer to next central directory entry
i += 46 + filenameLength + extraLength + commentLength;
}
}
export async function downloadAndUnzip({
url,
destDirPath,
pathOfDirToExtractInArchive,
cacheDirPath
}: {
isSilent: boolean;
url: string;
destDirPath: string;
pathOfDirToExtractInArchive?: string;
cacheDirPath: string;
}) {
const { url, destDirPath, pathOfDirToExtractInArchive, cacheDirPath } = params;
const downloadHash = hash(JSON.stringify({ url, pathOfDirToExtractInArchive })).substring(0, 15);
const extractDirPath = pathJoin(cacheDirPath, `_${downloadHash}`);
const extractDirPath = pathJoin(
cacheDirPath,
`_${crypto.createHash("sha256").update(JSON.stringify({ url, pathOfDirToExtractInArchive })).digest("hex").substring(0, 15)}`
);
const filename = pathBasename(url);
const zipFilepath = await download(url, cacheDirPath, filename);
const zipMtime = (await stat(zipFilepath)).mtimeMs;
const unzipMtime = (await maybeStat(extractDirPath))?.mtimeMs;
fs.mkdirSync(cacheDirPath, { "recursive": true });
if (!unzipMtime || zipMtime > unzipMtime) await unzip(zipFilepath, extractDirPath, pathOfDirToExtractInArchive);
const { readIsSuccessByExtractDirPath, writeIsSuccessByExtractDirPath } = (() => {
const filePath = pathJoin(cacheDirPath, "isSuccessByExtractDirPath.json");
type IsSuccessByExtractDirPath = Record<string, boolean | undefined>;
function readIsSuccessByExtractDirPath(): IsSuccessByExtractDirPath {
if (!fs.existsSync(filePath)) {
return {};
}
return JSON.parse(fs.readFileSync(filePath).toString("utf8"));
}
function writeIsSuccessByExtractDirPath(isSuccessByExtractDirPath: IsSuccessByExtractDirPath): void {
fs.writeFileSync(filePath, Buffer.from(JSON.stringify(isSuccessByExtractDirPath, null, 2), "utf8"));
}
return { readIsSuccessByExtractDirPath, writeIsSuccessByExtractDirPath };
})();
downloadAndUnzip: {
const isSuccessByExtractDirPath = readIsSuccessByExtractDirPath();
if (isSuccessByExtractDirPath[extractDirPath]) {
break downloadAndUnzip;
}
writeIsSuccessByExtractDirPath({
...isSuccessByExtractDirPath,
[extractDirPath]: false
});
rm_rf(extractDirPath);
fs.mkdirSync(extractDirPath);
const zipFileBasename = pathBasename(url);
execSync(`curl -L ${url} -o ${zipFileBasename} ${params.isSilent ? "-s" : ""}`, { "cwd": extractDirPath });
execSync(`unzip -o ${zipFileBasename}${pathOfDirToExtractInArchive === undefined ? "" : ` "${pathOfDirToExtractInArchive}/**/*"`}`, {
"cwd": extractDirPath
});
rm(zipFileBasename, { "cwd": extractDirPath });
writeIsSuccessByExtractDirPath({
...isSuccessByExtractDirPath,
[extractDirPath]: true
});
}
transformCodebase({
"srcDirPath": pathOfDirToExtractInArchive === undefined ? extractDirPath : pathJoin(extractDirPath, pathOfDirToExtractInArchive),
destDirPath
});
const srcDirPath = pathOfDirToExtractInArchive === undefined ? extractDirPath : pathJoin(extractDirPath, pathOfDirToExtractInArchive);
transformCodebase({ srcDirPath, destDirPath });
}
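Usage sketch for the new signature (not part of the diff; URL and directory names are made up):

    // inside an async function
    await downloadAndUnzip({
        url: "https://example.com/keycloak-theme.zip", // hypothetical archive URL
        destDirPath: "build_keycloak/src/main/resources/theme", // hypothetical target directory
        pathOfDirToExtractInArchive: "keycloak/login", // only this directory is extracted
        cacheDirPath: "node_modules/.cache/keycloakify", // hypothetical cache location
        isSilent: true
    });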
@ -1,10 +1,17 @@
import { getProjectRoot } from "./getProjectRoot";
import { join as pathJoin } from "path";
import * as child_process from "child_process";
import * as fs from "fs";
import { constants } from "fs";
import { chmod, stat } from "fs/promises";
Object.entries<string>(JSON.parse(fs.readFileSync(pathJoin(getProjectRoot(), "package.json")).toString("utf8"))["bin"]).forEach(([, scriptPath]) =>
child_process.execSync(`chmod +x ${scriptPath}`, {
"cwd": getProjectRoot()
})
);
(async () => {
const { bin } = await import(pathJoin(getProjectRoot(), "package.json"));
const promises = Object.values<string>(bin).map(async scriptPath => {
const fullPath = pathJoin(getProjectRoot(), scriptPath);
const oldMode = (await stat(fullPath)).mode;
const newMode = oldMode | constants.S_IXUSR | constants.S_IXGRP | constants.S_IXOTH;
await chmod(fullPath, newMode);
});
await Promise.all(promises);
})();
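Worked example of the mode arithmetic above (not part of the diff): OR-ing in the three execute bits turns a typical 0o644 file into 0o755.

    import { constants } from "fs";

    const oldMode = 0o644; // rw-r--r--
    const newMode = oldMode | constants.S_IXUSR | constants.S_IXGRP | constants.S_IXOTH;
    console.log(newMode.toString(8)); // "755", i.e. rwxr-xr-x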
102 src/bin/tools/jar.ts Normal file
@ -0,0 +1,102 @@
import { Readable, Transform } from "stream";
import { dirname, relative, sep } from "path";
import { createWriteStream } from "fs";
import walk from "./walk";
import type { ZipSource } from "./zip";
import zip from "./zip";
import { mkdir } from "fs/promises";
/** Trim leading whitespace from every line */
const trimIndent = (s: string) => s.replace(/(\n)\s+/g, "$1");
type JarArgs = {
rootPath: string;
targetPath: string;
groupId: string;
artifactId: string;
version: string;
};
/**
* Create a jar archive, using the resources found at `rootPath` (a directory) and write the
* archive to `targetPath` (a file). Use `groupId`, `artifactId` and `version` to define
* the contents of the pom.properties file which is going to be added to the archive.
*/
export default async function jar({ groupId, artifactId, version, rootPath, targetPath }: JarArgs) {
const manifest: ZipSource = {
path: "META-INF/MANIFEST.MF",
data: Buffer.from(
trimIndent(
`Manifest-Version: 1.0
Archiver-Version: Plexus Archiver
Created-By: Keycloakify
Built-By: unknown
Build-Jdk: 19.0.0`
)
)
};
const pomProps: ZipSource = {
path: `META-INF/maven/${groupId}/${artifactId}/pom.properties`,
data: Buffer.from(
trimIndent(
`# Generated by keycloakify
# ${new Date()}
artifactId=${artifactId}
groupId=${groupId}
version=${version}`
)
)
};
/**
* Convert every path entry to a ZipSource record, and when all records are
* processed, append records for MANIFEST.mf and pom.properties
*/
const pathToRecord = () =>
new Transform({
objectMode: true,
transform: function (fsPath, _, cb) {
const path = relative(rootPath, fsPath).split(sep).join("/");
this.push({ path, fsPath });
cb();
},
final: function () {
this.push(manifest);
this.push(pomProps);
this.push(null);
}
});
await mkdir(dirname(targetPath), { recursive: true });
// Create an async pipeline, wait until everything is fully processed
await new Promise<void>((resolve, reject) => {
// walk all files in `rootPath` recursively
Readable.from(walk(rootPath))
// transform every path into a ZipSource object
.pipe(pathToRecord())
// let the zip lib convert all ZipSource objects into a byte stream
.pipe(zip())
// write that byte stream to targetPath
.pipe(createWriteStream(targetPath, { encoding: "binary" }))
.on("finish", () => resolve())
.on("error", e => reject(e));
});
}
/**
* Standalone usage, call e.g. `ts-node jar.ts dirWithSources some-jar.jar`
*/
if (require.main === module) {
const main = () =>
jar({
rootPath: process.argv[2],
targetPath: process.argv[3],
artifactId: process.env.ARTIFACT_ID ?? "artifact",
groupId: process.env.GROUP_ID ?? "group",
version: process.env.VERSION ?? "1.0.0"
});
main().catch(e => console.error(e));
}
7 src/bin/tools/kebabCaseToSnakeCase.ts Normal file
@ -0,0 +1,7 @@
import { capitalize } from "tsafe/capitalize";
export function kebabCaseToCamelCase(kebabCaseString: string): string {
const [first, ...rest] = kebabCaseString.split("-");
return [first, ...rest.map(capitalize)].join("");
}
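Worked example (not part of the diff):

    kebabCaseToCamelCase("extra-theme-properties"); // "extraThemeProperties"
    kebabCaseToCamelCase("login"); // "login", no dash so unchanged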
@ -1,31 +0,0 @@
import { execSync } from "child_process";
function rmInternal(params: { pathToRemove: string; args: string | undefined; cwd: string | undefined }) {
const { pathToRemove, args, cwd } = params;
execSync(`rm ${args ? `-${args} ` : ""}${pathToRemove.replace(/ /g, "\\ ")}`, cwd !== undefined ? { cwd } : undefined);
}
export function rm(pathToRemove: string, options?: { cwd: string }) {
rmInternal({
pathToRemove,
"args": undefined,
"cwd": options?.cwd
});
}
export function rm_r(pathToRemove: string, options?: { cwd: string }) {
rmInternal({
pathToRemove,
"args": "r",
"cwd": options?.cwd
});
}
export function rm_rf(pathToRemove: string, options?: { cwd: string }) {
rmInternal({
pathToRemove,
"args": "rf",
"cwd": options?.cwd
});
}
37 src/bin/tools/tee.ts Normal file
@ -0,0 +1,37 @@
import { PassThrough, Readable } from "stream";
export default function tee(input: Readable) {
const a = new PassThrough();
const b = new PassThrough();
let aFull = false;
let bFull = false;
a.on("drain", () => {
aFull = false;
if (!aFull && !bFull) input.resume();
});
b.on("drain", () => {
bFull = false;
if (!aFull && !bFull) input.resume();
});
input.on("error", e => {
a.emit("error", e);
b.emit("error", e);
});
input.on("data", chunk => {
aFull = !a.write(chunk);
bFull = !b.write(chunk);
if (aFull || bFull) input.pause();
});
input.on("end", () => {
a.end();
b.end();
});
return [a, b] as const;
}
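Usage sketch (not part of the diff): split one readable so it can be compressed and checksummed in parallel, which is how deflate.ts above consumes it. The file path is hypothetical.

    import { createReadStream } from "fs";
    import { createDeflateRaw } from "zlib";
    import { crc32 } from "./crc32";
    import tee from "./tee";

    const [forCompression, forChecksum] = tee(createReadStream("some-file.bin"));
    forCompression.pipe(createDeflateRaw()); // one branch gets compressed
    crc32(forChecksum).then(sum => console.log(sum)); // the other branch is checksummed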
19 src/bin/tools/walk.ts Normal file
@ -0,0 +1,19 @@
import { readdir } from "fs/promises";
import { resolve } from "path";
/**
* Asynchronously and recursively walk a directory tree, yielding every file and directory
* found
*
* @param root the starting directory
* @returns AsyncGenerator
*/
export default async function* walk(root: string): AsyncGenerator<string, void, void> {
for (const entry of await readdir(root, { withFileTypes: true })) {
const absolutePath = resolve(root, entry.name);
if (entry.isDirectory()) {
yield absolutePath;
yield* walk(absolutePath);
} else yield absolutePath;
}
}
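Usage sketch (not part of the diff): the generator can be consumed with for await, or fed into a stream as jar.ts does. The root directory is hypothetical.

    import walk from "./walk";

    (async () => {
        for await (const path of walk("build_keycloak")) {
            console.log(path); // absolute path of every directory and file
        }
    })();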
246 src/bin/tools/zip.ts Normal file
@ -0,0 +1,246 @@
import { Transform, TransformOptions } from "stream";
import { createReadStream } from "fs";
import { stat } from "fs/promises";
import { Blob } from "buffer";
import { deflateBuffer, deflateStream } from "./deflate";
/**
* Zip source
* @property filename the name of the entry in the archie
* @property path of the source file, if the source is an actual file
* @property data the actual data buffer, if the source is constructed in-memory
*/
export type ZipSource = { path: string } & ({ fsPath: string } | { data: Buffer });
export type ZipRecord = {
path: string;
compression: "deflate" | undefined;
uncompressedSize: number;
compressedSize?: number;
crc32?: number;
offset?: number;
};
/**
* @returns the actual byte size of an string
*/
function utf8size(s: string) {
return new Blob([s]).size;
}
/**
* @param record
* @returns a buffer representing a Zip local header
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Local_file_header
*/
function localHeader(record: ZipRecord) {
const { path, compression, uncompressedSize } = record;
const filenameSize = utf8size(path);
const buf = Buffer.alloc(30 + filenameSize);
buf.writeUInt32LE(0x04_03_4b_50, 0); // local header signature
buf.writeUInt16LE(10, 4); // min version
// we write 0x08 because crc and compressed size are unknown at
buf.writeUInt16LE(0x08, 6); // general purpose bit flag
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 8);
buf.writeUInt16LE(0, 10); // modified time
buf.writeUInt16LE(0, 12); // modified date
buf.writeUInt32LE(0, 14); // crc unknown
buf.writeUInt32LE(0, 18); // compressed size unknown
buf.writeUInt32LE(uncompressedSize, 22);
buf.writeUInt16LE(filenameSize, 26);
buf.writeUInt16LE(0, 28); // extra field length
buf.write(path, 30, "utf-8");
return buf;
}
/**
* @param record
* @returns a buffer representing a Zip central header
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header
*/
function centralHeader(record: ZipRecord) {
const { path, compression, crc32, compressedSize, uncompressedSize, offset } = record;
const filenameSize = utf8size(path);
const buf = Buffer.alloc(46 + filenameSize);
const isFile = !path.endsWith("/");
if (typeof offset === "undefined") throw new Error("Illegal argument");
// we don't want to deal with possibly messed up file or directory
// permissions, so we ignore the original permissions
const externalAttr = isFile ? 0x81a40000 : 0x41ed0000;
buf.writeUInt32LE(0x0201_4b50, 0); // central header signature
buf.writeUInt16LE(10, 4); // version
buf.writeUInt16LE(10, 6); // min version
buf.writeUInt16LE(0, 8); // general purpose bit flag
buf.writeUInt16LE(compression ? ({ "deflate": 8 } as const)[compression] : 0, 10);
buf.writeUInt16LE(0, 12); // modified time
buf.writeUInt16LE(0, 14); // modified date
buf.writeUInt32LE(crc32 || 0, 16);
buf.writeUInt32LE(compressedSize || 0, 20);
buf.writeUInt32LE(uncompressedSize, 24);
buf.writeUInt16LE(filenameSize, 28);
buf.writeUInt16LE(0, 30); // extra field length
buf.writeUInt16LE(0, 32); // comment field length
buf.writeUInt16LE(0, 34); // disk number
buf.writeUInt16LE(0, 36); // internal
buf.writeUInt32LE(externalAttr, 38); // external
buf.writeUInt32LE(offset, 42); // offset where file starts
buf.write(path, 46, "utf-8");
return buf;
}
/**
* @returns a buffer representing an Zip End-Of-Central-Directory block
* @link https://en.wikipedia.org/wiki/ZIP_(file_format)#End_of_central_directory_record_(EOCD)
*/
function eocd({ offset, cdSize, nRecords }: { offset: number; cdSize: number; nRecords: number }) {
const buf = Buffer.alloc(22);
buf.writeUint32LE(0x06054b50, 0); // eocd signature
buf.writeUInt16LE(0, 4); // disc number
buf.writeUint16LE(0, 6); // disc where central directory starts
buf.writeUint16LE(nRecords, 8); // records on this disc
buf.writeUInt16LE(nRecords, 10); // records total
buf.writeUInt32LE(cdSize, 12); // byte size of cd
buf.writeUInt32LE(offset, 16); // cd offset
buf.writeUint16LE(0, 20); // comment length
return buf;
}
/**
* @returns a stream Transform, which reads a stream of ZipRecords and
* writes a bytestream
*/
export default function zip() {
/**
* This is called when the input stream of ZipSource items is finished.
* Will write central directory and end-of-central-direcotry blocks.
*/
const final = () => {
// write central directory
let cdSize = 0;
for (const record of records) {
const head = centralHeader(record);
zipTransform.push(head);
cdSize += head.length;
}
// write end-of-central-directory
zipTransform.push(eocd({ offset, cdSize, nRecords: records.length }));
// signal stream end
zipTransform.push(null);
};
/**
* Write a directory entry to the archive
* @param path
*/
const writeDir = async (path: string) => {
const record: ZipRecord = {
path: path + "/",
offset,
compression: undefined,
uncompressedSize: 0
};
const head = localHeader(record);
zipTransform.push(head);
records.push(record);
offset += head.length;
};
/**
* Write a file entry to the archive
* @param archivePath path of the file in archive
* @param fsPath path to file on filesystem
* @param size of the actual, uncompressed, file
*/
const writeFile = async (archivePath: string, fsPath: string, size: number) => {
const record: ZipRecord = {
path: archivePath,
offset,
compression: "deflate",
uncompressedSize: size
};
const head = localHeader(record);
zipTransform.push(head);
const { crc32, compressedSize } = await deflateStream(createReadStream(fsPath), chunk => zipTransform.push(chunk));
record.crc32 = crc32;
record.compressedSize = compressedSize;
records.push(record);
offset += head.length + compressedSize;
};
/**
* Write archive record based on filesystem file or directory
* @param archivePath path of item in archive
* @param fsPath path to item on filesystem
*/
const writeFromPath = async (archivePath: string, fsPath: string) => {
const fileStats = await stat(fsPath);
fileStats.isDirectory() ? await writeDir(archivePath) /**/ : await writeFile(archivePath, fsPath, fileStats.size) /**/;
};
/**
* Write archive record based on data in a buffer
* @param path
* @param data
*/
const writeFromBuffer = async (path: string, data: Buffer) => {
const { deflated, crc32 } = await deflateBuffer(data);
const record: ZipRecord = {
path,
compression: "deflate",
crc32,
uncompressedSize: data.length,
compressedSize: deflated.length,
offset
};
const head = localHeader(record);
zipTransform.push(head);
zipTransform.push(deflated);
records.push(record);
offset += head.length + deflated.length;
};
/**
* Write an archive record
* @param source
*/
const writeRecord = async (source: ZipSource) => {
if ("fsPath" in source) await writeFromPath(source.path, source.fsPath);
else if ("data" in source) await writeFromBuffer(source.path, source.data);
else throw new Error("Illegal argument " + typeof source + " " + JSON.stringify(source));
};
/**
* The actual stream transform function
* @param source
* @param _ encoding, ignored
* @param cb
*/
const transform: TransformOptions["transform"] = async (source: ZipSource, _, cb) => {
await writeRecord(source);
cb();
};
/** offset and records keep local state during processing */
let offset = 0;
const records: ZipRecord[] = [];
const zipTransform = new Transform({
readableObjectMode: false,
writableObjectMode: true,
transform,
final
});
return zipTransform;
}
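Usage sketch (not part of the diff): driving the transform directly with in-memory entries; jar.ts above is the real consumer. Entry names, data and the output path are made up.

    import { Readable } from "stream";
    import { createWriteStream } from "fs";
    import type { ZipSource } from "./zip";
    import zip from "./zip";

    const entries: ZipSource[] = [
        { path: "hello.txt", data: Buffer.from("hello", "utf8") },
        { path: "docs/readme.txt", data: Buffer.from("made up content", "utf8") }
    ];

    Readable.from(entries)
        .pipe(zip()) // ZipSource objects in, zip byte stream out
        .pipe(createWriteStream("example.zip"))
        .on("finish", () => console.log("archive written"));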
Some files were not shown because too many files have changed in this diff.