Resource fetching optimization

parent 7e84d0b108
commit dd33f554da
@@ -7,7 +7,7 @@
         "url": "git://github.com/keycloakify/keycloakify.git"
     },
     "scripts": {
-        "prepare": "ts-node --skipProject scripts/generate-i18n-messages.ts && patch-package",
+        "prepare": "patch-package && ts-node --skipProject scripts/generate-i18n-messages.ts",
         "build": "ts-node --skipProject scripts/build.ts",
         "storybook": "yarn build && yarn copy-keycloak-resources-to-storybook-static && start-storybook -p 6006",
         "link-in-starter": "ts-node --skipProject scripts/link-in-starter.ts",
@@ -22,20 +22,8 @@ async function main() {

     const thisCodebaseRootDirPath = getThisCodebaseRootDirPath();

-    const tmpDirPath = pathJoin(thisCodebaseRootDirPath, "tmp_xImOef9dOd44");
-
-    rmSync(tmpDirPath, { recursive: true, force: true });
-
-    fs.mkdirSync(tmpDirPath);
-
-    fs.writeFileSync(
-        pathJoin(tmpDirPath, ".gitignore"),
-        Buffer.from("/*\n!.gitignore\n", "utf8")
-    );
-
-    await downloadKeycloakDefaultTheme({
+    const { defaultThemeDirPath } = await downloadKeycloakDefaultTheme({
         keycloakVersion,
-        destDirPath: tmpDirPath,
         buildOptions: {
             cacheDirPath: pathJoin(
                 thisCodebaseRootDirPath,

@@ -52,7 +40,7 @@ async function main() {
     const record: { [typeOfPage: string]: { [language: string]: Dictionary } } = {};

     {
-        const baseThemeDirPath = pathJoin(tmpDirPath, "base");
+        const baseThemeDirPath = pathJoin(defaultThemeDirPath, "base");
         const re = new RegExp(
             `^([^\\${pathSep}]+)\\${pathSep}messages\\${pathSep}messages_([^.]+).properties$`
         );

@@ -84,8 +72,6 @@ async function main() {
         });
     }

-    rmSync(tmpDirPath, { recursive: true });
-
     Object.keys(record).forEach(themeType => {
         const recordForPageType = record[themeType];

@@ -2,6 +2,7 @@ import { join as pathJoin, relative as pathRelative, sep as pathSep } from "path
 import { promptKeycloakVersion } from "./shared/promptKeycloakVersion";
 import { readBuildOptions } from "./shared/buildOptions";
 import { downloadKeycloakDefaultTheme } from "./shared/downloadKeycloakDefaultTheme";
+import { transformCodebase } from "./tools/transformCodebase";
 import type { CliCommandOptions } from "./main";
 import chalk from "chalk";

@@ -48,11 +49,15 @@ export async function command(params: { cliCommandOptions: CliCommandOptions })
         ].join("\n")
     );

-    await downloadKeycloakDefaultTheme({
+    const { defaultThemeDirPath } = await downloadKeycloakDefaultTheme({
         keycloakVersion,
-        destDirPath,
         buildOptions
     });

+    transformCodebase({
+        srcDirPath: defaultThemeDirPath,
+        destDirPath
+    });
+
     console.log(chalk.green(`✓ done`));
 }
@@ -5,7 +5,6 @@ import { promptKeycloakVersion } from "./shared/promptKeycloakVersion";
 import { readBuildOptions } from "./shared/buildOptions";
 import * as fs from "fs";
 import { getThemeSrcDirPath } from "./shared/getThemeSrcDirPath";
-import { rmSync } from "./tools/fs.rmSync";
 import type { CliCommandOptions } from "./main";

 export async function command(params: { cliCommandOptions: CliCommandOptions }) {

@@ -38,24 +37,13 @@ export async function command(params: { cliCommandOptions: CliCommandOptions })
         cacheDirPath: buildOptions.cacheDirPath
     });

-    const builtinKeycloakThemeTmpDirPath = pathJoin(
-        buildOptions.cacheDirPath,
-        "initialize-email-theme_tmp"
-    );
-
-    rmSync(builtinKeycloakThemeTmpDirPath, {
-        recursive: true,
-        force: true
-    });
-
-    await downloadKeycloakDefaultTheme({
+    const { defaultThemeDirPath } = await downloadKeycloakDefaultTheme({
         keycloakVersion,
-        destDirPath: builtinKeycloakThemeTmpDirPath,
         buildOptions
     });

     transformCodebase({
-        srcDirPath: pathJoin(builtinKeycloakThemeTmpDirPath, "base", "email"),
+        srcDirPath: pathJoin(defaultThemeDirPath, "base", "email"),
         destDirPath: emailThemeSrcDirPath
     });

@@ -78,6 +66,4 @@ export async function command(params: { cliCommandOptions: CliCommandOptions })
         )}\` directory have been created.`
     );
     console.log("You can delete any file you don't modify.");
-
-    rmSync(builtinKeycloakThemeTmpDirPath, { recursive: true });
 }
@@ -9,7 +9,6 @@ import {
 } from "../../shared/constants";
 import { downloadKeycloakDefaultTheme } from "../../shared/downloadKeycloakDefaultTheme";
 import { transformCodebase } from "../../tools/transformCodebase";
-import { rmSync } from "../../tools/fs.rmSync";

 type BuildOptionsLike = {
     cacheDirPath: string;

@@ -22,13 +21,7 @@ assert<BuildOptions extends BuildOptionsLike ? true : false>();
 export async function bringInAccountV1(params: { buildOptions: BuildOptionsLike }) {
     const { buildOptions } = params;

-    const builtinKeycloakThemeTmpDirPath = pathJoin(
-        buildOptions.cacheDirPath,
-        "bringInAccountV1_tmp"
-    );
-
-    await downloadKeycloakDefaultTheme({
-        destDirPath: builtinKeycloakThemeTmpDirPath,
+    const { defaultThemeDirPath } = await downloadKeycloakDefaultTheme({
         keycloakVersion: lastKeycloakVersionWithAccountV1,
         buildOptions
     });

@@ -44,32 +37,20 @@ export async function bringInAccountV1(params: { buildOptions: BuildOptionsLike
     );

     transformCodebase({
-        srcDirPath: pathJoin(builtinKeycloakThemeTmpDirPath, "base", "account"),
+        srcDirPath: pathJoin(defaultThemeDirPath, "base", "account"),
         destDirPath: accountV1DirPath
     });

     transformCodebase({
-        srcDirPath: pathJoin(
-            builtinKeycloakThemeTmpDirPath,
-            "keycloak",
-            "account",
-            "resources"
-        ),
+        srcDirPath: pathJoin(defaultThemeDirPath, "keycloak", "account", "resources"),
         destDirPath: pathJoin(accountV1DirPath, "resources")
     });

     transformCodebase({
-        srcDirPath: pathJoin(
-            builtinKeycloakThemeTmpDirPath,
-            "keycloak",
-            "common",
-            "resources"
-        ),
+        srcDirPath: pathJoin(defaultThemeDirPath, "keycloak", "common", "resources"),
         destDirPath: pathJoin(accountV1DirPath, "resources", resources_common)
     });

-    rmSync(builtinKeycloakThemeTmpDirPath, { recursive: true });
-
     fs.writeFileSync(
         pathJoin(accountV1DirPath, "theme.properties"),
         Buffer.from(
@@ -1,224 +0,0 @@ (entire file removed; its former content follows)
import { createHash } from "crypto";
import { mkdir, writeFile, unlink } from "fs/promises";
import fetch from "make-fetch-happen";
import { dirname as pathDirname, join as pathJoin, basename as pathBasename } from "path";
import { assert } from "tsafe/assert";
import { transformCodebase } from "../tools/transformCodebase";
import { unzip, zip } from "../tools/unzip";
import { rm } from "../tools/fs.rm";
import * as child_process from "child_process";
import { existsAsync } from "../tools/fs.existsAsync";
import type { BuildOptions } from "./buildOptions";
import { getProxyFetchOptions } from "../tools/fetchProxyOptions";

export type BuildOptionsLike = {
    cacheDirPath: string;
    npmWorkspaceRootDirPath: string;
};

assert<BuildOptions extends BuildOptionsLike ? true : false>();

export async function downloadAndUnzip(params: {
    url: string;
    destDirPath: string;
    specificDirsToExtract?: string[];
    preCacheTransform?: {
        actionCacheId: string;
        action: (params: { destDirPath: string }) => Promise<void>;
    };
    buildOptions: BuildOptionsLike;
}) {
    const { url, destDirPath, specificDirsToExtract, preCacheTransform, buildOptions } =
        params;

    const { extractDirPath, zipFilePath } = (() => {
        const zipFileBasenameWithoutExt = generateFileNameFromURL({
            url,
            preCacheTransform:
                preCacheTransform === undefined
                    ? undefined
                    : {
                          actionCacheId: preCacheTransform.actionCacheId,
                          actionFootprint: preCacheTransform.action.toString()
                      }
        });

        const zipFilePath = pathJoin(
            buildOptions.cacheDirPath,
            `${zipFileBasenameWithoutExt}.zip`
        );
        const extractDirPath = pathJoin(
            buildOptions.cacheDirPath,
            `tmp_unzip_${zipFileBasenameWithoutExt}`
        );

        return { zipFilePath, extractDirPath };
    })();

    download_zip_and_transform: {
        if (await existsAsync(zipFilePath)) {
            break download_zip_and_transform;
        }

        const { response, isFromRemoteCache } = await (async () => {
            const proxyFetchOptions = await getProxyFetchOptions({
                npmWorkspaceRootDirPath: buildOptions.npmWorkspaceRootDirPath
            });

            const response = await fetch(
                `https://github.com/keycloakify/keycloakify/releases/download/v0.0.1/${pathBasename(
                    zipFilePath
                )}`,
                proxyFetchOptions
            );

            if (response.status === 200) {
                return {
                    response,
                    isFromRemoteCache: true
                };
            }

            return {
                response: await fetch(url, proxyFetchOptions),
                isFromRemoteCache: false
            };
        })();

        await mkdir(pathDirname(zipFilePath), { recursive: true });

        /**
         * The correct way to fix this is to upgrade node-fetch beyond 3.2.5
         * (see https://github.com/node-fetch/node-fetch/issues/1295#issuecomment-1144061991.)
         * Unfortunately, octokit (a dependency of keycloakify) also uses node-fetch, and
         * does not support node-fetch 3.x. So we stick around with this band-aid until
         * octokit upgrades.
         */
        response.body?.setMaxListeners(Number.MAX_VALUE);
        assert(typeof response.body !== "undefined" && response.body != null);

        await writeFile(zipFilePath, response.body);

        if (isFromRemoteCache) {
            break download_zip_and_transform;
        }

        if (specificDirsToExtract === undefined && preCacheTransform === undefined) {
            break download_zip_and_transform;
        }

        await unzip(zipFilePath, extractDirPath, specificDirsToExtract);

        try {
            await preCacheTransform?.action({
                destDirPath: extractDirPath
            });
        } catch (error) {
            await Promise.all([
                rm(extractDirPath, { recursive: true }),
                unlink(zipFilePath)
            ]);

            throw error;
        }

        await unlink(zipFilePath);

        await zip(extractDirPath, zipFilePath);

        await rm(extractDirPath, { recursive: true });

        upload_to_remote_cache_if_admin: {
            const githubToken =
                process.env["KEYCLOAKIFY_ADMIN_GITHUB_PERSONAL_ACCESS_TOKEN"];

            if (!githubToken) {
                break upload_to_remote_cache_if_admin;
            }

            console.log("uploading to remote cache");

            try {
                child_process.execSync(`which putasset`);
            } catch {
                child_process.execSync(`npm install -g putasset`);
            }

            try {
                child_process.execFileSync("putasset", [
                    "--owner",
                    "keycloakify",
                    "--repo",
                    "keycloakify",
                    "--tag",
                    "v0.0.1",
                    "--filename",
                    zipFilePath,
                    "--token",
                    githubToken
                ]);
            } catch {
                console.log(
                    "upload failed, asset probably already exists in remote cache"
                );
            }
        }
    }

    await unzip(zipFilePath, extractDirPath);

    transformCodebase({
        srcDirPath: extractDirPath,
        destDirPath: destDirPath
    });

    await rm(extractDirPath, { recursive: true });
}

function generateFileNameFromURL(params: {
    url: string;
    preCacheTransform:
        | {
              actionCacheId: string;
              actionFootprint: string;
          }
        | undefined;
}): string {
    const { preCacheTransform } = params;

    // Parse the URL
    const url = new URL(params.url);

    // Extract pathname and remove leading slashes
    let fileName = url.pathname.replace(/^\//, "").replace(/\//g, "_");

    // Optionally, add query parameters replacing special characters
    if (url.search) {
        fileName += url.search.replace(/[&=?]/g, "-");
    }

    // Replace any characters that are not valid in filenames
    fileName = fileName.replace(/[^a-zA-Z0-9-_]/g, "");

    // Trim or pad the fileName to a specific length
    fileName = fileName.substring(0, 50);

    add_pre_cache_transform: {
        if (preCacheTransform === undefined) {
            break add_pre_cache_transform;
        }

        // Sanitize actionCacheId the same way as other components
        const sanitizedActionCacheId = preCacheTransform.actionCacheId.replace(
            /[^a-zA-Z0-9-_]/g,
            "_"
        );

        fileName += `_${sanitizedActionCacheId}_${createHash("sha256")
            .update(preCacheTransform.actionFootprint)
            .digest("hex")
            .substring(0, 5)}`;
    }

    return fileName;
}
@@ -1,12 +1,9 @@
-import { join as pathJoin } from "path";
-import { downloadAndUnzip } from "./downloadAndUnzip";
+import { join as pathJoin, relative as pathRelative } from "path";
 import { type BuildOptions } from "./buildOptions";
 import { assert } from "tsafe/assert";
-import * as child_process from "child_process";
-import * as fs from "fs";
-import { rmSync } from "../tools/fs.rmSync";
 import { lastKeycloakVersionWithAccountV1 } from "./constants";
-import { transformCodebase } from "../tools/transformCodebase";
+import { downloadAndExtractArchive } from "../tools/downloadAndExtractArchive";
+import { isInside } from "../tools/isInside";

 export type BuildOptionsLike = {
     cacheDirPath: string;
@@ -17,363 +14,101 @@ assert<BuildOptions extends BuildOptionsLike ? true : false>();

 export async function downloadKeycloakDefaultTheme(params: {
     keycloakVersion: string;
-    destDirPath: string;
     buildOptions: BuildOptionsLike;
-}) {
-    const { keycloakVersion, destDirPath, buildOptions } = params;
-
-    await downloadAndUnzip({
-        destDirPath,
-        url: `https://github.com/keycloak/keycloak/archive/refs/tags/${keycloakVersion}.zip`,
-        specificDirsToExtract: ["", "-community"].map(
-            ext => `keycloak-${keycloakVersion}/themes/src/main/resources${ext}/theme`
-        ),
-        buildOptions,
-        preCacheTransform: {
-            actionCacheId: "npm install and build",
-            action: async ({ destDirPath }) => {
-                install_common_node_modules: {
-                    const commonResourcesDirPath = pathJoin(
-                        destDirPath,
-                        "keycloak",
-                        "common",
-                        "resources"
-                    );
-
-                    if (!fs.existsSync(commonResourcesDirPath)) {
-                        break install_common_node_modules;
-                    }
-
-                    if (
-                        !fs.existsSync(pathJoin(commonResourcesDirPath, "package.json"))
-                    ) {
-                        break install_common_node_modules;
-                    }
-
-                    if (fs.existsSync(pathJoin(commonResourcesDirPath, "node_modules"))) {
-                        break install_common_node_modules;
-                    }
-
-                    child_process.execSync("npm install --omit=dev", {
-                        cwd: commonResourcesDirPath,
-                        stdio: "ignore"
-                    });
-                }
-
-                repatriate_common_resources_from_base_login_theme: {
-                    const baseLoginThemeResourceDir = pathJoin(
-                        destDirPath,
-                        "base",
-                        "login",
-                        "resources"
-                    );
-
-                    if (!fs.existsSync(baseLoginThemeResourceDir)) {
-                        break repatriate_common_resources_from_base_login_theme;
-                    }
-
-                    transformCodebase({
-                        srcDirPath: baseLoginThemeResourceDir,
-                        destDirPath: pathJoin(
-                            destDirPath,
-                            "keycloak",
-                            "login",
-                            "resources"
-                        )
-                    });
-                }
-
-                install_and_move_to_common_resources_generated_in_keycloak_v2: {
-                    if (
-                        !fs
-                            .readFileSync(
-                                pathJoin(
-                                    destDirPath,
-                                    "keycloak",
-                                    "login",
-                                    "theme.properties"
-                                )
-                            )
-                            .toString("utf8")
-                            .includes("web_modules")
-                    ) {
-                        break install_and_move_to_common_resources_generated_in_keycloak_v2;
-                    }
-
-                    const accountV2DirSrcDirPath = pathJoin(
-                        destDirPath,
-                        "keycloak.v2",
-                        "account",
-                        "src"
-                    );
-
-                    if (!fs.existsSync(accountV2DirSrcDirPath)) {
-                        break install_and_move_to_common_resources_generated_in_keycloak_v2;
-                    }
-
-                    const packageManager = fs.existsSync(
-                        pathJoin(accountV2DirSrcDirPath, "pnpm-lock.yaml")
-                    )
-                        ? "pnpm"
-                        : "npm";
-
-                    if (packageManager === "pnpm") {
-                        try {
-                            child_process.execSync(`which pnpm`);
-                        } catch {
-                            console.log(`Installing pnpm globally`);
-                            child_process.execSync(`npm install -g pnpm`);
-                        }
-                    }
-
-                    child_process.execSync(`${packageManager} install`, {
-                        cwd: accountV2DirSrcDirPath,
-                        stdio: "ignore"
-                    });
-
-                    const packageJsonFilePath = pathJoin(
-                        accountV2DirSrcDirPath,
-                        "package.json"
-                    );
-
-                    const packageJsonRaw = fs.readFileSync(packageJsonFilePath);
-
-                    const parsedPackageJson = JSON.parse(packageJsonRaw.toString("utf8"));
-
-                    parsedPackageJson.scripts.build = parsedPackageJson.scripts.build
-                        .replace(`${packageManager} run check-types`, "true")
-                        .replace(`${packageManager} run babel`, "true");
-
-                    fs.writeFileSync(
-                        packageJsonFilePath,
-                        Buffer.from(JSON.stringify(parsedPackageJson, null, 2), "utf8")
-                    );
-
-                    child_process.execSync(`${packageManager} run build`, {
-                        cwd: accountV2DirSrcDirPath,
-                        stdio: "ignore"
-                    });
-
-                    fs.writeFileSync(packageJsonFilePath, packageJsonRaw);
-
-                    fs.rmSync(pathJoin(accountV2DirSrcDirPath, "node_modules"), {
-                        recursive: true
-                    });
-                }
-
-                remove_keycloak_v2: {
-                    const keycloakV2DirPath = pathJoin(destDirPath, "keycloak.v2");
-
-                    if (!fs.existsSync(keycloakV2DirPath)) {
-                        break remove_keycloak_v2;
-                    }
-
-                    rmSync(keycloakV2DirPath, { recursive: true });
-                }
-
-                // Note, this is an optimization for reducing the size of the jar
-                remove_unused_node_modules: {
-                    const nodeModuleDirPath = pathJoin(
-                        destDirPath,
-                        "keycloak",
-                        "common",
-                        "resources",
-                        "node_modules"
-                    );
-
-                    if (!fs.existsSync(nodeModuleDirPath)) {
-                        break remove_unused_node_modules;
-                    }
-
-                    const toDeletePerfixes = [
-                        "angular",
-                        "bootstrap",
-                        "rcue",
-                        "font-awesome",
-                        "ng-file-upload",
-                        pathJoin("patternfly", "dist", "sass"),
-                        pathJoin("patternfly", "dist", "less"),
-                        pathJoin("patternfly", "dist", "js"),
-                        "d3",
-                        pathJoin("jquery", "src"),
-                        "c3",
-                        "core-js",
-                        "eonasdan-bootstrap-datetimepicker",
-                        "moment",
-                        "react",
-                        "patternfly-bootstrap-treeview",
-                        "popper.js",
-                        "tippy.js",
-                        "jquery-match-height",
-                        "google-code-prettify",
-                        "patternfly-bootstrap-combobox",
-                        "focus-trap",
-                        "tabbable",
-                        "scheduler",
-                        "@types",
-                        "datatables.net",
-                        "datatables.net-colreorder",
-                        "tslib",
-                        "prop-types",
-                        "file-selector",
-                        "datatables.net-colreorder-bs",
-                        "object-assign",
-                        "warning",
-                        "js-tokens",
-                        "loose-envify",
-                        "prop-types-extra",
-                        "attr-accept",
-                        "datatables.net-select",
-                        "drmonty-datatables-colvis",
-                        "datatables.net-bs",
-                        pathJoin("@patternfly", "react"),
-                        pathJoin("@patternfly", "patternfly", "docs")
-                    ];
-
-                    transformCodebase({
-                        srcDirPath: nodeModuleDirPath,
-                        destDirPath: nodeModuleDirPath,
-                        transformSourceCode: ({ sourceCode, fileRelativePath }) => {
-                            if (fileRelativePath.endsWith(".map")) {
-                                return undefined;
-                            }
-
-                            if (
-                                toDeletePerfixes.find(prefix =>
-                                    fileRelativePath.startsWith(prefix)
-                                ) !== undefined
-                            ) {
-                                return undefined;
-                            }
-
-                            if (
-                                fileRelativePath.startsWith(
-                                    pathJoin("patternfly", "dist", "fonts")
-                                )
-                            ) {
-                                if (
-                                    !fileRelativePath.endsWith(".woff2") &&
-                                    !fileRelativePath.endsWith(".woff") &&
-                                    !fileRelativePath.endsWith(".ttf")
-                                ) {
-                                    return undefined;
-                                }
-                            }
-
-                            return { modifiedSourceCode: sourceCode };
-                        }
-                    });
-                }
-
-                // Just like node_modules
-                remove_unused_lib: {
-                    const libDirPath = pathJoin(
-                        destDirPath,
-                        "keycloak",
-                        "common",
-                        "resources",
-                        "lib"
-                    );
-
-                    if (!fs.existsSync(libDirPath)) {
-                        break remove_unused_lib;
-                    }
-
-                    const toDeletePerfixes = [
-                        "ui-ace",
-                        "filesaver",
-                        "fileupload",
-                        "angular",
-                        "ui-ace"
-                    ];
-
-                    transformCodebase({
-                        srcDirPath: libDirPath,
-                        destDirPath: libDirPath,
-                        transformSourceCode: ({ sourceCode, fileRelativePath }) => {
-                            if (fileRelativePath.endsWith(".map")) {
-                                return undefined;
-                            }
-
-                            if (
-                                toDeletePerfixes.find(prefix =>
-                                    fileRelativePath.startsWith(prefix)
-                                ) !== undefined
-                            ) {
-                                return undefined;
-                            }
-
-                            return { modifiedSourceCode: sourceCode };
-                        }
-                    });
-                }
-
-                last_account_v1_transformations: {
-                    if (lastKeycloakVersionWithAccountV1 !== keycloakVersion) {
-                        break last_account_v1_transformations;
-                    }
-
-                    {
-                        const accountCssFilePath = pathJoin(
-                            destDirPath,
-                            "keycloak",
-                            "account",
-                            "resources",
-                            "css",
-                            "account.css"
-                        );
-
-                        fs.writeFileSync(
-                            accountCssFilePath,
-                            Buffer.from(
-                                fs
-                                    .readFileSync(accountCssFilePath)
-                                    .toString("utf8")
-                                    .replace("top: -34px;", "top: -34px !important;"),
-                                "utf8"
-                            )
-                        );
-                    }
-
-                    // Note, this is an optimization for reducing the size of the jar,
-                    // For this version we know exactly which resources are used.
-                    {
-                        const nodeModulesDirPath = pathJoin(
-                            destDirPath,
-                            "keycloak",
-                            "common",
-                            "resources",
-                            "node_modules"
-                        );
-
-                        const toKeepPrefixes = [
-                            ...[
-                                "patternfly.min.css",
-                                "patternfly-additions.min.css",
-                                "patternfly-additions.min.css"
-                            ].map(fileBasename =>
-                                pathJoin("patternfly", "dist", "css", fileBasename)
-                            ),
-                            pathJoin("patternfly", "dist", "fonts")
-                        ];
-
-                        transformCodebase({
-                            srcDirPath: nodeModulesDirPath,
-                            destDirPath: nodeModulesDirPath,
-                            transformSourceCode: ({ sourceCode, fileRelativePath }) => {
-                                if (
-                                    toKeepPrefixes.find(prefix =>
-                                        fileRelativePath.startsWith(prefix)
-                                    ) === undefined
-                                ) {
-                                    return undefined;
-                                }
-                                return { modifiedSourceCode: sourceCode };
-                            }
-                        });
-                    }
-                }
-            }
-        }
-    });
-}
+}): Promise<{ defaultThemeDirPath: string }> {
+    const { keycloakVersion, buildOptions } = params;
+
+    const { extractedDirPath } = await downloadAndExtractArchive({
+        url: `https://repo1.maven.org/maven2/org/keycloak/keycloak-themes/${keycloakVersion}/keycloak-themes-${keycloakVersion}.jar`,
+        cacheDirPath: buildOptions.cacheDirPath,
+        npmWorkspaceRootDirPath: buildOptions.npmWorkspaceRootDirPath,
+        uniqueIdOfOnOnArchiveFile: "downloadKeycloakDefaultTheme",
+        onArchiveFile: async params => {
+            if (!isInside({ dirPath: "theme", filePath: params.fileRelativePath })) {
+                return;
+            }
+
+            const { readFile, writeFile } = params;
+
+            const fileRelativePath = pathRelative("theme", params.fileRelativePath);
+
+            skip_keycloak_v2: {
+                if (
+                    !isInside({
+                        dirPath: pathJoin("keycloak.v2"),
+                        filePath: fileRelativePath
+                    })
+                ) {
+                    break skip_keycloak_v2;
+                }
+
+                return;
+            }
+
+            last_account_v1_transformations: {
+                if (lastKeycloakVersionWithAccountV1 !== keycloakVersion) {
+                    break last_account_v1_transformations;
+                }
+
+                patch_account_css: {
+                    if (
+                        fileRelativePath !==
+                        pathJoin("keycloak", "account", "resources", "css", "account.css")
+                    ) {
+                        break patch_account_css;
+                    }
+
+                    await writeFile({
+                        fileRelativePath,
+                        modifiedData: Buffer.from(
+                            (await readFile())
+                                .toString("utf8")
+                                .replace("top: -34px;", "top: -34px !important;"),
+                            "utf8"
+                        )
+                    });
+
+                    return;
+                }
+
+                skip_unused_node_modules: {
+                    const dirPath = pathJoin(
+                        "keycloak",
+                        "common",
+                        "resources",
+                        "node_modules"
+                    );
+
+                    if (!isInside({ dirPath, filePath: fileRelativePath })) {
+                        break skip_unused_node_modules;
+                    }
+
+                    const toKeepPrefixes = [
+                        ...[
+                            "patternfly.min.css",
+                            "patternfly-additions.min.css",
+                            "patternfly-additions.min.css"
+                        ].map(fileBasename =>
+                            pathJoin(dirPath, "patternfly", "dist", "css", fileBasename)
+                        ),
+                        pathJoin(dirPath, "patternfly", "dist", "fonts")
+                    ];
+
+                    if (
+                        toKeepPrefixes.find(prefix =>
+                            fileRelativePath.startsWith(prefix)
+                        ) !== undefined
+                    ) {
+                        break skip_unused_node_modules;
+                    }
+
+                    return;
+                }
+            }
+
+            await writeFile({ fileRelativePath });
+        }
+    });
+
+    return { defaultThemeDirPath: extractedDirPath };
+}
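The filtering above relies on the `isInside` path helper imported from "../tools/isInside", which is not shown in this diff. As a minimal sketch of what such a helper is assumed to do (the actual implementation in the repository may differ), it checks whether a relative file path lives under a directory path by comparing path segments rather than raw string prefixes:

// Hypothetical sketch of the isInside helper used above; names mirror the call
// sites in the diff, the body is an assumption.
import { relative, isAbsolute } from "path";

export function isInside(params: { dirPath: string; filePath: string }): boolean {
    const { dirPath, filePath } = params;
    const rel = relative(dirPath, filePath);
    // Inside means: not the directory itself, not escaping upward, not absolute.
    return rel !== "" && !rel.startsWith("..") && !isAbsolute(rel);
}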
@@ -1,16 +1,15 @@
 import { transformCodebase } from "../tools/transformCodebase";
 import { join as pathJoin } from "path";
-import { downloadKeycloakDefaultTheme } from "./downloadKeycloakDefaultTheme";
+import {
+    downloadKeycloakDefaultTheme,
+    type BuildOptionsLike as BuildOptionsLike_downloadKeycloakDefaultTheme
+} from "./downloadKeycloakDefaultTheme";
 import { resources_common, type ThemeType } from "./constants";
 import type { BuildOptions } from "./buildOptions";
 import { assert } from "tsafe/assert";
-import * as crypto from "crypto";
-import { rmSync } from "../tools/fs.rmSync";
+import { existsAsync } from "../tools/fs.existsAsync";

-export type BuildOptionsLike = {
-    cacheDirPath: string;
-    npmWorkspaceRootDirPath: string;
-};
+export type BuildOptionsLike = BuildOptionsLike_downloadKeycloakDefaultTheme & {};

 assert<BuildOptions extends BuildOptionsLike ? true : false>();

@@ -22,32 +21,33 @@ export async function downloadKeycloakStaticResources(params: {
 }) {
     const { themeType, themeDirPath, keycloakVersion, buildOptions } = params;

-    const tmpDirPath = pathJoin(
-        buildOptions.cacheDirPath,
-        `downloadKeycloakStaticResources_tmp_${crypto
-            .createHash("sha256")
-            .update(`${themeType}-${keycloakVersion}`)
-            .digest("hex")
-            .slice(0, 8)}`
-    );
-
-    await downloadKeycloakDefaultTheme({
+    const { defaultThemeDirPath } = await downloadKeycloakDefaultTheme({
         keycloakVersion,
-        destDirPath: tmpDirPath,
         buildOptions
     });

-    const resourcesPath = pathJoin(themeDirPath, themeType, "resources");
+    const resourcesDirPath = pathJoin(themeDirPath, themeType, "resources");
+
+    repatriate_base_resources: {
+        const srcDirPath = pathJoin(defaultThemeDirPath, "base", themeType, "resources");
+
+        if (!(await existsAsync(srcDirPath))) {
+            break repatriate_base_resources;
+        }
+
+        transformCodebase({
+            srcDirPath,
+            destDirPath: resourcesDirPath
+        });
+    }

     transformCodebase({
-        srcDirPath: pathJoin(tmpDirPath, "keycloak", themeType, "resources"),
-        destDirPath: resourcesPath
+        srcDirPath: pathJoin(defaultThemeDirPath, "keycloak", themeType, "resources"),
+        destDirPath: resourcesDirPath
     });

     transformCodebase({
-        srcDirPath: pathJoin(tmpDirPath, "keycloak", "common", "resources"),
-        destDirPath: pathJoin(resourcesPath, resources_common)
+        srcDirPath: pathJoin(defaultThemeDirPath, "keycloak", "common", "resources"),
+        destDirPath: pathJoin(resourcesDirPath, resources_common)
     });
-
-    rmSync(tmpDirPath, { recursive: true });
 }
@@ -0,0 +1,101 @@ (new file; all lines added)
import fetch from "make-fetch-happen";
import { mkdir, unlink, writeFile, readdir } from "fs/promises";
import { dirname as pathDirname, join as pathJoin } from "path";
import { assert } from "tsafe/assert";
import { extractArchive } from "../extractArchive";
import { existsAsync } from "../fs.existsAsync";
import { getProxyFetchOptions } from "./fetchProxyOptions";
import * as crypto from "crypto";

export async function downloadAndExtractArchive(params: {
    url: string;
    uniqueIdOfOnOnArchiveFile: string;
    onArchiveFile: (params: {
        fileRelativePath: string;
        readFile: () => Promise<Buffer>;
        writeFile: (params: {
            fileRelativePath: string;
            modifiedData?: Buffer;
        }) => Promise<void>;
    }) => Promise<void>;
    cacheDirPath: string;
    npmWorkspaceRootDirPath: string;
}): Promise<{ extractedDirPath: string }> {
    const {
        url,
        uniqueIdOfOnOnArchiveFile,
        onArchiveFile,
        cacheDirPath,
        npmWorkspaceRootDirPath
    } = params;

    const archiveFileBasename = url.split("?")[0].split("/").reverse()[0];

    const archiveFilePath = pathJoin(cacheDirPath, archiveFileBasename);

    download: {
        if (await existsAsync(archiveFilePath)) {
            break download;
        }

        await mkdir(pathDirname(archiveFilePath), { recursive: true });

        const response = await fetch(
            url,
            await getProxyFetchOptions({ npmWorkspaceRootDirPath })
        );

        response.body?.setMaxListeners(Number.MAX_VALUE);
        assert(typeof response.body !== "undefined" && response.body != null);

        await writeFile(archiveFilePath, response.body);
    }

    const extractDirBasename = `${archiveFileBasename.split(".")[0]}_${uniqueIdOfOnOnArchiveFile}_${crypto
        .createHash("sha256")
        .update(onArchiveFile.toString())
        .digest("hex")
        .substring(0, 5)}`;

    await Promise.all(
        (await readdir(cacheDirPath))
            .filter(
                (() => {
                    const prefix = extractDirBasename
                        .split("_")
                        .reverse()
                        .slice(1)
                        .reverse()
                        .join("_");

                    return basename =>
                        basename !== extractDirBasename && basename.startsWith(prefix);
                })()
            )
            .map(basename => unlink(pathJoin(cacheDirPath, basename)))
    );

    const extractedDirPath = pathJoin(cacheDirPath, extractDirBasename);

    extract_and_transform: {
        if (await existsAsync(extractedDirPath)) {
            break extract_and_transform;
        }

        await extractArchive({
            archiveFilePath,
            onArchiveFile: async ({ relativeFilePathInArchive, readFile, writeFile }) =>
                onArchiveFile({
                    fileRelativePath: relativeFilePathInArchive,
                    readFile,
                    writeFile: ({ fileRelativePath, modifiedData }) =>
                        writeFile({
                            filePath: pathJoin(extractedDirPath, fileRelativePath),
                            modifiedData
                        })
                })
        });
    }

    return { extractedDirPath };
}
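For context, a minimal usage sketch of the `downloadAndExtractArchive` helper introduced above. Only the parameter names and return shape come from the signature in this file; the URL, cache directory, and the ".ftl" filter are placeholder assumptions for illustration:

import { join as pathJoin } from "path";
import { downloadAndExtractArchive } from "./downloadAndExtractArchive";

(async () => {
    // Hypothetical example: download an archive once, keep only .ftl entries,
    // and get back the path of the cached, already-transformed extraction.
    const { extractedDirPath } = await downloadAndExtractArchive({
        url: "https://example.com/some-archive.zip", // placeholder URL
        cacheDirPath: pathJoin(process.cwd(), "node_modules", ".cache", "my-tool"),
        npmWorkspaceRootDirPath: process.cwd(),
        uniqueIdOfOnOnArchiveFile: "keepOnlyFtlFiles",
        onArchiveFile: async ({ fileRelativePath, writeFile }) => {
            if (!fileRelativePath.endsWith(".ftl")) {
                return; // entries that are never written are simply left out of the extraction
            }
            await writeFile({ fileRelativePath });
        }
    });

    console.log(`Extracted to ${extractedDirPath}`);
})();

Because the extraction directory name hashes the onArchiveFile callback, changing the transform naturally invalidates the cached extraction while the downloaded archive itself stays cached.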
src/bin/tools/downloadAndExtractArchive/fetchProxyOptions.ts (new file, 96 lines)
@@ -0,0 +1,96 @@ (new file; all lines added)
import { exec as execCallback } from "child_process";
import { readFile } from "fs/promises";
import { type FetchOptions } from "make-fetch-happen";
import { promisify } from "util";

function ensureArray<T>(arg0: T | T[]) {
    return Array.isArray(arg0) ? arg0 : typeof arg0 === "undefined" ? [] : [arg0];
}

function ensureSingleOrNone<T>(arg0: T | T[]) {
    if (!Array.isArray(arg0)) return arg0;
    if (arg0.length === 0) return undefined;
    if (arg0.length === 1) return arg0[0];
    throw new Error(
        "Illegal configuration, expected a single value but found multiple: " +
            arg0.map(String).join(", ")
    );
}

type NPMConfig = Record<string, string | string[]>;

/**
 * Get npm configuration as map
 */
async function getNmpConfig(params: { npmWorkspaceRootDirPath: string }) {
    const { npmWorkspaceRootDirPath } = params;

    const exec = promisify(execCallback);

    const stdout = await exec("npm config get", {
        encoding: "utf8",
        cwd: npmWorkspaceRootDirPath
    }).then(({ stdout }) => stdout);

    const npmConfigReducer = (cfg: NPMConfig, [key, value]: [string, string]) =>
        key in cfg
            ? { ...cfg, [key]: [...ensureArray(cfg[key]), value] }
            : { ...cfg, [key]: value };

    return stdout
        .split("\n")
        .filter(line => !line.startsWith(";"))
        .map(line => line.trim())
        .map(line => line.split("=", 2) as [string, string])
        .reduce(npmConfigReducer, {} as NPMConfig);
}

export type ProxyFetchOptions = Pick<
    FetchOptions,
    "proxy" | "noProxy" | "strictSSL" | "cert" | "ca"
>;

export async function getProxyFetchOptions(params: {
    npmWorkspaceRootDirPath: string;
}): Promise<ProxyFetchOptions> {
    const { npmWorkspaceRootDirPath } = params;

    const cfg = await getNmpConfig({ npmWorkspaceRootDirPath });

    const proxy = ensureSingleOrNone(cfg["https-proxy"] ?? cfg["proxy"]);
    const noProxy = cfg["noproxy"] ?? cfg["no-proxy"];

    function maybeBoolean(arg0: string | undefined) {
        return typeof arg0 === "undefined" ? undefined : Boolean(arg0);
    }

    const strictSSL = maybeBoolean(ensureSingleOrNone(cfg["strict-ssl"]));
    const cert = cfg["cert"];
    const ca = ensureArray(cfg["ca"] ?? cfg["ca[]"]);
    const cafile = ensureSingleOrNone(cfg["cafile"]);

    if (typeof cafile !== "undefined" && cafile !== "null") {
        ca.push(
            ...(await (async () => {
                function chunks<T>(arr: T[], size: number = 2) {
                    return arr
                        .map((_, i) => i % size == 0 && arr.slice(i, i + size))
                        .filter(Boolean) as T[][];
                }

                const cafileContent = await readFile(cafile, "utf-8");
                return chunks(cafileContent.split(/(-----END CERTIFICATE-----)/), 2).map(
                    ca => ca.join("").replace(/^\n/, "").replace(/\n/g, "\\n")
                );
            })())
        );
    }

    return {
        proxy,
        noProxy,
        strictSSL,
        cert,
        ca: ca.length === 0 ? undefined : ca
    };
}
src/bin/tools/downloadAndExtractArchive/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from "./downloadAndExtractArchive";
src/bin/tools/extractArchive.ts (new file, 125 lines)
@@ -0,0 +1,125 @@ (new file; all lines added)
import fs from "fs/promises";
import fsSync from "fs";
import yauzl from "yauzl";
import stream from "stream";
import { Deferred } from "evt/tools/Deferred";
import { dirname as pathDirname, sep as pathSep } from "path";

export async function extractArchive(params: {
    archiveFilePath: string;
    onArchiveFile: (params: {
        relativeFilePathInArchive: string;
        readFile: () => Promise<Buffer>;
        writeFile: (params: { filePath: string; modifiedData?: Buffer }) => Promise<void>;
    }) => Promise<void>;
}) {
    const { archiveFilePath, onArchiveFile } = params;

    const zipFile = await new Promise<yauzl.ZipFile>((resolve, reject) => {
        yauzl.open(archiveFilePath, { lazyEntries: true }, async (error, zipFile) => {
            if (error !== null) {
                reject(error);
                return;
            }
            resolve(zipFile);
        });
    });

    const dDone = new Deferred<void>();

    zipFile.once("end", () => {
        zipFile.close();
        dDone.resolve();
    });

    // TODO: See benchmark if using a class here improves the performance over anonymous functions
    class FileWriter {
        constructor(private entry: yauzl.Entry) {}

        public async writeToFile(params: {
            filePath: string;
            modifiedData?: Buffer;
        }): Promise<void> {
            const { filePath, modifiedData } = params;

            await fs.mkdir(pathDirname(filePath), { recursive: true });

            if (modifiedData !== undefined) {
                await fs.writeFile(filePath, modifiedData);
                return;
            }

            const readStream = await new Promise<stream.Readable>(resolve =>
                zipFile.openReadStream(this.entry, async (error, readStream) => {
                    if (error !== null) {
                        dDone.reject(error);
                        return;
                    }

                    resolve(readStream);
                })
            );

            const dDoneWithFile = new Deferred<void>();

            stream.pipeline(readStream, fsSync.createWriteStream(filePath), error => {
                if (error !== null) {
                    dDone.reject(error);
                    return;
                }

                dDoneWithFile.resolve();
            });

            await dDoneWithFile.pr;
        }

        public readFile(): Promise<Buffer> {
            return new Promise<Buffer>(resolve =>
                zipFile.openReadStream(this.entry, async (error, readStream) => {
                    if (error !== null) {
                        dDone.reject(error);
                        return;
                    }

                    const chunks: Buffer[] = [];

                    readStream.on("data", chunk => {
                        chunks.push(chunk);
                    });

                    readStream.on("end", () => {
                        resolve(Buffer.concat(chunks));
                    });

                    readStream.on("error", error => {
                        dDone.reject(error);
                    });
                })
            );
        }
    }

    zipFile.on("entry", async (entry: yauzl.Entry) => {
        handle_file: {
            // NOTE: Skip directories
            if (entry.fileName.endsWith(pathSep)) {
                break handle_file;
            }

            const fileWriter = new FileWriter(entry);

            await onArchiveFile({
                relativeFilePathInArchive: entry.fileName.split("/").join(pathSep),
                readFile: fileWriter.readFile.bind(fileWriter),
                writeFile: fileWriter.writeToFile.bind(fileWriter)
            });
        }

        zipFile.readEntry();
    });

    zipFile.readEntry();

    await dDone.pr;
}
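A small usage sketch of `extractArchive` on its own, assuming a local zip file exists; the file names and the destination directory are placeholders, only the callback shape comes from the signature above:

import { join as pathJoin } from "path";
import { extractArchive } from "./extractArchive";

(async () => {
    await extractArchive({
        archiveFilePath: pathJoin(process.cwd(), "example.zip"), // placeholder archive
        onArchiveFile: async ({ relativeFilePathInArchive, readFile, writeFile }) => {
            if (relativeFilePathInArchive === "README.md") {
                // An entry can be read into memory instead of (or before) being written out.
                console.log((await readFile()).toString("utf8"));
            }
            // Entries are only materialized on disk when writeFile is called.
            await writeFile({
                filePath: pathJoin(process.cwd(), "extracted", relativeFilePathInArchive)
            });
        }
    });
})();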
@@ -1,149 +0,0 @@ (entire file removed; its former content follows)
import fsp from "node:fs/promises";
import fs from "fs";
import path from "node:path";
import yauzl from "yauzl";
import yazl from "yazl";
import stream from "node:stream";
import { promisify } from "node:util";

const pipeline = promisify(stream.pipeline);

async function pathExists(path: string) {
    try {
        await fsp.stat(path);
        return true;
    } catch (error) {
        if ((error as { code: string }).code === "ENOENT") {
            return false;
        }
        throw error;
    }
}

// Handlings of non posix path is not implemented correctly
// it work by coincidence. Don't have the time to fix but it should be fixed.
export async function unzip(
    file: string,
    targetFolder: string,
    specificDirsToExtract?: string[]
) {
    specificDirsToExtract = specificDirsToExtract?.map(dirPath => {
        if (!dirPath.endsWith("/") || !dirPath.endsWith("\\")) {
            dirPath += "/";
        }

        return dirPath;
    });

    if (!targetFolder.endsWith("/") || !targetFolder.endsWith("\\")) {
        targetFolder += "/";
    }
    if (!fs.existsSync(targetFolder)) {
        fs.mkdirSync(targetFolder, { recursive: true });
    }

    return new Promise<void>((resolve, reject) => {
        yauzl.open(file, { lazyEntries: true }, async (err, zipfile) => {
            if (err) {
                reject(err);
                return;
            }

            zipfile.readEntry();

            zipfile.on("entry", async entry => {
                if (specificDirsToExtract !== undefined) {
                    const dirPath = specificDirsToExtract.find(dirPath =>
                        entry.fileName.startsWith(dirPath)
                    );

                    // Skip files outside of the unzipSubPath
                    if (dirPath === undefined) {
                        zipfile.readEntry();
                        return;
                    }

                    // Remove the unzipSubPath from the file name
                    entry.fileName = entry.fileName.substring(dirPath.length);
                }

                const target = path.join(targetFolder, entry.fileName);

                // Directory file names end with '/'.
                // Note that entries for directories themselves are optional.
                // An entry's fileName implicitly requires its parent directories to exist.
                if (/[\/\\]$/.test(target)) {
                    await fsp.mkdir(target, { recursive: true });

                    zipfile.readEntry();
                    return;
                }

                // Skip existing files
                if (await pathExists(target)) {
                    zipfile.readEntry();
                    return;
                }

                zipfile.openReadStream(entry, async (err, readStream) => {
                    if (err) {
                        reject(err);
                        return;
                    }

                    await fsp.mkdir(path.dirname(target), {
                        recursive: true
                    });

                    await pipeline(readStream, fs.createWriteStream(target));

                    zipfile.readEntry();
                });
            });

            zipfile.once("end", function () {
                zipfile.close();
                resolve();
            });
        });
    });
}

// NOTE: This code was directly copied from ChatGPT and appears to function as expected.
// However, confidence in its complete accuracy and robustness is limited.
export async function zip(sourceFolder: string, targetZip: string) {
    return new Promise<void>(async (resolve, reject) => {
        const zipfile = new yazl.ZipFile();
        const files: string[] = [];

        // Recursive function to explore directories and their subdirectories
        async function exploreDir(dir: string) {
            const dirContent = await fsp.readdir(dir);
            for (const file of dirContent) {
                const filePath = path.join(dir, file);
                const stat = await fsp.stat(filePath);
                if (stat.isDirectory()) {
                    await exploreDir(filePath);
                } else if (stat.isFile()) {
                    files.push(filePath);
                }
            }
        }

        // Collecting all files to be zipped
        await exploreDir(sourceFolder);

        // Adding files to zip
        for (const file of files) {
            const relativePath = path.relative(sourceFolder, file);
            zipfile.addFile(file, relativePath);
        }

        zipfile.outputStream
            .pipe(fs.createWriteStream(targetZip))
            .on("close", () => resolve())
            .on("error", err => reject(err)); // Listen to error events

        zipfile.end();
    });
}
yarn.lock
@@ -3562,13 +3562,6 @@
   dependencies:
     "@types/node" "*"

-"@types/yazl@^2.4.5":
-  version "2.4.5"
-  resolved "https://registry.yarnpkg.com/@types/yazl/-/yazl-2.4.5.tgz#0e21674799c7690afa23aeaff59806be5fe7494d"
-  integrity sha512-qpmPfx32HS7vlGJf7EsoM9qJnLZhXJBf1KH0hzfdc+D794rljQWh4H0I/UrZy+6Nhqn0l2jdBZXBGZtR1vnHqw==
-  dependencies:
-    "@types/node" "*"
-
 "@typescript-eslint/scope-manager@5.59.0":
   version "5.59.0"
   resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.59.0.tgz#86501d7a17885710b6716a23be2e93fc54a4fe8c"

@@ -13307,13 +13300,6 @@ yauzl@^2.10.0:
     buffer-crc32 "~0.2.3"
     fd-slicer "~1.1.0"

-yazl@^2.5.1:
-  version "2.5.1"
-  resolved "https://registry.yarnpkg.com/yazl/-/yazl-2.5.1.tgz#a3d65d3dd659a5b0937850e8609f22fffa2b5c35"
-  integrity sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==
-  dependencies:
-    buffer-crc32 "~0.2.3"
-
 yn@3.1.1:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"