Remove dead code

Joseph Garrone 2024-12-26 15:24:12 +01:00
parent 5866c802e5
commit 1ac678a368
10 changed files with 5 additions and 632 deletions

View File

@@ -2,11 +2,6 @@ import { relative as pathRelative, dirname as pathDirname } from "path";
 import type { BuildContext } from "../shared/buildContext";
 import * as fs from "fs";
 import chalk from "chalk";
-import {
-    getLatestsSemVersionedTag,
-    type BuildContextLike as BuildContextLike_getLatestsSemVersionedTag
-} from "../shared/getLatestsSemVersionedTag";
-import { SemVer } from "../tools/SemVer";
 import fetch from "make-fetch-happen";
 import { z } from "zod";
 import { assert, type Equals, is } from "tsafe/assert";
@@ -14,7 +9,7 @@ import { id } from "tsafe/id";
 import { npmInstall } from "../tools/npmInstall";
 import { copyBoilerplate } from "./copyBoilerplate";
-type BuildContextLike = BuildContextLike_getLatestsSemVersionedTag & {
+type BuildContextLike = {
     fetchOptions: BuildContext["fetchOptions"];
     packageJsonFilePath: string;
 };
@@ -30,16 +25,10 @@ export async function initializeAccountTheme_singlePage(params: {
     const OWNER = "keycloakify";
     const REPO = "keycloak-account-ui";
-    const [semVersionedTag] = await getLatestsSemVersionedTag({
-        owner: OWNER,
-        repo: REPO,
-        count: 1,
-        doIgnoreReleaseCandidates: false,
-        buildContext
-    });
+    const version = "26.0.6-rc.1";
     const dependencies = await fetch(
-        `https://raw.githubusercontent.com/${OWNER}/${REPO}/${semVersionedTag.tag}/dependencies.gen.json`,
+        `https://raw.githubusercontent.com/${OWNER}/${REPO}/v${version}/dependencies.gen.json`,
         buildContext.fetchOptions
     )
         .then(r => r.json())
@@ -67,9 +56,7 @@ export async function initializeAccountTheme_singlePage(params: {
         })()
     );
-    dependencies.dependencies["@keycloakify/keycloak-account-ui"] = SemVer.stringify(
-        semVersionedTag.version
-    );
+    dependencies.dependencies["@keycloakify/keycloak-account-ui"] = version;
     const parsedPackageJson = (() => {
         type ParsedPackageJson = {
@@ -133,7 +120,7 @@ export async function initializeAccountTheme_singlePage(params: {
             chalk.green(
                 "The Single-Page account theme has been successfully initialized."
             ),
-            `Using Account UI of Keycloak version: ${chalk.bold(semVersionedTag.tag.split("-")[0])}`,
+            `Using Account UI of Keycloak version: ${chalk.bold(version.split("-")[0])}`,
             `Directory created: ${chalk.bold(pathRelative(process.cwd(), accountThemeSrcDirPath))}`,
             `Dependencies added to your project's package.json: `,
             chalk.bold(JSON.stringify(dependencies, null, 2))

View File

@@ -1,201 +0,0 @@
import { getLatestsSemVersionedTagFactory } from "../tools/octokit-addons/getLatestsSemVersionedTag";
import { Octokit } from "@octokit/rest";
import type { ReturnType } from "tsafe";
import type { Param0 } from "tsafe";
import { join as pathJoin, dirname as pathDirname } from "path";
import * as fs from "fs";
import { z } from "zod";
import { assert, type Equals } from "tsafe/assert";
import { id } from "tsafe/id";
import type { SemVer } from "../tools/SemVer";
import { same } from "evt/tools/inDepth/same";
import type { BuildContext } from "./buildContext";
import fetch from "make-fetch-happen";
type GetLatestsSemVersionedTag = ReturnType<
typeof getLatestsSemVersionedTagFactory
>["getLatestsSemVersionedTag"];
type Params = Param0<GetLatestsSemVersionedTag>;
type R = ReturnType<GetLatestsSemVersionedTag>;
let getLatestsSemVersionedTag_stateless: GetLatestsSemVersionedTag | undefined =
undefined;
const CACHE_VERSION = 1;
type Cache = {
version: typeof CACHE_VERSION;
entries: {
time: number;
params: Params;
result: R;
}[];
};
export type BuildContextLike = {
cacheDirPath: string;
fetchOptions: BuildContext["fetchOptions"];
};
assert<BuildContext extends BuildContextLike ? true : false>();
export async function getLatestsSemVersionedTag({
buildContext,
...params
}: Params & {
buildContext: BuildContextLike;
}): Promise<R> {
const cacheFilePath = pathJoin(
buildContext.cacheDirPath,
"latest-sem-versioned-tags.json"
);
const cacheLookupResult = (() => {
const getResult_currentCache = (currentCacheEntries: Cache["entries"]) => ({
hasCachedResult: false as const,
currentCache: {
version: CACHE_VERSION,
entries: currentCacheEntries
}
});
if (!fs.existsSync(cacheFilePath)) {
return getResult_currentCache([]);
}
let cache_json;
try {
cache_json = fs.readFileSync(cacheFilePath).toString("utf8");
} catch {
return getResult_currentCache([]);
}
let cache_json_parsed: unknown;
try {
cache_json_parsed = JSON.parse(cache_json);
} catch {
return getResult_currentCache([]);
}
const zSemVer = (() => {
type TargetType = SemVer;
const zTargetType = z.object({
major: z.number(),
minor: z.number(),
patch: z.number(),
rc: z.number().optional(),
parsedFrom: z.string()
});
assert<Equals<z.infer<typeof zTargetType>, TargetType>>();
return id<z.ZodType<TargetType>>(zTargetType);
})();
const zCache = (() => {
type TargetType = Cache;
const zTargetType = z.object({
version: z.literal(CACHE_VERSION),
entries: z.array(
z.object({
time: z.number(),
params: z.object({
owner: z.string(),
repo: z.string(),
count: z.number(),
doIgnoreReleaseCandidates: z.boolean()
}),
result: z.array(
z.object({
tag: z.string(),
version: zSemVer
})
)
})
)
});
assert<Equals<z.infer<typeof zTargetType>, TargetType>>();
return id<z.ZodType<TargetType>>(zTargetType);
})();
let cache: Cache;
try {
cache = zCache.parse(cache_json_parsed);
} catch {
return getResult_currentCache([]);
}
const cacheEntry = cache.entries.find(e => same(e.params, params));
if (cacheEntry === undefined) {
return getResult_currentCache(cache.entries);
}
if (Date.now() - cacheEntry.time > 3_600_000) {
return getResult_currentCache(cache.entries.filter(e => e !== cacheEntry));
}
return {
hasCachedResult: true as const,
cachedResult: cacheEntry.result
};
})();
if (cacheLookupResult.hasCachedResult) {
return cacheLookupResult.cachedResult;
}
const { currentCache } = cacheLookupResult;
getLatestsSemVersionedTag_stateless ??= (() => {
const octokit = (() => {
const githubToken = process.env.GITHUB_TOKEN;
const octokit = new Octokit({
...(githubToken === undefined ? {} : { auth: githubToken }),
request: {
fetch: (url: string, options?: any) =>
fetch(url, {
...options,
...buildContext.fetchOptions
})
}
});
return octokit;
})();
const { getLatestsSemVersionedTag } = getLatestsSemVersionedTagFactory({
octokit
});
return getLatestsSemVersionedTag;
})();
const result = await getLatestsSemVersionedTag_stateless(params);
currentCache.entries.push({
time: Date.now(),
params,
result
});
{
const dirPath = pathDirname(cacheFilePath);
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, { recursive: true });
}
}
fs.writeFileSync(cacheFilePath, JSON.stringify(currentCache, null, 2));
return result;
}
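
For reference, this removed cache wrapper was consumed exactly like the call sites deleted in the other files; a minimal sketch, where the cacheDirPath value is a hypothetical placeholder and fetchOptions is just cast to the expected type:

import type { BuildContext } from "./buildContext";
import { getLatestsSemVersionedTag } from "./getLatestsSemVersionedTag";

// Placeholder build context: only the two fields the helper needs.
const buildContext = {
    cacheDirPath: "node_modules/.cache/keycloakify", // hypothetical location
    fetchOptions: {} as BuildContext["fetchOptions"]
};

// The first call hits the GitHub API; identical calls within the next hour
// are served from latest-sem-versioned-tags.json in the cache directory.
const [latest] = await getLatestsSemVersionedTag({
    owner: "keycloak",
    repo: "keycloak",
    count: 1,
    doIgnoreReleaseCandidates: true,
    buildContext
});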

View File

@@ -1,72 +0,0 @@
import {
getLatestsSemVersionedTag,
type BuildContextLike as BuildContextLike_getLatestsSemVersionedTag
} from "./getLatestsSemVersionedTag";
import cliSelect from "cli-select";
import { assert } from "tsafe/assert";
import { SemVer } from "../tools/SemVer";
import type { BuildContext } from "./buildContext";
export type BuildContextLike = BuildContextLike_getLatestsSemVersionedTag & {};
assert<BuildContext extends BuildContextLike ? true : false>();
export async function promptKeycloakVersion(params: {
startingFromMajor: number | undefined;
excludeMajorVersions: number[];
doOmitPatch: boolean;
buildContext: BuildContextLike;
}) {
const { startingFromMajor, excludeMajorVersions, doOmitPatch, buildContext } = params;
const semVersionedTagByMajor = new Map<number, { tag: string; version: SemVer }>();
const semVersionedTags = await getLatestsSemVersionedTag({
count: 50,
owner: "keycloak",
repo: "keycloak",
doIgnoreReleaseCandidates: true,
buildContext
});
semVersionedTags.forEach(semVersionedTag => {
if (
startingFromMajor !== undefined &&
semVersionedTag.version.major < startingFromMajor
) {
return;
}
if (excludeMajorVersions.includes(semVersionedTag.version.major)) {
return;
}
const currentSemVersionedTag = semVersionedTagByMajor.get(
semVersionedTag.version.major
);
if (
currentSemVersionedTag !== undefined &&
SemVer.compare(semVersionedTag.version, currentSemVersionedTag.version) === -1
) {
return;
}
semVersionedTagByMajor.set(semVersionedTag.version.major, semVersionedTag);
});
const lastMajorVersions = Array.from(semVersionedTagByMajor.values()).map(
({ version }) =>
`${version.major}.${version.minor}${doOmitPatch ? "" : `.${version.patch}`}`
);
const { value } = await cliSelect<string>({
values: lastMajorVersions
}).catch(() => {
process.exit(-1);
});
const keycloakVersion = value.split(" ")[0];
return { keycloakVersion };
}

View File

@@ -1,12 +0,0 @@
type PropertiesThatCanBeUndefined<T extends Record<string, unknown>> = {
[Key in keyof T]: undefined extends T[Key] ? Key : never;
}[keyof T];
/**
* OptionalIfCanBeUndefined<{ p1: string | undefined; p2: string; }>
* is
* { p1?: string | undefined; p2: string }
*/
export type OptionalIfCanBeUndefined<T extends Record<string, unknown>> = {
[K in PropertiesThatCanBeUndefined<T>]?: T[K];
} & { [K in Exclude<keyof T, PropertiesThatCanBeUndefined<T>>]: T[K] };
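
A quick illustration of what the removed utility type did; the Input/Output names (and the module path in the import) are only for demonstration:

import type { OptionalIfCanBeUndefined } from "./OptionalIfCanBeUndefined";

type Input = { p1: string | undefined; p2: string };
type Output = OptionalIfCanBeUndefined<Input>;

// p1, whose type admits undefined, becomes an optional property; p2 stays required.
const ok: Output = { p2: "required" };
const alsoOk: Output = { p1: undefined, p2: "required" };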

View File

@@ -1,73 +0,0 @@
import { Readable } from "stream";
const crc32tab = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535,
0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd,
0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d,
0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec,
0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c,
0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac,
0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f,
0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, 0x2f6f7c87, 0x58684c11, 0xc1611dab,
0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb,
0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea,
0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, 0x4db26158, 0x3ab551ce,
0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9,
0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409,
0xce61e49f, 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81,
0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, 0xead54739,
0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, 0x8708a3d2, 0x1e01f268,
0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0,
0x10da7a5a, 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8,
0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, 0xcc0c7795, 0xbb0b4703,
0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7,
0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a,
0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae,
0x0cb61b38, 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, 0x88085ae6,
0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45,
0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, 0x4969474d,
0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5,
0x47b2cf7f, 0x30b5ffe9, 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
];
/**
*
* @param input either a byte stream, a string, or a buffer that you want the checksum of
* @returns a promise for a checksum (uint32)
*/
export function crc32(input: Readable | String | Buffer): Promise<number> {
if (typeof input === "string") {
let crc = ~0;
for (let i = 0; i < input.length; i++)
crc = (crc >>> 8) ^ crc32tab[(crc ^ input.charCodeAt(i)) & 0xff];
return Promise.resolve((crc ^ -1) >>> 0);
} else if (input instanceof Buffer) {
let crc = ~0;
for (let i = 0; i < input.length; i++)
crc = (crc >>> 8) ^ crc32tab[(crc ^ input[i]) & 0xff];
return Promise.resolve((crc ^ -1) >>> 0);
} else if (input instanceof Readable) {
return new Promise<number>((resolve, reject) => {
let crc = ~0;
input.setMaxListeners(Infinity);
input.on("end", () => resolve((crc ^ -1) >>> 0));
input.on("error", e => reject(e));
input.on("data", (chunk: Buffer) => {
for (let i = 0; i < chunk.length; i++)
crc = (crc >>> 8) ^ crc32tab[(crc ^ chunk[i]) & 0xff];
});
});
} else {
throw new Error("Unsupported input " + typeof input);
}
}
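
The removed crc32 helper accepted a string, a Buffer, or a Readable; a minimal usage sketch (the file path is hypothetical):

import { createReadStream } from "fs";
import { crc32 } from "./crc32";

const fromString = await crc32("hello world");
const fromBuffer = await crc32(Buffer.from("hello world"));
const fromStream = await crc32(createReadStream("build/archive.bin")); // hypothetical file

// All variants resolve to the same uint32 value for identical byte content.
console.log(fromString === fromBuffer);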

View File

@@ -1,61 +0,0 @@
import { PassThrough, Readable, TransformCallback, Writable } from "stream";
import { pipeline } from "stream/promises";
import { deflateRaw as deflateRawCb, createDeflateRaw } from "zlib";
import { promisify } from "util";
import { crc32 } from "./crc32";
import tee from "./tee";
const deflateRaw = promisify(deflateRawCb);
/**
* A stream transformer that records the number of bytes
* passed in its `size` property.
*/
class ByteCounter extends PassThrough {
size: number = 0;
_transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback) {
if ("length" in chunk) this.size += chunk.length;
super._transform(chunk, encoding, callback);
}
}
/**
* @param data buffer containing the data to be compressed
* @returns a buffer containing the compressed/deflated data and the crc32 checksum
* of the source data
*/
export async function deflateBuffer(data: Buffer) {
const [deflated, checksum] = await Promise.all([deflateRaw(data), crc32(data)]);
return { deflated, crc32: checksum };
}
/**
* @param input a byte stream, containing data to be compressed
* @param sink a method that will accept chunks of compressed data; we don't pass
* a writable here, since we don't want the writable stream to be closed after
* a single file
* @returns a promise, which will resolve with the crc32 checksum and the
* compressed size
*/
export async function deflateStream(input: Readable, sink: (chunk: Buffer) => void) {
const deflateWriter = new Writable({
write(chunk, _, callback) {
sink(chunk);
callback();
}
});
// tee the input stream, so we can compress and calc crc32 in parallel
const [rs1, rs2] = tee(input);
const byteCounter = new ByteCounter();
const [_, crc] = await Promise.all([
// pipe input into zip compressor, count the bytes
// returned and pass compressed data to the sink
pipeline(rs1, createDeflateRaw(), byteCounter, deflateWriter),
// calc checksum
crc32(rs2)
]);
return { crc32: crc, compressedSize: byteCounter.size };
}
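
A sketch of how the two removed helpers were meant to be used (the input file path is hypothetical):

import { createReadStream } from "fs";
import { deflateBuffer, deflateStream } from "./deflate";

// One-shot: compress a buffer and get the crc32 of the *uncompressed* data.
const { deflated, crc32: bufferChecksum } = await deflateBuffer(
    Buffer.from("some payload")
);

// Streaming: compressed chunks are pushed to the sink as they are produced,
// while the checksum and compressed size are computed in parallel via tee().
const compressedChunks: Buffer[] = [];
const { crc32: streamChecksum, compressedSize } = await deflateStream(
    createReadStream("build/archive.bin"), // hypothetical file
    chunk => compressedChunks.push(chunk)
);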

View File

@@ -1,47 +0,0 @@
import { listTagsFactory } from "./listTags";
import type { Octokit } from "@octokit/rest";
import { SemVer } from "../SemVer";
export function getLatestsSemVersionedTagFactory(params: { octokit: Octokit }) {
const { octokit } = params;
async function getLatestsSemVersionedTag(params: {
owner: string;
repo: string;
count: number;
doIgnoreReleaseCandidates: boolean;
}): Promise<
{
tag: string;
version: SemVer;
}[]
> {
const { owner, repo, count, doIgnoreReleaseCandidates } = params;
const semVersionedTags: { tag: string; version: SemVer }[] = [];
const { listTags } = listTagsFactory({ octokit });
for await (const tag of listTags({ owner, repo })) {
let version: SemVer;
try {
version = SemVer.parse(tag.replace(/^[vV]?/, ""));
} catch {
continue;
}
if (doIgnoreReleaseCandidates && version.rc !== undefined) {
continue;
}
semVersionedTags.push({ tag, version });
}
return semVersionedTags
.sort(({ version: vX }, { version: vY }) => SemVer.compare(vY, vX))
.slice(0, count);
}
return { getLatestsSemVersionedTag };
}
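
This factory was only ever wired up through the cached wrapper removed above; used directly, it would look roughly like this:

import { Octokit } from "@octokit/rest";
import { getLatestsSemVersionedTagFactory } from "./getLatestsSemVersionedTag";

const { getLatestsSemVersionedTag } = getLatestsSemVersionedTagFactory({
    octokit: new Octokit({ auth: process.env.GITHUB_TOKEN })
});

// Walks the repo's tags, keeps those that parse as SemVer, and returns the
// `count` highest versions (release candidates skipped when requested).
const tags = await getLatestsSemVersionedTag({
    owner: "keycloak",
    repo: "keycloak",
    count: 3,
    doIgnoreReleaseCandidates: true
});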

View File

@@ -1,60 +0,0 @@
import type { Octokit } from "@octokit/rest";
const per_page = 99;
export function listTagsFactory(params: { octokit: Octokit }) {
const { octokit } = params;
const octokit_repo_listTags = async (params: {
owner: string;
repo: string;
per_page: number;
page: number;
}) => {
return octokit.repos.listTags(params);
};
async function* listTags(params: {
owner: string;
repo: string;
}): AsyncGenerator<string> {
const { owner, repo } = params;
let page = 1;
while (true) {
const resp = await octokit_repo_listTags({
owner,
repo,
per_page,
page: page++
});
for (const branch of resp.data.map(({ name }) => name)) {
yield branch;
}
if (resp.data.length < 99) {
break;
}
}
}
/** Returns the same "latest" tag as deno.land/x, not actually the latest though */
async function getLatestTag(params: {
owner: string;
repo: string;
}): Promise<string | undefined> {
const { owner, repo } = params;
const itRes = await listTags({ owner, repo }).next();
if (itRes.done) {
return undefined;
}
return itRes.value;
}
return { listTags, getLatestTag };
}
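
The removed listTags is an async generator that transparently pages through the GitHub API, 99 tags at a time; a minimal sketch:

import { Octokit } from "@octokit/rest";
import { listTagsFactory } from "./listTags";

const { listTags, getLatestTag } = listTagsFactory({
    octokit: new Octokit({ auth: process.env.GITHUB_TOKEN })
});

// Iteration stops as soon as a page comes back with fewer than 99 entries.
for await (const tag of listTags({ owner: "keycloak", repo: "keycloak" })) {
    console.log(tag);
}

// Or just grab the first tag the API returns.
console.log(await getLatestTag({ owner: "keycloak", repo: "keycloak" }));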

View File

@@ -1,39 +0,0 @@
import { PassThrough, Readable } from "stream";
export default function tee(input: Readable) {
const a = new PassThrough();
const b = new PassThrough();
let aFull = false;
let bFull = false;
a.setMaxListeners(Infinity);
a.on("drain", () => {
aFull = false;
if (!aFull && !bFull) input.resume();
});
b.on("drain", () => {
bFull = false;
if (!aFull && !bFull) input.resume();
});
input.on("error", e => {
a.emit("error", e);
b.emit("error", e);
});
input.on("data", chunk => {
aFull = !a.write(chunk);
bFull = !b.write(chunk);
if (aFull || bFull) input.pause();
});
input.on("end", () => {
a.end();
b.end();
});
return [a, b] as const;
}
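
tee splits one Readable into two PassThrough branches and pauses the source whenever either branch is backed up; deflateStream above relies on it. A standalone usage sketch, with a hypothetical file path:

import { createReadStream } from "fs";
import tee from "./tee";
import { crc32 } from "./crc32";

const [forChecksum, forCounting] = tee(createReadStream("build/archive.bin")); // hypothetical file

// Both branches receive every chunk: one is checksummed, the other counted.
let byteCount = 0;
forCounting.on("data", (chunk: Buffer) => {
    byteCount += chunk.length;
});
const countingDone = new Promise<void>(resolve => forCounting.on("end", resolve));

const checksum = await crc32(forChecksum);
await countingDone;
console.log({ checksum, byteCount });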

View File

@@ -1,49 +0,0 @@
/**
* Concatenate the string fragments and interpolated values
* to get a single string.
*/
function populateTemplate(strings: TemplateStringsArray, ...args: unknown[]) {
const chunks: string[] = [];
for (let i = 0; i < strings.length; i++) {
let lastStringLineLength = 0;
if (strings[i]) {
chunks.push(strings[i]);
// remember last indent of the string portion
lastStringLineLength = strings[i].split("\n").slice(-1)[0]?.length ?? 0;
}
if (args[i]) {
// if the interpolation value has newlines, indent the interpolation values
// using the last known string indent
const chunk = String(args[i]).replace(
/([\r?\n])/g,
"$1" + " ".repeat(lastStringLineLength)
);
chunks.push(chunk);
}
}
return chunks.join("");
}
/**
* Shift all lines left by the *smallest* indentation level,
* and remove initial newline and all trailing spaces.
*/
export default function trimIndent(strings: TemplateStringsArray, ...args: any[]) {
// Remove initial and final newlines
let string = populateTemplate(strings, ...args)
.replace(/^[\r\n]/, "")
.replace(/\r?\n *$/, "");
const dents =
string
.match(/^([ \t])+/gm)
?.filter(s => /^\s+$/.test(s))
?.map(s => s.length) ?? [];
// No dents? no change required
if (!dents || dents.length == 0) return string;
const minDent = Math.min(...dents);
// The min indentation is 0, no change needed
if (!minDent) return string;
const re = new RegExp(`^${" ".repeat(minDent)}`, "gm");
const dedented = string.replace(re, "");
return dedented;
}
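
trimIndent is a tagged-template helper: it strips the common leading indentation while preserving relative nesting, and indents multi-line interpolations to match the surrounding text. A small sketch with made-up content:

import trimIndent from "./trimIndent";

const name = "my-theme";

const message = trimIndent`
    Theme initialized:
        name: ${name}
        path: src/account
`;

// The shared 4-space indent is removed and the nested 4 spaces are kept:
// "Theme initialized:\n    name: my-theme\n    path: src/account"
console.log(message);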