diff --git a/src/bin/tools/crawl.ts b/src/bin/tools/crawl.ts
index dd53a6d9..b72ae6c9 100644
--- a/src/bin/tools/crawl.ts
+++ b/src/bin/tools/crawl.ts
@@ -1,8 +1,15 @@
 import * as fs from "fs";
 import * as path from "path";
 
-const crawlRec = (dir_path: string, paths: string[]) => {
-    for (const file_name of fs.readdirSync(dir_path)) {
+export const crawlRec = (dir_path: string, paths: string[]) => {
+    let file_names: string[] = [];
+    try {
+        file_names = fs.readdirSync(dir_path);
+    } catch (e) {
+        // dir_path does not exist
+        return;
+    }
+    for (const file_name of file_names) {
         const file_path = path.join(dir_path, file_name);
 
         if (fs.lstatSync(file_path).isDirectory()) {
diff --git a/test/bin/tools/crawl.spec.ts b/test/bin/tools/crawl.spec.ts
new file mode 100644
index 00000000..bf45c538
--- /dev/null
+++ b/test/bin/tools/crawl.spec.ts
@@ -0,0 +1,65 @@
+import path from "path";
+import { it, describe, expect, vi, beforeAll, afterAll } from "vitest";
+import { crawlRec } from "keycloakify/bin/tools/crawl";
+
+describe("crawl", () => {
+    describe("crawRec", () => {
+        beforeAll(() => {
+            vi.mock("node:fs", async () => {
+                const mod = await vi.importActual("fs");
+                return {
+                    ...mod,
+                    readdirSync: vi.fn().mockImplementation((dir_path: string) => {
+                        switch (dir_path) {
+                            case "root_dir":
+                                return ["sub_1_dir", "file_1", "sub_2_dir", "file_2"];
+                            case path.join("root_dir", "sub_1_dir"):
+                                return ["file_3", "sub_3_dir", "file_4"];
+                            case path.join("root_dir", "sub_1_dir", "sub_3_dir"):
+                                return ["file_5"];
+                            case path.join("root_dir", "sub_2_dir"):
+                                return [];
+                            default: {
+                                const enoent = new Error(`ENOENT: no such file or directory, scandir '${dir_path}'`);
+                                // @ts-ignore
+                                enoent.code = "ENOENT";
+                                // @ts-ignore
+                                enoent.syscall = "open";
+                                // @ts-ignore
+                                enoent.path = dir_path;
+                                throw enoent;
+                            }
+                        }
+                    }),
+                    lstatSync: vi.fn().mockImplementation((file_path: string) => {
+                        return { isDirectory: () => file_path.endsWith("_dir") };
+                    })
+                };
+            });
+        });
+        afterAll(() => {
+            vi.resetAllMocks();
+        });
+        it("returns files under a given dir_path", async () => {
+            const paths: string[] = [];
+            crawlRec("root_dir/sub_1_dir/sub_3_dir", paths);
+            expect(paths).toEqual(["root_dir/sub_1_dir/sub_3_dir/file_5"]);
+        });
+        it("returns files recursively under a given dir_path", async () => {
+            const paths: string[] = [];
+            crawlRec("root_dir", paths);
+            expect(paths).toEqual([
+                "root_dir/sub_1_dir/file_3",
+                "root_dir/sub_1_dir/sub_3_dir/file_5",
+                "root_dir/sub_1_dir/file_4",
+                "root_dir/file_1",
+                "root_dir/file_2"
+            ]);
+        });
+        it("return empty file list if dir_path does not exist", async () => {
+            const paths: string[] = [];
+            crawlRec("404", paths);
+            expect(paths).toEqual([]);
+        });
+    });
+});