#366: fix tests
@@ -1,15 +1,8 @@
 import * as fs from "fs";
 import * as path from "path";
 
-export const crawlRec = (dir_path: string, paths: string[]) => {
-    let file_names: string[] = [];
-    try {
-        file_names = fs.readdirSync(dir_path);
-    } catch (e) {
-        // dir_path does not exist
-        return;
-    }
-    for (const file_name of file_names) {
+const crawlRec = (dir_path: string, paths: string[]) => {
+    for (const file_name of fs.readdirSync(dir_path)) {
         const file_path = path.join(dir_path, file_name);
 
         if (fs.lstatSync(file_path).isDirectory()) {
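The hunk above simplifies the internal crawlRec walker: the try/catch around fs.readdirSync is removed, so a non-existent dir_path now makes it throw, and the function is no longer exported. The spec diff below instead imports an exported crawl({ dirPath, returnedPathsType }) helper that this commit does not show. As a point of reference only, a minimal sketch of what such a wrapper around crawlRec could look like; the option type, the "relative to dirPath" variant, and the loop body past the point where the hunk is cut off are assumptions, not the project's actual code:

import * as fs from "fs";
import * as path from "path";

// New-style walker from the hunk above; the body below the isDirectory()
// check is reconstructed (recurse into sub directories, otherwise record the file).
const crawlRec = (dir_path: string, paths: string[]) => {
    for (const file_name of fs.readdirSync(dir_path)) {
        const file_path = path.join(dir_path, file_name);

        if (fs.lstatSync(file_path).isDirectory()) {
            crawlRec(file_path, paths);
            continue;
        }

        paths.push(file_path);
    }
};

// Hypothetical exported wrapper matching the call sites in the spec below.
export function crawl(params: { dirPath: string; returnedPathsType: "absolute" | "relative to dirPath" }): string[] {
    const { dirPath, returnedPathsType } = params;

    const filePaths: string[] = [];

    // Unlike the old crawlRec, this throws if dirPath does not exist,
    // since the fs.readdirSync error is no longer swallowed.
    crawlRec(dirPath, filePaths);

    return returnedPathsType === "absolute" ? filePaths : filePaths.map(filePath => path.relative(dirPath, filePath));
}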
@@ -1,6 +1,6 @@
 import path from "path";
 import { it, describe, expect, vi, beforeAll, afterAll } from "vitest";
-import { crawlRec } from "keycloakify/bin/tools/crawl";
+import { crawl } from "keycloakify/bin/tools/crawl";
 
 describe("crawl", () => {
     describe("crawRec", () => {
@@ -41,13 +41,11 @@ describe("crawl", () => {
            vi.resetAllMocks();
        });
        it("returns files under a given dir_path", async () => {
-            const paths: string[] = [];
-            crawlRec("root_dir/sub_1_dir/sub_3_dir", paths);
+            const paths = crawl({ "dirPath": "root_dir/sub_1_dir/sub_3_dir", "returnedPathsType": "absolute" });
             expect(paths).toEqual(["root_dir/sub_1_dir/sub_3_dir/file_5"]);
         });
         it("returns files recursively under a given dir_path", async () => {
-            const paths: string[] = [];
-            crawlRec("root_dir", paths);
+            const paths = crawl({ "dirPath": "root_dir", "returnedPathsType": "absolute" });
             expect(paths).toEqual([
                 "root_dir/sub_1_dir/file_3",
                 "root_dir/sub_1_dir/sub_3_dir/file_5",
@@ -56,10 +54,15 @@ describe("crawl", () => {
                 "root_dir/file_2"
             ]);
         });
-        it("return empty file list if dir_path does not exist", async () => {
-            const paths: string[] = [];
-            crawlRec("404", paths);
-            expect(paths).toEqual([]);
+        it("throw dir_path does not exist", async () => {
+            try {
+                crawl({ "dirPath": "404", "returnedPathsType": "absolute" });
+            } catch {
+                expect(true);
+                return;
+            }
+
+            expect(false);
         });
     });
 });
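Side note on the rewritten last test: with vitest, expect(true) and expect(false) without a matcher do not assert anything, so the try/catch version above still passes even if crawl does not throw. A sketch of the same check using the toThrow matcher, assuming the crawl signature used elsewhere in this spec:

        it("throws if dir_path does not exist", () => {
            // The callback form lets expect invoke crawl and catch the error itself;
            // toThrow() fails the test when no error is thrown.
            expect(() => crawl({ "dirPath": "404", "returnedPathsType": "absolute" })).toThrow();
        });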