
Commit fb08cb3

fix: Refactor CWS crawler to more safely extract extension details
1 parent 2ced470

2 files changed: +212 -163 lines

Changed test file: +28 -35 lines
@@ -1,41 +1,34 @@
-import { describe, expect, it } from "bun:test";
+import { beforeEach, describe, expect, it, mock } from "bun:test";
 import { crawlExtension } from "../chrome-crawler";
+import { readdir } from "node:fs/promises";
+import { join } from "node:path";
 
-const githubBetterLineCountsId = "ocfdgncpifmegplaglcnglhioflaimkd";
+const fetchMock = mock<typeof fetch>(() => {
+  throw Error("Not mocked");
+});
+globalThis.fetch = fetchMock;
 
-describe("Chrome Web Store Crawler", () => {
-  it("should load and crawl an extension ID correctly", async () => {
-    const res = await crawlExtension(githubBetterLineCountsId, "en");
+describe("Chrome Web Store Crawler", async () => {
+  const fixturesDir = join(import.meta.dir, "fixtures/chrome-web-store");
+  const testFiles = (await readdir(fixturesDir))
+    .filter((file) => !file.startsWith("."))
+    .toSorted();
+  const getExtensionIdFromFile = (file: string) =>
+    file.match(/.*-([a-z]+)\.html/)![1];
 
-    expect(res).toEqual({
-      iconUrl:
-        "https://lh3.googleusercontent.com/GcffNyCJaxT2G9dsQCJHhUEMlu_E0vEzph5cLPrQj7UHKat7QyCzGu69Dmp_DDUL8rY-bPMFJceQarS1wcqdwTalTg=s256",
-      id: githubBetterLineCountsId,
-      lastUpdated: expect.any(String),
-      longDescription: expect.stringContaining("Isn't it annoying when you"),
-      name: "GitHub: Better Line Counts",
-      rating: expect.any(Number),
-      reviewCount: expect.any(Number),
-      shortDescription: "Remove generated files from GitHub line counts",
-      storeUrl: expect.stringContaining(
-        "https://chromewebstore.google.com/detail/github-better-line-counts/ocfdgncpifmegplaglcnglhioflaimkd",
-      ),
-      version: expect.any(String),
-      weeklyActiveUsers: expect.any(Number),
-      screenshots: [
-        {
-          index: 0,
-          indexUrl:
-            "http://localhost:3000/api/rest/chrome-extensions/ocfdgncpifmegplaglcnglhioflaimkd/screenshots/0",
-          rawUrl: expect.any(String),
-        },
-        {
-          index: 1,
-          indexUrl:
-            "http://localhost:3000/api/rest/chrome-extensions/ocfdgncpifmegplaglcnglhioflaimkd/screenshots/1",
-          rawUrl: expect.any(String),
-        },
-      ],
-    });
+  beforeEach(() => {
+    fetchMock.mockReset();
   });
+
+  it.each(testFiles)(
+    "should extract extension details from %s",
+    async (file) => {
+      const id = getExtensionIdFromFile(file);
+      globalThis.fetch = mock(() =>
+        Promise.resolve(new Response(Bun.file(join(fixturesDir, file)))),
+      );
+      const res = await crawlExtension(id, "en", true);
+      expect(res).toMatchSnapshot();
+    },
+  );
 });
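
The new test discovers its cases from the fixture directory, so covering another extension is a matter of saving a Chrome Web Store detail page into fixtures/chrome-web-store with a name ending in -<extensionId>.html (that suffix is what getExtensionIdFromFile parses back out). Below is a minimal capture-script sketch; the script itself is not part of this commit, and its location next to the test file and the hl=en query parameter are assumptions.

```ts
// Hypothetical helper for capturing a new fixture; not included in this commit.
// Usage: bun run capture-cws-fixture.ts <slug> <extension-id>
import { join } from "node:path";

const [slug, id] = process.argv.slice(2);
if (!slug || !id) {
  console.error("Usage: bun run capture-cws-fixture.ts <slug> <extension-id>");
  process.exit(1);
}

// URL shape taken from the storeUrl asserted in the old test; hl=en is an assumption.
const url = `https://chromewebstore.google.com/detail/${slug}/${id}?hl=en`;
const res = await fetch(url);
if (!res.ok) throw new Error(`Fetching ${url} failed with status ${res.status}`);

// The test resolves fixtures relative to its own directory, so this script is
// assumed to live next to the test file. The "-<id>.html" suffix is required
// for getExtensionIdFromFile() to recover the extension ID.
const fixturesDir = join(import.meta.dir, "fixtures/chrome-web-store");
await Bun.write(join(fixturesDir, `${slug}-${id}.html`), await res.text());
console.log(`Saved fixture ${slug}-${id}.html`);
```

On the next `bun test` run, `it.each(testFiles)` picks the new file up automatically and `toMatchSnapshot()` records a baseline for it; existing baselines can be refreshed with `bun test --update-snapshots`.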
