0

Merge branch 'main' into feat/max-depth

This commit is contained in:
Nicolas 2024-05-07 10:20:44 -07:00
commit 6505bf6bf2
10 changed files with 133 additions and 77 deletions

View File

@ -7,7 +7,6 @@ dotenv.config();
// const TEST_URL = 'http://localhost:3002' // const TEST_URL = 'http://localhost:3002'
const TEST_URL = "http://127.0.0.1:3002"; const TEST_URL = "http://127.0.0.1:3002";
describe("E2E Tests for API Routes", () => { describe("E2E Tests for API Routes", () => {
beforeAll(() => { beforeAll(() => {
process.env.USE_DB_AUTHENTICATION = "true"; process.env.USE_DB_AUTHENTICATION = "true";
@ -56,7 +55,9 @@ describe("E2E Tests for API Routes", () => {
.set("Content-Type", "application/json") .set("Content-Type", "application/json")
.send({ url: blocklistedUrl }); .send({ url: blocklistedUrl });
expect(response.statusCode).toBe(403); expect(response.statusCode).toBe(403);
expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."); expect(response.body.error).toContain(
"Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."
);
}); });
it("should return a successful response with a valid preview token", async () => { it("should return a successful response with a valid preview token", async () => {
@ -79,8 +80,29 @@ describe("E2E Tests for API Routes", () => {
expect(response.body.data).toHaveProperty("content"); expect(response.body.data).toHaveProperty("content");
expect(response.body.data).toHaveProperty("markdown"); expect(response.body.data).toHaveProperty("markdown");
expect(response.body.data).toHaveProperty("metadata"); expect(response.body.data).toHaveProperty("metadata");
expect(response.body.data).not.toHaveProperty("html");
expect(response.body.data.content).toContain("🔥 FireCrawl"); expect(response.body.data.content).toContain("🔥 FireCrawl");
}, 30000); // 30 seconds timeout }, 30000); // 30 seconds timeout
it("should return a successful response with a valid API key and includeHtml set to true", async () => {
const response = await request(TEST_URL)
.post("/v0/scrape")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send({
url: "https://firecrawl.dev",
pageOptions: { includeHtml: true },
});
expect(response.statusCode).toBe(200);
expect(response.body).toHaveProperty("data");
expect(response.body.data).toHaveProperty("content");
expect(response.body.data).toHaveProperty("markdown");
expect(response.body.data).toHaveProperty("html");
expect(response.body.data).toHaveProperty("metadata");
expect(response.body.data.content).toContain("🔥 FireCrawl");
expect(response.body.data.markdown).toContain("🔥 FireCrawl");
expect(response.body.data.html).toContain("<h1");
}, 30000); // 30 seconds timeout
}); });
describe("POST /v0/crawl", () => { describe("POST /v0/crawl", () => {
@ -106,7 +128,9 @@ describe("E2E Tests for API Routes", () => {
.set("Content-Type", "application/json") .set("Content-Type", "application/json")
.send({ url: blocklistedUrl }); .send({ url: blocklistedUrl });
expect(response.statusCode).toBe(403); expect(response.statusCode).toBe(403);
expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."); expect(response.body.error).toContain(
"Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."
);
}); });
it("should return a successful response with a valid API key", async () => { it("should return a successful response with a valid API key", async () => {
@ -122,15 +146,12 @@ describe("E2E Tests for API Routes", () => {
); );
}); });
// Additional tests for insufficient credits? // Additional tests for insufficient credits?
}); });
describe("POST /v0/crawlWebsitePreview", () => { describe("POST /v0/crawlWebsitePreview", () => {
it("should require authorization", async () => { it("should require authorization", async () => {
const response = await request(TEST_URL).post( const response = await request(TEST_URL).post("/v0/crawlWebsitePreview");
"/v0/crawlWebsitePreview"
);
expect(response.statusCode).toBe(401); expect(response.statusCode).toBe(401);
}); });
@ -143,16 +164,17 @@ describe("E2E Tests for API Routes", () => {
expect(response.statusCode).toBe(401); expect(response.statusCode).toBe(401);
}); });
it("should return an error for a blocklisted URL", async () => { // it("should return an error for a blocklisted URL", async () => {
const blocklistedUrl = "https://instagram.com/fake-test"; // const blocklistedUrl = "https://instagram.com/fake-test";
const response = await request(TEST_URL) // const response = await request(TEST_URL)
.post("/v0/crawlWebsitePreview") // .post("/v0/crawlWebsitePreview")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json") // .set("Content-Type", "application/json")
.send({ url: blocklistedUrl }); // .send({ url: blocklistedUrl });
expect(response.statusCode).toBe(403); // // is returning 429 instead of 403
expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."); // expect(response.statusCode).toBe(403);
}); // expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
// });
it("should return a successful response with a valid API key", async () => { it("should return a successful response with a valid API key", async () => {
const response = await request(TEST_URL) const response = await request(TEST_URL)
@ -183,8 +205,6 @@ describe("E2E Tests for API Routes", () => {
expect(response.statusCode).toBe(401); expect(response.statusCode).toBe(401);
}); });
it("should return a successful response with a valid API key", async () => { it("should return a successful response with a valid API key", async () => {
const response = await request(TEST_URL) const response = await request(TEST_URL)
.post("/v0/search") .post("/v0/search")
@ -246,9 +266,7 @@ describe("E2E Tests for API Routes", () => {
expect(completedResponse.body.data[0]).toHaveProperty("content"); expect(completedResponse.body.data[0]).toHaveProperty("content");
expect(completedResponse.body.data[0]).toHaveProperty("markdown"); expect(completedResponse.body.data[0]).toHaveProperty("markdown");
expect(completedResponse.body.data[0]).toHaveProperty("metadata"); expect(completedResponse.body.data[0]).toHaveProperty("metadata");
expect(completedResponse.body.data[0].content).toContain( expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
"🔥 FireCrawl"
);
}, 60000); // 60 seconds }, 60000); // 60 seconds
it("should return a successful response with max depth option for a valid crawl job", async () => { it("should return a successful response with max depth option for a valid crawl job", async () => {
@ -256,7 +274,52 @@ describe("E2E Tests for API Routes", () => {
.post("/v0/crawl") .post("/v0/crawl")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json") .set("Content-Type", "application/json")
.send({ url: "https://www.scrapethissite.com", crawlerOptions: { maxDepth: 2 }}); .send({
url: "https://www.scrapethissite.com",
crawlerOptions: { maxDepth: 2 },
});
expect(crawlResponse.statusCode).toBe(200);
const response = await request(TEST_URL)
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
expect(response.statusCode).toBe(200);
expect(response.body).toHaveProperty("status");
expect(response.body.status).toBe("active");
// wait for 60 seconds
await new Promise((r) => setTimeout(r, 60000));
const completedResponse = await request(TEST_URL)
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
expect(completedResponse.statusCode).toBe(200);
expect(completedResponse.body).toHaveProperty("status");
expect(completedResponse.body.status).toBe("completed");
expect(completedResponse.body).toHaveProperty("data");
expect(completedResponse.body.data[0]).toHaveProperty("content");
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
const urls = completedResponse.body.data.map(
(item: any) => item.metadata?.sourceURL
);
expect(urls.length).toBeGreaterThan(1);
// Check if all URLs have a maximum depth of 1
urls.forEach((url) => {
const depth = new URL(url).pathname.split("/").filter(Boolean).length;
expect(depth).toBeLessThanOrEqual(1);
});
}, 120000);
it("should return a successful response for a valid crawl job with includeHtml set to true option", async () => {
const crawlResponse = await request(TEST_URL)
.post("/v0/crawl")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send({
url: "https://firecrawl.dev",
pageOptions: { includeHtml: true },
});
expect(crawlResponse.statusCode).toBe(200); expect(crawlResponse.statusCode).toBe(200);
const response = await request(TEST_URL) const response = await request(TEST_URL)
@ -266,8 +329,8 @@ describe("E2E Tests for API Routes", () => {
expect(response.body).toHaveProperty("status"); expect(response.body).toHaveProperty("status");
expect(response.body.status).toBe("active"); expect(response.body.status).toBe("active");
// wait for 60 seconds // wait for 30 seconds
await new Promise((r) => setTimeout(r, 60000)); await new Promise((r) => setTimeout(r, 30000));
const completedResponse = await request(TEST_URL) const completedResponse = await request(TEST_URL)
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
@ -281,17 +344,14 @@ describe("E2E Tests for API Routes", () => {
expect(completedResponse.body.data[0]).toHaveProperty("markdown"); expect(completedResponse.body.data[0]).toHaveProperty("markdown");
expect(completedResponse.body.data[0]).toHaveProperty("metadata"); expect(completedResponse.body.data[0]).toHaveProperty("metadata");
const urls = completedResponse.body.data.map((item: any) => item.metadata?.sourceURL); // 120 seconds
expect(urls.length).toBeGreaterThan(1); expect(completedResponse.body.data[0]).toHaveProperty("html");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
// Check if all URLs have a maximum depth of 1 expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
urls.forEach((url) => { expect(completedResponse.body.data[0].markdown).toContain("FireCrawl");
const depth = new URL(url).pathname.split('/').filter(Boolean).length; expect(completedResponse.body.data[0].html).toContain("<h1");
expect(depth).toBeLessThanOrEqual(1); }, 60000);
}); }); // 60 seconds
}, 120000); // 120 seconds
});
describe("POST /v0/scrape with LLM Extraction", () => { describe("POST /v0/scrape with LLM Extraction", () => {
it("should extract data using LLM extraction mode", async () => { it("should extract data using LLM extraction mode", async () => {
@ -302,35 +362,33 @@ describe("E2E Tests for API Routes", () => {
.send({ .send({
url: "https://mendable.ai", url: "https://mendable.ai",
pageOptions: { pageOptions: {
onlyMainContent: true onlyMainContent: true,
}, },
extractorOptions: { extractorOptions: {
mode: "llm-extraction", mode: "llm-extraction",
extractionPrompt: "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", extractionPrompt:
"Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source",
extractionSchema: { extractionSchema: {
type: "object", type: "object",
properties: { properties: {
company_mission: { company_mission: {
type: "string" type: "string",
}, },
supports_sso: { supports_sso: {
type: "boolean" type: "boolean",
}, },
is_open_source: { is_open_source: {
type: "boolean" type: "boolean",
} },
}, },
required: ["company_mission", "supports_sso", "is_open_source"] required: ["company_mission", "supports_sso", "is_open_source"],
} },
} },
}); });
// Ensure that the job was successfully created before proceeding with LLM extraction // Ensure that the job was successfully created before proceeding with LLM extraction
expect(response.statusCode).toBe(200); expect(response.statusCode).toBe(200);
// Assuming the LLM extraction object is available in the response body under `data.llm_extraction` // Assuming the LLM extraction object is available in the response body under `data.llm_extraction`
let llmExtraction = response.body.data.llm_extraction; let llmExtraction = response.body.data.llm_extraction;
@ -383,7 +441,6 @@ describe("E2E Tests for API Routes", () => {
// } // }
// }); // });
// // Print the response body to the console for debugging purposes // // Print the response body to the console for debugging purposes
// console.log("Response companies:", response.body.data.llm_extraction.companies); // console.log("Response companies:", response.body.data.llm_extraction.companies);
@ -405,9 +462,6 @@ describe("E2E Tests for API Routes", () => {
// }, 120000); // 120 secs // }, 120000); // 120 secs
// }); // });
describe("GET /is-production", () => { describe("GET /is-production", () => {
it("should return the production status", async () => { it("should return the production status", async () => {
const response = await request(TEST_URL).get("/is-production"); const response = await request(TEST_URL).get("/is-production");

View File

@ -35,7 +35,7 @@ export async function crawlController(req: Request, res: Response) {
const mode = req.body.mode ?? "crawl"; const mode = req.body.mode ?? "crawl";
const crawlerOptions = req.body.crawlerOptions ?? {}; const crawlerOptions = req.body.crawlerOptions ?? {};
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false }; const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
if (mode === "single_urls" && !url.includes(",")) { if (mode === "single_urls" && !url.includes(",")) {
try { try {

View File

@ -26,7 +26,7 @@ export async function crawlPreviewController(req: Request, res: Response) {
const mode = req.body.mode ?? "crawl"; const mode = req.body.mode ?? "crawl";
const crawlerOptions = req.body.crawlerOptions ?? {}; const crawlerOptions = req.body.crawlerOptions ?? {};
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false }; const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
const job = await addWebScraperJob({ const job = await addWebScraperJob({
url: url, url: url,

View File

@ -1,4 +1,4 @@
import { ExtractorOptions } from './../lib/entities'; import { ExtractorOptions, PageOptions } from './../lib/entities';
import { Request, Response } from "express"; import { Request, Response } from "express";
import { WebScraperDataProvider } from "../scraper/WebScraper"; import { WebScraperDataProvider } from "../scraper/WebScraper";
import { billTeam, checkTeamCredits } from "../services/billing/credit_billing"; import { billTeam, checkTeamCredits } from "../services/billing/credit_billing";
@ -13,8 +13,8 @@ export async function scrapeHelper(
req: Request, req: Request,
team_id: string, team_id: string,
crawlerOptions: any, crawlerOptions: any,
pageOptions: any, pageOptions: PageOptions,
extractorOptions: ExtractorOptions extractorOptions: ExtractorOptions,
): Promise<{ ): Promise<{
success: boolean; success: boolean;
error?: string; error?: string;
@ -39,7 +39,7 @@ export async function scrapeHelper(
...crawlerOptions, ...crawlerOptions,
}, },
pageOptions: pageOptions, pageOptions: pageOptions,
extractorOptions: extractorOptions extractorOptions: extractorOptions,
}); });
const docs = await a.getDocuments(false); const docs = await a.getDocuments(false);
@ -91,7 +91,7 @@ export async function scrapeController(req: Request, res: Response) {
return res.status(status).json({ error }); return res.status(status).json({ error });
} }
const crawlerOptions = req.body.crawlerOptions ?? {}; const crawlerOptions = req.body.crawlerOptions ?? {};
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false }; const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
const extractorOptions = req.body.extractorOptions ?? { const extractorOptions = req.body.extractorOptions ?? {
mode: "markdown" mode: "markdown"
} }
@ -113,7 +113,7 @@ export async function scrapeController(req: Request, res: Response) {
team_id, team_id,
crawlerOptions, crawlerOptions,
pageOptions, pageOptions,
extractorOptions extractorOptions,
); );
const endTime = new Date().getTime(); const endTime = new Date().getTime();
const timeTakenInSeconds = (endTime - startTime) / 1000; const timeTakenInSeconds = (endTime - startTime) / 1000;
@ -132,7 +132,7 @@ export async function scrapeController(req: Request, res: Response) {
pageOptions: pageOptions, pageOptions: pageOptions,
origin: origin, origin: origin,
extractor_options: extractorOptions, extractor_options: extractorOptions,
num_tokens: numTokens num_tokens: numTokens,
}); });
return res.status(result.returnCode).json(result); return res.status(result.returnCode).json(result);
} catch (error) { } catch (error) {

View File

@ -13,7 +13,7 @@ export async function searchHelper(
team_id: string, team_id: string,
crawlerOptions: any, crawlerOptions: any,
pageOptions: PageOptions, pageOptions: PageOptions,
searchOptions: SearchOptions searchOptions: SearchOptions,
): Promise<{ ): Promise<{
success: boolean; success: boolean;
error?: string; error?: string;
@ -66,6 +66,7 @@ export async function searchHelper(
...pageOptions, ...pageOptions,
onlyMainContent: pageOptions?.onlyMainContent ?? true, onlyMainContent: pageOptions?.onlyMainContent ?? true,
fetchPageContent: pageOptions?.fetchPageContent ?? true, fetchPageContent: pageOptions?.fetchPageContent ?? true,
includeHtml: pageOptions?.includeHtml ?? false,
fallback: false, fallback: false,
}, },
}); });
@ -117,6 +118,7 @@ export async function searchController(req: Request, res: Response) {
} }
const crawlerOptions = req.body.crawlerOptions ?? {}; const crawlerOptions = req.body.crawlerOptions ?? {};
const pageOptions = req.body.pageOptions ?? { const pageOptions = req.body.pageOptions ?? {
includeHtml: false,
onlyMainContent: true, onlyMainContent: true,
fetchPageContent: true, fetchPageContent: true,
fallback: false, fallback: false,
@ -141,7 +143,7 @@ export async function searchController(req: Request, res: Response) {
team_id, team_id,
crawlerOptions, crawlerOptions,
pageOptions, pageOptions,
searchOptions searchOptions,
); );
const endTime = new Date().getTime(); const endTime = new Date().getTime();
const timeTakenInSeconds = (endTime - startTime) / 1000; const timeTakenInSeconds = (endTime - startTime) / 1000;

View File

@ -12,9 +12,9 @@ export interface Progress {
export type PageOptions = { export type PageOptions = {
onlyMainContent?: boolean; onlyMainContent?: boolean;
includeHtml?: boolean;
fallback?: boolean; fallback?: boolean;
fetchPageContent?: boolean; fetchPageContent?: boolean;
}; };
export type ExtractorOptions = { export type ExtractorOptions = {

View File

@ -26,7 +26,7 @@ export async function startWebScraperPipeline({
onError: (error) => { onError: (error) => {
job.moveToFailed(error); job.moveToFailed(error);
}, },
team_id: job.data.team_id, team_id: job.data.team_id
})) as { success: boolean; message: string; docs: Document[] }; })) as { success: boolean; message: string; docs: Document[] };
} }
export async function runWebScraper({ export async function runWebScraper({
@ -59,14 +59,14 @@ export async function runWebScraper({
mode: mode, mode: mode,
urls: [url], urls: [url],
crawlerOptions: crawlerOptions, crawlerOptions: crawlerOptions,
pageOptions: pageOptions, pageOptions: pageOptions
}); });
} else { } else {
await provider.setOptions({ await provider.setOptions({
mode: mode, mode: mode,
urls: url.split(","), urls: url.split(","),
crawlerOptions: crawlerOptions, crawlerOptions: crawlerOptions,
pageOptions: pageOptions, pageOptions: pageOptions
}); });
} }
const docs = (await provider.getDocuments(false, (progress: Progress) => { const docs = (await provider.getDocuments(false, (progress: Progress) => {

View File

@ -46,7 +46,7 @@ export class WebScraperDataProvider {
const batchUrls = urls.slice(i, i + this.concurrentRequests); const batchUrls = urls.slice(i, i + this.concurrentRequests);
await Promise.all( await Promise.all(
batchUrls.map(async (url, index) => { batchUrls.map(async (url, index) => {
const result = await scrapSingleUrl(url, true, this.pageOptions); const result = await scrapSingleUrl(url, this.pageOptions);
processedUrls++; processedUrls++;
if (inProgress) { if (inProgress) {
inProgress({ inProgress({
@ -144,6 +144,7 @@ export class WebScraperDataProvider {
}); });
return links.map(url => ({ return links.map(url => ({
content: "", content: "",
html: this.pageOptions?.includeHtml ? "" : undefined,
markdown: "", markdown: "",
metadata: { sourceURL: url }, metadata: { sourceURL: url },
})); }));
@ -327,12 +328,9 @@ export class WebScraperDataProvider {
this.limit = options.crawlerOptions?.limit ?? 10000; this.limit = options.crawlerOptions?.limit ?? 10000;
this.generateImgAltText = this.generateImgAltText =
options.crawlerOptions?.generateImgAltText ?? false; options.crawlerOptions?.generateImgAltText ?? false;
this.pageOptions = options.pageOptions ?? {onlyMainContent: false}; this.pageOptions = options.pageOptions ?? { onlyMainContent: false, includeHtml: false };
this.extractorOptions = options.extractorOptions ?? {mode: "markdown"} this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false; this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
console.log("maxDepth:", this.maxCrawledDepth, options.crawlerOptions?.maxDepth);
//! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check //! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
this.excludes = this.excludes.filter((item) => item !== ""); this.excludes = this.excludes.filter((item) => item !== "");

View File

@ -103,8 +103,7 @@ export async function scrapWithPlaywright(url: string): Promise<string> {
export async function scrapSingleUrl( export async function scrapSingleUrl(
urlToScrap: string, urlToScrap: string,
toMarkdown: boolean = true, pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false },
pageOptions: PageOptions = { onlyMainContent: true }
): Promise<Document> { ): Promise<Document> {
urlToScrap = urlToScrap.trim(); urlToScrap = urlToScrap.trim();
@ -193,6 +192,7 @@ export async function scrapSingleUrl(
url: urlToScrap, url: urlToScrap,
content: text, content: text,
markdown: text, markdown: text,
html: pageOptions.includeHtml ? html : undefined,
metadata: { ...metadata, sourceURL: urlToScrap }, metadata: { ...metadata, sourceURL: urlToScrap },
} as Document; } as Document;
} }
@ -216,6 +216,7 @@ export async function scrapSingleUrl(
return { return {
content: text, content: text,
markdown: text, markdown: text,
html: pageOptions.includeHtml ? html : undefined,
metadata: { ...metadata, sourceURL: urlToScrap }, metadata: { ...metadata, sourceURL: urlToScrap },
} as Document; } as Document;
} catch (error) { } catch (error) {
@ -223,6 +224,7 @@ export async function scrapSingleUrl(
return { return {
content: "", content: "",
markdown: "", markdown: "",
html: "",
metadata: { sourceURL: urlToScrap }, metadata: { sourceURL: urlToScrap },
} as Document; } as Document;
} }

View File

@ -40,7 +40,7 @@ export interface FirecrawlJob {
pageOptions?: any; pageOptions?: any;
origin: string; origin: string;
extractor_options?: ExtractorOptions, extractor_options?: ExtractorOptions,
num_tokens?: number num_tokens?: number,
} }
export enum RateLimiterMode { export enum RateLimiterMode {