Nicolas 2024-05-13 13:57:34 -07:00
commit 999176d576
2 changed files with 29 additions and 6 deletions


@@ -176,6 +176,16 @@ describe("E2E Tests for API Routes", () => {
    // expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
    // });
+   it("should return a timeout error when scraping takes longer than the specified timeout", async () => {
+     const response = await request(TEST_URL)
+       .post("/v0/scrape")
+       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+       .set("Content-Type", "application/json")
+       .send({ url: "https://firecrawl.dev", timeout: 1000 });
+     expect(response.statusCode).toBe(408);
+   }, 3000);
    it("should return a successful response with a valid API key", async () => {
      const response = await request(TEST_URL)
        .post("/v0/crawlWebsitePreview")
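The new test above drives the `timeout` field end to end: a 1000 ms budget against a real page is expected to produce HTTP 408. For reference, a minimal sketch of how a client outside the test suite could pass the same field (the endpoint path, field name, and 30-second default come from this diff; the base URL and environment-variable name are assumptions for illustration):

// Hypothetical client call; adjust the base URL and key handling to your setup.
const res = await fetch("https://api.firecrawl.dev/v0/scrape", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}`,
    "Content-Type": "application/json",
  },
  // Ask the server to give up after 5 seconds instead of the 30-second default.
  body: JSON.stringify({ url: "https://firecrawl.dev", timeout: 5000 }),
});
if (res.status === 408) {
  console.error("Scrape timed out; retry with a larger `timeout` value.");
}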


@@ -15,6 +15,7 @@ export async function scrapeHelper(
  crawlerOptions: any,
  pageOptions: PageOptions,
  extractorOptions: ExtractorOptions,
+ timeout: number
): Promise<{
  success: boolean;
  error?: string;
@@ -30,7 +31,6 @@ export async function scrapeHelper(
    return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 };
  }
  const a = new WebScraperDataProvider();
  await a.setOptions({
    mode: "single_urls",
@@ -42,7 +42,19 @@ export async function scrapeHelper(
    extractorOptions: extractorOptions,
  });
- const docs = await a.getDocuments(false);
+ const timeoutPromise = new Promise<{ success: boolean; error?: string; returnCode: number }>((_, reject) =>
+   setTimeout(() => reject({ success: false, error: "Request timed out. Increase the timeout by passing `timeout` param to the request.", returnCode: 408 }), timeout)
+ );
+ const docsPromise = a.getDocuments(false);
+ let docs;
+ try {
+   docs = await Promise.race([docsPromise, timeoutPromise]);
+ } catch (error) {
+   return error;
+ }
  // make sure doc.content is not empty
  const filteredDocs = docs.filter(
    (doc: { content?: string }) => doc.content && doc.content.trim().length > 0
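The timeout in this hunk is plain Promise.race: getDocuments keeps running, but the helper stops waiting as soon as a rival promise rejects with a 408-style payload. A self-contained sketch of the same pattern, with illustrative names that are not part of the codebase:

// Generic race-against-a-deadline helper (illustrative, not from the repo).
function withTimeout<T>(work: Promise<T>, ms: number): Promise<T> {
  const timeoutPromise = new Promise<never>((_, reject) =>
    // Reject with the same shape the diff uses for its 408 response.
    setTimeout(() => reject({ success: false, error: "Request timed out.", returnCode: 408 }), ms)
  );
  return Promise.race([work, timeoutPromise]);
}

// Usage: stop waiting for a slow task after 1 second.
const slowTask = new Promise<string>((resolve) => setTimeout(() => resolve("done"), 5_000));
withTimeout(slowTask, 1_000).catch((err) => console.error(err));

Note that, as in the diff, losing the race does not cancel the underlying work; it only stops the caller from awaiting it.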
@@ -51,12 +63,11 @@ export async function scrapeHelper(
    return { success: true, error: "No page found", returnCode: 200 };
  }
  let creditsToBeBilled = filteredDocs.length;
  const creditsPerLLMExtract = 5;
- if (extractorOptions.mode === "llm-extraction"){
-   creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length)
+ if (extractorOptions.mode === "llm-extraction") {
+   creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length);
  }
  const billingResult = await billTeam(
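Under the billing formula above, an llm-extraction scrape that yields 3 non-empty documents is billed 3 + 5 × 3 = 18 credits, while a plain scrape of the same 3 documents is billed 3.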
@@ -96,6 +107,7 @@ export async function scrapeController(req: Request, res: Response) {
    mode: "markdown"
  }
  const origin = req.body.origin ?? "api";
+ const timeout = req.body.timeout ?? 30000; // Default timeout of 30 seconds
  try {
    const { success: creditsCheckSuccess, message: creditsCheckMessage } =
@@ -114,6 +126,7 @@ export async function scrapeController(req: Request, res: Response) {
    crawlerOptions,
    pageOptions,
    extractorOptions,
+   timeout
  );
  const endTime = new Date().getTime();
  const timeTakenInSeconds = (endTime - startTime) / 1000;