From ee282c3d5537f87ee81f84cf6ea6999c422268c0 Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Tue, 11 Jun 2024 15:24:39 -0300
Subject: [PATCH 1/4] Added allowBackwardCrawling option

---
 apps/api/src/controllers/crawl.ts          |  8 +++-----
 apps/api/src/lib/entities.ts               |  1 +
 apps/api/src/scraper/WebScraper/crawler.ts | 12 +++++++++++-
 apps/api/src/scraper/WebScraper/index.ts   |  4 +++-
 4 files changed, 18 insertions(+), 7 deletions(-)

diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts
index 5345b4f..55c3a2e 100644
--- a/apps/api/src/controllers/crawl.ts
+++ b/apps/api/src/controllers/crawl.ts
@@ -55,7 +55,7 @@ export async function crawlController(req: Request, res: Response) {
     }
 
     const mode = req.body.mode ?? "crawl";
-    const crawlerOptions = req.body.crawlerOptions ?? {};
+    const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false, returnOnlyUrls: true };
     const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
 
     if (mode === "single_urls" && !url.includes(",")) {
@@ -64,9 +64,7 @@ export async function crawlController(req: Request, res: Response) {
       await a.setOptions({
         mode: "single_urls",
         urls: [url],
-        crawlerOptions: {
-          returnOnlyUrls: true,
-        },
+        crawlerOptions: crawlerOptions,
         pageOptions: pageOptions,
       });
 
@@ -91,7 +89,7 @@ export async function crawlController(req: Request, res: Response) {
     const job = await addWebScraperJob({
       url: url,
       mode: mode ?? "crawl", // fix for single urls not working
-      crawlerOptions: { ...crawlerOptions },
+      crawlerOptions: crawlerOptions,
       team_id: team_id,
       pageOptions: pageOptions,
       origin: req.body.origin ?? "api",
diff --git a/apps/api/src/lib/entities.ts b/apps/api/src/lib/entities.ts
index 744c07b..facc81e 100644
--- a/apps/api/src/lib/entities.ts
+++ b/apps/api/src/lib/entities.ts
@@ -46,6 +46,7 @@ export type CrawlerOptions = {
   replaceAllPathsWithAbsolutePaths?: boolean;
   ignoreSitemap?: boolean;
   mode?: "default" | "fast"; // have a mode of some sort
+  allowBackwardCrawling?: boolean;
 }
 
 export type WebScraperOptions = {
diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts
index fc95e7c..7720991 100644
--- a/apps/api/src/scraper/WebScraper/crawler.ts
+++ b/apps/api/src/scraper/WebScraper/crawler.ts
@@ -20,6 +20,7 @@ export class WebCrawler {
   private robotsTxtUrl: string;
   private robots: any;
   private generateImgAltText: boolean;
+  private allowBackwardCrawling: boolean;
 
   constructor({
     initialUrl,
@@ -29,6 +30,7 @@ export class WebCrawler {
     limit = 10000,
     generateImgAltText = false,
     maxCrawledDepth = 10,
+    allowBackwardCrawling = false
   }: {
     initialUrl: string;
     includes?: string[];
@@ -37,6 +39,7 @@ export class WebCrawler {
     limit?: number;
     generateImgAltText?: boolean;
     maxCrawledDepth?: number;
+    allowBackwardCrawling?: boolean;
   }) {
     this.initialUrl = initialUrl;
     this.baseUrl = new URL(initialUrl).origin;
@@ -49,6 +52,7 @@ export class WebCrawler {
     this.maxCrawledLinks = maxCrawledLinks ?? limit;
     this.maxCrawledDepth = maxCrawledDepth ?? 10;
     this.generateImgAltText = generateImgAltText ?? false;
+    this.allowBackwardCrawling = allowBackwardCrawling ?? false;
   }
 
   private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
@@ -90,10 +94,16 @@ export class WebCrawler {
       const linkHostname = normalizedLink.hostname.replace(/^www\./, '');
 
       // Ensure the protocol and hostname match, and the path starts with the initial URL's path
-      if (linkHostname !== initialHostname || !normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+      if (linkHostname !== initialHostname) {
         return false;
       }
 
+      if (!this.allowBackwardCrawling) {
+        if (!normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+          return false;
+        }
+      }
+
       const isAllowed = this.robots.isAllowed(link, "FireCrawlAgent") ?? true;
       // Check if the link is disallowed by robots.txt
       if (!isAllowed) {
diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts
index 7dcd175..5344320 100644
--- a/apps/api/src/scraper/WebScraper/index.ts
+++ b/apps/api/src/scraper/WebScraper/index.ts
@@ -38,8 +38,8 @@ export class WebScraperDataProvider {
   private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" =
     "gpt-4-turbo";
   private crawlerMode: string = "default";
+  private allowBackwardCrawling: boolean = false;
 
-
   authorize(): void {
     throw new Error("Method not implemented.");
   }
@@ -171,6 +171,7 @@ export class WebScraperDataProvider {
       maxCrawledDepth: this.maxCrawledDepth,
       limit: this.limit,
       generateImgAltText: this.generateImgAltText,
+      allowBackwardCrawling: this.allowBackwardCrawling,
     });
 
     let links = await crawler.start(
@@ -480,6 +481,7 @@ export class WebScraperDataProvider {
     this.excludes = this.excludes.filter((item) => item !== "");
     this.crawlerMode = options.crawlerOptions?.mode ?? "default";
     this.ignoreSitemap = options.crawlerOptions?.ignoreSitemap ?? false;
+    this.allowBackwardCrawling = options.crawlerOptions?.allowBackwardCrawling ?? false;
 
     // make sure all urls start with https://
     this.urls = this.urls.map((url) => {

From def2ba998717fcbf97d9fe0679bc92e4e4657fa6 Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Tue, 11 Jun 2024 17:46:25 -0300
Subject: [PATCH 2/4] added tests

---
 .../src/__tests__/e2e_withAuth/index.test.ts  | 114 ++++++++++++++----
 1 file changed, 90 insertions(+), 24 deletions(-)

diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
index f619254..05dd7ff 100644
--- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts
+++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
@@ -596,7 +596,7 @@ describe("E2E Tests for API Routes", () => {
         .post("/v0/crawl")
         .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
         .set("Content-Type", "application/json")
-        .send({ url: "https://roastmywebsite.ai" });
+        .send({ url: "https://mendable.ai/blog" });
       expect(crawlResponse.statusCode).toBe(200);
 
       let isCompleted = false;
@@ -622,7 +622,13 @@ describe("E2E Tests for API Routes", () => {
       expect(completedResponse.body.data[0]).toHaveProperty("content");
       expect(completedResponse.body.data[0]).toHaveProperty("markdown");
       expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-      expect(completedResponse.body.data[0].content).toContain("_Roast_");
+      expect(completedResponse.body.data[0].content).toContain("Mendable");
+
+      const childrenLinks = completedResponse.body.data.filter(doc =>
+        doc.sourceURL && doc.sourceURL.startsWith("https://mendable.ai/blog")
+      );
+
+      expect(childrenLinks.length).toBe(completedResponse.body.data.length);
     }, 120000); // 120 seconds
 
     it.concurrent('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension', async () => {
@@ -757,40 +763,100 @@ describe("E2E Tests for API Routes", () => {
     }, 60000);
   }); // 60 seconds
 
-  it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
+  it.concurrent("should return a successful response for a valid crawl job with allowBackwardCrawling set to true option", async () => {
     const crawlResponse = await request(TEST_URL)
       .post("/v0/crawl")
       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
       .set("Content-Type", "application/json")
-      .send({ url: "https://jestjs.io" });
+      .send({
+        url: "https://mendable.ai/blog",
+        pageOptions: { includeHtml: true },
+        crawlerOptions: { allowBackwardCrawling: true },
+      });
 
     expect(crawlResponse.statusCode).toBe(200);
+
+    let isFinished = false;
+    let completedResponse;
 
-    // wait for 30 seconds
-    await new Promise((r) => setTimeout(r, 20000));
+    while (!isFinished) {
+      const response = await request(TEST_URL)
+        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+      expect(response.statusCode).toBe(200);
+      expect(response.body).toHaveProperty("status");
 
-    const response = await request(TEST_URL)
-      .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
-      .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    expect(response.statusCode).toBe(200);
-    expect(response.body).toHaveProperty("status");
-    expect(response.body.status).toBe("cancelled");
+      if (response.body.status === "completed") {
+        isFinished = true;
+        completedResponse = response;
+      } else {
+        await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+      }
+    }
 
-    await new Promise((r) => setTimeout(r, 10000));
-
-    const completedResponse = await request(TEST_URL)
-      .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-      .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
     expect(completedResponse.statusCode).toBe(200);
     expect(completedResponse.body).toHaveProperty("status");
-    expect(completedResponse.body.status).toBe("failed");
+    expect(completedResponse.body.status).toBe("completed");
     expect(completedResponse.body).toHaveProperty("data");
-    expect(completedResponse.body.data).toEqual(null);
-    expect(completedResponse.body).toHaveProperty("partial_data");
-    expect(completedResponse.body.partial_data[0]).toHaveProperty("content");
-    expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown");
-    expect(completedResponse.body.partial_data[0]).toHaveProperty("metadata");
+    expect(completedResponse.body.data[0]).toHaveProperty("content");
+    expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+    expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+    expect(completedResponse.body.data[0]).toHaveProperty("html");
+    expect(completedResponse.body.data[0].content).toContain("Mendable");
+    expect(completedResponse.body.data[0].markdown).toContain("Mendable");
+
+    const onlyChildrenLinks = completedResponse.body.data.filter(doc => {
+      return doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog")
+    });
+
+    expect(completedResponse.body.data.length).toBeGreaterThan(onlyChildrenLinks.length);
+  }, 60000);
+
+  // it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
+  //   const crawlResponse = await request(TEST_URL)
+  //     .post("/v0/crawl")
+  //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+  //     .set("Content-Type", "application/json")
+  //     .send({ url: "https://scrapethissite.com" });
+
+  //   expect(crawlResponse.statusCode).toBe(200);
+
+  //   await new Promise((r) => setTimeout(r, 2000)); // Wait for 1 seconds before cancelling the job
+
+  //   const responseCancel = await request(TEST_URL)
+  //     .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
+  //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+  //   expect(responseCancel.statusCode).toBe(200);
+
+  //   let isFinished = false;
+  //   let completedResponse;
+
+  //   while (!isFinished) {
+  //     const response = await request(TEST_URL)
+  //       .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+  //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+  //     expect(response.statusCode).toBe(200);
+  //     expect(response.body).toHaveProperty("status");
+  //     console.log(response.body.status)
+
+  //     if (response.body.status === "failed") {
+  //       isFinished = true;
+  //       completedResponse = response;
+  //     } else {
+  //       await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+  //     }
+  //   }
+
+  //   expect(completedResponse.statusCode).toBe(200);
+  //   expect(completedResponse.body).toHaveProperty("status");
+  //   expect(completedResponse.body.status).toBe("failed");
+  //   expect(completedResponse.body).toHaveProperty("data");
+  //   expect(completedResponse.body.data).toBeNull();
+  //   expect(completedResponse.body).toHaveProperty("partial_data");
+  //   expect(completedResponse.body.partial_data[0]).toHaveProperty("content");
+  //   expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown");
+  //   expect(completedResponse.body.partial_data[0]).toHaveProperty("metadata");
 
-  }, 60000); // 60 seconds
+  // }, 60000); // 60 seconds
 
   describe("POST /v0/scrape with LLM Extraction", () => {
     it.concurrent("should extract data using LLM extraction mode", async () => {

From df3a678cf485107558f38b66db96381ca5012d14 Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Tue, 11 Jun 2024 17:46:56 -0300
Subject: [PATCH 3/4] getting back the cancel test, this should work

---
 .../src/__tests__/e2e_withAuth/index.test.ts  | 76 +++++++++----------
 1 file changed, 38 insertions(+), 38 deletions(-)

diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
index 05dd7ff..5adf05d 100644
--- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts
+++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
@@ -811,52 +811,52 @@ describe("E2E Tests for API Routes", () => {
     expect(completedResponse.body.data.length).toBeGreaterThan(onlyChildrenLinks.length);
   }, 60000);
 
-  // it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
-  //   const crawlResponse = await request(TEST_URL)
-  //     .post("/v0/crawl")
-  //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-  //     .set("Content-Type", "application/json")
-  //     .send({ url: "https://scrapethissite.com" });
+  it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
+    const crawlResponse = await request(TEST_URL)
+      .post("/v0/crawl")
+      .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+      .set("Content-Type", "application/json")
+      .send({ url: "https://scrapethissite.com" });
 
-  //   expect(crawlResponse.statusCode).toBe(200);
+    expect(crawlResponse.statusCode).toBe(200);
 
-  //   await new Promise((r) => setTimeout(r, 2000)); // Wait for 1 seconds before cancelling the job
+    await new Promise((r) => setTimeout(r, 2000)); // Wait for 1 seconds before cancelling the job
 
-  //   const responseCancel = await request(TEST_URL)
-  //     .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
-  //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-  //   expect(responseCancel.statusCode).toBe(200);
+    const responseCancel = await request(TEST_URL)
+      .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
+      .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+    expect(responseCancel.statusCode).toBe(200);
 
-  //   let isFinished = false;
-  //   let completedResponse;
+    let isFinished = false;
+    let completedResponse;
 
-  //   while (!isFinished) {
-  //     const response = await request(TEST_URL)
-  //       .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-  //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-  //     expect(response.statusCode).toBe(200);
-  //     expect(response.body).toHaveProperty("status");
-  //     console.log(response.body.status)
+    while (!isFinished) {
+      const response = await request(TEST_URL)
+        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+      expect(response.statusCode).toBe(200);
+      expect(response.body).toHaveProperty("status");
+      console.log(response.body.status)
 
-  //     if (response.body.status === "failed") {
-  //       isFinished = true;
-  //       completedResponse = response;
-  //     } else {
-  //       await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
-  //     }
-  //   }
+      if (response.body.status === "failed") {
+        isFinished = true;
+        completedResponse = response;
+      } else {
+        await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+      }
+    }
 
-  //   expect(completedResponse.statusCode).toBe(200);
-  //   expect(completedResponse.body).toHaveProperty("status");
-  //   expect(completedResponse.body.status).toBe("failed");
-  //   expect(completedResponse.body).toHaveProperty("data");
-  //   expect(completedResponse.body.data).toBeNull();
-  //   expect(completedResponse.body).toHaveProperty("partial_data");
-  //   expect(completedResponse.body.partial_data[0]).toHaveProperty("content");
-  //   expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown");
-  //   expect(completedResponse.body.partial_data[0]).toHaveProperty("metadata");
+    expect(completedResponse.statusCode).toBe(200);
+    expect(completedResponse.body).toHaveProperty("status");
+    expect(completedResponse.body.status).toBe("failed");
+    expect(completedResponse.body).toHaveProperty("data");
+    expect(completedResponse.body.data).toBeNull();
+    expect(completedResponse.body).toHaveProperty("partial_data");
+    expect(completedResponse.body.partial_data[0]).toHaveProperty("content");
+    expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown");
+    expect(completedResponse.body.partial_data[0]).toHaveProperty("metadata");
 
-  // }, 60000); // 60 seconds
+  }, 60000); // 60 seconds
 
   describe("POST /v0/scrape with LLM Extraction", () => {
     it.concurrent("should extract data using LLM extraction mode", async () => {

From 01c9f071fa554ec687882ad3727e480b3cc09dcd Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Wed, 12 Jun 2024 11:27:06 -0300
Subject: [PATCH 4/4] fixed

---
 .../src/__tests__/e2e_withAuth/index.test.ts  | 36 +++++++------------
 apps/api/src/controllers/crawl.ts             |  4 +--
 2 files changed, 14 insertions(+), 26 deletions(-)

diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
index 5adf05d..02e4a47 100644
--- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts
+++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
@@ -624,11 +624,11 @@ describe("E2E Tests for API Routes", () => {
       expect(completedResponse.body.data[0]).toHaveProperty("metadata");
       expect(completedResponse.body.data[0].content).toContain("Mendable");
 
-      const childrenLinks = completedResponse.body.data.filter(doc =>
-        doc.sourceURL && doc.sourceURL.startsWith("https://mendable.ai/blog")
-      );
+      const childrenLinks = completedResponse.body.data.filter(doc =>
+        doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog")
+      );
 
-      expect(childrenLinks.length).toBe(completedResponse.body.data.length);
+      expect(childrenLinks.length).toBe(completedResponse.body.data.length);
     }, 120000); // 120 seconds
 
     it.concurrent('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension', async () => {
@@ -816,35 +816,23 @@ describe("E2E Tests for API Routes", () => {
       .post("/v0/crawl")
       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
      .set("Content-Type", "application/json")
-      .send({ url: "https://scrapethissite.com" });
+      .send({ url: "https://jestjs.io" });
 
     expect(crawlResponse.statusCode).toBe(200);
 
-    await new Promise((r) => setTimeout(r, 2000)); // Wait for 1 seconds before cancelling the job
+    await new Promise((r) => setTimeout(r, 20000));
 
     const responseCancel = await request(TEST_URL)
       .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
     expect(responseCancel.statusCode).toBe(200);
+    expect(responseCancel.body).toHaveProperty("status");
+    expect(responseCancel.body.status).toBe("cancelled");
 
-    let isFinished = false;
-    let completedResponse;
-
-    while (!isFinished) {
-      const response = await request(TEST_URL)
-        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty("status");
-      console.log(response.body.status)
-
-      if (response.body.status === "failed") {
-        isFinished = true;
-        completedResponse = response;
-      } else {
-        await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
-      }
-    }
+    await new Promise((r) => setTimeout(r, 10000));
+    const completedResponse = await request(TEST_URL)
+      .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+      .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
 
     expect(completedResponse.statusCode).toBe(200);
     expect(completedResponse.body).toHaveProperty("status");
diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts
index 55c3a2e..58d01e2 100644
--- a/apps/api/src/controllers/crawl.ts
+++ b/apps/api/src/controllers/crawl.ts
@@ -55,7 +55,7 @@ export async function crawlController(req: Request, res: Response) {
     }
 
     const mode = req.body.mode ?? "crawl";
-    const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false, returnOnlyUrls: true };
+    const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false };
     const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
 
     if (mode === "single_urls" && !url.includes(",")) {
@@ -64,7 +64,7 @@ export async function crawlController(req: Request, res: Response) {
       await a.setOptions({
         mode: "single_urls",
         urls: [url],
-        crawlerOptions: crawlerOptions,
+        crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true },
         pageOptions: pageOptions,
       });
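
A minimal usage sketch of the option this series introduces (not part of the patches themselves): it assumes the same supertest harness the e2e tests above rely on, i.e. a TEST_URL pointing at a running API instance and a TEST_API_KEY in the environment; the helper name and start URL are illustrative only.

import request from "supertest";

// Hypothetical helper: start a crawl at a sub-path while letting the crawler
// follow same-hostname links outside that sub-path (e.g. /pricing when the
// crawl starts at /blog). That is what crawlerOptions.allowBackwardCrawling
// toggles in crawler.ts above; with it unset, links must share the initial
// URL's path prefix.
async function startBackwardCrawl(startUrl: string): Promise<string> {
  const TEST_URL = process.env.TEST_URL; // assumed to be set, as in the tests
  const crawlResponse = await request(TEST_URL)
    .post("/v0/crawl")
    .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
    .set("Content-Type", "application/json")
    .send({
      url: startUrl,
      crawlerOptions: { allowBackwardCrawling: true },
    });
  // The controller queues a job; poll /v0/crawl/status/:jobId for its results.
  return crawlResponse.body.jobId;
}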