From ee282c3d5537f87ee81f84cf6ea6999c422268c0 Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Tue, 11 Jun 2024 15:24:39 -0300
Subject: [PATCH] Added allowBackwardCrawling option

---
 apps/api/src/controllers/crawl.ts          |  8 +++-----
 apps/api/src/lib/entities.ts               |  1 +
 apps/api/src/scraper/WebScraper/crawler.ts | 12 +++++++++++-
 apps/api/src/scraper/WebScraper/index.ts   |  4 +++-
 4 files changed, 18 insertions(+), 7 deletions(-)

diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts
index 5345b4f..55c3a2e 100644
--- a/apps/api/src/controllers/crawl.ts
+++ b/apps/api/src/controllers/crawl.ts
@@ -55,7 +55,7 @@ export async function crawlController(req: Request, res: Response) {
     }
 
     const mode = req.body.mode ?? "crawl";
-    const crawlerOptions = req.body.crawlerOptions ?? {};
+    const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false, returnOnlyUrls: true };
     const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
 
     if (mode === "single_urls" && !url.includes(",")) {
@@ -64,9 +64,7 @@ export async function crawlController(req: Request, res: Response) {
       await a.setOptions({
         mode: "single_urls",
         urls: [url],
-        crawlerOptions: {
-          returnOnlyUrls: true,
-        },
+        crawlerOptions: crawlerOptions,
         pageOptions: pageOptions,
       });
 
@@ -91,7 +89,7 @@ export async function crawlController(req: Request, res: Response) {
     const job = await addWebScraperJob({
       url: url,
       mode: mode ?? "crawl", // fix for single urls not working
-      crawlerOptions: { ...crawlerOptions },
+      crawlerOptions: crawlerOptions,
       team_id: team_id,
       pageOptions: pageOptions,
       origin: req.body.origin ?? "api",
diff --git a/apps/api/src/lib/entities.ts b/apps/api/src/lib/entities.ts
index 744c07b..facc81e 100644
--- a/apps/api/src/lib/entities.ts
+++ b/apps/api/src/lib/entities.ts
@@ -46,6 +46,7 @@ export type CrawlerOptions = {
   replaceAllPathsWithAbsolutePaths?: boolean;
   ignoreSitemap?: boolean;
   mode?: "default" | "fast"; // have a mode of some sort
+  allowBackwardCrawling?: boolean;
 }
 
 export type WebScraperOptions = {
diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts
index fc95e7c..7720991 100644
--- a/apps/api/src/scraper/WebScraper/crawler.ts
+++ b/apps/api/src/scraper/WebScraper/crawler.ts
@@ -20,6 +20,7 @@ export class WebCrawler {
   private robotsTxtUrl: string;
   private robots: any;
   private generateImgAltText: boolean;
+  private allowBackwardCrawling: boolean;
 
   constructor({
     initialUrl,
@@ -29,6 +30,7 @@ export class WebCrawler {
     limit = 10000,
     generateImgAltText = false,
     maxCrawledDepth = 10,
+    allowBackwardCrawling = false
   }: {
     initialUrl: string;
     includes?: string[];
@@ -37,6 +39,7 @@ export class WebCrawler {
     limit?: number;
     generateImgAltText?: boolean;
     maxCrawledDepth?: number;
+    allowBackwardCrawling?: boolean;
   }) {
     this.initialUrl = initialUrl;
     this.baseUrl = new URL(initialUrl).origin;
@@ -49,6 +52,7 @@ export class WebCrawler {
     this.maxCrawledLinks = maxCrawledLinks ?? limit;
     this.maxCrawledDepth = maxCrawledDepth ?? 10;
     this.generateImgAltText = generateImgAltText ?? false;
+    this.allowBackwardCrawling = allowBackwardCrawling ?? false;
   }
 
   private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
@@ -90,10 +94,16 @@ export class WebCrawler {
       const linkHostname = normalizedLink.hostname.replace(/^www\./, '');
 
       // Ensure the protocol and hostname match, and the path starts with the initial URL's path
-      if (linkHostname !== initialHostname || !normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+      if (linkHostname !== initialHostname) {
         return false;
       }
 
+      if (!this.allowBackwardCrawling) {
+        if (!normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+          return false;
+        }
+      }
+
       const isAllowed = this.robots.isAllowed(link, "FireCrawlAgent") ?? true;
       // Check if the link is disallowed by robots.txt
       if (!isAllowed) {
diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts
index 7dcd175..5344320 100644
--- a/apps/api/src/scraper/WebScraper/index.ts
+++ b/apps/api/src/scraper/WebScraper/index.ts
@@ -38,8 +38,8 @@ export class WebScraperDataProvider {
   private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" =
     "gpt-4-turbo";
   private crawlerMode: string = "default";
+  private allowBackwardCrawling: boolean = false;
 
-
   authorize(): void {
     throw new Error("Method not implemented.");
   }
@@ -171,6 +171,7 @@ export class WebScraperDataProvider {
       maxCrawledDepth: this.maxCrawledDepth,
       limit: this.limit,
       generateImgAltText: this.generateImgAltText,
+      allowBackwardCrawling: this.allowBackwardCrawling,
     });
 
     let links = await crawler.start(
@@ -480,6 +481,7 @@ export class WebScraperDataProvider {
     this.excludes = this.excludes.filter((item) => item !== "");
     this.crawlerMode = options.crawlerOptions?.mode ?? "default";
     this.ignoreSitemap = options.crawlerOptions?.ignoreSitemap ?? false;
+    this.allowBackwardCrawling = options.crawlerOptions?.allowBackwardCrawling ?? false;
 
     // make sure all urls start with https://
     this.urls = this.urls.map((url) => {
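
A minimal standalone sketch of the link-scoping rule this patch introduces. The isLinkInScope helper below is hypothetical and not part of the patch; it only mirrors the hostname/pathname checks added to WebCrawler.filterLinks.

// Hypothetical helper (illustration only) mirroring the filterLinks checks above.
function isLinkInScope(
  initialUrl: string,
  link: string,
  allowBackwardCrawling: boolean
): boolean {
  const normalizedInitialUrl = new URL(initialUrl);
  const normalizedLink = new URL(link);

  // The hostname must always match the initial URL (ignoring a leading www.),
  // regardless of the allowBackwardCrawling setting.
  const initialHostname = normalizedInitialUrl.hostname.replace(/^www\./, "");
  const linkHostname = normalizedLink.hostname.replace(/^www\./, "");
  if (linkHostname !== initialHostname) {
    return false;
  }

  // Only when backward crawling is disabled must the link's path stay under
  // the initial URL's path.
  if (!allowBackwardCrawling) {
    return normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname);
  }

  return true;
}

// With the default (false), a sibling path on the same host is filtered out;
// with allowBackwardCrawling enabled, it is kept:
console.log(isLinkInScope("https://example.com/docs", "https://example.com/blog/post", false)); // false
console.log(isLinkInScope("https://example.com/docs", "https://example.com/blog/post", true));  // true

In crawlController the flag is read from req.body.crawlerOptions, so a request body along the lines of { "url": "https://example.com/docs", "crawlerOptions": { "allowBackwardCrawling": true } } would enable it (a sketch; the route path is not shown in this patch).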