0

Added allowBackwardCrawling option

This commit is contained in:
rafaelsideguide 2024-06-11 15:24:39 -03:00
parent 06b0d01fd4
commit ee282c3d55
4 changed files with 18 additions and 7 deletions

View File

@@ -55,7 +55,7 @@ export async function crawlController(req: Request, res: Response) {
}
const mode = req.body.mode ?? "crawl";
const crawlerOptions = req.body.crawlerOptions ?? {};
const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false, returnOnlyUrls: true };
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
if (mode === "single_urls" && !url.includes(",")) {
@@ -64,9 +64,7 @@ export async function crawlController(req: Request, res: Response) {
await a.setOptions({
mode: "single_urls",
urls: [url],
crawlerOptions: {
returnOnlyUrls: true,
},
crawlerOptions: crawlerOptions,
pageOptions: pageOptions,
});
@@ -91,7 +89,7 @@ export async function crawlController(req: Request, res: Response) {
const job = await addWebScraperJob({
url: url,
mode: mode ?? "crawl", // fix for single urls not working
crawlerOptions: { ...crawlerOptions },
crawlerOptions: crawlerOptions,
team_id: team_id,
pageOptions: pageOptions,
origin: req.body.origin ?? "api",

View File

@@ -46,6 +46,7 @@ export type CrawlerOptions = {
replaceAllPathsWithAbsolutePaths?: boolean;
ignoreSitemap?: boolean;
mode?: "default" | "fast"; // have a mode of some sort
allowBackwardCrawling?: boolean;
}
export type WebScraperOptions = {

View File

@@ -20,6 +20,7 @@ export class WebCrawler {
private robotsTxtUrl: string;
private robots: any;
private generateImgAltText: boolean;
private allowBackwardCrawling: boolean;
constructor({
initialUrl,
@@ -29,6 +30,7 @@ export class WebCrawler {
limit = 10000,
generateImgAltText = false,
maxCrawledDepth = 10,
allowBackwardCrawling = false
}: {
initialUrl: string;
includes?: string[];
@@ -37,6 +39,7 @@ export class WebCrawler {
limit?: number;
generateImgAltText?: boolean;
maxCrawledDepth?: number;
allowBackwardCrawling?: boolean;
}) {
this.initialUrl = initialUrl;
this.baseUrl = new URL(initialUrl).origin;
@@ -49,6 +52,7 @@ export class WebCrawler {
this.maxCrawledLinks = maxCrawledLinks ?? limit;
this.maxCrawledDepth = maxCrawledDepth ?? 10;
this.generateImgAltText = generateImgAltText ?? false;
this.allowBackwardCrawling = allowBackwardCrawling ?? false;
}
private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
@@ -90,10 +94,16 @@ export class WebCrawler {
const linkHostname = normalizedLink.hostname.replace(/^www\./, '');
// Ensure the protocol and hostname match, and the path starts with the initial URL's path
if (linkHostname !== initialHostname || !normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
if (linkHostname !== initialHostname) {
return false;
}
if (!this.allowBackwardCrawling) {
if (!normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
return false;
}
}
const isAllowed = this.robots.isAllowed(link, "FireCrawlAgent") ?? true;
// Check if the link is disallowed by robots.txt
if (!isAllowed) {

View File

@@ -38,8 +38,8 @@ export class WebScraperDataProvider {
private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" =
"gpt-4-turbo";
private crawlerMode: string = "default";
private allowBackwardCrawling: boolean = false;
authorize(): void {
throw new Error("Method not implemented.");
}
@@ -171,6 +171,7 @@ export class WebScraperDataProvider {
maxCrawledDepth: this.maxCrawledDepth,
limit: this.limit,
generateImgAltText: this.generateImgAltText,
allowBackwardCrawling: this.allowBackwardCrawling,
});
let links = await crawler.start(
@@ -480,6 +481,7 @@ export class WebScraperDataProvider {
this.excludes = this.excludes.filter((item) => item !== "");
this.crawlerMode = options.crawlerOptions?.mode ?? "default";
this.ignoreSitemap = options.crawlerOptions?.ignoreSitemap ?? false;
this.allowBackwardCrawling = options.crawlerOptions?.allowBackwardCrawling ?? false;
// make sure all urls start with https://
this.urls = this.urls.map((url) => {