
Added max depth option

rafaelsideguide 2024-05-07 11:06:26 -03:00
parent 6913fda710
commit 83f3408634
4 changed files with 73 additions and 6 deletions
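
For context, a rough sketch of how the new option is meant to be used, mirroring the e2e test added below. The /v0/crawl endpoint and crawlerOptions.maxDepth come from this diff; API_URL, FIRECRAWL_API_KEY, and the helper name are placeholders, not part of the commit.

// Hypothetical client-side usage of the new maxDepth crawler option.
// Assumes a running Firecrawl API at API_URL and a valid key in FIRECRAWL_API_KEY.
const startDepthLimitedCrawl = async (): Promise<string> => {
  const res = await fetch(`${process.env.API_URL}/v0/crawl`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}`,
    },
    body: JSON.stringify({
      url: "https://www.scrapethissite.com",
      crawlerOptions: { maxDepth: 2 }, // cap crawled URLs by path depth
    }),
  });
  const { jobId } = await res.json();
  return jobId; // then poll /v0/crawl/status/:jobId, as the test does
};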

View File

@@ -250,6 +250,47 @@ describe("E2E Tests for API Routes", () => {
         "🔥 FireCrawl"
       );
     }, 60000); // 60 seconds
+
+    it("should return a successful response with max depth option for a valid crawl job", async () => {
+      const crawlResponse = await request(TEST_URL)
+        .post("/v0/crawl")
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+        .set("Content-Type", "application/json")
+        .send({ url: "https://www.scrapethissite.com", crawlerOptions: { maxDepth: 2 }});
+      expect(crawlResponse.statusCode).toBe(200);
+
+      const response = await request(TEST_URL)
+        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+      expect(response.statusCode).toBe(200);
+      expect(response.body).toHaveProperty("status");
+      expect(response.body.status).toBe("active");
+
+      // wait for 60 seconds
+      await new Promise((r) => setTimeout(r, 60000));
+
+      const completedResponse = await request(TEST_URL)
+        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+      expect(completedResponse.statusCode).toBe(200);
+      expect(completedResponse.body).toHaveProperty("status");
+      expect(completedResponse.body.status).toBe("completed");
+      expect(completedResponse.body).toHaveProperty("data");
+      expect(completedResponse.body.data[0]).toHaveProperty("content");
+      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+
+      const urls = completedResponse.body.data.map((item: any) => item.metadata?.sourceURL);
+      expect(urls.length).toBeGreaterThan(1);
+
+      // Check if all URLs have a maximum depth of 1
+      urls.forEach((url) => {
+        const depth = new URL(url).pathname.split('/').filter(Boolean).length;
+        expect(depth).toBeLessThanOrEqual(1);
+      });
+    }, 120000); // 120 seconds
   });

   describe("POST /v0/scrape with LLM Extraction", () => {

View File

@@ -40,6 +40,7 @@ export type WebScraperOptions = {
     includes?: string[];
     excludes?: string[];
     maxCrawledLinks?: number;
+    maxDepth?: number;
     limit?: number;
     generateImgAltText?: boolean;
     replaceAllPathsWithAbsolutePaths?: boolean;

View File

@@ -13,6 +13,7 @@ export class WebCrawler {
   private includes: string[];
   private excludes: string[];
   private maxCrawledLinks: number;
+  private maxCrawledDepth: number;
   private visited: Set<string> = new Set();
   private crawledUrls: Set<string> = new Set();
   private limit: number;
@@ -27,6 +28,7 @@ export class WebCrawler {
     maxCrawledLinks,
     limit = 10000,
     generateImgAltText = false,
+    maxCrawledDepth = 10,
   }: {
     initialUrl: string;
     includes?: string[];
@@ -34,6 +36,7 @@ export class WebCrawler {
     maxCrawledLinks?: number;
     limit?: number;
     generateImgAltText?: boolean;
+    maxCrawledDepth?: number;
   }) {
     this.initialUrl = initialUrl;
     this.baseUrl = new URL(initialUrl).origin;
@@ -44,15 +47,22 @@ export class WebCrawler {
     this.robots = robotsParser(this.robotsTxtUrl, "");
     // Deprecated, use limit instead
     this.maxCrawledLinks = maxCrawledLinks ?? limit;
+    this.maxCrawledDepth = maxCrawledDepth ?? 10;
     this.generateImgAltText = generateImgAltText ?? false;
   }

-  private filterLinks(sitemapLinks: string[], limit: number): string[] {
+  private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
     return sitemapLinks
       .filter((link) => {
         const url = new URL(link);
         const path = url.pathname;
+        const depth = url.pathname.split('/').length - 1;
+
+        // Check if the link exceeds the maximum depth allowed
+        if (depth > maxDepth) {
+          return false;
+        }

         // Check if the link should be excluded
         if (this.excludes.length > 0 && this.excludes[0] !== "") {
@@ -87,7 +97,8 @@ export class WebCrawler {
   public async start(
     inProgress?: (progress: Progress) => void,
     concurrencyLimit: number = 5,
-    limit: number = 10000
+    limit: number = 10000,
+    maxDepth: number = 10
   ): Promise<string[]> {
     // Fetch and parse robots.txt
     try {
@@ -99,7 +110,7 @@ export class WebCrawler {
     const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl);
     if (sitemapLinks.length > 0) {
-      const filteredLinks = this.filterLinks(sitemapLinks, limit);
+      const filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth);
       return filteredLinks;
     }
@@ -110,13 +121,13 @@ export class WebCrawler {
     );
     if (
       urls.length === 0 &&
-      this.filterLinks([this.initialUrl], limit).length > 0
+      this.filterLinks([this.initialUrl], limit, this.maxCrawledDepth).length > 0
     ) {
       return [this.initialUrl];
     }

     // make sure to run include exclude here again
-    return this.filterLinks(urls, limit);
+    return this.filterLinks(urls, limit, this.maxCrawledDepth);
   }

   private async crawlUrls(
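
A note on the depth metric used in filterLinks above: it counts slashes in the URL pathname, so the site root and a single-segment page both come out as depth 1, whereas the e2e test counts only non-empty segments with filter(Boolean) and so treats the root as depth 0. A small illustration, assuming the expression from this diff; the sample URLs are made up.

// Same expression as in filterLinks; sample URLs are illustrative only.
const depthOf = (link: string): number =>
  new URL(link).pathname.split('/').length - 1;

depthOf("https://example.com/");          // 1 -> "/" splits into ["", ""]
depthOf("https://example.com/blog");      // 1
depthOf("https://example.com/blog/post"); // 2 -> rejected when maxDepth is 1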

View File

@@ -16,6 +16,7 @@ export class WebScraperDataProvider {
   private includes: string[];
   private excludes: string[];
   private maxCrawledLinks: number;
+  private maxCrawledDepth: number = 10;
   private returnOnlyUrls: boolean;
   private limit: number = 10000;
   private concurrentRequests: number = 20;
@@ -106,10 +107,11 @@ export class WebScraperDataProvider {
       includes: this.includes,
       excludes: this.excludes,
       maxCrawledLinks: this.maxCrawledLinks,
+      maxCrawledDepth: this.maxCrawledDepth,
       limit: this.limit,
       generateImgAltText: this.generateImgAltText,
     });
-    let links = await crawler.start(inProgress, 5, this.limit);
+    let links = await crawler.start(inProgress, 5, this.limit, this.maxCrawledDepth);
     if (this.returnOnlyUrls) {
       return this.returnOnlyUrlsResponse(links, inProgress);
     }
@@ -198,6 +200,7 @@ export class WebScraperDataProvider {
       documents = this.mergeNewDocuments(documents, newDocuments);
     }
     documents = this.filterDocsExcludeInclude(documents);
+    documents = this.filterDepth(documents);
     documents = this.removeChildLinks(documents);
     return documents.splice(0, this.limit);
   }
@@ -319,6 +322,7 @@ export class WebScraperDataProvider {
     this.includes = options.crawlerOptions?.includes ?? [];
     this.excludes = options.crawlerOptions?.excludes ?? [];
     this.maxCrawledLinks = options.crawlerOptions?.maxCrawledLinks ?? 1000;
+    this.maxCrawledDepth = options.crawlerOptions?.maxDepth ?? 10;
     this.returnOnlyUrls = options.crawlerOptions?.returnOnlyUrls ?? false;
     this.limit = options.crawlerOptions?.limit ?? 10000;
     this.generateImgAltText =
@@ -327,6 +331,8 @@ export class WebScraperDataProvider {
     this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
     this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
+
+    console.log("maxDepth:", this.maxCrawledDepth, options.crawlerOptions?.maxDepth);
     //! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
     this.excludes = this.excludes.filter((item) => item !== "");
@@ -411,4 +417,12 @@ export class WebScraperDataProvider {
     return documents;
   };
+
+  filterDepth(documents: Document[]): Document[] {
+    return documents.filter((document) => {
+      const url = new URL(document.metadata.sourceURL);
+      const path = url.pathname;
+      return path.split("/").length <= this.maxCrawledDepth;
+    });
+  }
 }
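
For reference, a rough worked example of what the new filterDepth method keeps after a crawl. It uses the same split-based count as the method above (note this one is split("/").length, without the -1 used in WebCrawler.filterLinks); the sample URLs are illustrative only.

// path.split("/").length per pathname:
//   "/"        -> ["", ""]           -> 2
//   "/docs"    -> ["", "docs"]       -> 2
//   "/docs/v0" -> ["", "docs", "v0"] -> 3
// So with maxCrawledDepth = 2, only root and single-segment pages survive this filter.
const kept = [
  "https://example.com/",
  "https://example.com/docs",
  "https://example.com/docs/v0",
].filter((u) => new URL(u).pathname.split("/").length <= 2);
// kept -> ["https://example.com/", "https://example.com/docs"]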