
nested includeHtml inside pageOptions

rafaelsideguide 2024-05-07 13:40:24 -03:00
parent 509250c4ef
commit e1f52c538f
11 changed files with 19 additions and 47 deletions
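This commit moves the request-level `includeHtml` flag into `pageOptions` across the API. A minimal sketch of the new request shape, assuming the public endpoint URL (the tests below use a `TEST_URL` constant and a `TEST_API_KEY` env var; the base URL here is an illustrative placeholder):

```ts
// Hypothetical client call illustrating the new nesting; before this commit
// includeHtml sat at the top level of the body ({ url, includeHtml: true }).
const res = await fetch("https://api.firecrawl.dev/v0/scrape", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.TEST_API_KEY}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    url: "https://firecrawl.dev",
    pageOptions: { includeHtml: true },
  }),
});
const json = await res.json();
// With includeHtml: true the scraped document should carry raw HTML in
// addition to markdown; with the default (false), `html` stays undefined.
console.log(json.data?.html);
```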

View File

@@ -88,7 +88,7 @@ describe("E2E Tests for API Routes", () => {
       .post("/v0/scrape")
       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
       .set("Content-Type", "application/json")
-      .send({ url: "https://firecrawl.dev", includeHtml: true });
+      .send({ url: "https://firecrawl.dev", pageOptions: { includeHtml: true }});
     expect(response.statusCode).toBe(200);
     expect(response.body).toHaveProperty("data");
     expect(response.body.data).toHaveProperty("content");
@@ -270,12 +270,12 @@ describe("E2E Tests for API Routes", () => {
       );
     }, 60000); // 60 seconds

-    it("should return a successful response for a valid crawl job with toMarkdown set to false option", async () => {
+    it("should return a successful response for a valid crawl job with includeHtml set to true option", async () => {
       const crawlResponse = await request(TEST_URL)
         .post("/v0/crawl")
         .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
         .set("Content-Type", "application/json")
-        .send({ url: "https://firecrawl.dev", includeHtml: true });
+        .send({ url: "https://firecrawl.dev", pageOptions: { includeHtml: true } });
       expect(crawlResponse.statusCode).toBe(200);

       const response = await request(TEST_URL)

View File

@@ -35,8 +35,7 @@ export async function crawlController(req: Request, res: Response) {
   const mode = req.body.mode ?? "crawl";
   const crawlerOptions = req.body.crawlerOptions ?? {};
-  const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
-  const includeHtml = req.body.includeHtml || false;
+  const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };

   if (mode === "single_urls" && !url.includes(",")) {
     try {
@@ -48,7 +47,6 @@ export async function crawlController(req: Request, res: Response) {
           returnOnlyUrls: true,
         },
         pageOptions: pageOptions,
-        includeHtml: includeHtml,
       });

       const docs = await a.getDocuments(false, (progress) => {
@@ -75,7 +73,6 @@ export async function crawlController(req: Request, res: Response) {
       team_id: team_id,
       pageOptions: pageOptions,
       origin: req.body.origin ?? "api",
-      includeHtml: includeHtml,
     });
     res.json({ jobId: job.id });
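
Note the `??` default here: it only fires when the request omits `pageOptions` entirely. A body that sends `pageOptions` without `includeHtml` leaves the flag undefined, which downstream reads (e.g. `pageOptions?.includeHtml ?? false` in searchHelper below) treat as false. A small illustration of that behavior:

```ts
// Sketch of the nullish-coalescing behavior on hypothetical request bodies.
const bodies: any[] = [
  {},                                     // -> { onlyMainContent: false, includeHtml: false }
  { pageOptions: {} },                    // -> {} (includeHtml undefined, falsy downstream)
  { pageOptions: { includeHtml: true } }, // -> HTML is included
];
for (const body of bodies) {
  const pageOptions =
    body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
  console.log(pageOptions);
}
```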

View File

@@ -26,8 +26,7 @@ export async function crawlPreviewController(req: Request, res: Response) {
   const mode = req.body.mode ?? "crawl";
   const crawlerOptions = req.body.crawlerOptions ?? {};
-  const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
-  const includeHtml = req.body.includeHtml ?? false;
+  const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };

   const job = await addWebScraperJob({
     url: url,
@@ -36,7 +35,6 @@ export async function crawlPreviewController(req: Request, res: Response) {
     team_id: "preview",
     pageOptions: pageOptions,
     origin: "website-preview",
-    includeHtml: includeHtml,
   });
   res.json({ jobId: job.id });

View File

@@ -15,7 +15,6 @@ export async function scrapeHelper(
   crawlerOptions: any,
   pageOptions: PageOptions,
   extractorOptions: ExtractorOptions,
-  includeHtml: boolean = false
 ): Promise<{
   success: boolean;
   error?: string;
@@ -41,7 +40,6 @@ export async function scrapeHelper(
     },
     pageOptions: pageOptions,
     extractorOptions: extractorOptions,
-    includeHtml: includeHtml
   });

   const docs = await a.getDocuments(false);
@@ -93,12 +91,11 @@ export async function scrapeController(req: Request, res: Response) {
     return res.status(status).json({ error });
   }
   const crawlerOptions = req.body.crawlerOptions ?? {};
-  const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
+  const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };
   const extractorOptions = req.body.extractorOptions ?? {
     mode: "markdown"
   }
   const origin = req.body.origin ?? "api";
-  const includeHtml = req.body.includeHtml ?? false;

   try {
     const { success: creditsCheckSuccess, message: creditsCheckMessage } =
@@ -117,7 +114,6 @@ export async function scrapeController(req: Request, res: Response) {
       crawlerOptions,
       pageOptions,
       extractorOptions,
-      includeHtml
     );
     const endTime = new Date().getTime();
     const timeTakenInSeconds = (endTime - startTime) / 1000;
@@ -137,7 +133,6 @@ export async function scrapeController(req: Request, res: Response) {
       origin: origin,
       extractor_options: extractorOptions,
       num_tokens: numTokens,
-      includeHtml: includeHtml
     });
     return res.status(result.returnCode).json(result);
   } catch (error) {

View File

@@ -14,7 +14,6 @@ export async function searchHelper(
   crawlerOptions: any,
   pageOptions: PageOptions,
   searchOptions: SearchOptions,
-  includeHtml: boolean = false
 ): Promise<{
   success: boolean;
   error?: string;
@@ -60,7 +59,6 @@ export async function searchHelper(
   await a.setOptions({
     mode: "single_urls",
     urls: res.map((r) => r.url).slice(0, searchOptions.limit ?? 7),
-    includeHtml,
     crawlerOptions: {
       ...crawlerOptions,
     },
@@ -68,6 +66,7 @@ export async function searchHelper(
       ...pageOptions,
       onlyMainContent: pageOptions?.onlyMainContent ?? true,
       fetchPageContent: pageOptions?.fetchPageContent ?? true,
+      includeHtml: pageOptions?.includeHtml ?? false,
       fallback: false,
     },
   });
@@ -119,6 +118,7 @@ export async function searchController(req: Request, res: Response) {
   }
   const crawlerOptions = req.body.crawlerOptions ?? {};
   const pageOptions = req.body.pageOptions ?? {
+    includeHtml: false,
     onlyMainContent: true,
     fetchPageContent: true,
     fallback: false,
@@ -126,7 +126,6 @@ export async function searchController(req: Request, res: Response) {
   const origin = req.body.origin ?? "api";
   const searchOptions = req.body.searchOptions ?? { limit: 7 };
-  const includeHtml = req.body.includeHtml ?? false;

   try {
     const { success: creditsCheckSuccess, message: creditsCheckMessage } =
@@ -145,7 +144,6 @@ export async function searchController(req: Request, res: Response) {
       crawlerOptions,
       pageOptions,
       searchOptions,
-      includeHtml
     );
     const endTime = new Date().getTime();
     const timeTakenInSeconds = (endTime - startTime) / 1000;
@@ -161,7 +159,6 @@ export async function searchController(req: Request, res: Response) {
       crawlerOptions: crawlerOptions,
       pageOptions: pageOptions,
       origin: origin,
-      includeHtml,
     });
     return res.status(result.returnCode).json(result);
   } catch (error) {

View File

@@ -12,8 +12,9 @@ export interface Progress {

 export type PageOptions = {
   onlyMainContent?: boolean;
+  includeHtml?: boolean;
   fallback?: boolean;
   fetchPageContent?: boolean;
 };

 export type ExtractorOptions = {
@@ -46,7 +47,6 @@ export type WebScraperOptions = {
   pageOptions?: PageOptions;
   extractorOptions?: ExtractorOptions;
   concurrentRequests?: number;
-  includeHtml?: boolean;
 };

 export interface DocumentUrl {
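
After this hunk, the full PageOptions type should read roughly as follows (reconstructed from the diff above):

```ts
export type PageOptions = {
  onlyMainContent?: boolean;
  includeHtml?: boolean; // moved here from WebScraperOptions and the request top level
  fallback?: boolean;
  fetchPageContent?: boolean;
};
```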

View File

@@ -26,8 +26,7 @@ export async function startWebScraperPipeline({
     onError: (error) => {
       job.moveToFailed(error);
     },
-    team_id: job.data.team_id,
-    includeHtml: job.data.includeHtml,
+    team_id: job.data.team_id
   })) as { success: boolean; message: string; docs: Document[] };
 }

 export async function runWebScraper({
@@ -39,7 +38,6 @@ export async function runWebScraper({
   onSuccess,
   onError,
   team_id,
-  includeHtml = false,
 }: {
   url: string;
   mode: "crawl" | "single_urls" | "sitemap";
@@ -49,7 +47,6 @@ export async function runWebScraper({
   onSuccess: (result: any) => void;
   onError: (error: any) => void;
   team_id: string;
-  includeHtml?: boolean;
 }): Promise<{
   success: boolean;
   message: string;
@@ -62,16 +59,14 @@ export async function runWebScraper({
       mode: mode,
       urls: [url],
       crawlerOptions: crawlerOptions,
-      pageOptions: pageOptions,
-      includeHtml: includeHtml,
+      pageOptions: pageOptions
     });
   } else {
     await provider.setOptions({
       mode: mode,
       urls: url.split(","),
       crawlerOptions: crawlerOptions,
-      pageOptions: pageOptions,
-      includeHtml: includeHtml,
+      pageOptions: pageOptions
     });
   }
   const docs = (await provider.getDocuments(false, (progress: Progress) => {

View File

@@ -19,7 +19,6 @@ export class WebCrawler {
   private robotsTxtUrl: string;
   private robots: any;
   private generateImgAltText: boolean;
-  private includeHtml: boolean;

   constructor({
     initialUrl,
@@ -28,7 +27,6 @@ export class WebCrawler {
     maxCrawledLinks,
     limit = 10000,
     generateImgAltText = false,
-    includeHtml = false,
   }: {
     initialUrl: string;
     includes?: string[];
@@ -36,7 +34,6 @@ export class WebCrawler {
     maxCrawledLinks?: number;
     limit?: number;
     generateImgAltText?: boolean;
-    includeHtml?: boolean;
   }) {
     this.initialUrl = initialUrl;
     this.baseUrl = new URL(initialUrl).origin;
@@ -48,7 +45,6 @@ export class WebCrawler {
     // Deprecated, use limit instead
     this.maxCrawledLinks = maxCrawledLinks ?? limit;
     this.generateImgAltText = generateImgAltText ?? false;
-    this.includeHtml = includeHtml ?? false;
   }

View File

@@ -24,7 +24,6 @@ export class WebScraperDataProvider {
   private extractorOptions?: ExtractorOptions;
   private replaceAllPathsWithAbsolutePaths?: boolean = false;
   private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" = "gpt-4-turbo";
-  private includeHtml: boolean = false;

   authorize(): void {
     throw new Error("Method not implemented.");
@@ -46,7 +45,7 @@ export class WebScraperDataProvider {
       const batchUrls = urls.slice(i, i + this.concurrentRequests);
       await Promise.all(
         batchUrls.map(async (url, index) => {
-          const result = await scrapSingleUrl(url, this.pageOptions, this.includeHtml);
+          const result = await scrapSingleUrl(url, this.pageOptions);
           processedUrls++;
           if (inProgress) {
             inProgress({
@@ -109,7 +108,6 @@ export class WebScraperDataProvider {
       maxCrawledLinks: this.maxCrawledLinks,
       limit: this.limit,
       generateImgAltText: this.generateImgAltText,
-      includeHtml: this.includeHtml,
     });
     let links = await crawler.start(inProgress, 5, this.limit);
     if (this.returnOnlyUrls) {
@@ -144,7 +142,7 @@ export class WebScraperDataProvider {
     });
     return links.map(url => ({
       content: "",
-      html: this.includeHtml ? "" : undefined,
+      html: this.pageOptions?.includeHtml ? "" : undefined,
       markdown: "",
       metadata: { sourceURL: url },
     }));
@@ -326,10 +324,9 @@ export class WebScraperDataProvider {
     this.limit = options.crawlerOptions?.limit ?? 10000;
     this.generateImgAltText =
       options.crawlerOptions?.generateImgAltText ?? false;
-    this.pageOptions = options.pageOptions ?? {onlyMainContent: false };
+    this.pageOptions = options.pageOptions ?? { onlyMainContent: false, includeHtml: false };
     this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
     this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
-    this.includeHtml = options?.includeHtml ?? false;

     //! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
     this.excludes = this.excludes.filter((item) => item !== "");

View File

@@ -103,8 +103,7 @@ export async function scrapWithPlaywright(url: string): Promise<string> {

 export async function scrapSingleUrl(
   urlToScrap: string,
-  pageOptions: PageOptions = { onlyMainContent: true },
-  includeHtml: boolean = false
+  pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false },
 ): Promise<Document> {
   urlToScrap = urlToScrap.trim();
@@ -193,7 +192,7 @@ export async function scrapSingleUrl(
       url: urlToScrap,
       content: text,
       markdown: text,
-      html: includeHtml ? html : undefined,
+      html: pageOptions.includeHtml ? html : undefined,
       metadata: { ...metadata, sourceURL: urlToScrap },
     } as Document;
   }
@@ -217,7 +216,7 @@ export async function scrapSingleUrl(
     return {
       content: text,
       markdown: text,
-      html: includeHtml ? html : undefined,
+      html: pageOptions.includeHtml ? html : undefined,
       metadata: { ...metadata, sourceURL: urlToScrap },
     } as Document;
   } catch (error) {
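
With the extra `includeHtml` parameter removed, callers of scrapSingleUrl opt into HTML capture through pageOptions alone. A minimal usage sketch, assuming the PageOptions and Document types above:

```ts
// Hypothetical caller of the new single-parameter-per-concern signature.
const doc = await scrapSingleUrl("https://firecrawl.dev", {
  onlyMainContent: true,
  includeHtml: true,
});
// doc.html is populated because pageOptions.includeHtml is true; with the
// default parameter ({ onlyMainContent: true, includeHtml: false }) it
// would be undefined.
console.log(doc.html?.length);
```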

View File

@@ -25,7 +25,6 @@ export interface WebScraperOptions {
   pageOptions: any;
   team_id: string;
   origin?: string;
-  includeHtml?: boolean;
 }

 export interface FirecrawlJob {
@@ -42,7 +41,6 @@ export interface FirecrawlJob {
   origin: string;
   extractor_options?: ExtractorOptions,
   num_tokens?: number,
-  includeHtml?: boolean;
 }

 export enum RateLimiterMode {