changed to includeHtml
This commit is contained in:
parent
538355f1af
commit
509250c4ef
@ -79,22 +79,25 @@ describe("E2E Tests for API Routes", () => {
|
|||||||
expect(response.body.data).toHaveProperty("content");
|
expect(response.body.data).toHaveProperty("content");
|
||||||
expect(response.body.data).toHaveProperty("markdown");
|
expect(response.body.data).toHaveProperty("markdown");
|
||||||
expect(response.body.data).toHaveProperty("metadata");
|
expect(response.body.data).toHaveProperty("metadata");
|
||||||
|
expect(response.body.data).not.toHaveProperty("html");
|
||||||
expect(response.body.data.content).toContain("🔥 FireCrawl");
|
expect(response.body.data.content).toContain("🔥 FireCrawl");
|
||||||
}, 30000); // 30 seconds timeout
|
}, 30000); // 30 seconds timeout
|
||||||
|
|
||||||
it("should return a successful response with a valid API key and toMarkdown set to false", async () => {
|
it("should return a successful response with a valid API key and includeHtml set to true", async () => {
|
||||||
const response = await request(TEST_URL)
|
const response = await request(TEST_URL)
|
||||||
.post("/v0/scrape")
|
.post("/v0/scrape")
|
||||||
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
.set("Content-Type", "application/json")
|
.set("Content-Type", "application/json")
|
||||||
.send({ url: "https://firecrawl.dev", pageOptions: { toMarkdown: false } });
|
.send({ url: "https://firecrawl.dev", includeHtml: true });
|
||||||
expect(response.statusCode).toBe(200);
|
expect(response.statusCode).toBe(200);
|
||||||
expect(response.body).toHaveProperty("data");
|
expect(response.body).toHaveProperty("data");
|
||||||
expect(response.body.data).toHaveProperty("content");
|
expect(response.body.data).toHaveProperty("content");
|
||||||
expect(response.body.data).not.toHaveProperty("markdown");
|
expect(response.body.data).toHaveProperty("markdown");
|
||||||
|
expect(response.body.data).toHaveProperty("html");
|
||||||
expect(response.body.data).toHaveProperty("metadata");
|
expect(response.body.data).toHaveProperty("metadata");
|
||||||
expect(response.body.data.content).toContain("FireCrawl");
|
expect(response.body.data.content).toContain("🔥 FireCrawl");
|
||||||
expect(response.body.data.content).toContain("<h1");
|
expect(response.body.data.markdown).toContain("🔥 FireCrawl");
|
||||||
|
expect(response.body.data.html).toContain("<h1");
|
||||||
}, 30000); // 30 seconds timeout
|
}, 30000); // 30 seconds timeout
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -158,16 +161,17 @@ describe("E2E Tests for API Routes", () => {
|
|||||||
expect(response.statusCode).toBe(401);
|
expect(response.statusCode).toBe(401);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return an error for a blocklisted URL", async () => {
|
// it("should return an error for a blocklisted URL", async () => {
|
||||||
const blocklistedUrl = "https://instagram.com/fake-test";
|
// const blocklistedUrl = "https://instagram.com/fake-test";
|
||||||
const response = await request(TEST_URL)
|
// const response = await request(TEST_URL)
|
||||||
.post("/v0/crawlWebsitePreview")
|
// .post("/v0/crawlWebsitePreview")
|
||||||
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
// .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
.set("Content-Type", "application/json")
|
// .set("Content-Type", "application/json")
|
||||||
.send({ url: blocklistedUrl });
|
// .send({ url: blocklistedUrl });
|
||||||
expect(response.statusCode).toBe(403);
|
// // is returning 429 instead of 403
|
||||||
expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
|
// expect(response.statusCode).toBe(403);
|
||||||
});
|
// expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
|
||||||
|
// });
|
||||||
|
|
||||||
it("should return a successful response with a valid API key", async () => {
|
it("should return a successful response with a valid API key", async () => {
|
||||||
const response = await request(TEST_URL)
|
const response = await request(TEST_URL)
|
||||||
@ -271,7 +275,7 @@ describe("E2E Tests for API Routes", () => {
|
|||||||
.post("/v0/crawl")
|
.post("/v0/crawl")
|
||||||
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
.set("Content-Type", "application/json")
|
.set("Content-Type", "application/json")
|
||||||
.send({ url: "https://firecrawl.dev", pageOptions: { toMarkdown: false } });
|
.send({ url: "https://firecrawl.dev", includeHtml: true });
|
||||||
expect(crawlResponse.statusCode).toBe(200);
|
expect(crawlResponse.statusCode).toBe(200);
|
||||||
|
|
||||||
const response = await request(TEST_URL)
|
const response = await request(TEST_URL)
|
||||||
@ -292,12 +296,16 @@ describe("E2E Tests for API Routes", () => {
|
|||||||
expect(completedResponse.body.status).toBe("completed");
|
expect(completedResponse.body.status).toBe("completed");
|
||||||
expect(completedResponse.body).toHaveProperty("data");
|
expect(completedResponse.body).toHaveProperty("data");
|
||||||
expect(completedResponse.body.data[0]).toHaveProperty("content");
|
expect(completedResponse.body.data[0]).toHaveProperty("content");
|
||||||
expect(completedResponse.body.data[0]).not.toHaveProperty("markdown");
|
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("html");
|
||||||
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
expect(completedResponse.body.data[0].content).toContain(
|
expect(completedResponse.body.data[0].content).toContain(
|
||||||
|
"🔥 FireCrawl"
|
||||||
|
);
|
||||||
|
expect(completedResponse.body.data[0].markdown).toContain(
|
||||||
"FireCrawl"
|
"FireCrawl"
|
||||||
);
|
);
|
||||||
expect(completedResponse.body.data[0].content).toContain(
|
expect(completedResponse.body.data[0].html).toContain(
|
||||||
"<h1"
|
"<h1"
|
||||||
);
|
);
|
||||||
}, 60000); // 60 seconds
|
}, 60000); // 60 seconds
|
||||||
|
@ -35,7 +35,8 @@ export async function crawlController(req: Request, res: Response) {
|
|||||||
|
|
||||||
const mode = req.body.mode ?? "crawl";
|
const mode = req.body.mode ?? "crawl";
|
||||||
const crawlerOptions = req.body.crawlerOptions ?? {};
|
const crawlerOptions = req.body.crawlerOptions ?? {};
|
||||||
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, toMarkdown: true };
|
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
|
||||||
|
const includeHtml = req.body.includeHtml || false;
|
||||||
|
|
||||||
if (mode === "single_urls" && !url.includes(",")) {
|
if (mode === "single_urls" && !url.includes(",")) {
|
||||||
try {
|
try {
|
||||||
@ -47,6 +48,7 @@ export async function crawlController(req: Request, res: Response) {
|
|||||||
returnOnlyUrls: true,
|
returnOnlyUrls: true,
|
||||||
},
|
},
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
|
includeHtml: includeHtml,
|
||||||
});
|
});
|
||||||
|
|
||||||
const docs = await a.getDocuments(false, (progress) => {
|
const docs = await a.getDocuments(false, (progress) => {
|
||||||
@ -73,6 +75,7 @@ export async function crawlController(req: Request, res: Response) {
|
|||||||
team_id: team_id,
|
team_id: team_id,
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
origin: req.body.origin ?? "api",
|
origin: req.body.origin ?? "api",
|
||||||
|
includeHtml: includeHtml,
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({ jobId: job.id });
|
res.json({ jobId: job.id });
|
||||||
|
@ -26,7 +26,8 @@ export async function crawlPreviewController(req: Request, res: Response) {
|
|||||||
|
|
||||||
const mode = req.body.mode ?? "crawl";
|
const mode = req.body.mode ?? "crawl";
|
||||||
const crawlerOptions = req.body.crawlerOptions ?? {};
|
const crawlerOptions = req.body.crawlerOptions ?? {};
|
||||||
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, toMarkdown: true};
|
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
|
||||||
|
const includeHtml = req.body.includeHtml ?? false;
|
||||||
|
|
||||||
const job = await addWebScraperJob({
|
const job = await addWebScraperJob({
|
||||||
url: url,
|
url: url,
|
||||||
@ -35,6 +36,7 @@ export async function crawlPreviewController(req: Request, res: Response) {
|
|||||||
team_id: "preview",
|
team_id: "preview",
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
origin: "website-preview",
|
origin: "website-preview",
|
||||||
|
includeHtml: includeHtml,
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({ jobId: job.id });
|
res.json({ jobId: job.id });
|
||||||
|
@ -14,7 +14,8 @@ export async function scrapeHelper(
|
|||||||
team_id: string,
|
team_id: string,
|
||||||
crawlerOptions: any,
|
crawlerOptions: any,
|
||||||
pageOptions: PageOptions,
|
pageOptions: PageOptions,
|
||||||
extractorOptions: ExtractorOptions
|
extractorOptions: ExtractorOptions,
|
||||||
|
includeHtml: boolean = false
|
||||||
): Promise<{
|
): Promise<{
|
||||||
success: boolean;
|
success: boolean;
|
||||||
error?: string;
|
error?: string;
|
||||||
@ -39,7 +40,8 @@ export async function scrapeHelper(
|
|||||||
...crawlerOptions,
|
...crawlerOptions,
|
||||||
},
|
},
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
extractorOptions: extractorOptions
|
extractorOptions: extractorOptions,
|
||||||
|
includeHtml: includeHtml
|
||||||
});
|
});
|
||||||
|
|
||||||
const docs = await a.getDocuments(false);
|
const docs = await a.getDocuments(false);
|
||||||
@ -91,11 +93,12 @@ export async function scrapeController(req: Request, res: Response) {
|
|||||||
return res.status(status).json({ error });
|
return res.status(status).json({ error });
|
||||||
}
|
}
|
||||||
const crawlerOptions = req.body.crawlerOptions ?? {};
|
const crawlerOptions = req.body.crawlerOptions ?? {};
|
||||||
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, toMarkdown: true };
|
const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
|
||||||
const extractorOptions = req.body.extractorOptions ?? {
|
const extractorOptions = req.body.extractorOptions ?? {
|
||||||
mode: "markdown"
|
mode: "markdown"
|
||||||
}
|
}
|
||||||
const origin = req.body.origin ?? "api";
|
const origin = req.body.origin ?? "api";
|
||||||
|
const includeHtml = req.body.includeHtml ?? false;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
|
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
|
||||||
@ -113,7 +116,8 @@ export async function scrapeController(req: Request, res: Response) {
|
|||||||
team_id,
|
team_id,
|
||||||
crawlerOptions,
|
crawlerOptions,
|
||||||
pageOptions,
|
pageOptions,
|
||||||
extractorOptions
|
extractorOptions,
|
||||||
|
includeHtml
|
||||||
);
|
);
|
||||||
const endTime = new Date().getTime();
|
const endTime = new Date().getTime();
|
||||||
const timeTakenInSeconds = (endTime - startTime) / 1000;
|
const timeTakenInSeconds = (endTime - startTime) / 1000;
|
||||||
@ -132,7 +136,8 @@ export async function scrapeController(req: Request, res: Response) {
|
|||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
origin: origin,
|
origin: origin,
|
||||||
extractor_options: extractorOptions,
|
extractor_options: extractorOptions,
|
||||||
num_tokens: numTokens
|
num_tokens: numTokens,
|
||||||
|
includeHtml: includeHtml
|
||||||
});
|
});
|
||||||
return res.status(result.returnCode).json(result);
|
return res.status(result.returnCode).json(result);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
@ -13,7 +13,8 @@ export async function searchHelper(
|
|||||||
team_id: string,
|
team_id: string,
|
||||||
crawlerOptions: any,
|
crawlerOptions: any,
|
||||||
pageOptions: PageOptions,
|
pageOptions: PageOptions,
|
||||||
searchOptions: SearchOptions
|
searchOptions: SearchOptions,
|
||||||
|
includeHtml: boolean = false
|
||||||
): Promise<{
|
): Promise<{
|
||||||
success: boolean;
|
success: boolean;
|
||||||
error?: string;
|
error?: string;
|
||||||
@ -59,6 +60,7 @@ export async function searchHelper(
|
|||||||
await a.setOptions({
|
await a.setOptions({
|
||||||
mode: "single_urls",
|
mode: "single_urls",
|
||||||
urls: res.map((r) => r.url).slice(0, searchOptions.limit ?? 7),
|
urls: res.map((r) => r.url).slice(0, searchOptions.limit ?? 7),
|
||||||
|
includeHtml,
|
||||||
crawlerOptions: {
|
crawlerOptions: {
|
||||||
...crawlerOptions,
|
...crawlerOptions,
|
||||||
},
|
},
|
||||||
@ -66,7 +68,6 @@ export async function searchHelper(
|
|||||||
...pageOptions,
|
...pageOptions,
|
||||||
onlyMainContent: pageOptions?.onlyMainContent ?? true,
|
onlyMainContent: pageOptions?.onlyMainContent ?? true,
|
||||||
fetchPageContent: pageOptions?.fetchPageContent ?? true,
|
fetchPageContent: pageOptions?.fetchPageContent ?? true,
|
||||||
toMarkdown: pageOptions?.toMarkdown ?? true,
|
|
||||||
fallback: false,
|
fallback: false,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
@ -125,6 +126,7 @@ export async function searchController(req: Request, res: Response) {
|
|||||||
const origin = req.body.origin ?? "api";
|
const origin = req.body.origin ?? "api";
|
||||||
|
|
||||||
const searchOptions = req.body.searchOptions ?? { limit: 7 };
|
const searchOptions = req.body.searchOptions ?? { limit: 7 };
|
||||||
|
const includeHtml = req.body.includeHtml ?? false;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
|
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
|
||||||
@ -142,7 +144,8 @@ export async function searchController(req: Request, res: Response) {
|
|||||||
team_id,
|
team_id,
|
||||||
crawlerOptions,
|
crawlerOptions,
|
||||||
pageOptions,
|
pageOptions,
|
||||||
searchOptions
|
searchOptions,
|
||||||
|
includeHtml
|
||||||
);
|
);
|
||||||
const endTime = new Date().getTime();
|
const endTime = new Date().getTime();
|
||||||
const timeTakenInSeconds = (endTime - startTime) / 1000;
|
const timeTakenInSeconds = (endTime - startTime) / 1000;
|
||||||
@ -158,6 +161,7 @@ export async function searchController(req: Request, res: Response) {
|
|||||||
crawlerOptions: crawlerOptions,
|
crawlerOptions: crawlerOptions,
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
origin: origin,
|
origin: origin,
|
||||||
|
includeHtml,
|
||||||
});
|
});
|
||||||
return res.status(result.returnCode).json(result);
|
return res.status(result.returnCode).json(result);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
@ -12,7 +12,6 @@ export interface Progress {
|
|||||||
|
|
||||||
export type PageOptions = {
|
export type PageOptions = {
|
||||||
onlyMainContent?: boolean;
|
onlyMainContent?: boolean;
|
||||||
toMarkdown?: boolean;
|
|
||||||
fallback?: boolean;
|
fallback?: boolean;
|
||||||
fetchPageContent?: boolean;
|
fetchPageContent?: boolean;
|
||||||
};
|
};
|
||||||
@ -47,6 +46,7 @@ export type WebScraperOptions = {
|
|||||||
pageOptions?: PageOptions;
|
pageOptions?: PageOptions;
|
||||||
extractorOptions?: ExtractorOptions;
|
extractorOptions?: ExtractorOptions;
|
||||||
concurrentRequests?: number;
|
concurrentRequests?: number;
|
||||||
|
includeHtml?: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
export interface DocumentUrl {
|
export interface DocumentUrl {
|
||||||
|
@ -27,6 +27,7 @@ export async function startWebScraperPipeline({
|
|||||||
job.moveToFailed(error);
|
job.moveToFailed(error);
|
||||||
},
|
},
|
||||||
team_id: job.data.team_id,
|
team_id: job.data.team_id,
|
||||||
|
includeHtml: job.data.includeHtml,
|
||||||
})) as { success: boolean; message: string; docs: Document[] };
|
})) as { success: boolean; message: string; docs: Document[] };
|
||||||
}
|
}
|
||||||
export async function runWebScraper({
|
export async function runWebScraper({
|
||||||
@ -38,6 +39,7 @@ export async function runWebScraper({
|
|||||||
onSuccess,
|
onSuccess,
|
||||||
onError,
|
onError,
|
||||||
team_id,
|
team_id,
|
||||||
|
includeHtml = false,
|
||||||
}: {
|
}: {
|
||||||
url: string;
|
url: string;
|
||||||
mode: "crawl" | "single_urls" | "sitemap";
|
mode: "crawl" | "single_urls" | "sitemap";
|
||||||
@ -47,6 +49,7 @@ export async function runWebScraper({
|
|||||||
onSuccess: (result: any) => void;
|
onSuccess: (result: any) => void;
|
||||||
onError: (error: any) => void;
|
onError: (error: any) => void;
|
||||||
team_id: string;
|
team_id: string;
|
||||||
|
includeHtml?: boolean;
|
||||||
}): Promise<{
|
}): Promise<{
|
||||||
success: boolean;
|
success: boolean;
|
||||||
message: string;
|
message: string;
|
||||||
@ -60,6 +63,7 @@ export async function runWebScraper({
|
|||||||
urls: [url],
|
urls: [url],
|
||||||
crawlerOptions: crawlerOptions,
|
crawlerOptions: crawlerOptions,
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
|
includeHtml: includeHtml,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
await provider.setOptions({
|
await provider.setOptions({
|
||||||
@ -67,6 +71,7 @@ export async function runWebScraper({
|
|||||||
urls: url.split(","),
|
urls: url.split(","),
|
||||||
crawlerOptions: crawlerOptions,
|
crawlerOptions: crawlerOptions,
|
||||||
pageOptions: pageOptions,
|
pageOptions: pageOptions,
|
||||||
|
includeHtml: includeHtml,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
const docs = (await provider.getDocuments(false, (progress: Progress) => {
|
const docs = (await provider.getDocuments(false, (progress: Progress) => {
|
||||||
|
@ -19,6 +19,7 @@ export class WebCrawler {
|
|||||||
private robotsTxtUrl: string;
|
private robotsTxtUrl: string;
|
||||||
private robots: any;
|
private robots: any;
|
||||||
private generateImgAltText: boolean;
|
private generateImgAltText: boolean;
|
||||||
|
private includeHtml: boolean;
|
||||||
|
|
||||||
constructor({
|
constructor({
|
||||||
initialUrl,
|
initialUrl,
|
||||||
@ -27,6 +28,7 @@ export class WebCrawler {
|
|||||||
maxCrawledLinks,
|
maxCrawledLinks,
|
||||||
limit = 10000,
|
limit = 10000,
|
||||||
generateImgAltText = false,
|
generateImgAltText = false,
|
||||||
|
includeHtml = false,
|
||||||
}: {
|
}: {
|
||||||
initialUrl: string;
|
initialUrl: string;
|
||||||
includes?: string[];
|
includes?: string[];
|
||||||
@ -34,6 +36,7 @@ export class WebCrawler {
|
|||||||
maxCrawledLinks?: number;
|
maxCrawledLinks?: number;
|
||||||
limit?: number;
|
limit?: number;
|
||||||
generateImgAltText?: boolean;
|
generateImgAltText?: boolean;
|
||||||
|
includeHtml?: boolean;
|
||||||
}) {
|
}) {
|
||||||
this.initialUrl = initialUrl;
|
this.initialUrl = initialUrl;
|
||||||
this.baseUrl = new URL(initialUrl).origin;
|
this.baseUrl = new URL(initialUrl).origin;
|
||||||
@ -45,6 +48,7 @@ export class WebCrawler {
|
|||||||
// Deprecated, use limit instead
|
// Deprecated, use limit instead
|
||||||
this.maxCrawledLinks = maxCrawledLinks ?? limit;
|
this.maxCrawledLinks = maxCrawledLinks ?? limit;
|
||||||
this.generateImgAltText = generateImgAltText ?? false;
|
this.generateImgAltText = generateImgAltText ?? false;
|
||||||
|
this.includeHtml = includeHtml ?? false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -24,6 +24,7 @@ export class WebScraperDataProvider {
|
|||||||
private extractorOptions?: ExtractorOptions;
|
private extractorOptions?: ExtractorOptions;
|
||||||
private replaceAllPathsWithAbsolutePaths?: boolean = false;
|
private replaceAllPathsWithAbsolutePaths?: boolean = false;
|
||||||
private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" = "gpt-4-turbo";
|
private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" = "gpt-4-turbo";
|
||||||
|
private includeHtml: boolean = false;
|
||||||
|
|
||||||
authorize(): void {
|
authorize(): void {
|
||||||
throw new Error("Method not implemented.");
|
throw new Error("Method not implemented.");
|
||||||
@ -45,7 +46,7 @@ export class WebScraperDataProvider {
|
|||||||
const batchUrls = urls.slice(i, i + this.concurrentRequests);
|
const batchUrls = urls.slice(i, i + this.concurrentRequests);
|
||||||
await Promise.all(
|
await Promise.all(
|
||||||
batchUrls.map(async (url, index) => {
|
batchUrls.map(async (url, index) => {
|
||||||
const result = await scrapSingleUrl(url, this.pageOptions?.toMarkdown ?? true, this.pageOptions);
|
const result = await scrapSingleUrl(url, this.pageOptions, this.includeHtml);
|
||||||
processedUrls++;
|
processedUrls++;
|
||||||
if (inProgress) {
|
if (inProgress) {
|
||||||
inProgress({
|
inProgress({
|
||||||
@ -108,6 +109,7 @@ export class WebScraperDataProvider {
|
|||||||
maxCrawledLinks: this.maxCrawledLinks,
|
maxCrawledLinks: this.maxCrawledLinks,
|
||||||
limit: this.limit,
|
limit: this.limit,
|
||||||
generateImgAltText: this.generateImgAltText,
|
generateImgAltText: this.generateImgAltText,
|
||||||
|
includeHtml: this.includeHtml,
|
||||||
});
|
});
|
||||||
let links = await crawler.start(inProgress, 5, this.limit);
|
let links = await crawler.start(inProgress, 5, this.limit);
|
||||||
if (this.returnOnlyUrls) {
|
if (this.returnOnlyUrls) {
|
||||||
@ -142,6 +144,7 @@ export class WebScraperDataProvider {
|
|||||||
});
|
});
|
||||||
return links.map(url => ({
|
return links.map(url => ({
|
||||||
content: "",
|
content: "",
|
||||||
|
html: this.includeHtml ? "" : undefined,
|
||||||
markdown: "",
|
markdown: "",
|
||||||
metadata: { sourceURL: url },
|
metadata: { sourceURL: url },
|
||||||
}));
|
}));
|
||||||
@ -323,10 +326,10 @@ export class WebScraperDataProvider {
|
|||||||
this.limit = options.crawlerOptions?.limit ?? 10000;
|
this.limit = options.crawlerOptions?.limit ?? 10000;
|
||||||
this.generateImgAltText =
|
this.generateImgAltText =
|
||||||
options.crawlerOptions?.generateImgAltText ?? false;
|
options.crawlerOptions?.generateImgAltText ?? false;
|
||||||
this.pageOptions = options.pageOptions ?? {onlyMainContent: false, toMarkdown: true};
|
this.pageOptions = options.pageOptions ?? {onlyMainContent: false };
|
||||||
this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
|
this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
|
||||||
this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
|
this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
|
||||||
|
this.includeHtml = options?.includeHtml ?? false;
|
||||||
//! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
|
//! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
|
||||||
this.excludes = this.excludes.filter((item) => item !== "");
|
this.excludes = this.excludes.filter((item) => item !== "");
|
||||||
|
|
||||||
|
@ -103,8 +103,8 @@ export async function scrapWithPlaywright(url: string): Promise<string> {
|
|||||||
|
|
||||||
export async function scrapSingleUrl(
|
export async function scrapSingleUrl(
|
||||||
urlToScrap: string,
|
urlToScrap: string,
|
||||||
toMarkdown: boolean = true,
|
pageOptions: PageOptions = { onlyMainContent: true },
|
||||||
pageOptions: PageOptions = { onlyMainContent: true }
|
includeHtml: boolean = false
|
||||||
): Promise<Document> {
|
): Promise<Document> {
|
||||||
urlToScrap = urlToScrap.trim();
|
urlToScrap = urlToScrap.trim();
|
||||||
|
|
||||||
@ -172,9 +172,7 @@ export async function scrapSingleUrl(
|
|||||||
|
|
||||||
//* TODO: add an optional to return markdown or structured/extracted content
|
//* TODO: add an optional to return markdown or structured/extracted content
|
||||||
let cleanedHtml = removeUnwantedElements(text, pageOptions);
|
let cleanedHtml = removeUnwantedElements(text, pageOptions);
|
||||||
if (toMarkdown === false) {
|
|
||||||
return [cleanedHtml, text];
|
|
||||||
}
|
|
||||||
return [await parseMarkdown(cleanedHtml), text];
|
return [await parseMarkdown(cleanedHtml), text];
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -194,7 +192,8 @@ export async function scrapSingleUrl(
|
|||||||
return {
|
return {
|
||||||
url: urlToScrap,
|
url: urlToScrap,
|
||||||
content: text,
|
content: text,
|
||||||
markdown: pageOptions.toMarkdown === false ? undefined : text,
|
markdown: text,
|
||||||
|
html: includeHtml ? html : undefined,
|
||||||
metadata: { ...metadata, sourceURL: urlToScrap },
|
metadata: { ...metadata, sourceURL: urlToScrap },
|
||||||
} as Document;
|
} as Document;
|
||||||
}
|
}
|
||||||
@ -217,14 +216,16 @@ export async function scrapSingleUrl(
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
content: text,
|
content: text,
|
||||||
markdown: pageOptions.toMarkdown === false ? undefined : text,
|
markdown: text,
|
||||||
|
html: includeHtml ? html : undefined,
|
||||||
metadata: { ...metadata, sourceURL: urlToScrap },
|
metadata: { ...metadata, sourceURL: urlToScrap },
|
||||||
} as Document;
|
} as Document;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Error: ${error} - Failed to fetch URL: ${urlToScrap}`);
|
console.error(`Error: ${error} - Failed to fetch URL: ${urlToScrap}`);
|
||||||
return {
|
return {
|
||||||
content: "",
|
content: "",
|
||||||
markdown: pageOptions.toMarkdown === false ? undefined : "",
|
markdown: "",
|
||||||
|
html: "",
|
||||||
metadata: { sourceURL: urlToScrap },
|
metadata: { sourceURL: urlToScrap },
|
||||||
} as Document;
|
} as Document;
|
||||||
}
|
}
|
||||||
|
@ -25,6 +25,7 @@ export interface WebScraperOptions {
|
|||||||
pageOptions: any;
|
pageOptions: any;
|
||||||
team_id: string;
|
team_id: string;
|
||||||
origin?: string;
|
origin?: string;
|
||||||
|
includeHtml?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface FirecrawlJob {
|
export interface FirecrawlJob {
|
||||||
@ -40,7 +41,8 @@ export interface FirecrawlJob {
|
|||||||
pageOptions?: any;
|
pageOptions?: any;
|
||||||
origin: string;
|
origin: string;
|
||||||
extractor_options?: ExtractorOptions,
|
extractor_options?: ExtractorOptions,
|
||||||
num_tokens?: number
|
num_tokens?: number,
|
||||||
|
includeHtml?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum RateLimiterMode {
|
export enum RateLimiterMode {
|
||||||
|
Loading…
Reference in New Issue
Block a user