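// apps/api/src/main/runWebScraper.ts
// Bridges queued Bull jobs to the WebScraperDataProvider: runs scrapes/crawls,
// reports progress, bills the team, and settles the queue job.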
import { Job } from "bull";
import { CrawlResult, WebScraperOptions } from "../types";
import { WebScraperDataProvider } from "../scraper/WebScraper";
import { DocumentUrl, Progress } from "../lib/entities";
import { billTeam } from "../services/billing/credit_billing";
import { Document } from "../lib/entities";
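// Pipeline entry point for a queued Bull job: runs the scraper with the job's data,
// streams partial documents back through job.progress, and settles the job via
// moveToCompleted / moveToFailed.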
export async function startWebScraperPipeline({
  job,
}: {
  job: Job<WebScraperOptions>;
}) {
  let partialDocs: Document[] = [];
  return (await runWebScraper({
    url: job.data.url,
    mode: job.data.mode,
    crawlerOptions: job.data.crawlerOptions,
    pageOptions: job.data.pageOptions,
    inProgress: (progress) => {
      partialDocs.push(progress.currentDocument);
      job.progress({...progress, partialDocs: partialDocs});
    },
    onSuccess: (result) => {
      job.moveToCompleted(result);
    },
    onError: (error) => {
      job.moveToFailed(error);
    },
    team_id: job.data.team_id,
    bull_job_id: job.id.toString(),
  })) as { success: boolean; message: string; docs: Document[] };
}
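// Runs a scrape/crawl for the given URL(s), reports progress through inProgress,
// bills the team for the returned documents, and signals the outcome through
// onSuccess / onError.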
export async function runWebScraper({
  url,
  mode,
  crawlerOptions,
  pageOptions,
  inProgress,
  onSuccess,
  onError,
  team_id,
  bull_job_id,
}: {
  url: string;
  mode: "crawl" | "single_urls" | "sitemap";
  crawlerOptions: any;
  pageOptions?: any;
  inProgress: (progress: any) => void;
  onSuccess: (result: any) => void;
  onError: (error: any) => void;
  team_id: string;
  bull_job_id: string;
}): Promise<{
  success: boolean;
  message: string;
  docs: Document[] | DocumentUrl[];
}> {
  try {
    const provider = new WebScraperDataProvider();
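    // Configure the provider. Crawl mode treats the input as a single start URL and
    // also receives the Bull job id (bullJobId), presumably so the crawl can be tied
    // back to this queue job; other modes accept a comma-separated list of URLs.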
    if (mode === "crawl") {
      await provider.setOptions({
        mode: mode,
        urls: [url],
        crawlerOptions: crawlerOptions,
        pageOptions: pageOptions,
        bullJobId: bull_job_id,
      });
    } else {
      await provider.setOptions({
        mode: mode,
        urls: url.split(","),
        crawlerOptions: crawlerOptions,
        pageOptions: pageOptions,
      });
    }
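    // Execute the scrape and forward per-page progress to the caller's inProgress callback.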
    const docs = (await provider.getDocuments(false, (progress: Progress) => {
      inProgress(progress);
    })) as Document[];
    if (docs.length === 0) {
      return {
        success: true,
        message: "No pages found",
        docs: []
      };
    }
    // If returnOnlyUrls is set, reduce each document to its source URL;
    // otherwise drop documents with empty content.
    // (Documents without a sourceURL map to undefined entries here.)
    const filteredDocs = crawlerOptions.returnOnlyUrls
      ? docs.map((doc) => {
          if (doc.metadata.sourceURL) {
            return { url: doc.metadata.sourceURL };
          }
        })
      : docs.filter((doc) => doc.content.trim().length > 0);
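    // Bill the team based on the number of documents being returned.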
    const billingResult = await billTeam(
      team_id,
      filteredDocs.length
    );

    if (!billingResult.success) {
      // throw new Error("Failed to bill team, no subscription was found");
      return {
        success: false,
        message: "Failed to bill team, no subscription was found",
        docs: []
      };
    }
    // This is where the return value of the job is set (onSuccess moves the Bull job to completed).
    onSuccess(filteredDocs);

    // This return value matters less for job completion; the Bull result comes from onSuccess above.
    return { success: true, message: "", docs: filteredDocs };
  } catch (error) {
    console.error("Error running web scraper", error);
    onError(error);
    return { success: false, message: error.message, docs: [] };
  }
}