
Merge branch 'main' of https://github.com/mendableai/firecrawl into cjp/contributors-guide-and

Author: Caleb Peffer
Date: 2024-04-20 19:56:55 -07:00
commit ad7951a679
19 changed files with 621 additions and 290 deletions


@@ -1,4 +1,4 @@
-# Required
+ENV=
 NUM_WORKERS_PER_QUEUE=8
 PORT=
 HOST=


@@ -2,4 +2,7 @@ module.exports = {
   preset: "ts-jest",
   testEnvironment: "node",
   setupFiles: ["./jest.setup.js"],
+  // ignore dist folder root dir
+  modulePathIgnorePatterns: ["<rootDir>/dist/"],
 };


@@ -0,0 +1,67 @@
import { parseApi } from "../../src/lib/parseApi";
import { getRateLimiter } from "../../src/services/rate-limiter";
import { RateLimiterMode } from "../../src/types";
import { supabase_service } from "../../src/services/supabase";

export async function authenticateUser(
  req,
  res,
  mode?: RateLimiterMode
): Promise<{
  success: boolean;
  team_id?: string;
  error?: string;
  status?: number;
}> {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return { success: false, error: "Unauthorized", status: 401 };
  }
  const token = authHeader.split(" ")[1]; // Extract the token from "Bearer <token>"
  if (!token) {
    return {
      success: false,
      error: "Unauthorized: Token missing",
      status: 401,
    };
  }

  try {
    const incomingIP = (req.headers["x-forwarded-for"] ||
      req.socket.remoteAddress) as string;
    const iptoken = incomingIP + token;
    await getRateLimiter(
      token === "this_is_just_a_preview_token" ? RateLimiterMode.Preview : mode
    ).consume(iptoken);
  } catch (rateLimiterRes) {
    console.error(rateLimiterRes);
    return {
      success: false,
      error: "Rate limit exceeded. Too many requests, try again in 1 minute.",
      status: 429,
    };
  }

  if (
    token === "this_is_just_a_preview_token" &&
    (mode === RateLimiterMode.Scrape || mode === RateLimiterMode.Preview)
  ) {
    return { success: true, team_id: "preview" };
  }

  const normalizedApi = parseApi(token);
  // make sure api key is valid, based on the api_keys table in supabase
  const { data, error } = await supabase_service
    .from("api_keys")
    .select("*")
    .eq("key", normalizedApi);
  if (error || !data || data.length === 0) {
    return {
      success: false,
      error: "Unauthorized: Invalid token",
      status: 401,
    };
  }

  return { success: true, team_id: data[0].team_id };
}
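Every controller below funnels through this helper, so the only client-facing contract is the Authorization header that authHeader.split(" ")[1] parses. A minimal sketch of a conforming request, assuming Node 18+ for global fetch, the default port 3002 from index.ts, and a placeholder key:

const res = await fetch("http://localhost:3002/v0/scrape", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer fc-YOUR-API-KEY", // placeholder; a real key lives in the api_keys table
  },
  body: JSON.stringify({ url: "https://example.com" }),
});
// 401 -> missing/invalid token, 429 -> rate limit hit, per the returns above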


@@ -0,0 +1,36 @@
import { Request, Response } from "express";
import { authenticateUser } from "./auth";
import { RateLimiterMode } from "../../src/types";
import { addWebScraperJob } from "../../src/services/queue-jobs";
import { getWebScraperQueue } from "../../src/services/queue-service";

export async function crawlStatusController(req: Request, res: Response) {
  try {
    const { success, team_id, error, status } = await authenticateUser(
      req,
      res,
      RateLimiterMode.CrawlStatus
    );
    if (!success) {
      return res.status(status).json({ error });
    }
    const job = await getWebScraperQueue().getJob(req.params.jobId);
    if (!job) {
      return res.status(404).json({ error: "Job not found" });
    }

    const { current, current_url, total, current_step } = await job.progress();
    res.json({
      status: await job.getState(),
      // progress: job.progress(),
      current: current,
      current_url: current_url,
      current_step: current_step,
      total: total,
      data: job.returnvalue,
    });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
}


@@ -0,0 +1,79 @@
import { Request, Response } from "express";
import { WebScraperDataProvider } from "../../src/scraper/WebScraper";
import { billTeam } from "../../src/services/billing/credit_billing";
import { checkTeamCredits } from "../../src/services/billing/credit_billing";
import { authenticateUser } from "./auth";
import { RateLimiterMode } from "../../src/types";
import { addWebScraperJob } from "../../src/services/queue-jobs";

export async function crawlController(req: Request, res: Response) {
  try {
    const { success, team_id, error, status } = await authenticateUser(
      req,
      res,
      RateLimiterMode.Crawl
    );
    if (!success) {
      return res.status(status).json({ error });
    }

    const { success: creditsCheckSuccess, message: creditsCheckMessage } =
      await checkTeamCredits(team_id, 1);
    if (!creditsCheckSuccess) {
      return res.status(402).json({ error: "Insufficient credits" });
    }

    // authenticate on supabase
    const url = req.body.url;
    if (!url) {
      return res.status(400).json({ error: "Url is required" });
    }
    const mode = req.body.mode ?? "crawl";
    const crawlerOptions = req.body.crawlerOptions ?? {};
    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };

    if (mode === "single_urls" && !url.includes(",")) {
      try {
        const a = new WebScraperDataProvider();
        await a.setOptions({
          mode: "single_urls",
          urls: [url],
          crawlerOptions: {
            returnOnlyUrls: true,
          },
          pageOptions: pageOptions,
        });

        const docs = await a.getDocuments(false, (progress) => {
          job.progress({
            current: progress.current,
            total: progress.total,
            current_step: "SCRAPING",
            current_url: progress.currentDocumentUrl,
          });
        });
        return res.json({
          success: true,
          documents: docs,
        });
      } catch (error) {
        console.error(error);
        return res.status(500).json({ error: error.message });
      }
    }

    const job = await addWebScraperJob({
      url: url,
      mode: mode ?? "crawl", // fix for single urls not working
      crawlerOptions: { ...crawlerOptions },
      team_id: team_id,
      pageOptions: pageOptions,
      origin: req.body.origin ?? "api",
    });

    res.json({ jobId: job.id });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
}


@@ -0,0 +1,39 @@
import { Request, Response } from "express";
import { authenticateUser } from "./auth";
import { RateLimiterMode } from "../../src/types";
import { addWebScraperJob } from "../../src/services/queue-jobs";

export async function crawlPreviewController(req: Request, res: Response) {
  try {
    const { success, team_id, error, status } = await authenticateUser(
      req,
      res,
      RateLimiterMode.Preview
    );
    if (!success) {
      return res.status(status).json({ error });
    }
    // authenticate on supabase
    const url = req.body.url;
    if (!url) {
      return res.status(400).json({ error: "Url is required" });
    }
    const mode = req.body.mode ?? "crawl";
    const crawlerOptions = req.body.crawlerOptions ?? {};
    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };

    const job = await addWebScraperJob({
      url: url,
      mode: mode ?? "crawl", // fix for single urls not working
      crawlerOptions: { ...crawlerOptions, limit: 5, maxCrawledLinks: 5 },
      team_id: "preview",
      pageOptions: pageOptions,
      origin: "website-preview",
    });

    res.json({ jobId: job.id });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
}


@@ -0,0 +1,114 @@
import { Request, Response } from "express";
import { WebScraperDataProvider } from "../scraper/WebScraper";
import { billTeam, checkTeamCredits } from "../services/billing/credit_billing";
import { authenticateUser } from "./auth";
import { RateLimiterMode } from "../types";
import { logJob } from "../services/logging/log_job";
import { Document } from "../lib/entities";

export async function scrapeHelper(
  req: Request,
  team_id: string,
  crawlerOptions: any,
  pageOptions: any
): Promise<{
  success: boolean;
  error?: string;
  data?: Document;
  returnCode: number;
}> {
  const url = req.body.url;
  if (!url) {
    return { success: false, error: "Url is required", returnCode: 400 };
  }

  const a = new WebScraperDataProvider();
  await a.setOptions({
    mode: "single_urls",
    urls: [url],
    crawlerOptions: {
      ...crawlerOptions,
    },
    pageOptions: pageOptions,
  });

  const docs = await a.getDocuments(false);
  // make sure doc.content is not empty
  const filteredDocs = docs.filter(
    (doc: { content?: string }) => doc.content && doc.content.trim().length > 0
  );
  if (filteredDocs.length === 0) {
    return { success: true, error: "No page found", returnCode: 200 };
  }

  const { success, credit_usage } = await billTeam(
    team_id,
    filteredDocs.length
  );
  if (!success) {
    return {
      success: false,
      error:
        "Failed to bill team. Insufficient credits or subscription not found.",
      returnCode: 402,
    };
  }

  return {
    success: true,
    data: filteredDocs[0],
    returnCode: 200,
  };
}

export async function scrapeController(req: Request, res: Response) {
  try {
    // make sure to authenticate user first, Bearer <token>
    const { success, team_id, error, status } = await authenticateUser(
      req,
      res,
      RateLimiterMode.Scrape
    );
    if (!success) {
      return res.status(status).json({ error });
    }
    const crawlerOptions = req.body.crawlerOptions ?? {};
    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
    const origin = req.body.origin ?? "api";

    try {
      const { success: creditsCheckSuccess, message: creditsCheckMessage } =
        await checkTeamCredits(team_id, 1);
      if (!creditsCheckSuccess) {
        return res.status(402).json({ error: "Insufficient credits" });
      }
    } catch (error) {
      console.error(error);
      return res.status(500).json({ error: "Internal server error" });
    }

    const startTime = new Date().getTime();
    const result = await scrapeHelper(
      req,
      team_id,
      crawlerOptions,
      pageOptions
    );
    const endTime = new Date().getTime();
    const timeTakenInSeconds = (endTime - startTime) / 1000;

    logJob({
      success: result.success,
      message: result.error,
      num_docs: 1,
      docs: [result.data],
      time_taken: timeTakenInSeconds,
      team_id: team_id,
      mode: "scrape",
      url: req.body.url,
      crawlerOptions: crawlerOptions,
      pageOptions: pageOptions,
      origin: origin,
    });
    return res.status(result.returnCode).json(result);
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
}
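scrapeHelper is deliberately separable from the Express plumbing. A rough sketch of calling it directly, assuming a req that carries body.url and a hypothetical team id:

const result = await scrapeHelper(
  req,
  "team_123", // hypothetical team id
  {}, // crawlerOptions
  { onlyMainContent: true } // pageOptions
);
// returnCode is 400 (missing url), 402 (billing failed), or 200;
// on success, result.data holds the first Document with non-empty content.
res.status(result.returnCode).json(result);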


@@ -0,0 +1,25 @@
import { Request, Response } from "express";
import { getWebScraperQueue } from "../../src/services/queue-service";

export async function crawlJobStatusPreviewController(req: Request, res: Response) {
  try {
    const job = await getWebScraperQueue().getJob(req.params.jobId);
    if (!job) {
      return res.status(404).json({ error: "Job not found" });
    }

    const { current, current_url, total, current_step } = await job.progress();
    res.json({
      status: await job.getState(),
      // progress: job.progress(),
      current: current,
      current_url: current_url,
      current_step: current_step,
      total: total,
      data: job.returnvalue,
    });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
}


@@ -3,12 +3,8 @@ import bodyParser from "body-parser";
 import cors from "cors";
 import "dotenv/config";
 import { getWebScraperQueue } from "./services/queue-service";
-import { addWebScraperJob } from "./services/queue-jobs";
-import { supabase_service } from "./services/supabase";
-import { WebScraperDataProvider } from "./scraper/WebScraper";
-import { billTeam, checkTeamCredits } from "./services/billing/credit_billing";
-import { getRateLimiter, redisClient } from "./services/rate-limiter";
-import { parseApi } from "./lib/parseApi";
+import { redisClient } from "./services/rate-limiter";
+import { v0Router } from "./routes/v0";

 const { createBullBoard } = require("@bull-board/api");
 const { BullAdapter } = require("@bull-board/api/bullAdapter");

@@ -16,7 +12,6 @@ const { ExpressAdapter } = require("@bull-board/express");
 export const app = express();

 global.isProduction = process.env.IS_PRODUCTION === "true";
 app.use(bodyParser.urlencoded({ extended: true }));

@@ -46,266 +41,8 @@ app.get("/test", async (req, res) => {
   res.send("Hello, world!");
 });

-async function authenticateUser(req, res, mode?: string): Promise<{ success: boolean, team_id?: string, error?: string, status?: number }> {
-  const authHeader = req.headers.authorization;
-  if (!authHeader) {
-    return { success: false, error: "Unauthorized", status: 401 };
-  }
-  const token = authHeader.split(" ")[1]; // Extract the token from "Bearer <token>"
-  if (!token) {
-    return { success: false, error: "Unauthorized: Token missing", status: 401 };
-  }
-
-  try {
-    const incomingIP = (req.headers["x-forwarded-for"] ||
-      req.socket.remoteAddress) as string;
-    const iptoken = incomingIP + token;
-    await getRateLimiter(
-      token === "this_is_just_a_preview_token" ? true : false
-    ).consume(iptoken);
-  } catch (rateLimiterRes) {
-    console.error(rateLimiterRes);
-    return { success: false, error: "Rate limit exceeded. Too many requests, try again in 1 minute.", status: 429 };
-  }
-
-  if (token === "this_is_just_a_preview_token" && mode === "scrape") {
-    return { success: true, team_id: "preview" };
-  }
-
-  const normalizedApi = parseApi(token);
-  // make sure api key is valid, based on the api_keys table in supabase
-  const { data, error } = await supabase_service
-    .from("api_keys")
-    .select("*")
-    .eq("key", normalizedApi);
-  if (error || !data || data.length === 0) {
-    return { success: false, error: "Unauthorized: Invalid token", status: 401 };
-  }
-
-  return { success: true, team_id: data[0].team_id };
-}
-
-app.post("/v0/scrape", async (req, res) => {
-  try {
-    // make sure to authenticate user first, Bearer <token>
-    const { success, team_id, error, status } = await authenticateUser(req, res, "scrape");
-    if (!success) {
-      return res.status(status).json({ error });
-    }
-    const crawlerOptions = req.body.crawlerOptions ?? {};
-
-    try {
-      const { success: creditsCheckSuccess, message: creditsCheckMessage } =
-        await checkTeamCredits(team_id, 1);
-      if (!creditsCheckSuccess) {
-        return res.status(402).json({ error: "Insufficient credits" });
-      }
-    } catch (error) {
-      console.error(error);
-      return res.status(500).json({ error: "Internal server error" });
-    }
-
-    // authenticate on supabase
-    const url = req.body.url;
-    if (!url) {
-      return res.status(400).json({ error: "Url is required" });
-    }
-    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
-
-    try {
-      const a = new WebScraperDataProvider();
-      await a.setOptions({
-        mode: "single_urls",
-        urls: [url],
-        crawlerOptions: {
-          ...crawlerOptions,
-        },
-        pageOptions: pageOptions,
-      });
-
-      const docs = await a.getDocuments(false);
-      // make sure doc.content is not empty
-      const filteredDocs = docs.filter(
-        (doc: { content?: string }) =>
-          doc.content && doc.content.trim().length > 0
-      );
-      if (filteredDocs.length === 0) {
-        return res.status(200).json({ success: true, data: [] });
-      }
-      const { success, credit_usage } = await billTeam(
-        team_id,
-        filteredDocs.length
-      );
-      if (!success) {
-        // throw new Error("Failed to bill team, no subscription was found");
-        // return {
-        //   success: false,
-        //   message: "Failed to bill team, no subscription was found",
-        //   docs: [],
-        // };
-        return res
-          .status(402)
-          .json({ error: "Failed to bill, no subscription was found" });
-      }
-      return res.json({
-        success: true,
-        data: filteredDocs[0],
-      });
-    } catch (error) {
-      console.error(error);
-      return res.status(500).json({ error: error.message });
-    }
-  } catch (error) {
-    console.error(error);
-    return res.status(500).json({ error: error.message });
-  }
-});
-
-app.post("/v0/crawl", async (req, res) => {
-  try {
-    const { success, team_id, error, status } = await authenticateUser(req, res, "crawl");
-    if (!success) {
-      return res.status(status).json({ error });
-    }
-
-    const { success: creditsCheckSuccess, message: creditsCheckMessage } =
-      await checkTeamCredits(team_id, 1);
-    if (!creditsCheckSuccess) {
-      return res.status(402).json({ error: "Insufficient credits" });
-    }
-
-    // authenticate on supabase
-    const url = req.body.url;
-    if (!url) {
-      return res.status(400).json({ error: "Url is required" });
-    }
-    const mode = req.body.mode ?? "crawl";
-    const crawlerOptions = req.body.crawlerOptions ?? {};
-    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
-
-    if (mode === "single_urls" && !url.includes(",")) {
-      try {
-        const a = new WebScraperDataProvider();
-        await a.setOptions({
-          mode: "single_urls",
-          urls: [url],
-          crawlerOptions: {
-            returnOnlyUrls: true,
-          },
-          pageOptions: pageOptions,
-        });
-
-        const docs = await a.getDocuments(false, (progress) => {
-          job.progress({
-            current: progress.current,
-            total: progress.total,
-            current_step: "SCRAPING",
-            current_url: progress.currentDocumentUrl,
-          });
-        });
-        return res.json({
-          success: true,
-          documents: docs,
-        });
-      } catch (error) {
-        console.error(error);
-        return res.status(500).json({ error: error.message });
-      }
-    }
-
-    const job = await addWebScraperJob({
-      url: url,
-      mode: mode ?? "crawl", // fix for single urls not working
-      crawlerOptions: { ...crawlerOptions },
-      team_id: team_id,
-      pageOptions: pageOptions,
-    });
-
-    res.json({ jobId: job.id });
-  } catch (error) {
-    console.error(error);
-    return res.status(500).json({ error: error.message });
-  }
-});
-
-app.post("/v0/crawlWebsitePreview", async (req, res) => {
-  try {
-    const { success, team_id, error, status } = await authenticateUser(req, res, "scrape");
-    if (!success) {
-      return res.status(status).json({ error });
-    }
-    // authenticate on supabase
-    const url = req.body.url;
-    if (!url) {
-      return res.status(400).json({ error: "Url is required" });
-    }
-    const mode = req.body.mode ?? "crawl";
-    const crawlerOptions = req.body.crawlerOptions ?? {};
-    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false };
-
-    const job = await addWebScraperJob({
-      url: url,
-      mode: mode ?? "crawl", // fix for single urls not working
-      crawlerOptions: { ...crawlerOptions, limit: 5, maxCrawledLinks: 5 },
-      team_id: "preview",
-      pageOptions: pageOptions,
-    });
-
-    res.json({ jobId: job.id });
-  } catch (error) {
-    console.error(error);
-    return res.status(500).json({ error: error.message });
-  }
-});
-
-app.get("/v0/crawl/status/:jobId", async (req, res) => {
-  try {
-    const { success, team_id, error, status } = await authenticateUser(req, res, "scrape");
-    if (!success) {
-      return res.status(status).json({ error });
-    }
-    const job = await getWebScraperQueue().getJob(req.params.jobId);
-    if (!job) {
-      return res.status(404).json({ error: "Job not found" });
-    }
-
-    const { current, current_url, total, current_step } = await job.progress();
-    res.json({
-      status: await job.getState(),
-      // progress: job.progress(),
-      current: current,
-      current_url: current_url,
-      current_step: current_step,
-      total: total,
-      data: job.returnvalue,
-    });
-  } catch (error) {
-    console.error(error);
-    return res.status(500).json({ error: error.message });
-  }
-});
-
-app.get("/v0/checkJobStatus/:jobId", async (req, res) => {
-  try {
-    const job = await getWebScraperQueue().getJob(req.params.jobId);
-    if (!job) {
-      return res.status(404).json({ error: "Job not found" });
-    }
-
-    const { current, current_url, total, current_step } = await job.progress();
-    res.json({
-      status: await job.getState(),
-      // progress: job.progress(),
-      current: current,
-      current_url: current_url,
-      current_step: current_step,
-      total: total,
-      data: job.returnvalue,
-    });
-  } catch (error) {
-    console.error(error);
-    return res.status(500).json({ error: error.message });
-  }
-});
+// register router
+app.use(v0Router);

 const DEFAULT_PORT = process.env.PORT ?? 3002;
 const HOST = process.env.HOST ?? "localhost";

@@ -314,7 +51,9 @@ redisClient.connect();

 export function startServer(port = DEFAULT_PORT) {
   const server = app.listen(Number(port), HOST, () => {
     console.log(`Server listening on port ${port}`);
-    console.log(`For the UI, open http://${HOST}:${port}/admin/${process.env.BULL_AUTH_KEY}/queues`);
+    console.log(
+      `For the UI, open http://${HOST}:${port}/admin/${process.env.BULL_AUTH_KEY}/queues`
+    );
     console.log("");
     console.log("1. Make sure Redis is running on port 6379 by default");
     console.log(

@@ -351,4 +90,3 @@ app.get(`/admin/${process.env.BULL_AUTH_KEY}/queues`, async (req, res) => {

 app.get("/is-production", (req, res) => {
   res.send({ isProduction: global.isProduction });
 });


@@ -3,7 +3,7 @@ import { CrawlResult, WebScraperOptions } from "../types";
 import { WebScraperDataProvider } from "../scraper/WebScraper";
 import { Progress } from "../lib/entities";
 import { billTeam } from "../services/billing/credit_billing";
+import { Document } from "../lib/entities";

 export async function startWebScraperPipeline({
   job,
 }: {

@@ -24,7 +24,7 @@ export async function startWebScraperPipeline({
       job.moveToFailed(error);
     },
     team_id: job.data.team_id,
-  })) as { success: boolean; message: string; docs: CrawlResult[] };
+  })) as { success: boolean; message: string; docs: Document[] };
 }

 export async function runWebScraper({
   url,

@@ -44,7 +44,11 @@ export async function runWebScraper({
   onSuccess: (result: any) => void;
   onError: (error: any) => void;
   team_id: string;
-}): Promise<{ success: boolean; message: string; docs: CrawlResult[] }> {
+}): Promise<{
+  success: boolean;
+  message: string;
+  docs: CrawlResult[];
+}> {
   try {
     const provider = new WebScraperDataProvider();
     if (mode === "crawl") {

@@ -70,28 +74,32 @@ export async function runWebScraper({
       return {
         success: true,
         message: "No pages found",
-        docs: [],
+        docs: []
       };
     }

     // remove docs with empty content
     const filteredDocs = docs.filter((doc) => doc.content.trim().length > 0);
-    onSuccess(filteredDocs);

     const { success, credit_usage } = await billTeam(
       team_id,
       filteredDocs.length
     );
     if (!success) {
       // throw new Error("Failed to bill team, no subscription was found");
       return {
         success: false,
         message: "Failed to bill team, no subscription was found",
-        docs: [],
+        docs: []
       };
     }
-    return { success: true, message: "", docs: filteredDocs as CrawlResult[] };
+
+    // This is where the returnvalue from the job is set
+    onSuccess(filteredDocs);
+
+    // this return doesn't matter too much for the job completion result
+    return { success: true, message: "", docs: filteredDocs };
   } catch (error) {
     console.error("Error running web scraper", error);
     onError(error);

apps/api/src/routes/v0.ts (new file)

@ -0,0 +1,14 @@
import express from "express";
import { crawlController } from "../../src/controllers/crawl";
import { crawlStatusController } from "../../src/controllers/crawl-status";
import { scrapeController } from "../../src/controllers/scrape";
import { crawlPreviewController } from "../../src/controllers/crawlPreview";
import { crawlJobStatusPreviewController } from "../../src/controllers/status";
export const v0Router = express.Router();
v0Router.post("/v0/scrape", scrapeController);
v0Router.post("/v0/crawl", crawlController);
v0Router.post("/v0/crawlWebsitePreview", crawlPreviewController);
v0Router.get("/v0/crawl/status/:jobId", crawlStatusController);
v0Router.get("/v0/checkJobStatus/:jobId", crawlJobStatusPreviewController);
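Since each route already carries its full /v0/... path, mounting the router is the one-liner that index.ts does. A minimal sketch of that wiring (express.json() here is a stand-in; the real app configures body-parser in index.ts):

import express from "express";
import { v0Router } from "./routes/v0";

const app = express();
app.use(express.json()); // stand-in; index.ts uses body-parser
app.use(v0Router); // paths are absolute, so no mount prefix is needed
app.listen(3002); // DEFAULT_PORT in index.ts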


@@ -0,0 +1,34 @@
import { supabase_service } from "../supabase";
import { FirecrawlJob } from "../../types";
import "dotenv/config";

export async function logJob(job: FirecrawlJob) {
  try {
    // Only log jobs in production
    if (process.env.ENV !== "production") {
      return;
    }

    const { data, error } = await supabase_service
      .from("firecrawl_jobs")
      .insert([
        {
          success: job.success,
          message: job.message,
          num_docs: job.num_docs,
          docs: job.docs,
          time_taken: job.time_taken,
          team_id: job.team_id === "preview" ? null : job.team_id,
          mode: job.mode,
          url: job.url,
          crawler_options: job.crawlerOptions,
          page_options: job.pageOptions,
          origin: job.origin,
        },
      ]);
    if (error) {
      console.error("Error logging job:\n", error);
    }
  } catch (error) {
    console.error("Error logging job:\n", error);
  }
}
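Because of the process.env.ENV guard, a call like the worker's (below) is silently skipped outside production. A hedged sketch with placeholder field values, matching the FirecrawlJob interface added in types.ts:

// No-op unless ENV=production (see the .env change at the top of this diff).
await logJob({
  success: true,
  message: "",
  num_docs: 1,
  docs: [{ content: "# Example" }], // placeholder document
  time_taken: 1.23,
  team_id: "preview", // stored as null for preview teams
  mode: "scrape",
  url: "https://example.com",
  crawlerOptions: {},
  pageOptions: { onlyMainContent: false },
  origin: "api",
});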


@@ -4,6 +4,7 @@ import "dotenv/config";
 import { logtail } from "./logtail";
 import { startWebScraperPipeline } from "../main/runWebScraper";
 import { callWebhook } from "./webhook";
+import { logJob } from "./logging/log_job";

 getWebScraperQueue().process(
   Math.floor(Number(process.env.NUM_WORKERS_PER_QUEUE ?? 8)),

@@ -15,7 +16,11 @@ getWebScraperQueue().process(
         current_step: "SCRAPING",
         current_url: "",
       });
+      const start = Date.now();
       const { success, message, docs } = await startWebScraperPipeline({ job });
+      const end = Date.now();
+      const timeTakenInSeconds = (end - start) / 1000;

       const data = {
         success: success,

@@ -29,6 +34,20 @@ getWebScraperQueue().process(
       };

       await callWebhook(job.data.team_id, data);
+      await logJob({
+        success: success,
+        message: message,
+        num_docs: docs.length,
+        docs: docs,
+        time_taken: timeTakenInSeconds,
+        team_id: job.data.team_id,
+        mode: "crawl",
+        url: job.data.url,
+        crawlerOptions: job.data.crawlerOptions,
+        pageOptions: job.data.pageOptions,
+        origin: job.data.origin,
+      });
       done(null, data);
     } catch (error) {
       if (error instanceof CustomError) {


@@ -1,5 +1,6 @@
 import { RateLimiterRedis } from "rate-limiter-flexible";
 import * as redis from "redis";
+import { RateLimiterMode } from "../../src/types";

 const MAX_REQUESTS_PER_MINUTE_PREVIEW = 5;
 const MAX_CRAWLS_PER_MINUTE_STARTER = 2;

@@ -8,6 +9,9 @@ const MAX_CRAWLS_PER_MINUTE_SCALE = 20;

 const MAX_REQUESTS_PER_MINUTE_ACCOUNT = 20;

+const MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS = 120;
+
 export const redisClient = redis.createClient({

@@ -29,6 +33,13 @@ export const serverRateLimiter = new RateLimiterRedis({
   duration: 60, // Duration in seconds
 });

+export const crawlStatusRateLimiter = new RateLimiterRedis({
+  storeClient: redisClient,
+  keyPrefix: "middleware",
+  points: MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS,
+  duration: 60, // Duration in seconds
+});
+
 export function crawlRateLimit(plan: string){
   if(plan === "standard"){

@@ -56,10 +67,15 @@ export function crawlRateLimit(plan: string){
 }

-export function getRateLimiter(preview: boolean){
-  if(preview){
-    return previewRateLimiter;
-  }else{
-    return serverRateLimiter;
+export function getRateLimiter(mode: RateLimiterMode){
+  switch(mode) {
+    case RateLimiterMode.Preview:
+      return previewRateLimiter;
+    case RateLimiterMode.CrawlStatus:
+      return crawlStatusRateLimiter;
+    default:
+      return serverRateLimiter;
   }
 }
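auth.ts above is the real consumer of this switch; in isolation, usage looks roughly like this (IP and token are placeholders, and the key mirrors the iptoken built in the auth helper):

import { getRateLimiter } from "./rate-limiter";
import { RateLimiterMode } from "../../src/types";

const ip = "203.0.113.7"; // placeholder client IP
const token = "fc-YOUR-API-KEY"; // placeholder token
try {
  await getRateLimiter(RateLimiterMode.CrawlStatus).consume(ip + token);
  // allowed: under 120 requests per minute for crawl-status checks
} catch (rateLimiterRes) {
  // rejected: the caller translates this into a 429 response
}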


@@ -1,6 +1,7 @@
 import { supabase_service } from "./supabase";

 export const callWebhook = async (teamId: string, data: any) => {
+  try {
     const { data: webhooksData, error } = await supabase_service
       .from('webhooks')
       .select('url')

@@ -37,5 +38,9 @@ export const callWebhook = async (teamId: string, data: any) => {
         data: dataToSend,
         error: data.error || undefined,
       }),
     });
-  }
+  } catch (error) {
+    console.error(`Error sending webhook for team ID: ${teamId}`, error.message);
+  }
 };


@@ -22,7 +22,31 @@ export interface WebScraperOptions {
   crawlerOptions: any;
   pageOptions: any;
   team_id: string;
+  origin?: string;
+}
+
+export interface FirecrawlJob {
+  success: boolean;
+  message: string;
+  num_docs: number;
+  docs: any[];
+  time_taken: number;
+  team_id: string;
+  mode: string;
+  url: string;
+  crawlerOptions?: any;
+  pageOptions?: any;
+  origin: string;
 }
+
+export enum RateLimiterMode {
+  Crawl = "crawl",
+  CrawlStatus = "crawl-status",
+  Scrape = "scrape",
+  Preview = "preview",
+}


@@ -1,8 +1,9 @@
 {
   "name": "@mendable/firecrawl-js",
-  "version": "0.0.10",
+  "version": "0.0.11",
   "description": "JavaScript SDK for Firecrawl API",
   "main": "build/index.js",
+  "types": "types/index.d.ts",
   "type": "module",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1"


@@ -49,7 +49,7 @@
     // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */

     /* Emit */
-    // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
+    "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
     // "declarationMap": true, /* Create sourcemaps for d.ts files. */
     // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
     // "sourceMap": true, /* Create source map files for emitted JavaScript files. */

@@ -70,7 +70,7 @@
     // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
     // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
     // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
-    // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
+    "declarationDir": "./types", /* Specify the output directory for generated declaration files. */
     // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */

     /* Interop Constraints */

@@ -105,5 +105,7 @@
     /* Completeness */
     // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
     "skipLibCheck": true /* Skip type checking all .d.ts files. */
-  }
+  },
+  "include": ["src/**/*"],
+  "exclude": ["node_modules", "dist", "**/__tests__/*"]
 }

apps/js-sdk/firecrawl/types/index.d.ts (new file, vendored)

@@ -0,0 +1,107 @@
import { AxiosResponse, AxiosRequestHeaders } from 'axios';
/**
 * Configuration interface for FirecrawlApp.
 */
export interface FirecrawlAppConfig {
    apiKey?: string | null;
}
/**
 * Generic parameter interface.
 */
export interface Params {
    [key: string]: any;
}
/**
 * Response interface for scraping operations.
 */
export interface ScrapeResponse {
    success: boolean;
    data?: any;
    error?: string;
}
/**
 * Response interface for crawling operations.
 */
export interface CrawlResponse {
    success: boolean;
    jobId?: string;
    data?: any;
    error?: string;
}
/**
 * Response interface for job status checks.
 */
export interface JobStatusResponse {
    success: boolean;
    status: string;
    jobId?: string;
    data?: any;
    error?: string;
}
/**
 * Main class for interacting with the Firecrawl API.
 */
export default class FirecrawlApp {
    private apiKey;
    /**
     * Initializes a new instance of the FirecrawlApp class.
     * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance.
     */
    constructor({ apiKey }: FirecrawlAppConfig);
    /**
     * Scrapes a URL using the Firecrawl API.
     * @param {string} url - The URL to scrape.
     * @param {Params | null} params - Additional parameters for the scrape request.
     * @returns {Promise<ScrapeResponse>} The response from the scrape operation.
     */
    scrapeUrl(url: string, params?: Params | null): Promise<ScrapeResponse>;
    /**
     * Initiates a crawl job for a URL using the Firecrawl API.
     * @param {string} url - The URL to crawl.
     * @param {Params | null} params - Additional parameters for the crawl request.
     * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete.
     * @param {number} timeout - Timeout in seconds for job status checks.
     * @returns {Promise<CrawlResponse>} The response from the crawl operation.
     */
    crawlUrl(url: string, params?: Params | null, waitUntilDone?: boolean, timeout?: number): Promise<CrawlResponse>;
    /**
     * Checks the status of a crawl job using the Firecrawl API.
     * @param {string} jobId - The job ID of the crawl operation.
     * @returns {Promise<JobStatusResponse>} The response containing the job status.
     */
    checkCrawlStatus(jobId: string): Promise<JobStatusResponse>;
    /**
     * Prepares the headers for an API request.
     * @returns {AxiosRequestHeaders} The prepared headers.
     */
    prepareHeaders(): AxiosRequestHeaders;
    /**
     * Sends a POST request to the specified URL.
     * @param {string} url - The URL to send the request to.
     * @param {Params} data - The data to send in the request.
     * @param {AxiosRequestHeaders} headers - The headers for the request.
     * @returns {Promise<AxiosResponse>} The response from the POST request.
     */
    postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise<AxiosResponse>;
    /**
     * Sends a GET request to the specified URL.
     * @param {string} url - The URL to send the request to.
     * @param {AxiosRequestHeaders} headers - The headers for the request.
     * @returns {Promise<AxiosResponse>} The response from the GET request.
     */
    getRequest(url: string, headers: AxiosRequestHeaders): Promise<AxiosResponse>;
    /**
     * Monitors the status of a crawl job until completion or failure.
     * @param {string} jobId - The job ID of the crawl operation.
     * @param {AxiosRequestHeaders} headers - The headers for the request.
     * @param {number} timeout - Timeout in seconds for job status checks.
     * @returns {Promise<any>} The final job status or data.
     */
    monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, timeout: number): Promise<any>;
    /**
     * Handles errors from API responses.
     * @param {AxiosResponse} response - The response from the API.
     * @param {string} action - The action being performed when the error occurred.
     */
    handleError(response: AxiosResponse, action: string): void;
}
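These declarations describe the SDK surface shipped in 0.0.11. A short usage sketch (the API key is a placeholder):

import FirecrawlApp from "@mendable/firecrawl-js";

const app = new FirecrawlApp({ apiKey: "fc-YOUR-API-KEY" });

// Single-page scrape
const scraped = await app.scrapeUrl("https://example.com");
if (scraped.success) console.log(scraped.data);

// Start a crawl and wait for it; monitorJobStatus (above) polls until completion
const crawl = await app.crawlUrl("https://example.com", null, true, 30);
console.log(crawl);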