import express from "express";
import bodyParser from "body-parser";
import cors from "cors";
import "dotenv/config";
import { getWebScraperQueue } from "./services/queue-service";
import { addWebScraperJob } from "./services/queue-jobs";
import { supabase_service } from "./services/supabase";
import { WebScraperDataProvider } from "./scraper/WebScraper";
import { billTeam, checkTeamCredits } from "./services/billing/credit_billing";
import { getRateLimiter, redisClient } from "./services/rate-limiter";
import { parseApi } from "./lib/parseApi";

const { createBullBoard } = require("@bull-board/api");
const { BullAdapter } = require("@bull-board/api/bullAdapter");
const { ExpressAdapter } = require("@bull-board/express");

export const app = express();

global.isProduction = process.env.IS_PRODUCTION === "true";

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: "10mb" }));

app.use(cors()); // Enable CORS for all routes

const serverAdapter = new ExpressAdapter();
serverAdapter.setBasePath(`/admin/${process.env.BULL_AUTH_KEY}/queues`);

const { addQueue, removeQueue, setQueues, replaceQueues } = createBullBoard({
  queues: [new BullAdapter(getWebScraperQueue())],
  serverAdapter: serverAdapter,
});

app.use(
  `/admin/${process.env.BULL_AUTH_KEY}/queues`,
  serverAdapter.getRouter()
);

app.get("/", (req, res) => {
  res.send("SCRAPERS-JS: Hello, world! Fly.io");
});

// Simple liveness check.
app.get("/test", async (req, res) => {
  res.send("Hello, world!");
});

// Authenticates a request by its Bearer token: rate-limits by IP + token,
// allows the preview token for scrape-mode requests, and otherwise looks the
// key up in the api_keys table in Supabase.
async function authenticateUser(
  req: express.Request,
  res: express.Response,
  mode?: string
): Promise<{ success: boolean; team_id?: string; error?: string; status?: number }> {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return { success: false, error: "Unauthorized", status: 401 };
  }
  const token = authHeader.split(" ")[1]; // Extract the token from "Bearer <token>"
  if (!token) {
    return { success: false, error: "Unauthorized: Token missing", status: 401 };
  }

  try {
    const incomingIP = (req.headers["x-forwarded-for"] ||
      req.socket.remoteAddress) as string;
    const iptoken = incomingIP + token;
    await getRateLimiter(
      token === "this_is_just_a_preview_token"
    ).consume(iptoken);
  } catch (rateLimiterRes) {
    console.error(rateLimiterRes);
    return { success: false, error: "Rate limit exceeded. Too many requests, try again in 1 minute.", status: 429 };
  }

  if (token === "this_is_just_a_preview_token" && mode === "scrape") {
    return { success: true, team_id: "preview" };
  }

  const normalizedApi = parseApi(token);
  // Make sure the API key is valid, based on the api_keys table in Supabase.
  const { data, error } = await supabase_service
    .from("api_keys")
    .select("*")
    .eq("key", normalizedApi);
  if (error || !data || data.length === 0) {
    return { success: false, error: "Unauthorized: Invalid token", status: 401 };
  }

  return { success: true, team_id: data[0].team_id };
}
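
// Usage sketch: every protected route below follows this pattern.
//
//   const { success, team_id, error, status } = await authenticateUser(req, res, "scrape");
//   if (!success) {
//     return res.status(status).json({ error });
//   }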

app.post("/v0/scrape", async (req, res) => {
  try {
    // Make sure to authenticate the user first, via "Bearer <token>".
    const { success, team_id, error, status } = await authenticateUser(req, res, "scrape");
    if (!success) {
      return res.status(status).json({ error });
    }
    const crawlerOptions = req.body.crawlerOptions ?? {};

    try {
      const { success: creditsCheckSuccess, message: creditsCheckMessage } =
        await checkTeamCredits(team_id, 1);
      if (!creditsCheckSuccess) {
        return res.status(402).json({ error: "Insufficient credits" });
      }
    } catch (error) {
      console.error(error);
      return res.status(500).json({ error: "Internal server error" });
    }

    // Validate the request body.
    const url = req.body.url;
    if (!url) {
      return res.status(400).json({ error: "Url is required" });
    }

    try {
      const scraper = new WebScraperDataProvider();
      await scraper.setOptions({
        mode: "single_urls",
        urls: [url],
        crawlerOptions: {
          ...crawlerOptions,
        },
      });

      const docs = await scraper.getDocuments(false);
      // Make sure doc.content is not empty.
      const filteredDocs = docs.filter(
        (doc: { content?: string }) =>
          doc.content && doc.content.trim().length > 0
      );
      if (filteredDocs.length === 0) {
        return res.status(200).json({ success: true, data: [] });
      }
      const { success, credit_usage } = await billTeam(
        team_id,
        filteredDocs.length
      );
      if (!success) {
        return res
          .status(402)
          .json({ error: "Failed to bill, no subscription was found" });
      }
      return res.json({
        success: true,
        data: filteredDocs[0],
      });
    } catch (error) {
      console.error(error);
      return res.status(500).json({ error: error.message });
    }
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
});
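
// Example request (a sketch; assumes the server runs locally on the default
// port 3002 and FIRECRAWL_API_KEY holds a valid key from the api_keys table):
//
//   const res = await fetch("http://localhost:3002/v0/scrape", {
//     method: "POST",
//     headers: {
//       "Content-Type": "application/json",
//       Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}`,
//     },
//     body: JSON.stringify({ url: "https://example.com" }),
//   });
//   const { success, data } = await res.json(); // data is the scraped document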

app.post("/v0/crawl", async (req, res) => {
  try {
    const { success, team_id, error, status } = await authenticateUser(req, res, "crawl");
    if (!success) {
      return res.status(status).json({ error });
    }

    const { success: creditsCheckSuccess, message: creditsCheckMessage } =
      await checkTeamCredits(team_id, 1);
    if (!creditsCheckSuccess) {
      return res.status(402).json({ error: "Insufficient credits" });
    }

    // Validate the request body.
    const url = req.body.url;
    if (!url) {
      return res.status(400).json({ error: "Url is required" });
    }
    const mode = req.body.mode ?? "crawl";
    const crawlerOptions = req.body.crawlerOptions ?? {};

    if (mode === "single_urls" && !url.includes(",")) {
      try {
        const scraper = new WebScraperDataProvider();
        await scraper.setOptions({
          mode: "single_urls",
          urls: [url],
          crawlerOptions: {
            returnOnlyUrls: true,
          },
        });

        // This synchronous branch never enqueues a queue job, so there is no
        // job to report scraping progress to.
        const docs = await scraper.getDocuments(false);
        return res.json({
          success: true,
          documents: docs,
        });
      } catch (error) {
        console.error(error);
        return res.status(500).json({ error: error.message });
      }
    }
    const job = await addWebScraperJob({
      url: url,
      mode: mode,
      crawlerOptions: { ...crawlerOptions },
      team_id: team_id,
    });

    res.json({ jobId: job.id });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
});
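
// Example request (a sketch; same assumptions as the /v0/scrape example):
//
//   const res = await fetch("http://localhost:3002/v0/crawl", {
//     method: "POST",
//     headers: {
//       "Content-Type": "application/json",
//       Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}`,
//     },
//     body: JSON.stringify({ url: "https://example.com", mode: "crawl" }),
//   });
//   const { jobId } = await res.json(); // poll /v0/crawl/status/:jobId with this id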

app.post("/v0/crawlWebsitePreview", async (req, res) => {
  try {
    // Make sure to authenticate the user first, via "Bearer <token>".
    const authHeader = req.headers.authorization;
    if (!authHeader) {
      return res.status(401).json({ error: "Unauthorized" });
    }
    const token = authHeader.split(" ")[1]; // Extract the token from "Bearer <token>"
    if (!token) {
      return res.status(401).json({ error: "Unauthorized: Token missing" });
    }

    // Validate the request body.
    const url = req.body.url;
    if (!url) {
      return res.status(400).json({ error: "Url is required" });
    }
    const mode = req.body.mode ?? "crawl";
    const crawlerOptions = req.body.crawlerOptions ?? {};
    // Previews run as the "preview" team and are capped at 5 pages.
    const job = await addWebScraperJob({
      url: url,
      mode: mode,
      crawlerOptions: { ...crawlerOptions, limit: 5, maxCrawledLinks: 5 },
      team_id: "preview",
    });

    res.json({ jobId: job.id });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
});

app.get("/v0/crawl/status/:jobId", async (req, res) => {
  try {
    const { success, team_id, error, status } = await authenticateUser(req, res, "scrape");
    if (!success) {
      return res.status(status).json({ error });
    }
    const job = await getWebScraperQueue().getJob(req.params.jobId);
    if (!job) {
      return res.status(404).json({ error: "Job not found" });
    }

    const { current, current_url, total, current_step } = await job.progress();
    res.json({
      status: await job.getState(),
      current: current,
      current_url: current_url,
      current_step: current_step,
      total: total,
      data: job.returnvalue,
    });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
});
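
// Example status poll (a sketch; same assumptions as the /v0/scrape example,
// with jobId taken from a previous /v0/crawl response):
//
//   const res = await fetch(`http://localhost:3002/v0/crawl/status/${jobId}`, {
//     headers: { Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}` },
//   });
//   const { status, current, total, data } = await res.json();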

// Note: unlike /v0/crawl/status, this endpoint does not require authentication.
app.get("/v0/checkJobStatus/:jobId", async (req, res) => {
  try {
    const job = await getWebScraperQueue().getJob(req.params.jobId);
    if (!job) {
      return res.status(404).json({ error: "Job not found" });
    }

    const { current, current_url, total, current_step } = await job.progress();
    res.json({
      status: await job.getState(),
      current: current,
      current_url: current_url,
      current_step: current_step,
      total: total,
      data: job.returnvalue,
    });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
});

const DEFAULT_PORT = process.env.PORT ?? 3002;
const HOST = process.env.HOST ?? "localhost";
redisClient.connect();

export function startServer(port = DEFAULT_PORT) {
  const server = app.listen(Number(port), HOST, () => {
    console.log(`Server listening on port ${port}`);
    console.log(`For the UI, open http://${HOST}:${port}/admin/${process.env.BULL_AUTH_KEY}/queues`);
    console.log("");
    console.log("1. Make sure Redis is running (on port 6379 by default)");
    console.log(
      "2. If you want to run Nango, forward port 3002 with: ngrok http 3002"
    );
  });
  return server;
}
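
// Usage sketch: startServer() listens on PORT (default 3002);
// startServer(8080) overrides the port for a single instance.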

if (require.main === module) {
  startServer();
}

// Used as a health check so monitors don't tear the server down: reports
// whether the web scraper queue still has active jobs.
app.get(`/admin/${process.env.BULL_AUTH_KEY}/queues`, async (req, res) => {
  try {
    const webScraperQueue = getWebScraperQueue();
    const [webScraperActive] = await Promise.all([
      webScraperQueue.getActiveCount(),
    ]);

    const noActiveJobs = webScraperActive === 0;
    // 200 if no active jobs, 503 if there are active jobs
    return res.status(noActiveJobs ? 200 : 503).json({
      webScraperActive,
      noActiveJobs,
    });
  } catch (error) {
    console.error(error);
    return res.status(500).json({ error: error.message });
  }
});

app.get("/is-production", (req, res) => {
  res.send({ isProduction: global.isProduction });
});