From c71ea7a795f9096bbde997bbd48539a5ec865ea0 Mon Sep 17 00:00:00 2001 From: Matt Joyce Date: Sat, 8 Jun 2024 11:08:26 +1000 Subject: [PATCH 01/12] Prepare headers consistently --- apps/python-sdk/firecrawl/firecrawl.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index b9a823f..c5207fb 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -45,10 +45,8 @@ class FirecrawlApp: Exception: If the scrape request fails. """ - headers = { - 'Content-Type': 'application/json', - 'Authorization': f'Bearer {self.api_key}' - } + headers = self._prepare_headers() + # Prepare the base scrape parameters with the URL scrape_params = {'url': url} @@ -101,10 +99,7 @@ class FirecrawlApp: Raises: Exception: If the search request fails. """ - headers = { - 'Content-Type': 'application/json', - 'Authorization': f'Bearer {self.api_key}' - } + headers = self._prepare_headers() json_data = {'query': query} if params: json_data.update(params) @@ -297,3 +292,4 @@ class FirecrawlApp: raise Exception(f'Failed to {action}. Status code: {response.status_code}. Error: {error_message}') else: raise Exception(f'Unexpected error occurred while trying to {action}. Status code: {response.status_code}') + From 9f306736afc3cead950da4898f0d8b658aac860e Mon Sep 17 00:00:00 2001 From: Matt Joyce Date: Sat, 8 Jun 2024 11:18:30 +1000 Subject: [PATCH 02/12] More detailed error handling --- apps/python-sdk/firecrawl/firecrawl.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index c5207fb..f13ba72 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -287,9 +287,20 @@ class FirecrawlApp: Raises: Exception: An exception with a message containing the status code and error details from the response. """ - if response.status_code in [402, 408, 409, 500]: - error_message = response.json().get('error', 'Unknown error occurred') - raise Exception(f'Failed to {action}. Status code: {response.status_code}. Error: {error_message}') + error_message = response.json().get('error', 'No additional error details provided.') + + if response.status_code == 402: + message = f"Payment Required: Failed to {action}. {error_message}" + elif response.status_code == 408: + message = f"Request Timeout: Failed to {action} as the request timed out. {error_message}" + elif response.status_code == 409: + message = f"Conflict: Failed to {action} due to a conflict. {error_message}" + elif response.status_code == 500: + message = f"Internal Server Error: Failed to {action}. {error_message}" else: - raise Exception(f'Unexpected error occurred while trying to {action}. Status code: {response.status_code}') + message = f"Unexpected error during {action}: Status code {response.status_code}. 
{error_message}" + + # Raise an HTTPError with the custom message and attach the response + raise requests.exceptions.HTTPError(message, response=response) + From 7477c5e5bd23b88faa30d7ddf5e34cb335a6b6fb Mon Sep 17 00:00:00 2001 From: Matt Joyce Date: Sat, 8 Jun 2024 11:28:51 +1000 Subject: [PATCH 03/12] Use error handler consistently --- apps/python-sdk/firecrawl/firecrawl.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index f13ba72..a820ef2 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -30,6 +30,7 @@ class FirecrawlApp: if self.api_key is None: raise ValueError('No API key provided') self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev') + def scrape_url(self, url: str, params: Optional[Dict[str, Any]] = None) -> Any: """ Scrape the specified URL using the Firecrawl API. @@ -79,11 +80,8 @@ class FirecrawlApp: return response['data'] else: raise Exception(f'Failed to scrape URL. Error: {response["error"]}') - elif response.status_code in [402, 408, 409, 500]: - error_message = response.json().get('error', 'Unknown error occurred') - raise Exception(f'Failed to scrape URL. Status code: {response.status_code}. Error: {error_message}') else: - raise Exception(f'Failed to scrape URL. Status code: {response.status_code}') + self._handle_error(response, 'scrape URL') def search(self, query, params=None): """ @@ -116,11 +114,8 @@ class FirecrawlApp: else: raise Exception(f'Failed to search. Error: {response["error"]}') - elif response.status_code in [402, 409, 500]: - error_message = response.json().get('error', 'Unknown error occurred') - raise Exception(f'Failed to search. Status code: {response.status_code}. Error: {error_message}') else: - raise Exception(f'Failed to search. Status code: {response.status_code}') + self._handle_error(response, 'search') def crawl_url(self, url, params=None, wait_until_done=True, poll_interval=2, idempotency_key=None): """ @@ -303,4 +298,3 @@ class FirecrawlApp: # Raise an HTTPError with the custom message and attach the response raise requests.exceptions.HTTPError(message, response=response) - From 6fd9ce1c89ca19ecb737a36778f7dcb0cd2a7c35 Mon Sep 17 00:00:00 2001 From: Matt Joyce Date: Sat, 8 Jun 2024 11:46:52 +1000 Subject: [PATCH 04/12] type hints and linting --- apps/python-sdk/firecrawl/firecrawl.py | 31 +++++++++++++++++--------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index a820ef2..fb12af4 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -83,7 +83,7 @@ class FirecrawlApp: else: self._handle_error(response, 'scrape URL') - def search(self, query, params=None): + def search(self, query: str, params: Optional[Dict[str, Any]] = None) -> Any: """ Perform a search using the Firecrawl API. @@ -117,7 +117,11 @@ class FirecrawlApp: else: self._handle_error(response, 'search') - def crawl_url(self, url, params=None, wait_until_done=True, poll_interval=2, idempotency_key=None): + def crawl_url(self, url: str, + params: Optional[Dict[str, Any]] = None, + wait_until_done: bool = True, + poll_interval: int = 2, + idempotency_key: Optional[str] = None) -> Any: """ Initiate a crawl job for the specified URL using the Firecrawl API. 
@@ -148,7 +152,7 @@ class FirecrawlApp: else: self._handle_error(response, 'start crawl job') - def check_crawl_status(self, job_id): + def check_crawl_status(self, job_id: str) -> Any: """ Check the status of a crawl job using the Firecrawl API. @@ -168,7 +172,7 @@ class FirecrawlApp: else: self._handle_error(response, 'check crawl status') - def _prepare_headers(self, idempotency_key=None): + def _prepare_headers(self, idempotency_key: Optional[str] = None) -> Dict[str, str]: """ Prepare the headers for API requests. @@ -190,7 +194,11 @@ class FirecrawlApp: 'Authorization': f'Bearer {self.api_key}', } - def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5): + def _post_request(self, url: str, + data: Dict[str, Any], + headers: Dict[str, str], + retries: int = 3, + backoff_factor: float = 0.5) -> requests.Response: """ Make a POST request with retries. @@ -215,7 +223,10 @@ class FirecrawlApp: return response return response - def _get_request(self, url, headers, retries=3, backoff_factor=0.5): + def _get_request(self, url: str, + headers: Dict[str, str], + retries: int = 3, + backoff_factor: float = 0.5) -> requests.Response: """ Make a GET request with retries. @@ -239,7 +250,7 @@ class FirecrawlApp: return response return response - def _monitor_job_status(self, job_id, headers, poll_interval): + def _monitor_job_status(self, job_id: str, headers: Dict[str, str], poll_interval: int) -> Any: """ Monitor the status of a crawl job until completion. @@ -271,7 +282,7 @@ class FirecrawlApp: else: self._handle_error(status_response, 'check crawl status') - def _handle_error(self, response, action): + def _handle_error(self, response: requests.Response, action: str) -> None: """ Handle errors from API responses. @@ -283,7 +294,7 @@ class FirecrawlApp: Exception: An exception with a message containing the status code and error details from the response. """ error_message = response.json().get('error', 'No additional error details provided.') - + if response.status_code == 402: message = f"Payment Required: Failed to {action}. {error_message}" elif response.status_code == 408: @@ -297,4 +308,4 @@ class FirecrawlApp: # Raise an HTTPError with the custom message and attach the response raise requests.exceptions.HTTPError(message, response=response) - + \ No newline at end of file From 827354a116a4ea424af7c1994aae7214d78c8032 Mon Sep 17 00:00:00 2001 From: Matt Joyce Date: Mon, 10 Jun 2024 21:21:23 +1000 Subject: [PATCH 05/12] Added logging to python sdk FIRECRAWL_LOGGING_LEVEL Instantiates the logger early and depends on env to set. --- apps/python-sdk/firecrawl/__init__.py | 54 ++++++++++++++++++++++++++ apps/python-sdk/firecrawl/firecrawl.py | 10 ++++- 2 files changed, 63 insertions(+), 1 deletion(-) diff --git a/apps/python-sdk/firecrawl/__init__.py b/apps/python-sdk/firecrawl/__init__.py index ecb017f..4e53e77 100644 --- a/apps/python-sdk/firecrawl/__init__.py +++ b/apps/python-sdk/firecrawl/__init__.py @@ -1,3 +1,57 @@ +""" +This is the Firecrawl package. + +This package provides a Python SDK for interacting with the Firecrawl API. +It includes methods to scrape URLs, perform searches, initiate and monitor crawl jobs, +and check the status of these jobs. 
+ +For more information visit https://github.com/firecrawl/ +""" + +import logging +import os + from .firecrawl import FirecrawlApp __version__ = "0.0.14" + +# Define the logger for the Firecrawl project +logger: logging.Logger = logging.getLogger("firecrawl") + + +def _basic_config() -> None: + """Set up basic configuration for logging with a specific format and date format.""" + try: + logging.basicConfig( + format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + except Exception as e: + logger.error("Failed to configure logging: %s", e) + + +def setup_logging() -> None: + """Set up logging based on the FIRECRAWL_LOGGING_LEVEL environment variable.""" + env = os.environ.get( + "FIRECRAWL_LOGGING_LEVEL", "INFO" + ).upper() # Default to 'INFO' level + _basic_config() + + if env == "DEBUG": + logger.setLevel(logging.DEBUG) + elif env == "INFO": + logger.setLevel(logging.INFO) + elif env == "WARNING": + logger.setLevel(logging.WARNING) + elif env == "ERROR": + logger.setLevel(logging.ERROR) + elif env == "CRITICAL": + logger.setLevel(logging.CRITICAL) + else: + logger.setLevel(logging.INFO) + logger.warning("Unknown logging level: %s, defaulting to INFO", env) + + +# Initialize logging configuration when the module is imported +setup_logging() +logger.debug("Debugging logger setup") diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index b9a823f..f20d4bd 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -9,13 +9,14 @@ and handles retries for certain HTTP status codes. Classes: - FirecrawlApp: Main class for interacting with the Firecrawl API. """ - +import logging import os import time from typing import Any, Dict, Optional import requests +logger : logging.Logger = logging.getLogger("firecrawl") class FirecrawlApp: """ @@ -28,8 +29,15 @@ class FirecrawlApp: def __init__(self, api_key: Optional[str] = None, api_url: Optional[str] = None) -> None: self.api_key = api_key or os.getenv('FIRECRAWL_API_KEY') if self.api_key is None: + logger.warning("No API key provided") raise ValueError('No API key provided') + else: + logger.debug("Initialized FirecrawlApp with API key: %s", self.api_key) + self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev') + if self.api_url != 'https://api.firecrawl.dev': + logger.debug("Initialized FirecrawlApp with API URL: %s", self.api_url) + def scrape_url(self, url: str, params: Optional[Dict[str, Any]] = None) -> Any: """ Scrape the specified URL using the Firecrawl API. From d20af257baebbeea8fe907f9c3447e2e12eb1d1b Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 12 Jun 2024 15:38:41 -0300 Subject: [PATCH 06/12] Added jobId to webhook data --- apps/api/src/services/queue-worker.ts | 4 ++-- apps/api/src/services/webhook.ts | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts index 6772c57..a42b3e8 100644 --- a/apps/api/src/services/queue-worker.ts +++ b/apps/api/src/services/queue-worker.ts @@ -38,7 +38,7 @@ getWebScraperQueue().process( error: message /* etc... */, }; - await callWebhook(job.data.team_id, data); + await callWebhook(job.data.team_id, job.id as string, data); await logJob({ success: success, @@ -78,7 +78,7 @@ getWebScraperQueue().process( error: "Something went wrong... Contact help@mendable.ai or try again." /* etc... 
*/, }; - await callWebhook(job.data.team_id, data); + await callWebhook(job.data.team_id, job.id as string, data); await logJob({ success: false, message: typeof error === 'string' ? error : (error.message ?? "Something went wrong... Contact help@mendable.ai"), diff --git a/apps/api/src/services/webhook.ts b/apps/api/src/services/webhook.ts index 1f8d647..fc5962b 100644 --- a/apps/api/src/services/webhook.ts +++ b/apps/api/src/services/webhook.ts @@ -1,6 +1,6 @@ import { supabase_service } from "./supabase"; -export const callWebhook = async (teamId: string, data: any) => { +export const callWebhook = async (teamId: string, jobId: string,data: any) => { try { const selfHostedUrl = process.env.SELF_HOSTED_WEBHOOK_URL; const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; @@ -47,6 +47,7 @@ export const callWebhook = async (teamId: string, data: any) => { }, body: JSON.stringify({ success: data.success, + jobId: jobId, data: dataToSend, error: data.error || undefined, }), From 67dc46b454cb07d50ae3bc7fca219f597a009a83 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Wed, 12 Jun 2024 17:53:04 -0700 Subject: [PATCH 07/12] Nick: clusters --- .../src/__tests__/e2e_noAuth/index.test.ts | 1 - .../src/__tests__/e2e_withAuth/index.test.ts | 3 +- apps/api/src/index.ts | 331 ++++++++++-------- apps/api/src/services/redis.ts | 31 +- 4 files changed, 208 insertions(+), 158 deletions(-) diff --git a/apps/api/src/__tests__/e2e_noAuth/index.test.ts b/apps/api/src/__tests__/e2e_noAuth/index.test.ts index c443e71..acb2278 100644 --- a/apps/api/src/__tests__/e2e_noAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_noAuth/index.test.ts @@ -1,5 +1,4 @@ import request from "supertest"; -import { app } from "../../index"; import dotenv from "dotenv"; const fs = require("fs"); const path = require("path"); diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts index 02e4a47..431c7d1 100644 --- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts @@ -1,5 +1,4 @@ import request from "supertest"; -import { app } from "../../index"; import dotenv from "dotenv"; import { v4 as uuidv4 } from "uuid"; @@ -35,7 +34,7 @@ describe("E2E Tests for API Routes", () => { describe("POST /v0/scrape", () => { it.concurrent("should require authorization", async () => { - const response = await request(app).post("/v0/scrape"); + const response = await request(TEST_URL).post("/v0/scrape"); expect(response.statusCode).toBe(401); }); diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index cc8376b..6b62f06 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -5,190 +5,215 @@ import "dotenv/config"; import { getWebScraperQueue } from "./services/queue-service"; import { redisClient } from "./services/rate-limiter"; import { v0Router } from "./routes/v0"; -import { initSDK } from '@hyperdx/node-opentelemetry'; +import { initSDK } from "@hyperdx/node-opentelemetry"; +import cluster from "cluster"; +import os from "os"; const { createBullBoard } = require("@bull-board/api"); const { BullAdapter } = require("@bull-board/api/bullAdapter"); const { ExpressAdapter } = require("@bull-board/express"); -export const app = express(); +const numCPUs = os.cpus().length; +console.log(`Number of CPUs: ${numCPUs} available`); -global.isProduction = process.env.IS_PRODUCTION === "true"; +if (cluster.isMaster) { + console.log(`Master ${process.pid} is running`); -app.use(bodyParser.urlencoded({ extended: 
true })); -app.use(bodyParser.json({ limit: "10mb" })); + // Fork workers. + for (let i = 0; i < numCPUs; i++) { + cluster.fork(); + } -app.use(cors()); // Add this line to enable CORS - -const serverAdapter = new ExpressAdapter(); -serverAdapter.setBasePath(`/admin/${process.env.BULL_AUTH_KEY}/queues`); - -const { addQueue, removeQueue, setQueues, replaceQueues } = createBullBoard({ - queues: [new BullAdapter(getWebScraperQueue())], - serverAdapter: serverAdapter, -}); - -app.use( - `/admin/${process.env.BULL_AUTH_KEY}/queues`, - serverAdapter.getRouter() -); - -app.get("/", (req, res) => { - res.send("SCRAPERS-JS: Hello, world! Fly.io"); -}); - -//write a simple test function -app.get("/test", async (req, res) => { - res.send("Hello, world!"); -}); - -// register router -app.use(v0Router); - -const DEFAULT_PORT = process.env.PORT ?? 3002; -const HOST = process.env.HOST ?? "localhost"; -redisClient.connect(); - -// HyperDX OpenTelemetry -if(process.env.ENV === 'production') { - initSDK({ consoleCapture: true, additionalInstrumentations: []}); -} - - -export function startServer(port = DEFAULT_PORT) { - const server = app.listen(Number(port), HOST, () => { - console.log(`Server listening on port ${port}`); - console.log( - `For the UI, open http://${HOST}:${port}/admin/${process.env.BULL_AUTH_KEY}/queues` - ); - console.log(""); - console.log("1. Make sure Redis is running on port 6379 by default"); - console.log( - "2. If you want to run nango, make sure you do port forwarding in 3002 using ngrok http 3002 " - ); + cluster.on("exit", (worker, code, signal) => { + console.log(`Worker ${worker.process.pid} exited`); + console.log("Starting a new worker"); + cluster.fork(); }); - return server; -} +} else { + const app = express(); -if (require.main === module) { - startServer(); -} + global.isProduction = process.env.IS_PRODUCTION === "true"; -// Use this as a "health check" that way we dont destroy the server -app.get(`/admin/${process.env.BULL_AUTH_KEY}/queues`, async (req, res) => { - try { - const webScraperQueue = getWebScraperQueue(); - const [webScraperActive] = await Promise.all([ - webScraperQueue.getActiveCount(), - ]); + app.use(bodyParser.urlencoded({ extended: true })); + app.use(bodyParser.json({ limit: "10mb" })); - const noActiveJobs = webScraperActive === 0; - // 200 if no active jobs, 503 if there are active jobs - return res.status(noActiveJobs ? 200 : 500).json({ - webScraperActive, - noActiveJobs, - }); - } catch (error) { - console.error(error); - return res.status(500).json({ error: error.message }); + app.use(cors()); // Add this line to enable CORS + + const serverAdapter = new ExpressAdapter(); + serverAdapter.setBasePath(`/admin/${process.env.BULL_AUTH_KEY}/queues`); + + const { addQueue, removeQueue, setQueues, replaceQueues } = createBullBoard({ + queues: [new BullAdapter(getWebScraperQueue())], + serverAdapter: serverAdapter, + }); + + app.use( + `/admin/${process.env.BULL_AUTH_KEY}/queues`, + serverAdapter.getRouter() + ); + + app.get("/", (req, res) => { + res.send("SCRAPERS-JS: Hello, world! Fly.io"); + }); + + //write a simple test function + app.get("/test", async (req, res) => { + res.send("Hello, world!"); + }); + + // register router + app.use(v0Router); + + const DEFAULT_PORT = process.env.PORT ?? 3002; + const HOST = process.env.HOST ?? 
"localhost"; + redisClient.connect(); + + // HyperDX OpenTelemetry + if (process.env.ENV === "production") { + initSDK({ consoleCapture: true, additionalInstrumentations: [] }); } -}); -app.get(`/serverHealthCheck`, async (req, res) => { - try { - const webScraperQueue = getWebScraperQueue(); - const [waitingJobs] = await Promise.all([ - webScraperQueue.getWaitingCount(), - ]); - - const noWaitingJobs = waitingJobs === 0; - // 200 if no active jobs, 503 if there are active jobs - return res.status(noWaitingJobs ? 200 : 500).json({ - waitingJobs, + function startServer(port = DEFAULT_PORT) { + const server = app.listen(Number(port), HOST, () => { + console.log(`Worker ${process.pid} listening on port ${port}`); + console.log( + `For the UI, open http://${HOST}:${port}/admin/${process.env.BULL_AUTH_KEY}/queues` + ); + console.log(""); + console.log("1. Make sure Redis is running on port 6379 by default"); + console.log( + "2. If you want to run nango, make sure you do port forwarding in 3002 using ngrok http 3002 " + ); }); - } catch (error) { - console.error(error); - return res.status(500).json({ error: error.message }); + return server; } -}); -app.get('/serverHealthCheck/notify', async (req, res) => { - if (process.env.SLACK_WEBHOOK_URL) { - const treshold = 1; // The treshold value for the active jobs - const timeout = 60000; // 1 minute // The timeout value for the check in milliseconds + if (require.main === module) { + startServer(); + } - const getWaitingJobsCount = async () => { + // Use this as a "health check" that way we dont destroy the server + app.get(`/admin/${process.env.BULL_AUTH_KEY}/queues`, async (req, res) => { + try { const webScraperQueue = getWebScraperQueue(); - const [waitingJobsCount] = await Promise.all([ + const [webScraperActive] = await Promise.all([ + webScraperQueue.getActiveCount(), + ]); + + const noActiveJobs = webScraperActive === 0; + // 200 if no active jobs, 503 if there are active jobs + return res.status(noActiveJobs ? 200 : 500).json({ + webScraperActive, + noActiveJobs, + }); + } catch (error) { + console.error(error); + return res.status(500).json({ error: error.message }); + } + }); + + app.get(`/serverHealthCheck`, async (req, res) => { + try { + const webScraperQueue = getWebScraperQueue(); + const [waitingJobs] = await Promise.all([ webScraperQueue.getWaitingCount(), ]); - return waitingJobsCount; - }; + const noWaitingJobs = waitingJobs === 0; + // 200 if no active jobs, 503 if there are active jobs + return res.status(noWaitingJobs ? 
200 : 500).json({ + waitingJobs, + }); + } catch (error) { + console.error(error); + return res.status(500).json({ error: error.message }); + } + }); - res.status(200).json({ message: "Check initiated" }); + app.get("/serverHealthCheck/notify", async (req, res) => { + if (process.env.SLACK_WEBHOOK_URL) { + const treshold = 1; // The treshold value for the active jobs + const timeout = 60000; // 1 minute // The timeout value for the check in milliseconds - const checkWaitingJobs = async () => { - try { - let waitingJobsCount = await getWaitingJobsCount(); - if (waitingJobsCount >= treshold) { - setTimeout(async () => { - // Re-check the waiting jobs count after the timeout - waitingJobsCount = await getWaitingJobsCount(); - if (waitingJobsCount >= treshold) { - const slackWebhookUrl = process.env.SLACK_WEBHOOK_URL; - const message = { - text: `⚠️ Warning: The number of active jobs (${waitingJobsCount}) has exceeded the threshold (${treshold}) for more than ${timeout/60000} minute(s).`, - }; + const getWaitingJobsCount = async () => { + const webScraperQueue = getWebScraperQueue(); + const [waitingJobsCount] = await Promise.all([ + webScraperQueue.getWaitingCount(), + ]); - const response = await fetch(slackWebhookUrl, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(message), - }) - - if (!response.ok) { - console.error('Failed to send Slack notification') + return waitingJobsCount; + }; + + res.status(200).json({ message: "Check initiated" }); + + const checkWaitingJobs = async () => { + try { + let waitingJobsCount = await getWaitingJobsCount(); + if (waitingJobsCount >= treshold) { + setTimeout(async () => { + // Re-check the waiting jobs count after the timeout + waitingJobsCount = await getWaitingJobsCount(); + if (waitingJobsCount >= treshold) { + const slackWebhookUrl = process.env.SLACK_WEBHOOK_URL; + const message = { + text: `⚠️ Warning: The number of active jobs (${waitingJobsCount}) has exceeded the threshold (${treshold}) for more than ${ + timeout / 60000 + } minute(s).`, + }; + + const response = await fetch(slackWebhookUrl, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(message), + }); + + if (!response.ok) { + console.error("Failed to send Slack notification"); + } } - } - }, timeout); + }, timeout); + } + } catch (error) { + console.error(error); } - } catch (error) { - console.error(error); - } - }; + }; - checkWaitingJobs(); - } -}); + checkWaitingJobs(); + } + }); -app.get(`/admin/${process.env.BULL_AUTH_KEY}/clean-before-24h-complete-jobs`, async (req, res) => { - try { - const webScraperQueue = getWebScraperQueue(); - const completedJobs = await webScraperQueue.getJobs(['completed']); - const before24hJobs = completedJobs.filter(job => job.finishedOn < Date.now() - 24 * 60 * 60 * 1000); - const jobIds = before24hJobs.map(job => job.id) as string[]; - let count = 0; - for (const jobId of jobIds) { + app.get( + `/admin/${process.env.BULL_AUTH_KEY}/clean-before-24h-complete-jobs`, + async (req, res) => { try { - await webScraperQueue.removeJobs(jobId); - count++; - } catch (jobError) { - console.error(`Failed to remove job with ID ${jobId}:`, jobError); + const webScraperQueue = getWebScraperQueue(); + const completedJobs = await webScraperQueue.getJobs(["completed"]); + const before24hJobs = completedJobs.filter( + (job) => job.finishedOn < Date.now() - 24 * 60 * 60 * 1000 + ); + const jobIds = before24hJobs.map((job) => job.id) as string[]; + let count = 0; + for 
(const jobId of jobIds) { + try { + await webScraperQueue.removeJobs(jobId); + count++; + } catch (jobError) { + console.error(`Failed to remove job with ID ${jobId}:`, jobError); + } + } + res.status(200).send(`Removed ${count} completed jobs.`); + } catch (error) { + console.error("Failed to clean last 24h complete jobs:", error); + res.status(500).send("Failed to clean jobs"); } } - res.status(200).send(`Removed ${count} completed jobs.`); - } catch (error) { - console.error('Failed to clean last 24h complete jobs:', error); - res.status(500).send('Failed to clean jobs'); - } -}); + ); -app.get("/is-production", (req, res) => { - res.send({ isProduction: global.isProduction }); -}); + app.get("/is-production", (req, res) => { + res.send({ isProduction: global.isProduction }); + }); - -// /workers health check, cant act as load balancer, just has to be a pre deploy thing \ No newline at end of file + console.log(`Worker ${process.pid} started`); +} diff --git a/apps/api/src/services/redis.ts b/apps/api/src/services/redis.ts index f2cedd1..491eeb1 100644 --- a/apps/api/src/services/redis.ts +++ b/apps/api/src/services/redis.ts @@ -1,8 +1,35 @@ -import Redis from 'ioredis'; +import Redis from "ioredis"; // Initialize Redis client const redis = new Redis(process.env.REDIS_URL); +// Listen to 'error' events to the Redis connection +redis.on("error", (error) => { + try { + if (error.message === "ECONNRESET") { + console.log("Connection to Redis Session Store timed out."); + } else if (error.message === "ECONNREFUSED") { + console.log("Connection to Redis Session Store refused!"); + } else console.log(error); + } catch (error) {} +}); + +// Listen to 'reconnecting' event to Redis +redis.on("reconnecting", (err) => { + try { + if (redis.status === "reconnecting") + console.log("Reconnecting to Redis Session Store..."); + else console.log("Error reconnecting to Redis Session Store."); + } catch (error) {} +}); + +// Listen to the 'connect' event to Redis +redis.on("connect", (err) => { + try { + if (!err) console.log("Connected to Redis Session Store!"); + } catch (error) {} +}); + /** * Set a value in Redis with an optional expiration time. * @param {string} key The key under which to store the value. 
@@ -11,7 +38,7 @@ const redis = new Redis(process.env.REDIS_URL); */ const setValue = async (key: string, value: string, expire?: number) => { if (expire) { - await redis.set(key, value, 'EX', expire); + await redis.set(key, value, "EX", expire); } else { await redis.set(key, value); } From 11b6d5afa5285476d934900ee6e4db8b8f48710c Mon Sep 17 00:00:00 2001 From: Nicolas Date: Wed, 12 Jun 2024 18:00:22 -0700 Subject: [PATCH 08/12] Update fly.toml --- apps/api/fly.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/api/fly.toml b/apps/api/fly.toml index 6bc8266..468695d 100644 --- a/apps/api/fly.toml +++ b/apps/api/fly.toml @@ -54,7 +54,7 @@ kill_timeout = '5s' soft_limit = 12 [[vm]] - size = 'performance-8x' + size = 'performance-4x' processes = ['app'] From 182f8d4d6c3fbc9598d054d0c13aadbe1dba8b52 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Wed, 12 Jun 2024 18:07:05 -0700 Subject: [PATCH 09/12] Update index.ts --- apps/api/src/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 6b62f06..494b4d5 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -13,7 +13,7 @@ const { createBullBoard } = require("@bull-board/api"); const { BullAdapter } = require("@bull-board/api/bullAdapter"); const { ExpressAdapter } = require("@bull-board/express"); -const numCPUs = os.cpus().length; +const numCPUs = process.env.ENV === "local" ? 2 : os.cpus().length; console.log(`Number of CPUs: ${numCPUs} available`); if (cluster.isMaster) { From 676d6e8ab5f7a1fd14ff5b76f8289db7543082c4 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Thu, 13 Jun 2024 10:51:05 -0300 Subject: [PATCH 10/12] Added pageOptions.removeTags --- apps/api/openapi.json | 19 +++++++++++ .../src/__tests__/e2e_withAuth/index.test.ts | 34 +++++++++++++++++++ apps/api/src/controllers/crawl.ts | 10 ++++-- apps/api/src/controllers/crawlPreview.ts | 2 +- apps/api/src/controllers/search.ts | 2 ++ apps/api/src/lib/entities.ts | 1 + apps/api/src/scraper/WebScraper/index.ts | 7 +++- apps/api/src/scraper/WebScraper/single_url.ts | 13 +++++++ 8 files changed, 84 insertions(+), 4 deletions(-) diff --git a/apps/api/openapi.json b/apps/api/openapi.json index a755e37..b07e43f 100644 --- a/apps/api/openapi.json +++ b/apps/api/openapi.json @@ -61,6 +61,13 @@ "description": "Wait x amount of milliseconds for the page to load to fetch content", "default": 0 }, + "removeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tags, classes and ids to remove from the page. Use comma separated values. Example: 'script, .ad, #footer'" + }, "headers": { "type": "object", "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." @@ -194,6 +201,11 @@ "type": "integer", "description": "Maximum number of pages to crawl", "default": 10000 + }, + "allowBackwardCrawling": { + "type": "boolean", + "description": "Allow backward crawling (crawl from the base URL to the previous URLs)", + "default": false } } }, @@ -219,6 +231,13 @@ "type": "object", "description": "Headers to send with the request when scraping. Can be used to send cookies, user-agent, etc." }, + "removeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tags, classes and ids to remove from the page. Use comma separated values. 
Example: 'script, .ad, #footer'" + }, "replaceAllPathsWithAbsolutePaths": { "type": "boolean", "description": "Replace all relative paths with absolute paths for images and links", diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts index 02e4a47..3423b3a 100644 --- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts @@ -136,6 +136,40 @@ describe("E2E Tests for API Routes", () => { expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); }, 60000); // 60 seconds + it.concurrent("should return a successful response with a valid API key with removeTags option", async () => { + const responseWithoutRemoveTags = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://www.scrapethissite.com/" }); + expect(responseWithoutRemoveTags.statusCode).toBe(200); + expect(responseWithoutRemoveTags.body).toHaveProperty("data"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("content"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); + expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); + expect(responseWithoutRemoveTags.body.data.content).toContain("Scrape This Site"); + expect(responseWithoutRemoveTags.body.data.content).toContain("Lessons and Videos"); // #footer + expect(responseWithoutRemoveTags.body.data.content).toContain("[Sandbox]("); // .nav + expect(responseWithoutRemoveTags.body.data.content).toContain("web scraping"); // strong + + const response = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://www.scrapethissite.com/", pageOptions: { removeTags: ['.nav', '#footer', 'strong'] } }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.content).toContain("Scrape This Site"); + expect(response.body.data.content).not.toContain("Lessons and Videos"); // #footer + expect(response.body.data.content).not.toContain("[Sandbox]("); // .nav + expect(response.body.data.content).not.toContain("web scraping"); // strong + }, 30000); // 30 seconds timeout + // TODO: add this test back once we nail the waitFor option to be more deterministic // it.concurrent("should return a successful response with a valid API key and waitFor option", async () => { // const startTime = Date.now(); diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts index 58d01e2..7eab78f 100644 --- a/apps/api/src/controllers/crawl.ts +++ b/apps/api/src/controllers/crawl.ts @@ -55,8 +55,14 @@ export async function crawlController(req: Request, res: Response) { } const mode = req.body.mode ?? "crawl"; - const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false }; - const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false }; + const crawlerOptions = req.body.crawlerOptions ?? 
{ + allowBackwardCrawling: false + }; + const pageOptions = req.body.pageOptions ?? { + onlyMainContent: false, + includeHtml: false, + removeTags: [] + }; if (mode === "single_urls" && !url.includes(",")) { try { diff --git a/apps/api/src/controllers/crawlPreview.ts b/apps/api/src/controllers/crawlPreview.ts index d3e9afe..2c3dc4e 100644 --- a/apps/api/src/controllers/crawlPreview.ts +++ b/apps/api/src/controllers/crawlPreview.ts @@ -26,7 +26,7 @@ export async function crawlPreviewController(req: Request, res: Response) { const mode = req.body.mode ?? "crawl"; const crawlerOptions = req.body.crawlerOptions ?? {}; - const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false }; + const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false, removeTags: [] }; const job = await addWebScraperJob({ url: url, diff --git a/apps/api/src/controllers/search.ts b/apps/api/src/controllers/search.ts index 7474aae..abbc357 100644 --- a/apps/api/src/controllers/search.ts +++ b/apps/api/src/controllers/search.ts @@ -85,6 +85,7 @@ export async function searchHelper( onlyMainContent: pageOptions?.onlyMainContent ?? true, fetchPageContent: pageOptions?.fetchPageContent ?? true, includeHtml: pageOptions?.includeHtml ?? false, + removeTags: pageOptions?.removeTags ?? [], fallback: false, }, }); @@ -139,6 +140,7 @@ export async function searchController(req: Request, res: Response) { includeHtml: false, onlyMainContent: true, fetchPageContent: true, + removeTags: [], fallback: false, }; const origin = req.body.origin ?? "api"; diff --git a/apps/api/src/lib/entities.ts b/apps/api/src/lib/entities.ts index 81bf12c..92170c1 100644 --- a/apps/api/src/lib/entities.ts +++ b/apps/api/src/lib/entities.ts @@ -19,6 +19,7 @@ export type PageOptions = { screenshot?: boolean; headers?: Record; replaceAllPathsWithAbsolutePaths?: boolean; + removeTags?: string | string[]; }; export type ExtractorOptions = { diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts index f432f43..1a6ffd0 100644 --- a/apps/api/src/scraper/WebScraper/index.ts +++ b/apps/api/src/scraper/WebScraper/index.ts @@ -475,7 +475,12 @@ export class WebScraperDataProvider { this.limit = options.crawlerOptions?.limit ?? 10000; this.generateImgAltText = options.crawlerOptions?.generateImgAltText ?? false; - this.pageOptions = options.pageOptions ?? { onlyMainContent: false, includeHtml: false, replaceAllPathsWithAbsolutePaths: false }; + this.pageOptions = options.pageOptions ?? { + onlyMainContent: false, + includeHtml: false, + replaceAllPathsWithAbsolutePaths: false, + removeTags: [] + }; this.extractorOptions = options.extractorOptions ?? {mode: "markdown"} this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? options.pageOptions?.replaceAllPathsWithAbsolutePaths ?? false; //! @nicolas, for some reason this was being injected and breaking everything. 
Don't have time to find source of the issue so adding this check
diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts
index c2dcea1..a16f6f0 100644
--- a/apps/api/src/scraper/WebScraper/single_url.ts
+++ b/apps/api/src/scraper/WebScraper/single_url.ts
@@ -304,6 +304,19 @@ export async function scrapSingleUrl(
   const removeUnwantedElements = (html: string, pageOptions: PageOptions) => {
     const soup = cheerio.load(html);
     soup("script, style, iframe, noscript, meta, head").remove();
+
+    if (pageOptions.removeTags) {
+      if (typeof pageOptions.removeTags === 'string') {
+        pageOptions.removeTags.split(',').forEach((tag) => {
+          soup(tag.trim()).remove();
+        });
+      } else if (Array.isArray(pageOptions.removeTags)) {
+        pageOptions.removeTags.forEach((tag) => {
+          soup(tag).remove();
+        });
+      }
+    }
+
     if (pageOptions.onlyMainContent) {
       // remove any other tags that are not in the main content
       excludeNonMainTags.forEach((tag) => {

From 6963a490f1284d89756ce9f6290b5c654ae14b79 Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Fri, 14 Jun 2024 10:21:44 -0300
Subject: [PATCH 11/12] Updated version

---
 apps/python-sdk/firecrawl/__init__.py         |   2 +-
 .../test.cpython-311-pytest-8.2.1.pyc         | Bin 0 -> 44947 bytes
 2 files changed, 1 insertion(+), 1 deletion(-)
 create mode 100644 apps/python-sdk/firecrawl/__tests__/e2e_withAuth/__pycache__/test.cpython-311-pytest-8.2.1.pyc

diff --git a/apps/python-sdk/firecrawl/__init__.py b/apps/python-sdk/firecrawl/__init__.py
index 4e53e77..2fe16ba 100644
--- a/apps/python-sdk/firecrawl/__init__.py
+++ b/apps/python-sdk/firecrawl/__init__.py
@@ -13,7 +13,7 @@ import os
 
 from .firecrawl import FirecrawlApp
 
-__version__ = "0.0.14"
+__version__ = "0.0.15"
 
 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")
diff --git a/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/__pycache__/test.cpython-311-pytest-8.2.1.pyc b/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/__pycache__/test.cpython-311-pytest-8.2.1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5ba1f1324fe139772739cdae776d127cd5002ca8
GIT binary patch
literal 44947
[44947 bytes of base85-encoded compiled test bytecode omitted]
zMZ-${RScUa;)oF*U-p@MePSuPKXgNPx=nQ@xWvrMFCk-{sz`ELr^8Z9I%49lFGV-v zTASGLF!)fMF62^l%-^0HwG=%poGw=1httI)>}zX;x5YnR6mbvE6hps*UQJkQ152{aj8Tb~$=lffd;%TpGtxoD2W|jK_bb{; z(&qaewY>UZ%@OM(-g#|#)q3nc?^^8_i0jYrCqI#k!jUyFQau)N*V=NgwZS)0bhcrX zXlF@2j$bihZEXJJ*yh&Eto;6l8}-yi{+j>Xet%(*8ubJ54mDyiP1{8)ccEbK%H`_= zh&k)zCd#EoAc495LScHRkHvQOxqtm~4tpXprJ^_|rHHnfyqm!hZJ@Am%7T;0<3NVlnVFJPb~ESBo!Oxmg1j_i7c*{Y z)|s&DU8M_C2s?<#z1L=t!x0e(GgwbtD3uC!t~eI2r~MYQetY%a2K%h*mg@a^H%t7o zlHXX|`FD7u>!}$W2w^+ryzSPLj{WXz9-$1u&73Y-Hunu~{|yIUsH|enVWA$U+8+iR z=UW6`B5;JjD+Im`P^Z46sJ_(0;G+3HI6o)CzW|tfF6ZJXin4HMc&_25fJCdC!VW=$ z5Df^(P*3IEd=Xv(MRbu#oiXOeWFWhmGeCgu!}&{q@L0%gTL=G&`1}Xr)7_3I!oMeS z;S)fUICd$J&ICik6sYyhe*_(AN$&qNqs@WufB3#Px_2qLerfoLTOG#c$&Qb}gFQYGHT~S49yM$Sm*@!&$~~l~4?j@d;2*GfV$ORrlWRK$SP{Q0A|9-Rt)n84X3#wBv3`G#oSc-CG!Q( ze4!@bVgpti7!>h^Qjn=d5(CJ{Rt&03BoZ1KXPIUhtR!2>L2F3m4+42DnTI{|a81C) z2COzPDB=sHAXAGZ29S}h7*v-?Bs4P4GR-nrNw$)M){x2{1nMO$!ecw?$qTdjLK!2r zBfQ?$WoNF@NVrQ5{tytm9Q&%1FWdEQM%5EOPK?wLWY~h|lneO_yJpMzLcN>waw{Qu zlGQs(Zrz-n&0AtuV!g|vjgD6wjH`x2Gr;J6%DPS^OLfZkb~loqHLiwAr4Nen^aIAfW(|F*pfhCx+ft_ zg&$kJtAI0=Y^UC1zXu+YQf{Jrnr>+?fdd525unJ1;`CN=5RwRm@6Z-Qhr%9;$T|+~ zI&{dK=FlRNFs?ZyKdu|1tA&)gojfF4deQl>D55#M+YJo|_On=Qi9}5JKhIdr*OJk< zApR{GJqzOBlF_{&{w*0JUhrBnM!n#5lWP62?*p*7ncBRNSX%##XRKLDul-Z!N7sHf zT1{`SrMEBi-ZVNFI=!wJye$U_cD+zF4%Li9o^j}AV&H@KzfA1Bk=XfZ(R=Z&YU1r$ z;_U@=|fX{@f*m_f>noxt8L1@?9di~{>RV>@mc4EmFlWiXtIJ$cJuu(m5!1VaT5Bm_o* zgJ!HG8fAz^DXqZ%uGns3 Date: Fri, 14 Jun 2024 11:05:19 -0300 Subject: [PATCH 12/12] Fixed tests' message and updated version --- apps/python-sdk/firecrawl/__init__.py | 2 +- .../firecrawl/__tests__/e2e_withAuth/test.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/apps/python-sdk/firecrawl/__init__.py b/apps/python-sdk/firecrawl/__init__.py index 2fe16ba..fbb2bdb 100644 --- a/apps/python-sdk/firecrawl/__init__.py +++ b/apps/python-sdk/firecrawl/__init__.py @@ -13,7 +13,7 @@ import os from .firecrawl import FirecrawlApp -__version__ = "0.0.15" +__version__ = "0.0.16" # Define the logger for the Firecrawl project logger: logging.Logger = logging.getLogger("firecrawl") diff --git a/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py b/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py index 90a6498..452d498 100644 --- a/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py +++ b/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py @@ -27,14 +27,14 @@ def test_scrape_url_invalid_api_key(): invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") with pytest.raises(Exception) as excinfo: invalid_app.scrape_url('https://firecrawl.dev') - assert "Failed to scrape URL. Status code: 401" in str(excinfo.value) + assert "Unexpected error during scrape URL: Status code 401. Unauthorized: Invalid token" in str(excinfo.value) def test_blocklisted_url(): blocklisted_url = "https://facebook.com/fake-test" app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) with pytest.raises(Exception) as excinfo: app.scrape_url(blocklisted_url) - assert "Failed to scrape URL. Status code: 403" in str(excinfo.value) + assert "Unexpected error during scrape URL: Status code 403. Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." 
in str(excinfo.value) def test_successful_response_with_valid_preview_token(): app = FirecrawlApp(api_url=API_URL, api_key="this_is_just_a_preview_token") @@ -86,14 +86,14 @@ def test_crawl_url_invalid_api_key(): invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") with pytest.raises(Exception) as excinfo: invalid_app.crawl_url('https://firecrawl.dev') - assert "Unexpected error occurred while trying to start crawl job. Status code: 401" in str(excinfo.value) + assert "Unexpected error during start crawl job: Status code 401. Unauthorized: Invalid token" in str(excinfo.value) def test_should_return_error_for_blocklisted_url(): app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) blocklisted_url = "https://twitter.com/fake-test" with pytest.raises(Exception) as excinfo: app.crawl_url(blocklisted_url) - assert "Unexpected error occurred while trying to start crawl job. Status code: 403" in str(excinfo.value) + assert "Unexpected error during start crawl job: Status code 403. Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." in str(excinfo.value) def test_crawl_url_wait_for_completion_e2e(): app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) @@ -114,7 +114,7 @@ def test_crawl_url_with_idempotency_key_e2e(): with pytest.raises(Exception) as excinfo: app.crawl_url('https://firecrawl.dev', {'crawlerOptions': {'excludes': ['blog/*']}}, True, 2, uniqueIdempotencyKey) - assert "Failed to start crawl job. Status code: 409. Error: Idempotency key already used" in str(excinfo.value) + assert "Conflict: Failed to start crawl job due to a conflict. Idempotency key already used" in str(excinfo.value) def test_check_crawl_status_e2e(): app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) @@ -141,7 +141,7 @@ def test_search_invalid_api_key(): invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") with pytest.raises(Exception) as excinfo: invalid_app.search("test query") - assert "Failed to search. Status code: 401" in str(excinfo.value) + assert "Unexpected error during search: Status code 401. Unauthorized: Invalid token" in str(excinfo.value) def test_llm_extraction(): app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY)
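A few usage notes on changes earlier in this series. First, PATCH 05 ties the SDK's log level to the FIRECRAWL_LOGGING_LEVEL environment variable, and setup_logging() runs when the package is imported, so the variable has to be set before the import. A short sketch; the key is a placeholder:

    import os

    # Must happen before the import below: setup_logging() runs at import time.
    os.environ["FIRECRAWL_LOGGING_LEVEL"] = "DEBUG"

    from firecrawl import FirecrawlApp

    # Note that at DEBUG level the SDK logs the API key on init,
    # so prefer INFO or above outside local development.
    app = FirecrawlApp(api_key="fc-YOUR-KEY")  # placeholder key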
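Second, PATCH 06 adds jobId to the webhook payload next to the existing success, data, and error fields. A hypothetical stdlib receiver showing where the new field lands (the port and handling are illustrative only):

    import json
    from http.server import BaseHTTPRequestHandler, HTTPServer

    class WebhookHandler(BaseHTTPRequestHandler):
        def do_POST(self):
            length = int(self.headers["Content-Length"])
            payload = json.loads(self.rfile.read(length))
            # callWebhook now sends jobId alongside success, data, and error
            print(payload["jobId"], payload["success"])
            self.send_response(200)
            self.end_headers()

    HTTPServer(("", 8000), WebhookHandler).serve_forever()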
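Finally, PATCH 10's removeTags page option accepts either a list of selectors or a single comma-separated string (see removeUnwantedElements in single_url.ts). Routed through the Python SDK it would look roughly like this, assuming scrape_url forwards pageOptions from params the way the v0 API e2e tests exercise it; the key is a placeholder:

    from firecrawl import FirecrawlApp

    app = FirecrawlApp(api_key="fc-YOUR-KEY")  # placeholder key

    # Strip the nav, the footer, and every <strong> tag before conversion,
    # mirroring the selectors used in the new e2e test.
    data = app.scrape_url(
        "https://www.scrapethissite.com/",
        params={"pageOptions": {"removeTags": [".nav", "#footer", "strong"]}},
    )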