Merge branch 'main' into detect-pdfs
commit df6c3d1e7d

SELF_HOST.md (26 changed lines)
@@ -1,6 +1,30 @@
# Self-hosting Firecrawl

+*We're currently working on a more in-depth guide on how to self-host, but in the meantime, here is a simplified version.*

Refer to [CONTRIBUTING.md](https://github.com/mendableai/firecrawl/blob/main/CONTRIBUTING.md) for instructions on how to run it locally.

-*This repository is currently in its early stages of development. We are in the process of merging custom modules into this mono repository. The primary objective is to enhance the accuracy of LLM responses by utilizing clean data. It is not ready for full self-host yet - we're working on it*
+## Getting Started

+First, clone this repository and copy the example env file from the api folder (`.env.example`) to `.env`.
+```bash
+git clone https://github.com/mendableai/firecrawl.git
+cd firecrawl
+cp ./apps/api/.env.example ./.env
+```
+
+To run the simplest version of Firecrawl, edit `USE_DB_AUTHENTICATION` in `.env` so that database authentication is not used:
+```yml
+USE_DB_AUTHENTICATION=false
+```
+
+Update the Redis URL in the .env file to align with the Docker configuration:
+```yml
+REDIS_URL=redis://redis:6379
+```
+
+Once that's complete, run the following command to get started:
+```bash
+docker compose up
+```
+
+This will run a local instance of Firecrawl, which can be accessed at `http://localhost:3002`.
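Once the stack from the hunk above is up, a quick way to confirm the local instance responds is to send it a scrape request. This is a minimal sketch, not part of the commit: it assumes the `/v0/scrape` route shown later in this diff, the default port 3002, and that with `USE_DB_AUTHENTICATION=false` a placeholder bearer token is acceptable.

```bash
# Smoke test against a locally self-hosted instance (assumptions noted above).
curl -X POST http://localhost:3002/v0/scrape \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer test" \
  -d '{"url": "https://example.com"}'
```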
@@ -3,6 +3,7 @@ NUM_WORKERS_PER_QUEUE=8
PORT=3002
HOST=0.0.0.0
REDIS_URL=redis://localhost:6379
+PLAYWRIGHT_MICROSERVICE_URL=http://playwright-service:3000

## To turn on DB authentication, you need to set up supabase.
USE_DB_AUTHENTICATION=true

@@ -20,7 +21,6 @@ SCRAPING_BEE_API_KEY= # Set if you'd like to use ScrapingBee to handle JS blocking
OPENAI_API_KEY= # add for LLM-dependent features (image alt generation, etc.)
BULL_AUTH_KEY= #
LOGTAIL_KEY= # Use if you're configuring basic logging with logtail
-PLAYWRIGHT_MICROSERVICE_URL= # set if you'd like to run a playwright fallback
LLAMAPARSE_API_KEY= # Set if you have a LlamaParse key you'd like to use to parse pdfs
SERPER_API_KEY= # Set if you have a Serper key you'd like to use as a search api
SLACK_WEBHOOK_URL= # set if you'd like to send slack server health status messages
@@ -18,8 +18,8 @@
"paths": {
"/scrape": {
"post": {
-"summary": "Scrape a single URL",
+"summary": "Scrape a single URL and optionally extract information using an LLM",
-"operationId": "scrapeSingleUrl",
+"operationId": "scrapeAndExtractFromUrl",
"tags": ["Scraping"],
"security": [
{

@@ -45,8 +45,43 @@
"type": "boolean",
"description": "Only return the main content of the page excluding headers, navs, footers, etc.",
"default": false
+},
+"includeHtml": {
+"type": "boolean",
+"description": "Include the raw HTML content of the page. Will output an html key in the response.",
+"default": false
}
}
+},
+"extractorOptions": {
+"type": "object",
+"description": "Options for LLM-based extraction of structured information from the page content",
+"properties": {
+"mode": {
+"type": "string",
+"enum": ["llm-extraction"],
+"description": "The extraction mode to use, currently supports 'llm-extraction'"
+},
+"extractionPrompt": {
+"type": "string",
+"description": "A prompt describing what information to extract from the page"
+},
+"extractionSchema": {
+"type": "object",
+"additionalProperties": true,
+"description": "The schema for the data to be extracted",
+"required": [
+"company_mission",
+"supports_sso",
+"is_open_source"
+]
+}
+}
+},
+"timeout": {
+"type": "integer",
+"description": "Timeout in milliseconds for the request",
+"default": 30000
}
},
"required": ["url"]
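A request shaped after the new `extractorOptions` and `timeout` fields above might look as follows. This is an illustrative sketch only: the base URL and API key are placeholders, and the schema simply mirrors the example `required` fields listed in the spec.

```bash
curl -X POST https://api.firecrawl.dev/v0/scrape \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $FIRECRAWL_API_KEY" \
  -d '{
    "url": "https://mendable.ai",
    "timeout": 30000,
    "pageOptions": { "includeHtml": true },
    "extractorOptions": {
      "mode": "llm-extraction",
      "extractionPrompt": "Extract the company mission and whether it supports SSO and is open source.",
      "extractionSchema": {
        "type": "object",
        "properties": {
          "company_mission": { "type": "string" },
          "supports_sso": { "type": "boolean" },
          "is_open_source": { "type": "boolean" }
        },
        "required": ["company_mission", "supports_sso", "is_open_source"]
      }
    }
  }'
```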
@@ -126,9 +161,20 @@
"description": "If true, returns only the URLs as a list on the crawl status. Attention: the return response will be a list of URLs inside the data, not a list of documents.",
"default": false
},
+"maxDepth": {
+"type": "integer",
+"description": "Maximum depth to crawl. Depth 1 is the base URL, depth 2 is the base URL and its direct children, and so on."
+},
+"mode": {
+"type": "string",
+"enum": ["default", "fast"],
+"description": "The crawling mode to use. Fast mode crawls websites without a sitemap up to 4x faster, but may be less accurate and shouldn't be used on heavily JS-rendered websites.",
+"default": "default"
+},
"limit": {
"type": "integer",
-"description": "Maximum number of pages to crawl"
+"description": "Maximum number of pages to crawl",
+"default": 10000
}
}
},
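The new `maxDepth`, `mode`, and `limit` crawler options above combine in a single crawl request; a hedged example follows (placeholder base URL and key, with the target site and values taken from the e2e tests later in this commit).

```bash
curl -X POST https://api.firecrawl.dev/v0/crawl \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $FIRECRAWL_API_KEY" \
  -d '{
    "url": "https://flutterbricks.com",
    "crawlerOptions": {
      "maxDepth": 2,
      "mode": "fast",
      "limit": 100
    }
  }'
```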
@@ -139,6 +185,11 @@
"type": "boolean",
"description": "Only return the main content of the page excluding headers, navs, footers, etc.",
"default": false
+},
+"includeHtml": {
+"type": "boolean",
+"description": "Include the raw HTML content of the page. Will output an html key in the response.",
+"default": false
}
}
}
@@ -191,7 +242,7 @@
"query": {
"type": "string",
"format": "uri",
-"description": "The URL to scrape"
+"description": "The query to search for"
},
"pageOptions": {
"type": "object",
@@ -205,6 +256,11 @@
"type": "boolean",
"description": "Fetch the content of each page. If false, defaults to a basic fast serp API.",
"default": true
+},
+"includeHtml": {
+"type": "boolean",
+"description": "Include the raw HTML content of the page. Will output an html key in the response.",
+"default": false
}
}
},
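For completeness, a search request exercising the corrected `query` description and the new `includeHtml` page option could look like the sketch below. The `/v0/search` path is an assumption (the route name is not shown in this excerpt), and the base URL and key are placeholders.

```bash
curl -X POST https://api.firecrawl.dev/v0/search \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $FIRECRAWL_API_KEY" \
  -d '{
    "query": "firecrawl",
    "pageOptions": { "includeHtml": false }
  }'
```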
@@ -298,9 +354,66 @@
"data": {
"type": "array",
"items": {
-"$ref": "#/components/schemas/ScrapeResponse"
+"$ref": "#/components/schemas/CrawlStatusResponseObj"
},
"description": "Data returned from the job (null when it is in progress)"
+},
+"partial_data": {
+"type": "array",
+"items": {
+"$ref": "#/components/schemas/CrawlStatusResponseObj"
+},
+"description": "Partial documents returned while the crawl is in progress (streaming). When a page is ready it is appended to the partial_data array, so there is no need to wait for the entire website to be crawled."
+}
+}
+}
+}
+}
+},
+"402": {
+"description": "Payment required"
+},
+"429": {
+"description": "Too many requests"
+},
+"500": {
+"description": "Server error"
+}
+}
+}
+},
+"/crawl/cancel/{jobId}": {
+"delete": {
+"tags": ["Crawl"],
+"summary": "Cancel a crawl job",
+"operationId": "cancelCrawlJob",
+"security": [
+{
+"bearerAuth": []
+}
+],
+"parameters": [
+{
+"name": "jobId",
+"in": "path",
+"description": "ID of the crawl job",
+"required": true,
+"schema": {
+"type": "string"
+}
+}
+],
+"responses": {
+"200": {
+"description": "Successful response",
+"content": {
+"application/json": {
+"schema": {
+"type": "object",
+"properties": {
+"status": {
+"type": "string",
+"description": "Returns cancelled."
}
}
}
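The additions above mean a crawl job can be observed incrementally and cancelled from the outside. The sketch below uses the status route that appears in the e2e tests and the new cancel route; the job ID, base URL, and key are placeholders.

```bash
# Poll a crawl job; `partial_data` streams documents as individual pages finish,
# so results can be consumed before the whole crawl completes.
curl -X GET "https://api.firecrawl.dev/v0/crawl/status/<jobId>" \
  -H "Authorization: Bearer $FIRECRAWL_API_KEY"

# Cancel the same job; the response reports a status of "cancelled".
curl -X DELETE "https://api.firecrawl.dev/v0/crawl/cancel/<jobId>" \
  -H "Authorization: Bearer $FIRECRAWL_API_KEY"
```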
@@ -343,6 +456,11 @@
"content": {
"type": "string"
},
+"html": {
+"type": "string",
+"nullable": true,
+"description": "Raw HTML content of the page if `includeHtml` is true"
+},
"metadata": {
"type": "object",
"properties": {
@@ -366,6 +484,41 @@
}
}
},
+"CrawlStatusResponseObj": {
+"type": "object",
+"properties": {
+"markdown": {
+"type": "string"
+},
+"content": {
+"type": "string"
+},
+"html": {
+"type": "string",
+"nullable": true,
+"description": "Raw HTML content of the page if `includeHtml` is true"
+},
+"metadata": {
+"type": "object",
+"properties": {
+"title": {
+"type": "string"
+},
+"description": {
+"type": "string"
+},
+"language": {
+"type": "string",
+"nullable": true
+},
+"sourceURL": {
+"type": "string",
+"format": "uri"
+}
+}
+}
+}
+},
"SearchResponse": {
"type": "object",
"properties": {
@@ -33,6 +33,7 @@
"express": "^4.18.2",
"jest": "^29.6.3",
"jest-fetch-mock": "^3.0.3",
+"mammoth": "^1.7.2",
"nodemon": "^2.0.20",
"supabase": "^1.77.9",
"supertest": "^6.3.3",
apps/api/pnpm-lock.yaml (generated, 111 changed lines)

@@ -97,7 +97,7 @@ dependencies:
      version: 0.0.25
  langchain:
    specifier: ^0.1.25
-    version: 0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2)
+    version: 0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(mammoth@1.7.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2)
  languagedetect:
    specifier: ^2.0.0
    version: 2.0.0

@@ -214,6 +214,9 @@ devDependencies:
  jest-fetch-mock:
    specifier: ^3.0.3
    version: 3.0.3
+  mammoth:
+    specifier: ^1.7.2
+    version: 1.7.2
  nodemon:
    specifier: ^2.0.20
    version: 2.0.22

@@ -1765,6 +1768,10 @@ packages:
    dev: false
    optional: true

+  /@xmldom/xmldom@0.8.10:
+    resolution: {integrity: sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==}
+    engines: {node: '>=10.0.0'}
+
  /abbrev@1.1.1:
    resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==}
    dev: true

@@ -1895,7 +1902,6 @@ packages:
    resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==}
    dependencies:
      sprintf-js: 1.0.3
-    dev: true

  /argparse@2.0.1:
    resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}

@@ -2071,7 +2077,6 @@ packages:

  /base64-js@1.5.1:
    resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
-    dev: false

  /basic-ftp@5.0.5:
    resolution: {integrity: sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==}

@@ -2096,6 +2101,9 @@ packages:
    resolution: {integrity: sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==}
    dev: false

+  /bluebird@3.4.7:
+    resolution: {integrity: sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==}
+
  /body-parser@1.20.2:
    resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==}
    engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}

@@ -2421,6 +2429,9 @@ packages:
    resolution: {integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==}
    dev: true

+  /core-util-is@1.0.3:
+    resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
+
  /cors@2.8.5:
    resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==}
    engines: {node: '>= 0.10'}

@@ -2659,6 +2670,9 @@ packages:
      md5: 2.3.0
    dev: false

+  /dingbat-to-unicode@1.0.1:
+    resolution: {integrity: sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w==}
+
  /dom-serializer@2.0.0:
    resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==}
    dependencies:

@@ -2695,6 +2709,11 @@ packages:
    engines: {node: '>=12'}
    dev: false

+  /duck@0.1.12:
+    resolution: {integrity: sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg==}
+    dependencies:
+      underscore: 1.13.6
+
  /eastasianwidth@0.2.0:
    resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
    dev: false

@@ -3332,6 +3351,9 @@ packages:
    resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
    dev: true

+  /immediate@3.0.6:
+    resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==}
+
  /import-fresh@3.3.0:
    resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
    engines: {node: '>=6'}

@@ -3462,6 +3484,9 @@ packages:
    engines: {node: '>=8'}
    dev: true

+  /isarray@1.0.0:
+    resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
+
  /isexe@2.0.0:
    resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}

@@ -4049,6 +4074,14 @@ packages:
    engines: {node: '>=0.10.0'}
    dev: false

+  /jszip@3.10.1:
+    resolution: {integrity: sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==}
+    dependencies:
+      lie: 3.3.0
+      pako: 1.0.11
+      readable-stream: 2.3.8
+      setimmediate: 1.0.5
+
  /kareem@2.5.1:
    resolution: {integrity: sha512-7jFxRVm+jD+rkq3kY0iZDJfsO2/t4BBPeEb2qKn2lR/9KhuksYk5hxzfRYWMPV8P/x2d0kHD306YyWLzjjH+uA==}
    engines: {node: '>=12.0.0'}

@@ -4064,7 +4097,7 @@ packages:
    engines: {node: '>=6'}
    dev: true

-  /langchain@0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2):
+  /langchain@0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(mammoth@1.7.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2):
    resolution: {integrity: sha512-sfEChvr4H2CklHdSByNBbytwBrFhgtA5kPOnwcBrxuXGg1iOaTzhVxQA0QcNcQucI3hZrsNbZjxGp+Can1ooZQ==}
    engines: {node: '>=18'}
    peerDependencies:

@@ -4238,6 +4271,7 @@ packages:
      jsonpointer: 5.0.1
      langchainhub: 0.0.8
      langsmith: 0.1.13
+      mammoth: 1.7.2
      ml-distance: 4.0.1
      openapi-types: 12.1.3
      p-retry: 4.6.2

@@ -4344,6 +4378,11 @@ packages:
      type-check: 0.3.2
    dev: false

+  /lie@3.3.0:
+    resolution: {integrity: sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==}
+    dependencies:
+      immediate: 3.0.6
+
  /lines-and-columns@1.2.4:
    resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}

@@ -4380,6 +4419,13 @@ packages:
      - encoding
    dev: false

+  /lop@0.4.1:
+    resolution: {integrity: sha512-9xyho9why2A2tzm5aIcMWKvzqKsnxrf9B5I+8O30olh6lQU8PH978LqZoI4++37RBgS1Em5i54v1TFs/3wnmXQ==}
+    dependencies:
+      duck: 0.1.12
+      option: 0.2.4
+      underscore: 1.13.6
+
  /lru-cache@10.2.0:
    resolution: {integrity: sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==}
    engines: {node: 14 || >=16.14}

@@ -4423,6 +4469,22 @@ packages:
      tmpl: 1.0.5
    dev: true

+  /mammoth@1.7.2:
+    resolution: {integrity: sha512-MqWU2hcLf1I5QMKyAbfJCvrLxnv5WztrAQyorfZ+WPq7Hk82vZFmvfR2/64ajIPpM4jlq0TXp1xZvp/FFaL1Ug==}
+    engines: {node: '>=12.0.0'}
+    hasBin: true
+    dependencies:
+      '@xmldom/xmldom': 0.8.10
+      argparse: 1.0.10
+      base64-js: 1.5.1
+      bluebird: 3.4.7
+      dingbat-to-unicode: 1.0.1
+      jszip: 3.10.1
+      lop: 0.4.1
+      path-is-absolute: 1.0.1
+      underscore: 1.13.6
+      xmlbuilder: 10.1.1
+
  /md5@2.3.0:
    resolution: {integrity: sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==}
    dependencies:

@@ -4867,6 +4929,9 @@ packages:
    resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==}
    dev: false

+  /option@0.2.4:
+    resolution: {integrity: sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A==}
+
  /optionator@0.8.3:
    resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==}
    engines: {node: '>= 0.8.0'}

@@ -4957,6 +5022,9 @@ packages:
      netmask: 2.0.2
    dev: false

+  /pako@1.0.11:
+    resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==}
+
  /parent-module@1.0.1:
    resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
    engines: {node: '>=6'}

@@ -5002,7 +5070,6 @@ packages:
  /path-is-absolute@1.0.1:
    resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
    engines: {node: '>=0.10.0'}
-    dev: true

  /path-key@3.1.1:
    resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}

@@ -5095,6 +5162,9 @@ packages:
      react-is: 18.2.0
    dev: true

+  /process-nextick-args@2.0.1:
+    resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
+
  /progress@2.0.3:
    resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==}
    engines: {node: '>=0.4.0'}

@@ -5251,6 +5321,17 @@ packages:
    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
    dev: true

+  /readable-stream@2.3.8:
+    resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
+    dependencies:
+      core-util-is: 1.0.3
+      inherits: 2.0.4
+      isarray: 1.0.0
+      process-nextick-args: 2.0.1
+      safe-buffer: 5.1.2
+      string_decoder: 1.1.1
+      util-deprecate: 1.0.2
+
  /readdirp@3.6.0:
    resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
    engines: {node: '>=8.10.0'}

@@ -5347,6 +5428,9 @@ packages:
    resolution: {integrity: sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==}
    dev: false

+  /safe-buffer@5.1.2:
+    resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
+
  /safe-buffer@5.2.1:
    resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}

@@ -5460,6 +5544,9 @@ packages:
      gopd: 1.0.1
      has-property-descriptors: 1.0.2

+  /setimmediate@1.0.5:
+    resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==}
+
  /setprototypeof@1.2.0:
    resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}

@@ -5562,7 +5649,6 @@ packages:

  /sprintf-js@1.0.3:
    resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==}
-    dev: true

  /sprintf-js@1.1.3:
    resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==}

@@ -5631,6 +5717,11 @@ packages:
      strip-ansi: 7.1.0
    dev: false

+  /string_decoder@1.1.1:
+    resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
+    dependencies:
+      safe-buffer: 5.1.2
+
  /strip-ansi@6.0.1:
    resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
    engines: {node: '>=8'}

@@ -5975,7 +6066,6 @@ packages:

  /underscore@1.13.6:
    resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==}
-    dev: false

  /undici-types@5.26.5:
    resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}

@@ -6022,6 +6112,9 @@ packages:
    resolution: {integrity: sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==}
    dev: false

+  /util-deprecate@1.0.2:
+    resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
+
  /utils-merge@1.0.1:
    resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==}
    engines: {node: '>= 0.4.0'}

@@ -6182,6 +6275,10 @@ packages:
      xmlbuilder: 11.0.1
    dev: false

+  /xmlbuilder@10.1.1:
+    resolution: {integrity: sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg==}
+    engines: {node: '>=4.0'}
+
  /xmlbuilder@11.0.1:
    resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==}
    engines: {node: '>=4.0'}
|
@ -176,7 +176,274 @@ describe("E2E Tests for API Routes", () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Additional tests for insufficient credits?
|
it("should return a successful response with a valid API key and valid includes option", async () => {
|
||||||
|
const crawlResponse = await request(TEST_URL)
|
||||||
|
.post("/v0/crawl")
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
.set("Content-Type", "application/json")
|
||||||
|
.send({
|
||||||
|
url: "https://mendable.ai",
|
||||||
|
limit: 10,
|
||||||
|
crawlerOptions: {
|
||||||
|
includes: ["blog/*"],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
let response;
|
||||||
|
let isFinished = false;
|
||||||
|
|
||||||
|
while (!isFinished) {
|
||||||
|
response = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
expect(response.statusCode).toBe(200);
|
||||||
|
expect(response.body).toHaveProperty("status");
|
||||||
|
isFinished = response.body.status === "completed";
|
||||||
|
|
||||||
|
if (!isFinished) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const completedResponse = response;
|
||||||
|
|
||||||
|
const urls = completedResponse.body.data.map(
|
||||||
|
(item: any) => item.metadata?.sourceURL
|
||||||
|
);
|
||||||
|
expect(urls.length).toBeGreaterThan(5);
|
||||||
|
urls.forEach((url: string) => {
|
||||||
|
console.log({url})
|
||||||
|
expect(url.startsWith("https://www.mendable.ai/blog/")).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(completedResponse.statusCode).toBe(200);
|
||||||
|
expect(completedResponse.body).toHaveProperty("status");
|
||||||
|
expect(completedResponse.body.status).toBe("completed");
|
||||||
|
expect(completedResponse.body).toHaveProperty("data");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("content");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
|
expect(completedResponse.body.data[0].content).toContain("Mendable");
|
||||||
|
}, 60000); // 60 seconds
|
||||||
|
|
||||||
|
it("should return a successful response with a valid API key and valid excludes option", async () => {
|
||||||
|
const crawlResponse = await request(TEST_URL)
|
||||||
|
.post("/v0/crawl")
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
.set("Content-Type", "application/json")
|
||||||
|
.send({
|
||||||
|
url: "https://mendable.ai",
|
||||||
|
limit: 10,
|
||||||
|
crawlerOptions: {
|
||||||
|
excludes: ["blog/*"],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
let isFinished = false;
|
||||||
|
let response;
|
||||||
|
|
||||||
|
while (!isFinished) {
|
||||||
|
response = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
expect(response.statusCode).toBe(200);
|
||||||
|
expect(response.body).toHaveProperty("status");
|
||||||
|
isFinished = response.body.status === "completed";
|
||||||
|
|
||||||
|
if (!isFinished) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const completedResponse = response;
|
||||||
|
|
||||||
|
const urls = completedResponse.body.data.map(
|
||||||
|
(item: any) => item.metadata?.sourceURL
|
||||||
|
);
|
||||||
|
expect(urls.length).toBeGreaterThan(5);
|
||||||
|
urls.forEach((url: string) => {
|
||||||
|
expect(url.startsWith("https://wwww.mendable.ai/blog/")).toBeFalsy();
|
||||||
|
});
|
||||||
|
}, 60000); // 60 seconds
|
||||||
|
|
||||||
|
it("should return a successful response with a valid API key and limit to 3", async () => {
|
||||||
|
const crawlResponse = await request(TEST_URL)
|
||||||
|
.post("/v0/crawl")
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
.set("Content-Type", "application/json")
|
||||||
|
.send({
|
||||||
|
url: "https://mendable.ai",
|
||||||
|
crawlerOptions: { limit: 3 },
|
||||||
|
});
|
||||||
|
|
||||||
|
let isFinished = false;
|
||||||
|
let response;
|
||||||
|
|
||||||
|
while (!isFinished) {
|
||||||
|
response = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
expect(response.statusCode).toBe(200);
|
||||||
|
expect(response.body).toHaveProperty("status");
|
||||||
|
isFinished = response.body.status === "completed";
|
||||||
|
|
||||||
|
if (!isFinished) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const completedResponse = response;
|
||||||
|
|
||||||
|
expect(completedResponse.statusCode).toBe(200);
|
||||||
|
expect(completedResponse.body).toHaveProperty("status");
|
||||||
|
expect(completedResponse.body.status).toBe("completed");
|
||||||
|
expect(completedResponse.body).toHaveProperty("data");
|
||||||
|
expect(completedResponse.body.data.length).toBe(3);
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("content");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
|
expect(completedResponse.body.data[0].content).toContain("Mendable");
|
||||||
|
}, 60000); // 60 seconds
|
||||||
|
|
||||||
|
it("should return a successful response with max depth option for a valid crawl job", async () => {
|
||||||
|
const crawlResponse = await request(TEST_URL)
|
||||||
|
.post("/v0/crawl")
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
.set("Content-Type", "application/json")
|
||||||
|
.send({
|
||||||
|
url: "https://www.scrapethissite.com",
|
||||||
|
crawlerOptions: { maxDepth: 2 },
|
||||||
|
});
|
||||||
|
expect(crawlResponse.statusCode).toBe(200);
|
||||||
|
|
||||||
|
const response = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
expect(response.statusCode).toBe(200);
|
||||||
|
expect(response.body).toHaveProperty("status");
|
||||||
|
expect(response.body.status).toBe("active");
|
||||||
|
// wait for 60 seconds
|
||||||
|
await new Promise((r) => setTimeout(r, 60000));
|
||||||
|
const completedResponse = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
expect(completedResponse.statusCode).toBe(200);
|
||||||
|
expect(completedResponse.body).toHaveProperty("status");
|
||||||
|
expect(completedResponse.body.status).toBe("completed");
|
||||||
|
expect(completedResponse.body).toHaveProperty("data");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("content");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
|
const urls = completedResponse.body.data.map(
|
||||||
|
(item: any) => item.metadata?.sourceURL
|
||||||
|
);
|
||||||
|
expect(urls.length).toBeGreaterThan(1);
|
||||||
|
|
||||||
|
// Check if all URLs have a maximum depth of 1
|
||||||
|
urls.forEach((url: string) => {
|
||||||
|
const depth = new URL(url).pathname.split("/").filter(Boolean).length;
|
||||||
|
expect(depth).toBeLessThanOrEqual(1);
|
||||||
|
});
|
||||||
|
}, 120000);
|
||||||
|
|
||||||
|
// it("should return a successful response with a valid API key and valid limit option", async () => {
|
||||||
|
// const crawlResponse = await request(TEST_URL)
|
||||||
|
// .post("/v0/crawl")
|
||||||
|
// .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
// .set("Content-Type", "application/json")
|
||||||
|
// .send({
|
||||||
|
// url: "https://mendable.ai",
|
||||||
|
// crawlerOptions: { limit: 10 },
|
||||||
|
// });
|
||||||
|
|
||||||
|
// const response = await request(TEST_URL)
|
||||||
|
// .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
// .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
// expect(response.statusCode).toBe(200);
|
||||||
|
// expect(response.body).toHaveProperty("status");
|
||||||
|
// expect(response.body.status).toBe("active");
|
||||||
|
|
||||||
|
// let isCompleted = false;
|
||||||
|
// while (!isCompleted) {
|
||||||
|
// const statusCheckResponse = await request(TEST_URL)
|
||||||
|
// .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
// .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
// expect(statusCheckResponse.statusCode).toBe(200);
|
||||||
|
// isCompleted = statusCheckResponse.body.status === "completed";
|
||||||
|
// if (!isCompleted) {
|
||||||
|
// await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// const completedResponse = await request(TEST_URL)
|
||||||
|
// .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
// .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
// expect(completedResponse.statusCode).toBe(200);
|
||||||
|
// expect(completedResponse.body).toHaveProperty("status");
|
||||||
|
// expect(completedResponse.body.status).toBe("completed");
|
||||||
|
// expect(completedResponse.body).toHaveProperty("data");
|
||||||
|
// expect(completedResponse.body.data.length).toBe(10);
|
||||||
|
// expect(completedResponse.body.data[0]).toHaveProperty("content");
|
||||||
|
// expect(completedResponse.body.data[0]).toHaveProperty("markdown");
|
||||||
|
// expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
|
// expect(completedResponse.body.data[0].content).toContain("Mendable");
|
||||||
|
// expect(completedResponse.body.data[0].content).not.toContain("main menu");
|
||||||
|
// }, 60000); // 60 seconds
|
||||||
|
|
||||||
|
it("should return a successful response for a valid crawl job with includeHtml set to true option", async () => {
|
||||||
|
const crawlResponse = await request(TEST_URL)
|
||||||
|
.post("/v0/crawl")
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
.set("Content-Type", "application/json")
|
||||||
|
.send({
|
||||||
|
url: "https://firecrawl.dev",
|
||||||
|
pageOptions: { includeHtml: true },
|
||||||
|
});
|
||||||
|
expect(crawlResponse.statusCode).toBe(200);
|
||||||
|
|
||||||
|
const response = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
expect(response.statusCode).toBe(200);
|
||||||
|
expect(response.body).toHaveProperty("status");
|
||||||
|
expect(response.body.status).toBe("active");
|
||||||
|
|
||||||
|
let isCompleted = false;
|
||||||
|
while (!isCompleted) {
|
||||||
|
const statusCheckResponse = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
expect(statusCheckResponse.statusCode).toBe(200);
|
||||||
|
isCompleted = statusCheckResponse.body.status === "completed";
|
||||||
|
if (!isCompleted) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const completedResponse = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
expect(completedResponse.statusCode).toBe(200);
|
||||||
|
expect(completedResponse.body).toHaveProperty("status");
|
||||||
|
expect(completedResponse.body.status).toBe("completed");
|
||||||
|
expect(completedResponse.body).toHaveProperty("data");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("content");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
|
|
||||||
|
// 120 seconds
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("html");
|
||||||
|
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
|
||||||
|
expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
|
||||||
|
expect(completedResponse.body.data[0].markdown).toContain("FireCrawl");
|
||||||
|
expect(completedResponse.body.data[0].html).toContain("<h1");
|
||||||
|
}, 60000);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("POST /v0/crawlWebsitePreview", () => {
|
describe("POST /v0/crawlWebsitePreview", () => {
|
||||||
@@ -206,6 +473,16 @@ describe("E2E Tests for API Routes", () => {
    //   expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
    // });

+    it("should return a timeout error when scraping takes longer than the specified timeout", async () => {
+      const response = await request(TEST_URL)
+        .post("/v0/scrape")
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+        .set("Content-Type", "application/json")
+        .send({ url: "https://firecrawl.dev", timeout: 1000 });
+
+      expect(response.statusCode).toBe(408);
+    }, 3000);
+
    it("should return a successful response with a valid API key", async () => {
      const response = await request(TEST_URL)
        .post("/v0/crawlWebsitePreview")
@@ -268,7 +545,7 @@ describe("E2E Tests for API Routes", () => {
      expect(response.statusCode).toBe(404);
    });

-    it("should return a successful response for a valid crawl job", async () => {
+    it("should return a successful crawl status response for a valid crawl job", async () => {
      const crawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
|||||||
.send({ url: "https://firecrawl.dev" });
|
.send({ url: "https://firecrawl.dev" });
|
||||||
expect(crawlResponse.statusCode).toBe(200);
|
expect(crawlResponse.statusCode).toBe(200);
|
||||||
|
|
||||||
|
let isCompleted = false;
|
||||||
|
let completedResponse;
|
||||||
|
|
||||||
|
while (!isCompleted) {
|
||||||
const response = await request(TEST_URL)
|
const response = await request(TEST_URL)
|
||||||
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
||||||
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
expect(response.statusCode).toBe(200);
|
expect(response.statusCode).toBe(200);
|
||||||
expect(response.body).toHaveProperty("status");
|
expect(response.body).toHaveProperty("status");
|
||||||
expect(response.body.status).toBe("active");
|
|
||||||
|
|
||||||
// wait for 30 seconds
|
if (response.body.status === "completed") {
|
||||||
await new Promise((r) => setTimeout(r, 30000));
|
isCompleted = true;
|
||||||
|
completedResponse = response;
|
||||||
const completedResponse = await request(TEST_URL)
|
} else {
|
||||||
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
|
await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
|
||||||
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
}
|
||||||
expect(completedResponse.statusCode).toBe(200);
|
}
|
||||||
expect(completedResponse.body).toHaveProperty("status");
|
expect(completedResponse.body).toHaveProperty("status");
|
||||||
expect(completedResponse.body.status).toBe("completed");
|
expect(completedResponse.body.status).toBe("completed");
|
||||||
expect(completedResponse.body).toHaveProperty("data");
|
expect(completedResponse.body).toHaveProperty("data");
|
||||||
@@ -563,6 +843,107 @@ describe("E2E Tests for API Routes", () => {
    //   }, 120000); // 120 secs
    // });

+  describe("POST /v0/crawl with fast mode", () => {
+    it("should complete the crawl under 20 seconds", async () => {
+      const startTime = Date.now();
+
+      const crawlResponse = await request(TEST_URL)
+        .post("/v0/crawl")
+        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+        .set("Content-Type", "application/json")
+        .send({
+          url: "https://flutterbricks.com",
+          crawlerOptions: {
+            mode: "fast"
+          }
+        });
+
+      expect(crawlResponse.statusCode).toBe(200);
+
+      const jobId = crawlResponse.body.jobId;
+      let statusResponse;
+      let isFinished = false;
+
+      while (!isFinished) {
+        statusResponse = await request(TEST_URL)
+          .get(`/v0/crawl/status/${jobId}`)
+          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+        expect(statusResponse.statusCode).toBe(200);
+        isFinished = statusResponse.body.status === "completed";
+
+        if (!isFinished) {
+          await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+        }
+      }
+
+      const endTime = Date.now();
+      const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds
+
+      console.log(`Time elapsed: ${timeElapsed} seconds`);
+
+      expect(statusResponse.body.status).toBe("completed");
+      expect(statusResponse.body).toHaveProperty("data");
+      expect(statusResponse.body.data[0]).toHaveProperty("content");
+      expect(statusResponse.body.data[0]).toHaveProperty("markdown");
+      const results = statusResponse.body.data;
+      // results.forEach((result, i) => {
+      //   console.log(result.metadata.sourceURL);
+      // });
+      expect(results.length).toBeGreaterThanOrEqual(10);
+      expect(results.length).toBeLessThanOrEqual(15);
+
+    }, 20000);
+
+    // it("should complete the crawl in more than 10 seconds", async () => {
+    //   const startTime = Date.now();
+
+    //   const crawlResponse = await request(TEST_URL)
+    //     .post("/v0/crawl")
+    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+    //     .set("Content-Type", "application/json")
+    //     .send({
+    //       url: "https://flutterbricks.com",
+    //     });
+
+    //   expect(crawlResponse.statusCode).toBe(200);
+
+    //   const jobId = crawlResponse.body.jobId;
+    //   let statusResponse;
+    //   let isFinished = false;
+
+    //   while (!isFinished) {
+    //     statusResponse = await request(TEST_URL)
+    //       .get(`/v0/crawl/status/${jobId}`)
+    //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+    //     expect(statusResponse.statusCode).toBe(200);
+    //     isFinished = statusResponse.body.status === "completed";
+
+    //     if (!isFinished) {
+    //       await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+    //     }
+    //   }
+
+    //   const endTime = Date.now();
+    //   const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds
+
+    //   console.log(`Time elapsed: ${timeElapsed} seconds`);
+
+    //   expect(statusResponse.body.status).toBe("completed");
+    //   expect(statusResponse.body).toHaveProperty("data");
+    //   expect(statusResponse.body.data[0]).toHaveProperty("content");
+    //   expect(statusResponse.body.data[0]).toHaveProperty("markdown");
+    //   const results = statusResponse.body.data;
+    //   // results.forEach((result, i) => {
+    //   //   console.log(result.metadata.sourceURL);
+    //   // });
+    //   expect(results.length).toBeGreaterThanOrEqual(10);
+    //   expect(results.length).toBeLessThanOrEqual(15);
+
+    // }, 50000); // 15 seconds timeout to account for network delays
+  });
+
  describe("GET /is-production", () => {
    it("should return the production status", async () => {
      const response = await request(TEST_URL).get("/is-production");
@@ -15,6 +15,7 @@ export async function scrapeHelper(
  crawlerOptions: any,
  pageOptions: PageOptions,
  extractorOptions: ExtractorOptions,
+  timeout: number
): Promise<{
  success: boolean;
  error?: string;

@@ -30,7 +31,6 @@ export async function scrapeHelper(
    return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 };
  }

-
  const a = new WebScraperDataProvider();
  await a.setOptions({
    mode: "single_urls",

@@ -42,7 +42,19 @@ export async function scrapeHelper(
    extractorOptions: extractorOptions,
  });

-  const docs = await a.getDocuments(false);
+  const timeoutPromise = new Promise<{ success: boolean; error?: string; returnCode: number }>((_, reject) =>
+    setTimeout(() => reject({ success: false, error: "Request timed out. Increase the timeout by passing `timeout` param to the request.", returnCode: 408 }), timeout)
+  );
+
+  const docsPromise = a.getDocuments(false);
+
+  let docs;
+  try {
+    docs = await Promise.race([docsPromise, timeoutPromise]);
+  } catch (error) {
+    return error;
+  }
+
  // make sure doc.content is not empty
  const filteredDocs = docs.filter(
    (doc: { content?: string }) => doc.content && doc.content.trim().length > 0

@@ -51,12 +63,11 @@ export async function scrapeHelper(
    return { success: true, error: "No page found", returnCode: 200 };
  }

-
  let creditsToBeBilled = filteredDocs.length;
  const creditsPerLLMExtract = 5;

  if (extractorOptions.mode === "llm-extraction") {
-    creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length)
+    creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length);
  }

  const billingResult = await billTeam(
@ -96,6 +107,7 @@ export async function scrapeController(req: Request, res: Response) {
|
|||||||
mode: "markdown"
|
mode: "markdown"
|
||||||
}
|
}
|
||||||
const origin = req.body.origin ?? "api";
|
const origin = req.body.origin ?? "api";
|
||||||
|
const timeout = req.body.timeout ?? 30000; // Default timeout of 30 seconds
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
|
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
|
||||||
@ -114,6 +126,7 @@ export async function scrapeController(req: Request, res: Response) {
|
|||||||
crawlerOptions,
|
crawlerOptions,
|
||||||
pageOptions,
|
pageOptions,
|
||||||
extractorOptions,
|
extractorOptions,
|
||||||
|
timeout
|
||||||
);
|
);
|
||||||
const endTime = new Date().getTime();
|
const endTime = new Date().getTime();
|
||||||
const timeTakenInSeconds = (endTime - startTime) / 1000;
|
const timeTakenInSeconds = (endTime - startTime) / 1000;
|
||||||
|
@@ -24,7 +24,7 @@ function prepareOpenAIDoc(

export async function generateOpenAICompletions({
client,
- model = "gpt-4-turbo",
+ model = "gpt-4o",
document,
schema, //TODO - add zod dynamic type checking
prompt = defaultPrompt,
@@ -44,6 +44,7 @@ export type WebScraperOptions = {
limit?: number;
generateImgAltText?: boolean;
replaceAllPathsWithAbsolutePaths?: boolean;
+ mode?: "default" | "fast"; // have a mode of some sort
};
pageOptions?: PageOptions;
extractorOptions?: ExtractorOptions;
@@ -17,8 +17,10 @@ export async function startWebScraperPipeline({
crawlerOptions: job.data.crawlerOptions,
pageOptions: job.data.pageOptions,
inProgress: (progress) => {
+ if (progress.currentDocument) {
partialDocs.push(progress.currentDocument);
job.progress({ ...progress, partialDocs: partialDocs });
+ }
},
onSuccess: (result) => {
job.moveToCompleted(result);
@@ -27,7 +29,7 @@ export async function startWebScraperPipeline({
job.moveToFailed(error);
},
team_id: job.data.team_id,
- bull_job_id: job.id.toString()
+ bull_job_id: job.id.toString(),
})) as { success: boolean; message: string; docs: Document[] };
}
export async function runWebScraper({
@@ -63,26 +65,25 @@ export async function runWebScraper({
urls: [url],
crawlerOptions: crawlerOptions,
pageOptions: pageOptions,
- bullJobId: bull_job_id
+ bullJobId: bull_job_id,
});
} else {
await provider.setOptions({
mode: mode,
urls: url.split(","),
crawlerOptions: crawlerOptions,
- pageOptions: pageOptions
+ pageOptions: pageOptions,
});
}
const docs = (await provider.getDocuments(false, (progress: Progress) => {
inProgress(progress);
})) as Document[];

if (docs.length === 0) {
return {
success: true,
message: "No pages found",
- docs: []
+ docs: [],
};
}

@@ -95,18 +96,14 @@ export async function runWebScraper({
})
: docs.filter((doc) => doc.content.trim().length > 0);

- const billingResult = await billTeam(
- team_id,
- filteredDocs.length
- );
+ const billingResult = await billTeam(team_id, filteredDocs.length);

if (!billingResult.success) {
// throw new Error("Failed to bill team, no subscription was found");
return {
success: false,
message: "Failed to bill team, no subscription was found",
- docs: []
+ docs: [],
};
}

@@ -4,7 +4,7 @@ import { URL } from "url";
import { getLinksFromSitemap } from "./sitemap";
import async from "async";
import { Progress } from "../../lib/entities";
- import { scrapWithScrapingBee } from "./single_url";
+ import { scrapSingleUrl, scrapWithScrapingBee } from "./single_url";
import robotsParser from "robots-parser";

export class WebCrawler {
@@ -15,7 +15,7 @@ export class WebCrawler {
private maxCrawledLinks: number;
private maxCrawledDepth: number;
private visited: Set<string> = new Set();
- private crawledUrls: Set<string> = new Set();
+ private crawledUrls: Map<string, string> = new Map();
private limit: number;
private robotsTxtUrl: string;
private robots: any;
@@ -51,7 +51,6 @@ export class WebCrawler {
this.generateImgAltText = generateImgAltText ?? false;
}

private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
return sitemapLinks
.filter((link) => {
@@ -77,9 +76,22 @@ export class WebCrawler {

// Check if the link matches the include patterns, if any are specified
if (this.includes.length > 0 && this.includes[0] !== "") {
- return this.includes.some((includePattern) =>
+ if (!this.includes.some((includePattern) =>
new RegExp(includePattern).test(path)
- );
+ )) {
+ return false;
+ }
+ }
+
+ // Normalize the initial URL and the link to account for www and non-www versions
+ const normalizedInitialUrl = new URL(this.initialUrl);
+ const normalizedLink = new URL(link);
+ const initialHostname = normalizedInitialUrl.hostname.replace(/^www\./, '');
+ const linkHostname = normalizedLink.hostname.replace(/^www\./, '');
+
+ // Ensure the protocol and hostname match, and the path starts with the initial URL's path
+ if (linkHostname !== initialHostname || !normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+ return false;
}

const isAllowed = this.robots.isAllowed(link, "FireCrawlAgent") ?? true;
@@ -99,19 +111,21 @@ export class WebCrawler {
concurrencyLimit: number = 5,
limit: number = 10000,
maxDepth: number = 10
- ): Promise<string[]> {
+ ): Promise<{ url: string, html: string }[]> {
// Fetch and parse robots.txt
try {
const response = await axios.get(this.robotsTxtUrl);
this.robots = robotsParser(this.robotsTxtUrl, response.data);
} catch (error) {
console.error(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`);
}

const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl);
if (sitemapLinks.length > 0) {
- const filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth);
- return filteredLinks;
+ let filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth);
+ return filteredLinks.map(link => ({ url: link, html: "" }));
}

const urls = await this.crawlUrls(
@@ -123,18 +137,20 @@ export class WebCrawler {
urls.length === 0 &&
this.filterLinks([this.initialUrl], limit, this.maxCrawledDepth).length > 0
) {
- return [this.initialUrl];
+ return [{ url: this.initialUrl, html: "" }];
}

// make sure to run include exclude here again
- return this.filterLinks(urls, limit, this.maxCrawledDepth);
+ const filteredUrls = this.filterLinks(urls.map(urlObj => urlObj.url), limit, this.maxCrawledDepth);
+ return filteredUrls.map(url => ({ url, html: urls.find(urlObj => urlObj.url === url)?.html || "" }));
}

private async crawlUrls(
urls: string[],
concurrencyLimit: number,
- inProgress?: (progress: Progress) => void
+ inProgress?: (progress: Progress) => void,
- ): Promise<string[]> {
+ ): Promise<{ url: string, html: string }[]> {
const queue = async.queue(async (task: string, callback) => {
if (this.crawledUrls.size >= this.maxCrawledLinks) {
if (callback && typeof callback === "function") {
@@ -143,13 +159,26 @@ export class WebCrawler {
return;
}
const newUrls = await this.crawl(task);
- newUrls.forEach((url) => this.crawledUrls.add(url));
+ // add the initial url if not already added
+ // if (this.visited.size === 1) {
+ // let normalizedInitial = this.initialUrl;
+ // if (!normalizedInitial.endsWith("/")) {
+ // normalizedInitial = normalizedInitial + "/";
+ // }
+ // if (!newUrls.some(page => page.url === this.initialUrl)) {
+ // newUrls.push({ url: this.initialUrl, html: "" });
+ // }
+ // }
+
+ newUrls.forEach((page) => this.crawledUrls.set(page.url, page.html));

if (inProgress && newUrls.length > 0) {
inProgress({
current: this.crawledUrls.size,
total: this.maxCrawledLinks,
status: "SCRAPING",
- currentDocumentUrl: newUrls[newUrls.length - 1],
+ currentDocumentUrl: newUrls[newUrls.length - 1].url,
});
} else if (inProgress) {
inProgress({
@@ -159,7 +188,7 @@ export class WebCrawler {
currentDocumentUrl: task,
});
}
- await this.crawlUrls(newUrls, concurrencyLimit, inProgress);
+ await this.crawlUrls(newUrls.map((p) => p.url), concurrencyLimit, inProgress);
if (callback && typeof callback === "function") {
callback();
}
@@ -175,34 +204,48 @@ export class WebCrawler {
}
);
await queue.drain();
- return Array.from(this.crawledUrls);
+ return Array.from(this.crawledUrls.entries()).map(([url, html]) => ({ url, html }));
}

- async crawl(url: string): Promise<string[]> {
+ async crawl(url: string): Promise<{url: string, html: string}[]> {
- if (this.visited.has(url) || !this.robots.isAllowed(url, "FireCrawlAgent"))
+ if (this.visited.has(url) || !this.robots.isAllowed(url, "FireCrawlAgent")){
return [];
+ }
this.visited.add(url);

if (!url.startsWith("http")) {
url = "https://" + url;
}
if (url.endsWith("/")) {
url = url.slice(0, -1);
}

if (this.isFile(url) || this.isSocialMediaOrEmail(url)) {
return [];
}

try {
- let content;
+ let content : string = "";
- // If it is the first link, fetch with scrapingbee
+ // If it is the first link, fetch with single url
if (this.visited.size === 1) {
- content = await scrapWithScrapingBee(url, "load");
+ const page = await scrapSingleUrl(url, {includeHtml: true});
+ content = page.html ?? ""
} else {
const response = await axios.get(url);
- content = response.data;
+ content = response.data ?? "";
}
const $ = load(content);
- let links: string[] = [];
+ let links: {url: string, html: string}[] = [];
+
+ // Add the initial URL to the list of links
+ if(this.visited.size === 1)
+ {
+ links.push({url, html: content});
+ }

$("a").each((_, element) => {
const href = $(element).attr("href");
@@ -215,7 +258,6 @@ export class WebCrawler {
const path = url.pathname;

if (
- // fullUrl.startsWith(this.initialUrl) && // this condition makes it stop crawling back the url
this.isInternalLink(fullUrl) &&
this.matchesPattern(fullUrl) &&
this.noSections(fullUrl) &&
@@ -223,12 +265,16 @@ export class WebCrawler {
!this.matchesExcludes(path) &&
this.robots.isAllowed(fullUrl, "FireCrawlAgent")
) {
- links.push(fullUrl);
+ links.push({url: fullUrl, html: content});
}
}
});

- return links.filter((link) => !this.visited.has(link));
+ if(this.visited.size === 1){
+ return links;
+ }
+ // Create a new list to return to avoid modifying the visited list
+ return links.filter((link) => !this.visited.has(link.url));
} catch (error) {
return [];
}
@@ -275,7 +321,7 @@ export class WebCrawler {
".mp4",
".mp3",
".pptx",
- ".docx",
+ // ".docx",
".xlsx",
".xml",
];
@@ -294,18 +340,57 @@ export class WebCrawler {
return socialMediaOrEmail.some((ext) => url.includes(ext));
}

+ //
private async tryFetchSitemapLinks(url: string): Promise<string[]> {
+ const normalizeUrl = (url: string) => {
+ url = url.replace(/^https?:\/\//, "").replace(/^www\./, "");
+ if (url.endsWith("/")) {
+ url = url.slice(0, -1);
+ }
+ return url;
+ };
+
const sitemapUrl = url.endsWith("/sitemap.xml")
? url
: `${url}/sitemap.xml`;
+
+ let sitemapLinks: string[] = [];
+
try {
const response = await axios.get(sitemapUrl);
if (response.status === 200) {
- return await getLinksFromSitemap(sitemapUrl);
+ sitemapLinks = await getLinksFromSitemap(sitemapUrl);
}
} catch (error) {
// Error handling for failed sitemap fetch
+ // console.error(`Failed to fetch sitemap from ${sitemapUrl}: ${error}`);
}
- return [];
+
+ if (sitemapLinks.length === 0) {
+ // If the first one doesn't work, try the base URL
+ const baseUrlSitemap = `${this.baseUrl}/sitemap.xml`;
+ try {
+ const response = await axios.get(baseUrlSitemap);
+ if (response.status === 200) {
+ sitemapLinks = await getLinksFromSitemap(baseUrlSitemap);
+ }
+ } catch (error) {
+ // Error handling for failed base URL sitemap fetch
+ // console.error(`Failed to fetch sitemap from ${baseUrlSitemap}: ${error}`);
+ }
+ }
+
+ // Normalize and check if the URL is present in any of the sitemaps
+ const normalizedUrl = normalizeUrl(url);
+
+ const normalizedSitemapLinks = sitemapLinks.map(link => normalizeUrl(link));
+
+ // has to be greater than 0 to avoid adding the initial URL to the sitemap links, and preventing crawler to crawl
+ if (!normalizedSitemapLinks.includes(normalizedUrl) && sitemapLinks.length > 0) {
+ // do not push the normalized url
+ sitemapLinks.push(url);
+ }
+
+ return sitemapLinks;
}
}
@@ -17,6 +17,7 @@ import {
} from "./utils/replacePaths";
import { generateCompletions } from "../../lib/LLM-extraction";
import { getWebScraperQueue } from "../../../src/services/queue-service";
+ import { fetchAndProcessDocx } from "./utils/docxProcessor";

export class WebScraperDataProvider {
private bullJobId: string;
@@ -35,6 +36,7 @@ export class WebScraperDataProvider {
private replaceAllPathsWithAbsolutePaths?: boolean = false;
private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" =
"gpt-4-turbo";
+ private crawlerMode: string = "default";

authorize(): void {
throw new Error("Method not implemented.");
@@ -46,7 +48,8 @@ export class WebScraperDataProvider {

private async convertUrlsToDocuments(
urls: string[],
- inProgress?: (progress: Progress) => void
+ inProgress?: (progress: Progress) => void,
+ allHtmls?: string[]
): Promise<Document[]> {
const totalUrls = urls.length;
let processedUrls = 0;
@@ -56,7 +59,12 @@ export class WebScraperDataProvider {
const batchUrls = urls.slice(i, i + this.concurrentRequests);
await Promise.all(
batchUrls.map(async (url, index) => {
- const result = await scrapSingleUrl(url, this.pageOptions);
+ const existingHTML = allHtmls ? allHtmls[i + index] : "";
+ const result = await scrapSingleUrl(
+ url,
+ this.pageOptions,
+ existingHTML
+ );
processedUrls++;
if (inProgress) {
inProgress({
@@ -127,9 +135,30 @@ export class WebScraperDataProvider {
}
}

+ private async cleanIrrelevantPath(links: string[]) {
+ return links.filter((link) => {
+ const normalizedInitialUrl = new URL(this.urls[0]);
+ const normalizedLink = new URL(link);
+
+ // Normalize the hostname to account for www and non-www versions
+ const initialHostname = normalizedInitialUrl.hostname.replace(
+ /^www\./,
+ ""
+ );
+ const linkHostname = normalizedLink.hostname.replace(/^www\./, "");
+
+ // Ensure the protocol and hostname match, and the path starts with the initial URL's path
+ return (
+ linkHostname === initialHostname &&
+ normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)
+ );
+ });
+ }
+
private async handleCrawlMode(
inProgress?: (progress: Progress) => void
): Promise<Document[]> {

const crawler = new WebCrawler({
initialUrl: this.urls[0],
includes: this.includes,
@@ -139,13 +168,30 @@ export class WebScraperDataProvider {
limit: this.limit,
generateImgAltText: this.generateImgAltText,
});
- let links = await crawler.start(inProgress, 5, this.limit, this.maxCrawledDepth);
+ let links = await crawler.start(
+ inProgress,
+ 5,
+ this.limit,
+ this.maxCrawledDepth
+ );
+
+ let allLinks = links.map((e) => e.url);
+ const allHtmls = links.map((e) => e.html);
+
if (this.returnOnlyUrls) {
- return this.returnOnlyUrlsResponse(links, inProgress);
+ return this.returnOnlyUrlsResponse(allLinks, inProgress);
}

- let documents = await this.processLinks(links, inProgress);
- return this.cacheAndFinalizeDocuments(documents, links);
+ let documents = [];
+ // check if fast mode is enabled and there is html inside the links
+ if (this.crawlerMode === "fast" && links.some((link) => link.html)) {
+ documents = await this.processLinks(allLinks, inProgress, allHtmls);
+ } else {
+ documents = await this.processLinks(allLinks, inProgress);
+ }
+
+ return this.cacheAndFinalizeDocuments(documents, allLinks);
}

private async handleSingleUrlsMode(
@@ -161,6 +207,8 @@ export class WebScraperDataProvider {
inProgress?: (progress: Progress) => void
): Promise<Document[]> {
let links = await getLinksFromSitemap(this.urls[0]);
+ links = await this.cleanIrrelevantPath(links);
+
if (this.returnOnlyUrls) {
return this.returnOnlyUrlsResponse(links, inProgress);
}
@@ -189,16 +237,26 @@ export class WebScraperDataProvider {

private async processLinks(
links: string[],
- inProgress?: (progress: Progress) => void
+ inProgress?: (progress: Progress) => void,
+ allHtmls?: string[]
): Promise<Document[]> {
- let pdfLinks = links.filter((link) => link.endsWith(".pdf"));
- let pdfDocuments = await this.fetchPdfDocuments(pdfLinks);
- links = links.filter((link) => !link.endsWith(".pdf"));
- let documents = await this.convertUrlsToDocuments(links, inProgress);
+ const pdfLinks = links.filter(link => link.endsWith(".pdf"));
+ const docLinks = links.filter(link => link.endsWith(".doc") || link.endsWith(".docx"));
+
+ const pdfDocuments = await this.fetchPdfDocuments(pdfLinks);
+ const docxDocuments = await this.fetchDocxDocuments(docLinks);
+
+ links = links.filter(link => !pdfLinks.includes(link) && !docLinks.includes(link));
+
+ let documents = await this.convertUrlsToDocuments(
+ links,
+ inProgress,
+ allHtmls
+ );
documents = await this.getSitemapData(this.urls[0], documents);

documents = this.applyPathReplacements(documents);
- documents = await this.applyImgAltText(documents);
+ // documents = await this.applyImgAltText(documents);

if (
this.extractorOptions.mode === "llm-extraction" &&
@@ -206,7 +264,7 @@ export class WebScraperDataProvider {
) {
documents = await generateCompletions(documents, this.extractorOptions);
}
- return documents.concat(pdfDocuments);
+ return documents.concat(pdfDocuments).concat(docxDocuments);
}

private async fetchPdfDocuments(pdfLinks: string[]): Promise<Document[]> {
@@ -221,6 +279,18 @@ export class WebScraperDataProvider {
})
);
}
+ private async fetchDocxDocuments(docxLinks: string[]): Promise<Document[]> {
+ return Promise.all(
+ docxLinks.map(async (p) => {
+ const docXDocument = await fetchAndProcessDocx(p);
+ return {
+ content: docXDocument,
+ metadata: { sourceURL: p },
+ provider: "web-scraper",
+ };
+ })
+ );
+ }

private applyPathReplacements(documents: Document[]): Document[] {
return this.replaceAllPathsWithAbsolutePaths
@@ -397,9 +467,9 @@ export class WebScraperDataProvider {
this.pageOptions = options.pageOptions ?? { onlyMainContent: false, includeHtml: false };
this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
- //! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
+ //! @nicolas, for some reason this was being injected and breaking everything. Don't have time to find source of the issue so adding this check
this.excludes = this.excludes.filter((item) => item !== "");
+ this.crawlerMode = options.crawlerOptions?.mode ?? "default";

// make sure all urls start with https://
this.urls = this.urls.map((url) => {
@@ -118,7 +118,8 @@ export async function scrapWithPlaywright(url: string): Promise<string> {

export async function scrapSingleUrl(
urlToScrap: string,
- pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false }
+ pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false },
+ existingHtml: string = ""
): Promise<Document> {
urlToScrap = urlToScrap.trim();

@@ -215,8 +216,15 @@ export async function scrapSingleUrl(
: ["scrapingBee", "playwright", "scrapingBeeLoad", "fetch"];

for (const scraper of scrapersInOrder) {
+ // If exists text coming from crawler, use it
+ if (existingHtml && existingHtml.trim().length >= 100) {
+ let cleanedHtml = removeUnwantedElements(existingHtml, pageOptions);
+ text = await parseMarkdown(cleanedHtml);
+ html = existingHtml;
+ break;
+ }
[text, html] = await attemptScraping(urlToScrap, scraper);
- if (text && text.length >= 100) break;
+ if (text && text.trim().length >= 100) break;
console.log(`Falling back to ${scraper}`);
}
@@ -0,0 +1,13 @@
import * as docxProcessor from "../docxProcessor";

describe("DOCX Processing Module - Integration Test", () => {
  it("should correctly process a simple DOCX file without the LLAMAPARSE_API_KEY", async () => {
    delete process.env.LLAMAPARSE_API_KEY;
    const docxContent = await docxProcessor.fetchAndProcessDocx(
      "https://nvca.org/wp-content/uploads/2019/06/NVCA-Model-Document-Stock-Purchase-Agreement.docx"
    );
    expect(docxContent.trim()).toContain(
      "SERIES A PREFERRED STOCK PURCHASE AGREEMENT"
    );
  });
});

apps/api/src/scraper/WebScraper/utils/docxProcessor.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import axios from "axios";
import fs from "fs";
import { createWriteStream } from "node:fs";
import path from "path";
import os from "os";
import mammoth from "mammoth";

export async function fetchAndProcessDocx(url: string): Promise<string> {
  const tempFilePath = await downloadDocx(url);
  const content = await processDocxToText(tempFilePath);
  fs.unlinkSync(tempFilePath); // Clean up the temporary file
  return content;
}

async function downloadDocx(url: string): Promise<string> {
  const response = await axios({
    url,
    method: "GET",
    responseType: "stream",
  });

  const tempFilePath = path.join(os.tmpdir(), `tempDocx-${Date.now()}.docx`);
  const writer = createWriteStream(tempFilePath);

  response.data.pipe(writer);

  return new Promise((resolve, reject) => {
    writer.on("finish", () => resolve(tempFilePath));
    writer.on("error", reject);
  });
}

export async function processDocxToText(filePath: string): Promise<string> {
  const content = await extractTextFromDocx(filePath);
  return content;
}

async function extractTextFromDocx(filePath: string): Promise<string> {
  const result = await mammoth.extractRawText({ path: filePath });
  return result.value;
}
@@ -26,7 +26,7 @@ getWebScraperQueue().process(
success: success,
result: {
links: docs.map((doc) => {
- return { content: doc, source: doc.metadata.sourceURL };
+ return { content: doc, source: doc?.metadata?.sourceURL ?? doc?.url ?? "" };
}),
},
project_id: job.data.project_id,
@@ -43,7 +43,7 @@ export const crawlStatusRateLimiter = new RateLimiterRedis({
export const testSuiteRateLimiter = new RateLimiterRedis({
storeClient: redisClient,
keyPrefix: "middleware",
- points: 1000,
+ points: 100000,
duration: 60, // Duration in seconds
});

@@ -240,7 +240,7 @@ export default class FirecrawlApp {
* @param {string} action - The action being performed when the error occurred.
*/
handleError(response, action) {
- if ([402, 409, 500].includes(response.status)) {
+ if ([402, 408, 409, 500].includes(response.status)) {
const errorMessage = response.data.error || "Unknown error occurred";
throw new Error(`Failed to ${action}. Status code: ${response.status}. Error: ${errorMessage}`);
}
@@ -1,6 +1,6 @@
{
"name": "@mendable/firecrawl-js",
- "version": "0.0.20",
+ "version": "0.0.21",
"description": "JavaScript SDK for Firecrawl API",
"main": "build/index.js",
"types": "types/index.d.ts",
@@ -109,7 +109,7 @@ export default class FirecrawlApp {
const response: AxiosResponse = await axios.post(
"https://api.firecrawl.dev/v0/scrape",
jsonData,
- { headers }
+ { headers },
);
if (response.status === 200) {
const responseData = response.data;
@@ -324,7 +324,7 @@ export default class FirecrawlApp {
* @param {string} action - The action being performed when the error occurred.
*/
handleError(response: AxiosResponse, action: string): void {
- if ([402, 409, 500].includes(response.status)) {
+ if ([402, 408, 409, 500].includes(response.status)) {
const errorMessage: string =
response.data.error || "Unknown error occurred";
throw new Error(
@@ -48,7 +48,7 @@ class FirecrawlApp:
return response['data']
else:
raise Exception(f'Failed to scrape URL. Error: {response["error"]}')
- elif response.status_code in [402, 409, 500]:
+ elif response.status_code in [402, 408, 409, 500]:
error_message = response.json().get('error', 'Unknown error occurred')
raise Exception(f'Failed to scrape URL. Status code: {response.status_code}. Error: {error_message}')
else:
@@ -148,7 +148,7 @@ class FirecrawlApp:
self._handle_error(status_response, 'check crawl status')

def _handle_error(self, response, action):
- if response.status_code in [402, 409, 500]:
+ if response.status_code in [402, 408, 409, 500]:
error_message = response.json().get('error', 'Unknown error occurred')
raise Exception(f'Failed to {action}. Status code: {response.status_code}. Error: {error_message}')
else:

apps/test-suite/data/crawl.json (new file, 178 lines)
@@ -0,0 +1,178 @@
[
  {
    "website": "https://www.vellum.ai/llm-leaderboard",
    "expected_min_num_of_pages": 1,
    "expected_crawled_pages": ["https://www.vellum.ai/llm-leaderboard"]
  },
  {
    "website": "https://openai.com/news",
    "expected_min_num_of_pages": 4,
    "expected_crawled_pages": [
      "https://openai.com/news/company/",
      "https://openai.com/news/research/",
      "https://openai.com/news/safety-and-alignment/",
      "https://openai.com/news/stories/"
    ]
  },
  {
    "website": "https://www.framer.com/pricing",
    "expected_min_num_of_pages": 1,
    "expected_not_crawled_pages": [
      "https://www.framer.com/features/navigation/",
      "https://www.framer.com/contact/",
      "https://www.framer.com/add-ons/",
      "https://www.framer.com/free-saas-ui-kit/",
      "https://www.framer.com/help/",
      "https://www.framer.com/features/effects/",
      "https://www.framer.com/enterprise/",
      "https://www.framer.com/templates/"
    ]
  },
  {
    "website": "https://mendable.ai/pricing",
    "expected_min_num_of_pages": 1,
    "expected_not_crawled_pages": [
      "https://mendable.ai/",
      "https://mendable.ai/blog",
      "https://mendable.ai/signin",
      "https://mendable.ai/signup",
      "https://mendable.ai",
      "https://mendable.ai/usecases/sales-enablement",
      "https://mendable.ai/usecases/documentation",
      "https://mendable.ai/usecases/cs-enablement",
      "https://mendable.ai/usecases/productcopilot",
      "https://mendable.ai/security"
    ],
    "notes": "This one should not go backwards, but it does!"
  },
  {
    "website": "https://agentops.ai/blog",
    "expected_min_num_of_pages": 6,
    "expected_crawled_pages": [
      "https://www.agentops.ai/blog/effortless-hr-management-with-saas",
      "https://www.agentops.ai/blog/streamlining-hr-with-saas",
      "https://www.agentops.ai/blog/simplify-hr-with-modern-saas-solutions",
      "https://www.agentops.ai/blog/efficient-hr-operations-with-saas",
      "https://www.agentops.ai/blog/hr-made-simple-with-saas",
      "https://agentops.ai/blog"
    ],
    "expected_not_crawled_pages": [
      "https://agentops.ai/about-us",
      "https://agentops.ai/contact-us"
    ]
  },
  {
    "website": "https://en.wikipedia.org/wiki/T._N._Seshan",
    "expected_min_num_of_pages": 1,
    "expected_not_crawled_pages": [
      "https://en.wikipedia.org/wiki/Wikipedia:Contents",
      "https://en.wikipedia.org/wiki/Wikipedia:Contact_us",
      "https://en.wikipedia.org/wiki/V._S._Ramadevi",
      "https://en.wikipedia.org/wiki/Wikipedia:About",
      "https://en.wikipedia.org/wiki/Help:Introduction",
      "https://en.wikipedia.org/wiki/H._D._Deve_Gowda",
      "https://en.wikipedia.org/wiki/File:T.N._Seshan_in_1994.jpg"
    ]
  },
  {
    "website": "https://ycombinator.com/companies",
    "expected_min_num_of_pages": 20,
    "expected_crawled_pages": [
      "https://www.ycombinator.com/companies/industry/elearning",
      "https://www.ycombinator.com/companies/industry/computer-vision",
      "https://www.ycombinator.com/companies/industry/health-tech",
      "https://www.ycombinator.com/companies/industry/education",
      "https://www.ycombinator.com/companies/industry/robotics",
      "https://www.ycombinator.com/companies/industry/hardware",
      "https://www.ycombinator.com/companies/industry/saas",
      "https://www.ycombinator.com/companies/industry/hard-tech",
      "https://www.ycombinator.com/companies/industry/developer-tools",
      "https://www.ycombinator.com/companies/industry/entertainment",
      "https://www.ycombinator.com/companies/industry/finance",
      "https://www.ycombinator.com/companies/industry/generative-ai",
      "https://www.ycombinator.com/companies/industry/machine-learning"
    ]
  },
  {
    "website": "https://firecrawl.dev",
    "expected_min_num_of_pages": 2,
    "expected_crawled_pages": [
      "https://firecrawl.dev/",
      "https://firecrawl.dev/pricing"
    ]
  },
  {
    "website": "https://fly.io/docs/gpus/gpu-quickstart",
    "expected_min_num_of_pages": 1,
    "expected_not_crawled_pages": [
      "https://fly.io/docs/getting-started/",
      "https://fly.io/docs/hands-on/",
      "https://fly.io/docs/about/support/",
      "https://fly.io/docs/blueprints/going-to-production-with-healthcare-apps/",
      "https://fly.io/docs/machines/flyctl/fly-machine-update/",
      "https://fly.io/docs/blueprints/review-apps-guide/",
      "https://fly.io/docs/blueprints/supercronic/"
    ],
    "notes": "This one should not go backwards, but it does!"
  },
  {
    "website": "https://www.instructables.com/circuits",
    "expected_min_num_of_pages": 12,
    "expected_crawled_pages": [
      "https://www.instructables.com/circuits/",
      "https://www.instructables.com/circuits/apple/projects/",
      "https://www.instructables.com/circuits/art/projects/",
      "https://www.instructables.com/circuits/electronics/projects/",
      "https://www.instructables.com/circuits/microsoft/projects/",
      "https://www.instructables.com/circuits/microcontrollers/projects/",
      "https://www.instructables.com/circuits/community/",
      "https://www.instructables.com/circuits/leds/projects/",
      "https://www.instructables.com/circuits/gadgets/projects/",
      "https://www.instructables.com/circuits/arduino/projects/",
      "https://www.instructables.com/circuits/lasers/projects/",
      "https://www.instructables.com/circuits/clocks/projects/"
    ]
  },
  {
    "website": "https://richmondconfidential.org",
    "expected_min_num_of_pages": 20,
    "expected_crawled_pages": [
      "https://richmondconfidential.org/2009/10/13/salesians-star-guard-has-a-big-impact/",
      "https://richmondconfidential.org/2009/10/13/on-team-of-beginners-oilers-old-hand-stands-out/",
      "https://richmondconfidential.org/2009/10/19/point-richmond-clockmaker-turns-clutter-into-crafts/",
      "https://richmondconfidential.org/2009/10/13/profile-maurice-cathy/",
      "https://richmondconfidential.org/2009/10/13/soul-food-rescue-mission-rebuilds-diets-and-lives/",
      "https://richmondconfidential.org/2009/10/21/in-tough-economy-pain-trickles-to-the-bottom/",
      "https://richmondconfidential.org/2009/10/19/richmond-homicide-map/",
      "https://richmondconfidential.org/2009/10/13/rough-roads-for-richmonds-cab-drivers/",
      "https://richmondconfidential.org/2009/10/13/before-napa-there-was-winehaven/",
      "https://richmondconfidential.org/2009/10/13/family-calls-for-end-to-violence-at-memorial-for-slain-woman-friend/"
    ]
  },
  {
    "website": "https://www.boardgamegeek.com",
    "expected_min_num_of_pages": 15,
    "expected_crawled_pages": [
      "https://www.boardgamegeek.com/browse/boardgameartist",
      "https://www.boardgamegeek.com/browse/boardgamehonor",
      "https://www.boardgamegeek.com/browse/boardgamepublisher",
      "https://www.boardgamegeek.com/browse/boardgamepodcast",
      "https://www.boardgamegeek.com/wiki/page/Index",
      "https://www.boardgamegeek.com/browse/boardgamecategory",
      "https://www.boardgamegeek.com/boardgame/random",
      "https://www.boardgamegeek.com/browse/boardgamemechanic",
      "https://www.boardgamegeek.com/forums",
      "https://www.boardgamegeek.com/gonecardboard",
      "https://www.boardgamegeek.com/browse/boardgameaccessory",
      "https://www.boardgamegeek.com/browse/boardgamedesigner",
      "https://www.boardgamegeek.com/",
      "https://www.boardgamegeek.com/previews",
      "https://www.boardgamegeek.com/browse/boardgame"
    ]
  }
]
@ -3,7 +3,9 @@
|
|||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"description": "",
|
"description": "",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false"
|
"test": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false",
|
||||||
|
"test:scrape": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathPattern=tests/scrape.test.ts",
|
||||||
|
"test:crawl": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathPattern=tests/crawl.test.ts"
|
||||||
},
|
},
|
||||||
"author": "",
|
"author": "",
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
|
150
apps/test-suite/tests/crawl.test.ts
Normal file
150
apps/test-suite/tests/crawl.test.ts
Normal file
@ -0,0 +1,150 @@
|
|||||||
|
import request from "supertest";
|
||||||
|
import dotenv from "dotenv";
|
||||||
|
import { WebsiteScrapeError } from "../utils/types";
|
||||||
|
import { logErrors } from "../utils/log";
|
||||||
|
|
||||||
|
import websitesData from "../data/crawl.json";
|
||||||
|
import "dotenv/config";
|
||||||
|
|
||||||
|
import fs from 'fs';
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
|
interface WebsiteData {
|
||||||
|
website: string;
|
||||||
|
expected_min_num_of_pages: number;
|
||||||
|
expected_crawled_pages: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
const TEST_URL = "http://127.0.0.1:3002";
|
||||||
|
|
||||||
|
describe("Crawling Checkup (E2E)", () => {
|
||||||
|
beforeAll(() => {
|
||||||
|
if (!process.env.TEST_API_KEY) {
|
||||||
|
throw new Error("TEST_API_KEY is not set");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Crawling website tests with a dataset", () => {
|
||||||
|
it("Should crawl the website and verify the response", async () => {
|
||||||
|
let passedTests = 0;
|
||||||
|
const startTime = new Date().getTime();
|
||||||
|
const date = new Date();
|
||||||
|
const logsDir = `logs/${date.getMonth() + 1}-${date.getDate()}-${date.getFullYear()}`;
|
||||||
|
|
||||||
|
let errorLogFileName = `${logsDir}/run.log_${new Date().toTimeString().split(' ')[0]}`;
|
||||||
|
const errorLog: WebsiteScrapeError[] = [];
|
||||||
|
|
||||||
|
for (const websiteData of websitesData) {
|
||||||
|
try {
|
||||||
|
const crawlResponse = await request(TEST_URL || "")
|
||||||
|
.post("/v0/crawl")
|
||||||
|
.set("Content-Type", "application/json")
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
|
||||||
|
.send({ url: websiteData.website, pageOptions: { onlyMainContent: true }, crawlerOptions: { limit: 100, returnOnlyUrls: true }});
|
||||||
|
|
||||||
|
const jobId = crawlResponse.body.jobId;
|
||||||
|
let completedResponse: any;
|
||||||
|
let isFinished = false;
|
||||||
|
|
||||||
|
while (!isFinished) {
|
||||||
|
completedResponse = await request(TEST_URL)
|
||||||
|
.get(`/v0/crawl/status/${jobId}`)
|
||||||
|
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
|
||||||
|
|
||||||
|
isFinished = completedResponse.body.status === "completed";
|
||||||
|
|
||||||
|
if (!isFinished) {
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if(!completedResponse) {
|
||||||
|
// fail the test
|
||||||
|
console.log('No response');
|
||||||
|
continue;
|
||||||
|
// continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!completedResponse.body || completedResponse.body.status !== "completed") {
|
||||||
|
errorLog.push({
|
||||||
|
website: websiteData.website,
|
||||||
|
prompt: 'CRAWL',
|
||||||
|
expected_output: 'SUCCESS',
|
||||||
|
actual_output: 'FAILURE',
|
||||||
|
error: `Crawl job did not complete successfully.`
|
||||||
|
});
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// check how many webpages were crawled successfully
|
||||||
|
// compares with expected_num_of_pages
|
||||||
|
if (completedResponse.body.data.length < websiteData.expected_min_num_of_pages) {
|
||||||
|
errorLog.push({
|
||||||
|
website: websiteData.website,
|
||||||
|
prompt: 'CRAWL',
|
||||||
|
expected_output: `SUCCESS: ${websiteData.expected_min_num_of_pages}`,
|
||||||
|
actual_output: `FAILURE: ${completedResponse.body.data.length}`,
|
||||||
|
error: `Expected at least ${websiteData.expected_min_num_of_pages} webpages, but got ${completedResponse.body.data.length}`
|
||||||
|
});
|
||||||
|
console.log('Error: ', errorLog);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// checks if crawled pages contain expected_crawled_pages
|
||||||
|
if (websiteData.expected_crawled_pages && websiteData.expected_crawled_pages.length > 0 && websiteData.expected_crawled_pages.some(page => !completedResponse.body.data?.some((d: { url: string }) => d.url === page))) {
|
||||||
|
errorLog.push({
|
||||||
|
website: websiteData.website,
|
||||||
|
prompt: 'CRAWL',
|
||||||
|
expected_output: `SUCCESS: ${websiteData.expected_crawled_pages}`,
|
||||||
|
actual_output: `FAILURE: ${completedResponse.body.data}`,
|
||||||
|
error: `Expected crawled pages to contain ${websiteData.expected_crawled_pages}, but got ${completedResponse.body.data}`
|
||||||
|
});
|
||||||
|
console.log('Error: ', errorLog);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// checks if crawled pages not contain expected_not_crawled_pages
|
||||||
|
          if (websiteData.expected_not_crawled_pages && websiteData.expected_not_crawled_pages.length > 0 && completedResponse.body.data && websiteData.expected_not_crawled_pages.filter(page => completedResponse.body.data.some((d: { url: string }) => d.url === page)).length > 0) {
            errorLog.push({
              website: websiteData.website,
              prompt: 'CRAWL',
              expected_output: `SUCCESS: ${websiteData.expected_not_crawled_pages}`,
              actual_output: `FAILURE: ${completedResponse.body.data}`,
              error: `Expected crawled pages to not contain ${websiteData.expected_not_crawled_pages}, but got ${completedResponse.body.data}`
            });
            console.log('Error: ', errorLog);
            continue;
          }

          passedTests++;
        } catch (error) {
          console.error(`Error processing ${websiteData.website}: ${error}`);
          errorLog.push({
            website: websiteData.website,
            prompt: 'CRAWL',
            expected_output: 'SUCCESS',
            actual_output: 'FAILURE',
            error: `Error processing ${websiteData.website}: ${error}`
          });
          continue;
        }
      }

      const score = (passedTests / websitesData.length) * 100;
      const endTime = new Date().getTime();
      const timeTaken = (endTime - startTime) / 1000;
      console.log(`Score: ${score}%`);

      await logErrors(errorLog, timeTaken, 0, score, websitesData.length);

      if (process.env.ENV === "local" && errorLog.length > 0) {
        if (!fs.existsSync(logsDir)) {
          fs.mkdirSync(logsDir, { recursive: true });
        }
        fs.writeFileSync(errorLogFileName, JSON.stringify(errorLog, null, 2));
      }

      expect(score).toBeGreaterThanOrEqual(90);
    }, 350000); // 350 seconds timeout
  });
});
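The crawl assertions above read four fields from each entry of the crawl dataset (`websitesData`). As a rough reference, here is a minimal TypeScript sketch of that shape, assuming only the fields the checks actually touch; the real fixture file and its type may carry more properties, and the example values are purely illustrative:

```ts
// Sketch of the dataset entry shape implied by the assertions above.
// Field names come from the test code; values are made up for illustration.
interface CrawlWebsiteData {
  website: string;                        // URL submitted to the crawl endpoint
  expected_min_num_of_pages: number;      // lower bound on pages returned
  expected_crawled_pages?: string[];      // URLs that must appear in the results
  expected_not_crawled_pages?: string[];  // URLs that must not appear in the results
}

const exampleEntry: CrawlWebsiteData = {
  website: "https://example.com",
  expected_min_num_of_pages: 5,
  expected_crawled_pages: ["https://example.com/about"],
  expected_not_crawled_pages: ["https://example.com/admin"],
};

console.log(exampleEntry.website);
```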
@ -1,16 +1,14 @@
import request from "supertest";
import dotenv from "dotenv";
import Anthropic from "@anthropic-ai/sdk";
import { numTokensFromString } from "./utils/tokens";
import { numTokensFromString } from "../utils/tokens";
import OpenAI from "openai";
import { WebsiteScrapeError } from "./utils/types";
import { WebsiteScrapeError } from "../utils/types";
import { logErrors } from "./utils/log";
import { logErrors } from "../utils/log";

const websitesData = require("./data/websites.json");
import websitesData from "../data/scrape.json";
import "dotenv/config";

const fs = require('fs');
import fs from 'fs';

dotenv.config();

interface WebsiteData {
@ -21,8 +19,7 @@ interface WebsiteData {

const TEST_URL = "http://127.0.0.1:3002";

describe("Scraping Checkup (E2E)", () => {
describe("Scraping/Crawling Checkup (E2E)", () => {
  beforeAll(() => {
    if (!process.env.TEST_API_KEY) {
      throw new Error("TEST_API_KEY is not set");
@ -72,10 +69,6 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
      return null;
    }

    const anthropic = new Anthropic({
      apiKey: process.env.ANTHROPIC_API_KEY,
    });

    const openai = new OpenAI({
      apiKey: process.env.OPENAI_API_KEY,
    });
@ -183,7 +176,7 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
      }


      expect(score).toBeGreaterThanOrEqual(75);
      expect(score).toBeGreaterThanOrEqual(70);
    }, 350000); // 350 seconds timeout
  });
});
@ -39,7 +39,7 @@
    // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
    // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
    // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
    // "resolveJsonModule": true, /* Enable importing .json files. */
    "resolveJsonModule": true, /* Enable importing .json files. */
    // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
    // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */

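Turning on `resolveJsonModule` is what lets the test above swap `require("./data/websites.json")` for a static `import` of `../data/scrape.json`. A minimal sketch of the pattern, using a placeholder path rather than the real fixture:

```ts
// Minimal sketch: with "resolveJsonModule": true, a JSON file can be imported
// directly and TypeScript infers its type from the file's contents.
// "./dataset.json" is a placeholder path, not the actual test fixture.
import dataset from "./dataset.json";

for (const entry of dataset) {
  // `entry` is typed from the JSON structure, so misspelled field names
  // are caught at compile time instead of failing silently at runtime.
  console.log(entry);
}
```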
78
docker-compose.yaml
Normal file
@ -0,0 +1,78 @@
name: firecrawl
version: '3.9'
services:
  playwright-service:
    build: apps/playwright-service
    environment:
      - PORT=3000
    networks:
      - backend

  api:
    build: apps/api
    environment:
      - REDIS_URL=${REDIS_URL:-redis://redis:6379}
      - PLAYWRIGHT_MICROSERVICE_URL=${PLAYWRIGHT_MICROSERVICE_URL:-http://playwright-service:3000}
      - USE_DB_AUTHENTICATION=${USE_DB_AUTHENTICATION}
      - PORT=${PORT:-3002}
      - NUM_WORKERS_PER_QUEUE=${NUM_WORKERS_PER_QUEUE}
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - SLACK_WEBHOOK_URL=${SLACK_WEBHOOK_URL}
      - SERPER_API_KEY=${SERPER_API_KEY}
      - LLAMAPARSE_API_KEY=${LLAMAPARSE_API_KEY}
      - LOGTAIL_KEY=${LOGTAIL_KEY}
      - BULL_AUTH_KEY=${BULL_AUTH_KEY}
      - TEST_API_KEY=${TEST_API_KEY}
      - POSTHOG_API_KEY=${POSTHOG_API_KEY}
      - POSTHOG_HOST=${POSTHOG_HOST}
      - SUPABASE_ANON_TOKEN=${SUPABASE_ANON_TOKEN}
      - SUPABASE_URL=${SUPABASE_URL}
      - SUPABASE_SERVICE_TOKEN=${SUPABASE_SERVICE_TOKEN}
      - SCRAPING_BEE_API_KEY=${SCRAPING_BEE_API_KEY}
      - HOST=${HOST:-0.0.0.0}
    depends_on:
      - redis
      - playwright-service
    ports:
      - "3002:3002"
    command: [ "pnpm", "run", "start:production" ]
    networks:
      - backend

  worker:
    build: apps/api
    environment:
      - REDIS_URL=${REDIS_URL:-redis://redis:6379}
      - PLAYWRIGHT_MICROSERVICE_URL=${PLAYWRIGHT_MICROSERVICE_URL:-http://playwright-service:3000}
      - USE_DB_AUTHENTICATION=${USE_DB_AUTHENTICATION}
      - PORT=${PORT:-3002}
      - NUM_WORKERS_PER_QUEUE=${NUM_WORKERS_PER_QUEUE}
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - SLACK_WEBHOOK_URL=${SLACK_WEBHOOK_URL}
      - SERPER_API_KEY=${SERPER_API_KEY}
      - LLAMAPARSE_API_KEY=${LLAMAPARSE_API_KEY}
      - LOGTAIL_KEY=${LOGTAIL_KEY}
      - BULL_AUTH_KEY=${BULL_AUTH_KEY}
      - TEST_API_KEY=${TEST_API_KEY}
      - POSTHOG_API_KEY=${POSTHOG_API_KEY}
      - POSTHOG_HOST=${POSTHOG_HOST}
      - SUPABASE_ANON_TOKEN=${SUPABASE_ANON_TOKEN}
      - SUPABASE_URL=${SUPABASE_URL}
      - SUPABASE_SERVICE_TOKEN=${SUPABASE_SERVICE_TOKEN}
      - SCRAPING_BEE_API_KEY=${SCRAPING_BEE_API_KEY}
      - HOST=${HOST:-0.0.0.0}
    depends_on:
      - redis
      - playwright-service
      - api
    networks:
      - backend
  redis:
    image: redis:alpine
    networks:
      - backend
    command: redis-server --bind 0.0.0.0

networks:
  backend:
    driver: bridge
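Each `${VAR:-default}` entry above falls back to an in-network default when the variable is not set in the shell or `.env`: Redis at `redis://redis:6379`, the Playwright service at `http://playwright-service:3000`, the API on port `3002` bound to `0.0.0.0`. Those hostnames resolve through Docker's DNS on the `backend` network; from the host machine the API is only reachable through the published `3002:3002` port. As a rough illustration (not the actual Firecrawl source), a service could read the same values with the same fallbacks like this:

```ts
// Sketch only: mirrors the compose defaults above; the real API/worker code
// may resolve its configuration differently.
const redisUrl = process.env.REDIS_URL ?? "redis://redis:6379";
const playwrightUrl =
  process.env.PLAYWRIGHT_MICROSERVICE_URL ?? "http://playwright-service:3000";
const port = Number(process.env.PORT ?? 3002);
const host = process.env.HOST ?? "0.0.0.0";

console.log({ redisUrl, playwrightUrl, port, host });
```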