diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0b24d07..049aeaf 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -25,6 +25,9 @@ env:
SUPABASE_SERVICE_TOKEN: ${{ secrets.SUPABASE_SERVICE_TOKEN }}
SUPABASE_URL: ${{ secrets.SUPABASE_URL }}
TEST_API_KEY: ${{ secrets.TEST_API_KEY }}
+ HYPERDX_API_KEY: ${{ secrets.HYPERDX_API_KEY }}
+ HDX_NODE_BETA_MODE: 1
+
jobs:
pre-deploy:
diff --git a/README.md b/README.md
index 3b3968f..2aaeeeb 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ _This repository is in its early development stages. We are still merging custom
## What is Firecrawl?
-[Firecrawl](https://firecrawl.dev?ref=github) is an API service that takes a URL, crawls it, and converts it into clean markdown. We crawl all accessible subpages and give you clean markdown for each. No sitemap required.
+[Firecrawl](https://firecrawl.dev?ref=github) is an API service that takes a URL, crawls it, and converts it into clean markdown or structured data. We crawl all accessible subpages and give you clean data for each. No sitemap required.
_Pst. hey, you, join our stargazers :)_
@@ -114,7 +114,7 @@ Response:
### Search (Beta)
-Used to search the web, get the most relevant results, scrap each page and return the markdown.
+Used to search the web, get the most relevant results, scrape each page and return the markdown.
```bash
curl -X POST https://api.firecrawl.dev/v0/search \
@@ -296,7 +296,6 @@ npm install @mendable/firecrawl-js
1. Get an API key from [firecrawl.dev](https://firecrawl.dev)
2. Set the API key as an environment variable named `FIRECRAWL_API_KEY` or pass it as a parameter to the `FirecrawlApp` class.
-
### Scraping a URL
To scrape a single URL with error handling, use the `scrapeUrl` method. It takes the URL as a parameter and returns the scraped data as a dictionary.
diff --git a/SELF_HOST.md b/SELF_HOST.md
index 8d1d490..ff5ee04 100644
--- a/SELF_HOST.md
+++ b/SELF_HOST.md
@@ -1,6 +1,31 @@
# Self-hosting Firecrawl
+*We're currently working on a more in-depth guide on how to self-host, but in the meantime, here is a simplified version.*
Refer to [CONTRIBUTING.md](https://github.com/mendableai/firecrawl/blob/main/CONTRIBUTING.md) for instructions on how to run it locally.
-*This repository is currently in its early stages of development. We are in the process of merging custom modules into this mono repository. The primary objective is to enhance the accuracy of LLM responses by utilizing clean data. It is not ready for full self-host yet - we're working on it*
+## Getting Started
+First, clone this repository and copy the example env file from the API folder (`apps/api/.env.example`) to `.env` in the repository root.
+```bash
+git clone https://github.com/mendableai/firecrawl.git
+cd firecrawl
+cp ./apps/api/.env.example ./.env
+```
+
+To run the simplest version of Firecrawl, set `USE_DB_AUTHENTICATION` in `.env` to `false` to disable database authentication.
+```bash
+USE_DB_AUTHENTICATION=false
+```
+
+Update the Redis URL in the `.env` file to align with the Docker configuration:
+```bash
+REDIS_URL=redis://redis:6379
+```
+
+Once that's complete, you can simply run the following command to get started:
+```bash
+docker compose up
+```
+
+This will run a local instance of Firecrawl which can be accessed at `http://localhost:3002`.
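+
+To verify the instance is up, you can hit the scrape endpoint directly. The request below is a minimal sketch: it assumes the default port from `.env` (3002), the same `v0` route prefix as the hosted API, and that no real API key is enforced when `USE_DB_AUTHENTICATION=false` (a placeholder bearer token is included in case the route still expects the header).
+
+```bash
+# Scrape a single page through the local instance
+# (placeholder token; a real key is not required without DB auth)
+curl -X POST http://localhost:3002/v0/scrape \
+  -H 'Content-Type: application/json' \
+  -H 'Authorization: Bearer test' \
+  -d '{"url": "https://example.com"}'
+```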
diff --git a/apps/api/.env.example b/apps/api/.env.example
index b025326..659d68f 100644
--- a/apps/api/.env.example
+++ b/apps/api/.env.example
@@ -3,6 +3,7 @@ NUM_WORKERS_PER_QUEUE=8
PORT=3002
HOST=0.0.0.0
REDIS_URL=redis://localhost:6379
+PLAYWRIGHT_MICROSERVICE_URL=http://playwright-service:3000
## To turn on DB authentication, you need to set up supabase.
USE_DB_AUTHENTICATION=true
@@ -16,14 +17,22 @@ SUPABASE_SERVICE_TOKEN=
# Other Optionals
TEST_API_KEY= # use if you've set up authentication and want to test with a real API key
+RATE_LIMIT_TEST_API_KEY_SCRAPE= # set if you'd like to test the scraping rate limit
+RATE_LIMIT_TEST_API_KEY_CRAWL= # set if you'd like to test the crawling rate limit
SCRAPING_BEE_API_KEY= #Set if you'd like to use ScrapingBee to handle JS blocking
OPENAI_API_KEY= # add for LLM dependent features (image alt generation, etc.)
BULL_AUTH_KEY= #
LOGTAIL_KEY= # Use if you're configuring basic logging with logtail
-PLAYWRIGHT_MICROSERVICE_URL= # set if you'd like to run a playwright fallback
LLAMAPARSE_API_KEY= #Set if you have a llamaparse key you'd like to use to parse pdfs
SERPER_API_KEY= #Set if you have a serper key you'd like to use as a search api
SLACK_WEBHOOK_URL= # set if you'd like to send slack server health status messages
POSTHOG_API_KEY= # set if you'd like to send posthog events like job logs
POSTHOG_HOST= # set if you'd like to send posthog events like job logs
+STRIPE_PRICE_ID_STANDARD=
+STRIPE_PRICE_ID_SCALE=
+
+HYPERDX_API_KEY=
+HDX_NODE_BETA_MODE=1
+
+FIRE_ENGINE_BETA_URL= # set if you'd like to use the fire engine closed beta
\ No newline at end of file
diff --git a/apps/api/openapi.json b/apps/api/openapi.json
index 7861f32..b483bc4 100644
--- a/apps/api/openapi.json
+++ b/apps/api/openapi.json
@@ -18,8 +18,8 @@
"paths": {
"/scrape": {
"post": {
- "summary": "Scrape a single URL",
- "operationId": "scrapeSingleUrl",
+ "summary": "Scrape a single URL and optionally extract information using an LLM",
+ "operationId": "scrapeAndExtractFromUrl",
"tags": ["Scraping"],
"security": [
{
@@ -45,8 +45,43 @@
"type": "boolean",
"description": "Only return the main content of the page excluding headers, navs, footers, etc.",
"default": false
+ },
+ "includeHtml": {
+ "type": "boolean",
+            "description": "Include the raw HTML content of the page. Will output an `html` key in the response.",
+ "default": false
}
}
+ },
+ "extractorOptions": {
+ "type": "object",
+ "description": "Options for LLM-based extraction of structured information from the page content",
+ "properties": {
+ "mode": {
+ "type": "string",
+ "enum": ["llm-extraction"],
+ "description": "The extraction mode to use, currently supports 'llm-extraction'"
+ },
+ "extractionPrompt": {
+ "type": "string",
+ "description": "A prompt describing what information to extract from the page"
+ },
+ "extractionSchema": {
+ "type": "object",
+ "additionalProperties": true,
+ "description": "The schema for the data to be extracted",
+ "required": [
+ "company_mission",
+ "supports_sso",
+ "is_open_source"
+ ]
+ }
+ }
+ },
+ "timeout": {
+ "type": "integer",
+ "description": "Timeout in milliseconds for the request",
+ "default": 30000
}
},
"required": ["url"]
@@ -126,9 +161,20 @@
"description": "If true, returns only the URLs as a list on the crawl status. Attention: the return response will be a list of URLs inside the data, not a list of documents.",
"default": false
},
+ "maxDepth": {
+ "type": "integer",
+ "description": "Maximum depth to crawl. Depth 1 is the base URL, depth 2 is the base URL and its direct children, and so on."
+ },
+ "mode": {
+ "type": "string",
+ "enum": ["default", "fast"],
+            "description": "The crawling mode to use. Fast mode crawls websites without a sitemap up to 4x faster, but may be less accurate and should not be used on heavily JS-rendered websites.",
+ "default": "default"
+ },
"limit": {
"type": "integer",
- "description": "Maximum number of pages to crawl"
+ "description": "Maximum number of pages to crawl",
+ "default": 10000
}
}
},
@@ -139,6 +185,11 @@
"type": "boolean",
"description": "Only return the main content of the page excluding headers, navs, footers, etc.",
"default": false
+ },
+ "includeHtml": {
+ "type": "boolean",
+            "description": "Include the raw HTML content of the page. Will output an `html` key in the response.",
+ "default": false
}
}
}
@@ -191,7 +242,7 @@
"query": {
"type": "string",
"format": "uri",
- "description": "The URL to scrape"
+ "description": "The query to search for"
},
"pageOptions": {
"type": "object",
@@ -205,6 +256,11 @@
"type": "boolean",
"description": "Fetch the content of each page. If false, defaults to a basic fast serp API.",
"default": true
+ },
+ "includeHtml": {
+ "type": "boolean",
+              "description": "Include the raw HTML content of the page. Will output an `html` key in the response.",
+ "default": false
}
}
},
@@ -298,9 +354,66 @@
"data": {
"type": "array",
"items": {
- "$ref": "#/components/schemas/ScrapeResponse"
+ "$ref": "#/components/schemas/CrawlStatusResponseObj"
},
"description": "Data returned from the job (null when it is in progress)"
+ },
+ "partial_data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/CrawlStatusResponseObj"
+ },
+              "description": "Partial documents returned while the crawl is in progress (streaming). When a page is ready, it is appended to the partial_data array - so there is no need to wait for the entire website to be crawled."
+ }
+ }
+ }
+ }
+ }
+ },
+ "402": {
+ "description": "Payment required"
+ },
+ "429": {
+ "description": "Too many requests"
+ },
+ "500": {
+ "description": "Server error"
+ }
+ }
+ }
+ },
+ "/crawl/cancel/{jobId}": {
+ "delete": {
+ "tags": ["Crawl"],
+ "summary": "Cancel a crawl job",
+ "operationId": "cancelCrawlJob",
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ],
+ "parameters": [
+ {
+ "name": "jobId",
+ "in": "path",
+ "description": "ID of the crawl job",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "status": {
+ "type": "string",
+                      "description": "Returns 'cancelled' when the job has been cancelled."
}
}
}
@@ -343,6 +456,11 @@
"content": {
"type": "string"
},
+ "html": {
+ "type": "string",
+ "nullable": true,
+ "description": "Raw HTML content of the page if `includeHtml` is true"
+ },
"metadata": {
"type": "object",
"properties": {
@@ -361,6 +479,51 @@
"format": "uri"
}
}
+ },
+ "llm_extraction": {
+ "type": "object",
+          "description": "Present when using LLM extraction. The extracted data from the page, following the defined schema.",
+ "nullable": true
+ },
+ "warning": {
+ "type": "string",
+ "nullable": true,
+          "description": "May be present when using LLM extraction. A warning message describing any issues encountered during extraction."
+ }
+ }
+ }
+ }
+ },
+ "CrawlStatusResponseObj": {
+ "type": "object",
+ "properties": {
+ "markdown": {
+ "type": "string"
+ },
+ "content": {
+ "type": "string"
+ },
+ "html": {
+ "type": "string",
+ "nullable": true,
+ "description": "Raw HTML content of the page if `includeHtml` is true"
+ },
+ "metadata": {
+ "type": "object",
+ "properties": {
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "language": {
+ "type": "string",
+ "nullable": true
+ },
+ "sourceURL": {
+ "type": "string",
+ "format": "uri"
}
}
}
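Taken together, the new `/scrape` schema fields above (`includeHtml`, `extractorOptions`, `timeout`) and the new cancel endpoint can be exercised as follows. This is a hedged sketch against the hosted API, assuming the `v0` route prefix used elsewhere in the docs and reusing the example schema keys from the spec; `YOUR_API_KEY` and `JOB_ID` are placeholders.

```bash
# Scrape with raw HTML, LLM extraction, and an explicit 30s timeout
curl -X POST https://api.firecrawl.dev/v0/scrape \
  -H 'Content-Type: application/json' \
  -H 'Authorization: Bearer YOUR_API_KEY' \
  -d '{
    "url": "https://example.com",
    "pageOptions": { "includeHtml": true },
    "extractorOptions": {
      "mode": "llm-extraction",
      "extractionPrompt": "Extract the company mission and whether it supports SSO and is open source.",
      "extractionSchema": {
        "type": "object",
        "properties": {
          "company_mission": { "type": "string" },
          "supports_sso": { "type": "boolean" },
          "is_open_source": { "type": "boolean" }
        },
        "required": ["company_mission", "supports_sso", "is_open_source"]
      }
    },
    "timeout": 30000
  }'

# Cancel a running crawl job by its ID
curl -X DELETE https://api.firecrawl.dev/v0/crawl/cancel/JOB_ID \
  -H 'Authorization: Bearer YOUR_API_KEY'
```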
diff --git a/apps/api/package.json b/apps/api/package.json
index a79e3dc..92dfb6d 100644
--- a/apps/api/package.json
+++ b/apps/api/package.json
@@ -33,6 +33,7 @@
"express": "^4.18.2",
"jest": "^29.6.3",
"jest-fetch-mock": "^3.0.3",
+ "mammoth": "^1.7.2",
"nodemon": "^2.0.20",
"supabase": "^1.77.9",
"supertest": "^6.3.3",
@@ -47,6 +48,7 @@
"@bull-board/express": "^5.8.0",
"@devil7softwares/pos": "^1.0.2",
"@dqbd/tiktoken": "^1.0.13",
+ "@hyperdx/node-opentelemetry": "^0.7.0",
"@logtail/node": "^0.4.12",
"@nangohq/node": "^0.36.33",
"@sentry/node": "^7.48.0",
diff --git a/apps/api/pnpm-lock.yaml b/apps/api/pnpm-lock.yaml
index 7873375..a2d1394 100644
--- a/apps/api/pnpm-lock.yaml
+++ b/apps/api/pnpm-lock.yaml
@@ -23,6 +23,9 @@ dependencies:
'@dqbd/tiktoken':
specifier: ^1.0.13
version: 1.0.13
+ '@hyperdx/node-opentelemetry':
+ specifier: ^0.7.0
+ version: 0.7.0
'@logtail/node':
specifier: ^0.4.12
version: 0.4.20
@@ -97,7 +100,7 @@ dependencies:
version: 0.0.25
langchain:
specifier: ^0.1.25
- version: 0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2)
+ version: 0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(mammoth@1.7.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2)
languagedetect:
specifier: ^2.0.0
version: 2.0.0
@@ -214,6 +217,9 @@ devDependencies:
jest-fetch-mock:
specifier: ^3.0.3
version: 3.0.3
+ mammoth:
+ specifier: ^1.7.2
+ version: 1.7.2
nodemon:
specifier: ^2.0.20
version: 2.0.22
@@ -637,6 +643,11 @@ packages:
'@bull-board/api': 5.14.2(@bull-board/ui@5.14.2)
dev: false
+ /@colors/colors@1.6.0:
+ resolution: {integrity: sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==}
+ engines: {node: '>=0.1.90'}
+ dev: false
+
/@cspotcode/source-map-support@0.8.1:
resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
engines: {node: '>=12'}
@@ -666,6 +677,58 @@ packages:
yargs: 17.7.2
dev: true
+ /@grpc/grpc-js@1.10.8:
+ resolution: {integrity: sha512-vYVqYzHicDqyKB+NQhAc54I1QWCBLCrYG6unqOIcBTHx+7x8C9lcoLj3KVJXs2VB4lUbpWY+Kk9NipcbXYWmvg==}
+ engines: {node: '>=12.10.0'}
+ dependencies:
+ '@grpc/proto-loader': 0.7.13
+ '@js-sdsl/ordered-map': 4.4.2
+ dev: false
+
+ /@grpc/proto-loader@0.7.13:
+ resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==}
+ engines: {node: '>=6'}
+ hasBin: true
+ dependencies:
+ lodash.camelcase: 4.3.0
+ long: 5.2.3
+ protobufjs: 7.3.0
+ yargs: 17.7.2
+ dev: false
+
+ /@hyperdx/node-opentelemetry@0.7.0:
+ resolution: {integrity: sha512-3PH1CLUITIx8Awlyye0if0xAgdm0+rK4Shs5nE2q7b/8dc66krYzOFvpDcFS9/R4jOiw1t7tY4q8V1p9/dHLmw==}
+ hasBin: true
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/auto-instrumentations-node': 0.46.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-logs-otlp-http': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-metrics-otlp-proto': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-trace-otlp-proto': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-http': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-node': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ debug: 4.3.4
+ json-stringify-safe: 5.0.1
+ lodash.isobject: 3.0.2
+ lodash.isplainobject: 4.0.6
+ lodash.isstring: 4.0.1
+ pino-abstract-transport: 1.2.0
+ shimmer: 1.2.1
+ tslib: 2.6.2
+ winston-transport: 4.7.0
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+ dev: false
+
/@ioredis/commands@1.2.0:
resolution: {integrity: sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==}
@@ -948,6 +1011,10 @@ packages:
'@jridgewell/sourcemap-codec': 1.4.15
dev: true
+ /@js-sdsl/ordered-map@4.4.2:
+ resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==}
+ dev: false
+
/@langchain/community@0.0.35(@supabase/supabase-js@2.39.7)(ioredis@5.3.2)(redis@4.6.13)(typesense@1.7.2):
resolution: {integrity: sha512-xZGjiqlS7X0EDWM67s2PxSLg0Rz/Wfc741IPF0Ok/f4yFwFseWjtcWXwBwe0dVnapIstpKR82q+RDAa06xFxyw==}
engines: {node: '>=18'}
@@ -1345,6 +1412,991 @@ packages:
- debug
dev: false
+ /@opentelemetry/api-logs@0.51.1:
+ resolution: {integrity: sha512-E3skn949Pk1z2XtXu/lxf6QAZpawuTM/IUEXcAzpiUkTd73Hmvw26FiN3cJuTmkpM5hZzHwkomVdtrh/n/zzwA==}
+ engines: {node: '>=14'}
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ dev: false
+
+ /@opentelemetry/api@1.8.0:
+ resolution: {integrity: sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==}
+ engines: {node: '>=8.0.0'}
+ dev: false
+
+ /@opentelemetry/auto-instrumentations-node@0.46.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-s0CwmY9KYtPawOhV5YO2Gf62uVOQRNvT6Or8IZ0S4gr/kPVNhoMehTsQvqBwSWQfoFrkmW3KKOHiKJEp4dVGXg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.4.1
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-amqplib': 0.37.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-aws-lambda': 0.41.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-aws-sdk': 0.41.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-bunyan': 0.38.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-cassandra-driver': 0.38.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-connect': 0.36.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-cucumber': 0.6.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-dataloader': 0.9.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-dns': 0.36.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-express': 0.39.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-fastify': 0.36.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-fs': 0.12.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-generic-pool': 0.36.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-graphql': 0.40.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-grpc': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-hapi': 0.38.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-http': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-ioredis': 0.40.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-knex': 0.36.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-koa': 0.40.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-lru-memoizer': 0.37.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-memcached': 0.36.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-mongodb': 0.43.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-mongoose': 0.38.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-mysql': 0.38.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-mysql2': 0.38.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-nestjs-core': 0.37.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-net': 0.36.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-pg': 0.41.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-pino': 0.39.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-redis': 0.39.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-redis-4': 0.39.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-restify': 0.38.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-router': 0.37.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-socket.io': 0.39.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-tedious': 0.10.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-undici': 0.2.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation-winston': 0.37.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resource-detector-alibaba-cloud': 0.28.9(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resource-detector-aws': 1.5.0(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resource-detector-azure': 0.2.7(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resource-detector-container': 0.3.9(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resource-detector-gcp': 0.29.9(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-node': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+ dev: false
+
+ /@opentelemetry/context-async-hooks@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-R5r6DO4kgEOVBxFXhXjwospLQkv+sYxwCfjvoZBe7Zm6KKXAV9kDSJhi/D1BweowdZmO+sdbENLs374gER8hpQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ dev: false
+
+ /@opentelemetry/core@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-wMSGfsdmibI88K9wB498zXY04yThPexo8jvwNNlm542HZB7XrrMRBbAyKJqG8qDRJwIBdBrPMi4V9ZPW/sqrcg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/exporter-logs-otlp-http@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-cd6GZ9IqCrmvOJwi1HjRR7o9ihF7xhZTekgxUsoyTsPF+SjKMsLF9ur6HeBYkYhk+YjZ1ken3XUMH47oUTvu8Q==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/exporter-metrics-otlp-http@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-oFXvif9iksHUxrzG3P8ohMLt7xSrl+oDMqxD/3XXndU761RFAKSbRDpfrQs25U5D+A2aMV3qk+4kfUWdJhZ77g==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/exporter-metrics-otlp-proto@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-jhj8xD6S4cydXGCuf2tp56+4QI0DbDH6g+0MiPPJVdXjxLj+iycQuqB2cwljWpByblFaOjyUsL/VKtm8C7sQ9A==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-metrics-otlp-http': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-proto-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/exporter-trace-otlp-grpc@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-P9+Hkszih95ITvldGZ+kXvj9HpD1QfS+PwooyHK72GYA+Bgm+yUSAsDkUkDms8+s9HW6poxURv3LcjaMuBBpVQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@grpc/grpc-js': 1.10.8
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-grpc-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/exporter-trace-otlp-http@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-n+LhLPsX07URh+HhV2SHVSvz1t4G/l/CE5BjpmhAPqeTceFac1VpyQkavWEJbvnK5bUEXijWt4LxAxFpt2fXyw==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/exporter-trace-otlp-proto@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-SE9f0/6V6EeXC9i+WA4WFjS1EYgaBCpAnI5+lxWvZ7iO7EU1IvHvZhP6Kojr0nLldo83gqg6G7OWFqsID3uF+w==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-proto-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/exporter-zipkin@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-+Rl/VFmu2n6eaRMnVbyfZx1DqR/1KNyWebYuHyQBZaEAVIn/ZLgmofRpXN1X2nhJ4BNaptQUNxAstCYYz6dKoQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/instrumentation-amqplib@0.37.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-XjOHeAOreh0XX4jlzTTUWWqu1dIGvMWM8yvd43JJdRMAmTZisezjKsxLjMEMIvF0PzQdoXwh9DiS9nYE4/QmpA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-aws-lambda@0.41.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-/BLG+0DQr2tCILFGJKJH2Fg6eyjhqOlVflYpNddUEXnzyQ/PAhTdgirkqbICFgeSW2XYcEY9zXpuRldrVNw9cA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/propagator-aws-xray': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@types/aws-lambda': 8.10.122
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-aws-sdk@0.41.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-7+8WMY0LQeqv6KIObXK+Py44qNFLeCU0ZLLxSZtXEbZ2wJlQISP1St65jRto0NV7isnZoyuOxb2+ZpypPPNv7Q==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/propagation-utils': 0.30.9(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-bunyan@0.38.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-ThNcgTE22W7PKzTzz5qfGxb5Gf7rA3EORousYo2nJWHHcF6gqiMNv2+GXY3MdpjLBr8IgCfhtvbQdD6rlIPUpA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@types/bunyan': 1.8.9
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-cassandra-driver@0.38.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-ML4Vw0it2uIpETfX6skuSIGLHF9D3TUKOfdfrk9lnrzzWSzg2aS6pl3UeepkQX4wXHdzlxVRB0USrUqsmxMd5Q==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-connect@0.36.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-xI5Q/CMmzBmHshPnzzjD19ptFaYO/rQWzokpNio4QixZYWhJsa35QgRvN9FhPkwgtuJIbt/CWWAufJ3egJNHEA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@types/connect': 3.4.36
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-cucumber@0.6.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-90eAF2JPSbPAsOuGfYyctYaoYXqy4Clbxt0j/uUgg6dto4oqwUw3AvTyHQEztLGxeXwEzC1EQigDtVPg5ZexYA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-dataloader@0.9.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-fiyCOAw+tlbneok1x7P5UseoGW5nS60CWWx7NXzYW+WOexpSmDQQW7olttGa8fqE6/sVCoi1l+QdfVoETZi/NQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-dns@0.36.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-NWRbQ7q0E3co/CNTWLZZvUzZoKhB1iTitY282IM8HDTXkA6VRssCfOcvaHw5ezOh23TJbAeYxmmpVj4hFvDPYQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ semver: 7.6.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-express@0.39.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-AG8U7z7D0JcBu/7dDcwb47UMEzj9/FMiJV2iQZqrsZnxR3FjB9J9oIH2iszJYci2eUdp2WbdvtpD9RV/zmME5A==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-fastify@0.36.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-3Nfm43PI0I+3EX+1YbSy6xbDu276R1Dh1tqAk68yd4yirnIh52Kd5B+nJ8CgHA7o3UKakpBjj6vSzi5vNCzJIA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-fs@0.12.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-Waf+2hekJRxIwq1PmivxOWLdMOtYbY22hKr34gEtfbv2CArSv8FBJH4BmQxB9o5ZcwkdKu589qs009dbuSfNmQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-generic-pool@0.36.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-CExAEqJvK8jYxrhN8cl6EaGg57EGJi+qsSKouLC5lndXi68gZLOKbZIMZg4pF0kNfp/D4BFaGmA6Ap7d5WoPTw==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-graphql@0.40.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-LVRdEHWACWOczv2imD+mhUrLMxsEjPPi32vIZJT57zygR5aUiA4em8X3aiGOCycgbMWkIu8xOSGSxdx3JmzN+w==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-grpc@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-coRTugFL7De/VNH/1NqPlxnfik87jS+jBXsny+Y/lMhXIA3x8t71IyL9ihuewkD+lNtIxIz6Y7Sq6kPuOqz5dQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-hapi@0.38.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-ZcOqEuwuutTDYIjhDIStix22ECblG/i9pHje23QGs4Q4YS4RMaZ5hKCoQJxW88Z4K7T53rQkdISmoXFKDV8xMg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-http@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-6b3nZnFFEz/3xZ6w8bVxctPUWIPWiXuPQ725530JgxnN1cvYFd8CJ75PrHZNjynmzSSnqBkN3ef4R9N+RpMh8Q==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ semver: 7.6.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-ioredis@0.40.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-Jv/fH7KhpWe4KBirsiqeUJIYrsdR2iu2l4nWhfOlRvaZ+zYIiLEzTQR6QhBbyRoAbU4OuYJzjWusOmmpGBnwng==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/redis-common': 0.36.2
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-knex@0.36.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-6bEuiI+yMf3D0+ZWZE2AKmXhIhBvZ0brdO/0A8lUqeqeS+sS4fTcjA1F2CclsCNxYWEgcs8o3QyQqPceBeVRlg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-koa@0.40.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-dJc3H/bKMcgUYcQpLF+1IbmUKus0e5Fnn/+ru/3voIRHwMADT3rFSUcGLWSczkg68BCgz0vFWGDTvPtcWIFr7A==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@types/koa': 2.14.0
+ '@types/koa__router': 12.0.3
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-lru-memoizer@0.37.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-dHLrn55qVWsHJQYdForPWPUWDk2HZ2jjzkT+WoQSqpYT1j4HxfoiLfBTF+I3EbEYFAJnDRmRAUfA6nU5GPdCLQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-memcached@0.36.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-5efkT8ZfN8il5z+yfKYFGm2YR3mhlhaJoGfNOAylKE/6tUH3WDTTWaP7nrURtWGc+fuvDktcEch18Se8qsGS7w==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@types/memcached': 2.2.10
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-mongodb@0.43.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-bMKej7Y76QVUD3l55Q9YqizXybHUzF3pujsBFjqbZrRn2WYqtsDtTUlbCK7fvXNPwFInqZ2KhnTqd0gwo8MzaQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-mongoose@0.38.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-zaeiasdnRjXe6VhYCBMdkmAVh1S5MmXC/0spet+yqoaViGnYst/DOxPvhwg3yT4Yag5crZNWsVXnA538UjP6Ow==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-mysql2@0.38.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-qkpHMgWSDTYVB1vlZ9sspf7l2wdS5DDq/rbIepDwX5BA0N0068JTQqh0CgAh34tdFqSCnWXIhcyOXC2TtRb0sg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-mysql@0.38.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-+iBAawUaTfX/HAlvySwozx0C2B6LBfNPXX1W8Z2On1Uva33AGkw2UjL9XgIg1Pj4eLZ9R4EoJ/aFz+Xj4E/7Fw==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@types/mysql': 2.15.22
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-nestjs-core@0.37.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-ebYQjHZEmGHWEALwwDGhSQVLBaurFnuLIkZD5igPXrt7ohfF4lc5/4al1LO+vKc0NHk8SJWStuRueT86ISA8Vg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-net@0.36.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-rZlbSgwAJys8lpug+xIeAdO98ypYMAPVqrHqc4AHuUl5S4MULHEcjGLMZLoE/guEGO4xAQ5XUezpRFGM1SAnsg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-pg@0.41.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-BSlhpivzBD77meQNZY9fS4aKgydA8AJBzv2dqvxXFy/Hq64b7HURgw/ztbmwFeYwdF5raZZUifiiNSMLpOJoSA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.8.0)
+ '@types/pg': 8.6.1
+ '@types/pg-pool': 2.0.4
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-pino@0.39.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-uA17F2iP77o3NculB63QD2zv3jkJ093Gfb0GxHLEqTIqpYs1ToJ53ybWwjJwqFByxk7GrliaxaxVtWC23PKzBg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-redis-4@0.39.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-Zpfqfi83KeKgVQ0C2083GZPon3ZPYQ5E59v9FAbhubtOoUb9Rh7n111YD8FPW3sgx6JKp1odXmBmfQhWCaTOpQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/redis-common': 0.36.2
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-redis@0.39.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-HUjTerD84jRJnSyDrRPqn6xQ7K91o9qLflRPZqzRvq0GRj5PMfc6TJ/z3q/ayWy/2Kzffhrp7HCIVp0u0TkgUg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/redis-common': 0.36.2
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-restify@0.38.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-VYK47Z9GBaZX5MQLL7kZDdzQDdyUtHRD4J/GSr6kdwmIpdpUQXLsV3EnboeB8P+BlpucF57FyJKE8yWTOEMfnA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-router@0.37.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-+OPcm7C9I5oPqnpStE+1WkdPWjRx0k5XKratxQmIDFZrmhRcqvMte3vrrzE/OBPg9iqh2tKrSe0y7+0sRfTJyQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-socket.io@0.39.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-4J2ehk5mJyDT6j2yJCOuPxAjit5QB1Fwzhx0LID5jjvhI9LxzZIGDNAPTTHyghSiaRDeNMzceXKkkEQJkg2MNw==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-tedious@0.10.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-maSXMxgS0szU52khQzAROV4nWr+3M8mZajMQOc3/7tYjo+Q3HlWAowOuagPvp4pwROK4x6oDaFYlY+ZSj1qjYA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ '@types/tedious': 4.0.14
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-undici@0.2.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-RH9WdVRtpnyp8kvya2RYqKsJouPxvHl7jKPsIfrbL8u2QCKloAGi0uEqDHoOS15ZRYPQTDXZ7d8jSpUgSQmvpA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.7.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation-winston@0.37.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-vOx55fxdNjo2XojJf8JN4jP7VVvQCh7UQzzQ2Q2FpGJpt8Z3EErKaY8xOBkOuJH0TtL/Q72rmIn9c+mRG46BxA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/instrumentation@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-JIrvhpgqY6437QIqToyozrUG1h5UhwHkaGK/WAX+fkrpyPtc+RO5FkRtUd9BH0MibabHHvqsnBGKfKVijbmp8w==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.3.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@types/shimmer': 1.0.5
+ import-in-the-middle: 1.7.4
+ require-in-the-middle: 7.3.0
+ semver: 7.6.0
+ shimmer: 1.2.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/otlp-exporter-base@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-UYlnOYyDdzo1Gw559EHCzru0RwhvuXCwoH8jGo9J4gO1TE58GjnEmIjomMsKBCym3qWNJfIQXw+9SZCV0DdQNg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/otlp-grpc-exporter-base@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-ZAS+4pq8o7dsugGTwV9s6JMKSxi+guIHdn0acOv0bqj26e9pWDFx5Ky+bI0aY46uR9Y0JyXqY+KAEYM/SO3DFA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@grpc/grpc-js': 1.10.8
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ protobufjs: 7.3.0
+ dev: false
+
+ /@opentelemetry/otlp-proto-exporter-base@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-gxxxwfk0inDMb5DLeuxQ3L8TtptxSiTNHE4nnAJH34IQXAVRhXSXW1rK8PmDKDngRPIZ6J7ncUCjjIn8b+AgqQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0)
+ protobufjs: 7.3.0
+ dev: false
+
+ /@opentelemetry/otlp-transformer@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-OppYOXwV9LQqqtYUCywqoOqX/JT9LQ5/FMuPZ//eTkvuHdUC4ZMwz2c6uSoT2R90GWvvGnF1iEqTGyTT3xAt2Q==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.3.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/propagation-utils@0.30.9(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-DP2Y91zyw2uNgKLbej6c3IIjyF27sKnRK/UY/6msMIVGPIbZgtH9L0JOioN5L5kYjEkH4CDvt921SjutN7hY4A==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ dev: false
+
+ /@opentelemetry/propagator-aws-xray@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-RzwoLe6QzsYGcpmxxDbbbgSpe3ncxSM4dtFHXh/rCYGjyq0nZGXKvk26mJtWZ4kQ3nuiIoqSZueIuGmt/mvOTA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/propagator-b3@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-nda97ZwhpZKyUJTXqQuKzNhPMUgMLunbbGWn8kroBwegn+nh6OhtyGkrVQsQLNdVKJl0KeB5z0ZgeWszrYhwFw==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/propagator-jaeger@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-7bRBJn3FG1l195A1m+xXRHvgzAOBsfmRi9uZ5Da18oTh7BLmNDiA8+kpk51FpTsU1PCikPVpRDNPhKVB6lyzZg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/redis-common@0.36.2:
+ resolution: {integrity: sha512-faYX1N0gpLhej/6nyp6bgRjzAKXn5GOEMYY7YhciSfCoITAktLUtQ36d24QEWNA1/WA1y6qQunCe0OhHRkVl9g==}
+ engines: {node: '>=14'}
+ dev: false
+
+ /@opentelemetry/resource-detector-alibaba-cloud@0.28.9(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-cTV2YFFkKAZUZgs5SMknIX4MmFb/0KQhrJuiz2dtJKnI1n7OanCgnMkuXzJ5+CbifRB57I2g3HnwcSPOx3zsKw==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/resource-detector-aws@1.5.0(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-JNk/kSzzNQaiMo/F0b/bm8S3Qtr/m89BckN9B4U/cPHSqKLdxX03vgRBOqkXJ5KlAD8kc6K1Etcr8QfvGw6+uA==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/resource-detector-azure@0.2.7(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-+R3VnPaK6rc+kKfdvhgQlYDGXy0+JMAjPNDjcRQSeXY8pVOzHGCIrY+gT6gUrpjsw8w1EgNBVofr+qeNOr+o4A==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/resource-detector-container@0.3.9(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-kfJ78av51EKk09fn5cwe5UNt+G7UBLvPTmfK/nZzvmNs7enw/TGB8X0j0JUHb9487ypRGph6MBoeP1+qZh+w1A==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/resource-detector-gcp@0.29.9(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-rTUm0U0cF8f75JzeMpMLbQ4m1uLph+Q31DQKk8ekdDe6SZ1EPD4rM1JgRnbxZtsC2sE8ju87s5nEio77xPz7dQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.0.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ gcp-metadata: 6.1.0
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+ dev: false
+
+ /@opentelemetry/resources@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-cyv0MwAaPF7O86x5hk3NNgenMObeejZFLJJDVuSeSMIsknlsj3oOZzRv3qSzlwYomXsICfBeFFlxwHQte5mGXQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/sdk-logs@0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-ULQQtl82b673PpZc5/0EtH4V+BrwVOgKJZEB7tYZnGTG3I98tQVk89S9/JSixomDr++F4ih+LSJTCqIKBz+MQQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.4.0 <1.9.0'
+ '@opentelemetry/api-logs': '>=0.39.1'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
+ /@opentelemetry/sdk-metrics@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-FrAqCbbGao9iKI+Mgh+OsC9+U2YMoXnlDHe06yH7dvavCKzE3S892dGtX54+WhSFVxHR/TMRVJiK/CV93GR0TQ==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.3.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ lodash.merge: 4.6.2
+ dev: false
+
+ /@opentelemetry/sdk-node@0.51.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-GgmNF9C+6esr8PIJxCqHw84rEOkYm6XdFWZ2+Wyc3qaUt92ACoN7uSw5iKNvaUq62W0xii1wsGxwHzyENtPP8w==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.3.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/api-logs': 0.51.1
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-trace-otlp-grpc': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-trace-otlp-http': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-trace-otlp-proto': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/exporter-zipkin': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-node': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@opentelemetry/sdk-trace-base@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-zz+N423IcySgjihl2NfjBf0qw1RWe11XIAWVrTNOSSI6dtSPJiVom2zipFB2AEEtJWpv0Iz6DY6+TjnyTV5pWg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/semantic-conventions': 1.24.1
+ dev: false
+
+ /@opentelemetry/sdk-trace-node@1.24.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-/FZX8uWaGIAwsDhqI8VvQ+qWtfMNlXjaFYGc+vmxgdRFppCSSIRwrPyIhJO1qx61okyYhoyxVEZAfoiNxrfJCg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': '>=1.0.0 <1.9.0'
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/context-async-hooks': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/propagator-b3': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/propagator-jaeger': 1.24.1(@opentelemetry/api@1.8.0)
+ '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0)
+ semver: 7.6.0
+ dev: false
+
+ /@opentelemetry/semantic-conventions@1.24.1:
+ resolution: {integrity: sha512-VkliWlS4/+GHLLW7J/rVBA00uXus1SWvwFvcUDxDwmFxYfg/2VI6ekwdXS28cjI8Qz2ky2BzG8OUHo+WeYIWqw==}
+ engines: {node: '>=14'}
+ dev: false
+
+ /@opentelemetry/sql-common@0.40.1(@opentelemetry/api@1.8.0):
+ resolution: {integrity: sha512-nSDlnHSqzC3pXn/wZEZVLuAuJ1MYMXPBwtv2qAbCa3847SaHItdE7SzUq/Jtb0KZmh1zfAbNi3AAMjztTT4Ugg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ '@opentelemetry/api': ^1.1.0
+ dependencies:
+ '@opentelemetry/api': 1.8.0
+ '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0)
+ dev: false
+
/@pkgjs/parseargs@0.11.0:
resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}
engines: {node: '>=14'}
@@ -1352,6 +2404,49 @@ packages:
dev: false
optional: true
+ /@protobufjs/aspromise@1.1.2:
+ resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}
+ dev: false
+
+ /@protobufjs/base64@1.1.2:
+ resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==}
+ dev: false
+
+ /@protobufjs/codegen@2.0.4:
+ resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==}
+ dev: false
+
+ /@protobufjs/eventemitter@1.1.0:
+ resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==}
+ dev: false
+
+ /@protobufjs/fetch@1.1.0:
+ resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==}
+ dependencies:
+ '@protobufjs/aspromise': 1.1.2
+ '@protobufjs/inquire': 1.1.0
+ dev: false
+
+ /@protobufjs/float@1.0.2:
+ resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==}
+ dev: false
+
+ /@protobufjs/inquire@1.1.0:
+ resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==}
+ dev: false
+
+ /@protobufjs/path@1.1.2:
+ resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==}
+ dev: false
+
+ /@protobufjs/pool@1.1.0:
+ resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==}
+ dev: false
+
+ /@protobufjs/utf8@1.1.0:
+ resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
+ dev: false
+
/@puppeteer/browsers@2.2.1:
resolution: {integrity: sha512-QSXujx4d4ogDamQA8ckkkRieFzDgZEuZuGiey9G7CuDcbnX4iINKWxTPC5Br2AEzY9ICAvcndqgAUFMMKnS/Tw==}
engines: {node: '>=18'}
@@ -1554,6 +2649,16 @@ packages:
resolution: {integrity: sha512-+jby/Guq9H8O7NWgCv6X8VAiQE8Dr/nccsCtL74xyHKhu2Knu5EAKmOZj3nLCnLm1KooUzKY+5DsnGVqhM8/wQ==}
dev: true
+ /@types/accepts@1.3.7:
+ resolution: {integrity: sha512-Pay9fq2lM2wXPWbteBsRAGiWH2hig4ZE2asK+mm7kUzlxRTfL961rj89I6zV/E3PcIkDqyuBEcMxFT7rccugeQ==}
+ dependencies:
+ '@types/node': 20.11.25
+ dev: false
+
+ /@types/aws-lambda@8.10.122:
+ resolution: {integrity: sha512-vBkIh9AY22kVOCEKo5CJlyCgmSWvasC+SWUxL/x/vOwRobMpI/HG1xp/Ae3AqmSiZeLUbOhW0FCD3ZjqqUxmXw==}
+ dev: false
+
/@types/babel__core@7.20.5:
resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==}
dependencies:
@@ -1588,7 +2693,6 @@ packages:
dependencies:
'@types/connect': 3.4.38
'@types/node': 20.11.25
- dev: true
/@types/bull@4.10.0:
resolution: {integrity: sha512-RkYW8K2H3J76HT6twmHYbzJ0GtLDDotpLP9ah9gtiA7zfF6peBH1l5fEiK0oeIZ3/642M7Jcb9sPmor8Vf4w6g==}
@@ -1599,11 +2703,35 @@ packages:
- supports-color
dev: true
+ /@types/bunyan@1.8.9:
+ resolution: {integrity: sha512-ZqS9JGpBxVOvsawzmVt30sP++gSQMTejCkIAQ3VdadOcRE8izTyW66hufvwLeH+YEGP6Js2AW7Gz+RMyvrEbmw==}
+ dependencies:
+ '@types/node': 20.11.25
+ dev: false
+
+ /@types/connect@3.4.36:
+ resolution: {integrity: sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==}
+ dependencies:
+ '@types/node': 20.11.25
+ dev: false
+
/@types/connect@3.4.38:
resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==}
dependencies:
'@types/node': 20.11.25
- dev: true
+
+ /@types/content-disposition@0.5.8:
+ resolution: {integrity: sha512-QVSSvno3dE0MgO76pJhmv4Qyi/j0Yk9pBp0Y7TJ2Tlj+KCgJWY6qX7nnxCOLkZ3VYRSIk1WTxCvwUSdx6CCLdg==}
+ dev: false
+
+ /@types/cookies@0.9.0:
+ resolution: {integrity: sha512-40Zk8qR147RABiQ7NQnBzWzDcjKzNrntB5BAmeGCb2p/MIyOE+4BVvc17wumsUqUw00bJYqoXFHYygQnEFh4/Q==}
+ dependencies:
+ '@types/connect': 3.4.38
+ '@types/express': 4.17.21
+ '@types/keygrip': 1.0.6
+ '@types/node': 20.11.25
+ dev: false
/@types/cors@2.8.17:
resolution: {integrity: sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==}
@@ -1618,7 +2746,6 @@ packages:
'@types/qs': 6.9.12
'@types/range-parser': 1.2.7
'@types/send': 0.17.4
- dev: true
/@types/express@4.17.21:
resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==}
@@ -1627,7 +2754,6 @@ packages:
'@types/express-serve-static-core': 4.17.43
'@types/qs': 6.9.12
'@types/serve-static': 1.15.5
- dev: true
/@types/graceful-fs@4.1.9:
resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==}
@@ -1635,9 +2761,12 @@ packages:
'@types/node': 20.11.25
dev: true
+ /@types/http-assert@1.5.5:
+ resolution: {integrity: sha512-4+tE/lwdAahgZT1g30Jkdm9PzFRde0xwxBNUyRsCitRvCQB90iuA2uJYdUnhnANRcqGXaWOGY4FEoxeElNAK2g==}
+ dev: false
+
/@types/http-errors@2.0.4:
resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==}
- dev: true
/@types/istanbul-lib-coverage@2.0.6:
resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==}
@@ -1662,13 +2791,52 @@ packages:
pretty-format: 29.7.0
dev: true
+ /@types/keygrip@1.0.6:
+ resolution: {integrity: sha512-lZuNAY9xeJt7Bx4t4dx0rYCDqGPW8RXhQZK1td7d4H6E9zYbLoOtjBvfwdTKpsyxQI/2jv+armjX/RW+ZNpXOQ==}
+ dev: false
+
+ /@types/koa-compose@3.2.8:
+ resolution: {integrity: sha512-4Olc63RY+MKvxMwVknCUDhRQX1pFQoBZ/lXcRLP69PQkEpze/0cr8LNqJQe5NFb/b19DWi2a5bTi2VAlQzhJuA==}
+ dependencies:
+ '@types/koa': 2.14.0
+ dev: false
+
+ /@types/koa@2.14.0:
+ resolution: {integrity: sha512-DTDUyznHGNHAl+wd1n0z1jxNajduyTh8R53xoewuerdBzGo6Ogj6F2299BFtrexJw4NtgjsI5SMPCmV9gZwGXA==}
+ dependencies:
+ '@types/accepts': 1.3.7
+ '@types/content-disposition': 0.5.8
+ '@types/cookies': 0.9.0
+ '@types/http-assert': 1.5.5
+ '@types/http-errors': 2.0.4
+ '@types/keygrip': 1.0.6
+ '@types/koa-compose': 3.2.8
+ '@types/node': 20.11.25
+ dev: false
+
+ /@types/koa__router@12.0.3:
+ resolution: {integrity: sha512-5YUJVv6NwM1z7m6FuYpKfNLTZ932Z6EF6xy2BbtpJSyn13DKNQEkXVffFVSnJHxvwwWh2SAeumpjAYUELqgjyw==}
+ dependencies:
+ '@types/koa': 2.14.0
+ dev: false
+
+ /@types/memcached@2.2.10:
+ resolution: {integrity: sha512-AM9smvZN55Gzs2wRrqeMHVP7KE8KWgCJO/XL5yCly2xF6EKa4YlbpK+cLSAH4NG/Ah64HrlegmGqW8kYws7Vxg==}
+ dependencies:
+ '@types/node': 20.11.25
+ dev: false
+
/@types/mime@1.3.5:
resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==}
- dev: true
/@types/mime@3.0.4:
resolution: {integrity: sha512-iJt33IQnVRkqeqC7PzBHPTC6fDlRNRW8vjrgqtScAhrmMwe8c4Eo7+fUGTa+XdWrpEgpyKWMYmi2dIwMAYRzPw==}
- dev: true
+
+ /@types/mysql@2.15.22:
+ resolution: {integrity: sha512-wK1pzsJVVAjYCSZWQoWHziQZbNggXFDUEIGf54g4ZM/ERuP86uGdWeKZWMYlqTPMZfHJJvLPyogXGvCOg87yLQ==}
+ dependencies:
+ '@types/node': 20.11.25
+ dev: false
/@types/node-fetch@2.6.11:
resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==}
@@ -1688,17 +2856,29 @@ packages:
dependencies:
undici-types: 5.26.5
+ /@types/pg-pool@2.0.4:
+ resolution: {integrity: sha512-qZAvkv1K3QbmHHFYSNRYPkRjOWRLBYrL4B9c+wG0GSVGBw0NtJwPcgx/DSddeDJvRGMHCEQ4VMEVfuJ/0gZ3XQ==}
+ dependencies:
+ '@types/pg': 8.6.1
+ dev: false
+
+ /@types/pg@8.6.1:
+ resolution: {integrity: sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==}
+ dependencies:
+ '@types/node': 20.11.25
+ pg-protocol: 1.6.1
+ pg-types: 2.2.0
+ dev: false
+
/@types/phoenix@1.6.4:
resolution: {integrity: sha512-B34A7uot1Cv0XtaHRYDATltAdKx0BvVKNgYNqE4WjtPUa4VQJM7kxeXcVKaH+KS+kCmZ+6w+QaUdcljiheiBJA==}
dev: false
/@types/qs@6.9.12:
resolution: {integrity: sha512-bZcOkJ6uWrL0Qb2NAWKa7TBU+mJHPzhx9jjLL1KHF+XpzEcR7EXHvjbHlGtR/IsP1vyPrehuS6XqkmaePy//mg==}
- dev: true
/@types/range-parser@1.2.7:
resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==}
- dev: true
/@types/retry@0.12.0:
resolution: {integrity: sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==}
@@ -1709,7 +2889,6 @@ packages:
dependencies:
'@types/mime': 1.3.5
'@types/node': 20.11.25
- dev: true
/@types/serve-static@1.15.5:
resolution: {integrity: sha512-PDRk21MnK70hja/YF8AHfC7yIsiQHn1rcXx7ijCFBX/k+XQJhQT/gw3xekXKJvx+5SXaMMS8oqQy09Mzvz2TuQ==}
@@ -1717,7 +2896,10 @@ packages:
'@types/http-errors': 2.0.4
'@types/mime': 3.0.4
'@types/node': 20.11.25
- dev: true
+
+ /@types/shimmer@1.0.5:
+ resolution: {integrity: sha512-9Hp0ObzwwO57DpLFF0InUjUm/II8GmKAvzbefxQTihCb7KI6yc9yzf0nLc4mVdby5N4DRCgQM2wCup9KTieeww==}
+ dev: false
/@types/stack-trace@0.0.29:
resolution: {integrity: sha512-TgfOX+mGY/NyNxJLIbDWrO9DjGoVSW9+aB8H2yy1fy32jsvxijhmyJI9fDFgvz3YP4lvJaq9DzdR/M1bOgVc9g==}
@@ -1727,6 +2909,16 @@ packages:
resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==}
dev: true
+ /@types/tedious@4.0.14:
+ resolution: {integrity: sha512-KHPsfX/FoVbUGbyYvk1q9MMQHLPeRZhRJZdO45Q4YjvFkv4hMNghCWTvy7rdKessBsmtz4euWCWAB6/tVpI1Iw==}
+ dependencies:
+ '@types/node': 20.11.25
+ dev: false
+
+ /@types/triple-beam@1.3.5:
+ resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==}
+ dev: false
+
/@types/uuid@9.0.8:
resolution: {integrity: sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==}
dev: false
@@ -1765,6 +2957,10 @@ packages:
dev: false
optional: true
+ /@xmldom/xmldom@0.8.10:
+ resolution: {integrity: sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==}
+ engines: {node: '>=10.0.0'}
+
/abbrev@1.1.1:
resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==}
dev: true
@@ -1783,6 +2979,14 @@ packages:
mime-types: 2.1.35
negotiator: 0.6.3
+ /acorn-import-attributes@1.9.5(acorn@8.11.3):
+ resolution: {integrity: sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==}
+ peerDependencies:
+ acorn: ^8
+ dependencies:
+ acorn: 8.11.3
+ dev: false
+
/acorn-walk@8.3.2:
resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==}
engines: {node: '>=0.4.0'}
@@ -1792,7 +2996,6 @@ packages:
resolution: {integrity: sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==}
engines: {node: '>=0.4.0'}
hasBin: true
- dev: true
/afinn-165-financialmarketnews@3.0.0:
resolution: {integrity: sha512-0g9A1S3ZomFIGDTzZ0t6xmv4AuokBvBmpes8htiyHpH7N4xDmvSQL6UxL/Zcs2ypRb3VwgCscaD8Q3zEawKYhw==}
@@ -1895,7 +3098,6 @@ packages:
resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==}
dependencies:
sprintf-js: 1.0.3
- dev: true
/argparse@2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
@@ -2071,13 +3273,16 @@ packages:
/base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
- dev: false
/basic-ftp@5.0.5:
resolution: {integrity: sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==}
engines: {node: '>=10.0.0'}
dev: false
+ /bignumber.js@9.1.2:
+ resolution: {integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==}
+ dev: false
+
/bin-links@4.0.3:
resolution: {integrity: sha512-obsRaULtJurnfox/MDwgq6Yo9kzbv1CPTk/1/s7Z/61Lezc8IKkFCOXNeVLXz0456WRzBQmSsDWlai2tIhBsfA==}
engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
@@ -2096,6 +3301,9 @@ packages:
resolution: {integrity: sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==}
dev: false
+ /bluebird@3.4.7:
+ resolution: {integrity: sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==}
+
/body-parser@1.20.2:
resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==}
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
@@ -2185,6 +3393,13 @@ packages:
ieee754: 1.2.1
dev: false
+ /buffer@6.0.3:
+ resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==}
+ dependencies:
+ base64-js: 1.5.1
+ ieee754: 1.2.1
+ dev: false
+
/bull@4.12.2:
resolution: {integrity: sha512-WPuc0VCYx+cIVMiZtPwRpWyyJFBrj4/OgKJ6n9Jf4tIw7rQNV+HAKQv15UDkcTvfpGFehvod7Fd1YztbYSJIDQ==}
engines: {node: '>=12'}
@@ -2321,7 +3536,6 @@ packages:
/cjs-module-lexer@1.2.3:
resolution: {integrity: sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ==}
- dev: true
/class-transformer@0.5.1:
resolution: {integrity: sha512-SQa1Ws6hUbfC98vKGxZH3KFY0Y1lm5Zm0SY8XX9zbK7FJCyVEac3ATW0RIpwzW+oOfmHE5PMPufDG9hCfoEOMw==}
@@ -2421,6 +3635,9 @@ packages:
resolution: {integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==}
dev: true
+ /core-util-is@1.0.3:
+ resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
+
/cors@2.8.5:
resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==}
engines: {node: '>= 0.10'}
@@ -2659,6 +3876,9 @@ packages:
md5: 2.3.0
dev: false
+ /dingbat-to-unicode@1.0.1:
+ resolution: {integrity: sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w==}
+
/dom-serializer@2.0.0:
resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==}
dependencies:
@@ -2695,6 +3915,11 @@ packages:
engines: {node: '>=12'}
dev: false
+ /duck@0.1.12:
+ resolution: {integrity: sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg==}
+ dependencies:
+ underscore: 1.13.6
+
/eastasianwidth@0.2.0:
resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
dev: false
@@ -2845,6 +4070,11 @@ packages:
resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==}
dev: false
+ /events@3.3.0:
+ resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
+ engines: {node: '>=0.8.x'}
+ dev: false
+
/execa@5.1.1:
resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==}
engines: {node: '>=10'}
@@ -2927,6 +4157,10 @@ packages:
transitivePeerDependencies:
- supports-color
+ /extend@3.0.2:
+ resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==}
+ dev: false
+
/extract-zip@2.0.1:
resolution: {integrity: sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==}
engines: {node: '>= 10.17.0'}
@@ -2973,6 +4207,10 @@ packages:
pend: 1.2.0
dev: false
+ /fecha@4.2.3:
+ resolution: {integrity: sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==}
+ dev: false
+
/fetch-blob@3.2.0:
resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
engines: {node: ^12.20 || >= 14.13}
@@ -3113,6 +4351,31 @@ packages:
/function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
+ /gaxios@6.6.0:
+ resolution: {integrity: sha512-bpOZVQV5gthH/jVCSuYuokRo2bTKOcuBiVWpjmTn6C5Agl5zclGfTljuGsQZxwwDBkli+YhZhP4TdlqTnhOezQ==}
+ engines: {node: '>=14'}
+ dependencies:
+ extend: 3.0.2
+ https-proxy-agent: 7.0.4
+ is-stream: 2.0.1
+ node-fetch: 2.7.0
+ uuid: 9.0.1
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+ dev: false
+
+ /gcp-metadata@6.1.0:
+ resolution: {integrity: sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==}
+ engines: {node: '>=14'}
+ dependencies:
+ gaxios: 6.6.0
+ json-bigint: 1.0.0
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+ dev: false
+
/generic-pool@3.9.0:
resolution: {integrity: sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==}
engines: {node: '>= 4'}
@@ -3332,6 +4595,9 @@ packages:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
dev: true
+ /immediate@3.0.6:
+ resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==}
+
/import-fresh@3.3.0:
resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
engines: {node: '>=6'}
@@ -3340,6 +4606,15 @@ packages:
resolve-from: 4.0.0
dev: false
+ /import-in-the-middle@1.7.4:
+ resolution: {integrity: sha512-Lk+qzWmiQuRPPulGQeK5qq0v32k2bHnWrRPFgqyvhw7Kkov5L6MOLOIU3pcWeujc9W4q54Cp3Q2WV16eQkc7Bg==}
+ dependencies:
+ acorn: 8.11.3
+ acorn-import-attributes: 1.9.5(acorn@8.11.3)
+ cjs-module-lexer: 1.2.3
+ module-details-from-path: 1.0.3
+ dev: false
+
/import-local@3.1.0:
resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==}
engines: {node: '>=8'}
@@ -3414,7 +4689,6 @@ packages:
resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==}
dependencies:
hasown: 2.0.1
- dev: true
/is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
@@ -3460,7 +4734,9 @@ packages:
/is-stream@2.0.1:
resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
engines: {node: '>=8'}
- dev: true
+
+ /isarray@1.0.0:
+ resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
/isexe@2.0.0:
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
@@ -4010,6 +5286,12 @@ packages:
hasBin: true
dev: true
+ /json-bigint@1.0.0:
+ resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==}
+ dependencies:
+ bignumber.js: 9.1.2
+ dev: false
+
/json-parse-even-better-errors@2.3.1:
resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
@@ -4022,6 +5304,10 @@ packages:
resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
dev: false
+ /json-stringify-safe@5.0.1:
+ resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==}
+ dev: false
+
/json5@2.2.3:
resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==}
engines: {node: '>=6'}
@@ -4049,6 +5335,14 @@ packages:
engines: {node: '>=0.10.0'}
dev: false
+ /jszip@3.10.1:
+ resolution: {integrity: sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==}
+ dependencies:
+ lie: 3.3.0
+ pako: 1.0.11
+ readable-stream: 2.3.8
+ setimmediate: 1.0.5
+
/kareem@2.5.1:
resolution: {integrity: sha512-7jFxRVm+jD+rkq3kY0iZDJfsO2/t4BBPeEb2qKn2lR/9KhuksYk5hxzfRYWMPV8P/x2d0kHD306YyWLzjjH+uA==}
engines: {node: '>=12.0.0'}
@@ -4064,7 +5358,7 @@ packages:
engines: {node: '>=6'}
dev: true
- /langchain@0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2):
+ /langchain@0.1.25(@supabase/supabase-js@2.39.7)(axios@1.6.7)(cheerio@1.0.0-rc.12)(ioredis@5.3.2)(mammoth@1.7.2)(pdf-parse@1.1.1)(puppeteer@22.6.3)(redis@4.6.13)(typesense@1.7.2):
resolution: {integrity: sha512-sfEChvr4H2CklHdSByNBbytwBrFhgtA5kPOnwcBrxuXGg1iOaTzhVxQA0QcNcQucI3hZrsNbZjxGp+Can1ooZQ==}
engines: {node: '>=18'}
peerDependencies:
@@ -4238,6 +5532,7 @@ packages:
jsonpointer: 5.0.1
langchainhub: 0.0.8
langsmith: 0.1.13
+ mammoth: 1.7.2
ml-distance: 4.0.1
openapi-types: 12.1.3
p-retry: 4.6.2
@@ -4344,6 +5639,11 @@ packages:
type-check: 0.3.2
dev: false
+ /lie@3.3.0:
+ resolution: {integrity: sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==}
+ dependencies:
+ immediate: 3.0.6
+
/lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
@@ -4354,19 +5654,51 @@ packages:
p-locate: 4.1.0
dev: true
+ /lodash.camelcase@4.3.0:
+ resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==}
+ dev: false
+
/lodash.defaults@4.2.0:
resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==}
/lodash.isarguments@3.1.0:
resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==}
+ /lodash.isobject@3.0.2:
+ resolution: {integrity: sha512-3/Qptq2vr7WeJbB4KHUSKlq8Pl7ASXi3UG6CMbBm8WRtXi8+GHm7mKaU3urfpSEzWe2wCIChs6/sdocUsTKJiA==}
+ dev: false
+
+ /lodash.isplainobject@4.0.6:
+ resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==}
+ dev: false
+
+ /lodash.isstring@4.0.1:
+ resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==}
+ dev: false
+
/lodash.memoize@4.1.2:
resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==}
dev: true
+ /lodash.merge@4.6.2:
+ resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
+ dev: false
+
/lodash@4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+ /logform@2.6.0:
+ resolution: {integrity: sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ==}
+ engines: {node: '>= 12.0.0'}
+ dependencies:
+ '@colors/colors': 1.6.0
+ '@types/triple-beam': 1.3.5
+ fecha: 4.2.3
+ ms: 2.1.3
+ safe-stable-stringify: 2.4.3
+ triple-beam: 1.4.1
+ dev: false
+
/loglevel@1.9.1:
resolution: {integrity: sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==}
engines: {node: '>= 0.6.0'}
@@ -4380,6 +5712,17 @@ packages:
- encoding
dev: false
+ /long@5.2.3:
+ resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==}
+ dev: false
+
+ /lop@0.4.1:
+ resolution: {integrity: sha512-9xyho9why2A2tzm5aIcMWKvzqKsnxrf9B5I+8O30olh6lQU8PH978LqZoI4++37RBgS1Em5i54v1TFs/3wnmXQ==}
+ dependencies:
+ duck: 0.1.12
+ option: 0.2.4
+ underscore: 1.13.6
+
/lru-cache@10.2.0:
resolution: {integrity: sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==}
engines: {node: 14 || >=16.14}
@@ -4423,6 +5766,22 @@ packages:
tmpl: 1.0.5
dev: true
+ /mammoth@1.7.2:
+ resolution: {integrity: sha512-MqWU2hcLf1I5QMKyAbfJCvrLxnv5WztrAQyorfZ+WPq7Hk82vZFmvfR2/64ajIPpM4jlq0TXp1xZvp/FFaL1Ug==}
+ engines: {node: '>=12.0.0'}
+ hasBin: true
+ dependencies:
+ '@xmldom/xmldom': 0.8.10
+ argparse: 1.0.10
+ base64-js: 1.5.1
+ bluebird: 3.4.7
+ dingbat-to-unicode: 1.0.1
+ jszip: 3.10.1
+ lop: 0.4.1
+ path-is-absolute: 1.0.1
+ underscore: 1.13.6
+ xmlbuilder: 10.1.1
+
/md5@2.3.0:
resolution: {integrity: sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==}
dependencies:
@@ -4572,6 +5931,10 @@ packages:
num-sort: 2.1.0
dev: false
+ /module-details-from-path@1.0.3:
+ resolution: {integrity: sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==}
+ dev: false
+
/moment@2.30.1:
resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==}
dev: false
@@ -4867,6 +6230,9 @@ packages:
resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==}
dev: false
+ /option@0.2.4:
+ resolution: {integrity: sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A==}
+
/optionator@0.8.3:
resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==}
engines: {node: '>= 0.8.0'}
@@ -4957,6 +6323,9 @@ packages:
netmask: 2.0.2
dev: false
+ /pako@1.0.11:
+ resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==}
+
/parent-module@1.0.1:
resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
engines: {node: '>=6'}
@@ -5002,7 +6371,6 @@ packages:
/path-is-absolute@1.0.1:
resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
engines: {node: '>=0.10.0'}
- dev: true
/path-key@3.1.1:
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
@@ -5010,7 +6378,6 @@ packages:
/path-parse@1.0.7:
resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
- dev: true
/path-scurry@1.10.2:
resolution: {integrity: sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==}
@@ -5037,6 +6404,26 @@ packages:
resolution: {integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==}
dev: false
+ /pg-int8@1.0.1:
+ resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==}
+ engines: {node: '>=4.0.0'}
+ dev: false
+
+ /pg-protocol@1.6.1:
+ resolution: {integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==}
+ dev: false
+
+ /pg-types@2.2.0:
+ resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==}
+ engines: {node: '>=4'}
+ dependencies:
+ pg-int8: 1.0.1
+ postgres-array: 2.0.0
+ postgres-bytea: 1.0.0
+ postgres-date: 1.0.7
+ postgres-interval: 1.2.0
+ dev: false
+
/picocolors@1.0.0:
resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==}
@@ -5045,6 +6432,13 @@ packages:
engines: {node: '>=8.6'}
dev: true
+ /pino-abstract-transport@1.2.0:
+ resolution: {integrity: sha512-Guhh8EZfPCfH+PMXAb6rKOjGQEoy0xlAIn+irODG5kgfYV+BQ0rGYYWTIel3P5mmyXqkYkPmdIkywsn6QKUR1Q==}
+ dependencies:
+ readable-stream: 4.5.2
+ split2: 4.2.0
+ dev: false
+
/pirates@4.0.6:
resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==}
engines: {node: '>= 6'}
@@ -5071,6 +6465,28 @@ packages:
source-map-js: 1.0.2
dev: false
+ /postgres-array@2.0.0:
+ resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==}
+ engines: {node: '>=4'}
+ dev: false
+
+ /postgres-bytea@1.0.0:
+ resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==}
+ engines: {node: '>=0.10.0'}
+ dev: false
+
+ /postgres-date@1.0.7:
+ resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==}
+ engines: {node: '>=0.10.0'}
+ dev: false
+
+ /postgres-interval@1.2.0:
+ resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ xtend: 4.0.2
+ dev: false
+
/posthog-node@4.0.1:
resolution: {integrity: sha512-rtqm2h22QxLGBrW2bLYzbRhliIrqgZ0k+gF0LkQ1SNdeD06YE5eilV0MxZppFSxC8TfH0+B0cWCuebEnreIDgQ==}
engines: {node: '>=15.0.0'}
@@ -5095,6 +6511,14 @@ packages:
react-is: 18.2.0
dev: true
+ /process-nextick-args@2.0.1:
+ resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
+
+ /process@0.11.10:
+ resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==}
+ engines: {node: '>= 0.6.0'}
+ dev: false
+
/progress@2.0.3:
resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==}
engines: {node: '>=0.4.0'}
@@ -5129,6 +6553,25 @@ packages:
sisteransi: 1.0.5
dev: true
+ /protobufjs@7.3.0:
+ resolution: {integrity: sha512-YWD03n3shzV9ImZRX3ccbjqLxj7NokGN0V/ESiBV5xWqrommYHYiihuIyavq03pWSGqlyvYUFmfoMKd+1rPA/g==}
+ engines: {node: '>=12.0.0'}
+ requiresBuild: true
+ dependencies:
+ '@protobufjs/aspromise': 1.1.2
+ '@protobufjs/base64': 1.1.2
+ '@protobufjs/codegen': 2.0.4
+ '@protobufjs/eventemitter': 1.1.0
+ '@protobufjs/fetch': 1.1.0
+ '@protobufjs/float': 1.0.2
+ '@protobufjs/inquire': 1.1.0
+ '@protobufjs/path': 1.1.2
+ '@protobufjs/pool': 1.1.0
+ '@protobufjs/utf8': 1.1.0
+ '@types/node': 20.11.25
+ long: 5.2.3
+ dev: false
+
/proxy-addr@2.0.7:
resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
engines: {node: '>= 0.10'}
@@ -5251,6 +6694,37 @@ packages:
engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
dev: true
+ /readable-stream@2.3.8:
+ resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
+ dependencies:
+ core-util-is: 1.0.3
+ inherits: 2.0.4
+ isarray: 1.0.0
+ process-nextick-args: 2.0.1
+ safe-buffer: 5.1.2
+ string_decoder: 1.1.1
+ util-deprecate: 1.0.2
+
+ /readable-stream@3.6.2:
+ resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
+ engines: {node: '>= 6'}
+ dependencies:
+ inherits: 2.0.4
+ string_decoder: 1.1.1
+ util-deprecate: 1.0.2
+ dev: false
+
+ /readable-stream@4.5.2:
+ resolution: {integrity: sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dependencies:
+ abort-controller: 3.0.0
+ buffer: 6.0.3
+ events: 3.3.0
+ process: 0.11.10
+ string_decoder: 1.3.0
+ dev: false
+
/readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
@@ -5302,6 +6776,17 @@ packages:
engines: {node: '>=0.10.0'}
dev: false
+ /require-in-the-middle@7.3.0:
+ resolution: {integrity: sha512-nQFEv9gRw6SJAwWD2LrL0NmQvAcO7FBwJbwmr2ttPAacfy0xuiOjE5zt+zM4xDyuyvUaxBi/9gb2SoCyNEVJcw==}
+ engines: {node: '>=8.6.0'}
+ dependencies:
+ debug: 4.3.4
+ module-details-from-path: 1.0.3
+ resolve: 1.22.8
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
/resolve-cwd@3.0.0:
resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==}
engines: {node: '>=8'}
@@ -5331,7 +6816,6 @@ packages:
is-core-module: 2.13.1
path-parse: 1.0.7
supports-preserve-symlinks-flag: 1.0.0
- dev: true
/retry@0.13.1:
resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==}
@@ -5347,6 +6831,9 @@ packages:
resolution: {integrity: sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==}
dev: false
+ /safe-buffer@5.1.2:
+ resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
+
/safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
@@ -5460,6 +6947,9 @@ packages:
gopd: 1.0.1
has-property-descriptors: 1.0.2
+ /setimmediate@1.0.5:
+ resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==}
+
/setprototypeof@1.2.0:
resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
@@ -5477,6 +6967,10 @@ packages:
resolution: {integrity: sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==}
dev: true
+ /shimmer@1.2.1:
+ resolution: {integrity: sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==}
+ dev: false
+
/side-channel@1.0.6:
resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==}
engines: {node: '>= 0.4'}
@@ -5560,9 +7054,13 @@ packages:
memory-pager: 1.5.0
dev: false
+ /split2@4.2.0:
+ resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==}
+ engines: {node: '>= 10.x'}
+ dev: false
+
/sprintf-js@1.0.3:
resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==}
- dev: true
/sprintf-js@1.1.3:
resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==}
@@ -5631,6 +7129,17 @@ packages:
strip-ansi: 7.1.0
dev: false
+ /string_decoder@1.1.1:
+ resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
+ dependencies:
+ safe-buffer: 5.1.2
+
+ /string_decoder@1.3.0:
+ resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
+ dependencies:
+ safe-buffer: 5.2.1
+ dev: false
+
/strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
@@ -5731,7 +7240,6 @@ packages:
/supports-preserve-symlinks-flag@1.0.0:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
engines: {node: '>= 0.4'}
- dev: true
/sylvester@0.0.12:
resolution: {integrity: sha512-SzRP5LQ6Ts2G5NyAa/jg16s8e3R7rfdFjizy1zeoecYWw+nGL+YA1xZvW/+iJmidBGSdLkuvdwTYEyJEb+EiUw==}
@@ -5818,6 +7326,11 @@ packages:
punycode: 2.3.1
dev: false
+ /triple-beam@1.4.1:
+ resolution: {integrity: sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==}
+ engines: {node: '>= 14.0.0'}
+ dev: false
+
/ts-jest@29.1.2(@babel/core@7.24.0)(jest@29.7.0)(typescript@5.4.2):
resolution: {integrity: sha512-br6GJoH/WUX4pu7FbZXuWGKGNDuU7b8Uj77g/Sp7puZV6EXzuByl6JrECvm0MzVzSTkSHWTihsXt+5XYER5b+g==}
engines: {node: ^16.10.0 || ^18.0.0 || >=20.0.0}
@@ -5975,7 +7488,6 @@ packages:
/underscore@1.13.6:
resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==}
- dev: false
/undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
@@ -6022,6 +7534,9 @@ packages:
resolution: {integrity: sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==}
dev: false
+ /util-deprecate@1.0.2:
+ resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
+
/utils-merge@1.0.1:
resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==}
engines: {node: '>= 0.4.0'}
@@ -6100,6 +7615,15 @@ packages:
dependencies:
isexe: 2.0.0
+ /winston-transport@4.7.0:
+ resolution: {integrity: sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg==}
+ engines: {node: '>= 12.0.0'}
+ dependencies:
+ logform: 2.6.0
+ readable-stream: 3.6.2
+ triple-beam: 1.4.1
+ dev: false
+
/word-wrap@1.2.5:
resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==}
engines: {node: '>=0.10.0'}
@@ -6182,11 +7706,20 @@ packages:
xmlbuilder: 11.0.1
dev: false
+ /xmlbuilder@10.1.1:
+ resolution: {integrity: sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg==}
+ engines: {node: '>=4.0'}
+
/xmlbuilder@11.0.1:
resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==}
engines: {node: '>=4.0'}
dev: false
+ /xtend@4.0.2:
+ resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==}
+ engines: {node: '>=0.4'}
+ dev: false
+
/y18n@5.0.8:
resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
engines: {node: '>=10'}
diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
index 5e3777b..331283e 100644
--- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts
+++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts
@@ -81,7 +81,7 @@ describe("E2E Tests for API Routes", () => {
expect(response.body.data).toHaveProperty("markdown");
expect(response.body.data).toHaveProperty("metadata");
expect(response.body.data).not.toHaveProperty("html");
- expect(response.body.data.content).toContain("🔥 FireCrawl");
+ expect(response.body.data.content).toContain("🔥 Firecrawl");
}, 30000); // 30 seconds timeout
it("should return a successful response with a valid API key and includeHtml set to true", async () => {
@@ -99,10 +99,40 @@ describe("E2E Tests for API Routes", () => {
expect(response.body.data).toHaveProperty("markdown");
expect(response.body.data).toHaveProperty("html");
expect(response.body.data).toHaveProperty("metadata");
- expect(response.body.data.content).toContain("🔥 FireCrawl");
- expect(response.body.data.markdown).toContain("🔥 FireCrawl");
+ expect(response.body.data.content).toContain("🔥 Firecrawl");
+ expect(response.body.data.markdown).toContain("🔥 Firecrawl");
+ expect(response.body.data.html).toContain("<h1");
+ }, 30000); // 30 seconds timeout
+
+ it('should return a successful response for a valid scrape with PDF file', async () => {
+ const response = await request(TEST_URL)
+ .post('/v0/scrape')
+ .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
+ .set('Content-Type', 'application/json')
+ .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001.pdf' });
+ await new Promise((r) => setTimeout(r, 6000));
+
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty('data');
+ expect(response.body.data).toHaveProperty('content');
+ expect(response.body.data).toHaveProperty('metadata');
+ expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy');
+ }, 60000); // 60 seconds
+
+ it('should return a successful response for a valid scrape with PDF file without explicit .pdf extension', async () => {
+ const response = await request(TEST_URL)
+ .post('/v0/scrape')
+ .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
+ .set('Content-Type', 'application/json')
+ .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001' });
+ await new Promise((r) => setTimeout(r, 6000));
+
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty('data');
+ expect(response.body.data).toHaveProperty('content');
+ expect(response.body.data).toHaveProperty('metadata');
+ expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy');
+ }, 60000); // 60 seconds
});
describe("POST /v0/crawl", () => {
@@ -146,7 +176,274 @@ describe("E2E Tests for API Routes", () => {
);
});
- // Additional tests for insufficient credits?
+ it("should return a successful response with a valid API key and valid includes option", async () => {
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({
+ url: "https://mendable.ai",
+ limit: 10,
+ crawlerOptions: {
+ includes: ["blog/*"],
+ },
+ });
+
+ let response;
+ let isFinished = false;
+
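+ // Poll the crawl status endpoint once per second until the job reports "completed".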
+ while (!isFinished) {
+ response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
+ isFinished = response.body.status === "completed";
+
+ if (!isFinished) {
+ await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ }
+ }
+
+ const completedResponse = response;
+
+ const urls = completedResponse.body.data.map(
+ (item: any) => item.metadata?.sourceURL
+ );
+ expect(urls.length).toBeGreaterThan(5);
+ urls.forEach((url: string) => {
+ expect(url.startsWith("https://www.mendable.ai/blog/")).toBeTruthy();
+ });
+
+ expect(completedResponse.statusCode).toBe(200);
+ expect(completedResponse.body).toHaveProperty("status");
+ expect(completedResponse.body.status).toBe("completed");
+ expect(completedResponse.body).toHaveProperty("data");
+ expect(completedResponse.body.data[0]).toHaveProperty("content");
+ expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+ expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+ expect(completedResponse.body.data[0].content).toContain("Mendable");
+ }, 60000); // 60 seconds
+
+ it("should return a successful response with a valid API key and valid excludes option", async () => {
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({
+ url: "https://mendable.ai",
+ limit: 10,
+ crawlerOptions: {
+ excludes: ["blog/*"],
+ },
+ });
+
+ let isFinished = false;
+ let response;
+
+ while (!isFinished) {
+ response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
+ isFinished = response.body.status === "completed";
+
+ if (!isFinished) {
+ await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ }
+ }
+
+ const completedResponse = response;
+
+ const urls = completedResponse.body.data.map(
+ (item: any) => item.metadata?.sourceURL
+ );
+ expect(urls.length).toBeGreaterThan(5);
+ urls.forEach((url: string) => {
+ expect(url.startsWith("https://wwww.mendable.ai/blog/")).toBeFalsy();
+ });
+ }, 60000); // 60 seconds
+
+ it("should return a successful response with a valid API key and limit to 3", async () => {
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({
+ url: "https://mendable.ai",
+ crawlerOptions: { limit: 3 },
+ });
+
+ let isFinished = false;
+ let response;
+
+ while (!isFinished) {
+ response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
+ isFinished = response.body.status === "completed";
+
+ if (!isFinished) {
+ await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ }
+ }
+
+ const completedResponse = response;
+
+ expect(completedResponse.statusCode).toBe(200);
+ expect(completedResponse.body).toHaveProperty("status");
+ expect(completedResponse.body.status).toBe("completed");
+ expect(completedResponse.body).toHaveProperty("data");
+ expect(completedResponse.body.data.length).toBe(3);
+ expect(completedResponse.body.data[0]).toHaveProperty("content");
+ expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+ expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+ expect(completedResponse.body.data[0].content).toContain("Mendable");
+ }, 60000); // 60 seconds
+
+ it("should return a successful response with max depth option for a valid crawl job", async () => {
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({
+ url: "https://www.scrapethissite.com",
+ crawlerOptions: { maxDepth: 2 },
+ });
+ expect(crawlResponse.statusCode).toBe(200);
+
+ const response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
+ expect(response.body.status).toBe("active");
+ // wait for 60 seconds
+ await new Promise((r) => setTimeout(r, 60000));
+ const completedResponse = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ expect(completedResponse.statusCode).toBe(200);
+ expect(completedResponse.body).toHaveProperty("status");
+ expect(completedResponse.body.status).toBe("completed");
+ expect(completedResponse.body).toHaveProperty("data");
+ expect(completedResponse.body.data[0]).toHaveProperty("content");
+ expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+ expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+ const urls = completedResponse.body.data.map(
+ (item: any) => item.metadata?.sourceURL
+ );
+ expect(urls.length).toBeGreaterThan(1);
+
+ // Check if all URLs have a maximum depth of 1
+ urls.forEach((url: string) => {
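+ // Depth here = number of non-empty path segments, e.g. "/pages/frames/" has depth 2.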
+ const depth = new URL(url).pathname.split("/").filter(Boolean).length;
+ expect(depth).toBeLessThanOrEqual(1);
+ });
+ }, 120000);
+
+ // it("should return a successful response with a valid API key and valid limit option", async () => {
+ // const crawlResponse = await request(TEST_URL)
+ // .post("/v0/crawl")
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ // .set("Content-Type", "application/json")
+ // .send({
+ // url: "https://mendable.ai",
+ // crawlerOptions: { limit: 10 },
+ // });
+
+ // const response = await request(TEST_URL)
+ // .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ // expect(response.statusCode).toBe(200);
+ // expect(response.body).toHaveProperty("status");
+ // expect(response.body.status).toBe("active");
+
+ // let isCompleted = false;
+ // while (!isCompleted) {
+ // const statusCheckResponse = await request(TEST_URL)
+ // .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ // expect(statusCheckResponse.statusCode).toBe(200);
+ // isCompleted = statusCheckResponse.body.status === "completed";
+ // if (!isCompleted) {
+ // await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ // }
+ // }
+
+ // const completedResponse = await request(TEST_URL)
+ // .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ // expect(completedResponse.statusCode).toBe(200);
+ // expect(completedResponse.body).toHaveProperty("status");
+ // expect(completedResponse.body.status).toBe("completed");
+ // expect(completedResponse.body).toHaveProperty("data");
+ // expect(completedResponse.body.data.length).toBe(10);
+ // expect(completedResponse.body.data[0]).toHaveProperty("content");
+ // expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+ // expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+ // expect(completedResponse.body.data[0].content).toContain("Mendable");
+ // expect(completedResponse.body.data[0].content).not.toContain("main menu");
+ // }, 60000); // 60 seconds
+
+ it("should return a successful response for a valid crawl job with includeHtml set to true option", async () => {
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({
+ url: "https://firecrawl.dev",
+ pageOptions: { includeHtml: true },
+ });
+ expect(crawlResponse.statusCode).toBe(200);
+
+ const response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
+ expect(response.body.status).toBe("active");
+
+ let isCompleted = false;
+ while (!isCompleted) {
+ const statusCheckResponse = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ expect(statusCheckResponse.statusCode).toBe(200);
+ isCompleted = statusCheckResponse.body.status === "completed";
+ if (!isCompleted) {
+ await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ }
+ }
+
+ const completedResponse = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ expect(completedResponse.statusCode).toBe(200);
+ expect(completedResponse.body).toHaveProperty("status");
+ expect(completedResponse.body.status).toBe("completed");
+ expect(completedResponse.body).toHaveProperty("data");
+ expect(completedResponse.body.data[0]).toHaveProperty("content");
+ expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+ expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+
+ expect(completedResponse.body.data[0]).toHaveProperty("html");
+ expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+ expect(completedResponse.body.data[0].content).toContain("🔥 Firecrawl");
+ expect(completedResponse.body.data[0].markdown).toContain("Firecrawl");
+ expect(completedResponse.body.data[0].html).toContain("<h1");
+ }, 60000); // 60 seconds
@@ -176,6 +473,16 @@ describe("E2E Tests for API Routes", () => {
// expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
// });
+ it("should return a timeout error when scraping takes longer than the specified timeout", async () => {
+ const response = await request(TEST_URL)
+ .post("/v0/scrape")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({ url: "https://firecrawl.dev", timeout: 1000 });
+
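+ // A 1000 ms timeout is far shorter than a typical scrape, so the API is expected to abort and return 408 (Request Timeout).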
+ expect(response.statusCode).toBe(408);
+ }, 3000);
+
it("should return a successful response with a valid API key", async () => {
const response = await request(TEST_URL)
.post("/v0/crawlWebsitePreview")
@@ -238,7 +545,7 @@ describe("E2E Tests for API Routes", () => {
expect(response.statusCode).toBe(404);
});
- it("should return a successful response for a valid crawl job", async () => {
+ it("should return a successful crawl status response for a valid crawl job", async () => {
const crawlResponse = await request(TEST_URL)
.post("/v0/crawl")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
@@ -246,27 +553,67 @@ describe("E2E Tests for API Routes", () => {
.send({ url: "https://firecrawl.dev" });
expect(crawlResponse.statusCode).toBe(200);
- const response = await request(TEST_URL)
- .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
- .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
- expect(response.statusCode).toBe(200);
- expect(response.body).toHaveProperty("status");
- expect(response.body.status).toBe("active");
+ let isCompleted = false;
+ let completedResponse;
- // wait for 30 seconds
- await new Promise((r) => setTimeout(r, 30000));
+ while (!isCompleted) {
+ const response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
- const completedResponse = await request(TEST_URL)
- .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
- .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
- expect(completedResponse.statusCode).toBe(200);
+ if (response.body.status === "completed") {
+ isCompleted = true;
+ completedResponse = response;
+ } else {
+ await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+ }
+ }
expect(completedResponse.body).toHaveProperty("status");
expect(completedResponse.body.status).toBe("completed");
expect(completedResponse.body).toHaveProperty("data");
expect(completedResponse.body.data[0]).toHaveProperty("content");
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
- expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
+ expect(completedResponse.body.data[0].content).toContain("🔥 Firecrawl");
+ }, 60000); // 60 seconds
+
+ it('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension', async () => {
+ const crawlResponse = await request(TEST_URL)
+ .post('/v0/crawl')
+ .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
+ .set('Content-Type', 'application/json')
+ .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001', crawlerOptions: { limit: 10, excludes: [ 'list/*', 'login', 'abs/*', 'static/*', 'about/*', 'archive/*' ] }});
+ expect(crawlResponse.statusCode).toBe(200);
+
+ let isCompleted = false;
+ let completedResponse;
+
+ while (!isCompleted) {
+ const response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`);
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty('status');
+
+ if (response.body.status === 'completed') {
+ isCompleted = true;
+ completedResponse = response;
+ } else {
+ await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+ }
+ }
+ expect(completedResponse.body.status).toBe('completed');
+ expect(completedResponse.body).toHaveProperty('data');
+ expect(completedResponse.body.data.length).toEqual(1);
+ expect(completedResponse.body.data).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ content: expect.stringContaining('asymmetries might represent, for instance, preferred source orientations to our line of sight.')
+ })
+ ])
+ );
}, 60000); // 60 seconds
it("should return a successful response with max depth option for a valid crawl job", async () => {
@@ -280,18 +627,21 @@ describe("E2E Tests for API Routes", () => {
});
expect(crawlResponse.statusCode).toBe(200);
- const response = await request(TEST_URL)
- .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
- .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
- expect(response.statusCode).toBe(200);
- expect(response.body).toHaveProperty("status");
- expect(response.body.status).toBe("active");
- // wait for 60 seconds
- await new Promise((r) => setTimeout(r, 60000));
- const completedResponse = await request(TEST_URL)
- .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
- .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ let isCompleted = false;
+ let completedResponse;
+ while (!isCompleted) {
+ const response = await request(TEST_URL)
+ .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+ expect(response.statusCode).toBe(200);
+ expect(response.body).toHaveProperty("status");
+
+ if (response.body.status === "completed") {
+ isCompleted = true;
+ completedResponse = response;
+ } else {
+ await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+ }
+ }
expect(completedResponse.statusCode).toBe(200);
expect(completedResponse.body).toHaveProperty("status");
expect(completedResponse.body.status).toBe("completed");
@@ -347,8 +697,8 @@ describe("E2E Tests for API Routes", () => {
// 120 seconds
expect(completedResponse.body.data[0]).toHaveProperty("html");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
- expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
- expect(completedResponse.body.data[0].markdown).toContain("FireCrawl");
+ expect(completedResponse.body.data[0].content).toContain("🔥 Firecrawl");
+ expect(completedResponse.body.data[0].markdown).toContain("Firecrawl");
expect(completedResponse.body.data[0].html).toContain(" {
.send({ url: "https://jestjs.io" });
expect(crawlResponse.statusCode).toBe(200);
-
-
- // wait for 30 seconds
+ // wait for 20 seconds
- await new Promise((r) => setTimeout(r, 10000));
+ await new Promise((r) => setTimeout(r, 20000));
const response = await request(TEST_URL)
.delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
@@ -373,7 +721,7 @@ describe("E2E Tests for API Routes", () => {
expect(response.body).toHaveProperty("status");
expect(response.body.status).toBe("cancelled");
- await new Promise((r) => setTimeout(r, 20000));
+ await new Promise((r) => setTimeout(r, 10000));
const completedResponse = await request(TEST_URL)
.get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
@@ -390,8 +738,6 @@ describe("E2E Tests for API Routes", () => {
}, 60000); // 60 seconds
-
-
describe("POST /v0/scrape with LLM Extraction", () => {
it("should extract data using LLM extraction mode", async () => {
const response = await request(TEST_URL)
@@ -501,6 +847,107 @@ describe("E2E Tests for API Routes", () => {
// }, 120000); // 120 secs
// });
+ describe("POST /v0/crawl with fast mode", () => {
+ it("should complete the crawl under 20 seconds", async () => {
+ const startTime = Date.now();
+
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .set("Content-Type", "application/json")
+ .send({
+ url: "https://flutterbricks.com",
+ crawlerOptions: {
+ mode: "fast"
+ }
+ });
+
+ expect(crawlResponse.statusCode).toBe(200);
+
+ const jobId = crawlResponse.body.jobId;
+ let statusResponse;
+ let isFinished = false;
+
+ while (!isFinished) {
+ statusResponse = await request(TEST_URL)
+ .get(`/v0/crawl/status/${jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ expect(statusResponse.statusCode).toBe(200);
+ isFinished = statusResponse.body.status === "completed";
+
+ if (!isFinished) {
+ await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ }
+ }
+
+ const endTime = Date.now();
+ const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds
+
+ console.log(`Time elapsed: ${timeElapsed} seconds`);
+
+ expect(statusResponse.body.status).toBe("completed");
+ expect(statusResponse.body).toHaveProperty("data");
+ expect(statusResponse.body.data[0]).toHaveProperty("content");
+ expect(statusResponse.body.data[0]).toHaveProperty("markdown");
+ const results = statusResponse.body.data;
+ // results.forEach((result, i) => {
+ // console.log(result.metadata.sourceURL);
+ // });
+ expect(results.length).toBeGreaterThanOrEqual(10);
+ expect(results.length).toBeLessThanOrEqual(15);
+
+ }, 20000);
+
+ // it("should complete the crawl in more than 10 seconds", async () => {
+ // const startTime = Date.now();
+
+ // const crawlResponse = await request(TEST_URL)
+ // .post("/v0/crawl")
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ // .set("Content-Type", "application/json")
+ // .send({
+ // url: "https://flutterbricks.com",
+ // });
+
+ // expect(crawlResponse.statusCode).toBe(200);
+
+ // const jobId = crawlResponse.body.jobId;
+ // let statusResponse;
+ // let isFinished = false;
+
+ // while (!isFinished) {
+ // statusResponse = await request(TEST_URL)
+ // .get(`/v0/crawl/status/${jobId}`)
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ // expect(statusResponse.statusCode).toBe(200);
+ // isFinished = statusResponse.body.status === "completed";
+
+ // if (!isFinished) {
+ // await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ // }
+ // }
+
+ // const endTime = Date.now();
+ // const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds
+
+ // console.log(`Time elapsed: ${timeElapsed} seconds`);
+
+ // expect(statusResponse.body.status).toBe("completed");
+ // expect(statusResponse.body).toHaveProperty("data");
+ // expect(statusResponse.body.data[0]).toHaveProperty("content");
+ // expect(statusResponse.body.data[0]).toHaveProperty("markdown");
+ // const results = statusResponse.body.data;
+ // // results.forEach((result, i) => {
+ // // console.log(result.metadata.sourceURL);
+ // // });
+ // expect(results.length).toBeGreaterThanOrEqual(10);
+ // expect(results.length).toBeLessThanOrEqual(15);
+
+ // }, 50000); // 50 seconds timeout to account for network delays
+ });
+
describe("GET /is-production", () => {
it("should return the production status", async () => {
const response = await request(TEST_URL).get("/is-production");
@@ -508,4 +955,65 @@ describe("E2E Tests for API Routes", () => {
expect(response.body).toHaveProperty("isProduction");
});
});
+
+ describe("Rate Limiter", () => {
+ it("should return 429 when rate limit is exceeded for preview token", async () => {
+ for (let i = 0; i < 5; i++) {
+ const response = await request(TEST_URL)
+ .post("/v0/scrape")
+ .set("Authorization", `Bearer this_is_just_a_preview_token`)
+ .set("Content-Type", "application/json")
+ .send({ url: "https://www.scrapethissite.com" });
+
+ expect(response.statusCode).toBe(200);
+ }
+ const response = await request(TEST_URL)
+ .post("/v0/scrape")
+ .set("Authorization", `Bearer this_is_just_a_preview_token`)
+ .set("Content-Type", "application/json")
+ .send({ url: "https://www.scrapethissite.com" });
+
+ expect(response.statusCode).toBe(429);
+ }, 60000);
+ });
+
+ // it("should return 429 when rate limit is exceeded for API key", async () => {
+ // for (let i = 0; i < parseInt(process.env.RATE_LIMIT_TEST_API_KEY_SCRAPE); i++) {
+ // const response = await request(TEST_URL)
+ // .post("/v0/scrape")
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ // .set("Content-Type", "application/json")
+ // .send({ url: "https://www.scrapethissite.com" });
+
+ // expect(response.statusCode).toBe(200);
+ // }
+
+ // const response = await request(TEST_URL)
+ // .post("/v0/scrape")
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ // .set("Content-Type", "application/json")
+ // .send({ url: "https://www.scrapethissite.com" });
+
+ // expect(response.statusCode).toBe(429);
+ // }, 60000);
+
+ // it("should return 429 when rate limit is exceeded for API key", async () => {
+ // for (let i = 0; i < parseInt(process.env.RATE_LIMIT_TEST_API_KEY_CRAWL); i++) {
+ // const response = await request(TEST_URL)
+ // .post("/v0/crawl")
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ // .set("Content-Type", "application/json")
+ // .send({ url: "https://www.scrapethissite.com" });
+
+ // expect(response.statusCode).toBe(200);
+ // }
+
+ // const response = await request(TEST_URL)
+ // .post("/v0/crawl")
+ // .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ // .set("Content-Type", "application/json")
+ // .send({ url: "https://www.scrapethissite.com" });
+
+ // expect(response.statusCode).toBe(429);
+ // }, 60000);
});
diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/auth.ts
index 77aa52f..b0bfabb 100644
--- a/apps/api/src/controllers/auth.ts
+++ b/apps/api/src/controllers/auth.ts
@@ -1,14 +1,25 @@
import { parseApi } from "../../src/lib/parseApi";
-import { getRateLimiter } from "../../src/services/rate-limiter";
+import { getRateLimiter } from "../../src/services/rate-limiter";
import { AuthResponse, RateLimiterMode } from "../../src/types";
import { supabase_service } from "../../src/services/supabase";
import { withAuth } from "../../src/lib/withAuth";
-
+import { RateLimiterRedis } from "rate-limiter-flexible";
+import { setTraceAttributes } from '@hyperdx/node-opentelemetry';
export async function authenticateUser(req, res, mode?: RateLimiterMode) : Promise<AuthResponse> {
return withAuth(supaAuthenticateUser)(req, res, mode);
}
-
+function setTrace(team_id: string, api_key: string) {
+ try {
+ setTraceAttributes({
+ team_id,
+ api_key
+ });
+ } catch (error) {
+ console.error('Error setting trace attributes:', error);
+ }
+
+}
export async function supaAuthenticateUser(
req,
res,
@@ -19,7 +30,6 @@ export async function supaAuthenticateUser(
error?: string;
status?: number;
}> {
-
const authHeader = req.headers.authorization;
if (!authHeader) {
return { success: false, error: "Unauthorized", status: 401 };
@@ -33,13 +43,87 @@ export async function supaAuthenticateUser(
};
}
+ const incomingIP = (req.headers["x-forwarded-for"] ||
+ req.socket.remoteAddress) as string;
+ const iptoken = incomingIP + token;
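+ // Rate-limit on IP + token so the same key used from different IPs accrues separate counters.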
+
+ let rateLimiter: RateLimiterRedis;
+ let subscriptionData: { team_id: string, plan: string } | null = null;
+ let normalizedApi: string;
+
+ if (token == "this_is_just_a_preview_token") {
+ rateLimiter = getRateLimiter(RateLimiterMode.Preview, token);
+ } else {
+ normalizedApi = parseApi(token);
+
+ const { data, error } = await supabase_service.rpc(
+ 'get_key_and_price_id_2', { api_key: normalizedApi }
+ );
+ // get_key_and_price_id_2 rpc definition:
+ // create or replace function get_key_and_price_id_2(api_key uuid)
+ // returns table(key uuid, team_id uuid, price_id text) as $$
+ // begin
+ // if api_key is null then
+ // return query
+ // select null::uuid as key, null::uuid as team_id, null::text as price_id;
+ // end if;
+
+ // return query
+ // select ak.key, ak.team_id, s.price_id
+ // from api_keys ak
+ // left join subscriptions s on ak.team_id = s.team_id
+ // where ak.key = api_key;
+ // end;
+ // $$ language plpgsql;
+
+ if (error) {
+ console.error('Error fetching key and price_id:', error);
+ } else {
+ // console.log('Key and Price ID:', data);
+ }
+
+ if (error || !data || data.length === 0) {
+ return {
+ success: false,
+ error: "Unauthorized: Invalid token",
+ status: 401,
+ };
+ }
+ const team_id = data[0].team_id;
+ const plan = getPlanByPriceId(data[0].price_id);
+ // HyperDX Logging
+ setTrace(team_id, normalizedApi);
+ subscriptionData = {
+ team_id: team_id,
+ plan: plan
+ }
+ switch (mode) {
+ case RateLimiterMode.Crawl:
+ rateLimiter = getRateLimiter(RateLimiterMode.Crawl, token, subscriptionData.plan);
+ break;
+ case RateLimiterMode.Scrape:
+ rateLimiter = getRateLimiter(RateLimiterMode.Scrape, token, subscriptionData.plan);
+ break;
+ case RateLimiterMode.CrawlStatus:
+ rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token);
+ break;
+ case RateLimiterMode.Search:
+ rateLimiter = getRateLimiter(RateLimiterMode.Search, token);
+ break;
+ case RateLimiterMode.Preview:
+ rateLimiter = getRateLimiter(RateLimiterMode.Preview, token);
+ break;
+ default:
+ rateLimiter = getRateLimiter(RateLimiterMode.Crawl, token);
+ break;
+ // case RateLimiterMode.Search:
+ // rateLimiter = await searchRateLimiter(RateLimiterMode.Search, token);
+ // break;
+ }
+ }
+
try {
- const incomingIP = (req.headers["x-forwarded-for"] ||
- req.socket.remoteAddress) as string;
- const iptoken = incomingIP + token;
- await getRateLimiter(
- token === "this_is_just_a_preview_token" ? RateLimiterMode.Preview : mode, token
- ).consume(iptoken);
+ await rateLimiter.consume(iptoken);
} catch (rateLimiterRes) {
console.error(rateLimiterRes);
return {
@@ -66,19 +150,36 @@ export async function supaAuthenticateUser(
// return { success: false, error: "Unauthorized: Invalid token", status: 401 };
}
- const normalizedApi = parseApi(token);
// make sure api key is valid, based on the api_keys table in supabase
- const { data, error } = await supabase_service
+ if (!subscriptionData) {
+ normalizedApi = parseApi(token);
+
+ const { data, error } = await supabase_service
.from("api_keys")
.select("*")
.eq("key", normalizedApi);
- if (error || !data || data.length === 0) {
- return {
- success: false,
- error: "Unauthorized: Invalid token",
- status: 401,
- };
+
+ if (error || !data || data.length === 0) {
+ return {
+ success: false,
+ error: "Unauthorized: Invalid token",
+ status: 401,
+ };
+ }
+
+ subscriptionData = data[0];
}
- return { success: true, team_id: data[0].team_id };
+ return { success: true, team_id: subscriptionData.team_id };
}
+
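+// Maps a Stripe price id to a plan tier; unknown or missing ids fall back to "starter",
+// e.g. getPlanByPriceId(process.env.STRIPE_PRICE_ID_SCALE) === "scale".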
+function getPlanByPriceId(price_id: string) {
+ switch (price_id) {
+ case process.env.STRIPE_PRICE_ID_STANDARD:
+ return 'standard';
+ case process.env.STRIPE_PRICE_ID_SCALE:
+ return 'scale';
+ default:
+ return 'starter';
+ }
+}
\ No newline at end of file
diff --git a/apps/api/src/controllers/scrape.ts b/apps/api/src/controllers/scrape.ts
index 021a9d0..0b3f146 100644
--- a/apps/api/src/controllers/scrape.ts
+++ b/apps/api/src/controllers/scrape.ts
@@ -15,6 +15,7 @@ export async function scrapeHelper(
crawlerOptions: any,
pageOptions: PageOptions,
extractorOptions: ExtractorOptions,
+ timeout: number
): Promise<{
success: boolean;
error?: string;
@@ -30,7 +31,6 @@ export async function scrapeHelper(
return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 };
}
-
const a = new WebScraperDataProvider();
await a.setOptions({
mode: "single_urls",
@@ -42,7 +42,19 @@ export async function scrapeHelper(
extractorOptions: extractorOptions,
});
- const docs = await a.getDocuments(false);
+ const timeoutPromise = new Promise<{ success: boolean; error?: string; returnCode: number }>((_, reject) =>
+ setTimeout(() => reject({ success: false, error: "Request timed out. Increase the timeout by passing `timeout` param to the request.", returnCode: 408 }), timeout)
+ );
+
+ const docsPromise = a.getDocuments(false);
+
+ let docs;
+ try {
+ docs = await Promise.race([docsPromise, timeoutPromise]);
+ } catch (error) {
+ return error;
+ }
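+ // Note: Promise.race does not cancel the losing promise; on timeout the
+ // underlying scrape keeps running in the background until it settles.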
+
// make sure doc.content is not empty
const filteredDocs = docs.filter(
(doc: { content?: string }) => doc.content && doc.content.trim().length > 0
@@ -51,12 +63,11 @@ export async function scrapeHelper(
return { success: true, error: "No page found", returnCode: 200 };
}
-
- let creditsToBeBilled = filteredDocs.length;
+ let creditsToBeBilled = filteredDocs.length;
const creditsPerLLMExtract = 5;
- if (extractorOptions.mode === "llm-extraction"){
- creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length)
+ if (extractorOptions.mode === "llm-extraction") {
+ creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length);
}
const billingResult = await billTeam(
@@ -95,7 +106,11 @@ export async function scrapeController(req: Request, res: Response) {
const extractorOptions = req.body.extractorOptions ?? {
mode: "markdown"
}
+ if (extractorOptions.mode === "llm-extraction") {
+ pageOptions.onlyMainContent = true;
+ }
const origin = req.body.origin ?? "api";
+ const timeout = req.body.timeout ?? 30000; // Default timeout of 30 seconds
try {
const { success: creditsCheckSuccess, message: creditsCheckMessage } =
@@ -114,6 +129,7 @@ export async function scrapeController(req: Request, res: Response) {
crawlerOptions,
pageOptions,
extractorOptions,
+ timeout
);
const endTime = new Date().getTime();
const timeTakenInSeconds = (endTime - startTime) / 1000;
diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts
index 27e8713..326728e 100644
--- a/apps/api/src/index.ts
+++ b/apps/api/src/index.ts
@@ -5,6 +5,8 @@ import "dotenv/config";
import { getWebScraperQueue } from "./services/queue-service";
import { redisClient } from "./services/rate-limiter";
import { v0Router } from "./routes/v0";
+import { initSDK } from '@hyperdx/node-opentelemetry';
+
const { createBullBoard } = require("@bull-board/api");
const { BullAdapter } = require("@bull-board/api/bullAdapter");
const { ExpressAdapter } = require("@bull-board/express");
@@ -47,6 +49,11 @@ const DEFAULT_PORT = process.env.PORT ?? 3002;
const HOST = process.env.HOST ?? "localhost";
redisClient.connect();
+// HyperDX OpenTelemetry
+if(process.env.ENV === 'production') {
+ initSDK({ consoleCapture: true, additionalInstrumentations: []});
+}
+
export function startServer(port = DEFAULT_PORT) {
const server = app.listen(Number(port), HOST, () => {
diff --git a/apps/api/src/lib/LLM-extraction/models.ts b/apps/api/src/lib/LLM-extraction/models.ts
index ff805bb..1434e35 100644
--- a/apps/api/src/lib/LLM-extraction/models.ts
+++ b/apps/api/src/lib/LLM-extraction/models.ts
@@ -1,30 +1,43 @@
import OpenAI from "openai";
import { Document } from "../../lib/entities";
+import { numTokensFromString } from "./helpers";
export type ScraperCompletionResult = {
data: any | null;
url: string;
};
+const maxTokens = 32000;
+const modifier = 4; // rough approximation of characters per token
const defaultPrompt =
"You are a professional web scraper. Extract the contents of the webpage";
function prepareOpenAIDoc(
document: Document
-): OpenAI.Chat.Completions.ChatCompletionContentPart[] {
- // Check if the markdown content exists in the document
- if (!document.markdown) {
+): [OpenAI.Chat.Completions.ChatCompletionContentPart[], number] {
+ let markdown = document.markdown;
+
+// Check if the markdown content exists in the document
+ if (!markdown) {
throw new Error(
"Markdown content is missing in the document. This is likely due to an error in the scraping process. Please try again or reach out to help@mendable.ai"
);
}
- return [{ type: "text", text: document.markdown }];
+ // count number of tokens
+ const numTokens = numTokensFromString(document.markdown, "gpt-4");
+
+ if (numTokens > maxTokens) {
+ // trim the document to the maximum number of tokens, tokens != characters
+ markdown = markdown.slice(0, (maxTokens * modifier));
+ }
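+ // e.g. a page above 32k tokens is cut to the first 128,000 characters
+ // (maxTokens * modifier), a character-based approximation of the token limit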
+
+ return [[{ type: "text", text: markdown }], numTokens];
}
export async function generateOpenAICompletions({
client,
- model = "gpt-4-turbo",
+ model = "gpt-4o",
document,
schema, //TODO - add zod dynamic type checking
prompt = defaultPrompt,
@@ -38,7 +51,7 @@ export async function generateOpenAICompletions({
temperature?: number;
}): Promise<Document> {
const openai = client as OpenAI;
- const content = prepareOpenAIDoc(document);
+ const [content, numTokens] = prepareOpenAIDoc(document);
const completion = await openai.chat.completions.create({
model,
@@ -72,6 +85,7 @@ export async function generateOpenAICompletions({
return {
...document,
llm_extraction: llmExtraction,
+ warning: numTokens > maxTokens ? `Page was trimmed to fit the maximum token limit defined by the LLM model (Max: ${maxTokens} tokens, Attempted: ${numTokens} tokens). If results are not good, email us at help@mendable.ai so we can help you.` : undefined,
};
}
diff --git a/apps/api/src/lib/entities.ts b/apps/api/src/lib/entities.ts
index a387b54..ab0a0ef 100644
--- a/apps/api/src/lib/entities.ts
+++ b/apps/api/src/lib/entities.ts
@@ -44,6 +44,7 @@ export type WebScraperOptions = {
limit?: number;
generateImgAltText?: boolean;
replaceAllPathsWithAbsolutePaths?: boolean;
+ mode?: "default" | "fast"; // have a mode of some sort
};
pageOptions?: PageOptions;
extractorOptions?: ExtractorOptions;
@@ -71,6 +72,7 @@ export class Document {
};
childrenLinks?: string[];
provider?: string;
+ warning?: string;
constructor(data: Partial<Document>) {
if (!data.content) {
diff --git a/apps/api/src/lib/load-testing-example.ts b/apps/api/src/lib/load-testing-example.ts
new file mode 100644
index 0000000..6fd56fc
--- /dev/null
+++ b/apps/api/src/lib/load-testing-example.ts
@@ -0,0 +1,42 @@
+import { scrapWithFireEngine } from "../../src/scraper/WebScraper/single_url";
+
+const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
+
+const scrapInBatches = async (
+ urls: string[],
+ batchSize: number,
+ delayMs: number
+) => {
+ let successCount = 0;
+ let errorCount = 0;
+
+ for (let i = 0; i < urls.length; i += batchSize) {
+ const batch = urls
+ .slice(i, i + batchSize)
+ .map((url) => scrapWithFireEngine(url));
+ try {
+ const results = await Promise.all(batch);
+ results.forEach((data, index) => {
+ if (data.trim() === "") {
+ errorCount++;
+ } else {
+ successCount++;
+ console.log(
+ `Scraping result ${i + index + 1}:`,
+ data.trim().substring(0, 20) + "..."
+ );
+ }
+ });
+ } catch (error) {
+ console.error("Error during scraping:", error);
+ }
+ await delay(delayMs);
+ }
+
+ console.log(`Total successful scrapes: ${successCount}`);
+ console.log(`Total errored scrapes: ${errorCount}`);
+};
+function run() {
+ const urls = Array.from({ length: 200 }, () => "https://scrapethissite.com");
+ scrapInBatches(urls, 10, 1000);
+}
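+
+// run() is not invoked automatically; call it manually to fire 200 scrapes
+// in batches of 10 with a 1-second pause between batches.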
diff --git a/apps/api/src/main/runWebScraper.ts b/apps/api/src/main/runWebScraper.ts
index 3c9ea88..632d110 100644
--- a/apps/api/src/main/runWebScraper.ts
+++ b/apps/api/src/main/runWebScraper.ts
@@ -17,8 +17,10 @@ export async function startWebScraperPipeline({
crawlerOptions: job.data.crawlerOptions,
pageOptions: job.data.pageOptions,
inProgress: (progress) => {
- partialDocs.push(progress.currentDocument);
- job.progress({...progress, partialDocs: partialDocs});
+ if (progress.currentDocument) {
+ partialDocs.push(progress.currentDocument);
+ job.progress({ ...progress, partialDocs: partialDocs });
+ }
},
onSuccess: (result) => {
job.moveToCompleted(result);
@@ -27,7 +29,7 @@ export async function startWebScraperPipeline({
job.moveToFailed(error);
},
team_id: job.data.team_id,
- bull_job_id: job.id.toString()
+ bull_job_id: job.id.toString(),
})) as { success: boolean; message: string; docs: Document[] };
}
export async function runWebScraper({
@@ -63,26 +65,25 @@ export async function runWebScraper({
urls: [url],
crawlerOptions: crawlerOptions,
pageOptions: pageOptions,
- bullJobId: bull_job_id
+ bullJobId: bull_job_id,
});
} else {
await provider.setOptions({
mode: mode,
urls: url.split(","),
crawlerOptions: crawlerOptions,
- pageOptions: pageOptions
+ pageOptions: pageOptions,
});
}
const docs = (await provider.getDocuments(false, (progress: Progress) => {
inProgress(progress);
-
})) as Document[];
if (docs.length === 0) {
return {
success: true,
message: "No pages found",
- docs: []
+ docs: [],
};
}
@@ -95,18 +96,14 @@ export async function runWebScraper({
})
: docs.filter((doc) => doc.content.trim().length > 0);
-
- const billingResult = await billTeam(
- team_id,
- filteredDocs.length
- );
+ const billingResult = await billTeam(team_id, filteredDocs.length);
if (!billingResult.success) {
// throw new Error("Failed to bill team, no subscription was found");
return {
success: false,
message: "Failed to bill team, no subscription was found",
- docs: []
+ docs: [],
};
}
diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts
index 1b371fd..9340aa8 100644
--- a/apps/api/src/scraper/WebScraper/crawler.ts
+++ b/apps/api/src/scraper/WebScraper/crawler.ts
@@ -4,7 +4,7 @@ import { URL } from "url";
import { getLinksFromSitemap } from "./sitemap";
import async from "async";
import { Progress } from "../../lib/entities";
-import { scrapWithScrapingBee } from "./single_url";
+import { scrapSingleUrl, scrapWithScrapingBee } from "./single_url";
import robotsParser from "robots-parser";
export class WebCrawler {
@@ -15,7 +15,7 @@ export class WebCrawler {
private maxCrawledLinks: number;
private maxCrawledDepth: number;
private visited: Set<string> = new Set();
- private crawledUrls: Set<string> = new Set();
+ private crawledUrls: Map<string, string> = new Map(); // url -> html captured during the crawl
private limit: number;
private robotsTxtUrl: string;
private robots: any;
@@ -51,7 +51,6 @@ export class WebCrawler {
this.generateImgAltText = generateImgAltText ?? false;
}
-
private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
return sitemapLinks
.filter((link) => {
@@ -77,9 +76,22 @@ export class WebCrawler {
// Check if the link matches the include patterns, if any are specified
if (this.includes.length > 0 && this.includes[0] !== "") {
- return this.includes.some((includePattern) =>
+ if (!this.includes.some((includePattern) =>
new RegExp(includePattern).test(path)
- );
+ )) {
+ return false;
+ }
+ }
+
+ // Normalize the initial URL and the link to account for www and non-www versions
+ const normalizedInitialUrl = new URL(this.initialUrl);
+ const normalizedLink = new URL(link);
+ const initialHostname = normalizedInitialUrl.hostname.replace(/^www\./, '');
+ const linkHostname = normalizedLink.hostname.replace(/^www\./, '');
+
+ // Ensure the protocol and hostname match, and the path starts with the initial URL's path
+ if (linkHostname !== initialHostname || !normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+ return false;
}
const isAllowed = this.robots.isAllowed(link, "FireCrawlAgent") ?? true;
@@ -99,19 +111,21 @@ export class WebCrawler {
concurrencyLimit: number = 5,
limit: number = 10000,
maxDepth: number = 10
- ): Promise<string[]> {
+ ): Promise<{ url: string, html: string }[]> {
// Fetch and parse robots.txt
try {
const response = await axios.get(this.robotsTxtUrl);
this.robots = robotsParser(this.robotsTxtUrl, response.data);
} catch (error) {
- console.error(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`);
+ console.log(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`);
+
}
+
const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl);
if (sitemapLinks.length > 0) {
- const filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth);
- return filteredLinks;
+ let filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth);
+ return filteredLinks.map(link => ({ url: link, html: "" }));
}
const urls = await this.crawlUrls(
@@ -123,18 +137,20 @@ export class WebCrawler {
urls.length === 0 &&
this.filterLinks([this.initialUrl], limit, this.maxCrawledDepth).length > 0
) {
- return [this.initialUrl];
+ return [{ url: this.initialUrl, html: "" }];
}
+
// make sure to run include exclude here again
- return this.filterLinks(urls, limit, this.maxCrawledDepth);
+ const filteredUrls = this.filterLinks(urls.map(urlObj => urlObj.url), limit, this.maxCrawledDepth);
+ return filteredUrls.map(url => ({ url, html: urls.find(urlObj => urlObj.url === url)?.html || "" }));
}
private async crawlUrls(
urls: string[],
concurrencyLimit: number,
- inProgress?: (progress: Progress) => void
- ): Promise<string[]> {
+ inProgress?: (progress: Progress) => void,
+ ): Promise<{ url: string, html: string }[]> {
const queue = async.queue(async (task: string, callback) => {
if (this.crawledUrls.size >= Math.min(this.maxCrawledLinks, this.limit)) {
if (callback && typeof callback === "function") {
@@ -143,13 +159,26 @@ export class WebCrawler {
return;
}
const newUrls = await this.crawl(task);
- newUrls.forEach((url) => this.crawledUrls.add(url));
+ // add the initial url if not already added
+ // if (this.visited.size === 1) {
+ // let normalizedInitial = this.initialUrl;
+ // if (!normalizedInitial.endsWith("/")) {
+ // normalizedInitial = normalizedInitial + "/";
+ // }
+ // if (!newUrls.some(page => page.url === this.initialUrl)) {
+ // newUrls.push({ url: this.initialUrl, html: "" });
+ // }
+ // }
+
+
+ newUrls.forEach((page) => this.crawledUrls.set(page.url, page.html));
+
if (inProgress && newUrls.length > 0) {
inProgress({
current: this.crawledUrls.size,
total: Math.min(this.maxCrawledLinks, this.limit),
status: "SCRAPING",
- currentDocumentUrl: newUrls[newUrls.length - 1],
+ currentDocumentUrl: newUrls[newUrls.length - 1].url,
});
} else if (inProgress) {
inProgress({
@@ -159,7 +188,7 @@ export class WebCrawler {
currentDocumentUrl: task,
});
}
- await this.crawlUrls(newUrls, concurrencyLimit, inProgress);
+ await this.crawlUrls(newUrls.map((p) => p.url), concurrencyLimit, inProgress);
if (callback && typeof callback === "function") {
callback();
}
@@ -175,34 +204,48 @@ export class WebCrawler {
}
);
await queue.drain();
- return Array.from(this.crawledUrls);
+ return Array.from(this.crawledUrls.entries()).map(([url, html]) => ({ url, html }));
}
- async crawl(url: string): Promise<string[]> {
- if (this.visited.has(url) || !this.robots.isAllowed(url, "FireCrawlAgent"))
+ async crawl(url: string): Promise<{url: string, html: string}[]> {
+ if (this.visited.has(url) || !this.robots.isAllowed(url, "FireCrawlAgent")){
return [];
+ }
this.visited.add(url);
+
+
if (!url.startsWith("http")) {
url = "https://" + url;
+
}
if (url.endsWith("/")) {
url = url.slice(0, -1);
+
}
+
if (this.isFile(url) || this.isSocialMediaOrEmail(url)) {
return [];
}
try {
- let content;
- // If it is the first link, fetch with scrapingbee
+ let content : string = "";
+ // If it is the first link, fetch with single url
if (this.visited.size === 1) {
- content = await scrapWithScrapingBee(url, "load");
+ const page = await scrapSingleUrl(url, {includeHtml: true});
+ content = page.html ?? ""
} else {
const response = await axios.get(url);
- content = response.data;
+ content = response.data ?? "";
}
const $ = load(content);
- let links: string[] = [];
+ let links: {url: string, html: string}[] = [];
+
+ // Add the initial URL to the list of links
+ if(this.visited.size === 1)
+ {
+ links.push({url, html: content});
+ }
+
$("a").each((_, element) => {
const href = $(element).attr("href");
@@ -215,7 +258,6 @@ export class WebCrawler {
const path = url.pathname;
if (
- // fullUrl.startsWith(this.initialUrl) && // this condition makes it stop crawling back the url
this.isInternalLink(fullUrl) &&
this.matchesPattern(fullUrl) &&
this.noSections(fullUrl) &&
@@ -223,12 +265,16 @@ export class WebCrawler {
!this.matchesExcludes(path) &&
this.robots.isAllowed(fullUrl, "FireCrawlAgent")
) {
- links.push(fullUrl);
+ links.push({url: fullUrl, html: content});
}
}
});
- return links.filter((link) => !this.visited.has(link));
+ if(this.visited.size === 1){
+ return links;
+ }
+ // Create a new list to return to avoid modifying the visited list
+ return links.filter((link) => !this.visited.has(link.url));
} catch (error) {
return [];
}
@@ -275,9 +321,15 @@ export class WebCrawler {
".mp4",
".mp3",
".pptx",
- ".docx",
+ // ".docx",
".xlsx",
".xml",
+ ".avi",
+ ".flv",
+ ".woff",
+ ".ttf",
+ ".woff2",
+ ".webp"
];
return fileExtensions.some((ext) => url.endsWith(ext));
}
@@ -294,18 +346,57 @@ export class WebCrawler {
return socialMediaOrEmail.some((ext) => url.includes(ext));
}
private async tryFetchSitemapLinks(url: string): Promise<string[]> {
+ const normalizeUrl = (url: string) => {
+ url = url.replace(/^https?:\/\//, "").replace(/^www\./, "");
+ if (url.endsWith("/")) {
+ url = url.slice(0, -1);
+ }
+ return url;
+ };
+
const sitemapUrl = url.endsWith("/sitemap.xml")
? url
: `${url}/sitemap.xml`;
+
+ let sitemapLinks: string[] = [];
+
try {
const response = await axios.get(sitemapUrl);
if (response.status === 200) {
- return await getLinksFromSitemap(sitemapUrl);
+ sitemapLinks = await getLinksFromSitemap(sitemapUrl);
}
} catch (error) {
// Error handling for failed sitemap fetch
+ // console.error(`Failed to fetch sitemap from ${sitemapUrl}: ${error}`);
}
- return [];
+
+ if (sitemapLinks.length === 0) {
+ // If the first one doesn't work, try the base URL
+ const baseUrlSitemap = `${this.baseUrl}/sitemap.xml`;
+ try {
+ const response = await axios.get(baseUrlSitemap);
+ if (response.status === 200) {
+ sitemapLinks = await getLinksFromSitemap(baseUrlSitemap);
+ }
+ } catch (error) {
+ // Error handling for failed base URL sitemap fetch
+ // console.error(`Failed to fetch sitemap from ${baseUrlSitemap}: ${error}`);
+ }
+ }
+
+ // Normalize and check if the URL is present in any of the sitemaps
+ const normalizedUrl = normalizeUrl(url);
+
+ const normalizedSitemapLinks = sitemapLinks.map(link => normalizeUrl(link));
+
+ // only push the initial URL when sitemap links were found; adding it to an empty list would cause start() to treat it as a sitemap result and skip crawling entirely
+ if (!normalizedSitemapLinks.includes(normalizedUrl) && sitemapLinks.length > 0) {
+ // do not push the normalized url
+ sitemapLinks.push(url);
+ }
+
+ return sitemapLinks;
}
}
diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts
index e3256db..0e295ae 100644
--- a/apps/api/src/scraper/WebScraper/index.ts
+++ b/apps/api/src/scraper/WebScraper/index.ts
@@ -17,6 +17,7 @@ import {
} from "./utils/replacePaths";
import { generateCompletions } from "../../lib/LLM-extraction";
import { getWebScraperQueue } from "../../../src/services/queue-service";
+import { fetchAndProcessDocx } from "./utils/docxProcessor";
export class WebScraperDataProvider {
private bullJobId: string;
@@ -35,6 +36,7 @@ export class WebScraperDataProvider {
private replaceAllPathsWithAbsolutePaths?: boolean = false;
private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" =
"gpt-4-turbo";
+ private crawlerMode: string = "default";
authorize(): void {
throw new Error("Method not implemented.");
@@ -46,7 +48,8 @@ export class WebScraperDataProvider {
private async convertUrlsToDocuments(
urls: string[],
- inProgress?: (progress: Progress) => void
+ inProgress?: (progress: Progress) => void,
+ allHtmls?: string[]
): Promise<Document[]> {
const totalUrls = urls.length;
let processedUrls = 0;
@@ -56,7 +59,12 @@ export class WebScraperDataProvider {
const batchUrls = urls.slice(i, i + this.concurrentRequests);
await Promise.all(
batchUrls.map(async (url, index) => {
- const result = await scrapSingleUrl(url, this.pageOptions);
+ const existingHTML = allHtmls ? allHtmls[i + index] : "";
+ const result = await scrapSingleUrl(
+ url,
+ this.pageOptions,
+ existingHTML
+ );
processedUrls++;
if (inProgress) {
inProgress({
@@ -127,9 +135,30 @@ export class WebScraperDataProvider {
}
}
+ private async cleanIrrelevantPath(links: string[]) {
+ return links.filter((link) => {
+ const normalizedInitialUrl = new URL(this.urls[0]);
+ const normalizedLink = new URL(link);
+
+ // Normalize the hostname to account for www and non-www versions
+ const initialHostname = normalizedInitialUrl.hostname.replace(
+ /^www\./,
+ ""
+ );
+ const linkHostname = normalizedLink.hostname.replace(/^www\./, "");
+
+ // Ensure the protocol and hostname match, and the path starts with the initial URL's path
+ return (
+ linkHostname === initialHostname &&
+ normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)
+ );
+ });
+ }
+
private async handleCrawlMode(
inProgress?: (progress: Progress) => void
): Promise<Document[]> {
+
const crawler = new WebCrawler({
initialUrl: this.urls[0],
includes: this.includes,
@@ -139,19 +168,38 @@ export class WebScraperDataProvider {
limit: this.limit,
generateImgAltText: this.generateImgAltText,
});
- let links = await crawler.start(inProgress, 5, this.limit, this.maxCrawledDepth);
+
+ let links = await crawler.start(
+ inProgress,
+ 5,
+ this.limit,
+ this.maxCrawledDepth
+ );
+
+ let allLinks = links.map((e) => e.url);
+ const allHtmls = links.map((e) => e.html);
+
if (this.returnOnlyUrls) {
- return this.returnOnlyUrlsResponse(links, inProgress);
+ return this.returnOnlyUrlsResponse(allLinks, inProgress);
}
- let documents = await this.processLinks(links, inProgress);
- return this.cacheAndFinalizeDocuments(documents, links);
+ let documents = [];
+ // check if fast mode is enabled and there is html inside the links
+ if (this.crawlerMode === "fast" && links.some((link) => link.html)) {
+ documents = await this.processLinks(allLinks, inProgress, allHtmls);
+ } else {
+ documents = await this.processLinks(allLinks, inProgress);
+ }
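+ // Fast mode reuses the HTML already captured during crawling, so pages are
+ // not fetched a second time by the scrapers.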
+
+ return this.cacheAndFinalizeDocuments(documents, allLinks);
}
private async handleSingleUrlsMode(
inProgress?: (progress: Progress) => void
): Promise<Document[]> {
- let documents = await this.processLinks(this.urls, inProgress);
+ const links = this.urls;
+
+ let documents = await this.processLinks(links, inProgress);
return documents;
}
@@ -159,6 +207,8 @@ export class WebScraperDataProvider {
inProgress?: (progress: Progress) => void
): Promise<Document[]> {
let links = await getLinksFromSitemap(this.urls[0]);
+ links = await this.cleanIrrelevantPath(links);
+
if (this.returnOnlyUrls) {
return this.returnOnlyUrlsResponse(links, inProgress);
}
@@ -187,16 +237,26 @@ export class WebScraperDataProvider {
private async processLinks(
links: string[],
- inProgress?: (progress: Progress) => void
+ inProgress?: (progress: Progress) => void,
+ allHtmls?: string[]
): Promise<Document[]> {
- let pdfLinks = links.filter((link) => link.endsWith(".pdf"));
- let pdfDocuments = await this.fetchPdfDocuments(pdfLinks);
- links = links.filter((link) => !link.endsWith(".pdf"));
+ const pdfLinks = links.filter(link => link.endsWith(".pdf"));
+ const docLinks = links.filter(link => link.endsWith(".doc") || link.endsWith(".docx"));
- let documents = await this.convertUrlsToDocuments(links, inProgress);
+ const pdfDocuments = await this.fetchPdfDocuments(pdfLinks);
+ const docxDocuments = await this.fetchDocxDocuments(docLinks);
+
+ links = links.filter(link => !pdfLinks.includes(link) && !docLinks.includes(link));
+
+ let documents = await this.convertUrlsToDocuments(
+ links,
+ inProgress,
+ allHtmls
+ );
documents = await this.getSitemapData(this.urls[0], documents);
+
documents = this.applyPathReplacements(documents);
- documents = await this.applyImgAltText(documents);
+ // documents = await this.applyImgAltText(documents);
if (
this.extractorOptions.mode === "llm-extraction" &&
@@ -204,7 +264,7 @@ export class WebScraperDataProvider {
) {
documents = await generateCompletions(documents, this.extractorOptions);
}
- return documents.concat(pdfDocuments);
+ return documents.concat(pdfDocuments).concat(docxDocuments);
}
private async fetchPdfDocuments(pdfLinks: string[]): Promise<Document[]> {
@@ -219,6 +279,18 @@ export class WebScraperDataProvider {
})
);
}
+ private async fetchDocxDocuments(docxLinks: string[]): Promise<Document[]> {
+ return Promise.all(
+ docxLinks.map(async (p) => {
+ const docXDocument = await fetchAndProcessDocx(p);
+ return {
+ content: docXDocument,
+ metadata: { sourceURL: p },
+ provider: "web-scraper",
+ };
+ })
+ );
+ }
private applyPathReplacements(documents: Document[]): Document[] {
return this.replaceAllPathsWithAbsolutePaths
@@ -395,8 +467,9 @@ export class WebScraperDataProvider {
this.pageOptions = options.pageOptions ?? { onlyMainContent: false, includeHtml: false };
this.extractorOptions = options.extractorOptions ?? {mode: "markdown"}
this.replaceAllPathsWithAbsolutePaths = options.crawlerOptions?.replaceAllPathsWithAbsolutePaths ?? false;
- //! @nicolas, for some reason this was being injected and breakign everything. Don't have time to find source of the issue so adding this check
+ //! @nicolas, for some reason this was being injected and breaking everything. Don't have time to find source of the issue so adding this check
this.excludes = this.excludes.filter((item) => item !== "");
+ this.crawlerMode = options.crawlerOptions?.mode ?? "default";
// make sure all urls start with https://
this.urls = this.urls.map((url) => {
diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts
index c43ea40..419bdba 100644
--- a/apps/api/src/scraper/WebScraper/single_url.ts
+++ b/apps/api/src/scraper/WebScraper/single_url.ts
@@ -6,9 +6,19 @@ import { Document, PageOptions } from "../../lib/entities";
import { parseMarkdown } from "../../lib/html-to-markdown";
import { excludeNonMainTags } from "./utils/excludeTags";
import { urlSpecificParams } from "./utils/custom/website_params";
+import { fetchAndProcessPdf } from "./utils/pdfProcessor";
dotenv.config();
+const baseScrapers = [
+ "fire-engine",
+ "scrapingBee",
+ "playwright",
+ "scrapingBeeLoad",
+ "fetch",
+] as const;
+
+
export async function generateRequestParams(
url: string,
wait_browser: string = "domcontentloaded",
@@ -32,15 +42,39 @@ export async function generateRequestParams(
return defaultParams;
}
}
-export async function scrapWithCustomFirecrawl(
+export async function scrapWithFireEngine(
url: string,
options?: any
): Promise<string> {
try {
- // TODO: merge the custom firecrawl scraper into mono-repo when ready
- return null;
+ const reqParams = await generateRequestParams(url);
+ const wait_playwright = reqParams["params"]?.wait ?? 0;
+
+ const response = await fetch(process.env.FIRE_ENGINE_BETA_URL+ "/scrape", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({ url: url, wait: wait_playwright }),
+ });
+
+ if (!response.ok) {
+ console.error(
+ `[Fire-Engine] Error fetching url: ${url} with status: ${response.status}`
+ );
+ return "";
+ }
+
+ const contentType = response.headers.get('content-type'); // fetch exposes headers via Headers#get, not index access
+ if (contentType && contentType.includes('application/pdf')) {
+ return fetchAndProcessPdf(url);
+ } else {
+ const data = await response.json();
+ const html = data.content;
+ return html ?? "";
+ }
} catch (error) {
- console.error(`Error scraping with custom firecrawl-scraper: ${error}`);
+ console.error(`[Fire-Engine][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
@@ -62,15 +96,21 @@ export async function scrapWithScrapingBee(
if (response.status !== 200 && response.status !== 404) {
console.error(
- `Scraping bee error in ${url} with status code ${response.status}`
+ `[ScrapingBee] Error fetching url: ${url} with status code ${response.status}`
);
return "";
}
- const decoder = new TextDecoder();
- const text = decoder.decode(response.data);
- return text;
+
+ const contentType = response.headers['content-type'];
+ if (contentType && contentType.includes('application/pdf')) {
+ return fetchAndProcessPdf(url);
+ } else {
+ const decoder = new TextDecoder();
+ const text = decoder.decode(response.data);
+ return text;
+ }
} catch (error) {
- console.error(`Error scraping with Scraping Bee: ${error}`);
+ console.error(`[ScrapingBee][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
@@ -90,23 +130,80 @@ export async function scrapWithPlaywright(url: string): Promise<string> {
if (!response.ok) {
console.error(
- `Error fetching w/ playwright server -> URL: ${url} with status: ${response.status}`
+ `[Playwright] Error fetching url: ${url} with status: ${response.status}`
);
return "";
}
- const data = await response.json();
- const html = data.content;
- return html ?? "";
+ const contentType = response.headers.get('content-type');
+ if (contentType && contentType.includes('application/pdf')) {
+ return fetchAndProcessPdf(url);
+ } else {
+ const data = await response.json();
+ const html = data.content;
+ return html ?? "";
+ }
} catch (error) {
- console.error(`Error scraping with Puppeteer: ${error}`);
+ console.error(`[Playwright][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
+export async function scrapWithFetch(url: string): Promise<string> {
+ try {
+ const response = await fetch(url);
+ if (!response.ok) {
+ console.error(
+ `[Fetch] Error fetching url: ${url} with status: ${response.status}`
+ );
+ return "";
+ }
+
+ const contentType = response.headers.get('content-type');
+ if (contentType && contentType.includes('application/pdf')) {
+ return fetchAndProcessPdf(url);
+ } else {
+ const text = await response.text();
+ return text;
+ }
+ } catch (error) {
+ console.error(`[Fetch][c] Error fetching url: ${url} -> ${error}`);
+ return "";
+ }
+}
+
+/**
+ * Get the order of scrapers to be used for scraping a URL
+ * If the user doesn't have envs set for a specific scraper, it will be removed from the order.
+ * @param defaultScraper The default scraper to use if the URL does not have a specific scraper order defined
+ * @returns The order of scrapers to be used for scraping a URL
+ */
+function getScrapingFallbackOrder(defaultScraper?: string) {
+ const availableScrapers = baseScrapers.filter(scraper => {
+ switch (scraper) {
+ case "scrapingBee":
+ case "scrapingBeeLoad":
+ return !!process.env.SCRAPING_BEE_API_KEY;
+ case "fire-engine":
+ return !!process.env.FIRE_ENGINE_BETA_URL;
+ case "playwright":
+ return !!process.env.PLAYWRIGHT_MICROSERVICE_URL;
+ default:
+ return true;
+ }
+ });
+
+ const defaultOrder = ["scrapingBee", "fire-engine", "playwright", "scrapingBeeLoad", "fetch"];
+ const filteredDefaultOrder = defaultOrder.filter((scraper: typeof baseScrapers[number]) => availableScrapers.includes(scraper));
+ const uniqueScrapers = new Set(defaultScraper ? [defaultScraper, ...filteredDefaultOrder, ...availableScrapers] : [...filteredDefaultOrder, ...availableScrapers]);
+ const scrapersInOrder = Array.from(uniqueScrapers);
+ return scrapersInOrder as typeof baseScrapers[number][];
+}
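+// e.g. with only SCRAPING_BEE_API_KEY set and no URL-specific default scraper,
+// this resolves to ["scrapingBee", "scrapingBeeLoad", "fetch"].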
+
export async function scrapSingleUrl(
urlToScrap: string,
- pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false }
+ pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false },
+ existingHtml: string = ""
): Promise<Document> {
urlToScrap = urlToScrap.trim();
@@ -124,17 +221,14 @@ export async function scrapSingleUrl(
const attemptScraping = async (
url: string,
- method:
- | "firecrawl-scraper"
- | "scrapingBee"
- | "playwright"
- | "scrapingBeeLoad"
- | "fetch"
+ method: typeof baseScrapers[number]
) => {
let text = "";
switch (method) {
- case "firecrawl-scraper":
- text = await scrapWithCustomFirecrawl(url);
+ case "fire-engine":
+ if (process.env.FIRE_ENGINE_BETA_URL) {
+ text = await scrapWithFireEngine(url);
+ }
break;
case "scrapingBee":
if (process.env.SCRAPING_BEE_API_KEY) {
@@ -156,19 +250,7 @@ export async function scrapSingleUrl(
}
break;
case "fetch":
- try {
- const response = await fetch(url);
- if (!response.ok) {
- console.error(
- `Error fetching URL: ${url} with status: ${response.status}`
- );
- return "";
- }
- text = await response.text();
- } catch (error) {
- console.error(`Error scraping URL: ${error}`);
- return "";
- }
+ text = await scrapWithFetch(url);
break;
}
@@ -186,20 +268,22 @@ export async function scrapSingleUrl(
console.error(`Invalid URL key, trying: ${urlToScrap}`);
}
const defaultScraper = urlSpecificParams[urlKey]?.defaultScraper ?? "";
- const scrapersInOrder = defaultScraper
- ? [
- defaultScraper,
- "scrapingBee",
- "playwright",
- "scrapingBeeLoad",
- "fetch",
- ]
- : ["scrapingBee", "playwright", "scrapingBeeLoad", "fetch"];
+ const scrapersInOrder = getScrapingFallbackOrder(defaultScraper);
for (const scraper of scrapersInOrder) {
+ // If HTML already exists from the crawler, reuse it instead of scraping again
+ if (existingHtml && existingHtml.trim().length >= 100) {
+ let cleanedHtml = removeUnwantedElements(existingHtml, pageOptions);
+ text = await parseMarkdown(cleanedHtml);
+ html = existingHtml;
+ break;
+ }
[text, html] = await attemptScraping(urlToScrap, scraper);
- if (text && text.length >= 100) break;
- console.log(`Falling back to ${scraper}`);
+ if (text && text.trim().length >= 100) break;
+ const nextScraperIndex = scrapersInOrder.indexOf(scraper) + 1;
+ if (nextScraperIndex < scrapersInOrder.length) {
+ console.info(`Falling back to ${scrapersInOrder[nextScraperIndex]}`);
+ }
}
if (!text) {
diff --git a/apps/api/src/scraper/WebScraper/utils/__tests__/docxProcessor.test.ts b/apps/api/src/scraper/WebScraper/utils/__tests__/docxProcessor.test.ts
new file mode 100644
index 0000000..e018ffa
--- /dev/null
+++ b/apps/api/src/scraper/WebScraper/utils/__tests__/docxProcessor.test.ts
@@ -0,0 +1,13 @@
+import * as docxProcessor from "../docxProcessor";
+
+describe("DOCX Processing Module - Integration Test", () => {
+ it("should correctly process a simple DOCX file without the LLAMAPARSE_API_KEY", async () => {
+ delete process.env.LLAMAPARSE_API_KEY;
+ const docxContent = await docxProcessor.fetchAndProcessDocx(
+ "https://nvca.org/wp-content/uploads/2019/06/NVCA-Model-Document-Stock-Purchase-Agreement.docx"
+ );
+ expect(docxContent.trim()).toContain(
+ "SERIES A PREFERRED STOCK PURCHASE AGREEMENT"
+ );
+ });
+});
diff --git a/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts b/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts
index 5f8be9f..9094fc3 100644
--- a/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts
+++ b/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts
@@ -63,7 +63,7 @@ export const urlSpecificParams = {
},
},
"ycombinator.com":{
- defaultScraper: "playwright",
+ defaultScraper: "fire-engine",
params: {
wait_browser: "networkidle2",
block_resources: false,
@@ -121,5 +121,25 @@ export const urlSpecificParams = {
accept:
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
},
+ },
+ "help.salesforce.com":{
+ defaultScraper: "playwright",
+ params: {
+ wait_browser: "networkidle2",
+ block_resources: false,
+ wait: 2000,
+ },
+ headers: {
+ "User-Agent":
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36",
+ "sec-fetch-site": "same-origin",
+ "sec-fetch-mode": "cors",
+ "sec-fetch-dest": "empty",
+ referer: "https://www.google.com/",
+ "accept-language": "en-US,en;q=0.9",
+ "accept-encoding": "gzip, deflate, br",
+ accept:
+ "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
+ },
}
};
diff --git a/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts b/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts
new file mode 100644
index 0000000..38759f8
--- /dev/null
+++ b/apps/api/src/scraper/WebScraper/utils/docxProcessor.ts
@@ -0,0 +1,41 @@
+import axios from "axios";
+import fs from "fs";
+import { createWriteStream } from "node:fs";
+import path from "path";
+import os from "os";
+import mammoth from "mammoth";
+
+export async function fetchAndProcessDocx(url: string): Promise<string> {
+ const tempFilePath = await downloadDocx(url);
+ const content = await processDocxToText(tempFilePath);
+ fs.unlinkSync(tempFilePath); // Clean up the temporary file
+ return content;
+}
+
+async function downloadDocx(url: string): Promise<string> {
+ const response = await axios({
+ url,
+ method: "GET",
+ responseType: "stream",
+ });
+
+ const tempFilePath = path.join(os.tmpdir(), `tempDocx-${Date.now()}.docx`);
+ const writer = createWriteStream(tempFilePath);
+
+ response.data.pipe(writer);
+
+ return new Promise((resolve, reject) => {
+ writer.on("finish", () => resolve(tempFilePath));
+ writer.on("error", reject);
+ });
+}
+
+export async function processDocxToText(filePath: string): Promise<string> {
+ const content = await extractTextFromDocx(filePath);
+ return content;
+}
+
+async function extractTextFromDocx(filePath: string): Promise<string> {
+ const result = await mammoth.extractRawText({ path: filePath });
+ return result.value;
+}
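+
+// Usage sketch (hypothetical URL):
+// const text = await fetchAndProcessDocx("https://example.com/contract.docx");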
diff --git a/apps/api/src/scraper/WebScraper/utils/excludeTags.ts b/apps/api/src/scraper/WebScraper/utils/excludeTags.ts
index 142bcef..bb9c519 100644
--- a/apps/api/src/scraper/WebScraper/utils/excludeTags.ts
+++ b/apps/api/src/scraper/WebScraper/utils/excludeTags.ts
@@ -34,8 +34,6 @@ export const excludeNonMainTags = [
"#nav",
".breadcrumbs",
"#breadcrumbs",
- ".form",
- "form",
"#search-form",
".search",
"#search",
@@ -51,10 +49,6 @@ export const excludeNonMainTags = [
"#tag",
".category",
"#category",
- ".comment",
- "#comment",
- ".reply",
- "#reply",
- ".author",
- "#author",
+ ".cookie",
+ "#cookie"
];
diff --git a/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts b/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts
index fb08d9c..7c57007 100644
--- a/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts
+++ b/apps/api/src/scraper/WebScraper/utils/pdfProcessor.ts
@@ -19,8 +19,8 @@ export async function fetchAndProcessPdf(url: string): Promise<string> {
async function downloadPdf(url: string): Promise<string> {
const response = await axios({
url,
- method: 'GET',
- responseType: 'stream',
+ method: "GET",
+ responseType: "stream",
});
const tempFilePath = path.join(os.tmpdir(), `tempPdf-${Date.now()}.pdf`);
@@ -29,8 +29,8 @@ async function downloadPdf(url: string): Promise {
response.data.pipe(writer);
return new Promise((resolve, reject) => {
- writer.on('finish', () => resolve(tempFilePath));
- writer.on('error', reject);
+ writer.on("finish", () => resolve(tempFilePath));
+ writer.on("error", reject);
});
}
@@ -77,12 +77,12 @@ export async function processPdfToText(filePath: string): Promise<string> {
} else {
// If the status code is not 200, increment the attempt counter and wait
attempt++;
- await new Promise((resolve) => setTimeout(resolve, 250)); // Wait for 2 seconds
+ await new Promise((resolve) => setTimeout(resolve, 500)); // Wait for 0.5 seconds
}
} catch (error) {
- console.error("Error fetching result:", error);
+ console.error("Error fetching result:", error || '');
attempt++;
- await new Promise((resolve) => setTimeout(resolve, 250)); // Wait for 2 seconds before retrying
+ await new Promise((resolve) => setTimeout(resolve, 500)); // Wait for 0.5 seconds before retrying
// You may want to handle specific errors differently
}
}
@@ -101,7 +101,7 @@ export async function processPdfToText(filePath: string): Promise<string> {
return content;
}
-async function processPdf(file: string){
+async function processPdf(file: string) {
const fileContent = fs.readFileSync(file);
const data = await pdf(fileContent);
return data.text;
diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts
index 892530c..4703a7f 100644
--- a/apps/api/src/services/billing/credit_billing.ts
+++ b/apps/api/src/services/billing/credit_billing.ts
@@ -227,10 +227,11 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) {
if (creditUsages && creditUsages.length > 0) {
totalCreditsUsed = creditUsages[0].total_credits_used;
- console.log("Total Credits Used:", totalCreditsUsed);
+ // console.log("Total Credits Used:", totalCreditsUsed);
}
} catch (error) {
console.error("Error calculating credit usage:", error);
+
}
// Adjust total credits used by subtracting coupon value
const adjustedCreditsUsed = Math.max(0, totalCreditsUsed - couponCredits);
diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts
index 78ea030..6772c57 100644
--- a/apps/api/src/services/queue-worker.ts
+++ b/apps/api/src/services/queue-worker.ts
@@ -5,6 +5,11 @@ import { logtail } from "./logtail";
import { startWebScraperPipeline } from "../main/runWebScraper";
import { callWebhook } from "./webhook";
import { logJob } from "./logging/log_job";
+import { initSDK } from '@hyperdx/node-opentelemetry';
+
+if(process.env.ENV === 'production') {
+ initSDK({ consoleCapture: true, additionalInstrumentations: []});
+}
getWebScraperQueue().process(
Math.floor(Number(process.env.NUM_WORKERS_PER_QUEUE ?? 8)),
@@ -26,7 +31,7 @@ getWebScraperQueue().process(
success: success,
result: {
links: docs.map((doc) => {
- return { content: doc, source: doc.metadata.sourceURL };
+ return { content: doc, source: doc?.metadata?.sourceURL ?? doc?.url ?? "" };
}),
},
project_id: job.data.project_id,
diff --git a/apps/api/src/services/rate-limiter.ts b/apps/api/src/services/rate-limiter.ts
index 5bc9acb..5fa0964 100644
--- a/apps/api/src/services/rate-limiter.ts
+++ b/apps/api/src/services/rate-limiter.ts
@@ -2,17 +2,21 @@ import { RateLimiterRedis } from "rate-limiter-flexible";
import * as redis from "redis";
import { RateLimiterMode } from "../../src/types";
-const MAX_REQUESTS_PER_MINUTE_PREVIEW = 5;
-const MAX_CRAWLS_PER_MINUTE_STARTER = 2;
-const MAX_CRAWLS_PER_MINUTE_STANDARD = 4;
+const MAX_CRAWLS_PER_MINUTE_STARTER = 3;
+const MAX_CRAWLS_PER_MINUTE_STANDARD = 5;
const MAX_CRAWLS_PER_MINUTE_SCALE = 20;
+const MAX_SCRAPES_PER_MINUTE_STARTER = 20;
+const MAX_SCRAPES_PER_MINUTE_STANDARD = 40;
+const MAX_SCRAPES_PER_MINUTE_SCALE = 50;
+
+const MAX_SEARCHES_PER_MINUTE_STARTER = 20;
+const MAX_SEARCHES_PER_MINUTE_STANDARD = 40;
+const MAX_SEARCHES_PER_MINUTE_SCALE = 50;
+
+const MAX_REQUESTS_PER_MINUTE_PREVIEW = 5;
const MAX_REQUESTS_PER_MINUTE_ACCOUNT = 20;
-
-const MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS = 120;
-
-
-
+const MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS = 150;
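+// Each limiter now gets a distinct keyPrefix so the different modes no longer
+// share the same Redis counters (previously they all used "middleware").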
export const redisClient = redis.createClient({
url: process.env.REDIS_URL,
@@ -21,71 +25,109 @@ export const redisClient = redis.createClient({
export const previewRateLimiter = new RateLimiterRedis({
storeClient: redisClient,
- keyPrefix: "middleware",
+ keyPrefix: "preview",
points: MAX_REQUESTS_PER_MINUTE_PREVIEW,
duration: 60, // Duration in seconds
});
export const serverRateLimiter = new RateLimiterRedis({
storeClient: redisClient,
- keyPrefix: "middleware",
+ keyPrefix: "server",
points: MAX_REQUESTS_PER_MINUTE_ACCOUNT,
duration: 60, // Duration in seconds
});
export const crawlStatusRateLimiter = new RateLimiterRedis({
storeClient: redisClient,
- keyPrefix: "middleware",
+ keyPrefix: "crawl-status",
points: MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS,
duration: 60, // Duration in seconds
});
export const testSuiteRateLimiter = new RateLimiterRedis({
storeClient: redisClient,
- keyPrefix: "middleware",
- points: 1000,
+ keyPrefix: "test-suite",
+ points: 10000,
duration: 60, // Duration in seconds
});
-export function crawlRateLimit(plan: string){
- if(plan === "standard"){
- return new RateLimiterRedis({
- storeClient: redisClient,
- keyPrefix: "middleware",
- points: MAX_CRAWLS_PER_MINUTE_STANDARD,
- duration: 60, // Duration in seconds
- });
- }else if(plan === "scale"){
- return new RateLimiterRedis({
- storeClient: redisClient,
- keyPrefix: "middleware",
- points: MAX_CRAWLS_PER_MINUTE_SCALE,
- duration: 60, // Duration in seconds
- });
- }
- return new RateLimiterRedis({
- storeClient: redisClient,
- keyPrefix: "middleware",
- points: MAX_CRAWLS_PER_MINUTE_STARTER,
- duration: 60, // Duration in seconds
- });
-
-}
-
-
-
-
-export function getRateLimiter(mode: RateLimiterMode, token: string){
+export function getRateLimiter(mode: RateLimiterMode, token: string, plan?: string){
// Special test suite case. TODO: Change this later.
- if(token.includes("5089cefa58")){
+ if (token.includes("5089cefa58") || token.includes("6254cf9")){
return testSuiteRateLimiter;
}
- switch(mode) {
+ switch (mode) {
case RateLimiterMode.Preview:
return previewRateLimiter;
case RateLimiterMode.CrawlStatus:
return crawlStatusRateLimiter;
+ case RateLimiterMode.Crawl:
+ if (plan === "standard"){
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "crawl-standard",
+ points: MAX_CRAWLS_PER_MINUTE_STANDARD,
+ duration: 60, // Duration in seconds
+ });
+ } else if (plan === "scale"){
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "crawl-scale",
+ points: MAX_CRAWLS_PER_MINUTE_SCALE,
+ duration: 60, // Duration in seconds
+ });
+ }
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "crawl-starter",
+ points: MAX_CRAWLS_PER_MINUTE_STARTER,
+ duration: 60, // Duration in seconds
+ });
+ case RateLimiterMode.Scrape:
+ if (plan === "standard"){
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "scrape-standard",
+ points: MAX_SCRAPES_PER_MINUTE_STANDARD,
+ duration: 60, // Duration in seconds
+ });
+ } else if (plan === "scale"){
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "scrape-scale",
+ points: MAX_SCRAPES_PER_MINUTE_SCALE,
+ duration: 60, // Duration in seconds
+ });
+ }
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "scrape-starter",
+ points: MAX_SCRAPES_PER_MINUTE_STARTER,
+ duration: 60, // Duration in seconds
+ });
+ case RateLimiterMode.Search:
+ if (plan === "standard"){
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "search-standard",
+ points: MAX_SEARCHES_PER_MINUTE_STANDARD,
+ duration: 60, // Duration in seconds
+ });
+ } else if (plan === "scale"){
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "search-scale",
+ points: MAX_SEARCHES_PER_MINUTE_SCALE,
+ duration: 60, // Duration in seconds
+ });
+ }
+ return new RateLimiterRedis({
+ storeClient: redisClient,
+ keyPrefix: "search-starter",
+ points: MAX_SEARCHES_PER_MINUTE_STARTER,
+ duration: 60, // Duration in seconds
+ });
default:
return serverRateLimiter;
}
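The per-plan limiter construction that used to live in `crawlRateLimit` is now inlined into `getRateLimiter`, and each mode/plan pair gets its own Redis `keyPrefix`, so crawl, scrape, and search quotas no longer share the old `middleware` bucket. A hypothetical caller-side sketch (the import paths and the token/plan values are illustrative only):

```ts
import { getRateLimiter } from "./rate-limiter";
import { RateLimiterMode } from "../../src/types";

// Returns true if the request is allowed, false if rate limited.
async function allowRequest(token: string, plan?: string): Promise<boolean> {
  const limiter = getRateLimiter(RateLimiterMode.Scrape, token, plan);
  try {
    await limiter.consume(token); // one point per request, 60s window
    return true;
  } catch {
    return false; // rejected: over the per-minute quota
  }
}
```

Note that `getRateLimiter` builds a fresh `RateLimiterRedis` per call for the plan-specific cases; that is harmless because the counters live in Redis under the `keyPrefix`, though caching the instances would avoid the per-request allocation.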
diff --git a/apps/js-sdk/firecrawl/build/index.js b/apps/js-sdk/firecrawl/build/index.js
index 6e0f367..b850d5c 100644
--- a/apps/js-sdk/firecrawl/build/index.js
+++ b/apps/js-sdk/firecrawl/build/index.js
@@ -240,7 +240,7 @@ export default class FirecrawlApp {
* @param {string} action - The action being performed when the error occurred.
*/
handleError(response, action) {
- if ([402, 409, 500].includes(response.status)) {
+ if ([402, 408, 409, 500].includes(response.status)) {
const errorMessage = response.data.error || "Unknown error occurred";
throw new Error(`Failed to ${action}. Status code: ${response.status}. Error: ${errorMessage}`);
}
diff --git a/apps/js-sdk/firecrawl/package.json b/apps/js-sdk/firecrawl/package.json
index 9e1948a..3bacdf4 100644
--- a/apps/js-sdk/firecrawl/package.json
+++ b/apps/js-sdk/firecrawl/package.json
@@ -1,6 +1,6 @@
{
"name": "@mendable/firecrawl-js",
- "version": "0.0.20",
+ "version": "0.0.21",
"description": "JavaScript SDK for Firecrawl API",
"main": "build/index.js",
"types": "types/index.d.ts",
diff --git a/apps/js-sdk/firecrawl/src/index.ts b/apps/js-sdk/firecrawl/src/index.ts
index 0319c74..7654f1b 100644
--- a/apps/js-sdk/firecrawl/src/index.ts
+++ b/apps/js-sdk/firecrawl/src/index.ts
@@ -109,7 +109,7 @@ export default class FirecrawlApp {
const response: AxiosResponse = await axios.post(
"https://api.firecrawl.dev/v0/scrape",
jsonData,
- { headers }
+ { headers },
);
if (response.status === 200) {
const responseData = response.data;
@@ -324,7 +324,7 @@ export default class FirecrawlApp {
* @param {string} action - The action being performed when the error occurred.
*/
handleError(response: AxiosResponse, action: string): void {
- if ([402, 409, 500].includes(response.status)) {
+ if ([402, 408, 409, 500].includes(response.status)) {
const errorMessage: string =
response.data.error || "Unknown error occurred";
throw new Error(
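With 408 added to the handled status codes in both the built and source SDK, request timeouts are now rethrown with the server's error message instead of falling through to the generic failure path. An illustrative caller, assuming `FIRECRAWL_API_KEY` is set in the environment:

```ts
import FirecrawlApp from "@mendable/firecrawl-js";

const app = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY });

async function main() {
  try {
    const data = await app.scrapeUrl("https://firecrawl.dev");
    console.log(data);
  } catch (err) {
    // 402, 408, 409 and 500 responses surface here with the server's
    // message, e.g. "Failed to scrape URL. Status code: 408. Error: ..."
    console.error(err);
  }
}

main();
```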
diff --git a/apps/python-sdk/build/lib/firecrawl/firecrawl.py b/apps/python-sdk/build/lib/firecrawl/firecrawl.py
index 701810c..98cb8ed 100644
--- a/apps/python-sdk/build/lib/firecrawl/firecrawl.py
+++ b/apps/python-sdk/build/lib/firecrawl/firecrawl.py
@@ -4,10 +4,11 @@ import requests
import time
class FirecrawlApp:
- def __init__(self, api_key=None):
+ def __init__(self, api_key=None, api_url=None):
self.api_key = api_key or os.getenv('FIRECRAWL_API_KEY')
if self.api_key is None:
raise ValueError('No API key provided')
+ self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev')
@@ -38,7 +39,7 @@ class FirecrawlApp:
scrape_params[key] = value
# Make the POST request with the prepared headers and JSON data
response = requests.post(
- 'https://api.firecrawl.dev/v0/scrape',
+ f'{self.api_url}/v0/scrape',
headers=headers,
json=scrape_params
)
@@ -48,7 +49,7 @@ class FirecrawlApp:
return response['data']
else:
raise Exception(f'Failed to scrape URL. Error: {response["error"]}')
- elif response.status_code in [402, 409, 500]:
+ elif response.status_code in [402, 408, 409, 500]:
error_message = response.json().get('error', 'Unknown error occurred')
raise Exception(f'Failed to scrape URL. Status code: {response.status_code}. Error: {error_message}')
else:
@@ -63,7 +64,7 @@ class FirecrawlApp:
if params:
json_data.update(params)
response = requests.post(
- 'https://api.firecrawl.dev/v0/search',
+ f'{self.api_url}/v0/search',
headers=headers,
json=json_data
)
@@ -85,7 +86,7 @@ class FirecrawlApp:
json_data = {'url': url}
if params:
json_data.update(params)
- response = self._post_request('https://api.firecrawl.dev/v0/crawl', json_data, headers)
+ response = self._post_request(f'{self.api_url}/v0/crawl', json_data, headers)
if response.status_code == 200:
job_id = response.json().get('jobId')
if wait_until_done:
@@ -97,7 +98,7 @@ class FirecrawlApp:
def check_crawl_status(self, job_id):
headers = self._prepare_headers()
- response = self._get_request(f'https://api.firecrawl.dev/v0/crawl/status/{job_id}', headers)
+ response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers)
if response.status_code == 200:
return response.json()
else:
@@ -130,7 +131,7 @@ class FirecrawlApp:
def _monitor_job_status(self, job_id, headers, timeout):
import time
while True:
- status_response = self._get_request(f'https://api.firecrawl.dev/v0/crawl/status/{job_id}', headers)
+ status_response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers)
if status_response.status_code == 200:
status_data = status_response.json()
if status_data['status'] == 'completed':
@@ -148,7 +149,7 @@ class FirecrawlApp:
self._handle_error(status_response, 'check crawl status')
def _handle_error(self, response, action):
- if response.status_code in [402, 409, 500]:
+ if response.status_code in [402, 408, 409, 500]:
error_message = response.json().get('error', 'Unknown error occurred')
raise Exception(f'Failed to {action}. Status code: {response.status_code}. Error: {error_message}')
else:
diff --git a/apps/python-sdk/dist/firecrawl-py-0.0.8.tar.gz b/apps/python-sdk/dist/firecrawl-py-0.0.8.tar.gz
deleted file mode 100644
index b18dde5..0000000
Binary files a/apps/python-sdk/dist/firecrawl-py-0.0.8.tar.gz and /dev/null differ
diff --git a/apps/python-sdk/dist/firecrawl-py-0.0.9.tar.gz b/apps/python-sdk/dist/firecrawl-py-0.0.9.tar.gz
new file mode 100644
index 0000000..55e0eed
Binary files /dev/null and b/apps/python-sdk/dist/firecrawl-py-0.0.9.tar.gz differ
diff --git a/apps/python-sdk/dist/firecrawl_py-0.0.8-py3-none-any.whl b/apps/python-sdk/dist/firecrawl_py-0.0.8-py3-none-any.whl
deleted file mode 100644
index f71cb8e..0000000
Binary files a/apps/python-sdk/dist/firecrawl_py-0.0.8-py3-none-any.whl and /dev/null differ
diff --git a/apps/python-sdk/dist/firecrawl_py-0.0.9-py3-none-any.whl b/apps/python-sdk/dist/firecrawl_py-0.0.9-py3-none-any.whl
new file mode 100644
index 0000000..83cb5b7
Binary files /dev/null and b/apps/python-sdk/dist/firecrawl_py-0.0.9-py3-none-any.whl differ
diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py
index 701810c..98cb8ed 100644
--- a/apps/python-sdk/firecrawl/firecrawl.py
+++ b/apps/python-sdk/firecrawl/firecrawl.py
@@ -4,10 +4,11 @@ import requests
import time
class FirecrawlApp:
- def __init__(self, api_key=None):
+ def __init__(self, api_key=None, api_url=None):
self.api_key = api_key or os.getenv('FIRECRAWL_API_KEY')
if self.api_key is None:
raise ValueError('No API key provided')
+ self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev')
@@ -38,7 +39,7 @@ class FirecrawlApp:
scrape_params[key] = value
# Make the POST request with the prepared headers and JSON data
response = requests.post(
- 'https://api.firecrawl.dev/v0/scrape',
+ f'{self.api_url}/v0/scrape',
headers=headers,
json=scrape_params
)
@@ -48,7 +49,7 @@ class FirecrawlApp:
return response['data']
else:
raise Exception(f'Failed to scrape URL. Error: {response["error"]}')
- elif response.status_code in [402, 409, 500]:
+ elif response.status_code in [402, 408, 409, 500]:
error_message = response.json().get('error', 'Unknown error occurred')
raise Exception(f'Failed to scrape URL. Status code: {response.status_code}. Error: {error_message}')
else:
@@ -63,7 +64,7 @@ class FirecrawlApp:
if params:
json_data.update(params)
response = requests.post(
- 'https://api.firecrawl.dev/v0/search',
+ f'{self.api_url}/v0/search',
headers=headers,
json=json_data
)
@@ -85,7 +86,7 @@ class FirecrawlApp:
json_data = {'url': url}
if params:
json_data.update(params)
- response = self._post_request('https://api.firecrawl.dev/v0/crawl', json_data, headers)
+ response = self._post_request(f'{self.api_url}/v0/crawl', json_data, headers)
if response.status_code == 200:
job_id = response.json().get('jobId')
if wait_until_done:
@@ -97,7 +98,7 @@ class FirecrawlApp:
def check_crawl_status(self, job_id):
headers = self._prepare_headers()
- response = self._get_request(f'https://api.firecrawl.dev/v0/crawl/status/{job_id}', headers)
+ response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers)
if response.status_code == 200:
return response.json()
else:
@@ -130,7 +131,7 @@ class FirecrawlApp:
def _monitor_job_status(self, job_id, headers, timeout):
import time
while True:
- status_response = self._get_request(f'https://api.firecrawl.dev/v0/crawl/status/{job_id}', headers)
+ status_response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers)
if status_response.status_code == 200:
status_data = status_response.json()
if status_data['status'] == 'completed':
@@ -148,7 +149,7 @@ class FirecrawlApp:
self._handle_error(status_response, 'check crawl status')
def _handle_error(self, response, action):
- if response.status_code in [402, 409, 500]:
+ if response.status_code in [402, 408, 409, 500]:
error_message = response.json().get('error', 'Unknown error occurred')
raise Exception(f'Failed to {action}. Status code: {response.status_code}. Error: {error_message}')
else:
diff --git a/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO b/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO
index e54fda5..c1ee531 100644
--- a/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO
+++ b/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: firecrawl-py
-Version: 0.0.8
+Version: 0.0.9
Summary: Python SDK for Firecrawl API
Home-page: https://github.com/mendableai/firecrawl
Author: Mendable.ai
diff --git a/apps/python-sdk/setup.py b/apps/python-sdk/setup.py
index 78a4d84..7df520e 100644
--- a/apps/python-sdk/setup.py
+++ b/apps/python-sdk/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='firecrawl-py',
- version='0.0.8',
+ version='0.0.9',
url='https://github.com/mendableai/firecrawl',
author='Mendable.ai',
author_email='nick@mendable.ai',
diff --git a/apps/test-suite/data/crawl.json b/apps/test-suite/data/crawl.json
new file mode 100644
index 0000000..8bc28a6
--- /dev/null
+++ b/apps/test-suite/data/crawl.json
@@ -0,0 +1,178 @@
+[
+ {
+ "website": "https://www.vellum.ai/llm-leaderboard",
+ "expected_min_num_of_pages": 1,
+ "expected_crawled_pages": ["https://www.vellum.ai/llm-leaderboard"]
+ },
+ {
+ "website": "https://openai.com/news",
+ "expected_min_num_of_pages": 4,
+ "expected_crawled_pages": [
+ "https://openai.com/news/company/",
+ "https://openai.com/news/research/",
+ "https://openai.com/news/safety-and-alignment/",
+ "https://openai.com/news/stories/"
+ ]
+ },
+ {
+ "website": "https://www.framer.com/pricing",
+ "expected_min_num_of_pages": 1,
+ "expected_not_crawled_pages": [
+ "https://www.framer.com/features/navigation/",
+ "https://www.framer.com/contact/",
+ "https://www.framer.com/add-ons/",
+ "https://www.framer.com/free-saas-ui-kit/",
+ "https://www.framer.com/help/",
+ "https://www.framer.com/features/effects/",
+ "https://www.framer.com/enterprise/",
+ "https://www.framer.com/templates/"
+ ]
+ },
+ {
+ "website": "https://mendable.ai/pricing",
+ "expected_min_num_of_pages": 1,
+ "expected_not_crawled_pages": [
+ "https://mendable.ai/",
+ "https://mendable.ai/blog",
+ "https://mendable.ai/signin",
+ "https://mendable.ai/signup",
+ "https://mendable.ai",
+ "https://mendable.ai/usecases/sales-enablement",
+ "https://mendable.ai/usecases/documentation",
+ "https://mendable.ai/usecases/cs-enablement",
+ "https://mendable.ai/usecases/productcopilot",
+ "https://mendable.ai/security"
+ ],
+ "notes": "This one should not go backwards, but it does!"
+ },
+
+ {
+ "website": "https://agentops.ai/blog",
+ "expected_min_num_of_pages": 6,
+ "expected_crawled_pages": [
+ "https://www.agentops.ai/blog/effortless-hr-management-with-saas",
+ "https://www.agentops.ai/blog/streamlining-hr-with-saas",
+ "https://www.agentops.ai/blog/simplify-hr-with-modern-saas-solutions",
+ "https://www.agentops.ai/blog/efficient-hr-operations-with-saas",
+ "https://www.agentops.ai/blog/hr-made-simple-with-saas",
+ "https://agentops.ai/blog"
+ ],
+ "expected_not_crawled_pages": [
+ "https://agentops.ai/about-us",
+ "https://agentops.ai/contact-us"
+ ]
+ },
+ {
+ "website": "https://en.wikipedia.org/wiki/T._N._Seshan",
+ "expected_min_num_of_pages": 1,
+ "expected_not_crawled_pages": [
+ "https://en.wikipedia.org/wiki/Wikipedia:Contents",
+ "https://en.wikipedia.org/wiki/Wikipedia:Contact_us",
+ "https://en.wikipedia.org/wiki/V._S._Ramadevi",
+ "https://en.wikipedia.org/wiki/Wikipedia:About",
+ "https://en.wikipedia.org/wiki/Help:Introduction",
+ "https://en.wikipedia.org/wiki/H._D._Deve_Gowda",
+ "https://en.wikipedia.org/wiki/File:T.N._Seshan_in_1994.jpg"
+ ]
+ },
+
+ {
+ "website": "https://ycombinator.com/companies",
+ "expected_min_num_of_pages": 20,
+ "expected_crawled_pages": [
+ "https://www.ycombinator.com/companies/industry/elearning",
+ "https://www.ycombinator.com/companies/industry/computer-vision",
+ "https://www.ycombinator.com/companies/industry/health-tech",
+ "https://www.ycombinator.com/companies/industry/education",
+ "https://www.ycombinator.com/companies/industry/robotics",
+ "https://www.ycombinator.com/companies/industry/hardware",
+ "https://www.ycombinator.com/companies/industry/saas",
+ "https://www.ycombinator.com/companies/industry/hard-tech",
+ "https://www.ycombinator.com/companies/industry/developer-tools",
+ "https://www.ycombinator.com/companies/industry/entertainment",
+ "https://www.ycombinator.com/companies/industry/finance",
+ "https://www.ycombinator.com/companies/industry/generative-ai",
+ "https://www.ycombinator.com/companies/industry/machine-learning"
+ ]
+ },
+ {
+ "website": "https://firecrawl.dev",
+ "expected_min_num_of_pages": 2,
+ "expected_crawled_pages": [
+ "https://firecrawl.dev/",
+ "https://firecrawl.dev/pricing"
+ ]
+ },
+
+
+ {
+ "website": "https://fly.io/docs/gpus/gpu-quickstart",
+ "expected_min_num_of_pages": 1,
+ "expected_not_crawled_pages": [
+ "https://fly.io/docs/getting-started/",
+ "https://fly.io/docs/hands-on/",
+ "https://fly.io/docs/about/support/",
+ "https://fly.io/docs/blueprints/going-to-production-with-healthcare-apps/",
+ "https://fly.io/docs/machines/flyctl/fly-machine-update/",
+ "https://fly.io/docs/blueprints/review-apps-guide/",
+ "https://fly.io/docs/blueprints/supercronic/"
+ ],
+ "notes": "This one should not go backwards, but it does!"
+ },
+
+ {
+ "website": "https://www.instructables.com/circuits",
+ "expected_min_num_of_pages": 12,
+ "expected_crawled_pages": [
+ "https://www.instructables.com/circuits/",
+ "https://www.instructables.com/circuits/apple/projects/",
+ "https://www.instructables.com/circuits/art/projects/",
+ "https://www.instructables.com/circuits/electronics/projects/",
+ "https://www.instructables.com/circuits/microsoft/projects/",
+ "https://www.instructables.com/circuits/microcontrollers/projects/",
+ "https://www.instructables.com/circuits/community/",
+ "https://www.instructables.com/circuits/leds/projects/",
+ "https://www.instructables.com/circuits/gadgets/projects/",
+ "https://www.instructables.com/circuits/arduino/projects/",
+ "https://www.instructables.com/circuits/lasers/projects/",
+ "https://www.instructables.com/circuits/clocks/projects/"
+ ]
+ },
+ {
+ "website": "https://richmondconfidential.org",
+ "expected_min_num_of_pages": 20,
+ "expected_crawled_pages": [
+ "https://richmondconfidential.org/2009/10/13/salesians-star-guard-has-a-big-impact/",
+ "https://richmondconfidential.org/2009/10/13/on-team-of-beginners-oilers-old-hand-stands-out/",
+ "https://richmondconfidential.org/2009/10/19/point-richmond-clockmaker-turns-clutter-into-crafts/",
+ "https://richmondconfidential.org/2009/10/13/profile-maurice-cathy/",
+ "https://richmondconfidential.org/2009/10/13/soul-food-rescue-mission-rebuilds-diets-and-lives/",
+ "https://richmondconfidential.org/2009/10/21/in-tough-economy-pain-trickles-to-the-bottom/",
+ "https://richmondconfidential.org/2009/10/19/richmond-homicide-map/",
+ "https://richmondconfidential.org/2009/10/13/rough-roads-for-richmonds-cab-drivers/",
+ "https://richmondconfidential.org/2009/10/13/before-napa-there-was-winehaven/",
+ "https://richmondconfidential.org/2009/10/13/family-calls-for-end-to-violence-at-memorial-for-slain-woman-friend/"
+ ]
+ },
+ {
+ "website": "https://www.boardgamegeek.com",
+ "expected_min_num_of_pages": 15,
+ "expected_crawled_pages": [
+ "https://www.boardgamegeek.com/browse/boardgameartist",
+ "https://www.boardgamegeek.com/browse/boardgamehonor",
+ "https://www.boardgamegeek.com/browse/boardgamepublisher",
+ "https://www.boardgamegeek.com/browse/boardgamepodcast",
+ "https://www.boardgamegeek.com/wiki/page/Index",
+ "https://www.boardgamegeek.com/browse/boardgamecategory",
+ "https://www.boardgamegeek.com/boardgame/random",
+ "https://www.boardgamegeek.com/browse/boardgamemechanic",
+ "https://www.boardgamegeek.com/forums",
+ "https://www.boardgamegeek.com/gonecardboard",
+ "https://www.boardgamegeek.com/browse/boardgameaccessory",
+ "https://www.boardgamegeek.com/browse/boardgamedesigner",
+ "https://www.boardgamegeek.com/",
+ "https://www.boardgamegeek.com/previews",
+ "https://www.boardgamegeek.com/browse/boardgame"
+ ]
+ }
+]
diff --git a/apps/test-suite/data/websites.json b/apps/test-suite/data/scrape.json
similarity index 100%
rename from apps/test-suite/data/websites.json
rename to apps/test-suite/data/scrape.json
diff --git a/apps/test-suite/package.json b/apps/test-suite/package.json
index 74ab7a6..33aa2cd 100644
--- a/apps/test-suite/package.json
+++ b/apps/test-suite/package.json
@@ -3,7 +3,9 @@
"version": "1.0.0",
"description": "",
"scripts": {
- "test": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false"
+ "test": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false",
+ "test:scrape": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathPattern=tests/scrape.test.ts",
+ "test:crawl": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathPattern=tests/crawl.test.ts"
},
"author": "",
"license": "ISC",
diff --git a/apps/test-suite/tests/crawl.test.ts b/apps/test-suite/tests/crawl.test.ts
new file mode 100644
index 0000000..577725a
--- /dev/null
+++ b/apps/test-suite/tests/crawl.test.ts
@@ -0,0 +1,150 @@
+import request from "supertest";
+import dotenv from "dotenv";
+import { WebsiteScrapeError } from "../utils/types";
+import { logErrors } from "../utils/log";
+
+import websitesData from "../data/crawl.json";
+import "dotenv/config";
+
+import fs from 'fs';
+dotenv.config();
+
+interface WebsiteData {
+ website: string;
+ expected_min_num_of_pages: number;
+ expected_crawled_pages: string[];
+ expected_not_crawled_pages?: string[];
+ notes?: string;
+}
+
+const TEST_URL = "http://127.0.0.1:3002";
+
+describe("Crawling Checkup (E2E)", () => {
+ beforeAll(() => {
+ if (!process.env.TEST_API_KEY) {
+ throw new Error("TEST_API_KEY is not set");
+ }
+ });
+
+ describe("Crawling website tests with a dataset", () => {
+ it("Should crawl the website and verify the response", async () => {
+ let passedTests = 0;
+ const startTime = new Date().getTime();
+ const date = new Date();
+ const logsDir = `logs/${date.getMonth() + 1}-${date.getDate()}-${date.getFullYear()}`;
+
+ let errorLogFileName = `${logsDir}/run.log_${new Date().toTimeString().split(' ')[0]}`;
+ const errorLog: WebsiteScrapeError[] = [];
+
+ for (const websiteData of websitesData) {
+ try {
+ const crawlResponse = await request(TEST_URL)
+ .post("/v0/crawl")
+ .set("Content-Type", "application/json")
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+ .send({ url: websiteData.website, pageOptions: { onlyMainContent: true }, crawlerOptions: { limit: 100, returnOnlyUrls: true }});
+
+ const jobId = crawlResponse.body.jobId;
+ let completedResponse: any;
+ let isFinished = false;
+
+ while (!isFinished) {
+ completedResponse = await request(TEST_URL)
+ .get(`/v0/crawl/status/${jobId}`)
+ .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+ isFinished = completedResponse.body.status === "completed";
+
+ if (!isFinished) {
+ await new Promise(resolve => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
+ }
+ }
+
+ if (!completedResponse) {
+ // no status response was received; log it and skip this website
+ console.log('No response');
+ continue;
+ }
+
+ if (!completedResponse.body || completedResponse.body.status !== "completed") {
+ errorLog.push({
+ website: websiteData.website,
+ prompt: 'CRAWL',
+ expected_output: 'SUCCESS',
+ actual_output: 'FAILURE',
+ error: `Crawl job did not complete successfully.`
+ });
+ continue;
+ }
+
+ // check how many webpages were crawled successfully
+ // compares with expected_min_num_of_pages
+ if (completedResponse.body.data.length < websiteData.expected_min_num_of_pages) {
+ errorLog.push({
+ website: websiteData.website,
+ prompt: 'CRAWL',
+ expected_output: `SUCCESS: ${websiteData.expected_min_num_of_pages}`,
+ actual_output: `FAILURE: ${completedResponse.body.data.length}`,
+ error: `Expected at least ${websiteData.expected_min_num_of_pages} webpages, but got ${completedResponse.body.data.length}`
+ });
+ console.log('Error: ', errorLog);
+ continue;
+ }
+
+ // checks if crawled pages contain expected_crawled_pages
+ if (websiteData.expected_crawled_pages && websiteData.expected_crawled_pages.length > 0 && websiteData.expected_crawled_pages.some(page => !completedResponse.body.data?.some((d: { url: string }) => d.url === page))) {
+ errorLog.push({
+ website: websiteData.website,
+ prompt: 'CRAWL',
+ expected_output: `SUCCESS: ${websiteData.expected_crawled_pages}`,
+ actual_output: `FAILURE: ${completedResponse.body.data}`,
+ error: `Expected crawled pages to contain ${websiteData.expected_crawled_pages}, but got ${completedResponse.body.data}`
+ });
+ console.log('Error: ', errorLog);
+ continue;
+ }
+
+ // checks that crawled pages do not contain expected_not_crawled_pages
+ if (websiteData.expected_not_crawled_pages && websiteData.expected_not_crawled_pages.length > 0 && completedResponse.body.data && websiteData.expected_not_crawled_pages.filter(page => completedResponse.body.data.some((d: { url: string }) => d.url === page)).length > 0) {
+ errorLog.push({
+ website: websiteData.website,
+ prompt: 'CRAWL',
+ expected_output: `SUCCESS: ${websiteData.expected_not_crawled_pages}`,
+ actual_output: `FAILURE: ${completedResponse.body.data}`,
+ error: `Expected crawled pages to not contain ${websiteData.expected_not_crawled_pages}, but got ${completedResponse.body.data}`
+ });
+ console.log('Error: ', errorLog);
+ continue;
+ }
+
+ passedTests++;
+ } catch (error) {
+ console.error(`Error processing ${websiteData.website}: ${error}`);
+ errorLog.push({
+ website: websiteData.website,
+ prompt: 'CRAWL',
+ expected_output: 'SUCCESS',
+ actual_output: 'FAILURE',
+ error: `Error processing ${websiteData.website}: ${error}`
+ });
+ continue;
+ }
+ }
+
+ const score = (passedTests / websitesData.length) * 100;
+ const endTime = new Date().getTime();
+ const timeTaken = (endTime - startTime) / 1000;
+ console.log(`Score: ${score}%`);
+
+ await logErrors(errorLog, timeTaken, 0, score, websitesData.length);
+
+ if (process.env.ENV === "local" && errorLog.length > 0) {
+ if (!fs.existsSync(logsDir)){
+ fs.mkdirSync(logsDir, { recursive: true });
+ }
+ fs.writeFileSync(errorLogFileName, JSON.stringify(errorLog, null, 2));
+ }
+
+ expect(score).toBeGreaterThanOrEqual(90);
+ }, 350000); // 350 seconds timeout
+ });
+});
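One caveat with the status-polling loop above: it waits indefinitely for a job to reach `completed`, leaning on Jest's 350-second timeout to kill a stuck run. A hypothetical bounded-poll helper (not part of this change) would fail fast with a clearer message:

```ts
// Hypothetical helper; fetchStatus would wrap the supertest GET call.
async function pollUntilDone(
  fetchStatus: () => Promise<{ status: string }>,
  maxWaitMs = 300_000,
  intervalMs = 1_000
): Promise<{ status: string }> {
  const deadline = Date.now() + maxWaitMs;
  while (Date.now() < deadline) {
    const res = await fetchStatus();
    if (res.status === "completed" || res.status === "failed") return res;
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error(`Crawl job not finished after ${maxWaitMs}ms`);
}
```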
diff --git a/apps/test-suite/index.test.ts b/apps/test-suite/tests/scrape.test.ts
similarity index 93%
rename from apps/test-suite/index.test.ts
rename to apps/test-suite/tests/scrape.test.ts
index 8d6c31f..ec7b720 100644
--- a/apps/test-suite/index.test.ts
+++ b/apps/test-suite/tests/scrape.test.ts
@@ -1,16 +1,14 @@
import request from "supertest";
import dotenv from "dotenv";
-import Anthropic from "@anthropic-ai/sdk";
-import { numTokensFromString } from "./utils/tokens";
+import { numTokensFromString } from "../utils/tokens";
import OpenAI from "openai";
-import { WebsiteScrapeError } from "./utils/types";
-import { logErrors } from "./utils/log";
+import { WebsiteScrapeError } from "../utils/types";
+import { logErrors } from "../utils/log";
-const websitesData = require("./data/websites.json");
+import websitesData from "../data/scrape.json";
import "dotenv/config";
-const fs = require('fs');
-
+import fs from 'fs';
dotenv.config();
interface WebsiteData {
@@ -21,8 +19,7 @@ interface WebsiteData {
const TEST_URL = "http://127.0.0.1:3002";
-
-describe("Scraping/Crawling Checkup (E2E)", () => {
+describe("Scraping Checkup (E2E)", () => {
beforeAll(() => {
if (!process.env.TEST_API_KEY) {
throw new Error("TEST_API_KEY is not set");
@@ -72,10 +69,6 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
return null;
}
- const anthropic = new Anthropic({
- apiKey: process.env.ANTHROPIC_API_KEY,
- });
-
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
@@ -183,7 +176,7 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
}
- expect(score).toBeGreaterThanOrEqual(75);
+ expect(score).toBeGreaterThanOrEqual(70);
}, 350000); // 350 seconds timeout
});
});
diff --git a/apps/test-suite/tsconfig.json b/apps/test-suite/tsconfig.json
index e075f97..afa29e7 100644
--- a/apps/test-suite/tsconfig.json
+++ b/apps/test-suite/tsconfig.json
@@ -39,7 +39,7 @@
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
- // "resolveJsonModule": true, /* Enable importing .json files. */
+ "resolveJsonModule": true, /* Enable importing .json files. */
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
// "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 0000000..049672d
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,78 @@
+name: firecrawl
+version: '3.9'
+services:
+ playwright-service:
+ build: apps/playwright-service
+ environment:
+ - PORT=3000
+ networks:
+ - backend
+
+ api:
+ build: apps/api
+ environment:
+ - REDIS_URL=${REDIS_URL:-redis://redis:6379}
+ - PLAYWRIGHT_MICROSERVICE_URL=${PLAYWRIGHT_MICROSERVICE_URL:-http://playwright-service:3000}
+ - USE_DB_AUTHENTICATION=${USE_DB_AUTHENTICATION}
+ - PORT=${PORT:-3002}
+ - NUM_WORKERS_PER_QUEUE=${NUM_WORKERS_PER_QUEUE}
+ - OPENAI_API_KEY=${OPENAI_API_KEY}
+ - SLACK_WEBHOOK_URL=${SLACK_WEBHOOK_URL}
+ - SERPER_API_KEY=${SERPER_API_KEY}
+ - LLAMAPARSE_API_KEY=${LLAMAPARSE_API_KEY}
+ - LOGTAIL_KEY=${LOGTAIL_KEY}
+ - BULL_AUTH_KEY=${BULL_AUTH_KEY}
+ - TEST_API_KEY=${TEST_API_KEY}
+ - POSTHOG_API_KEY=${POSTHOG_API_KEY}
+ - POSTHOG_HOST=${POSTHOG_HOST}
+ - SUPABASE_ANON_TOKEN=${SUPABASE_ANON_TOKEN}
+ - SUPABASE_URL=${SUPABASE_URL}
+ - SUPABASE_SERVICE_TOKEN=${SUPABASE_SERVICE_TOKEN}
+ - SCRAPING_BEE_API_KEY=${SCRAPING_BEE_API_KEY}
+ - HOST=${HOST:-0.0.0.0}
+ depends_on:
+ - redis
+ - playwright-service
+ ports:
+ - "3002:3002"
+ command: [ "pnpm", "run", "start:production" ]
+ networks:
+ - backend
+
+ worker:
+ build: apps/api
+ environment:
+ - REDIS_URL=${REDIS_URL:-redis://redis:6379}
+ - PLAYWRIGHT_MICROSERVICE_URL=${PLAYWRIGHT_MICROSERVICE_URL:-http://playwright-service:3000}
+ - USE_DB_AUTHENTICATION=${USE_DB_AUTHENTICATION}
+ - PORT=${PORT:-3002}
+ - NUM_WORKERS_PER_QUEUE=${NUM_WORKERS_PER_QUEUE}
+ - OPENAI_API_KEY=${OPENAI_API_KEY}
+ - SLACK_WEBHOOK_URL=${SLACK_WEBHOOK_URL}
+ - SERPER_API_KEY=${SERPER_API_KEY}
+ - LLAMAPARSE_API_KEY=${LLAMAPARSE_API_KEY}
+ - LOGTAIL_KEY=${LOGTAIL_KEY}
+ - BULL_AUTH_KEY=${BULL_AUTH_KEY}
+ - TEST_API_KEY=${TEST_API_KEY}
+ - POSTHOG_API_KEY=${POSTHOG_API_KEY}
+ - POSTHOG_HOST=${POSTHOG_HOST}
+ - SUPABASE_ANON_TOKEN=${SUPABASE_ANON_TOKEN}
+ - SUPABASE_URL=${SUPABASE_URL}
+ - SUPABASE_SERVICE_TOKEN=${SUPABASE_SERVICE_TOKEN}
+ - SCRAPING_BEE_API_KEY=${SCRAPING_BEE_API_KEY}
+ - HOST=${HOST:-0.0.0.0}
+ depends_on:
+ - redis
+ - playwright-service
+ - api
+ networks:
+ - backend
+ redis:
+ image: redis:alpine
+ networks:
+ - backend
+ command: redis-server --bind 0.0.0.0
+
+networks:
+ backend:
+ driver: bridge