
Merge branch 'main' into feat/removeTags-regex

This commit is contained in:
Rafael Miller 2024-06-18 14:39:59 -03:00 committed by GitHub
commit f5a9acc4c6
25 changed files with 2050 additions and 943 deletions

View File

@@ -27,6 +27,7 @@ env:
   TEST_API_KEY: ${{ secrets.TEST_API_KEY }}
   HYPERDX_API_KEY: ${{ secrets.HYPERDX_API_KEY }}
   HDX_NODE_BETA_MODE: 1
+  FIRE_ENGINE_BETA_URL: ${{ secrets.FIRE_ENGINE_BETA_URL }}
 jobs:

View File

@ -1,7 +1,7 @@
name: Fly Deploy Direct name: Fly Deploy Direct
on: on:
schedule: schedule:
- cron: '0 * * * *' - cron: '0 */2 * * *'
env: env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
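(For reference: a cron of '0 * * * *' fires at minute 0 of every hour, while '0 */2 * * *' fires at minute 0 of every second hour, so this change halves the scheduled deploy frequency.)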

View File

@@ -93,7 +93,7 @@ jobs:
         working-directory: ./apps/test-suite
       - name: Run E2E tests
         run: |
-          npm run test
+          npm run test:suite
         working-directory: ./apps/test-suite
   python-sdk-tests:

View File

@@ -12,7 +12,7 @@
     "build": "tsc",
     "test": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='src/__tests__/e2e_noAuth/*'",
     "test:local-no-auth": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='src/__tests__/e2e_withAuth/*'",
-    "test:prod": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='src/__tests__/e2e_noAuth/*'",
+    "test:prod": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='(src/__tests__/e2e_noAuth|src/__tests__/e2e_full_withAuth)'",
     "workers": "nodemon --exec ts-node src/services/queue-worker.ts",
     "worker:production": "node dist/src/services/queue-worker.js",
     "mongo-docker": "docker run -d -p 2717:27017 -v ./mongo-data:/data/db --name mongodb mongo:latest",

File diff suppressed because it is too large

View File

@@ -1,10 +1,8 @@
 import request from "supertest";
 import dotenv from "dotenv";
-import { v4 as uuidv4 } from "uuid";
+import { FirecrawlCrawlResponse, FirecrawlCrawlStatusResponse, FirecrawlScrapeResponse } from "../../types";
 dotenv.config();
-// const TEST_URL = 'http://localhost:3002'
 const TEST_URL = "http://127.0.0.1:3002";
 describe("E2E Tests for API Routes", () => {
@@ -15,31 +13,23 @@ describe("E2E Tests for API Routes", () => {
   afterAll(() => {
     delete process.env.USE_DB_AUTHENTICATION;
   });
-  describe("GET /", () => {
-    it.concurrent("should return Hello, world! message", async () => {
-      const response = await request(TEST_URL).get("/");
+  describe("GET /is-production", () => {
+    it.concurrent("should return the production status", async () => {
+      const response = await request(TEST_URL).get("/is-production");
       expect(response.statusCode).toBe(200);
-      expect(response.text).toContain("SCRAPERS-JS: Hello, world! Fly.io");
-    });
-  });
-  describe("GET /test", () => {
-    it.concurrent("should return Hello, world! message", async () => {
-      const response = await request(TEST_URL).get("/test");
-      expect(response.statusCode).toBe(200);
-      expect(response.text).toContain("Hello, world!");
+      expect(response.body).toHaveProperty("isProduction");
     });
   });
   describe("POST /v0/scrape", () => {
     it.concurrent("should require authorization", async () => {
-      const response = await request(TEST_URL).post("/v0/scrape");
+      const response: FirecrawlScrapeResponse = await request(TEST_URL).post("/v0/scrape");
       expect(response.statusCode).toBe(401);
     });
     it.concurrent("should return an error response with an invalid API key", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
         .post("/v0/scrape")
         .set("Authorization", `Bearer invalid-api-key`)
         .set("Content-Type", "application/json")
@@ -47,48 +37,26 @@ describe("E2E Tests for API Routes", () => {
       expect(response.statusCode).toBe(401);
     });
-    it.concurrent("should return an error for a blocklisted URL", async () => {
-      const blocklistedUrl = "https://facebook.com/fake-test";
-      const response = await request(TEST_URL)
+    it.concurrent("should return a successful response with a valid API key", async () => {
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
         .post("/v0/scrape")
         .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
         .set("Content-Type", "application/json")
-        .send({ url: blocklistedUrl });
-      expect(response.statusCode).toBe(403);
-      expect(response.body.error).toContain(
-        "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."
-      );
-    });
+        .send({ url: "https://roastmywebsite.ai" });
+      expect(response.statusCode).toBe(200);
+      expect(response.body).toHaveProperty("data");
+      expect(response.body.data).toHaveProperty("content");
+      expect(response.body.data).toHaveProperty("markdown");
+      expect(response.body.data).toHaveProperty("metadata");
+      expect(response.body.data).not.toHaveProperty("html");
+      expect(response.body.data.content).toContain("_Roast_");
+      expect(response.body.data.metadata.pageStatusCode).toBe(200);
+      expect(response.body.data.metadata.pageError).toBeUndefined();
+    }, 30000); // 30 seconds timeout
-    // tested on rate limit test
-    // it.concurrent("should return a successful response with a valid preview token", async () => {
-    //   const response = await request(TEST_URL)
-    //     .post("/v0/scrape")
-    //     .set("Authorization", `Bearer this_is_just_a_preview_token`)
-    //     .set("Content-Type", "application/json")
-    //     .send({ url: "https://roastmywebsite.ai" });
-    //   expect(response.statusCode).toBe(200);
-    // }, 30000); // 30 seconds timeout
-    // it.concurrent("should return a successful response with a valid API key", async () => {
-    //   const response = await request(TEST_URL)
-    //     .post("/v0/scrape")
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-    //     .set("Content-Type", "application/json")
-    //     .send({ url: "https://roastmywebsite.ai" });
-    //   expect(response.statusCode).toBe(200);
-    //   expect(response.body).toHaveProperty("data");
-    //   expect(response.body.data).toHaveProperty("content");
-    //   expect(response.body.data).toHaveProperty("markdown");
-    //   expect(response.body.data).toHaveProperty("metadata");
-    //   expect(response.body.data).not.toHaveProperty("html");
-    //   expect(response.body.data.content).toContain("_Roast_");
-    //   expect(response.body.data.metadata.pageStatusCode).toBe(200);
-    //   expect(response.body.data.metadata.pageError).toBeUndefined();
-    // }, 30000); // 30 seconds timeout
     it.concurrent("should return a successful response with a valid API key and includeHtml set to true", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
         .post("/v0/scrape")
         .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
         .set("Content-Type", "application/json")
@@ -110,7 +78,7 @@ describe("E2E Tests for API Routes", () => {
     }, 30000); // 30 seconds timeout
     it.concurrent('should return a successful response for a valid scrape with PDF file', async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -127,7 +95,7 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 seconds
     it.concurrent('should return a successful response for a valid scrape with PDF file without explicit .pdf extension', async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -143,23 +111,8 @@ describe("E2E Tests for API Routes", () => {
       expect(response.body.data.metadata.pageError).toBeUndefined();
     }, 60000); // 60 seconds
-    it.concurrent('should return a successful response for a valid scrape with PDF file and parsePDF set to false', async () => {
-      const response = await request(TEST_URL)
-        .post('/v0/scrape')
-        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
-        .set('Content-Type', 'application/json')
-        .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001.pdf', pageOptions: { parsePDF: false } });
-      await new Promise((r) => setTimeout(r, 6000));
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty('data');
-      expect(response.body.data).toHaveProperty('content');
-      expect(response.body.data).toHaveProperty('metadata');
-      expect(response.body.data.content).toContain('/Title(arXiv:astro-ph/9301001v1 7 Jan 1993)>>endobj');
-    }, 60000); // 60 seconds
     it.concurrent("should return a successful response with a valid API key with removeTags option", async () => {
-      const responseWithoutRemoveTags = await request(TEST_URL)
+      const responseWithoutRemoveTags: FirecrawlScrapeResponse = await request(TEST_URL)
        .post("/v0/scrape")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -175,7 +128,7 @@ describe("E2E Tests for API Routes", () => {
       expect(responseWithoutRemoveTags.body.data.content).toContain("[Sandbox]("); // .nav
       expect(responseWithoutRemoveTags.body.data.content).toContain("web scraping"); // strong
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post("/v0/scrape")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -192,29 +145,8 @@ describe("E2E Tests for API Routes", () => {
       expect(response.body.data.content).not.toContain("web scraping"); // strong
     }, 30000); // 30 seconds timeout
-    // TODO: add this test back once we nail the waitFor option to be more deterministic
-    // it.concurrent("should return a successful response with a valid API key and waitFor option", async () => {
-    //   const startTime = Date.now();
-    //   const response = await request(TEST_URL)
-    //     .post("/v0/scrape")
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-    //     .set("Content-Type", "application/json")
-    //     .send({ url: "https://firecrawl.dev", pageOptions: { waitFor: 7000 } });
-    //   const endTime = Date.now();
-    //   const duration = endTime - startTime;
-    //   expect(response.statusCode).toBe(200);
-    //   expect(response.body).toHaveProperty("data");
-    //   expect(response.body.data).toHaveProperty("content");
-    //   expect(response.body.data).toHaveProperty("markdown");
-    //   expect(response.body.data).toHaveProperty("metadata");
-    //   expect(response.body.data).not.toHaveProperty("html");
-    //   expect(response.body.data.content).toContain("🔥 Firecrawl");
-    //   expect(duration).toBeGreaterThanOrEqual(7000);
-    // }, 12000); // 12 seconds timeout
     it.concurrent('should return a successful response for a scrape with 400 page', async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -230,7 +162,7 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 seconds
     it.concurrent('should return a successful response for a scrape with 401 page', async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -246,7 +178,7 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 seconds
     it.concurrent("should return a successful response for a scrape with 403 page", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -262,7 +194,7 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 seconds
     it.concurrent('should return a successful response for a scrape with 404 page', async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -294,7 +226,7 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 seconds
     it.concurrent('should return a successful response for a scrape with 500 page', async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlScrapeResponse = await request(TEST_URL)
        .post('/v0/scrape')
        .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`)
        .set('Content-Type', 'application/json')
@@ -312,12 +244,12 @@ describe("E2E Tests for API Routes", () => {
   describe("POST /v0/crawl", () => {
     it.concurrent("should require authorization", async () => {
-      const response = await request(TEST_URL).post("/v0/crawl");
+      const response: FirecrawlCrawlResponse = await request(TEST_URL).post("/v0/crawl");
       expect(response.statusCode).toBe(401);
     });
     it.concurrent("should return an error response with an invalid API key", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer invalid-api-key`)
        .set("Content-Type", "application/json")
@@ -325,21 +257,8 @@ describe("E2E Tests for API Routes", () => {
       expect(response.statusCode).toBe(401);
     });
-    it.concurrent("should return an error for a blocklisted URL", async () => {
-      const blocklistedUrl = "https://twitter.com/fake-test";
-      const response = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({ url: blocklistedUrl });
-      expect(response.statusCode).toBe(403);
-      expect(response.body.error).toContain(
-        "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."
-      );
-    });
     it.concurrent("should return a successful response with a valid API key for crawl", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -350,33 +269,9 @@ describe("E2E Tests for API Routes", () => {
         /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/
       );
     });
-    it.concurrent('should prevent duplicate requests using the same idempotency key', async () => {
-      const uniqueIdempotencyKey = uuidv4();
-      // First request with the idempotency key
-      const firstResponse = await request(TEST_URL)
-        .post('/v0/crawl')
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .set("x-idempotency-key", uniqueIdempotencyKey)
-        .send({ url: 'https://mendable.ai' });
-      expect(firstResponse.statusCode).toBe(200);
-      // Second request with the same idempotency key
-      const secondResponse = await request(TEST_URL)
-        .post('/v0/crawl')
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .set("x-idempotency-key", uniqueIdempotencyKey)
-        .send({ url: 'https://mendable.ai' });
-      expect(secondResponse.statusCode).toBe(409);
-      expect(secondResponse.body.error).toBe('Idempotency key already used');
-    });
     it.concurrent("should return a successful response with a valid API key and valid includes option", async () => {
-      const crawlResponse = await request(TEST_URL)
+      const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -388,7 +283,7 @@ describe("E2E Tests for API Routes", () => {
         },
       });
-      let response;
+      let response: FirecrawlCrawlStatusResponse;
       let isFinished = false;
       while (!isFinished) {
@@ -428,7 +323,7 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 seconds
     it.concurrent("should return a successful response with a valid API key and valid excludes option", async () => {
-      const crawlResponse = await request(TEST_URL)
+      const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -441,7 +336,7 @@ describe("E2E Tests for API Routes", () => {
      });
      let isFinished = false;
-      let response;
+      let response: FirecrawlCrawlStatusResponse;
      while (!isFinished) {
        response = await request(TEST_URL)
@@ -457,7 +352,7 @@ describe("E2E Tests for API Routes", () => {
        }
      }
-      const completedResponse = response;
+      const completedResponse: FirecrawlCrawlStatusResponse = response;
      const urls = completedResponse.body.data.map(
        (item: any) => item.metadata?.sourceURL
@@ -468,50 +363,8 @@ describe("E2E Tests for API Routes", () => {
       });
     }, 90000); // 90 seconds
-    it.concurrent("should return a successful response with a valid API key and limit to 3", async () => {
-      const crawlResponse = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({
-          url: "https://mendable.ai",
-          crawlerOptions: { limit: 3 },
-        });
-      let isFinished = false;
-      let response;
-      while (!isFinished) {
-        response = await request(TEST_URL)
-          .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-        expect(response.statusCode).toBe(200);
-        expect(response.body).toHaveProperty("status");
-        isFinished = response.body.status === "completed";
-        if (!isFinished) {
-          await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
-        }
-      }
-      const completedResponse = response;
-      expect(completedResponse.statusCode).toBe(200);
-      expect(completedResponse.body).toHaveProperty("status");
-      expect(completedResponse.body.status).toBe("completed");
-      expect(completedResponse.body).toHaveProperty("data");
-      expect(completedResponse.body.data.length).toBe(3);
-      expect(completedResponse.body.data[0]).toHaveProperty("content");
-      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-      expect(completedResponse.body.data[0].content).toContain("Mendable");
-      expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
-      expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
-    }, 60000); // 60 seconds
     it.concurrent("should return a successful response with max depth option for a valid crawl job", async () => {
-      const crawlResponse = await request(TEST_URL)
+      const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -521,7 +374,7 @@ describe("E2E Tests for API Routes", () => {
       });
       expect(crawlResponse.statusCode).toBe(200);
-      const response = await request(TEST_URL)
+      const response: FirecrawlCrawlStatusResponse = await request(TEST_URL)
        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
       expect(response.statusCode).toBe(200);
@@ -539,7 +392,7 @@ describe("E2E Tests for API Routes", () => {
          await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
        }
      }
-      const completedResponse = await request(TEST_URL)
+      const completedResponse: FirecrawlCrawlStatusResponse = await request(TEST_URL)
        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
@@ -564,233 +417,16 @@ describe("E2E Tests for API Routes", () => {
         expect(depth).toBeLessThanOrEqual(2);
       });
     }, 180000);
-    it.concurrent("should return a successful response with relative max depth option for a valid crawl job", async () => {
-      const crawlResponse = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({
-          url: "https://www.scrapethissite.com/pages/",
-          crawlerOptions: { maxDepth: 1 },
-        });
-      expect(crawlResponse.statusCode).toBe(200);
-      const response = await request(TEST_URL)
-        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty("status");
-      expect(["active", "waiting"]).toContain(response.body.status);
-      // wait for 60 seconds
-      let isCompleted = false;
-      while (!isCompleted) {
-        const statusCheckResponse = await request(TEST_URL)
-          .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-        expect(statusCheckResponse.statusCode).toBe(200);
-        isCompleted = statusCheckResponse.body.status === "completed";
-        if (!isCompleted) {
-          await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
-        }
-      }
-      const completedResponse = await request(TEST_URL)
-        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      expect(completedResponse.statusCode).toBe(200);
-      expect(completedResponse.body).toHaveProperty("status");
-      expect(completedResponse.body.status).toBe("completed");
-      expect(completedResponse.body).toHaveProperty("data");
-      expect(completedResponse.body.data[0]).toHaveProperty("content");
-      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-      const urls = completedResponse.body.data.map(
-        (item: any) => item.metadata?.sourceURL
-      );
-      expect(urls.length).toBeGreaterThan(1);
-      // Check if all URLs have an absolute maximum depth of 3 after the base URL depth was 2 and the maxDepth was 1
-      urls.forEach((url: string) => {
-        const pathSplits = new URL(url).pathname.split('/');
-        const depth = pathSplits.length - (pathSplits[0].length === 0 && pathSplits[pathSplits.length - 1].length === 0 ? 1 : 0);
-        expect(depth).toBeLessThanOrEqual(3);
-      });
-    }, 180000);
-    it.concurrent("should return a successful response with relative max depth option for a valid crawl job with maxDepths equals to zero", async () => {
-      const crawlResponse = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({
-          url: "https://www.mendable.ai",
-          crawlerOptions: { maxDepth: 0 },
-        });
-      expect(crawlResponse.statusCode).toBe(200);
-      const response = await request(TEST_URL)
-        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty("status");
-      expect(["active", "waiting"]).toContain(response.body.status);
-      // wait for 60 seconds
-      let isCompleted = false;
-      while (!isCompleted) {
-        const statusCheckResponse = await request(TEST_URL)
-          .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-        expect(statusCheckResponse.statusCode).toBe(200);
-        isCompleted = statusCheckResponse.body.status === "completed";
-        if (!isCompleted) {
-          await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
-        }
-      }
-      const completedResponse = await request(TEST_URL)
-        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      const testurls = completedResponse.body.data.map(
-        (item: any) => item.metadata?.sourceURL
-      );
-      //console.log(testurls)
-      expect(completedResponse.statusCode).toBe(200);
-      expect(completedResponse.body).toHaveProperty("status");
-      expect(completedResponse.body.status).toBe("completed");
-      expect(completedResponse.body).toHaveProperty("data");
-      expect(completedResponse.body.data[0]).toHaveProperty("content");
-      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-      const urls = completedResponse.body.data.map(
-        (item: any) => item.metadata?.sourceURL
-      );
-      expect(urls.length).toBeGreaterThanOrEqual(1);
-      // Check if all URLs have an absolute maximum depth of 3 after the base URL depth was 2 and the maxDepth was 1
-      urls.forEach((url: string) => {
-        const pathSplits = new URL(url).pathname.split('/');
-        const depth = pathSplits.length - (pathSplits[0].length === 0 && pathSplits[pathSplits.length - 1].length === 0 ? 1 : 0);
-        expect(depth).toBeLessThanOrEqual(1);
-      });
-    }, 180000);
-    // it.concurrent("should return a successful response with a valid API key and valid limit option", async () => {
-    //   const crawlResponse = await request(TEST_URL)
-    //     .post("/v0/crawl")
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-    //     .set("Content-Type", "application/json")
-    //     .send({
-    //       url: "https://mendable.ai",
-    //       crawlerOptions: { limit: 10 },
-    //     });
-    //   const response = await request(TEST_URL)
-    //     .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //   expect(response.statusCode).toBe(200);
-    //   expect(response.body).toHaveProperty("status");
-    //   expect(response.body.status).toBe("active");
-    //   let isCompleted = false;
-    //   while (!isCompleted) {
-    //     const statusCheckResponse = await request(TEST_URL)
-    //       .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-    //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //     expect(statusCheckResponse.statusCode).toBe(200);
-    //     isCompleted = statusCheckResponse.body.status === "completed";
-    //     if (!isCompleted) {
-    //       await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
-    //     }
-    //   }
-    //   const completedResponse = await request(TEST_URL)
-    //     .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //   expect(completedResponse.statusCode).toBe(200);
-    //   expect(completedResponse.body).toHaveProperty("status");
-    //   expect(completedResponse.body.status).toBe("completed");
-    //   expect(completedResponse.body).toHaveProperty("data");
-    //   expect(completedResponse.body.data.length).toBe(10);
-    //   expect(completedResponse.body.data[0]).toHaveProperty("content");
-    //   expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-    //   expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-    //   expect(completedResponse.body.data[0].content).toContain("Mendable");
-    //   expect(completedResponse.body.data[0].content).not.toContain("main menu");
-    // }, 60000); // 60 seconds
-    // it.concurrent("should return a successful response for a valid crawl job with includeHtml set to true option", async () => {
-    //   const crawlResponse = await request(TEST_URL)
-    //     .post("/v0/crawl")
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-    //     .set("Content-Type", "application/json")
-    //     .send({
-    //       url: "https://roastmywebsite.ai",
-    //       pageOptions: { includeHtml: true },
-    //     });
-    //   expect(crawlResponse.statusCode).toBe(200);
-    //   const response = await request(TEST_URL)
-    //     .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //   expect(response.statusCode).toBe(200);
-    //   expect(response.body).toHaveProperty("status");
-    //   expect(["active", "waiting"]).toContain(response.body.status);
-    //   let isCompleted = false;
-    //   while (!isCompleted) {
-    //     const statusCheckResponse = await request(TEST_URL)
-    //       .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-    //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //     expect(statusCheckResponse.statusCode).toBe(200);
-    //     isCompleted = statusCheckResponse.body.status === "completed";
-    //     if (!isCompleted) {
-    //       await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
-    //     }
-    //   }
-    //   const completedResponse = await request(TEST_URL)
-    //     .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //   expect(completedResponse.statusCode).toBe(200);
-    //   expect(completedResponse.body).toHaveProperty("status");
-    //   expect(completedResponse.body.status).toBe("completed");
-    //   expect(completedResponse.body).toHaveProperty("data");
-    //   expect(completedResponse.body.data[0]).toHaveProperty("content");
-    //   expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-    //   expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-    //   expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
-    //   expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
-    //   // 120 seconds
-    //   expect(completedResponse.body.data[0]).toHaveProperty("html");
-    //   expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-    //   expect(completedResponse.body.data[0].content).toContain("_Roast_");
-    //   expect(completedResponse.body.data[0].markdown).toContain("_Roast_");
-    //   expect(completedResponse.body.data[0].html).toContain("<h1");
-    //   expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
-    //   expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
-    // }, 180000);
   });
   describe("POST /v0/crawlWebsitePreview", () => {
     it.concurrent("should require authorization", async () => {
-      const response = await request(TEST_URL).post("/v0/crawlWebsitePreview");
+      const response: FirecrawlCrawlResponse = await request(TEST_URL).post("/v0/crawlWebsitePreview");
       expect(response.statusCode).toBe(401);
     });
     it.concurrent("should return an error response with an invalid API key", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/crawlWebsitePreview")
        .set("Authorization", `Bearer invalid-api-key`)
        .set("Content-Type", "application/json")
@@ -798,20 +434,8 @@ describe("E2E Tests for API Routes", () => {
       expect(response.statusCode).toBe(401);
     });
-    // it.concurrent("should return an error for a blocklisted URL", async () => {
-    //   const blocklistedUrl = "https://instagram.com/fake-test";
-    //   const response = await request(TEST_URL)
-    //     .post("/v0/crawlWebsitePreview")
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-    //     .set("Content-Type", "application/json")
-    //     .send({ url: blocklistedUrl });
-    //   // is returning 429 instead of 403
-    //   expect(response.statusCode).toBe(403);
-    //   expect(response.body.error).toContain("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.");
-    // });
     it.concurrent("should return a timeout error when scraping takes longer than the specified timeout", async () => {
-      const response = await request(TEST_URL)
+      const response: FirecrawlCrawlResponse = await request(TEST_URL)
        .post("/v0/scrape")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
@@ -819,19 +443,6 @@ describe("E2E Tests for API Routes", () => {
       expect(response.statusCode).toBe(408);
     }, 3000);
-    // it.concurrent("should return a successful response with a valid API key for crawlWebsitePreview", async () => {
-    //   const response = await request(TEST_URL)
-    //     .post("/v0/crawlWebsitePreview")
-    //     .set("Authorization", `Bearer this_is_just_a_preview_token`)
-    //     .set("Content-Type", "application/json")
-    //     .send({ url: "https://firecrawl.dev" });
-    //   expect(response.statusCode).toBe(200);
-    //   expect(response.body).toHaveProperty("jobId");
-    //   expect(response.body.jobId).toMatch(
-    //     /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/
-    //   );
-    // });
   });
   describe("POST /v0/search", () => {
@@ -965,110 +576,6 @@ describe("E2E Tests for API Routes", () => {
       expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
     }, 180000); // 120 seconds
-    it.concurrent("should return a successful response for a valid crawl job with includeHtml set to true option (2)", async () => {
-      const crawlResponse = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({
-          url: "https://roastmywebsite.ai",
-          pageOptions: { includeHtml: true },
-        });
-      expect(crawlResponse.statusCode).toBe(200);
-      const response = await request(TEST_URL)
-        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty("status");
-      expect(["active", "waiting"]).toContain(response.body.status);
-      let isFinished = false;
-      let completedResponse;
-      while (!isFinished) {
-        const response = await request(TEST_URL)
-          .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-        expect(response.statusCode).toBe(200);
-        expect(response.body).toHaveProperty("status");
-        if (response.body.status === "completed") {
-          isFinished = true;
-          completedResponse = response;
-        } else {
-          await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
-        }
-      }
-      expect(completedResponse.statusCode).toBe(200);
-      expect(completedResponse.body).toHaveProperty("status");
-      expect(completedResponse.body.status).toBe("completed");
-      expect(completedResponse.body).toHaveProperty("data");
-      expect(completedResponse.body.data[0]).toHaveProperty("content");
-      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-      expect(completedResponse.body.data[0]).toHaveProperty("html");
-      expect(completedResponse.body.data[0].content).toContain("_Roast_");
-      expect(completedResponse.body.data[0].markdown).toContain("_Roast_");
-      expect(completedResponse.body.data[0].html).toContain("<h1");
-      expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
-      expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
-    }, 60000);
-  }); // 60 seconds
-    it.concurrent("should return a successful response for a valid crawl job with allowBackwardCrawling set to true option", async () => {
-      const crawlResponse = await request(TEST_URL)
-        .post("/v0/crawl")
-        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-        .set("Content-Type", "application/json")
-        .send({
-          url: "https://mendable.ai/blog",
-          pageOptions: { includeHtml: true },
-          crawlerOptions: { allowBackwardCrawling: true },
-        });
-      expect(crawlResponse.statusCode).toBe(200);
-      let isFinished = false;
-      let completedResponse;
-      while (!isFinished) {
-        const response = await request(TEST_URL)
-          .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
-          .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-        expect(response.statusCode).toBe(200);
-        expect(response.body).toHaveProperty("status");
-        if (response.body.status === "completed") {
-          isFinished = true;
-          completedResponse = response;
-        } else {
-          await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
-        }
-      }
-      expect(completedResponse.statusCode).toBe(200);
-      expect(completedResponse.body).toHaveProperty("status");
-      expect(completedResponse.body.status).toBe("completed");
-      expect(completedResponse.body).toHaveProperty("data");
-      expect(completedResponse.body.data[0]).toHaveProperty("content");
-      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
-      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-      expect(completedResponse.body.data[0]).toHaveProperty("html");
-      expect(completedResponse.body.data[0].content).toContain("Mendable");
-      expect(completedResponse.body.data[0].markdown).toContain("Mendable");
-      expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
-      expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
-      const onlyChildrenLinks = completedResponse.body.data.filter(doc => {
-        return doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog")
-      });
-      expect(completedResponse.body.data.length).toBeGreaterThan(onlyChildrenLinks.length);
-    }, 60000);
     it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
       const crawlResponse = await request(TEST_URL)
         .post("/v0/crawl")
@@ -1104,6 +611,7 @@ describe("E2E Tests for API Routes", () => {
       expect(completedResponse.body.partial_data[0].metadata.pageStatusCode).toBe(200);
       expect(completedResponse.body.partial_data[0].metadata.pageError).toBeUndefined();
     }, 60000); // 60 seconds
+  });
   describe("POST /v0/scrape with LLM Extraction", () => {
     it.concurrent("should extract data using LLM extraction mode", async () => {
@@ -1156,64 +664,6 @@ describe("E2E Tests for API Routes", () => {
     }, 60000); // 60 secs
   });
-  // describe("POST /v0/scrape for Top 100 Companies", () => {
-  //   it.concurrent("should extract data for the top 100 companies", async () => {
-  //     const response = await request(TEST_URL)
-  //       .post("/v0/scrape")
-  //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-  //       .set("Content-Type", "application/json")
-  //       .send({
-  //         url: "https://companiesmarketcap.com/",
-  //         pageOptions: {
-  //           onlyMainContent: true
-  //         },
-  //         extractorOptions: {
-  //           mode: "llm-extraction",
-  //           extractionPrompt: "Extract the name, market cap, price, and today's change for the top 20 companies listed on the page.",
-  //           extractionSchema: {
-  //             type: "object",
-  //             properties: {
-  //               companies: {
-  //                 type: "array",
-  //                 items: {
-  //                   type: "object",
-  //                   properties: {
-  //                     rank: { type: "number" },
-  //                     name: { type: "string" },
-  //                     marketCap: { type: "string" },
-  //                     price: { type: "string" },
-  //                     todayChange: { type: "string" }
-  //                   },
-  //                   required: ["rank", "name", "marketCap", "price", "todayChange"]
-  //                 }
-  //               }
-  //             },
-  //             required: ["companies"]
-  //           }
-  //         }
-  //       });
-  //     // Print the response body to the console for debugging purposes
-  //     console.log("Response companies:", response.body.data.llm_extraction.companies);
-  //     // Check if the response has the correct structure and data types
-  //     expect(response.status).toBe(200);
-  //     expect(Array.isArray(response.body.data.llm_extraction.companies)).toBe(true);
-  //     expect(response.body.data.llm_extraction.companies.length).toBe(40);
-  //     // Sample check for the first company
-  //     const firstCompany = response.body.data.llm_extraction.companies[0];
-  //     expect(firstCompany).toHaveProperty("name");
-  //     expect(typeof firstCompany.name).toBe("string");
-  //     expect(firstCompany).toHaveProperty("marketCap");
-  //     expect(typeof firstCompany.marketCap).toBe("string");
-  //     expect(firstCompany).toHaveProperty("price");
-  //     expect(typeof firstCompany.price).toBe("string");
-  //     expect(firstCompany).toHaveProperty("todayChange");
-  //     expect(typeof firstCompany.todayChange).toBe("string");
-  //   }, 120000); // 120 secs
-  // });
   describe("POST /v0/crawl with fast mode", () => {
     it.concurrent("should complete the crawl under 20 seconds", async () => {
       const startTime = Date.now();
@@ -1269,122 +719,5 @@ describe("E2E Tests for API Routes", () => {
       expect(results.length).toBeLessThanOrEqual(15);
     }, 20000);
-    // it.concurrent("should complete the crawl in more than 10 seconds", async () => {
-    //   const startTime = Date.now();
-    //   const crawlResponse = await request(TEST_URL)
-    //     .post("/v0/crawl")
-    //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-    //     .set("Content-Type", "application/json")
-    //     .send({
-    //       url: "https://flutterbricks.com",
-    //     });
-    //   expect(crawlResponse.statusCode).toBe(200);
-    //   const jobId = crawlResponse.body.jobId;
-    //   let statusResponse;
-    //   let isFinished = false;
-    //   while (!isFinished) {
-    //     statusResponse = await request(TEST_URL)
-    //       .get(`/v0/crawl/status/${jobId}`)
-    //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-    //     expect(statusResponse.statusCode).toBe(200);
-    //     isFinished = statusResponse.body.status === "completed";
-    //     if (!isFinished) {
-    //       await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again
-    //     }
-    //   }
-    //   const endTime = Date.now();
-    //   const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds
-    //   console.log(`Time elapsed: ${timeElapsed} seconds`);
-    //   expect(statusResponse.body.status).toBe("completed");
-    //   expect(statusResponse.body).toHaveProperty("data");
-    //   expect(statusResponse.body.data[0]).toHaveProperty("content");
-    //   expect(statusResponse.body.data[0]).toHaveProperty("markdown");
-    //   const results = statusResponse.body.data;
-    //   // results.forEach((result, i) => {
-    //   //   console.log(result.metadata.sourceURL);
-    //   // });
-    //   expect(results.length).toBeGreaterThanOrEqual(10);
-    //   expect(results.length).toBeLessThanOrEqual(15);
-    // }, 50000);// 15 seconds timeout to account for network delays
   });
-  describe("GET /is-production", () => {
-    it.concurrent("should return the production status", async () => {
-      const response = await request(TEST_URL).get("/is-production");
-      expect(response.statusCode).toBe(200);
-      expect(response.body).toHaveProperty("isProduction");
-    });
-  });
-  describe("Rate Limiter", () => {
-    it.concurrent("should return 429 when rate limit is exceeded for preview token", async () => {
-      for (let i = 0; i < 5; i++) {
-        const response = await request(TEST_URL)
-          .post("/v0/scrape")
-          .set("Authorization", `Bearer this_is_just_a_preview_token`)
-          .set("Content-Type", "application/json")
-          .send({ url: "https://www.scrapethissite.com" });
-        expect(response.statusCode).toBe(200);
-      }
-      const response = await request(TEST_URL)
-        .post("/v0/scrape")
-        .set("Authorization", `Bearer this_is_just_a_preview_token`)
-        .set("Content-Type", "application/json")
-        .send({ url: "https://www.scrapethissite.com" });
-      expect(response.statusCode).toBe(429);
-    }, 90000);
-  });
-  // it.concurrent("should return 429 when rate limit is exceeded for API key", async () => {
-  //   for (let i = 0; i < parseInt(process.env.RATE_LIMIT_TEST_API_KEY_SCRAPE); i++) {
-  //     const response = await request(TEST_URL)
-  //       .post("/v0/scrape")
-  //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-  //       .set("Content-Type", "application/json")
-  //       .send({ url: "https://www.scrapethissite.com" });
-  //     expect(response.statusCode).toBe(200);
-  //   }
-  //   const response = await request(TEST_URL)
-  //     .post("/v0/scrape")
-  //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-  //     .set("Content-Type", "application/json")
-  //     .send({ url: "https://www.scrapethissite.com" });
-  //   expect(response.statusCode).toBe(429);
-  // }, 60000);
-  // it.concurrent("should return 429 when rate limit is exceeded for API key", async () => {
-  //   for (let i = 0; i < parseInt(process.env.RATE_LIMIT_TEST_API_KEY_CRAWL); i++) {
-  //     const response = await request(TEST_URL)
-  //       .post("/v0/crawl")
-  //       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-  //       .set("Content-Type", "application/json")
-  //       .send({ url: "https://www.scrapethissite.com" });
-  //     expect(response.statusCode).toBe(200);
-  //   }
-  //   const response = await request(TEST_URL)
-  //     .post("/v0/crawl")
-  //     .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
-  //     .set("Content-Type", "application/json")
-  //     .send({ url: "https://www.scrapethissite.com" });
-  //   expect(response.statusCode).toBe(429);
-  // }, 60000);
 });

View File

@@ -0,0 +1,47 @@
+import { crawlController } from '../crawl'
+import { Request, Response } from 'express';
+import { authenticateUser } from '../auth'; // Ensure this import is correct
+import { createIdempotencyKey } from '../../services/idempotency/create';
+import { validateIdempotencyKey } from '../../services/idempotency/validate';
+import { v4 as uuidv4 } from 'uuid';
+jest.mock('../auth', () => ({
+  authenticateUser: jest.fn().mockResolvedValue({
+    success: true,
+    team_id: 'team123',
+    error: null,
+    status: 200
+  }),
+  reduce: jest.fn()
+}));
+jest.mock('../../services/idempotency/validate');
+describe('crawlController', () => {
+  it('should prevent duplicate requests using the same idempotency key', async () => {
+    const req = {
+      headers: {
+        'x-idempotency-key': await uuidv4(),
+        'Authorization': `Bearer ${process.env.TEST_API_KEY}`
+      },
+      body: {
+        url: 'https://mendable.ai'
+      }
+    } as unknown as Request;
+    const res = {
+      status: jest.fn().mockReturnThis(),
+      json: jest.fn()
+    } as unknown as Response;
+    // Mock the idempotency key validation to return false for the second call
+    (validateIdempotencyKey as jest.Mock).mockResolvedValueOnce(true).mockResolvedValueOnce(false);
+    // First request should succeed
+    await crawlController(req, res);
+    expect(res.status).not.toHaveBeenCalledWith(409);
+    // Second request with the same key should fail
+    await crawlController(req, res);
+    expect(res.status).toHaveBeenCalledWith(409);
+    expect(res.json).toHaveBeenCalledWith({ error: 'Idempotency key already used' });
+  });
+});
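This unit test replaces the E2E idempotency test removed from the API-routes suite above. For context, a sketch of the guard inside crawlController that the mocked validateIdempotencyKey drives — the handler shape and the call signatures are assumptions inferred from the test, not the committed controller, which also authenticates and enqueues the crawl job:

    // Assumed shape of the idempotency guard exercised by the test above --
    // a sketch, not the committed controller code.
    import { Request, Response } from 'express';
    import { createIdempotencyKey } from '../../services/idempotency/create';
    import { validateIdempotencyKey } from '../../services/idempotency/validate';

    export async function crawlController(req: Request, res: Response) {
      const idempotencyKey = req.headers['x-idempotency-key'];
      if (idempotencyKey) {
        // Reject a key that was already recorded for a previous request.
        const isValid = await validateIdempotencyKey(req);
        if (!isValid) {
          return res.status(409).json({ error: 'Idempotency key already used' });
        }
        // Record this key so a retry with the same key is rejected next time.
        await createIdempotencyKey(req);
      }
      // ... authentication and crawl-job enqueueing would follow here ...
    }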

View File

@@ -91,6 +91,7 @@ export async function searchHelper(
   });
   const docs = await a.getDocuments(false);
   if (docs.length === 0) {
     return { success: true, error: "No search results found", returnCode: 200 };
   }

View File

@ -7,7 +7,7 @@ import { getAdjustedMaxDepth } from '../utils/maxDepthUtils';
jest.mock('axios'); jest.mock('axios');
jest.mock('robots-parser'); jest.mock('robots-parser');
describe('WebCrawler maxDepth and filterLinks', () => { describe('WebCrawler', () => {
let crawler: WebCrawler; let crawler: WebCrawler;
const mockAxios = axios as jest.Mocked<typeof axios>; const mockAxios = axios as jest.Mocked<typeof axios>;
const mockRobotsParser = robotsParser as jest.MockedFunction<typeof robotsParser>; const mockRobotsParser = robotsParser as jest.MockedFunction<typeof robotsParser>;
@@ -156,8 +156,37 @@ describe('WebCrawler', () => {
     ]);
   });
-  // Add more tests to cover other scenarios, such as checking includes and excludes
+  it('should handle allowBackwardCrawling option correctly', async () => {
+    const initialUrl = 'https://mendable.ai/blog';
+    // Setup the crawler with the specific test case options
+    const crawler = new WebCrawler({
+      initialUrl: initialUrl,
+      includes: [],
+      excludes: [],
+      limit: 100,
+      maxCrawledDepth: 3, // Example depth
+      allowBackwardCrawling: true
+    });
+    // Mock the sitemap fetching function to simulate backward crawling
+    crawler['tryFetchSitemapLinks'] = jest.fn().mockResolvedValue([
+      initialUrl,
+      'https://mendable.ai', // backward link
+      initialUrl + '/page1',
+      initialUrl + '/page1/page2'
+    ]);
+    const results = await crawler.start();
+    expect(results).toEqual([
+      { url: initialUrl, html: '' },
+      { url: 'https://mendable.ai', html: '' }, // Expect the backward link to be included
+      { url: initialUrl + '/page1', html: '' },
+      { url: initialUrl + '/page1/page2', html: '' }
+    ]);
+    // Check that the backward link is included if allowBackwardCrawling is true
+    expect(results.some(r => r.url === 'https://mendable.ai')).toBe(true);
+  });
 });
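The test crawls from https://mendable.ai/blog and expects the parent URL https://mendable.ai to survive filtering. A hypothetical link filter illustrating what allowBackwardCrawling toggles — the names and exact semantics are assumptions, since the real WebCrawler logic is not part of this diff:

    // Hypothetical filter sketching the option's effect; not the committed code.
    function keepLink(link: string, initialUrl: string, allowBackwardCrawling: boolean): boolean {
      // Only crawl within the same host.
      if (new URL(link).hostname !== new URL(initialUrl).hostname) return false;
      // A "backward" link is on the same host but outside the initial URL's path.
      const isBackward = !link.startsWith(initialUrl);
      return allowBackwardCrawling || !isBackward;
    }

    // keepLink('https://mendable.ai', 'https://mendable.ai/blog', true)  -> true
    // keepLink('https://mendable.ai', 'https://mendable.ai/blog', false) -> false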

View File

@@ -0,0 +1,24 @@
+jest.mock('../single_url', () => {
+  const originalModule = jest.requireActual('../single_url');
+  originalModule.fetchHtmlContent = jest.fn().mockResolvedValue('<html><head><title>Test</title></head><body><h1>Roast</h1></body></html>');
+  return originalModule;
+});
+import { scrapSingleUrl } from '../single_url';
+import { PageOptions } from '../../../lib/entities';
+describe('scrapSingleUrl', () => {
+  it('should handle includeHtml option correctly', async () => {
+    const url = 'https://roastmywebsite.ai';
+    const pageOptionsWithHtml: PageOptions = { includeHtml: true };
+    const pageOptionsWithoutHtml: PageOptions = { includeHtml: false };
+    const resultWithHtml = await scrapSingleUrl(url, pageOptionsWithHtml);
+    const resultWithoutHtml = await scrapSingleUrl(url, pageOptionsWithoutHtml);
+    expect(resultWithHtml.html).toBeDefined();
+    expect(resultWithoutHtml.html).toBeUndefined();
+  }, 10000);
+});

View File

@ -0,0 +1,89 @@
import { isUrlBlocked } from '../blocklist';
describe('Blocklist Functionality', () => {
describe('isUrlBlocked', () => {
test.each([
'https://facebook.com/fake-test',
'https://x.com/user-profile',
'https://twitter.com/home',
'https://instagram.com/explore',
'https://linkedin.com/in/johndoe',
'https://pinterest.com/pin/create',
'https://snapchat.com/add/johndoe',
'https://tiktok.com/@johndoe',
'https://reddit.com/r/funny',
'https://tumblr.com/dashboard',
'https://flickr.com/photos/johndoe',
'https://whatsapp.com/download',
'https://wechat.com/features',
'https://telegram.org/apps'
])('should return true for blocklisted URL %s', (url) => {
expect(isUrlBlocked(url)).toBe(true);
});
test.each([
'https://facebook.com/policy',
'https://twitter.com/tos',
'https://instagram.com/about/legal/terms',
'https://linkedin.com/legal/privacy-policy',
'https://pinterest.com/about/privacy',
'https://snapchat.com/legal/terms',
'https://tiktok.com/legal/privacy-policy',
'https://reddit.com/policies',
'https://tumblr.com/policy/en/privacy',
'https://flickr.com/help/terms',
'https://whatsapp.com/legal',
'https://wechat.com/en/privacy-policy',
'https://telegram.org/tos'
])('should return false for allowed URLs with keywords %s', (url) => {
expect(isUrlBlocked(url)).toBe(false);
});
test('should return false for non-blocklisted domain', () => {
const url = 'https://example.com';
expect(isUrlBlocked(url)).toBe(false);
});
test('should handle invalid URLs gracefully', () => {
const url = 'htp://invalid-url';
expect(isUrlBlocked(url)).toBe(false);
});
});
test.each([
'https://subdomain.facebook.com',
'https://facebook.com.someotherdomain.com',
'https://www.facebook.com/profile',
'https://api.twitter.com/info',
'https://instagram.com/accounts/login'
])('should return true for URLs with blocklisted domains in subdomains or paths %s', (url) => {
expect(isUrlBlocked(url)).toBe(true);
});
test.each([
'https://example.com/facebook.com',
'https://example.com/redirect?url=https://twitter.com',
'https://facebook.com.policy.example.com'
])('should return false for URLs where blocklisted domain is part of another domain or path %s', (url) => {
expect(isUrlBlocked(url)).toBe(false);
});
test.each([
'https://FACEBOOK.com',
'https://INSTAGRAM.com/@something'
])('should handle case variations %s', (url) => {
expect(isUrlBlocked(url)).toBe(true);
});
test.each([
'https://facebook.com?redirect=https://example.com',
'https://twitter.com?query=something'
])('should handle query parameters %s', (url) => {
expect(isUrlBlocked(url)).toBe(true);
});
test('should handle internationalized domain names', () => {
const url = 'https://xn--d1acpjx3f.xn--p1ai';
expect(isUrlBlocked(url)).toBe(false);
});
});

View File

@@ -9,41 +9,11 @@ describe('PDF Processing Module - Integration Test', () => {
    expect(pageError).toBeUndefined();
  });

-  // We're hitting the LLAMAPARSE rate limit 🫠
-  // it('should download and read a simple PDF file by URL', async () => {
-  //   const pdfContent = await pdfProcessor.fetchAndProcessPdf('https://s3.us-east-1.amazonaws.com/storage.mendable.ai/rafa-testing/test%20%281%29.pdf');
-  //   expect(pdfContent).toEqual("Dummy PDF file");
-  // });
-
-  // it('should download and read a complex PDF file by URL', async () => {
-  //   const pdfContent = await pdfProcessor.fetchAndProcessPdf('https://arxiv.org/pdf/2307.06435.pdf');
-  //   const expectedContent = 'A Comprehensive Overview of Large Language Models\n' +
-  //     ' a a, b, c,d, e,f e,f g,i\n' +
-  //     ' Humza Naveed , Asad Ullah Khan , Shi Qiu , Muhammad Saqib , Saeed Anwar , Muhammad Usman , Naveed Akhtar ,\n' +
-  //     ' Nick Barnes h, Ajmal Mian i\n' +
-  //     ' aUniversity of Engineering and Technology (UET), Lahore, Pakistan\n' +
-  //     ' bThe Chinese University of Hong Kong (CUHK), HKSAR, China\n' +
-  //     ' cUniversity of Technology Sydney (UTS), Sydney, Australia\n' +
-  //     ' dCommonwealth Scientific and Industrial Research Organisation (CSIRO), Sydney, Australia\n' +
-  //     ' eKing Fahd University of Petroleum and Minerals (KFUPM), Dhahran, Saudi Arabia\n' +
-  //     ' fSDAIA-KFUPM Joint Research Center for Artificial Intelligence (JRCAI), Dhahran, Saudi Arabia\n' +
-  //     ' gThe University of Melbourne (UoM), Melbourne, Australia\n' +
-  //     ' hAustralian National University (ANU), Canberra, Australia\n' +
-  //     ' iThe University of Western Australia (UWA), Perth, Australia\n' +
-  //     ' Abstract\n' +
-  //     ' Large Language Models (LLMs) have recently demonstrated remarkable capabilities in natural language processing tasks and\n' +
-  //     ' beyond. This success of LLMs has led to a large influx of research contributions in this direction. These works encompass diverse\n' +
-  //     ' topics such as architectural innovations, better training strategies, context length improvements, fine-tuning, multi-modal LLMs,\n' +
-  //     ' robotics, datasets, benchmarking, efficiency, and more. With the rapid development of techniques and regular breakthroughs in\n' +
-  //     ' LLM research, it has become considerably challenging to perceive the bigger picture of the advances in this direction. Considering\n' +
-  //     ' the rapidly emerging plethora of literature on LLMs, it is imperative that the research community is able to benefit from a concise\n' +
-  //     ' yet comprehensive overview of the recent developments in this field. This article provides an overview of the existing literature\n' +
-  //     ' on a broad range of LLM-related concepts. Our self-contained comprehensive overview of LLMs discusses relevant background\n' +
-  //     ' concepts along with covering the advanced topics at the frontier of research in LLMs. This review article is intended to not only\n' +
-  //     ' provide a systematic survey but also a quick comprehensive reference for the researchers and practitioners to draw insights from\n' +
-  //     ' extensive informative summaries of the existing works to advance the LLM research.\n'
-  //   expect(pdfContent).toContain(expectedContent);
-  // }, 60000);
+  it('should return a successful response for a valid scrape with PDF file and parsePDF set to false', async () => {
+    const { content, pageStatusCode, pageError } = await pdfProcessor.fetchAndProcessPdf('https://arxiv.org/pdf/astro-ph/9301001.pdf', false);
+    expect(pageStatusCode).toBe(200);
+    expect(pageError).toBeUndefined();
+    expect(content).toContain('/Title(arXiv:astro-ph/9301001v1 7 Jan 1993)>>endobj');
+  }, 60000); // 60 seconds
});

View File

@@ -23,6 +23,7 @@ const allowedKeywords = [
  'user-agreement',
  'legal',
  'help',
+ 'policies',
  'support',
  'contact',
  'about',
@@ -30,25 +31,31 @@ const allowedKeywords = [
  'blog',
  'press',
  'conditions',
+ 'tos'
];

export function isUrlBlocked(url: string): boolean {
-  // Check if the URL contains any allowed keywords
-  if (allowedKeywords.some(keyword => url.includes(keyword))) {
+  const lowerCaseUrl = url.toLowerCase();
+
+  // Check if the URL contains any allowed keywords as whole words
+  if (allowedKeywords.some(keyword => new RegExp(`\\b${keyword}\\b`, 'i').test(lowerCaseUrl))) {
    return false;
  }

  try {
+    const urlObj = new URL(url);
+    const hostname = urlObj.hostname.toLowerCase();
+
    // Check if the URL matches any domain in the blocklist
-    return socialMediaBlocklist.some(domain => {
-      // Create a regular expression to match the exact domain
-      const domainPattern = new RegExp(`(^|\\.)${domain.replace('.', '\\.')}$`);
-      // Test the hostname of the URL against the pattern
-      return domainPattern.test(new URL(url).hostname);
+    const isBlocked = socialMediaBlocklist.some(domain => {
+      const domainPattern = new RegExp(`(^|\\.)${domain.replace('.', '\\.')}(\\.|$)`, 'i');
+      return domainPattern.test(hostname);
    });
+
+    return isBlocked;
  } catch (e) {
    // If an error occurs (e.g., invalid URL), return false
+    console.error(`Error processing URL: ${url}`, e);
    return false;
  }
}
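As a quick reference for the behavior change, here is how the reworked function evaluates the cases covered by the blocklist tests above (a usage sketch; the expected values come from those tests):

```ts
import { isUrlBlocked } from './blocklist';

isUrlBlocked('https://facebook.com/fake-test');   // true  – hostname matches a blocklisted domain
isUrlBlocked('https://subdomain.facebook.com');   // true  – subdomains match the (^|\.)domain(\.|$) pattern
isUrlBlocked('https://facebook.com/policy');      // false – contains an allowed keyword as a whole word
isUrlBlocked('https://example.com/facebook.com'); // false – the blocklist is only applied to the hostname
isUrlBlocked('htp://invalid-url');                // false – malformed URLs are treated as not blocked
```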

View File

@ -0,0 +1,87 @@
import { getRateLimiter, serverRateLimiter, testSuiteRateLimiter, redisClient } from "./rate-limiter";
import { RateLimiterMode } from "../../src/types";
import { RateLimiterRedis } from "rate-limiter-flexible";
describe("Rate Limiter Service", () => {
beforeAll(async () => {
await redisClient.connect();
});
afterAll(async () => {
await redisClient.disconnect();
});
it("should return the testSuiteRateLimiter for specific tokens", () => {
const limiter = getRateLimiter("crawl" as RateLimiterMode, "a01ccae");
expect(limiter).toBe(testSuiteRateLimiter);
const limiter2 = getRateLimiter("scrape" as RateLimiterMode, "6254cf9");
expect(limiter2).toBe(testSuiteRateLimiter);
});
it("should return the serverRateLimiter if mode is not found", () => {
const limiter = getRateLimiter("nonexistent" as RateLimiterMode, "someToken");
expect(limiter).toBe(serverRateLimiter);
});
it("should return the correct rate limiter based on mode and plan", () => {
const limiter = getRateLimiter("crawl" as RateLimiterMode, "someToken", "free");
expect(limiter.points).toBe(2);
const limiter2 = getRateLimiter("scrape" as RateLimiterMode, "someToken", "standard");
expect(limiter2.points).toBe(50);
const limiter3 = getRateLimiter("search" as RateLimiterMode, "someToken", "growth");
expect(limiter3.points).toBe(500);
const limiter4 = getRateLimiter("crawlStatus" as RateLimiterMode, "someToken", "growth");
expect(limiter4.points).toBe(150);
});
it("should return the default rate limiter if plan is not provided", () => {
const limiter = getRateLimiter("crawl" as RateLimiterMode, "someToken");
expect(limiter.points).toBe(3);
const limiter2 = getRateLimiter("scrape" as RateLimiterMode, "someToken");
expect(limiter2.points).toBe(20);
});
it("should create a new RateLimiterRedis instance with correct parameters", () => {
const keyPrefix = "test-prefix";
const points = 10;
const limiter = new RateLimiterRedis({
storeClient: redisClient,
keyPrefix,
points,
duration: 60,
});
expect(limiter.keyPrefix).toBe(keyPrefix);
expect(limiter.points).toBe(points);
expect(limiter.duration).toBe(60);
});
it("should return the correct rate limiter for 'preview' mode", () => {
const limiter = getRateLimiter("preview" as RateLimiterMode, "someToken", "free");
expect(limiter.points).toBe(5);
const limiter2 = getRateLimiter("preview" as RateLimiterMode, "someToken");
expect(limiter2.points).toBe(5);
});
it("should return the correct rate limiter for 'account' mode", () => {
const limiter = getRateLimiter("account" as RateLimiterMode, "someToken", "free");
expect(limiter.points).toBe(100);
const limiter2 = getRateLimiter("account" as RateLimiterMode, "someToken");
expect(limiter2.points).toBe(100);
});
it("should return the correct rate limiter for 'crawlStatus' mode", () => {
const limiter = getRateLimiter("crawlStatus" as RateLimiterMode, "someToken", "free");
expect(limiter.points).toBe(150);
const limiter2 = getRateLimiter("crawlStatus" as RateLimiterMode, "someToken");
expect(limiter2.points).toBe(150);
});
});
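For context, a typical call site for these limiters might look like the sketch below (assuming the standard rate-limiter-flexible consume API; the mode and token are placeholders):

```ts
import { getRateLimiter } from "./rate-limiter";
import { RateLimiterMode } from "../../src/types";

async function isWithinRateLimit(token: string, plan?: string): Promise<boolean> {
  const limiter = getRateLimiter("scrape" as RateLimiterMode, token, plan);
  try {
    // consume() deducts one point for this key and throws once the
    // limiter's points are exhausted within its duration window.
    await limiter.consume(token);
    return true;
  } catch {
    return false; // rate limited
  }
}
```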

View File

@@ -1,4 +1,4 @@
-import { ExtractorOptions } from "./lib/entities";
+import { ExtractorOptions, Document } from "./lib/entities";

export interface CrawlResult {
  source: string;
@@ -43,6 +43,34 @@ export interface FirecrawlJob {
  num_tokens?: number,
}

+export interface FirecrawlScrapeResponse {
+  statusCode: number;
+  body: {
+    status: string;
+    data: Document;
+  };
+  error?: string;
+}
+
+export interface FirecrawlCrawlResponse {
+  statusCode: number;
+  body: {
+    status: string;
+    jobId: string;
+  };
+  error?: string;
+}
+
+export interface FirecrawlCrawlStatusResponse {
+  statusCode: number;
+  body: {
+    status: string;
+    data: Document[];
+  };
+  error?: string;
+}
+
export enum RateLimiterMode {
  Crawl = "crawl",
  CrawlStatus = "crawlStatus",
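These interfaces describe what supertest hands back (a numeric statusCode plus the parsed JSON body), so the e2e assertions can be typed rather than reaching through any. A minimal sketch of the pattern (the endpoint and assertions mirror the e2e suite; treat the payload as illustrative):

```ts
import request from "supertest";
import { FirecrawlScrapeResponse } from "../../types";

it("returns a typed scrape response", async () => {
  const response: FirecrawlScrapeResponse = await request("http://127.0.0.1:3002")
    .post("/v0/scrape")
    .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
    .set("Content-Type", "application/json")
    .send({ url: "https://roastmywebsite.ai" });

  expect(response.statusCode).toBe(200);
  // body.data is a Document, so content and metadata are typed.
  expect(response.body.data.content).toContain("_Roast_");
});
```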

View File

@@ -1,11 +1,14 @@
-import FirecrawlApp, { JobStatusResponse } from '@mendable/firecrawl-js';
+import FirecrawlApp, { JobStatusResponse } from './firecrawl/src/index' //'@mendable/firecrawl-js';
import { z } from "zod";

const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY"});

// Scrape a website:
const scrapeResult = await app.scrapeUrl('firecrawl.dev');
-console.log(scrapeResult.data.content)
+if (scrapeResult.data) {
+  console.log(scrapeResult.data.content)
+}

// Crawl a website:
const crawlResult = await app.crawlUrl('mendable.ai', {crawlerOptions: {excludes: ['blog/*'], limit: 5}}, false);
@@ -23,12 +26,13 @@ while (true) {
  await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second
}

-console.log(job.data[0].content);
+if (job.data) {
+  console.log(job.data[0].content);
+}

// Search for a query:
const query = 'what is mendable?'
const searchResult = await app.search(query)
console.log(searchResult)

// LLM Extraction:
// Define schema to extract contents into using zod schema
@@ -50,7 +54,9 @@ let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", {
  extractorOptions: { extractionSchema: zodSchema },
});

-console.log(llmExtractionResult.data.llm_extraction);
+if (llmExtractionResult.data) {
+  console.log(llmExtractionResult.data.llm_extraction);
+}

// Define schema to extract contents into using json schema
const jsonSchema = {
@@ -80,4 +86,7 @@ llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", {
  extractorOptions: { extractionSchema: jsonSchema },
});

-console.log(llmExtractionResult.data.llm_extraction);
+if (llmExtractionResult.data) {
+  console.log(llmExtractionResult.data.llm_extraction);
+}

View File

@@ -176,6 +176,11 @@ async function checkStatusExample(jobId) {
checkStatusExample('your_job_id_here');
```

+## Running Locally
+To use the SDK when running Firecrawl locally, you can change the initial Firecrawl app instance to:
+```js
+const app = new FirecrawlApp({ apiKey: "YOUR_API_KEY", apiUrl: "http://localhost:3002" });
+```
+
## Error Handling

View File

@@ -18,9 +18,9 @@ export default class FirecrawlApp {
     * Initializes a new instance of the FirecrawlApp class.
     * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance.
     */
-    constructor({ apiKey = null }) {
-        this.apiUrl = "https://api.firecrawl.dev";
+    constructor({ apiKey = null, apiUrl = null }) {
        this.apiKey = apiKey || "";
+        this.apiUrl = apiUrl || "https://api.firecrawl.dev";
        if (!this.apiKey) {
            throw new Error("No API key provided");
        }

View File

@@ -1,12 +1,12 @@
{
  "name": "@mendable/firecrawl-js",
-  "version": "0.0.22",
+  "version": "0.0.28",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@mendable/firecrawl-js",
-      "version": "0.0.22",
+      "version": "0.0.28",
      "license": "MIT",
      "dependencies": {
        "axios": "^1.6.8",
@@ -20,6 +20,7 @@
        "@types/axios": "^0.14.0",
        "@types/dotenv": "^8.2.0",
        "@types/jest": "^29.5.12",
+        "@types/mocha": "^10.0.6",
        "@types/node": "^20.12.12",
        "@types/uuid": "^9.0.8",
        "jest": "^29.7.0",
@@ -1071,6 +1072,12 @@
        "pretty-format": "^29.0.0"
      }
    },
+    "node_modules/@types/mocha": {
+      "version": "10.0.6",
+      "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.6.tgz",
+      "integrity": "sha512-dJvrYWxP/UcXm36Qn36fxhUKu8A/xMRXVT2cliFF1Z7UA9liG5Psj3ezNSZw+5puH2czDXRLcXQxf8JbJt0ejg==",
+      "dev": true
+    },
    "node_modules/@types/node": {
      "version": "20.12.12",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.12.tgz",

View File

@@ -1,6 +1,6 @@
{
  "name": "@mendable/firecrawl-js",
-  "version": "0.0.26",
+  "version": "0.0.28",
  "description": "JavaScript SDK for Firecrawl API",
  "main": "build/index.js",
  "types": "types/index.d.ts",
@@ -33,6 +33,7 @@
    "@types/axios": "^0.14.0",
    "@types/dotenv": "^8.2.0",
    "@types/jest": "^29.5.12",
+    "@types/mocha": "^10.0.6",
    "@types/node": "^20.12.12",
    "@types/uuid": "^9.0.8",
    "jest": "^29.7.0",

View File

@@ -2,6 +2,7 @@ import FirecrawlApp from '../../index';
import { v4 as uuidv4 } from 'uuid';
import dotenv from 'dotenv';

dotenv.config();

const TEST_API_KEY = process.env.TEST_API_KEY;
@@ -29,14 +30,14 @@ describe('FirecrawlApp E2E Tests', () => {
    const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL });
    const response = await app.scrapeUrl('https://roastmywebsite.ai');
    expect(response).not.toBeNull();
-    expect(response.data.content).toContain("_Roast_");
+    expect(response.data?.content).toContain("_Roast_");
  }, 30000); // 30 seconds timeout

  test.concurrent('should return successful response for valid scrape', async () => {
    const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL });
    const response = await app.scrapeUrl('https://roastmywebsite.ai');
    expect(response).not.toBeNull();
-    expect(response.data.content).toContain("_Roast_");
+    expect(response.data?.content).toContain("_Roast_");
    expect(response.data).toHaveProperty('markdown');
    expect(response.data).toHaveProperty('metadata');
    expect(response.data).not.toHaveProperty('html');
@@ -46,23 +47,23 @@ describe('FirecrawlApp E2E Tests', () => {
    const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL });
    const response = await app.scrapeUrl('https://roastmywebsite.ai', { pageOptions: { includeHtml: true } });
    expect(response).not.toBeNull();
-    expect(response.data.content).toContain("_Roast_");
-    expect(response.data.markdown).toContain("_Roast_");
-    expect(response.data.html).toContain("<h1");
+    expect(response.data?.content).toContain("_Roast_");
+    expect(response.data?.markdown).toContain("_Roast_");
+    expect(response.data?.html).toContain("<h1");
  }, 30000); // 30 seconds timeout

  test.concurrent('should return successful response for valid scrape with PDF file', async () => {
    const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL });
    const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001.pdf');
    expect(response).not.toBeNull();
-    expect(response.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy');
+    expect(response.data?.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy');
  }, 30000); // 30 seconds timeout

  test.concurrent('should return successful response for valid scrape with PDF file without explicit extension', async () => {
    const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL });
    const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001');
    expect(response).not.toBeNull();
-    expect(response.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy');
+    expect(response.data?.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy');
  }, 30000); // 30 seconds timeout

  test.concurrent('should throw error for invalid API key on crawl', async () => {
@@ -112,15 +113,15 @@ describe('FirecrawlApp E2E Tests', () => {
    expect(statusResponse).not.toBeNull();
    expect(statusResponse.status).toBe('completed');
-    expect(statusResponse.data.length).toBeGreaterThan(0);
+    expect(statusResponse?.data?.length).toBeGreaterThan(0);
  }, 35000); // 35 seconds timeout

  test.concurrent('should return successful response for search', async () => {
    const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL });
    const response = await app.search("test query");
    expect(response).not.toBeNull();
-    expect(response.data[0].content).toBeDefined();
-    expect(response.data.length).toBeGreaterThan(2);
+    expect(response?.data?.[0]?.content).toBeDefined();
+    expect(response?.data?.length).toBeGreaterThan(2);
  }, 30000); // 30 seconds timeout

  test.concurrent('should throw error for invalid API key on search', async () => {
@@ -146,10 +147,10 @@ describe('FirecrawlApp E2E Tests', () => {
      }
    });
    expect(response).not.toBeNull();
-    expect(response.data.llm_extraction).toBeDefined();
-    const llmExtraction = response.data.llm_extraction;
-    expect(llmExtraction.company_mission).toBeDefined();
-    expect(typeof llmExtraction.supports_sso).toBe('boolean');
-    expect(typeof llmExtraction.is_open_source).toBe('boolean');
+    expect(response.data?.llm_extraction).toBeDefined();
+    const llmExtraction = response.data?.llm_extraction;
+    expect(llmExtraction?.company_mission).toBeDefined();
+    expect(typeof llmExtraction?.supports_sso).toBe('boolean');
+    expect(typeof llmExtraction?.is_open_source).toBe('boolean');
  }, 30000); // 30 seconds timeout
});

View File

@@ -43,6 +43,6 @@ describe('the firecrawl JS SDK', () => {
      expect.objectContaining({ headers: expect.objectContaining({'Authorization': `Bearer ${apiKey}`}) }),
    )
    expect(scrapedData.success).toBe(true);
-    expect(scrapedData.data.metadata.title).toEqual('Mendable');
+    expect(scrapedData?.data?.metadata.title).toEqual('Mendable');
  });
})

View File

@@ -9,6 +9,102 @@ export interface FirecrawlAppConfig {
  apiUrl?: string | null;
}
/**
* Metadata for a Firecrawl document.
*/
export interface FirecrawlDocumentMetadata {
title?: string;
description?: string;
language?: string;
keywords?: string;
robots?: string;
ogTitle?: string;
ogDescription?: string;
ogUrl?: string;
ogImage?: string;
ogAudio?: string;
ogDeterminer?: string;
ogLocale?: string;
ogLocaleAlternate?: string[];
ogSiteName?: string;
ogVideo?: string;
dctermsCreated?: string;
dcDateCreated?: string;
dcDate?: string;
dctermsType?: string;
dcType?: string;
dctermsAudience?: string;
dctermsSubject?: string;
dcSubject?: string;
dcDescription?: string;
dctermsKeywords?: string;
modifiedTime?: string;
publishedTime?: string;
articleTag?: string;
articleSection?: string;
sourceURL?: string;
pageStatusCode?: number;
pageError?: string;
[key: string]: any;
}
/**
* Document interface for Firecrawl.
*/
export interface FirecrawlDocument {
id?: string;
url?: string;
content: string;
markdown?: string;
html?: string;
llm_extraction?: Record<string, any>;
createdAt?: Date;
updatedAt?: Date;
type?: string;
metadata: FirecrawlDocumentMetadata;
childrenLinks?: string[];
provider?: string;
warning?: string;
index?: number;
}
/**
* Response interface for scraping operations.
*/
export interface ScrapeResponse {
success: boolean;
data?: FirecrawlDocument;
error?: string;
}
/**
* Response interface for searching operations.
*/
export interface SearchResponse {
success: boolean;
data?: FirecrawlDocument[];
error?: string;
}
/**
* Response interface for crawling operations.
*/
export interface CrawlResponse {
success: boolean;
jobId?: string;
data?: FirecrawlDocument[];
error?: string;
}
/**
* Response interface for job status checks.
*/
export interface JobStatusResponse {
success: boolean;
status: string;
jobId?: string;
data?: FirecrawlDocument[];
partial_data?: FirecrawlDocument[];
error?: string;
}
/**
 * Generic parameter interface.
 */
@@ -20,59 +116,20 @@ export interface Params {
    extractionPrompt?: string;
  };
}

-/**
- * Response interface for scraping operations.
- */
-export interface ScrapeResponse {
-  success: boolean;
-  data?: any;
-  error?: string;
-}
-
-/**
- * Response interface for searching operations.
- */
-export interface SearchResponse {
-  success: boolean;
-  data?: any;
-  error?: string;
-}
-
-/**
- * Response interface for crawling operations.
- */
-export interface CrawlResponse {
-  success: boolean;
-  jobId?: string;
-  data?: any;
-  error?: string;
-}
-
-/**
- * Response interface for job status checks.
- */
-export interface JobStatusResponse {
-  success: boolean;
-  status: string;
-  jobId?: string;
-  data?: any;
-  partial_data?: any,
-  error?: string;
-}
-
/**
 * Main class for interacting with the Firecrawl API.
 */
export default class FirecrawlApp {
  private apiKey: string;
-  private apiUrl: string = "https://api.firecrawl.dev";
+  private apiUrl: string;

  /**
   * Initializes a new instance of the FirecrawlApp class.
   * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance.
   */
-  constructor({ apiKey = null }: FirecrawlAppConfig) {
+  constructor({ apiKey = null, apiUrl = null }: FirecrawlAppConfig) {
    this.apiKey = apiKey || "";
+    this.apiUrl = apiUrl || "https://api.firecrawl.dev";
    if (!this.apiKey) {
      throw new Error("No API key provided");
    }
@@ -112,7 +169,7 @@ export default class FirecrawlApp {
    const response: AxiosResponse = await axios.post(
      this.apiUrl + "/v0/scrape",
      jsonData,
-      { headers },
+      { headers }
    );
    if (response.status === 200) {
      const responseData = response.data;
@@ -231,7 +288,9 @@ export default class FirecrawlApp {
        success: true,
        status: response.data.status,
        data: response.data.data,
-        partial_data: !response.data.data ? response.data.partial_data : undefined,
+        partial_data: !response.data.data
+          ? response.data.partial_data
+          : undefined,
      };
    } else {
      this.handleError(response, "check crawl status");
@@ -252,10 +311,10 @@ export default class FirecrawlApp {
   */
  prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders {
    return {
-      'Content-Type': 'application/json',
-      'Authorization': `Bearer ${this.apiKey}`,
-      ...(idempotencyKey ? { 'x-idempotency-key': idempotencyKey } : {}),
-    } as AxiosRequestHeaders & { 'x-idempotency-key'?: string };
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${this.apiKey}`,
+      ...(idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {}),
+    } as AxiosRequestHeaders & { "x-idempotency-key"?: string };
  }

  /**
@@ -317,7 +376,9 @@ export default class FirecrawlApp {
    if (checkInterval < 2) {
      checkInterval = 2;
    }
-    await new Promise((resolve) => setTimeout(resolve, checkInterval * 1000)); // Wait for the specified timeout before checking again
+    await new Promise((resolve) =>
+      setTimeout(resolve, checkInterval * 1000)
+    ); // Wait for the specified timeout before checking again
  } else {
    throw new Error(
      `Crawl job failed or was stopped. Status: ${statusData.status}`
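Since data is now a typed, optional FirecrawlDocument rather than any, callers are expected to narrow before use; a minimal consuming sketch (the URL is a placeholder):

```ts
import FirecrawlApp, { ScrapeResponse } from "@mendable/firecrawl-js";

const app = new FirecrawlApp({ apiKey: "fc-YOUR_API_KEY" });

const result: ScrapeResponse = await app.scrapeUrl("https://example.com");
if (result.success && result.data) {
  // Inside this guard, result.data is a FirecrawlDocument with typed metadata.
  console.log(result.data.metadata.title);
} else {
  console.error(result.error);
}
```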

View File

@@ -7,6 +7,99 @@ export interface FirecrawlAppConfig {
  apiKey?: string | null;
  apiUrl?: string | null;
}
/**
* Metadata for a Firecrawl document.
*/
export interface FirecrawlDocumentMetadata {
title?: string;
description?: string;
language?: string;
keywords?: string;
robots?: string;
ogTitle?: string;
ogDescription?: string;
ogUrl?: string;
ogImage?: string;
ogAudio?: string;
ogDeterminer?: string;
ogLocale?: string;
ogLocaleAlternate?: string[];
ogSiteName?: string;
ogVideo?: string;
dctermsCreated?: string;
dcDateCreated?: string;
dcDate?: string;
dctermsType?: string;
dcType?: string;
dctermsAudience?: string;
dctermsSubject?: string;
dcSubject?: string;
dcDescription?: string;
dctermsKeywords?: string;
modifiedTime?: string;
publishedTime?: string;
articleTag?: string;
articleSection?: string;
sourceURL?: string;
pageStatusCode?: number;
pageError?: string;
[key: string]: any;
}
/**
* Document interface for Firecrawl.
*/
export interface FirecrawlDocument {
id?: string;
url?: string;
content: string;
markdown?: string;
html?: string;
llm_extraction?: Record<string, any>;
createdAt?: Date;
updatedAt?: Date;
type?: string;
metadata: FirecrawlDocumentMetadata;
childrenLinks?: string[];
provider?: string;
warning?: string;
index?: number;
}
/**
* Response interface for scraping operations.
*/
export interface ScrapeResponse {
success: boolean;
data?: FirecrawlDocument;
error?: string;
}
/**
* Response interface for searching operations.
*/
export interface SearchResponse {
success: boolean;
data?: FirecrawlDocument[];
error?: string;
}
/**
* Response interface for crawling operations.
*/
export interface CrawlResponse {
success: boolean;
jobId?: string;
data?: FirecrawlDocument[];
error?: string;
}
/**
* Response interface for job status checks.
*/
export interface JobStatusResponse {
success: boolean;
status: string;
jobId?: string;
data?: FirecrawlDocument[];
partial_data?: FirecrawlDocument[];
error?: string;
}
/**
 * Generic parameter interface.
 */
@@ -18,42 +111,6 @@ export interface Params {
    extractionPrompt?: string;
  };
}

-/**
- * Response interface for scraping operations.
- */
-export interface ScrapeResponse {
-  success: boolean;
-  data?: any;
-  error?: string;
-}
-
-/**
- * Response interface for searching operations.
- */
-export interface SearchResponse {
-  success: boolean;
-  data?: any;
-  error?: string;
-}
-
-/**
- * Response interface for crawling operations.
- */
-export interface CrawlResponse {
-  success: boolean;
-  jobId?: string;
-  data?: any;
-  error?: string;
-}
-
-/**
- * Response interface for job status checks.
- */
-export interface JobStatusResponse {
-  success: boolean;
-  status: string;
-  jobId?: string;
-  data?: any;
-  partial_data?: any;
-  error?: string;
-}
-
/**
 * Main class for interacting with the Firecrawl API.
 */
@@ -64,7 +121,7 @@ export default class FirecrawlApp {
   * Initializes a new instance of the FirecrawlApp class.
   * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance.
   */
-  constructor({ apiKey }: FirecrawlAppConfig);
+  constructor({ apiKey, apiUrl }: FirecrawlAppConfig);
  /**
   * Scrapes a URL using the Firecrawl API.
   * @param {string} url - The URL to scrape.

View File

@@ -11,10 +11,8 @@
  "dependencies": {
    "@mendable/firecrawl-js": "^0.0.19",
    "axios": "^1.6.8",
-    "dotenv": "^16.4.5",
    "ts-node": "^10.9.2",
    "typescript": "^5.4.5",
-    "uuid": "^9.0.1",
    "zod": "^3.23.8"
  },
  "devDependencies": {
@@ -452,15 +450,6 @@
    "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
    "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA=="
  },
-  "node_modules/@types/node": {
-    "version": "20.12.11",
-    "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.11.tgz",
-    "integrity": "sha512-vDg9PZ/zi+Nqp6boSOT7plNuthRugEKixDv5sFTIpkE89MmNtEArAShI4mxuX2+UrLEe9pxC1vm2cjm9YlWbJw==",
-    "peer": true,
-    "dependencies": {
-      "undici-types": "~5.26.4"
-    }
-  },
  "node_modules/acorn": {
    "version": "8.11.3",
    "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
@@ -532,17 +521,6 @@
      "node": ">=0.3.1"
    }
  },
-  "node_modules/dotenv": {
-    "version": "16.4.5",
-    "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz",
-    "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==",
-    "engines": {
-      "node": ">=12"
-    },
-    "funding": {
-      "url": "https://dotenvx.com"
-    }
-  },
  "node_modules/esbuild": {
    "version": "0.20.2",
    "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz",
@@ -750,24 +728,6 @@
      "node": ">=14.17"
    }
  },
-  "node_modules/undici-types": {
-    "version": "5.26.5",
-    "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
-    "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
-    "peer": true
-  },
-  "node_modules/uuid": {
-    "version": "9.0.1",
-    "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
-    "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==",
-    "funding": [
-      "https://github.com/sponsors/broofa",
-      "https://github.com/sponsors/ctavan"
-    ],
-    "bin": {
-      "uuid": "dist/bin/uuid"
-    }
-  },
  "node_modules/v8-compile-cache-lib": {
    "version": "3.0.1",
    "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",