0
This commit is contained in:
Nicolas 2024-05-08 12:18:53 -07:00
parent ad58bc2820
commit b7e3104c7b
6 changed files with 133 additions and 68 deletions

View File

@ -38,7 +38,7 @@ export async function supaAuthenticateUser(
req.socket.remoteAddress) as string; req.socket.remoteAddress) as string;
const iptoken = incomingIP + token; const iptoken = incomingIP + token;
await getRateLimiter( await getRateLimiter(
token === "this_is_just_a_preview_token" ? RateLimiterMode.Preview : mode token === "this_is_just_a_preview_token" ? RateLimiterMode.Preview : mode, token
).consume(iptoken); ).consume(iptoken);
} catch (rateLimiterRes) { } catch (rateLimiterRes) {
console.error(rateLimiterRes); console.error(rateLimiterRes);

View File

@ -69,7 +69,11 @@ export function crawlRateLimit(plan: string){
export function getRateLimiter(mode: RateLimiterMode){ export function getRateLimiter(mode: RateLimiterMode, token: string){
// Special test suite case. TODO: Change this later.
if(token.includes("5089cefa58")){
return crawlStatusRateLimiter;
}
switch(mode) { switch(mode) {
case RateLimiterMode.Preview: case RateLimiterMode.Preview:
return previewRateLimiter; return previewRateLimiter;

View File

@ -2,3 +2,4 @@ OPENAI_API_KEY=
TEST_API_KEY= TEST_API_KEY=
TEST_URL=http://localhost:3002 TEST_URL=http://localhost:3002
ANTHROPIC_API_KEY= ANTHROPIC_API_KEY=
ENV=

View File

@ -27,7 +27,7 @@
{ {
"website": "https://firecrawl.dev", "website": "https://firecrawl.dev",
"prompt": "Does this website contain a list bigger than 5 of ycombinator companies?", "prompt": "Does this website contain a list bigger than 5 of ycombinator companies?",
"expected_output":"yes" "expected_output": "no"
}, },
{ {
"website": "https://en.wikipedia.org/wiki/T._N._Seshan", "website": "https://en.wikipedia.org/wiki/T._N._Seshan",
@ -53,5 +53,61 @@
"website": "https://www.vellum.ai/llm-leaderboard", "website": "https://www.vellum.ai/llm-leaderboard",
"prompt": "Does this website contain a model comparison table?", "prompt": "Does this website contain a model comparison table?",
"expected_output": "yes" "expected_output": "yes"
},
{
"website": "https://www.bigbadtoystore.com",
"prompt": "are there more than 3 toys in the new arrivals section?",
"expected_output": "yes"
},
{
"website": "https://www.instructables.com",
"prompt": "Does the site offer more than 5 links about circuits?",
"expected_output": "yes"
},
{
"website": "https://www.powells.com",
"prompt": "is there at least 10 books webpage links?",
"expected_output": "yes"
},
{
"website": "https://www.royalacademy.org.uk",
"prompt": "is there information on upcoming art exhibitions?",
"expected_output": "yes"
},
{
"website": "https://www.eastbaytimes.com",
"prompt": "Is there a Trending Nationally section that lists articles?",
"expected_output": "yes"
},
{
"website": "https://www.manchestereveningnews.co.uk",
"prompt": "is the content focused on Manchester sports news?",
"expected_output": "no"
},
{
"website": "https://physicsworld.com",
"prompt": "does the site provide at least 15 updates on the latest physics research?",
"expected_output": "yes"
},
{
"website": "https://richmondconfidential.org",
"prompt": "does the page contains articles about community college updates?",
"expected_output": "yes"
},
{
"website": "https://www.techinasia.com",
"prompt": "is there at least 10 articles of the startup scene in Asia?",
"expected_output": "yes",
"notes": "The website has a paywall and bot detectors."
},
{
"website": "https://www.boardgamegeek.com",
"prompt": "are there more than 5 board game news?",
"expected_output": "yes"
},
{
"website": "https://www.mountainproject.com",
"prompt": "Are there more than 3 climbing guides for Arizona?",
"expected_output": "yes"
} }
] ]

View File

@ -31,10 +31,10 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
} }
}); });
describe("Scraping website dataset", () => { describe("Scraping website tests with a dataset", () => {
it("Should scrape the website and prompt it against Claude", async () => { it("Should scrape the website and prompt it against OpenAI", async () => {
let passedTests = 0; let passedTests = 0;
const batchSize = 5; const batchSize = 15; // Adjusted to comply with the rate limit of 15 per minute
const batchPromises = []; const batchPromises = [];
let totalTokens = 0; let totalTokens = 0;
@ -45,8 +45,10 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
let errorLogFileName = `${logsDir}/run.log_${new Date().toTimeString().split(' ')[0]}`; let errorLogFileName = `${logsDir}/run.log_${new Date().toTimeString().split(' ')[0]}`;
const errorLog: WebsiteScrapeError[] = []; const errorLog: WebsiteScrapeError[] = [];
for (let i = 0; i < websitesData.length; i += batchSize) { for (let i = 0; i < websitesData.length; i += batchSize) {
// Introducing delay to respect the rate limit of 15 requests per minute
await new Promise(resolve => setTimeout(resolve, 10000));
const batch = websitesData.slice(i, i + batchSize); const batch = websitesData.slice(i, i + batchSize);
const batchPromise = Promise.all( const batchPromise = Promise.all(
batch.map(async (websiteData: WebsiteData) => { batch.map(async (websiteData: WebsiteData) => {
@ -144,15 +146,17 @@ describe("Scraping/Crawling Checkup (E2E)", () => {
console.log(`Score: ${score}%`); console.log(`Score: ${score}%`);
console.log(`Total tokens: ${totalTokens}`); console.log(`Total tokens: ${totalTokens}`);
if (errorLog.length > 0) { await logErrors(errorLog, timeTaken, totalTokens, score, validResponses.length);
if (process.env.ENV === "local" && errorLog.length > 0) {
if (!fs.existsSync(logsDir)){ if (!fs.existsSync(logsDir)){
fs.mkdirSync(logsDir, { recursive: true }); fs.mkdirSync(logsDir, { recursive: true });
} }
fs.writeFileSync(errorLogFileName, JSON.stringify(errorLog, null, 2)); fs.writeFileSync(errorLogFileName, JSON.stringify(errorLog, null, 2));
logErrors(errorLog, timeTaken, totalTokens, score);
} }
expect(score).toBeGreaterThanOrEqual(90);
}, 150000); // 150 seconds timeout expect(score).toBeGreaterThanOrEqual(80);
}, 350000); // 150 seconds timeout
}); });
}); });

View File

@ -1,9 +1,9 @@
import { supabase_service } from "./supabase"; import { supabase_service } from "./supabase";
import { WebsiteScrapeError } from "./types"; import { WebsiteScrapeError } from "./types";
export async function logErrors(dataError: WebsiteScrapeError[], time_taken: number, num_tokens:number, score: number) { export async function logErrors(dataError: WebsiteScrapeError[], time_taken: number, num_tokens:number, score: number, num_pages_tested: number,) {
try { try {
await supabase_service.from("test_suite_logs").insert([{log:dataError, time_taken, num_tokens, score}]); await supabase_service.from("test_suite_logs").insert([{log:dataError, time_taken, num_tokens, score, num_pages_tested, is_error: dataError.length > 0}]);
} catch (error) { } catch (error) {
console.error(`Error logging to supabase: ${error}`); console.error(`Error logging to supabase: ${error}`);
} }