Update unit tests

Eric Ciarla 2024-06-15 17:14:09 -04:00
parent f0d4146b42
commit 519ab1aecb
3 changed files with 17 additions and 7 deletions


@@ -655,7 +655,7 @@ describe("E2E Tests for API Routes", () => {
       const testurls = completedResponse.body.data.map(
         (item: any) => item.metadata?.sourceURL
       );
-      console.log(testurls)
+      //console.log(testurls)
       expect(completedResponse.statusCode).toBe(200);
       expect(completedResponse.body).toHaveProperty("status");


@@ -73,7 +73,7 @@ describe('WebCrawler maxDepth and filterLinks', () => {
     const initialUrl = 'http://example.com'; // Set initial URL for this test
     const enteredMaxCrawledDepth = 0;
     maxCrawledDepth = getAdjustedMaxDepth(initialUrl, enteredMaxCrawledDepth);
-    console.log(maxCrawledDepth);
+
     crawler = new WebCrawler({
       initialUrl: initialUrl,
@@ -101,7 +101,7 @@ describe('WebCrawler maxDepth and filterLinks', () => {
     const initialUrl = 'http://example.com/page1'; // Set initial URL for this test
     const enteredMaxCrawledDepth = 1;
     maxCrawledDepth = getAdjustedMaxDepth(initialUrl, enteredMaxCrawledDepth);
-    console.log(maxCrawledDepth);
+
     crawler = new WebCrawler({
       initialUrl: initialUrl,
@@ -130,7 +130,7 @@ describe('WebCrawler maxDepth and filterLinks', () => {
     const initialUrl = 'http://example.com/page1'; // Set initial URL for this test
     const enteredMaxCrawledDepth = 2;
     maxCrawledDepth = getAdjustedMaxDepth(initialUrl, enteredMaxCrawledDepth);
-    console.log(maxCrawledDepth);
+
     crawler = new WebCrawler({
       initialUrl: initialUrl,
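
The three WebCrawler hunks above make the same housekeeping change: the debug console.log of maxCrawledDepth is dropped from the test setup before the crawler is constructed. For context, here is a minimal sketch of the depth filtering these tests exercise, assuming (this is not the repo's actual code) that a URL's depth is its number of non-empty path segments and that links deeper than maxCrawledDepth are filtered out:

// Hypothetical sketch, not the repo's implementation.
// Depth of "http://example.com/page1" -> 1; a bare domain -> 0.
function getURLDepth(url: string): number {
  return new URL(url).pathname.split("/").filter(Boolean).length;
}

// Keep only links whose depth does not exceed the adjusted max depth.
function filterByDepth(links: string[], maxCrawledDepth: number): string[] {
  return links.filter((link) => getURLDepth(link) <= maxCrawledDepth);
}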


@@ -22,15 +22,25 @@ describe('Testing getURLDepth and getAdjustedMaxDepth', () => {
   });
-  it('Adjusted maxDepth should return 1 for scrapethissite.com', () => {
+  it('Adjusted maxDepth should return 1 for scrapethissite.com and max depth param of 1', () => {
     const enteredURL = "https://scrapethissite.com"
     expect(getAdjustedMaxDepth(enteredURL, 1)).toBe(1);
   });
+  it('Adjusted maxDepth should return 0 for scrapethissite.com and max depth param of 0', () => {
+    const enteredURL = "https://scrapethissite.com"
+    expect(getAdjustedMaxDepth(enteredURL, 0)).toBe(0);
+  });
+  it('Adjusted maxDepth should return 0 for mendable.ai and max depth param of 0', () => {
+    const enteredURL = "https://mendable.ai"
+    expect(getAdjustedMaxDepth(enteredURL, 0)).toBe(0);
+  });
-  it('Adjusted maxDepth should return 5 for scrapethissite.com/pages/articles', () => {
+  it('Adjusted maxDepth should return 4 for scrapethissite.com/pages/articles and max depth param of 2', () => {
     const enteredURL = "https://scrapethissite.com/pages/articles"
-    expect(getAdjustedMaxDepth(enteredURL, 2)).toBe(5);
+    expect(getAdjustedMaxDepth(enteredURL, 2)).toBe(4);
   });
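
Taken together, the updated assertions pin down a simple relationship: the adjusted max depth is the entered max depth plus the path depth of the entered URL. A minimal sketch consistent with these tests, assuming this additive behavior (the actual implementation may differ):

// Assumed behavior, reconstructed from the assertions above:
// adjusted = path depth of the entered URL + the entered max depth param.
function getURLDepth(url: string): number {
  return new URL(url).pathname.split("/").filter(Boolean).length;
}

function getAdjustedMaxDepth(url: string, maxDepth: number): number {
  return getURLDepth(url) + maxDepth;
}

// getAdjustedMaxDepth("https://scrapethissite.com", 1)                // 0 + 1 = 1
// getAdjustedMaxDepth("https://mendable.ai", 0)                       // 0 + 0 = 0
// getAdjustedMaxDepth("https://scrapethissite.com/pages/articles", 2) // 2 + 2 = 4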