diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts
index 0248df2..ee29069 100644
--- a/apps/api/src/scraper/WebScraper/crawler.ts
+++ b/apps/api/src/scraper/WebScraper/crawler.ts
@@ -4,7 +4,7 @@ import { URL } from "url";
 import { getLinksFromSitemap } from "./sitemap";
 import async from "async";
 import { Progress } from "../../lib/entities";
-import { scrapWithScrapingBee } from "./single_url";
+import { scrapSingleUrl, scrapWithScrapingBee } from "./single_url";
 import robotsParser from "robots-parser";
 
 export class WebCrawler {
@@ -196,7 +196,8 @@ export class WebCrawler {
     let content;
     // If it is the first link, fetch with scrapingbee
     if (this.visited.size === 1) {
-      content = await scrapWithScrapingBee(url, "load");
+      const page = await scrapSingleUrl(url, {includeHtml: true});
+      content = page.html;
     } else {
       const response = await axios.get(url);
       content = response.data;
diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts
index 7ef0a10..80f2d86 100644
--- a/apps/api/src/scraper/WebScraper/index.ts
+++ b/apps/api/src/scraper/WebScraper/index.ts
@@ -140,6 +140,7 @@ export class WebScraperDataProvider {
       generateImgAltText: this.generateImgAltText,
     });
     let links = await crawler.start(inProgress, 5, this.limit, this.maxCrawledDepth);
+
     if (this.returnOnlyUrls) {
       return this.returnOnlyUrlsResponse(links, inProgress);
     }
@@ -163,6 +164,7 @@ export class WebScraperDataProvider {
       return this.returnOnlyUrlsResponse(links, inProgress);
     }
 
+
     let documents = await this.processLinks(links, inProgress);
     return this.cacheAndFinalizeDocuments(documents, links);
   }
@@ -237,6 +239,8 @@ export class WebScraperDataProvider {
     links: string[]
  ): Promise<Document[]> {
     await this.setCachedDocuments(documents, links);
+    documents = this.filterDocsExcludeInclude(documents);
+    documents = this.filterDepth(documents);
     documents = this.removeChildLinks(documents);
     return documents.splice(0, this.limit);
   }