Skip to content

Commit

Permalink
fix: avoid disabling the crawler if we're scanning localhost
Browse files — browse the repository at this point in the history
  • Loading branch information
harlan-zw committed May 22, 2023
1 parent 0bccf57 commit 9c31ead
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion packages/core/src/discovery/routes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ export const resolveReportableRoutes: () => Promise<NormalisedRoute[]> = async (
logger.info(`Discovered ${sitemapUrls.length} routes from sitemap.xml.`)
sitemapUrls.forEach(url => urls.add(url))
// sitemap threshold for disabling the crawler (not applied when scanning localhost)
if (sitemapUrls.length >= 50) {
if (!resolvedConfig.site.includes('localhost') && sitemapUrls.length >= 50) {
resolvedConfig.scanner.crawler = false
logger.info('Disabling crawler mode as sitemap has been provided.')
}
Expand Down

0 comments on commit 9c31ead

Please sign in to comment.