@@ -14,6 +14,7 @@ UnlighthouseWorkerStats,
 import { ReportArtifacts, asRegExp, createTaskReportFromRoute } from '../util'
 import { useUnlighthouse } from '../unlighthouse'
 import { useLogger } from '../logger'
+import { createFilter } from '../util/filter'
 import {
   launchPuppeteerCluster,
 } from './cluster'
@@ -91,16 +92,12 @@ export async function createUnlighthouseWorker(tasks: Record<UnlighthouseTask, T
     if (ignoredRoutes.has(id))
       return
 
-    if (resolvedConfig.scanner.include) {
-      // must match
-      if (resolvedConfig.scanner.include.filter(rule => asRegExp(rule).test(path)).length === 0)
-        return
-    }
-
-    if (resolvedConfig.scanner.exclude) {
-      // must not match
-      if (resolvedConfig.scanner.exclude.filter(rule => asRegExp(rule).test(path)).length > 0)
+    if (resolvedConfig.scanner.include || resolvedConfig.scanner.exclude) {
+      const filter = createFilter(resolvedConfig.scanner)
+      if (!filter(path)) {
+        logger.debug('Skipping route based on include / exclude rules', { path })
         return
+      }
     }
 
     /*
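
The new branch delegates the include / exclude matching to a createFilter helper from '../util/filter', whose implementation is not part of this diff. Below is a minimal sketch of what such a helper could look like, assuming it returns true when a path should be scanned and reusing the existing asRegExp util; the option shape and names here are illustrative, not the actual module.

import { asRegExp } from '../util'

// Illustrative option shape; the real types live alongside createFilter in ../util/filter.
interface ScannerFilterOptions {
  include?: string[]
  exclude?: string[]
}

// Sketch: build a predicate that returns true when a path should be scanned.
// Exclude rules win over include rules, mirroring the inline checks removed above.
export function createFilter(options: ScannerFilterOptions = {}): (path: string) => boolean {
  return (path: string) => {
    const { include = [], exclude = [] } = options
    // must not match any exclude rule
    if (exclude.some(rule => asRegExp(rule).test(path)))
      return false
    // must match at least one include rule, when include rules are given
    if (include.length > 0 && !include.some(rule => asRegExp(rule).test(path)))
      return false
    return true
  }
}

With a predicate like this, the worker skips a route exactly when filter(path) is falsy, which matches the behaviour of the two blocks this hunk removes.
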
@@ -109,7 +106,7 @@ export async function createUnlighthouseWorker(tasks: Record<UnlighthouseTask, T
      * Note: this is somewhat similar to the logic in discovery/routes.ts, that's because we need to sample the routes
      * from the sitemap or as provided. This logic is for ensuring crawled URLs don't exceed the group limit.
      */
-    if (resolvedConfig.scanner.dynamicSampling > 0) {
+    if (resolvedConfig.scanner.dynamicSampling && resolvedConfig.scanner.dynamicSampling > 0) {
       const routeGroup = get(route, resolvedConfig.client.groupRoutesKey.replace('route.', ''))
       // group all urls by their route definition path name
       const routesInGroup = [...routeReports.values()].filter(
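
The extra truthiness check on scanner.dynamicSampling only matters if the resolved option can hold something other than a positive number. Assuming it is roughly number | false, with false disabling sampling, a reduced sketch of the guard:

// Assumption: the resolved option is roughly `number | false`, where `false`
// (or 0, or an unset value) means "do not sample route groups".
type DynamicSampling = number | false

function shouldApplySampling(dynamicSampling?: DynamicSampling): boolean {
  // The truthiness check narrows away `false`/`undefined` before the numeric
  // comparison, so `false`, `0` and `undefined` all disable group sampling.
  return Boolean(dynamicSampling && dynamicSampling > 0)
}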