diff --git a/apps/api/package.json b/apps/api/package.json index e42cc2435..8c277a2b2 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -13,11 +13,11 @@ "dependencies": { "@ai-sdk/anthropic": "^1.2.10", "@ai-sdk/openai": "^1.3.12", - "@fastify/compress": "^8.0.1", + "@fastify/compress": "^8.1.0", "@fastify/cookie": "^11.0.2", - "@fastify/cors": "^11.0.0", - "@fastify/rate-limit": "^10.2.2", - "@fastify/websocket": "^11.0.2", + "@fastify/cors": "^11.1.0", + "@fastify/rate-limit": "^10.3.0", + "@fastify/websocket": "^11.2.0", "@node-rs/argon2": "^2.0.2", "@openpanel/auth": "workspace:^", "@openpanel/common": "workspace:*", @@ -35,10 +35,10 @@ "@trpc/server": "^11.6.0", "ai": "^4.2.10", "fast-json-stable-hash": "^1.0.3", - "fastify": "^5.2.1", + "fastify": "^5.6.1", "fastify-metrics": "^12.1.0", "fastify-raw-body": "^5.0.0", - "groupmq": "1.0.0-next.19", + "groupmq": "1.1.0-next.5", "jsonwebtoken": "^9.0.2", "ramda": "^0.29.1", "request-ip": "^3.3.0", diff --git a/apps/api/scripts/get-bots.ts b/apps/api/scripts/get-bots.ts index 992a0fe2b..2dfe32e23 100644 --- a/apps/api/scripts/get-bots.ts +++ b/apps/api/scripts/get-bots.ts @@ -7,6 +7,23 @@ const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); import yaml from 'js-yaml'; +// Regex special characters that indicate we need actual regex +const regexSpecialChars = /[|^$.*+?(){}\[\]\\]/; + +function transformBots(bots: any[]): any[] { + return bots.map((bot) => { + const { regex, ...rest } = bot; + const hasRegexChars = regexSpecialChars.test(regex); + + if (hasRegexChars) { + // Keep as regex + return { regex, ...rest }; + } + // Convert to includes + return { includes: regex, ...rest }; + }); +} + async function main() { // Get document, or throw exception on error try { @@ -14,6 +31,9 @@ async function main() { 'https://raw.githubusercontent.com/matomo-org/device-detector/master/regexes/bots.yml', ).then((res) => res.text()); + const parsedData = yaml.load(data) as any[]; + const transformedBots = transformBots(parsedData); + fs.writeFileSync( path.resolve(__dirname, '../src/bots/bots.ts'), [ @@ -21,11 +41,20 @@ async function main() { '', '// The data is fetch from device-detector https://raw.githubusercontent.com/matomo-org/device-detector/master/regexes/bots.yml', '', - `const bots = ${JSON.stringify(yaml.load(data))} as const;`, + `const bots = ${JSON.stringify(transformedBots, null, 2)} as const;`, 'export default bots;', + '', ].join('\n'), 'utf-8', ); + + console.log( + `✅ Generated bots.ts with ${transformedBots.length} bot entries`, + ); + const regexCount = transformedBots.filter((b) => 'regex' in b).length; + const includesCount = transformedBots.filter((b) => 'includes' in b).length; + console.log(` - ${includesCount} simple string matches (includes)`); + console.log(` - ${regexCount} regex patterns`); } catch (e) { console.log(e); } diff --git a/apps/api/src/bots/bots.ts b/apps/api/src/bots/bots.ts index 67e3c25bb..13aaaffe9 100644 --- a/apps/api/src/bots/bots.ts +++ b/apps/api/src/bots/bots.ts @@ -4,13 +4,13 @@ const bots = [ { - regex: 'WireReaderBot', + includes: 'WireReaderBot', name: 'WireReaderBot', category: 'Feed Fetcher', url: 'https://wirereader.app/', }, { - regex: 'monitoring360bot', + includes: 'monitoring360bot', name: '360 Monitoring', category: 'Site Monitor', url: 'https://www.360monitoring.io', @@ -20,32 +20,44 @@ const bots = [ }, }, { - regex: 'Cloudflare-Healthchecks', + includes: 'Cloudflare-Healthchecks', name: 'Cloudflare Health Checks', category: 'Service 
Agent', url: 'https://developers.cloudflare.com/health-checks/', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: '360Spider', + includes: '360Spider', name: '360Spider', category: 'Search bot', url: 'https://www.so.com/help/help_3_2.html', - producer: { name: 'Online Media Group, Inc.', url: '' }, + producer: { + name: 'Online Media Group, Inc.', + url: '', + }, }, { - regex: 'Aboundex', + includes: 'Aboundex', name: 'Aboundexbot', category: 'Search bot', url: 'http://www.aboundex.com/crawler/', - producer: { name: 'Aboundex.com', url: 'http://www.aboundex.com' }, + producer: { + name: 'Aboundex.com', + url: 'http://www.aboundex.com', + }, }, { - regex: 'AcoonBot', + includes: 'AcoonBot', name: 'Acoon', category: 'Search bot', url: 'http://www.acoon.de/robot.asp', - producer: { name: 'Acoon GmbH', url: 'http://www.acoon.de' }, + producer: { + name: 'Acoon GmbH', + url: 'http://www.acoon.de', + }, }, { regex: 'AddThis\\.com', @@ -58,62 +70,86 @@ const bots = [ }, }, { - regex: 'AhrefsBot', + includes: 'AhrefsBot', name: 'aHrefs Bot', category: 'Crawler', url: 'https://ahrefs.com/robot', - producer: { name: 'Ahrefs Pte Ltd', url: 'https://ahrefs.com/robot' }, + producer: { + name: 'Ahrefs Pte Ltd', + url: 'https://ahrefs.com/robot', + }, }, { - regex: 'AhrefsSiteAudit', + includes: 'AhrefsSiteAudit', name: 'AhrefsSiteAudit', category: 'Site Monitor', url: 'https://ahrefs.com/robot/site-audit', - producer: { name: 'Ahrefs Pte Ltd', url: 'https://ahrefs.com/' }, + producer: { + name: 'Ahrefs Pte Ltd', + url: 'https://ahrefs.com/', + }, }, { regex: 'ia_archiver|alexabot|verifybot', name: 'Alexa Crawler', category: 'Search bot', url: 'https://support.alexa.com/hc/en-us/sections/200100794-Crawlers', - producer: { name: 'Alexa Internet', url: 'https://www.alexa.com' }, + producer: { + name: 'Alexa Internet', + url: 'https://www.alexa.com', + }, }, { - regex: 'alexa site audit', + includes: 'alexa site audit', name: 'Alexa Site Audit', category: 'Site Monitor', url: 'https://support.alexa.com/hc/en-us/articles/200450194', - producer: { name: 'Alexa Internet', url: 'https://www.alexa.com' }, + producer: { + name: 'Alexa Internet', + url: 'https://www.alexa.com', + }, }, { - regex: 'Amazonbot', + includes: 'Amazonbot', name: 'Amazon Bot', category: 'Crawler', url: 'https://developer.amazon.com/support/amazonbot', - producer: { name: 'Amazon.com, Inc.', url: 'https://www.amazon.com/' }, + producer: { + name: 'Amazon.com, Inc.', + url: 'https://www.amazon.com/', + }, }, { - regex: 'AmazonAdBot', + includes: 'AmazonAdBot', name: 'Amazon AdBot', category: 'Crawler', url: 'https://adbot.amazon.com/', - producer: { name: 'Amazon.com, Inc.', url: 'https://www.amazon.com/' }, + producer: { + name: 'Amazon.com, Inc.', + url: 'https://www.amazon.com/', + }, }, { regex: 'Amazon[ -]Route ?53[ -]Health[ -]Check[ -]Service', name: 'Amazon Route53 Health Check', category: 'Service Agent', - producer: { name: 'Amazon Web Services', url: 'https://aws.amazon.com/' }, + producer: { + name: 'Amazon Web Services', + url: 'https://aws.amazon.com/', + }, }, { - regex: 'AmorankSpider', + includes: 'AmorankSpider', name: 'Amorank Spider', category: 'Crawler', url: 'http://amorank.com/webcrawler.html', - producer: { name: 'Amorank', url: 'http://www.amorank.com' }, + producer: { + name: 'Amorank', + url: 'http://www.amorank.com', + }, }, { - regex: 'ApacheBench', + includes: 'ApacheBench', name: 'ApacheBench', 
category: 'Benchmark', url: 'https://httpd.apache.org/docs/2.4/programs/ab.html', @@ -123,67 +159,94 @@ const bots = [ }, }, { - regex: 'Applebot', + includes: 'Applebot', name: 'Applebot', category: 'Crawler', url: 'https://support.apple.com/en-us/119829', - producer: { name: 'Apple Inc', url: 'https://www.apple.com/' }, + producer: { + name: 'Apple Inc', + url: 'https://www.apple.com/', + }, }, { - regex: 'iTMS', + includes: 'iTMS', name: 'iTMS', category: 'Crawler', url: 'https://support.apple.com/en-us/119829', - producer: { name: 'Apple Inc', url: 'https://www.apple.com/' }, + producer: { + name: 'Apple Inc', + url: 'https://www.apple.com/', + }, }, { - regex: 'AppSignalBot', + includes: 'AppSignalBot', name: 'AppSignalBot', category: 'Site Monitor', url: 'https://docs.appsignal.com/uptime-monitoring/', - producer: { name: 'AppSignal', url: 'https://appsignal.com/' }, + producer: { + name: 'AppSignal', + url: 'https://appsignal.com/', + }, }, { - regex: 'Arachni', + includes: 'Arachni', name: 'Arachni', category: 'Security Checker', url: 'https://www.arachni-scanner.com/', - producer: { name: 'Sarosys LLC', url: 'https://www.sarosys.com/' }, + producer: { + name: 'Sarosys LLC', + url: 'https://www.sarosys.com/', + }, }, { - regex: 'AspiegelBot', + includes: 'AspiegelBot', name: 'AspiegelBot', category: 'Crawler', url: 'https://aspiegel.com/', - producer: { name: 'Huawei', url: 'https://www.huawei.com/' }, + producer: { + name: 'Huawei', + url: 'https://www.huawei.com/', + }, }, { - regex: 'Castro 2, Episode Duration Lookup', + includes: 'Castro 2, Episode Duration Lookup', name: 'Castro 2', category: 'Service Agent', url: 'http://supertop.co/castro/', - producer: { name: 'Supertop', url: 'http://supertop.co' }, + producer: { + name: 'Supertop', + url: 'http://supertop.co', + }, }, { - regex: 'Curious George', + includes: 'Curious George', name: 'Analytics SEO Crawler', category: 'Crawler', url: 'http://www.analyticsseo.com/crawler', - producer: { name: 'Analytics SEO', url: 'http://www.analyticsseo.com' }, + producer: { + name: 'Analytics SEO', + url: 'http://www.analyticsseo.com', + }, }, { regex: 'archive\\.org_bot|special_archiver', name: 'archive.org bot', category: 'Crawler', url: 'https://archive.org/details/archive.org_bot', - producer: { name: 'The Internet Archive', url: 'https://archive.org' }, + producer: { + name: 'The Internet Archive', + url: 'https://archive.org', + }, }, { - regex: 'Ask Jeeves/Teoma', + includes: 'Ask Jeeves/Teoma', name: 'Ask Jeeves', category: 'Search bot', url: '', - producer: { name: 'Ask Jeeves Inc.', url: 'http://www.ask.com' }, + producer: { + name: 'Ask Jeeves Inc.', + url: 'http://www.ask.com', + }, }, { regex: 'Backlink-Check\\.de', @@ -196,32 +259,44 @@ const bots = [ }, }, { - regex: 'BacklinkCrawler', + includes: 'BacklinkCrawler', name: 'BacklinkCrawler', category: 'Crawler', url: 'http://www.backlinktest.com/crawler.html', - producer: { name: '2.0Promotion GbR', url: 'http://www.backlinktest.com' }, + producer: { + name: '2.0Promotion GbR', + url: 'http://www.backlinktest.com', + }, }, { regex: 'Baidu.*spider|baidu Transcoder', name: 'Baidu Spider', category: 'Search bot', url: 'http://www.baidu.com/search/spider.htm', - producer: { name: 'Baidu', url: 'http://www.baidu.com' }, + producer: { + name: 'Baidu', + url: 'http://www.baidu.com', + }, }, { - regex: 'BazQux', + includes: 'BazQux', name: 'BazQux Reader', url: 'https://bazqux.com/fetcher', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: 
'', + }, }, { - regex: 'Better Uptime Bot', + includes: 'Better Uptime Bot', name: 'Better Uptime Bot', category: 'Site Monitor', url: 'https://betteruptime.com/faq', - producer: { name: 'Better Uptime', url: 'https://betteruptime.com/' }, + producer: { + name: 'Better Uptime', + url: 'https://betteruptime.com/', + }, }, { regex: @@ -235,62 +310,86 @@ const bots = [ }, }, { - regex: 'Blackbox Exporter', + includes: 'Blackbox Exporter', name: 'Blackbox Exporter', category: 'Site Monitor', url: 'https://github.com/prometheus/blackbox_exporter', - producer: { name: 'Prometheus', url: 'https://prometheus.io/' }, + producer: { + name: 'Prometheus', + url: 'https://prometheus.io/', + }, }, { - regex: 'Blekkobot', + includes: 'Blekkobot', name: 'Blekkobot', category: 'Search bot', url: 'http://blekko.com/about/blekkobot', - producer: { name: 'Blekko', url: 'http://blekko.com' }, + producer: { + name: 'Blekko', + url: 'http://blekko.com', + }, }, { - regex: 'BLEXBot', + includes: 'BLEXBot', name: 'BLEXBot Crawler', category: 'Crawler', url: 'http://webmeup-crawler.com', - producer: { name: 'WebMeUp', url: 'http://webmeup.com' }, + producer: { + name: 'WebMeUp', + url: 'http://webmeup.com', + }, }, { - regex: 'Bloglovin', + includes: 'Bloglovin', name: 'Bloglovin', url: 'http://www.bloglovin.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Blogtrottr', + includes: 'Blogtrottr', name: 'Blogtrottr', url: '', category: 'Feed Fetcher', - producer: { name: 'Blogtrottr Ltd', url: 'https://blogtrottr.com/' }, + producer: { + name: 'Blogtrottr Ltd', + url: 'https://blogtrottr.com/', + }, }, { - regex: 'BoardReader Blog Indexer', + includes: 'BoardReader Blog Indexer', name: 'BoardReader Blog Indexer', category: 'Crawler', - producer: { name: 'BoardReader', url: 'https://boardreader.com/' }, + producer: { + name: 'BoardReader', + url: 'https://boardreader.com/', + }, }, { - regex: 'BountiiBot', + includes: 'BountiiBot', name: 'Bountii Bot', category: 'Search bot', url: 'http://bountii.com/contact.php', - producer: { name: 'Bountii Inc.', url: 'http://bountii.com' }, + producer: { + name: 'Bountii Inc.', + url: 'http://bountii.com', + }, }, { - regex: 'Browsershots', + includes: 'Browsershots', name: 'Browsershots', category: 'Service Agent', url: 'http://browsershots.org/faq', - producer: { name: 'Browsershots.org', url: 'http://browsershots.org' }, + producer: { + name: 'Browsershots.org', + url: 'http://browsershots.org', + }, }, { - regex: 'BUbiNG', + includes: 'BUbiNG', name: 'BUbiNG', category: 'Crawler', url: 'http://law.di.unimi.it/BUbiNG.html', @@ -304,115 +403,163 @@ const bots = [ name: 'Butterfly Robot', category: 'Search bot', url: 'http://labs.topsy.com/butterfly', - producer: { name: 'Topsy Labs', url: 'http://labs.topsy.com' }, + producer: { + name: 'Topsy Labs', + url: 'http://labs.topsy.com', + }, }, { - regex: 'CareerBot', + includes: 'CareerBot', name: 'CareerBot', category: 'Crawler', url: 'http://www.career-x.de/bot.html', - producer: { name: 'career-x GmbH', url: 'http://www.career-x.de' }, + producer: { + name: 'career-x GmbH', + url: 'http://www.career-x.de', + }, }, { - regex: 'CCBot', + includes: 'CCBot', name: 'ccBot crawler', category: 'Crawler', url: 'http://commoncrawl.org/faq/', - producer: { name: 'reddit inc.', url: 'http://www.reddit.com' }, + producer: { + name: 'reddit inc.', + url: 'http://www.reddit.com', + }, }, { - regex: 'Cliqzbot', + includes: 'Cliqzbot', name: 'Cliqzbot', category: 
'Crawler', url: 'http://cliqz.com/company/cliqzbot', - producer: { name: '10betterpages GmbH', url: 'http://cliqz.com' }, + producer: { + name: '10betterpages GmbH', + url: 'http://cliqz.com', + }, }, { - regex: 'Cloudflare-AMP', + includes: 'Cloudflare-AMP', name: 'CloudFlare AMP Fetcher', category: 'Crawler', url: 'https://amp.cloudflare.com/doc/fetcher.html', - producer: { name: 'CloudFlare', url: 'http://www.cloudflare.com' }, + producer: { + name: 'CloudFlare', + url: 'http://www.cloudflare.com', + }, }, { regex: 'Cloudflare-?Diagnostics', name: 'Cloudflare Diagnostics', category: 'Site Monitor', url: 'https://www.cloudflare.com/', - producer: { name: 'Cloudflare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'Cloudflare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: 'CloudFlare-AlwaysOnline', + includes: 'CloudFlare-AlwaysOnline', name: 'CloudFlare Always Online', category: 'Site Monitor', url: 'https://www.cloudflare.com/always-online', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: 'Cloudflare-SSLDetector', + includes: 'Cloudflare-SSLDetector', name: 'Cloudflare SSL Detector', category: 'Site Monitor', url: 'https://developers.cloudflare.com/fundamentals/reference/cloudflare-site-crawling/', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: 'Cloudflare Custom Hostname Verification', + includes: 'Cloudflare Custom Hostname Verification', name: 'Cloudflare Custom Hostname Verification', category: 'Service Agent', url: 'https://developers.cloudflare.com/fundamentals/reference/cloudflare-site-crawling/', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: 'Cloudflare-Traffic-Manager', + includes: 'Cloudflare-Traffic-Manager', name: 'Cloudflare Traffic Manager', category: 'Site Monitor', url: 'https://developers.cloudflare.com/fundamentals/reference/cloudflare-site-crawling/', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: 'Cloudflare-Smart-Transit', + includes: 'Cloudflare-Smart-Transit', name: 'Cloudflare Smart Transit', category: 'Site Monitor', url: 'https://developers.cloudflare.com/fundamentals/reference/cloudflare-site-crawling/', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { - regex: 'CloudflareObservatory', + includes: 'CloudflareObservatory', name: 'Cloudflare Observatory', category: 'Site Monitor', url: 'https://developers.cloudflare.com/speed/speed-test/run-speed-test', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { regex: 'https://developers\\.cloudflare\\.com/security-center/', name: 'Cloudflare Security Insights', category: 'Site Monitor', url: 'https://developers.cloudflare.com/fundamentals/reference/cloudflare-site-crawling/', - producer: { name: 'CloudFlare', url: 'https://www.cloudflare.com/' }, + producer: { + name: 'CloudFlare', + url: 'https://www.cloudflare.com/', + }, }, { regex: 'coccoc\\.com', name: 'Cốc Cốc Bot', url: 
'https://help.coccoc.com/en/search-engine/coccoc-robots', category: 'Search bot', - producer: { name: 'Cốc Cốc', url: 'https://coccoc.com/' }, + producer: { + name: 'Cốc Cốc', + url: 'https://coccoc.com/', + }, }, { - regex: 'collectd', + includes: 'collectd', name: 'Collectd', url: 'https://collectd.org/', category: 'Site Monitor', - producer: { name: 'Collectd', url: 'https://collectd.org/' }, + producer: { + name: 'Collectd', + url: 'https://collectd.org/', + }, }, { - regex: 'CommaFeed', + includes: 'CommaFeed', name: 'CommaFeed', url: 'http://www.commafeed.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'CSS Certificate Spider', + includes: 'CSS Certificate Spider', name: 'CSS Certificate Spider', category: 'Crawler', url: 'http://www.css-security.com/certificatespider/', @@ -426,17 +573,23 @@ const bots = [ name: 'Datadog Agent', url: 'https://github.com/DataDog/dd-agent', category: 'Site Monitor', - producer: { name: 'Datadog', url: 'https://www.datadoghq.com/' }, + producer: { + name: 'Datadog', + url: 'https://www.datadoghq.com/', + }, }, { - regex: 'Datanyze', + includes: 'Datanyze', name: 'Datanyze', url: '', category: 'Crawler', - producer: { name: 'Datanyze', url: 'https://www.datanyze.com' }, + producer: { + name: 'Datanyze', + url: 'https://www.datanyze.com', + }, }, { - regex: 'Dataprovider', + includes: 'Dataprovider', name: 'Dataprovider', category: 'Crawler', url: '', @@ -456,18 +609,24 @@ const bots = [ }, }, { - regex: 'Dazoobot', + includes: 'Dazoobot', name: 'Dazoobot', category: 'Search bot', url: '', - producer: { name: 'DAZOO.FR', url: 'http://dazoo.fr' }, + producer: { + name: 'DAZOO.FR', + url: 'http://dazoo.fr', + }, }, { - regex: 'discobot', + includes: 'discobot', name: 'Discobot', category: 'Search bot', url: 'http://discoveryengine.com/discobot.html', - producer: { name: 'Discovery Engine', url: 'http://discoveryengine.com' }, + producer: { + name: 'Discovery Engine', + url: 'http://discoveryengine.com', + }, }, { regex: 'Domain Re-Animator Bot|support@domainreanimator\\.com', @@ -480,48 +639,66 @@ const bots = [ }, }, { - regex: 'DotBot', + includes: 'DotBot', name: 'DotBot', category: 'Crawler', url: 'http://www.opensiteexplorer.org/dotbot', - producer: { name: 'SEOmoz, Inc.', url: 'http://moz.com/' }, + producer: { + name: 'SEOmoz, Inc.', + url: 'http://moz.com/', + }, }, { regex: 'DuckDuck(?:Go-Favicons-)?Bot', name: 'DuckDuckBot', category: 'Search bot', url: 'https://duckduckgo.com/duckduckgo-help-pages/results/duckduckbot/', - producer: { name: 'DuckDuckGo', url: 'https://duckduckgo.com/' }, + producer: { + name: 'DuckDuckGo', + url: 'https://duckduckgo.com/', + }, }, { - regex: 'DuckAssistBot', + includes: 'DuckAssistBot', name: 'DuckAssistBot', category: 'Search bot', url: 'https://duckduckgo.com/duckduckgo-help-pages/results/duckassistbot/', - producer: { name: 'DuckDuckGo', url: 'https://duckduckgo.com/' }, + producer: { + name: 'DuckDuckGo', + url: 'https://duckduckgo.com/', + }, }, { - regex: 'EasouSpider', + includes: 'EasouSpider', name: 'Easou Spider', category: 'Search bot', url: 'http://www.easou.com/search/spider.html', - producer: { name: 'easou ICP', url: 'http://www.easou.com' }, + producer: { + name: 'easou ICP', + url: 'http://www.easou.com', + }, }, { - regex: 'eCairn-Grabber', + includes: 'eCairn-Grabber', name: 'eCairn-Grabber', category: 'Crawler', - producer: { name: 'eCairn', url: 'https://ecairn.com' }, + producer: { + name: 'eCairn', + url: 
'https://ecairn.com', + }, }, { - regex: 'EMail Exractor', + includes: 'EMail Exractor', name: 'EMail Exractor', category: 'Crawler', url: '', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'evc-batch', + includes: 'evc-batch', name: 'evc-batch', category: 'Crawler', url: '', @@ -535,10 +712,13 @@ const bots = [ name: 'ExaBot', category: 'Crawler', url: 'http://www.exabot.com/go/robot', - producer: { name: 'Dassault Systèmes', url: 'http://www.3ds.com' }, + producer: { + name: 'Dassault Systèmes', + url: 'http://www.3ds.com', + }, }, { - regex: 'ExactSeek Crawler', + includes: 'ExactSeek Crawler', name: 'ExactSeek Crawler', category: 'Search bot', url: 'http://www.exactseek.com', @@ -548,56 +728,97 @@ const bots = [ }, }, { - regex: 'Ezooms', + includes: 'Ezooms', name: 'Ezooms', category: 'Crawler', url: '', - producer: { name: 'SEOmoz, Inc.', url: 'http://moz.com/' }, + producer: { + name: 'SEOmoz, Inc.', + url: 'http://moz.com/', + }, }, { regex: 'facebook(?:catalog|externalhit|externalua|platform|scraper)', name: 'Facebook Crawler', category: 'Social Media Agent', url: 'https://developers.facebook.com/docs/sharing/webmasters/web-crawlers', - producer: { name: 'Meta Platforms, Inc.', url: 'https://www.meta.com/' }, + producer: { + name: 'Meta Platforms, Inc.', + url: 'https://www.meta.com/', + }, }, { - regex: 'meta-externalagent', + includes: 'meta-externalagent', name: 'Meta-ExternalAgent', category: 'Crawler', url: 'https://developers.facebook.com/docs/sharing/webmasters/web-crawlers', - producer: { name: 'Meta Platforms, Inc.', url: 'https://www.meta.com/' }, + producer: { + name: 'Meta Platforms, Inc.', + url: 'https://www.meta.com/', + }, }, { - regex: 'meta-externalfetcher', + includes: 'meta-externalfetcher', name: 'Meta-ExternalFetcher', category: 'Social Media Agent', url: 'https://developers.facebook.com/docs/sharing/webmasters/web-crawlers', - producer: { name: 'Meta Platforms, Inc.', url: 'https://www.meta.com/' }, + producer: { + name: 'Meta Platforms, Inc.', + url: 'https://www.meta.com/', + }, + }, + { + includes: 'meta-webindexer', + name: 'Meta-WebIndexer', + category: 'Social Media Agent', + url: 'https://developers.facebook.com/docs/sharing/webmasters/web-crawlers', + producer: { + name: 'Meta Platforms, Inc.', + url: 'https://www.meta.com/', + }, + }, + { + includes: 'meta-externalads', + name: 'Meta-ExternalAds', + category: 'Social Media Agent', + url: 'https://developers.facebook.com/docs/sharing/webmasters/web-crawlers', + producer: { + name: 'Meta Platforms, Inc.', + url: 'https://www.meta.com/', + }, }, { - regex: 'FacebookBot', + includes: 'FacebookBot', name: 'FacebookBot', category: 'Crawler', url: 'https://developers.facebook.com/docs/sharing/bot', - producer: { name: 'Meta Platforms, Inc.', url: 'https://www.meta.com/' }, + producer: { + name: 'Meta Platforms, Inc.', + url: 'https://www.meta.com/', + }, }, { - regex: 'Feedbin', + includes: 'Feedbin', name: 'Feedbin', url: 'http://feedbin.com/', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'FeedBurner', + includes: 'FeedBurner', name: 'FeedBurner', url: 'http://www.feedburner.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Feed Wrangler', + includes: 'Feed Wrangler', name: 'Feed Wrangler', url: 'https://feedwrangler.net/', category: 'Feed Fetcher', @@ -607,212 +828,301 @@ const bots = [ }, }, { - regex: 'Feedly', + 
includes: 'Feedly', name: 'Feedly', url: 'http://www.feedly.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Feedspot', + includes: 'Feedspot', name: 'Feedspot', url: 'http://www.feedspot.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Fever/', + includes: 'Fever/', name: 'Fever', url: 'http://feedafever.com/', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { regex: 'FlipboardProxy|FlipboardRSS', name: 'Flipboard', url: 'http://flipboard.com/browserproxy', category: 'Feed Fetcher', - producer: { name: 'Flipboard', url: 'http://flipboard.com/' }, + producer: { + name: 'Flipboard', + url: 'http://flipboard.com/', + }, }, { - regex: 'Findxbot', + includes: 'Findxbot', name: 'Findxbot', category: 'Crawler', url: 'http://www.findxbot.com', }, { - regex: 'FreshRSS', + includes: 'FreshRSS', name: 'FreshRSS', category: 'Feed Fetcher', url: 'https://freshrss.org/', }, { - regex: 'Genieo', + includes: 'Genieo', name: 'Genieo Web filter', category: '', url: 'http://www.genieo.com/webfilter.html', - producer: { name: 'Genieo', url: 'http://www.genieo.com' }, + producer: { + name: 'Genieo', + url: 'http://www.genieo.com', + }, }, { - regex: 'GigablastOpenSource', + includes: 'GigablastOpenSource', name: 'Gigablast', category: 'Search bot', url: 'https://github.com/gigablast/open-source-search-engine', - producer: { name: 'Matt Wells', url: 'http://www.gigablast.com/faq.html' }, + producer: { + name: 'Matt Wells', + url: 'http://www.gigablast.com/faq.html', + }, }, { - regex: 'Gluten Free Crawler', + includes: 'Gluten Free Crawler', name: 'Gluten Free Crawler', category: 'Crawler', url: 'http://glutenfreepleasure.com/', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'gobuster', + includes: 'gobuster', name: 'Gobuster', url: 'https://github.com/OJ/gobuster', }, { - regex: 'ichiro/mobile goo', + includes: 'ichiro/mobile goo', name: 'Goo', category: 'Search bot', url: 'http://search.goo.ne.jp/option/use/sub4/sub4-1', - producer: { name: 'NTT Resonant', url: 'http://goo.ne.jp' }, + producer: { + name: 'NTT Resonant', + url: 'http://goo.ne.jp', + }, }, - { regex: 'Storebot-Google', name: 'Google StoreBot', category: 'Crawler' }, - { regex: 'Google Favicon', name: 'Google Favicon', category: 'Crawler' }, { - regex: 'Google Search Console', + includes: 'Storebot-Google', + name: 'Google StoreBot', + category: 'Crawler', + }, + { + includes: 'Google Favicon', + name: 'Google Favicon', + category: 'Crawler', + }, + { + includes: 'Google Search Console', name: 'Google Search Console', category: 'Crawler', url: 'https://search.google.com/search-console/about', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'Google Page Speed Insights', + includes: 'Google Page Speed Insights', name: 'Google PageSpeed Insights', category: 'Site Monitor', url: 'http://developers.google.com/speed/pagespeed/insights/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'google_partner_monitoring', + includes: 'google_partner_monitoring', name: 'Google Partner Monitoring', category: 'Site Monitor', url: '', - producer: { name: 'Google Inc.', url: 
'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'Google-Cloud-Scheduler', + includes: 'Google-Cloud-Scheduler', name: 'Google Cloud Scheduler', category: 'Crawler', url: 'https://cloud.google.com/scheduler', - producer: { name: 'Google Inc.', url: 'https://www.google.com' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com', + }, }, { - regex: 'Google-Structured-Data-Testing-Tool', + includes: 'Google-Structured-Data-Testing-Tool', name: 'Google Structured Data Testing Tool', category: 'Validator', url: 'https://search.google.com/structured-data/testing-tool', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'GoogleStackdriverMonitoring', + includes: 'GoogleStackdriverMonitoring', name: 'Google Stackdriver Monitoring', category: 'Site Monitor', url: 'https://cloud.google.com/monitoring', - producer: { name: 'Google Inc.', url: 'https://www.google.com' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com', + }, }, { - regex: 'Google-Transparency-Report', + includes: 'Google-Transparency-Report', name: 'Google Transparency Report', category: 'Site Monitor', url: 'https://transparencyreport.google.com/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'Google-CloudVertexBot', + includes: 'Google-CloudVertexBot', name: 'Google-CloudVertexBot', category: 'Crawler', url: 'https://developers.google.com/search/docs/crawling-indexing/google-common-crawlers#google-cloudvertexbot', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { regex: 'via ggpht\\.com GoogleImageProxy', name: 'Gmail Image Proxy', category: 'Crawler', url: '', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'Google-Document-Conversion', + includes: 'Google-Document-Conversion', name: 'Google-Document-Conversion', category: 'Service Agent', url: 'https://support.google.com/drive/answer/176692?hl=en', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'GoogleDocs; apps-spreadsheets', + includes: 'GoogleDocs; apps-spreadsheets', name: 'Google Sheets', category: 'Service Agent', url: 'https://workspace.google.com/products/sheets/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'GoogleDocs; apps-presentations', + includes: 'GoogleDocs; apps-presentations', name: 'Google Slides', category: 'Service Agent', url: 'https://workspace.google.com/products/slides/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'GoogleDocs;', + includes: 'GoogleDocs;', name: 'Google Docs', category: 'Service Agent', url: 'https://docs.google.com/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'SeznamEmailProxy', + includes: 'SeznamEmailProxy', name: 'Seznam Email Proxy', category: 'Crawler', 
url: '', - producer: { name: 'Seznam.cz, a.s.', url: 'http://www.seznam.cz/' }, + producer: { + name: 'Seznam.cz, a.s.', + url: 'http://www.seznam.cz/', + }, }, { - regex: 'Seznam-Zbozi-robot', + includes: 'Seznam-Zbozi-robot', name: 'Seznam Zbozi.cz', category: 'Crawler', url: '', - producer: { name: 'Seznam.cz, a.s.', url: 'https://www.zbozi.cz/' }, + producer: { + name: 'Seznam.cz, a.s.', + url: 'https://www.zbozi.cz/', + }, }, { - regex: 'Heurekabot-Feed', + includes: 'Heurekabot-Feed', name: 'Heureka Feed', category: 'Crawler', url: 'https://sluzby.heureka.cz/napoveda/heurekabot/', - producer: { name: 'Heureka.cz, a.s.', url: 'https://www.heureka.cz/' }, + producer: { + name: 'Heureka.cz, a.s.', + url: 'https://www.heureka.cz/', + }, }, { - regex: 'ShopAlike', + includes: 'ShopAlike', name: 'ShopAlike', category: 'Crawler', url: '', - producer: { name: 'Visual Meta', url: 'https://www.shopalike.cz/' }, + producer: { + name: 'Visual Meta', + url: 'https://www.shopalike.cz/', + }, }, { regex: 'deepcrawl\\.com', name: 'Lumar', category: 'Crawler', url: 'https://deepcrawl.com/bot', - producer: { name: 'Lumar', url: 'https://www.lumar.io/' }, + producer: { + name: 'Lumar', + url: 'https://www.lumar.io/', + }, }, { - regex: 'Googlebot-News', + includes: 'Googlebot-News', name: 'Googlebot News', category: 'Search bot', url: 'https://developers.google.com/search/docs/crawling-indexing/overview-google-crawlers', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { regex: @@ -820,41 +1130,59 @@ const bots = [ name: 'Googlebot', category: 'Search bot', url: 'https://developers.google.com/search/docs/crawling-indexing/overview-google-crawlers', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { regex: '^Google$', name: 'Googlebot', category: 'Search bot', url: 'https://developers.google.com/search/docs/crawling-indexing/overview-google-crawlers', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'Google-Safety', + includes: 'Google-Safety', name: 'Google-Safety', category: 'Crawler', url: 'https://developers.google.com/search/docs/crawling-indexing/google-special-case-crawlers', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'DuplexWeb-Google', + includes: 'DuplexWeb-Google', name: 'DuplexWeb-Google', category: 'Crawler', url: 'https://developers.google.com/search/docs/crawling-indexing/google-special-case-crawlers', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'Google-Area120-PrivacyPolicyFetcher', + includes: 'Google-Area120-PrivacyPolicyFetcher', name: 'Google Area 120 Privacy Policy Fetcher', category: 'Crawler', url: 'https://area120.google.com/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'HubSpot ', + includes: 'HubSpot ', name: 'HubSpot', category: 'Crawler', - producer: { name: 'HubSpot Inc.', url: 'https://www.hubspot.com' }, + producer: { + name: 'HubSpot Inc.', + url: 'https://www.hubspot.com', + }, }, { regex: 'vuhuv(?:Bot|RBT)', @@ -863,18 +1191,24 
@@ const bots = [ url: 'https://vuhuv.com/bot.html', }, { - regex: 'HTTPMon', + includes: 'HTTPMon', name: 'HTTPMon', category: 'Site Monitor', url: 'http://www.httpmon.com', - producer: { name: 'towards GmbH', url: 'http://www.towards.ch/' }, + producer: { + name: 'towards GmbH', + url: 'http://www.towards.ch/', + }, }, { - regex: 'ICC-Crawler', + includes: 'ICC-Crawler', name: 'ICC-Crawler', category: 'Crawler', url: 'http://www.nict.go.jp/en/univ-com/plan/crawl.html', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { regex: 'inoreader\\.com', @@ -883,7 +1217,7 @@ const bots = [ url: 'https://www.inoreader.com', }, { - regex: 'iisbot', + includes: 'iisbot', name: 'IIS Site Analysis', category: 'Crawler', url: 'http://www.iis.net/iisbot.html', @@ -893,32 +1227,48 @@ const bots = [ }, }, { - regex: 'ips-agent', + includes: 'ips-agent', name: 'IPS Agent', category: 'Crawler', - producer: { name: 'VeriSign, Inc', url: 'http://www.verisign.com/' }, + producer: { + name: 'VeriSign, Inc', + url: 'http://www.verisign.com/', + }, }, { regex: 'IP-Guide\\.com', name: 'IP-Guide Crawler', category: 'Crawler', url: '', - producer: { name: '', url: 'https://ip-guide.com' }, + producer: { + name: '', + url: 'https://ip-guide.com', + }, + }, + { + includes: 'k6/', + name: 'K6', + url: 'https://k6.io/', }, - { regex: 'k6/', name: 'K6', url: 'https://k6.io/' }, { - regex: 'kouio', + includes: 'kouio', name: 'Kouio', url: 'http://kouio.com/', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'larbin', + includes: 'larbin', name: 'Larbin web crawler', category: 'Crawler', url: 'http://larbin.sourceforge.net', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { regex: '[A-z0-9]*-Lighthouse', @@ -935,56 +1285,80 @@ const bots = [ name: 'LastMod Bot', category: 'Site Monitor', url: 'https://last-modified.com/en/about', - producer: { name: '', url: 'https://last-modified.com/en' }, + producer: { + name: '', + url: 'https://last-modified.com/en', + }, }, { regex: 'linkdexbot|linkdex\\.com', name: 'Linkdex Bot', category: 'Search bot', url: 'http://www.linkdex.com/bots', - producer: { name: 'Mojeek Ltd.', url: 'http://www.mojeek.com' }, + producer: { + name: 'Mojeek Ltd.', + url: 'http://www.mojeek.com', + }, }, { - regex: 'LinkedInBot', + includes: 'LinkedInBot', name: 'LinkedIn Bot', category: 'Social Media Agent', url: 'http://www.linkedin.com', - producer: { name: 'LinkedIn', url: 'http://www.linkedin.com' }, + producer: { + name: 'LinkedIn', + url: 'http://www.linkedin.com', + }, }, { - regex: 'ltx71', + includes: 'ltx71', name: 'LTX71', category: 'Security Checker', url: 'https://ltx71.com/', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { regex: 'Mail\\.RU', name: 'Mail.Ru Bot', category: 'Search bot', url: 'http://help.mail.ru/webmaster/indexing/robots/types_robots', - producer: { name: 'Mail.Ru Group', url: 'http://corp.mail.ru' }, + producer: { + name: 'Mail.Ru Group', + url: 'http://corp.mail.ru', + }, }, { - regex: 'magpie-crawler', + includes: 'magpie-crawler', name: 'Magpie-Crawler', category: 'Social Media Agent', url: 'http://www.brandwatch.com/magpie-crawler/', - producer: { name: 'Brandwatch', url: 'http://www.brandwatch.com' }, + producer: { + name: 'Brandwatch', + url: 'http://www.brandwatch.com', + }, }, { - regex: 'MagpieRSS', + includes: 'MagpieRSS', name: 'MagpieRSS', url: 'http://magpierss.sourceforge.net/', category: 'Feed 
Parser', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'masscan-ng', + includes: 'masscan-ng', name: 'masscan-ng', url: 'https://github.com/bi-zone/masscan-ng', category: 'Crawler', - producer: { name: 'BIZON, OOO', url: 'https://bi.zone/' }, + producer: { + name: 'BIZON, OOO', + url: 'https://bi.zone/', + }, }, { regex: '.*masscan', @@ -996,71 +1370,99 @@ const bots = [ url: 'https://github.com/robertdavidgraham', }, }, - { regex: 'Mastodon/', name: 'Mastodon Bot', category: 'Social Media Agent' }, { - regex: 'meanpathbot', + includes: 'Mastodon/', + name: 'Mastodon Bot', + category: 'Social Media Agent', + }, + { + includes: 'meanpathbot', name: 'Meanpath Bot', category: 'Search bot', url: 'http://www.meanpath.com/meanpathbot.html', - producer: { name: 'Meanpath', url: 'http://www.meanpath.com' }, + producer: { + name: 'Meanpath', + url: 'http://www.meanpath.com', + }, }, { - regex: 'MetaJobBot', + includes: 'MetaJobBot', name: 'MetaJobBot', category: 'Crawler', url: 'http://www.metajob.at/the/crawler', - producer: { name: 'MetaJob', url: 'http://www.metajob.at' }, + producer: { + name: 'MetaJob', + url: 'http://www.metajob.at', + }, }, { - regex: 'MetaInspector', + includes: 'MetaInspector', name: 'MetaInspector', category: 'Crawler', url: 'https://github.com/jaimeiniesta/metainspector', }, { - regex: 'MixrankBot', - name: 'Mixrank Bot', + includes: 'MixrankBot', + name: 'MixRank Bot', category: 'Crawler', url: 'http://mixrank.com', - producer: { name: 'Online Media Group, Inc.', url: '' }, + producer: { + name: 'Online Media Group, Inc.', + url: '', + }, }, { - regex: 'MJ12bot', + includes: 'MJ12bot', name: 'MJ12 Bot', category: 'Search bot', url: 'http://majestic12.co.uk/bot.php', - producer: { name: 'Majestic-12', url: 'http://majestic12.co.uk' }, + producer: { + name: 'Majestic-12', + url: 'http://majestic12.co.uk', + }, }, { - regex: 'Mnogosearch', + includes: 'Mnogosearch', name: 'Mnogosearch', category: 'Search bot', url: 'http://www.mnogosearch.org/', - producer: { name: 'Lavtech.Com Corp.', url: '' }, + producer: { + name: 'Lavtech.Com Corp.', + url: '', + }, }, { - regex: 'MojeekBot', + includes: 'MojeekBot', name: 'MojeekBot', category: 'Search bot', url: 'http://www.mojeek.com/bot.html', - producer: { name: 'Mojeek Ltd.', url: 'http://www.mojeek.com' }, + producer: { + name: 'Mojeek Ltd.', + url: 'http://www.mojeek.com', + }, }, { - regex: 'munin', + includes: 'munin', name: 'Munin', category: 'Site Monitor', url: 'http://munin-monitoring.org/', - producer: { name: 'Munin', url: 'http://munin-monitoring.org/' }, + producer: { + name: 'Munin', + url: 'http://munin-monitoring.org/', + }, }, { - regex: 'NalezenCzBot', + includes: 'NalezenCzBot', name: 'NalezenCzBot', category: 'Crawler', url: 'http://www.nalezen.cz/about-crawler', - producer: { name: 'Jaroslav Kuboš', url: '' }, + producer: { + name: 'Jaroslav Kuboš', + url: '', + }, }, { - regex: 'check_http/v', + includes: 'check_http/v', name: 'Nagios check_http', category: 'Site Monitor', url: 'https://nagios.org', @@ -1079,94 +1481,136 @@ const bots = [ name: 'Netcraft Survey Bot', category: 'Search bot', url: '', - producer: { name: 'Netcraft', url: 'http://www.netcraft.com' }, + producer: { + name: 'Netcraft', + url: 'http://www.netcraft.com', + }, }, { - regex: 'netEstate NE Crawler', + includes: 'netEstate NE Crawler', name: 'netEstate', category: 'Crawler', url: 'http://www.website-datenbank.de/Impressum', - producer: { name: 'netEstate GmbH', url: 
'https://www.netestate.de/en/' }, + producer: { + name: 'netEstate GmbH', + url: 'https://www.netestate.de/en/', + }, }, { - regex: 'Netvibes', + includes: 'Netvibes', name: 'Netvibes', url: 'http://www.netvibes.com/', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { regex: 'NewsBlur .*(?:Fetcher|Finder)', name: 'NewsBlur', url: 'http://www.newsblur.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'NewsGatorOnline', + includes: 'NewsGatorOnline', name: 'NewsGator', url: 'http://www.newsgator.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'nlcrawler', + includes: 'nlcrawler', name: 'NLCrawler', category: 'Crawler', url: '', - producer: { name: 'Northern Light', url: 'http://northernlight.com' }, + producer: { + name: 'Northern Light', + url: 'http://northernlight.com', + }, }, { - regex: 'Nmap Scripting Engine', + includes: 'Nmap Scripting Engine', name: 'Nmap', category: 'Security Checker', url: 'https://nmap.org/book/nse.html', - producer: { name: 'Nmap', url: 'https://nmap.org/' }, + producer: { + name: 'Nmap', + url: 'https://nmap.org/', + }, }, { - regex: 'Nuzzel', + includes: 'Nuzzel', name: 'Nuzzel', category: 'Crawler', - producer: { name: 'Nuzzel', url: 'https://www.nuzzel.com/' }, + producer: { + name: 'Nuzzel', + url: 'https://www.nuzzel.com/', + }, }, { - regex: 'NodePing', + includes: 'NodePing', name: 'NodePing', category: 'Site Monitor', url: 'https://nodeping.com', - producer: { name: 'NodePing', url: 'https://nodeping.com' }, + producer: { + name: 'NodePing', + url: 'https://nodeping.com', + }, + }, + { + regex: 'Octopus [\\d.]+', + name: 'Octopus', }, - { regex: 'Octopus [\\d.]+', name: 'Octopus' }, { regex: 'OnlineOrNot\\.com_bot', name: 'OnlineOrNot Bot', category: 'Site Monitor', url: 'https://onlineornot.com/website-monitoring', - producer: { name: 'OnlineOrNot', url: 'https://onlineornot.com' }, + producer: { + name: 'OnlineOrNot', + url: 'https://onlineornot.com', + }, }, { - regex: 'omgili', + includes: 'omgili', name: 'Omgili bot', category: 'Search bot', url: 'http://www.omgili.com/Crawler.html', - producer: { name: 'Omgili', url: 'http://www.omgili.com' }, + producer: { + name: 'Omgili', + url: 'http://www.omgili.com', + }, }, { - regex: 'OpenindexSpider', + includes: 'OpenindexSpider', name: 'Openindex Spider', category: 'Search bot', url: 'http://www.openindex.io/en/webmasters/spider.html', - producer: { name: 'Openindex B.V.', url: 'http://www.openindex.io' }, + producer: { + name: 'Openindex B.V.', + url: 'http://www.openindex.io', + }, }, { - regex: 'spbot', + includes: 'spbot', name: 'OpenLinkProfiler', category: 'Crawler', url: 'http://openlinkprofiler.org/bot', - producer: { name: 'Axandra GmbH', url: 'http://www.axandra.com' }, + producer: { + name: 'Axandra GmbH', + url: 'http://www.axandra.com', + }, }, { - regex: 'OpenWebSpider', + includes: 'OpenWebSpider', name: 'OpenWebSpider', category: 'Crawler', url: 'http://www.openwebspider.org', @@ -1180,23 +1624,29 @@ const bots = [ name: 'Orange Bot', category: 'Search bot', url: 'http://lemoteur.orange.fr', - producer: { name: 'Orange', url: 'http://www.orange.fr' }, + producer: { + name: 'Orange', + url: 'http://www.orange.fr', + }, }, { - regex: 'PaperLiBot', + includes: 'PaperLiBot', name: 'PaperLiBot', category: 'Search bot', url: 'http://support.paper.li/entries/20023257-what-is-paper-li', - 
producer: { name: 'Smallrivers SA', url: 'http://www.paper.li' }, + producer: { + name: 'Smallrivers SA', + url: 'http://www.paper.li', + }, }, { - regex: 'phantomas/', + includes: 'phantomas/', name: 'Phantomas', category: 'Site Monitor', url: 'https://github.com/macbre/phantomas', }, { - regex: 'phpservermon', + includes: 'phpservermon', name: 'PHP Server Monitor', category: 'Site Monitor', url: 'https://github.com/phpservermon/phpservermon', @@ -1210,120 +1660,165 @@ const bots = [ name: 'Pocket', category: 'Read-it-later Service', url: 'https://getpocket.com/pocketparser_ua', - producer: { name: 'Read It Later, Inc.', url: 'https://getpocket.com/' }, + producer: { + name: 'Read It Later, Inc.', + url: 'https://getpocket.com/', + }, }, { - regex: 'PritTorrent', + includes: 'PritTorrent', name: 'PritTorrent', category: 'Crawler', url: 'https://github.com/astro/prittorrent', - producer: { name: 'Bitlove', url: 'http://bitlove.org/' }, + producer: { + name: 'Bitlove', + url: 'http://bitlove.org/', + }, }, { - regex: 'PRTG Network Monitor', + includes: 'PRTG Network Monitor', name: 'PRTG Network Monitor', category: 'Network Monitor', url: 'https://www.paessler.com/prtg', - producer: { name: 'Paessler AG', url: 'https://www.paessler.com' }, + producer: { + name: 'Paessler AG', + url: 'https://www.paessler.com', + }, }, { - regex: 'psbot', + includes: 'psbot', name: 'Picsearch bot', category: 'Search bot', url: 'http://www.picsearch.com/bot.html', - producer: { name: 'Picsearch', url: 'http://www.picsearch.com' }, + producer: { + name: 'Picsearch', + url: 'http://www.picsearch.com', + }, }, { regex: 'Pingdom(?:\\.com|TMS)', name: 'Pingdom Bot', category: 'Site Monitor', url: '', - producer: { name: 'Pingdom AB', url: 'https://www.pingdom.com' }, + producer: { + name: 'Pingdom AB', + url: 'https://www.pingdom.com', + }, }, { - regex: 'Quora Link Preview', + includes: 'Quora Link Preview', name: 'Quora Link Preview', category: 'Crawler', url: '', - producer: { name: 'Quora', url: 'http://www.quora.com' }, + producer: { + name: 'Quora', + url: 'http://www.quora.com', + }, }, { - regex: 'Quora-Bot', + includes: 'Quora-Bot', name: 'Quora Bot', category: 'Crawler', url: '', - producer: { name: 'Quora', url: 'https://www.quora.com/' }, + producer: { + name: 'Quora', + url: 'https://www.quora.com/', + }, }, { - regex: 'RamblerMail', + includes: 'RamblerMail', name: 'RamblerMail Image Proxy', category: 'Crawler', url: '', - producer: { name: 'Rambler&Co', url: 'https://rambler-co.ru/' }, + producer: { + name: 'Rambler&Co', + url: 'https://rambler-co.ru/', + }, }, { - regex: 'QuerySeekerSpider', + includes: 'QuerySeekerSpider', name: 'QuerySeekerSpider', category: 'Crawler', url: 'http://queryseeker.com/bot.html', - producer: { name: 'QueryEye Inc.', url: 'http://queryeye.com' }, + producer: { + name: 'QueryEye Inc.', + url: 'http://queryeye.com', + }, }, { regex: 'Qwantify|Qwantbot', name: 'Qwantbot', category: 'Crawler', url: 'https://help.qwant.com/bot/', - producer: { name: 'Qwant Corporation', url: 'https://www.qwant.com/' }, + producer: { + name: 'Qwant Corporation', + url: 'https://www.qwant.com/', + }, }, { - regex: 'Rainmeter', + includes: 'Rainmeter', name: 'Rainmeter', category: 'Crawler', url: 'https://www.rainmeter.net', }, { - regex: 'redditbot', + includes: 'redditbot', name: 'Reddit Bot', category: 'Social Media Agent', url: 'http://www.reddit.com/feedback', - producer: { name: 'reddit inc.', url: 'http://www.reddit.com' }, + producer: { + name: 'reddit inc.', + url: 
'http://www.reddit.com', + }, }, { - regex: 'Riddler', + includes: 'Riddler', name: 'Riddler', category: 'Security search bot', url: 'https://riddler.io/about', - producer: { name: 'F-Secure', url: 'https://www.f-secure.com' }, + producer: { + name: 'F-Secure', + url: 'https://www.f-secure.com', + }, }, { - regex: 'rogerbot', + includes: 'rogerbot', name: 'Rogerbot', category: 'Crawler', url: 'http://moz.com/help/pro/what-is-rogerbot-', - producer: { name: 'SEOmoz, Inc.', url: 'http://moz.com/' }, + producer: { + name: 'SEOmoz, Inc.', + url: 'http://moz.com/', + }, }, { - regex: 'ROI Hunter', + includes: 'ROI Hunter', name: 'ROI Hunter', category: 'Crawler', url: '', - producer: { name: 'Roihunter a.s.', url: 'http://roihunter.com/' }, + producer: { + name: 'Roihunter a.s.', + url: 'http://roihunter.com/', + }, }, { - regex: 'SafeDNSBot', + includes: 'SafeDNSBot', name: 'SafeDNSBot', category: 'Crawler', url: 'https://www.safedns.com/searchbot', - producer: { name: 'SafeDNS, Inc.', url: 'https://www.safedns.com/' }, + producer: { + name: 'SafeDNS, Inc.', + url: 'https://www.safedns.com/', + }, }, { - regex: 'Scrapy', + includes: 'Scrapy', name: 'Scrapy', category: 'Crawler', url: 'http://scrapy.org', }, { - regex: 'Screaming Frog SEO Spider', + includes: 'Screaming Frog SEO Spider', name: 'Screaming Frog SEO Spider', category: 'Crawler', url: 'http://www.screamingfrog.co.uk/seo-spider', @@ -1333,60 +1828,84 @@ const bots = [ }, }, { - regex: 'ScreenerBot', + includes: 'ScreenerBot', name: 'ScreenerBot', category: 'Crawler', url: 'http://www.screenerbot.com', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'SemrushBot', + includes: 'SemrushBot', name: 'SemrushBot', category: 'Crawler', url: 'https://www.semrush.com/bot/', - producer: { name: 'Semrush Inc.', url: 'https://www.semrush.com/' }, + producer: { + name: 'Semrush Inc.', + url: 'https://www.semrush.com/', + }, }, { - regex: 'BacklinksExtendedBot', + includes: 'BacklinksExtendedBot', name: 'BacklinksExtendedBot', category: 'Crawler', url: 'https://www.semrush.com/bot/', - producer: { name: 'Semrush Inc.', url: 'https://www.semrush.com/' }, + producer: { + name: 'Semrush Inc.', + url: 'https://www.semrush.com/', + }, }, { - regex: 'SerpReputationManagementAgent', + includes: 'SerpReputationManagementAgent', name: 'Semrush Reputation Management', category: 'Service Agent', url: 'https://www.semrush.com/bot/', - producer: { name: 'Semrush Inc.', url: 'https://www.semrush.com/' }, + producer: { + name: 'Semrush Inc.', + url: 'https://www.semrush.com/', + }, }, { - regex: 'SplitSignalBot', + includes: 'SplitSignalBot', name: 'SplitSignalBot', category: 'Crawler', url: 'https://www.semrush.com/bot/', - producer: { name: 'Semrush Inc.', url: 'https://www.semrush.com/' }, + producer: { + name: 'Semrush Inc.', + url: 'https://www.semrush.com/', + }, }, { - regex: 'SiteAuditBot', + includes: 'SiteAuditBot', name: 'SiteAuditBot', category: 'Crawler', url: 'https://www.semrush.com/bot/', - producer: { name: 'Semrush Inc.', url: 'https://www.semrush.com/' }, + producer: { + name: 'Semrush Inc.', + url: 'https://www.semrush.com/', + }, }, { - regex: 'SensikaBot', + includes: 'SensikaBot', name: 'Sensika Bot', category: '', url: '', - producer: { name: 'Sensika', url: 'http://sensika.com' }, + producer: { + name: 'Sensika', + url: 'http://sensika.com', + }, }, { regex: 'SEOENG(?:World)?Bot', name: 'SEOENGBot', category: 'Crawler', url: 'http://www.seoengine.com/seoengbot.htm', - producer: { name: 'SEO 
Engine', url: 'http://www.seoengine.com' }, + producer: { + name: 'SEO Engine', + url: 'http://www.seoengine.com', + }, }, { regex: 'seoscanners\\.net', @@ -1395,7 +1914,7 @@ const bots = [ url: '', }, { - regex: 'SkypeUriPreview', + includes: 'SkypeUriPreview', name: 'Skype URI Preview', category: 'Service Agent', url: '', @@ -1409,52 +1928,73 @@ const bots = [ name: 'Seznam Bot', category: 'Search bot', url: 'http://www.mapy.cz/cz/seznambot.html', - producer: { name: 'Seznam.cz, a.s.', url: 'http://www.seznam.cz/' }, + producer: { + name: 'Seznam.cz, a.s.', + url: 'http://www.seznam.cz/', + }, }, { - regex: 'shopify-partner-homepage-scraper', + includes: 'shopify-partner-homepage-scraper', name: 'Shopify Partner', category: 'Crawler', url: 'https://www.shopify.com/partners', - producer: { name: 'Shopify', url: 'https://www.shopify.com/' }, + producer: { + name: 'Shopify', + url: 'https://www.shopify.com/', + }, }, { - regex: 'ShopWiki', + includes: 'ShopWiki', name: 'ShopWiki', category: 'Search tools', url: 'http://www.shopwiki.com/wiki/Help:Bot', - producer: { name: 'ShopWiki Corp.', url: 'http://www.shopwiki.com' }, + producer: { + name: 'ShopWiki Corp.', + url: 'http://www.shopwiki.com', + }, }, { - regex: 'SilverReader', + includes: 'SilverReader', name: 'SilverReader', url: 'http://silverreader.com', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'SimplePie', + includes: 'SimplePie', name: 'SimplePie', url: 'http://www.simplepie.org', category: 'Feed Parser', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'SISTRIX Crawler', + includes: 'SISTRIX Crawler', name: 'SISTRIX Crawler', category: 'Crawler', url: 'http://crawler.sistrix.net', - producer: { name: 'SISTRIX GmbH', url: 'http://www.sistrix.de' }, + producer: { + name: 'SISTRIX GmbH', + url: 'http://www.sistrix.de', + }, }, { regex: 'compatible; (?:SISTRIX )?Optimizer', name: 'SISTRIX Optimizer', category: 'Crawler', url: 'https://optimizer.sistrix.com', - producer: { name: 'SISTRIX GmbH', url: 'http://www.sistrix.de' }, + producer: { + name: 'SISTRIX GmbH', + url: 'http://www.sistrix.de', + }, }, { - regex: 'SiteSucker', + includes: 'SiteSucker', name: 'SiteSucker', category: 'Crawler', url: 'http://ricks-apps.com/osx/sitesucker/', @@ -1464,14 +2004,20 @@ const bots = [ name: 'Sixy.ch', category: 'Site Monitor', url: 'http://sixy.ch', - producer: { name: 'Manuel Kasper', url: 'https://neon1.net/' }, + producer: { + name: 'Manuel Kasper', + url: 'https://neon1.net/', + }, }, { regex: 'Slackbot|Slack-ImgProxy', name: 'Slackbot', category: 'Crawler', url: 'https://api.slack.com/robots', - producer: { name: 'Slack Technologies', url: 'http://slack.com' }, + producer: { + name: 'Slack Technologies', + url: 'http://slack.com', + }, }, { regex: @@ -1479,24 +2025,33 @@ const bots = [ name: 'Sogou Spider', category: 'Search bot', url: 'http://www.sogou.com/docs/help/webmasters.htm', - producer: { name: 'Sohu, Inc.', url: 'http://www.sogou.com' }, + producer: { + name: 'Sohu, Inc.', + url: 'http://www.sogou.com', + }, }, { regex: 'Sosospider|Sosoimagespider', name: 'Soso Spider', category: 'Search bot', url: 'http://help.soso.com/webspider.htm', - producer: { name: 'Tencent Holdings', url: 'http://www.soso.com' }, + producer: { + name: 'Tencent Holdings', + url: 'http://www.soso.com', + }, }, { - regex: 'Sprinklr', + includes: 'Sprinklr', name: 'Sprinklr', category: 'Crawler', url: '', - producer: { name: 'Sprinklr, Inc.', 
url: 'https://www.sprinklr.com/' }, + producer: { + name: 'Sprinklr, Inc.', + url: 'https://www.sprinklr.com/', + }, }, { - regex: 'SSL Labs', + includes: 'SSL Labs', name: 'SSL Labs', category: 'Validator', url: 'https://www.ssllabs.com/about/assessment.html', @@ -1506,83 +2061,109 @@ const bots = [ }, }, { - regex: 'StatusCake', + includes: 'StatusCake', name: 'StatusCake', category: 'Site Monitor', url: 'https://www.statuscake.com', - producer: { name: 'StatusCake', url: 'https://www.statuscake.com' }, + producer: { + name: 'StatusCake', + url: 'https://www.statuscake.com', + }, }, { - regex: 'Superfeedr bot', + includes: 'Superfeedr bot', name: 'Superfeedr Bot', category: 'Feed Fetcher', url: '', - producer: { name: 'Superfeedr', url: 'https://superfeedr.com/' }, + producer: { + name: 'Superfeedr', + url: 'https://superfeedr.com/', + }, }, { - regex: 'Sparkler', + includes: 'Sparkler', name: 'Sparkler', category: 'Crawler', url: 'https://github.com/USCDataScience/sparkler', }, { - regex: 'Spinn3r', + includes: 'Spinn3r', name: 'Spinn3r', category: 'Crawler', url: 'http://spinn3r.com/robot', - producer: { name: 'Tailrank Inc', url: 'http://spinn3r.com' }, + producer: { + name: 'Tailrank Inc', + url: 'http://spinn3r.com', + }, }, - { regex: 'SputnikBot', name: 'Sputnik Bot', category: 'Crawler', url: '' }, { - regex: 'SputnikFaviconBot', + includes: 'SputnikBot', + name: 'Sputnik Bot', + category: 'Crawler', + url: '', + }, + { + includes: 'SputnikFaviconBot', name: 'Sputnik Favicon Bot', category: 'Crawler', url: '', }, { - regex: 'SputnikImageBot', + includes: 'SputnikImageBot', name: 'Sputnik Image Bot', category: 'Crawler', url: '', }, { - regex: 'SurveyBot', + includes: 'SurveyBot', name: 'Survey Bot', category: 'Search bot', url: 'http://www.domaintools.com/webmasters/surveybot.php', - producer: { name: 'Domain Tools', url: 'http://www.domaintools.com' }, + producer: { + name: 'Domain Tools', + url: 'http://www.domaintools.com', + }, }, { - regex: 'TarmotGezgin', + includes: 'TarmotGezgin', name: 'Tarmot Gezgin', url: 'http://www.tarmot.com/gezgin/', category: 'Search bot', }, { - regex: 'TelegramBot', + includes: 'TelegramBot', name: 'TelegramBot', url: 'https://telegram.org/blog/bot-revolution', }, { - regex: 'TLSProbe', + includes: 'TLSProbe', name: 'TLSProbe', url: 'https://scan.trustnet.venafi.com/', category: 'Security search bot', - producer: { name: 'Venafi TrustNet', url: 'https://www.venafi.com' }, + producer: { + name: 'Venafi TrustNet', + url: 'https://www.venafi.com', + }, }, { - regex: 'TinEye-bot', + includes: 'TinEye-bot', name: 'TinEye Crawler', category: 'Search bot', url: 'http://www.tineye.com/crawler.html', - producer: { name: 'Idée Inc.', url: 'http://ideeinc.com' }, + producer: { + name: 'Idée Inc.', + url: 'http://ideeinc.com', + }, }, { - regex: 'Tiny Tiny RSS', + includes: 'Tiny Tiny RSS', name: 'Tiny Tiny RSS', url: 'http://tt-rss.org', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { regex: 'theoldreader\\.com', @@ -1595,330 +2176,468 @@ const bots = [ name: 'Chartable', category: 'Site Monitor', url: 'https://help.chartable.com/article/34-what-is-the-trackable-analytics-prefix', - producer: { name: 'Chartable', url: 'https://chartable.com' }, + producer: { + name: 'Chartable', + url: 'https://chartable.com', + }, }, { - regex: 'trendictionbot', + includes: 'trendictionbot', name: 'Trendiction Bot', category: 'Crawler', url: 'http://www.trendiction.de/bot', - producer: { name: 'Talkwalker Inc.', 
url: 'http://www.talkwalker.com' }, + producer: { + name: 'Talkwalker Inc.', + url: 'http://www.talkwalker.com', + }, }, { - regex: 'TurnitinBot', + includes: 'TurnitinBot', name: 'TurnitinBot', category: 'Crawler', url: 'http://www.turnitin.com/robot/crawlerinfo.html', - producer: { name: 'iParadigms, LLC.', url: 'http://www.turnitin.com' }, + producer: { + name: 'iParadigms, LLC.', + url: 'http://www.turnitin.com', + }, }, { - regex: 'TweetedTimes', + includes: 'TweetedTimes', name: 'TweetedTimes Bot', category: 'Crawler', url: 'https://tweetedtimes.com/', - producer: { name: 'TweetedTimes', url: 'https://tweetedtimes.com/' }, + producer: { + name: 'TweetedTimes', + url: 'https://tweetedtimes.com/', + }, }, { - regex: 'TweetmemeBot', + includes: 'TweetmemeBot', name: 'Tweetmeme Bot', category: 'Crawler', url: 'http://tweetmeme.com/', - producer: { name: 'Mediasift', url: '' }, + producer: { + name: 'Mediasift', + url: '', + }, }, { - regex: 'Twingly Recon', + includes: 'Twingly Recon', name: 'Twingly Recon', category: 'Crawler', - producer: { name: 'Twingly', url: 'https://www.twingly.com' }, + producer: { + name: 'Twingly', + url: 'https://www.twingly.com', + }, }, { - regex: 'Twitterbot', + includes: 'Twitterbot', name: 'Twitterbot', category: 'Social Media Agent', url: 'https://dev.twitter.com/docs/cards/getting-started', - producer: { name: 'Twitter', url: 'http://www.twitter.com' }, + producer: { + name: 'Twitter', + url: 'http://www.twitter.com', + }, }, { - regex: 'UniversalFeedParser', + includes: 'UniversalFeedParser', name: 'UniversalFeedParser', category: 'Feed Fetcher', url: 'https://github.com/kurtmckee/feedparser', - producer: { name: 'Kurt McKee', url: 'https://github.com/kurtmckee' }, + producer: { + name: 'Kurt McKee', + url: 'https://github.com/kurtmckee', + }, }, { regex: 'via secureurl\\.fwdcdn\\.com', name: 'UkrNet Mail Proxy', category: 'Crawler', url: '', - producer: { name: 'UkrNet Ltd', url: 'https://www.ukr.net/' }, + producer: { + name: 'UkrNet Ltd', + url: 'https://www.ukr.net/', + }, }, { regex: 'Uptime(?:bot)?/', name: 'Uptimebot', category: 'Site Monitor', url: 'https://uptime.com/uptime-bot', - producer: { name: 'Uptime', url: 'https://uptime.com/' }, + producer: { + name: 'Uptime', + url: 'https://uptime.com/', + }, }, { - regex: 'UptimeRobot', + includes: 'UptimeRobot', name: 'UptimeRobot', category: 'Site Monitor', url: 'https://uptimerobot.com/', - producer: { name: 'Uptime Robot', url: 'https://uptimerobot.com/' }, + producer: { + name: 'Uptime Robot', + url: 'https://uptimerobot.com/', + }, }, { - regex: 'URLAppendBot', + includes: 'URLAppendBot', name: 'URLAppendBot', category: 'Crawler', url: 'http://www.profound.net/urlappendbot.html', - producer: { name: 'Profound Networks', url: 'http://www.profound.net' }, + producer: { + name: 'Profound Networks', + url: 'http://www.profound.net', + }, }, { - regex: 'Vagabondo', + includes: 'Vagabondo', name: 'Vagabondo', category: 'Crawler', url: '', - producer: { name: 'WiseGuys', url: 'http://www.wise-guys.nl/' }, + producer: { + name: 'WiseGuys', + url: 'http://www.wise-guys.nl/', + }, }, { - regex: 'vkShare; ', + includes: 'vkShare; ', name: 'VK Share Button', category: 'Crawler', url: 'https://dev.vk.com/en/widgets/share', - producer: { name: 'VK', url: 'https://vk.com/' }, + producer: { + name: 'VK', + url: 'https://vk.com/', + }, }, { - regex: 'VKRobot', + includes: 'VKRobot', name: 'VK Robot', category: 'Crawler', url: 'https://dev.vk.com/en/', - producer: { name: 'VK', url: 'https://vk.com/' }, + 
producer: { + name: 'VK', + url: 'https://vk.com/', + }, }, { - regex: 'VSMCrawler', + includes: 'VSMCrawler', name: 'Visual Site Mapper Crawler', category: 'Crawler', url: 'http://www.visualsitemapper.com/crawler', - producer: { name: 'Alentum Software Ltd.', url: 'http://www.alentum.com' }, + producer: { + name: 'Alentum Software Ltd.', + url: 'http://www.alentum.com', + }, }, { - regex: 'Jigsaw', + includes: 'Jigsaw', name: 'W3C CSS Validator', category: 'Validator', url: 'http://jigsaw.w3.org/css-validator', - producer: { name: 'W3C', url: 'http://www.w3.org' }, + producer: { + name: 'W3C', + url: 'http://www.w3.org', + }, }, { - regex: 'W3C_I18n-Checker', + includes: 'W3C_I18n-Checker', name: 'W3C I18N Checker', category: 'Validator', url: 'http://validator.w3.org/i18n-checker', - producer: { name: 'W3C', url: 'http://www.w3.org' }, + producer: { + name: 'W3C', + url: 'http://www.w3.org', + }, }, { - regex: 'W3C-checklink', + includes: 'W3C-checklink', name: 'W3C Link Checker', category: 'Validator', url: 'http://validator.w3.org/checklink', - producer: { name: 'W3C', url: 'http://www.w3.org' }, + producer: { + name: 'W3C', + url: 'http://www.w3.org', + }, }, { regex: 'W3C_Validator|Validator\\.nu', name: 'W3C Markup Validation Service', category: 'Validator', url: 'http://validator.w3.org/services', - producer: { name: 'W3C', url: 'http://www.w3.org' }, + producer: { + name: 'W3C', + url: 'http://www.w3.org', + }, }, { - regex: 'W3C-mobileOK', + includes: 'W3C-mobileOK', name: 'W3C MobileOK Checker', category: 'Validator', url: 'http://validator.w3.org/mobile', - producer: { name: 'W3C', url: 'http://www.w3.org' }, + producer: { + name: 'W3C', + url: 'http://www.w3.org', + }, }, { - regex: 'W3C_Unicorn', + includes: 'W3C_Unicorn', name: 'W3C Unified Validator', category: 'Validator', url: 'http://validator.w3.org/unicorn', - producer: { name: 'W3C', url: 'http://www.w3.org' }, + producer: { + name: 'W3C', + url: 'http://www.w3.org', + }, }, { - regex: 'P3P Validator', + includes: 'P3P Validator', name: 'W3C P3P Validator', category: 'Validator', url: 'https://www.w3.org/P3P/validator.html', - producer: { name: 'W3C', url: 'https://www.w3.org' }, + producer: { + name: 'W3C', + url: 'https://www.w3.org', + }, }, { - regex: 'Wappalyzer', + includes: 'Wappalyzer', name: 'Wappalyzer', url: 'https://github.com/AliasIO/Wappalyzer', - producer: { name: 'AliasIO', url: 'https://github.com/AliasIO' }, + producer: { + name: 'AliasIO', + url: 'https://github.com/AliasIO', + }, }, { - regex: 'PTST/', + includes: 'PTST/', name: 'WebPageTest', category: 'Site Monitor', url: 'https://www.webpagetest.org', }, { - regex: 'WeSEE', + includes: 'WeSEE', name: 'WeSEE:Search', category: 'Search bot', url: 'http://www.wesee.com/bot', - producer: { name: 'WeSEE Ltd', url: 'http://www.wesee.com' }, + producer: { + name: 'WeSEE Ltd', + url: 'http://www.wesee.com', + }, }, { - regex: 'WebbCrawler', + includes: 'WebbCrawler', name: 'WebbCrawler', category: 'Crawler', url: 'http://badcheese.com/crawler.html', - producer: { name: 'Steve Webb', url: 'http://badcheese.com' }, + producer: { + name: 'Steve Webb', + url: 'http://badcheese.com', + }, }, { regex: 'websitepulse[+ ]checker', name: 'WebSitePulse', category: 'Site Monitor', url: 'http://badcheese.com/crawler.html', - producer: { name: 'WebSitePulse', url: 'http://www.websitepulse.com/' }, + producer: { + name: 'WebSitePulse', + url: 'http://www.websitepulse.com/', + }, }, { regex: 'WordPress.+isitwp\\.com', name: 'IsItWP', category: 'Crawler', url: 
'https://www.isitwp.com/', - producer: { name: 'WPBeginner, LLC', url: 'https://www.wpbeginner.com/' }, + producer: { + name: 'WPBeginner, LLC', + url: 'https://www.wpbeginner.com/', + }, }, { - regex: 'Automattic Analytics Crawler', + includes: 'Automattic Analytics Crawler', name: 'Automattic Analytics', category: 'Crawler', url: 'https://wordpress.com/crawler/', - producer: { name: 'Wordpress.org', url: 'https://wordpress.org/' }, + producer: { + name: 'Wordpress.org', + url: 'https://wordpress.org/', + }, }, { regex: 'WordPress\\.com mShots', name: 'WordPress.com mShots', category: 'Service Agent', url: 'https://wordpress.org/', - producer: { name: 'Wordpress.org', url: 'https://wordpress.org/' }, + producer: { + name: 'Wordpress.org', + url: 'https://wordpress.org/', + }, }, { regex: 'wp\\.com feedbot', name: 'wp.com feedbot', category: 'Feed Fetcher', url: 'https://wordpress.com/', - producer: { name: 'Automattic, Inc.', url: 'https://automattic.com/' }, + producer: { + name: 'Automattic, Inc.', + url: 'https://automattic.com/', + }, }, { - regex: 'WordPress', + includes: 'WordPress', name: 'WordPress', category: 'Service Agent', url: 'https://wordpress.org/', - producer: { name: 'Wordpress.org', url: 'https://wordpress.org/' }, + producer: { + name: 'Wordpress.org', + url: 'https://wordpress.org/', + }, }, { - regex: 'Wotbox', + includes: 'Wotbox', name: 'Wotbox', category: 'Search bot', url: 'http://www.wotbox.com/bot/', - producer: { name: 'Wotbox', url: 'http://www.wotbox.com' }, + producer: { + name: 'Wotbox', + url: 'http://www.wotbox.com', + }, }, { - regex: 'XenForo', + includes: 'XenForo', name: 'XenForo', category: 'Service Agent', url: 'https://xenforo.com/', - producer: { name: 'XenForo Ltd.', url: 'https://xenforo.com/' }, + producer: { + name: 'XenForo Ltd.', + url: 'https://xenforo.com/', + }, }, { - regex: 'yacybot', + includes: 'yacybot', name: 'YaCy', category: 'Search bot', url: 'http://yacy.net/bot.html', - producer: { name: 'YaCy', url: 'http://yacy.net' }, + producer: { + name: 'YaCy', + url: 'http://yacy.net', + }, }, { regex: 'Yahoo! Slurp|Yahoo!-AdCrawler', name: 'Yahoo! Slurp', category: 'Search bot', url: 'http://help.yahoo.com/ysearch/slurp', - producer: { name: 'Yahoo! Inc.', url: 'http://www.yahoo.com' }, + producer: { + name: 'Yahoo! Inc.', + url: 'http://www.yahoo.com', + }, }, { regex: 'Yahoo Link Preview|Yahoo:LinkExpander:Slingstone', name: 'Yahoo! Link Preview', category: 'Crawler', url: 'https://help.yahoo.com/kb/mail/yahoo-link-preview-SLN23615.html', - producer: { name: 'Yahoo! Inc.', url: 'http://www.yahoo.com' }, + producer: { + name: 'Yahoo! Inc.', + url: 'http://www.yahoo.com', + }, }, { - regex: 'YahooMailProxy', + includes: 'YahooMailProxy', name: 'Yahoo! Mail Proxy', category: 'Service Agent', url: 'https://help.yahoo.com/kb/yahoo-mail-proxy-SLN28749.html', - producer: { name: 'Yahoo! Inc.', url: 'http://www.yahoo.com' }, + producer: { + name: 'Yahoo! Inc.', + url: 'http://www.yahoo.com', + }, }, { - regex: 'YahooCacheSystem', + includes: 'YahooCacheSystem', name: 'Yahoo! Cache System', category: 'Crawler', url: '', - producer: { name: 'Yahoo! Inc.', url: 'http://www.yahoo.com' }, + producer: { + name: 'Yahoo! Inc.', + url: 'http://www.yahoo.com', + }, }, { - regex: 'Y!J-BRW', + includes: 'Y!J-BRW', name: 'Yahoo! Japan BRW', category: 'Crawler', url: 'https://support.yahoo-net.jp/PccSearch/s/article/H000007955', - producer: { name: 'Yahoo! Japan Corp.', url: 'https://www.yahoo.co.jp/' }, + producer: { + name: 'Yahoo! 
Japan Corp.', + url: 'https://www.yahoo.co.jp/', + }, }, { - regex: 'Y!J-WSC', + includes: 'Y!J-WSC', name: 'Yahoo! Japan WSC', category: 'Crawler', url: 'https://support.yahoo-net.jp/PccSearch/s/article/H000007955', - producer: { name: 'Yahoo! Japan Corp.', url: 'https://www.yahoo.co.jp/' }, + producer: { + name: 'Yahoo! Japan Corp.', + url: 'https://www.yahoo.co.jp/', + }, }, { - regex: 'Y!J-ASR', + includes: 'Y!J-ASR', name: 'Yahoo! Japan ASR', category: 'Crawler', url: 'https://support.yahoo-net.jp/PccSearch/s/article/H000007955', - producer: { name: 'Yahoo! Japan Corp.', url: 'https://www.yahoo.co.jp/' }, + producer: { + name: 'Yahoo! Japan Corp.', + url: 'https://www.yahoo.co.jp/', + }, }, { regex: '^Y!J', name: 'Yahoo! Japan', category: 'Crawler', url: 'https://support.yahoo-net.jp/PccSearch/s/article/H000007955', - producer: { name: 'Yahoo! Japan Corp.', url: 'https://www.yahoo.co.jp/' }, + producer: { + name: 'Yahoo! Japan Corp.', + url: 'https://www.yahoo.co.jp/', + }, }, { regex: - 'Yandex(?:(?:\\.Gazeta |Accessibility|Mobile|MobileScreenShot|RenderResources|Screenshot|Sprav)?Bot|(?:AdNet|Antivirus|Blogs|Calendar|Catalog|Direct|Favicons|ForDomain|ImageResizer|Images|Market|Media|Metrika|News|OntoDB(?:API)?|Pagechecker|Partner|RCA|SearchShop|(?:News|Site)links|Tracker|Turbo|Userproxy|Verticals|Vertis|Video|Webmaster))|YaDirectFetcher', + 'Yandex(?:(?:\\.Gazeta |Accessibility|Additional|Com|Mobile|MobileScreenShot|RenderResources|Screenshot|Sprav)?Bot|(?:Additional|AdNet|Antivirus|Blogs|Calendar|Catalog|Dialogs|Direct|Favicons|ForDomain|ImageResizer|Images|Market|Media|Metrika|News|OntoDB(?:API)?|Pagechecker|Partner|RCA|SearchShop|(?:News|Site)links|Tracker|Turbo|Userproxy|Verticals|Vertis|Video|Webmaster))|YaDirectFetcher', name: 'Yandex Bot', category: 'Search bot', url: 'https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html', - producer: { name: 'Yandex LLC', url: 'https://yandex.com/company/' }, + producer: { + name: 'Yandex LLC', + url: 'https://yandex.com/company/', + }, }, { regex: 'Yeti|NaverJapan|AdsBot-Naver', name: 'Yeti/Naverbot', category: 'Search bot', url: 'http://help.naver.com/robots/', - producer: { name: 'Naver', url: 'http://www.naver.com' }, + producer: { + name: 'Naver', + url: 'http://www.naver.com', + }, }, { - regex: 'YoudaoBot', + includes: 'YoudaoBot', name: 'Youdao Bot', category: 'Search bot', url: 'http://www.youdao.com/help/webmaster/spider', - producer: { name: 'NetEase, Inc.', url: 'http://corp.163.com' }, + producer: { + name: 'NetEase, Inc.', + url: 'http://corp.163.com', + }, }, { - regex: 'YOURLS', + includes: 'YOURLS', name: 'Yourls', category: 'Crawler', url: 'http://yourls.org', @@ -1928,121 +2647,169 @@ const bots = [ name: 'Yunyun Bot', category: 'Search bot', url: 'http://www.yunyun.com/SiteInfo.php?r=about', - producer: { name: 'YunYun', url: 'http://www.yunyun.com' }, + producer: { + name: 'YunYun', + url: 'http://www.yunyun.com', + }, }, { - regex: 'zgrab', + includes: 'zgrab', name: 'zgrab', category: 'Security Checker', url: 'https://github.com/zmap/zgrab', }, { - regex: 'Zookabot', + includes: 'Zookabot', name: 'Zookabot', category: 'Crawler', url: 'http://zookabot.com', - producer: { name: 'Hwacha ApS', url: 'http://hwacha.dk' }, + producer: { + name: 'Hwacha ApS', + url: 'http://hwacha.dk', + }, }, { - regex: 'ZumBot', + includes: 'ZumBot', name: 'ZumBot', category: 'Search bot', url: 'http://help.zum.com/inquiry', - producer: { name: 'ZUM internet', url: 'http://www.zuminternet.com/' }, + producer: { + 
name: 'ZUM internet', + url: 'http://www.zuminternet.com/', + }, }, { - regex: 'YottaaMonitor', + includes: 'YottaaMonitor', name: 'Yottaa Site Monitor', category: 'Site Monitor', url: 'http://www.yottaa.com/products/site-monitor', - producer: { name: 'Yottaa', url: 'http://www.yottaa.com/' }, + producer: { + name: 'Yottaa', + url: 'http://www.yottaa.com/', + }, }, { regex: 'Yahoo Ad monitoring.*yahoo-ad-monitoring-SLN24857', name: 'Yahoo Gemini', category: 'Crawler', url: 'https://help.yahoo.com/kb/yahoo-ad-monitoring-SLN24857.html', - producer: { name: 'Yahoo! Inc.', url: 'http://www.yahoo.com' }, + producer: { + name: 'Yahoo! Inc.', + url: 'http://www.yahoo.com', + }, }, { regex: '.*Java.*outbrain', name: 'Outbrain', category: 'Crawler', url: '', - producer: { name: 'Outbrain', url: 'http://www.outbrain.com/' }, + producer: { + name: 'Outbrain', + url: 'http://www.outbrain.com/', + }, }, { regex: 'HubPages.*crawlingpolicy', name: 'HubPages', category: 'Crawler', url: 'https://hubpages.com/help/crawlingpolicy', - producer: { name: 'HubPages, Inc.', url: 'https://discover.hubpages.com/' }, + producer: { + name: 'HubPages, Inc.', + url: 'https://discover.hubpages.com/', + }, }, { regex: 'Pinterest(?:bot)?/.*www\\.pinterest\\.com', name: 'Pinterest', url: 'https://help.pinterest.com/en/business/article/pinterest-crawler', category: 'Crawler', - producer: { name: 'Pinterest', url: 'https://www.pinterest.com/' }, + producer: { + name: 'Pinterest', + url: 'https://www.pinterest.com/', + }, }, { regex: '.*Site24x7', name: 'Site24x7 Website Monitoring', category: 'Site Monitor', url: 'https://www.site24x7.com/site24x7-faq.html', - producer: { name: 'Site24x7', url: 'https://www.site24x7.com' }, + producer: { + name: 'Site24x7', + url: 'https://www.site24x7.com', + }, }, { regex: '.* HLB', name: 'Site24x7 Defacement Monitor', category: 'Site Monitor', url: 'https://support.site24x7.com/portal/en/kb/articles/default-user-agent-used-in-website-defacement-monitor', - producer: { name: 'Site24x7', url: 'https://www.site24x7.com/' }, + producer: { + name: 'Site24x7', + url: 'https://www.site24x7.com/', + }, }, { - regex: 's~snapchat-proxy', + includes: 's~snapchat-proxy', name: 'Snapchat Proxy', category: 'Crawler', url: 'https://www.snapchat.com', - producer: { name: 'Snapchat Inc.', url: 'https://www.snapchat.com' }, + producer: { + name: 'Snapchat Inc.', + url: 'https://www.snapchat.com', + }, }, { - regex: 'Snap URL Preview Service', + includes: 'Snap URL Preview Service', name: 'Snap URL Preview Service', category: 'Service Agent', url: 'https://developers.snap.com/robots', - producer: { name: 'Snapchat Inc.', url: 'https://www.snapchat.com/' }, + producer: { + name: 'Snapchat Inc.', + url: 'https://www.snapchat.com/', + }, }, { - regex: 'SnapchatAds', + includes: 'SnapchatAds', name: 'Snapchat Ads', category: 'Crawler', url: 'https://businesshelp.snapchat.com/s/article/adsbot-crawler?language=en_US', - producer: { name: 'Snapchat Inc.', url: 'https://www.snapchat.com/' }, + producer: { + name: 'Snapchat Inc.', + url: 'https://www.snapchat.com/', + }, }, { - regex: "Let's Encrypt validation server", + includes: "Let's Encrypt validation server", name: "Let's Encrypt Validation", category: 'Service Agent', url: 'https://letsencrypt.org/how-it-works/', - producer: { name: "Let's Encrypt", url: 'https://letsencrypt.org' }, + producer: { + name: "Let's Encrypt", + url: 'https://letsencrypt.org', + }, }, { - regex: 'GrapeshotCrawler', + includes: 'GrapeshotCrawler', name: 'Grapeshot', category: 
'Crawler', url: 'https://www.grapeshot.com/crawler', - producer: { name: 'Grapeshot', url: 'https://www.grapeshot.com' }, + producer: { + name: 'Grapeshot', + url: 'https://www.grapeshot.com', + }, }, { regex: 'www\\.monitor\\.us', name: 'Monitor.Us', category: 'Site Monitor', url: 'http://www.monitor.us', - producer: { name: 'Monitor.Us', url: 'http://www.monitor.us' }, + producer: { + name: 'Monitor.Us', + url: 'http://www.monitor.us', + }, }, { - regex: 'Catchpoint', + includes: 'Catchpoint', name: 'Catchpoint', category: 'Site Monitor', url: 'https://www.catchpoint.com/', @@ -2052,36 +2819,94 @@ const bots = [ }, }, { - regex: 'bitlybot', + includes: 'bitlybot', name: 'BitlyBot', category: 'Crawler', url: 'https://bitly.com', - producer: { name: 'Bitly, Inc.', url: 'https://bitly.com' }, - }, - { regex: 'Zao/', name: 'Zao', category: 'Crawler' }, - { regex: 'lycos', name: 'Lycos' }, - { regex: 'Slurp', name: 'Inktomi Slurp' }, - { regex: 'Speedy Spider', name: 'Speedy' }, - { regex: 'ScoutJet', name: 'ScoutJet' }, - { regex: 'nrsbot|netresearch', name: 'NetResearchServer' }, - { regex: 'scooter', name: 'Scooter' }, - { regex: 'gigabot', name: 'Gigabot' }, - { regex: 'charlotte', name: 'Charlotte' }, - { regex: 'Pompos', name: 'Pompos' }, - { regex: 'ichiro', name: 'ichiro' }, - { - regex: 'PagePeeker', + producer: { + name: 'Bitly, Inc.', + url: 'https://bitly.com', + }, + }, + { + includes: 'Zao/', + name: 'Zao', + category: 'Crawler', + }, + { + includes: 'lycos', + name: 'Lycos', + }, + { + includes: 'Slurp', + name: 'Inktomi Slurp', + }, + { + includes: 'Speedy Spider', + name: 'Speedy', + }, + { + includes: 'ScoutJet', + name: 'ScoutJet', + }, + { + regex: 'nrsbot|netresearch', + name: 'NetResearchServer', + }, + { + includes: 'scooter', + name: 'Scooter', + }, + { + includes: 'gigabot', + name: 'Gigabot', + }, + { + includes: 'charlotte', + name: 'Charlotte', + }, + { + includes: 'Pompos', + name: 'Pompos', + }, + { + includes: 'ichiro', + name: 'ichiro', + }, + { + includes: 'PagePeeker', name: 'PagePeeker', category: 'Crawler', url: 'https://pagepeeker.com/robots/', - producer: { name: 'PAGEPEEKER SRL', url: 'https://pagepeeker.com/' }, + producer: { + name: 'PAGEPEEKER SRL', + url: 'https://pagepeeker.com/', + }, + }, + { + includes: 'WebThumbnail', + name: 'WebThumbnail', + }, + { + includes: 'Willow Internet Crawler', + name: 'Willow Internet Crawler', + }, + { + includes: 'EmailWolf', + name: 'EmailWolf', + }, + { + includes: 'NetLyzer FastProbe', + name: 'NetLyzer FastProbe', + }, + { + regex: 'AdMantX.*admantx\\.com', + name: 'ADMantX', + }, + { + includes: 'Server Density Service Monitoring', + name: 'Server Density', }, - { regex: 'WebThumbnail', name: 'WebThumbnail' }, - { regex: 'Willow Internet Crawler', name: 'Willow Internet Crawler' }, - { regex: 'EmailWolf', name: 'EmailWolf' }, - { regex: 'NetLyzer FastProbe', name: 'NetLyzer FastProbe' }, - { regex: 'AdMantX.*admantx\\.com', name: 'ADMantX' }, - { regex: 'Server Density Service Monitoring', name: 'Server Density' }, { regex: 'RSSRadio \\(Push Notification Scanner;support@dorada\\.co\\.uk\\)', name: 'RSSRadio Bot', @@ -2089,23 +2914,36 @@ const bots = [ { regex: '^sentry', name: 'Sentry Bot', - producer: { name: 'Sentry', url: 'https://sentry.io' }, + producer: { + name: 'Sentry', + url: 'https://sentry.io', + }, }, { regex: '^Spotify/[\\d.]+$', name: 'Spotify', - producer: { name: 'Spotify', url: 'https://www.spotify.com' }, + producer: { + name: 'Spotify', + url: 'https://www.spotify.com', + }, + }, + { + 
includes: 'The Knowledge AI', + name: 'The Knowledge AI', + category: 'Crawler', }, - { regex: 'The Knowledge AI', name: 'The Knowledge AI', category: 'Crawler' }, { - regex: 'Embedly', + includes: 'Embedly', name: 'Embedly', category: 'Crawler', url: 'https://support.embed.ly/hc/en-us', - producer: { name: 'A Medium, Corp.', url: 'https://medium.com/' }, + producer: { + name: 'A Medium, Corp.', + url: 'https://medium.com/', + }, }, { - regex: 'BrandVerity', + includes: 'BrandVerity', name: 'BrandVerity', category: 'Crawler', url: 'https://www.brandverity.com/why-is-brandverity-visiting-me', @@ -2115,35 +2953,47 @@ const bots = [ }, }, { - regex: 'Kaspersky Lab CFR link resolver', + includes: 'Kaspersky Lab CFR link resolver', name: 'Kaspersky', category: 'Security Checker', url: 'https://www.kaspersky.com/', - producer: { name: 'AO Kaspersky Lab', url: 'https://www.kaspersky.com/' }, + producer: { + name: 'AO Kaspersky Lab', + url: 'https://www.kaspersky.com/', + }, }, { - regex: 'eZ Publish Link Validator', + includes: 'eZ Publish Link Validator', name: 'eZ Publish Link Validator', category: 'Crawler', url: 'https://ez.no/', - producer: { name: 'eZ Systems AS', url: 'https://ez.no/' }, + producer: { + name: 'eZ Systems AS', + url: 'https://ez.no/', + }, }, { - regex: 'woorankreview', + includes: 'woorankreview', name: 'WooRank', category: 'Search bot', url: 'https://www.woorank.com/', - producer: { name: 'WooRank sprl', url: 'https://www.woorank.com/' }, + producer: { + name: 'WooRank sprl', + url: 'https://www.woorank.com/', + }, }, { - regex: 'Siteimprove', + includes: 'Siteimprove', name: 'Siteimprove', category: 'Search bot', url: 'https://siteimprove.com/', - producer: { name: 'Siteimprove GmbH', url: 'https://siteimprove.com/' }, + producer: { + name: 'Siteimprove GmbH', + url: 'https://siteimprove.com/', + }, }, { - regex: 'CATExplorador', + includes: 'CATExplorador', name: 'CATExplorador', category: 'Search bot', url: 'https://fundacio.cat/ca/domini/', @@ -2153,56 +3003,77 @@ const bots = [ }, }, { - regex: 'Buck', + includes: 'Buck', name: 'Buck', category: 'Search bot', url: 'https://hypefactors.com/', - producer: { name: 'Hypefactors A/S', url: 'https://hypefactors.com/' }, + producer: { + name: 'Hypefactors A/S', + url: 'https://hypefactors.com/', + }, }, { - regex: 'tracemyfile', + includes: 'tracemyfile', name: 'TraceMyFile', category: 'Search bot', url: 'https://www.tracemyfile.com/', - producer: { name: 'Idee Inc.', url: 'http://ideeinc.com/' }, + producer: { + name: 'Idee Inc.', + url: 'http://ideeinc.com/', + }, }, { regex: 'zelist\\.ro feed parser', name: 'Ze List', url: 'https://www.zelist.ro/', category: 'Feed Fetcher', - producer: { name: 'Treeworks SRL', url: 'https://www.tree.ro/' }, + producer: { + name: 'Treeworks SRL', + url: 'https://www.tree.ro/', + }, }, { - regex: 'weborama-fetcher', + includes: 'weborama-fetcher', name: 'Weborama', category: 'Search bot', url: 'https://weborama.com/', - producer: { name: 'Weborama SA', url: 'https://weborama.com/' }, + producer: { + name: 'Weborama SA', + url: 'https://weborama.com/', + }, }, { - regex: 'BoardReader Favicon Fetcher', + includes: 'BoardReader Favicon Fetcher', name: 'BoardReader', category: 'Search bot', url: 'https://boardreader.com/', - producer: { name: 'Effyis Inc', url: 'https://boardreader.com/' }, + producer: { + name: 'Effyis Inc', + url: 'https://boardreader.com/', + }, }, { regex: 'IDG/(?:EU|IT|RU|UK)', name: 'IDG', category: 'Crawler', url: 'https://www.spaziodati.eu/', - producer: { name: 
'SpazioDati S.r.l.', url: 'https://www.spaziodati.eu/' }, + producer: { + name: 'SpazioDati S.r.l.', + url: 'https://www.spaziodati.eu/', + }, }, { - regex: 'Bytespider', + includes: 'Bytespider', name: 'Bytespider', category: 'Search bot', url: 'https://bytedance.com/', - producer: { name: 'ByteDance Ltd.', url: 'https://bytedance.com/' }, + producer: { + name: 'ByteDance Ltd.', + url: 'https://bytedance.com/', + }, }, { - regex: 'WikiDo', + includes: 'WikiDo', name: 'WikiDo', category: 'Search bot', url: 'https://www.wikido.com/', @@ -2222,7 +3093,7 @@ const bots = [ }, }, { - regex: 'AwarioRssBot', + includes: 'AwarioRssBot', name: 'Awario', category: 'Feed Fetcher', url: 'https://awario.com/bots.html', @@ -2232,7 +3103,7 @@ const bots = [ }, }, { - regex: 'oBot', + includes: 'oBot', name: 'oBot', category: 'Search bot', url: 'https://www.xforce-security.com/crawler/', @@ -2242,14 +3113,17 @@ const bots = [ }, }, { - regex: 'SMTBot', + includes: 'SMTBot', name: 'SMTBot', category: 'Search bot', url: 'https://www.similartech.com/smtbot', - producer: { name: 'SimilarTech Ltd.', url: 'https://www.similartech.com/' }, + producer: { + name: 'SimilarTech Ltd.', + url: 'https://www.similartech.com/', + }, }, { - regex: 'LCC', + includes: 'LCC', name: 'LCC', category: 'Search bot', url: 'https://corpora.uni-leipzig.de/crawler_faq.html', @@ -2259,21 +3133,27 @@ const bots = [ }, }, { - regex: 'Startpagina-Linkchecker', + includes: 'Startpagina-Linkchecker', name: 'Startpagina Linkchecker', category: 'Search bot', url: 'https://www.startpagina.nl/linkchecker', - producer: { name: 'Startpagina B.V.', url: 'https://www.startpagina.nl/' }, + producer: { + name: 'Startpagina B.V.', + url: 'https://www.startpagina.nl/', + }, }, { - regex: 'MoodleBot-Linkchecker', + includes: 'MoodleBot-Linkchecker', name: 'MoodleBot Linkchecker', category: 'Search bot', - url: 'hhttps://docs.moodle.org/en/Usage', - producer: { name: 'Moodle Pty Ltd', url: 'https://moodle.org/' }, + url: 'https://docs.moodle.org/en/Usage', + producer: { + name: 'Moodle Pty Ltd', + url: 'https://moodle.org/', + }, }, { - regex: 'GTmetrix', + includes: 'GTmetrix', name: 'GTmetrix', category: 'Crawler', url: 'https://gtmetrix.com/', @@ -2287,10 +3167,13 @@ const bots = [ name: 'CyberFind Crawler', category: 'Crawler', url: 'https://www.cyberfind.net/bot.html', - producer: { name: 'Find.tf', url: 'https://find.tf/' }, + producer: { + name: 'Find.tf', + url: 'https://find.tf/', + }, }, { - regex: 'Nutch', + includes: 'Nutch', name: 'Nutch-based Bot', category: 'Crawler', url: 'https://nutch.apache.org', @@ -2300,38 +3183,42 @@ const bots = [ }, }, { - regex: 'Seobility', + includes: 'Seobility', name: 'Seobility', category: 'Crawler', url: 'https://www.seobility.net/en/faq/?category=crawling#!aboutourbot', }, { - regex: 'Vercelbot', + includes: 'Vercelbot', name: 'Vercel Bot', category: 'Service bot', url: 'https://vercel.com', }, { - regex: 'Grammarly', + includes: 'Grammarly', name: 'Grammarly', category: 'Service bot', url: 'https://www.grammarly.com', }, - { regex: 'Robozilla', name: 'Robozilla', category: 'Crawler' }, { - regex: 'Domains Project', + includes: 'Robozilla', + name: 'Robozilla', + category: 'Crawler', + }, + { + includes: 'Domains Project', name: 'Domains Project', category: 'Crawler', url: 'https://domainsproject.org', }, { - regex: 'PetalBot', + includes: 'PetalBot', name: 'Petal Bot', category: 'Crawler', url: 'https://aspiegel.com/petalbot', }, { - regex: 'SerendeputyBot', + includes: 'SerendeputyBot', name: 
'Serendeputy Bot', category: 'Crawler', url: 'https://serendeputy.com/about/serendeputy-bot', @@ -2344,25 +3231,25 @@ const bots = [ url: 'https://www.admantx.com/service-fetcher.html', }, { - regex: 'SemanticScholarBot', + includes: 'SemanticScholarBot', name: 'Semantic Scholar Bot', category: 'Crawler', url: 'https://www.semanticscholar.org/crawler', }, { - regex: 'VelenPublicWebCrawler', + includes: 'VelenPublicWebCrawler', name: 'Velen Public Web Crawler', category: 'Crawler', url: 'https://hunter.io/robot', }, { - regex: 'Barkrowler', + includes: 'Barkrowler', name: 'Barkrowler', category: 'Crawler', url: 'http://www.exensa.com/crawl', }, { - regex: 'BDCbot', + includes: 'BDCbot', name: 'BDCbot', category: 'Crawler', url: 'https://bigweb.bigdatacorp.com.br/pages/faq.aspx', @@ -2372,18 +3259,24 @@ const bots = [ }, }, { - regex: 'adbeat', + includes: 'adbeat', name: 'Adbeat', category: 'Crawler', url: 'https://www.adbeat.com/operation_policy', - producer: { name: 'PPC Labs LLC', url: 'https://www.adbeat.com/' }, + producer: { + name: 'PPC Labs LLC', + url: 'https://www.adbeat.com/', + }, }, { regex: '(?:BuiltWith|BW/)', name: 'BuiltWith', category: 'Crawler', url: 'https://builtwith.com/biup', - producer: { name: 'BuiltWith Pty Ltd', url: 'https://builtwith.com/' }, + producer: { + name: 'BuiltWith Pty Ltd', + url: 'https://builtwith.com/', + }, }, { regex: 'https://whatis\\.contentkingapp\\.com', @@ -2396,11 +3289,14 @@ const bots = [ }, }, { - regex: 'MicroAdBot', + includes: 'MicroAdBot', name: 'MicroAdBot', category: 'Crawler', url: 'https://www.microad.co.jp/', - producer: { name: 'MicroAd, Inc.', url: 'https://www.microad.co.jp/' }, + producer: { + name: 'MicroAd, Inc.', + url: 'https://www.microad.co.jp/', + }, }, { regex: 'PingAdmin\\.Ru', @@ -2415,7 +3311,7 @@ const bots = [ url: 'http://notifyninja.com', }, { - regex: 'WebDataStats', + includes: 'WebDataStats', name: 'WebDataStats', category: 'Crawler', url: 'https://webdatastats.com/policy.html', @@ -2429,36 +3325,45 @@ const bots = [ name: 'parse.ly', category: 'Crawler', url: 'https://www.parse.ly/help/integration/crawler', - producer: { name: 'Parsely, Inc.', url: 'https://www.parse.ly/' }, + producer: { + name: 'Parsely, Inc.', + url: 'https://www.parse.ly/', + }, }, { - regex: 'Nimbostratus-Bot', + includes: 'Nimbostratus-Bot', name: 'Nimbostratus Bot', category: 'Site Monitor', url: 'http://cloudsystemnetworks.com', }, { - regex: 'HeartRails_Capture', - name: 'Heart Rails Capture', + includes: 'HeartRails_Capture', + name: 'HeartRails Capture', category: 'Service Agent', url: 'http://capture.heartrails.com', }, { - regex: 'Project-Resonance', + includes: 'Project-Resonance', name: 'Project Resonance', category: 'Crawler', url: 'https://project-resonance.com/', - producer: { name: 'RedHunt Labs Limited', url: 'https://redhuntlabs.com/' }, + producer: { + name: 'RedHunt Labs Limited', + url: 'https://redhuntlabs.com/', + }, }, { - regex: 'DataXu', + includes: 'DataXu', name: 'DataXu', category: 'Service Agent', url: 'https://advertising.roku.com/dataxu', - producer: { name: 'Roku, Inc.', url: 'https://roku.com' }, + producer: { + name: 'Roku, Inc.', + url: 'https://roku.com', + }, }, { - regex: 'Cocolyzebot', + includes: 'Cocolyzebot', name: 'Cocolyzebot', category: 'Crawler', url: 'https://cocolyze.com/en/cocolyzebot', @@ -2468,21 +3373,27 @@ const bots = [ }, }, { - regex: 'veryhip', + includes: 'veryhip', name: 'VeryHip', category: 'Crawler', url: 'https://veryhip.com/', - producer: { name: 'VeryHip', url: 
'https://veryhip.com/' }, + producer: { + name: 'VeryHip', + url: 'https://veryhip.com/', + }, }, { - regex: 'LinkpadBot', + includes: 'LinkpadBot', name: 'LinkpadBot', category: 'Crawler', url: 'https://www.linkpad.org/', - producer: { name: 'Solomono LLC', url: 'https://www.linkpad.org/' }, + producer: { + name: 'Solomono LLC', + url: 'https://www.linkpad.org/', + }, }, { - regex: 'MuscatFerret', + includes: 'MuscatFerret', name: 'MuscatFerret', category: 'Crawler', url: 'http://www.webtop.com/', @@ -2498,35 +3409,47 @@ const bots = [ }, }, { - regex: 'ArchiveBox', + includes: 'ArchiveBox', name: 'ArchiveBox', url: 'https://archivebox.io/', category: 'Crawler', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Choosito', + includes: 'Choosito', name: 'Choosito', url: 'https://www.choosito.com/', category: 'Crawler', - producer: { name: 'Choosito! Inc.', url: 'https://www.choosito.com/' }, + producer: { + name: 'Choosito! Inc.', + url: 'https://www.choosito.com/', + }, }, { - regex: 'datagnionbot', + includes: 'datagnionbot', name: 'datagnionbot', url: 'https://www.datagnion.com/bot.html', category: 'Crawler', - producer: { name: 'DATAGNION GMBH', url: 'https://www.datagnion.com/' }, + producer: { + name: 'DATAGNION GMBH', + url: 'https://www.datagnion.com/', + }, }, { - regex: 'WhatCMS', + includes: 'WhatCMS', name: 'WhatCMS', url: 'https://whatcms.org/', category: 'Crawler', - producer: { name: 'Nineteen Ten LLC', url: 'https://whatcms.org/' }, + producer: { + name: 'Nineteen Ten LLC', + url: 'https://whatcms.org/', + }, }, { - regex: 'httpx', + includes: 'httpx', name: 'httpx', url: 'https://github.com/projectdiscovery/httpx', category: 'Crawler', @@ -2550,10 +3473,13 @@ const bots = [ name: 'Expanse', category: 'Security Checker', url: 'https://expanse.co/', - producer: { name: 'Expanse Inc.', url: 'https://expanse.co/' }, + producer: { + name: 'Expanse Inc.', + url: 'https://expanse.co/', + }, }, { - regex: 'HuaweiWebCatBot', + includes: 'HuaweiWebCatBot', name: 'HuaweiWebCatBot', category: 'Crawler', url: 'https://isecurity.huawei.com', @@ -2563,113 +3489,125 @@ const bots = [ }, }, { - regex: 'Hatena-Favicon', + includes: 'Hatena-Favicon', name: 'Hatena Favicon', category: 'Crawler', url: 'https://www.hatena.ne.jp/faq/', - producer: { name: 'Hatena Co., Ltd.', url: 'https://www.hatena.ne.jp' }, + producer: { + name: 'Hatena Co., Ltd.', + url: 'https://www.hatena.ne.jp', + }, }, { regex: 'Hatena-?Bookmark', name: 'Hatena Bookmark', category: 'Crawler', url: 'https://www.hatena.ne.jp/faq/', - producer: { name: 'Hatena Co., Ltd.', url: 'https://www.hatena.ne.jp' }, + producer: { + name: 'Hatena Co., Ltd.', + url: 'https://www.hatena.ne.jp', + }, }, { - regex: 'RyowlEngine', + includes: 'RyowlEngine', name: 'Ryowl', category: 'Crawler', url: 'https://ryowl.org', }, { - regex: 'OdklBot', + includes: 'OdklBot', name: 'Odnoklassniki Bot', category: 'Crawler', url: 'https://odnoklassniki.ru', }, { - regex: 'Mediatoolkitbot', + includes: 'Mediatoolkitbot', name: 'Mediatoolkit Bot', category: 'Crawler', url: 'https://mediatoolkit.com', }, { - regex: 'ZoominfoBot', + includes: 'ZoominfoBot', name: 'ZoominfoBot', category: 'Crawler', url: 'https://www.zoominfo.com', }, { - regex: 'WeViKaBot', + includes: 'WeViKaBot', name: 'WeViKaBot', category: 'Crawler', url: 'http://www.wevika.de', }, { - regex: 'SEOkicks', + includes: 'SEOkicks', name: 'SEOkicks', category: 'Crawler', url: 'https://www.seokicks.de/robot.html', - producer: { name: 'SEOkicks', url: 
'https://www.seokicks.de/' }, + producer: { + name: 'SEOkicks', + url: 'https://www.seokicks.de/', + }, }, { - regex: 'Plukkie', + includes: 'Plukkie', name: 'Plukkie', category: 'Crawler', url: 'http://www.botje.com/plukkie.htm', }, { - regex: 'proximic;', + includes: 'proximic;', name: 'Comscore', category: 'Crawler', url: 'https://www.comscore.com/Web-Crawler', }, { - regex: 'SurdotlyBot', + includes: 'SurdotlyBot', name: 'SurdotlyBot', category: 'Crawler', url: 'http://sur.ly/bot.html', }, { - regex: 'Gowikibot', + includes: 'Gowikibot', name: 'Gowikibot', category: 'Crawler', url: 'http:/www.gowikibot.com', }, { - regex: 'SabsimBot', + includes: 'SabsimBot', name: 'SabsimBot', category: 'Crawler', url: 'https://sabsim.com', }, { - regex: 'LumtelBot', + includes: 'LumtelBot', name: 'LumtelBot', category: 'Crawler', - url: 'https://umtel.com', + url: 'https://lumtel.com', }, { - regex: 'PiplBot', + includes: 'PiplBot', name: 'PiplBot', category: 'Crawler', url: 'http://www.pipl.com/bot', }, { - regex: 'woobot', + includes: 'woobot', name: 'WooRank', category: 'Crawler', url: 'https://www.woorank.com/bot', }, { - regex: 'Cookiebot', + includes: 'Cookiebot', name: 'Cookiebot', category: 'Crawler', url: 'https://support.cookiebot.com/hc/en-us/articles/360014264140-Scanner-User-Agent', - producer: { name: 'Cybot A/S', url: 'https://www.cybot.com/' }, + producer: { + name: 'Cybot A/S', + url: 'https://www.cybot.com/', + }, }, { - regex: 'NetSystemsResearch', + includes: 'NetSystemsResearch', name: 'NetSystemsResearch', category: 'Security Checker', url: 'https://www.netsystemsresearch.com/', @@ -2679,11 +3617,14 @@ const bots = [ }, }, { - regex: 'CensysInspect', + includes: 'CensysInspect', name: 'CensysInspect', category: 'Security Checker', url: 'https://about.censys.io/', - producer: { name: 'Censys, Inc.', url: 'https://censys.io/' }, + producer: { + name: 'Censys, Inc.', + url: 'https://censys.io/', + }, }, { regex: 'gdnplus\\.com', @@ -2696,45 +3637,51 @@ const bots = [ }, }, { - regex: 'WellKnownBot', + includes: 'WellKnownBot', name: 'WellKnownBot', category: 'Crawler', url: 'https://well-known.dev', }, { - regex: 'Adsbot', + includes: 'Adsbot', name: 'Adsbot', category: 'Crawler', url: 'https://seostar.co/robot/', }, { - regex: 'MTRobot', + includes: 'MTRobot', name: 'MTRobot', category: 'Crawler', url: 'https://metrics-tools.de/robot.html', - producer: { name: 'Metrics Tools', url: 'https://metrics-tools.de/' }, + producer: { + name: 'Metrics Tools', + url: 'https://metrics-tools.de/', + }, }, { - regex: 'serpstatbot', + includes: 'serpstatbot', name: 'serpstatbot', category: 'Crawler', url: 'http://serpstatbot.com/', - producer: { name: 'Netpeak Ltd', url: 'https://netpeak.net/' }, + producer: { + name: 'Netpeak Ltd', + url: 'https://netpeak.net/', + }, }, { - regex: 'colly', + includes: 'colly', name: 'colly', category: 'Crawler', url: 'https://github.com/gocolly/colly/', }, { - regex: 'l9tcpid', + includes: 'l9tcpid', name: 'l9tcpid', category: 'Security Checker', url: 'https://github.com/LeakIX/l9tcpid', }, { - regex: 'l9explore', + includes: 'l9explore', name: 'l9explore', category: 'Security Checker', url: 'https://github.com/LeakIX/l9explore', @@ -2744,7 +3691,10 @@ const bots = [ name: 'LeakIX', category: 'Security Checker', url: 'https://leakix.net/', - producer: { name: 'BaDaaS SRL', url: 'https://leakix.net/' }, + producer: { + name: 'BaDaaS SRL', + url: 'https://leakix.net/', + }, }, { regex: 'MegaIndex\\.ru', @@ -2753,11 +3703,14 @@ const bots = [ url: 
'https://megaindex.com/crawler', }, { - regex: 'Seekport', + includes: 'Seekport', name: 'Seekport', category: 'Crawler', url: 'https://bot.seekport.com/', - producer: { name: 'SISTRIX GmbH', url: 'https://www.sistrix.de/' }, + producer: { + name: 'SISTRIX GmbH', + url: 'https://www.sistrix.de/', + }, }, { regex: 'Seolyt(?:Bot)?', @@ -2766,48 +3719,63 @@ const bots = [ url: 'https://seolyt.com/', }, { - regex: 'YaK/', + includes: 'YaK/', name: 'YaK', category: 'Crawler', url: 'https://www.linkfluence.com/', - producer: { name: 'Linkfluence SAS', url: 'https://www.linkfluence.com/' }, + producer: { + name: 'Linkfluence SAS', + url: 'https://www.linkfluence.com/', + }, }, { - regex: 'KomodiaBot', + includes: 'KomodiaBot', name: 'KomodiaBot', category: 'Crawler', url: 'http://www.komodia.com/newwiki/index.php/URL_server_crawler', - producer: { name: 'Komodia Inc.', url: 'https://www.komodia.com/' }, + producer: { + name: 'Komodia Inc.', + url: 'https://www.komodia.com/', + }, }, { - regex: 'KStandBot', + includes: 'KStandBot', name: 'KStandBot', category: 'Crawler', url: 'https://url-classification.io/wiki/index.php?title=URL_server_crawler', - producer: { name: 'Komodia Inc.', url: 'https://www.komodia.com/' }, + producer: { + name: 'Komodia Inc.', + url: 'https://www.komodia.com/', + }, }, { - regex: 'Neevabot', + includes: 'Neevabot', name: 'Neevabot', category: 'Search bot', url: 'https://neeva.com/neevabot', - producer: { name: 'Neeva Inc.', url: 'https://neeva.com/' }, + producer: { + name: 'Neeva Inc.', + url: 'https://neeva.com/', + }, }, { - regex: 'Chatwork LinkPreview', + includes: 'Chatwork LinkPreview', name: 'Chatwork LinkPreview', category: 'Service Agent', url: 'https://go.chatwork.com/en/', - producer: { name: 'kubell Co., Ltd.', url: 'https://www.kubell.com/en/' }, + producer: { + name: 'kubell Co., Ltd.', + url: 'https://www.kubell.com/en/', + }, }, { - regex: 'LinkPreview', + includes: 'LinkPreview', name: 'LinkPreview', category: 'Service Agent', url: 'https://www.linkpreview.net/', }, { - regex: 'JungleKeyThumbnail', + includes: 'JungleKeyThumbnail', name: 'JungleKeyThumbnail', category: 'Crawler', url: 'https://junglekey.com/', @@ -2817,10 +3785,13 @@ const bots = [ name: 'RocketMonitorBot', category: 'Site Monitor', url: 'https://www.radiomast.io/docs/stream-monitoring/technical_details.html', - producer: { name: 'Radio Mast, Inc.', url: 'https://www.radiomast.io/' }, + producer: { + name: 'Radio Mast, Inc.', + url: 'https://www.radiomast.io/', + }, }, { - regex: 'SitemapParser-VIPnytt', + includes: 'SitemapParser-VIPnytt', name: 'SitemapParser-VIPnytt', category: 'Crawler', url: 'https://github.com/VIPnytt/SitemapParser/', @@ -2837,33 +3808,42 @@ const bots = [ category: 'Site Monitor', url: 'https://www.dotcom-monitor.com', }, - { regex: 'ThinkChaos/', name: 'ThinkChaos', category: 'Crawler' }, { - regex: 'DataForSeoBot', + includes: 'ThinkChaos/', + name: 'ThinkChaos', + category: 'Crawler', + }, + { + includes: 'Thinkbot/', + name: 'Thinkbot', + category: 'Crawler', + }, + { + includes: 'DataForSeoBot', name: 'DataForSeoBot', category: 'Crawler', url: 'https://dataforseo.com/dataforseo-bot', }, { - regex: 'Discordbot', + includes: 'Discordbot', name: 'Discord Bot', category: 'Service Agent', url: 'https://discordapp.com', }, { - regex: 'Linespider', + includes: 'Linespider', name: 'Linespider', category: 'Crawler', url: 'https://lin.ee/4dwXkTH', }, { - regex: 'Cincraw', + includes: 'Cincraw', name: 'Cincraw', category: 'Crawler', url: 'http://cincrawdata.net/bot/', 
}, { - regex: 'CISPA Web Analyzer', + includes: 'CISPA Web Analyzer', name: 'CISPA Web Analyzer', category: 'Crawler', url: 'https://notify.cispa.de/', @@ -2873,14 +3853,17 @@ const bots = [ }, }, { - regex: 'IonCrawl', + includes: 'IonCrawl', name: 'IONOS Crawler', category: 'Crawler', url: 'https://www.ionos.de/terms-gtc/faq-crawler-en/', - producer: { name: 'IONOS SE', url: 'https://www.ionos.de/' }, + producer: { + name: 'IONOS SE', + url: 'https://www.ionos.de/', + }, }, { - regex: 'Crawldad', + includes: 'Crawldad', name: 'Crawldad', category: 'Crawler', url: 'https://gist.github.com/jayhardee9/2f2a2c4dba26564ee040ae32e0dd0972', @@ -2896,19 +3879,19 @@ const bots = [ }, }, { - regex: 'TigerBot', + includes: 'TigerBot', name: 'TigerBot', category: 'Crawler', url: 'https://tiger.ch/', }, { - regex: 'TestCrawler', + includes: 'TestCrawler', name: 'TestCrawler', category: 'Crawler', url: 'https://www.comcepta.com/', }, { - regex: 'CrowdTanglebot', + includes: 'CrowdTanglebot', name: 'CrowdTangle', category: 'Crawler', url: 'https://help.crowdtangle.com/en/articles/3009319-crowdtangle-bot', @@ -2928,25 +3911,34 @@ const bots = [ }, }, { - regex: 'OnalyticaBot', + includes: 'OnalyticaBot', name: 'Onalytica', category: 'Crawler', url: 'https://www.airslate.com/bot/explore/onalytica-bot', - producer: { name: 'airSlate, Inc.', url: 'https://www.airslate.com/' }, + producer: { + name: 'airSlate, Inc.', + url: 'https://www.airslate.com/', + }, }, { - regex: 'deepnoc', + includes: 'deepnoc', name: 'deepnoc', category: 'Crawler', url: 'https://deepnoc.com/bot', - producer: { name: 'deepnoc, GmbH', url: 'https://deepnoc.com/' }, + producer: { + name: 'deepnoc, GmbH', + url: 'https://deepnoc.com/', + }, }, { - regex: 'Newslitbot', + includes: 'Newslitbot', name: 'Newslitbot', category: 'Crawler', url: 'https://www.newslit.co/', - producer: { name: 'Newslit, LLC.', url: 'https://www.newslit.co/' }, + producer: { + name: 'Newslit, LLC.', + url: 'https://www.newslit.co/', + }, }, { regex: 'um-(?:ANS|CC|FC|IC|LN)', @@ -2959,7 +3951,7 @@ const bots = [ }, }, { - regex: 'Abonti', + includes: 'Abonti', name: 'Abonti', category: 'Crawler', url: 'http://abonti.com/', @@ -2969,7 +3961,10 @@ const bots = [ name: 'Infegy', category: 'Crawler', url: 'https://infegy.com/', - producer: { name: 'Infegy, Inc.', url: 'https://infegy.com/' }, + producer: { + name: 'Infegy, Inc.', + url: 'https://infegy.com/', + }, }, { regex: 'HTTP Banner Detection \\(https://security\\.ipip\\.net\\)', @@ -2982,7 +3977,7 @@ const bots = [ }, }, { - regex: 'ev-crawler', + includes: 'ev-crawler', name: 'Headline', category: 'Crawler', url: 'https://headline.com/legal/crawler', @@ -2992,18 +3987,24 @@ const bots = [ }, }, { - regex: 'webprosbot', + includes: 'webprosbot', name: 'WebPros', category: 'Crawler', url: 'https://webpros.com/', - producer: { name: 'WebPros Holdco B.V.', url: 'https://webpros.com/' }, + producer: { + name: 'WebPros Holdco B.V.', + url: 'https://webpros.com/', + }, }, { - regex: 'ELB-HealthChecker', + includes: 'ELB-HealthChecker', name: 'Amazon ELB', category: 'Site Monitor', url: 'https://aws.amazon.com/elasticloadbalancing/', - producer: { name: 'Amazon.com, Inc.', url: 'https://www.amazon.com/' }, + producer: { + name: 'Amazon.com, Inc.', + url: 'https://www.amazon.com/', + }, }, { regex: 'Wheregoes\\.com Redirect Checker', @@ -3012,19 +4013,19 @@ const bots = [ url: 'https://wheregoes.com/', }, { - regex: 'project_patchwatch', + includes: 'project_patchwatch', name: 'Project Patchwatch', category: 
'Crawler', url: 'http://66.240.192.82/', }, { - regex: 'InternetMeasurement', + includes: 'InternetMeasurement', name: 'InternetMeasurement', category: 'Crawler', url: 'https://internet-measurement.com/', }, { - regex: 'DomainAppender', + includes: 'DomainAppender', name: 'DomainAppender', category: 'Crawler', url: 'https://www.profound.net/product/domain_append/', @@ -3034,7 +4035,7 @@ const bots = [ }, }, { - regex: 'FreeWebMonitoring SiteChecker', + includes: 'FreeWebMonitoring SiteChecker', name: 'FreeWebMonitoring', category: 'Site Monitor', url: 'https://www.freewebmonitoring.com/bot.html', @@ -3044,7 +4045,7 @@ const bots = [ }, }, { - regex: 'Page Modified Pinger', + includes: 'Page Modified Pinger', name: 'Page Modified Pinger', category: 'Site Monitor', url: 'https://www.pagemodified.com/', @@ -3058,38 +4059,55 @@ const bots = [ name: 'adstxtlab.com', category: 'Crawler', url: 'https://adstxtlab.com/validator.php', - producer: { name: 'Jaohawi AB', url: 'https://adstxtlab.com/' }, + producer: { + name: 'Jaohawi AB', + url: 'https://adstxtlab.com/', + }, }, { - regex: 'Iframely', + includes: 'Iframely', name: 'Iframely', category: 'Crawler', url: 'https://iframely.com/', - producer: { name: 'Itteco Software, Corp.', url: 'https://iframely.com/' }, + producer: { + name: 'Itteco Software, Corp.', + url: 'https://iframely.com/', + }, }, { - regex: 'DomainStatsBot', + includes: 'DomainStatsBot', name: 'DomainStatsBot', category: 'Crawler', url: 'https://domainstats.com/pages/our-bot', - producer: { name: 'Domainstats Ltd', url: 'https://domainstats.com/' }, + producer: { + name: 'Domainstats Ltd', + url: 'https://domainstats.com/', + }, }, { - regex: 'aiHitBot', + includes: 'aiHitBot', name: 'aiHitBot', category: 'Crawler', url: 'https://www.aihitdata.com/about', }, { - regex: 'DomainCrawler/', + includes: 'DomainCrawler/', name: 'DomainCrawler', category: 'Crawler', url: 'https://domaincrawler.com/about-us/', }, - { regex: 'DNSResearchBot', name: 'DNSResearchBot', category: 'Crawler' }, - { regex: 'GitCrawlerBot', name: 'GitCrawlerBot', category: 'Crawler' }, { - regex: 'AdAuth', + includes: 'DNSResearchBot', + name: 'DNSResearchBot', + category: 'Crawler', + }, + { + includes: 'GitCrawlerBot', + name: 'GitCrawlerBot', + category: 'Crawler', + }, + { + includes: 'AdAuth', name: 'AdAuth', category: 'Crawler', url: 'https://www.adauth.com', @@ -3107,20 +4125,23 @@ const bots = [ url: 'https://kozmonavt.ml', }, { - regex: 'CriteoBot/', + includes: 'CriteoBot/', name: 'CriteoBot', category: 'Crawler', url: 'https://www.criteo.com/criteo-crawler/', }, { - regex: 'PayPal IPN', + includes: 'PayPal IPN', name: 'PayPal IPN', category: 'Service Agent', url: 'https://developer.paypal.com/api/nvp-soap/ipn/IPNIntro/', - producer: { name: 'PayPal, Inc.', url: 'https://www.paypal.com/' }, + producer: { + name: 'PayPal, Inc.', + url: 'https://www.paypal.com/', + }, }, { - regex: 'MaCoCu', + includes: 'MaCoCu', name: 'MaCoCu', category: 'Crawler', url: 'https://www.clarin.si/info/macocu-massive-collection-and-curation-of-monolingual-and-bilingual-data/', @@ -3130,7 +4151,7 @@ const bots = [ }, }, { - regex: 'CLASSLA', + includes: 'CLASSLA', name: 'CLASSLA-web', category: 'Crawler', url: 'https://www.clarin.si/info/classla-web-crawler/', @@ -3150,11 +4171,14 @@ const bots = [ }, }, { - regex: 'InfoTigerBot', + includes: 'InfoTigerBot', name: 'InfoTigerBot', category: 'Crawler', url: 'https://infotiger.com/bot', - producer: { name: 'Infotiger UG', url: 'https://infotiger.com/' }, + producer: { + name: 
'Infotiger UG', + url: 'https://infotiger.com/', + }, }, { regex: '(?:Birdcrawlerbot|CrawlaDeBot)', @@ -3167,7 +4191,7 @@ const bots = [ }, }, { - regex: 'ScamadviserExternalHit', + includes: 'ScamadviserExternalHit', name: 'Scamadviser External Hit', category: 'Crawler', url: 'https://www.scamadviser.com/', @@ -3177,27 +4201,33 @@ const bots = [ }, }, { - regex: 'ZaldamoSearchBot', + includes: 'ZaldamoSearchBot', name: 'Zaldamo', category: 'Crawler', url: 'https://www.zaldamo.com/search.html', - producer: { name: 'Zaldamo, LLC.', url: 'https://www.zaldamo.com/' }, + producer: { + name: 'Zaldamo, LLC.', + url: 'https://www.zaldamo.com/', + }, }, { - regex: 'AFB', + includes: 'AFB', name: 'Allloadin Favicon Bot', category: 'Crawler', url: 'https://allloadin.com/', }, { - regex: 'LinkWalker', + includes: 'LinkWalker', name: 'LinkWalker', category: 'Crawler', url: 'https://www.phishlabs.com/', - producer: { name: 'PhishLabs, Inc.', url: 'https://www.phishlabs.com/' }, + producer: { + name: 'PhishLabs, Inc.', + url: 'https://www.phishlabs.com/', + }, }, { - regex: 'RenovateBot', + includes: 'RenovateBot', name: 'RenovateBot', category: 'Security Checker', url: 'https://github.com/renovatebot/renovate', @@ -3207,24 +4237,30 @@ const bots = [ }, }, { - regex: 'INETDEX-BOT', + includes: 'INETDEX-BOT', name: 'Inetdex Bot', category: 'Crawler', url: 'https://www.inetdex.com/', }, { - regex: 'NETZZAPPEN', + includes: 'NETZZAPPEN', name: 'NETZZAPPEN', category: 'Crawler', url: 'https://www.netzzappen.com/', - producer: { name: 'Marc Huemer', url: 'https://www.netzzappen.com/' }, + producer: { + name: 'Marc Huemer', + url: 'https://www.netzzappen.com/', + }, }, { regex: 'panscient\\.com', name: 'Panscient', category: 'Crawler', url: 'https://www.panscient.com/faq.htm', - producer: { name: 'Panscient, Inc.', url: 'https://www.panscient.com/' }, + producer: { + name: 'Panscient, Inc.', + url: 'https://www.panscient.com/', + }, }, { regex: 'research@pdrlabs\\.net', @@ -3237,84 +4273,117 @@ const bots = [ }, }, { - regex: 'Nicecrawler', + includes: 'Nicecrawler', name: 'NiceCrawler', category: 'Crawler', url: 'https://www.nicecrawler.com/', - producer: { name: 'Intelium Corp.', url: 'https://www.intelium.com/' }, + producer: { + name: 'Intelium Corp.', + url: 'https://www.intelium.com/', + }, }, { - regex: 't3versionsBot', + includes: 't3versionsBot', name: 't3versions', category: 'Crawler', url: 'https://www.t3versions.com/bot', - producer: { name: 'Torben Hansen', url: 'https://www.t3versions.com/' }, + producer: { + name: 'Torben Hansen', + url: 'https://www.t3versions.com/', + }, }, { - regex: 'Crawlson', + includes: 'Crawlson', name: 'Crawlson', category: 'Crawler', url: 'https://www.crawlson.com/about', - producer: { name: 'Crawlson', url: 'https://www.crawlson.com/' }, + producer: { + name: 'Crawlson', + url: 'https://www.crawlson.com/', + }, }, { - regex: 'tchelebi', + includes: 'tchelebi', name: 'tchelebi', category: 'Crawler', url: 'https://tchelebi.io/', - producer: { name: 'NormShield, Inc.', url: 'https://blackkite.com/' }, + producer: { + name: 'NormShield, Inc.', + url: 'https://blackkite.com/', + }, }, { - regex: 'JobboerseBot', + includes: 'JobboerseBot', name: 'JobboerseBot', category: 'Crawler', url: 'https://www.xing.com/jobs', - producer: { name: 'New Work SE', url: 'https://www.xing.com/' }, + producer: { + name: 'New Work SE', + url: 'https://www.xing.com/', + }, }, { - regex: 'RepoLookoutBot', + includes: 'RepoLookoutBot', name: 'Repo Lookout', category: 'Security Checker', url: 
'https://www.repo-lookout.org/', - producer: { name: 'Crissy Field GmbH', url: 'https://www.crissyfield.de/' }, + producer: { + name: 'Crissy Field GmbH', + url: 'https://www.crissyfield.de/', + }, }, { - regex: 'PATHspider', + includes: 'PATHspider', name: 'PATHspider', category: 'Security Checker', url: 'https://pathspider.net/', - producer: { name: 'MAMI Project', url: 'https://mami-project.eu/' }, + producer: { + name: 'MAMI Project', + url: 'https://mami-project.eu/', + }, }, { - regex: 'everyfeed-spider', + includes: 'everyfeed-spider', name: 'Everyfeed', url: 'https://web.archive.org/web/20050930235914/http://www.everyfeed.com/', category: 'Feed Fetcher', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Exchange check', + includes: 'Exchange check', name: 'Exchange check', category: 'Security Checker', url: 'https://github.com/GossiTheDog/scanning', - producer: { name: 'Kevin Beaumont', url: 'https://doublepulsar.com/' }, + producer: { + name: 'Kevin Beaumont', + url: 'https://doublepulsar.com/', + }, }, { - regex: 'Sublinq', + includes: 'Sublinq', name: 'Sublinq', category: 'Crawler', url: 'https://web.archive.org/web/20220626191617/https://sublinq.com/', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'Gregarius', + includes: 'Gregarius', name: 'Gregarius', category: 'Feed Fetcher', url: 'https://web.archive.org/web/20100614011837/http://devlog.gregarius.net/docs/ua/', - producer: { name: '', url: '' }, + producer: { + name: '', + url: '', + }, }, { - regex: 'COMODO DCV', + includes: 'COMODO DCV', name: 'COMODO DCV', category: 'Service Agent', url: 'https://www.comodo.com/', @@ -3328,38 +4397,53 @@ const bots = [ name: 'Sectigo DCV', category: 'Service Agent', url: 'https://sectigo.com/', - producer: { name: 'Sectigo Limited', url: 'https://sectigo.com/' }, + producer: { + name: 'Sectigo Limited', + url: 'https://sectigo.com/', + }, }, { regex: 'KlarnaBot-(?:DownloadProductImage|EnrichProducts|PriceWatcher)', name: 'KlarnaBot', category: 'Crawler', url: 'https://docs.klarna.com/klarna-bot/', - producer: { name: 'Klarna Bank AB', url: 'https://www.klarna.com/' }, + producer: { + name: 'Klarna Bank AB', + url: 'https://www.klarna.com/', + }, }, { - regex: 'Taboolabot', + includes: 'Taboolabot', name: 'Taboolabot', category: 'Crawler', url: 'https://help.taboola.com/hc/en-us/articles/115002347594-The-Taboola-Crawler', - producer: { name: 'Taboola, Inc.', url: 'https://www.taboola.com/' }, + producer: { + name: 'Taboola, Inc.', + url: 'https://www.taboola.com/', + }, }, { - regex: 'Asana', + includes: 'Asana', name: 'Asana', category: 'Crawler', url: 'https://asana.com/', - producer: { name: 'Asana, Inc.', url: 'https://asana.com/' }, + producer: { + name: 'Asana, Inc.', + url: 'https://asana.com/', + }, }, { - regex: 'Chrome Privacy Preserving Prefetch Proxy', + includes: 'Chrome Privacy Preserving Prefetch Proxy', name: 'Chrome Privacy Preserving Prefetch Proxy', category: 'Service Agent', url: 'https://developer.chrome.com/blog/private-prefetch-proxy/', - producer: { name: 'Google Inc.', url: 'https://www.google.com/' }, + producer: { + name: 'Google Inc.', + url: 'https://www.google.com/', + }, }, { - regex: 'URLinspectorBot', + includes: 'URLinspectorBot', name: 'URLinspector', category: 'Site Monitor', url: 'https://www.urlinspector.com/bot/', @@ -3369,25 +4453,34 @@ const bots = [ }, }, { - regex: 'EntferBot', + includes: 'EntferBot', name: 'Entfer', category: 'Crawler', url: 
'https://entfer.com/', - producer: { name: 'Entfer Ltd.', url: 'https://entfer.com/' }, + producer: { + name: 'Entfer Ltd.', + url: 'https://entfer.com/', + }, }, { - regex: 'TagInspector', + includes: 'TagInspector', name: 'Tag Inspector', category: 'Crawler', url: 'https://taginspector.com/', - producer: { name: 'InfoTrust, LLC', url: 'https://infotrust.com/' }, + producer: { + name: 'InfoTrust, LLC', + url: 'https://infotrust.com/', + }, }, { - regex: 'pageburst', + includes: 'pageburst', name: 'Pageburst', category: 'Crawler', url: 'https://pageburstls.elsevier.com/', - producer: { name: 'Elsevier Ltd', url: 'https://www.elsevier.com/' }, + producer: { + name: 'Elsevier Ltd', + url: 'https://www.elsevier.com/', + }, }, { regex: '.+diffbot', @@ -3400,37 +4493,46 @@ const bots = [ }, }, { - regex: 'DisqusAdstxtCrawler', + includes: 'DisqusAdstxtCrawler', name: 'Disqus', category: 'Crawler', url: 'https://help.disqus.com/en/articles/1765357-ads-txt-implementation-guide', - producer: { name: 'Disqus, Inc.', url: 'https://disqus.com/' }, + producer: { + name: 'Disqus, Inc.', + url: 'https://disqus.com/', + }, }, { - regex: 'startmebot', + includes: 'startmebot', name: 'start.me', category: 'Crawler', url: 'https://about.start.me/', - producer: { name: 'start.me BV', url: 'https://about.start.me/' }, + producer: { + name: 'start.me BV', + url: 'https://about.start.me/', + }, }, { - regex: '2ip bot', + includes: '2ip bot', name: '2ip', category: 'Crawler', url: 'https://2ip.io/', }, { - regex: 'ReqBin Curl Client', + includes: 'ReqBin Curl Client', name: 'ReqBin', category: 'Crawler', url: 'https://reqbin.com/curl', }, { - regex: 'XoviBot', + includes: 'XoviBot', name: 'XoviBot', category: 'Crawler', url: 'https://www.xovibot.net', - producer: { name: 'Xovi GmbH', url: 'http://www.xovi.de' }, + producer: { + name: 'Xovi GmbH', + url: 'http://www.xovi.de', + }, }, { regex: 'Overcast/.+Podcast Sync', @@ -3445,13 +4547,13 @@ const bots = [ url: 'https://gumgum.com/verity', }, { - regex: 'hackermention', + includes: 'hackermention', name: 'hackermention', category: 'Feed Reader', url: 'https://github.com/snarfed/hackermention', }, { - regex: 'BitSightBot', + includes: 'BitSightBot', name: 'BitSight', category: 'Security Checker', url: 'https://www.bitsight.com/', @@ -3461,7 +4563,7 @@ const bots = [ }, }, { - regex: 'Ezgif', + includes: 'Ezgif', name: 'Ezgif', category: 'Service Agent', url: 'https://ezgif.com/about', @@ -3477,7 +4579,7 @@ const bots = [ }, }, { - regex: 'FemtosearchBot', + includes: 'FemtosearchBot', name: 'Femtosearch', category: 'Crawler', url: 'http://femtosearch.com/', @@ -3487,7 +4589,7 @@ const bots = [ }, }, { - regex: 'AdsTxtCrawler/', + includes: 'AdsTxtCrawler/', name: 'AdsTxtCrawler', category: 'Crawler', url: 'https://github.com/InteractiveAdvertisingBureau/adstxtcrawler', @@ -3497,41 +4599,53 @@ const bots = [ }, }, { - regex: 'Morningscore', + includes: 'Morningscore', name: 'Morningscore Bot', category: 'Crawler', url: 'https://morningscore.io/', - producer: { name: 'Morningscore', url: 'https://morningscore.io/' }, + producer: { + name: 'Morningscore', + url: 'https://morningscore.io/', + }, }, { - regex: 'Uptime-Kuma', + includes: 'Uptime-Kuma', name: 'Uptime-Kuma', category: 'Site Monitor', url: 'https://github.com/louislam/uptime-kuma', }, { - regex: 'OAI-SearchBot', + includes: 'OAI-SearchBot', name: 'OAI-SearchBot', category: 'Crawler', url: 'https://platform.openai.com/docs/bots', - producer: { name: 'OpenAI OpCo, LLC', url: 'https://openai.com/' }, + 
producer: { + name: 'OpenAI OpCo, LLC', + url: 'https://openai.com/', + }, }, { - regex: 'GPTBot', + includes: 'GPTBot', name: 'GPTBot', category: 'Crawler', url: 'https://platform.openai.com/docs/bots', - producer: { name: 'OpenAI OpCo, LLC', url: 'https://openai.com/' }, + producer: { + name: 'OpenAI OpCo, LLC', + url: 'https://openai.com/', + }, }, { - regex: 'ChatGPT-User', + includes: 'ChatGPT-User', name: 'ChatGPT-User', category: 'Crawler', url: 'https://platform.openai.com/docs/bots', - producer: { name: 'OpenAI OpCo, LLC', url: 'https://openai.com/' }, + producer: { + name: 'OpenAI OpCo, LLC', + url: 'https://openai.com/', + }, }, { - regex: 'BrightEdge Crawler', + includes: 'BrightEdge Crawler', name: 'BrightEdge', category: 'Crawler', url: 'https://www.brightedge.com/', @@ -3541,7 +4655,7 @@ const bots = [ }, }, { - regex: 'sfFeedReader', + includes: 'sfFeedReader', name: 'sfFeedReader', url: 'https://github.com/diem-project/sfFeed2Plugin', category: 'Feed Fetcher', @@ -3551,7 +4665,10 @@ const bots = [ name: 'Cyberscan', category: 'Security Checker', url: 'https://www.cyberscan.io/', - producer: { name: 'DGC Verwaltungs GmbH', url: 'https://dgc.org/' }, + producer: { + name: 'DGC Verwaltungs GmbH', + url: 'https://dgc.org/', + }, }, { regex: 'researchscan\\.comsys\\.rwth-aachen\\.de', @@ -3564,28 +4681,37 @@ const bots = [ }, }, { - regex: 'newspaper', + includes: 'newspaper', name: 'Scraping Robot', category: 'Crawler', url: 'https://scrapingrobot.com/', - producer: { name: 'Sprious LLC', url: 'https://sprious.com/' }, + producer: { + name: 'Sprious LLC', + url: 'https://sprious.com/', + }, }, { regex: 'Ant(?:\\.com beta|Bot)', name: 'Ant', category: 'Crawler', url: 'https://www.ant.com/', - producer: { name: 'Ant.com Ltd.', url: 'https://www.ant.com/' }, + producer: { + name: 'Ant.com Ltd.', + url: 'https://www.ant.com/', + }, }, { - regex: 'WebwikiBot', + includes: 'WebwikiBot', name: 'Webwiki', category: 'Crawler', url: 'https://www.webwiki.com/', - producer: { name: 'webwiki GmbH', url: 'https://www.webwiki.com/' }, + producer: { + name: 'webwiki GmbH', + url: 'https://www.webwiki.com/', + }, }, { - regex: 'phpMyAdmin', + includes: 'phpMyAdmin', name: 'phpMyAdmin', category: 'Service Agent', url: 'https://www.phpmyadmin.net/', @@ -3595,30 +4721,39 @@ const bots = [ name: 'Matomo', category: 'Service Agent', url: 'https://github.com/matomo-org/matomo', - producer: { name: 'InnoCraft Ltd', url: 'https://matomo.org/' }, + producer: { + name: 'InnoCraft Ltd', + url: 'https://matomo.org/', + }, }, { - regex: 'Prometheus', + includes: 'Prometheus', name: 'Prometheus', category: 'Service Agent', url: 'https://github.com/prometheus/prometheus', - producer: { name: 'The Linux Foundation', url: 'https://www.cncf.io/' }, + producer: { + name: 'The Linux Foundation', + url: 'https://www.cncf.io/', + }, }, { - regex: 'ArchiveTeam ArchiveBot', + includes: 'ArchiveTeam ArchiveBot', name: 'ArchiveBot', category: 'Crawler', url: 'https://wiki.archiveteam.org/index.php?title=ArchiveBot', - producer: { name: 'ArchiveTeam', url: 'https://wiki.archiveteam.org/' }, + producer: { + name: 'ArchiveTeam', + url: 'https://wiki.archiveteam.org/', + }, }, { - regex: 'MADBbot', + includes: 'MADBbot', name: 'MADBbot', category: 'Crawler', url: 'https://madb.zapto.org/bot.html', }, { - regex: 'MeltwaterNews', + includes: 'MeltwaterNews', name: 'MeltwaterNews', category: 'Crawler', producer: { @@ -3627,7 +4762,7 @@ const bots = [ }, }, { - regex: 'owler', + includes: 'owler', name: 'OWLer', category: 
'Crawler', url: 'https://openwebsearch.eu/owler/', @@ -3641,37 +4776,48 @@ const bots = [ name: 'BBC Page Monitor', category: 'Site Monitor', url: 'https://confluence.dev.bbc.co.uk/display/men/Page+Monitor', - producer: { name: 'BBC', url: 'https://www.bbc.com/' }, + producer: { + name: 'BBC', + url: 'https://www.bbc.com/', + }, }, { - regex: 'BBC-Forge-URL-Monitor-Twisted', + includes: 'BBC-Forge-URL-Monitor-Twisted', name: 'BBC Forge URL Monitor', category: 'Site Monitor', url: 'https://www.bbc.com/', - producer: { name: 'BBC', url: 'https://www.bbc.com/' }, + producer: { + name: 'BBC', + url: 'https://www.bbc.com/', + }, }, { - regex: 'ClaudeBot', + includes: 'ClaudeBot', name: 'ClaudeBot', category: 'Crawler', url: 'https://github.com/ClaudeBot/ClaudeBot', }, { - regex: 'Imagesift', + includes: 'Imagesift', name: 'ImageSift', category: 'Crawler', url: 'https://imagesift.com/', - producer: { name: 'Castle Global, Inc.', url: 'https://thehive.ai/' }, + producer: { + name: 'Castle Global, Inc.', + url: 'https://thehive.ai/', + }, }, { - regex: 'TactiScout', + includes: 'TactiScout', name: 'TactiScout', category: 'Crawler', url: 'https://find-it.world/TempCrawl/Crawltheque.php', - producer: { name: 'Tactikast' }, + producer: { + name: 'Tactikast', + }, }, { - regex: 'Brightbot', + includes: 'Brightbot', name: 'BrightBot', category: 'Crawler', url: 'https://www.brightbot.app/', @@ -3681,66 +4827,81 @@ const bots = [ }, }, { - regex: 'DaspeedBot', + includes: 'DaspeedBot', name: 'DaspeedBot', category: 'Crawler', url: 'https://daspeed.io/', - producer: { name: 'DAWAP SARL', url: 'https://dawap.fr/' }, + producer: { + name: 'DAWAP SARL', + url: 'https://dawap.fr/', + }, }, { - regex: 'StractBot', + includes: 'StractBot', name: 'Stract', category: 'Crawler', url: 'https://stract.com/webmasters', - producer: { name: 'Stract', url: 'https://github.com/StractOrg/stract/' }, + producer: { + name: 'Stract', + url: 'https://github.com/StractOrg/stract/', + }, }, { - regex: 'GeedoBot', + includes: 'GeedoBot', name: 'GeedoBot', category: 'Crawler', url: 'https://geedo.com/bot/', }, { - regex: 'GeedoProductSearch', + includes: 'GeedoProductSearch', name: 'GeedoProductSearch', category: 'Crawler', url: 'https://geedo.com/product-search/', }, { - regex: 'BackupLand', + includes: 'BackupLand', name: 'BackupLand', category: 'Crawler', url: 'https://go.backupland.com/', - producer: { name: 'ООО «КВАРТА»', url: 'https://go.backupland.com/' }, + producer: { + name: 'ООО «КВАРТА»', + url: 'https://go.backupland.com/', + }, }, { - regex: 'Konturbot', + includes: 'Konturbot', name: 'Konturbot', category: 'Crawler', url: 'https://kontur.ru/', - producer: { name: 'АО «ПФ «СКБ Контур»', url: 'https://kontur.ru/' }, + producer: { + name: 'АО «ПФ «СКБ Контур»', + url: 'https://kontur.ru/', + }, }, { - regex: 'keys-so-bot', + includes: 'keys-so-bot', name: 'Keys.so', category: 'Crawler', url: 'https://www.keys.so/', - producer: { name: 'ООО «МОДЕСКО»', url: 'https://www.modesco.ru/' }, + producer: { + name: 'ООО «МОДЕСКО»', + url: 'https://www.modesco.ru/', + }, }, { - regex: 'LetsearchBot', + includes: 'LetsearchBot', name: 'LetSearch', category: 'Crawler', url: 'https://letsearch.ru/bots', }, { - regex: 'Example3', + includes: 'Example3', name: 'Example3', category: 'Crawler', url: 'https://www.example3.com/', }, { - regex: 'StatOnlineRuBot', + includes: 'StatOnlineRuBot', name: 'StatOnline.ru', category: 'Crawler', url: 'https://statonline.ru/', @@ -3750,55 +4911,73 @@ const bots = [ }, }, { - regex: 'Spawning-AI', 
+ includes: 'Spawning-AI', name: 'Spawning AI', category: 'Crawler', url: 'https://spawning.ai/', - producer: { name: 'Spawning, Inc', url: 'https://spawning.ai/' }, + producer: { + name: 'Spawning, Inc', + url: 'https://spawning.ai/', + }, }, { - regex: 'domain research project', + includes: 'domain research project', name: 'Domain Research Project', category: 'Crawler', url: 'https://trentwil.es/domains.html', - producer: { name: 'Trent Wiles', url: 'https://trentwil.es/' }, + producer: { + name: 'Trent Wiles', + url: 'https://trentwil.es/', + }, }, { regex: 'getodin\\.com', name: 'Odin', category: 'Security Checker', url: 'https://docs.getodin.com/', - producer: { name: 'Cyble Inc.', url: 'https://cyble.com/' }, + producer: { + name: 'Cyble Inc.', + url: 'https://cyble.com/', + }, }, { - regex: 'YouBot', + includes: 'YouBot', name: 'YouBot', category: 'Crawler', url: 'https://about.you.com/youbot/', - producer: { name: 'SuSea, Inc.', url: 'https://you.com/' }, + producer: { + name: 'SuSea, Inc.', + url: 'https://you.com/', + }, }, { - regex: 'SiteScoreBot', + includes: 'SiteScoreBot', name: 'SiteScore', category: 'Crawler', url: 'https://sitescore.ai/', }, { - regex: 'MBCrawler', + includes: 'MBCrawler', name: 'Monitor Backlinks', category: 'Crawler', url: 'https://www.seoptimer.com/monitor-backlinks/', - producer: { name: 'SEOptimer', url: 'https://www.seoptimer.com/' }, + producer: { + name: 'SEOptimer', + url: 'https://www.seoptimer.com/', + }, }, { - regex: 'mariadb-mysql-kbs-bot', + includes: 'mariadb-mysql-kbs-bot', name: 'MariaDB/MySQL Knowledge Base', category: 'Crawler', url: 'https://github.com/williamdes/mariadb-mysql-kbs', - producer: { name: 'WDES SAS', url: 'https://wdes.fr/en/' }, + producer: { + name: 'WDES SAS', + url: 'https://wdes.fr/en/', + }, }, { - regex: 'GitHubCopilotChat', + includes: 'GitHubCopilotChat', name: 'GitHubCopilotChat', category: 'Crawler', url: 'https://github.com/aaamoon/copilot-gpt4-service', @@ -3810,27 +4989,33 @@ const bots = [ url: 'https://podroll.fm', }, { - regex: 'PodUptime/', + includes: 'PodUptime/', name: 'PodUptime', category: 'Site Monitor', url: 'https://poduptime.com', }, { - regex: 'anthropic-ai', + includes: 'anthropic-ai', name: 'Anthropic AI', category: 'Crawler', url: 'https://www.anthropic.com/', - producer: { name: 'Anthropic, PBC', url: 'https://www.anthropic.com/' }, + producer: { + name: 'Anthropic, PBC', + url: 'https://www.anthropic.com/', + }, }, { - regex: 'NetpeakCheckerBot', + includes: 'NetpeakCheckerBot', name: 'Netpeak Checker', category: 'Crawler', url: 'https://netpeaksoftware.com/checker', - producer: { name: 'Netpeak LTD', url: 'https://netpeaksoftware.com/' }, + producer: { + name: 'Netpeak LTD', + url: 'https://netpeaksoftware.com/', + }, }, { - regex: 'SandobaCrawler', + includes: 'SandobaCrawler', name: 'Sandoba//Crawler', category: 'Crawler', url: 'https://www.sandoba.com/en/crawler/', @@ -3840,14 +5025,17 @@ const bots = [ }, }, { - regex: 'SirdataBot', + includes: 'SirdataBot', name: 'Sirdata', category: 'Crawler', url: 'https://semantic-api.docs.sirdata.net/contextual-api/contextual-api/introduction', - producer: { name: 'Sirdata SAS', url: 'https://www.sirdata.com/' }, + producer: { + name: 'Sirdata SAS', + url: 'https://www.sirdata.com/', + }, }, { - regex: 'CheckMarkNetwork', + includes: 'CheckMarkNetwork', name: 'CheckMark Network', category: 'Crawler', url: 'https://www.checkmarknetwork.com/spider.html/', @@ -3857,24 +5045,37 @@ const bots = [ }, }, { - regex: 'cohere-ai', + includes: 'cohere-ai', 
name: 'Cohere AI', category: 'Crawler', url: 'https://cohere.com/', - producer: { name: 'Cohere, Inc.', url: 'https://cohere.com/' }, + producer: { + name: 'Cohere, Inc.', + url: 'https://cohere.com/', + }, }, { - regex: 'PerplexityBot', + includes: 'PerplexityBot', name: 'PerplexityBot', category: 'Crawler', - url: 'https://docs.perplexity.ai/docs/perplexitybot', + url: 'https://docs.perplexity.ai/guides/bots', + producer: { + name: 'Perplexity AI, Inc.', + url: 'https://www.perplexity.ai/', + }, + }, + { + includes: 'Perplexity-User', + name: 'Perplexity-User', + category: 'Crawler', + url: 'https://docs.perplexity.ai/guides/bots', producer: { name: 'Perplexity AI, Inc.', url: 'https://www.perplexity.ai/', }, }, { - regex: 'TTD-Content', + includes: 'TTD-Content', name: 'The Trade Desk Content', category: 'Crawler', url: 'https://www.thetradedesk.com/us/ttd-content', @@ -3884,14 +5085,17 @@ const bots = [ }, }, { - regex: 'montastic-monitor', + includes: 'montastic-monitor', name: 'Montastic Monitor', category: 'Site Monitor', url: 'https://www.montastic.com/', - producer: { name: 'Metadot, Corp.', url: 'https://www.metadot.com/' }, + producer: { + name: 'Metadot, Corp.', + url: 'https://www.metadot.com/', + }, }, { - regex: 'Ruby, Twurly v', + includes: 'Ruby, Twurly v', name: 'Twurly', category: 'Crawler', url: 'https://twurly.org/', @@ -3906,9 +5110,13 @@ const bots = [ url: 'https://www.mixnode.com/', }, }, - { regex: 'CSSCheck', name: 'CSSCheck', category: 'Validator' }, { - regex: 'MicrosoftPreview', + includes: 'CSSCheck', + name: 'CSSCheck', + category: 'Validator', + }, + { + includes: 'MicrosoftPreview', name: 'Microsoft Preview', category: 'Service Agent', url: 'https://www.bing.com/webmasters/help/which-crawlers-does-bing-use-8c184ec0', @@ -3918,7 +5126,7 @@ const bots = [ }, }, { - regex: 's~virustotalcloud', + includes: 's~virustotalcloud', name: 'VirusTotal Cloud', category: 'Crawler', url: 'https://www.virustotal.com/', @@ -3928,14 +5136,17 @@ const bots = [ }, }, { - regex: 'TinEye', + includes: 'TinEye', name: 'TinEye', category: 'Crawler', url: 'https://tineye.com/', - producer: { name: 'Idée, Inc.', url: 'https://tineye.com/' }, + producer: { + name: 'Idée, Inc.', + url: 'https://tineye.com/', + }, }, { - regex: 'e~arsnova-filter-system', + includes: 'e~arsnova-filter-system', name: 'ARSNova Filter System', category: 'Crawler', url: 'https://particify.de/en/', @@ -3945,14 +5156,17 @@ const bots = [ }, }, { - regex: 'botify', + includes: 'botify', name: 'Botify', category: 'Crawler', url: 'https://www.botify.com/', - producer: { name: 'BOTIFY SAS', url: 'https://www.botify.com/' }, + producer: { + name: 'BOTIFY SAS', + url: 'https://www.botify.com/', + }, }, { - regex: 'adscanner', + includes: 'adscanner', name: 'Adscanner', category: 'Crawler', url: 'https://www.alleyesonscreens.com/', @@ -3962,31 +5176,40 @@ const bots = [ }, }, { - regex: 'online-webceo-bot', + includes: 'online-webceo-bot', name: 'WebCEO', category: 'Crawler', url: 'https://www.webceo.com/', - producer: { name: 'WebCEO, LLC', url: 'https://www.webceo.com/' }, + producer: { + name: 'WebCEO, LLC', + url: 'https://www.webceo.com/', + }, }, { - regex: 'NetTrack', + includes: 'NetTrack', name: 'NetTrack', category: 'Crawler', url: 'https://web.archive.org/web/20160607151934/https://nettrack.info/', }, { - regex: 'htmlyse', + includes: 'htmlyse', name: 'htmlyse', category: 'Crawler', url: 'https://www.htmlyse.com/', - producer: { name: 'Vistex LTD', url: 'https://www.htmlyse.com/' }, + producer: { + 
name: 'Vistex LTD', + url: 'https://www.htmlyse.com/', + }, }, { - regex: 'TrendsmapResolver', + includes: 'TrendsmapResolver', name: 'Trendsmap', category: 'Crawler', url: 'https://www.trendsmap.com/', - producer: { name: 'Trendsmap Pty Ltd', url: 'https://www.trendsmap.com/' }, + producer: { + name: 'Trendsmap Pty Ltd', + url: 'https://www.trendsmap.com/', + }, }, { regex: 'Shareaholic(?:bot)?', @@ -3999,24 +5222,30 @@ const bots = [ }, }, { - regex: 'keycdn-tools:', + includes: 'keycdn-tools:', name: 'KeyCDN Tools', category: 'Service Agent', url: 'https://tools.keycdn.com/geo', }, { - regex: 'keycdn-tools/', + includes: 'keycdn-tools/', name: 'KeyCDN Tools', category: 'Service Agent', url: 'https://tools.keycdn.com/', - producer: { name: 'proinity LLC', url: 'https://www.keycdn.com/' }, + producer: { + name: 'proinity LLC', + url: 'https://www.keycdn.com/', + }, }, { - regex: 'Arquivo-web-crawler', + includes: 'Arquivo-web-crawler', name: 'Arquivo.pt', category: 'Crawler', url: 'https://sobre.arquivo.pt/en/help/crawling-and-archiving-web-content/', - producer: { name: 'FCT|FCCN', url: 'https://www.fct.pt/' }, + producer: { + name: 'FCT|FCCN', + url: 'https://www.fct.pt/', + }, }, { regex: 'WhatsMyIP\\.org', @@ -4025,49 +5254,67 @@ const bots = [ url: 'https://www.whatsmyip.org/ua/', }, { - regex: 'SenutoBot', + includes: 'SenutoBot', name: 'Senuto', category: 'Crawler', url: 'https://www.senuto.com/', - producer: { name: 'Senuto Sp. z o.o.', url: 'https://www.senuto.com/' }, + producer: { + name: 'Senuto Sp. z o.o.', + url: 'https://www.senuto.com/', + }, }, { - regex: 'GozleBot', + includes: 'GozleBot', name: 'Gozle', category: 'Crawler', url: 'https://gozle.com.tm/en/blog/post/1', - producer: { name: 'Doly Horjun HJ', url: 'https://gozle.com.tm/' }, + producer: { + name: 'Doly Horjun HJ', + url: 'https://gozle.com.tm/', + }, }, { - regex: 'Quantcastbot', + includes: 'Quantcastbot', name: 'Quantcast', category: 'Crawler', url: 'https://www.quantcast.com/bot/', - producer: { name: 'Quantcast Corp.', url: 'https://www.quantcast.com/' }, + producer: { + name: 'Quantcast Corp.', + url: 'https://www.quantcast.com/', + }, }, { - regex: 'FontRadar', + includes: 'FontRadar', name: 'FontRadar', category: 'Crawler', url: 'https://www.fontradar.com/', - producer: { name: 'EMDASH SAS', url: 'https://www.fontradar.com/' }, + producer: { + name: 'EMDASH SAS', + url: 'https://www.fontradar.com/', + }, }, { - regex: 'ViberUrlDownloader', + includes: 'ViberUrlDownloader', name: 'Viber Url Downloader', category: 'Service Agent', url: 'https://www.viber.com/', - producer: { name: 'Viber Media S.à r.l.', url: 'https://www.viber.com/' }, + producer: { + name: 'Viber Media S.à r.l.', + url: 'https://www.viber.com/', + }, }, { regex: '^Zeno$', name: 'Zeno', category: 'Crawler', url: 'https://github.com/internetarchive/Zeno', - producer: { name: 'The Internet Archive', url: 'https://archive.org/' }, + producer: { + name: 'The Internet Archive', + url: 'https://archive.org/', + }, }, { - regex: 'Barracuda Sentinel', + includes: 'Barracuda Sentinel', name: 'Barracuda Sentinel', category: 'Service Agent', url: 'https://sentinel.barracudanetworks.com/', @@ -4077,42 +5324,57 @@ const bots = [ }, }, { - regex: 'RuxitSynthetic', + includes: 'RuxitSynthetic', name: 'RuxitSynthetic', category: 'Site Monitor', url: 'https://community.dynatrace.com/t5/Troubleshooting/Basic-Commands-for-Synthetic/ta-p/198164', - producer: { name: 'Dynatrace LLC', url: 'https://www.dynatrace.com/' }, + producer: { + name: 'Dynatrace 
LLC', + url: 'https://www.dynatrace.com/', + }, }, { - regex: 'DynatraceSynthetic', + includes: 'DynatraceSynthetic', name: 'DynatraceSynthetic', category: 'Site Monitor', url: 'https://community.dynatrace.com/t5/Troubleshooting/Basic-Commands-for-Synthetic/ta-p/198164', - producer: { name: 'Dynatrace LLC', url: 'https://www.dynatrace.com/' }, + producer: { + name: 'Dynatrace LLC', + url: 'https://www.dynatrace.com/', + }, }, { - regex: 'sitebulb', + includes: 'sitebulb', name: 'Sitebulb', category: 'Crawler', url: 'https://sitebulb.com/', - producer: { name: 'Sitebulb Limited', url: 'https://sitebulb.com/' }, + producer: { + name: 'Sitebulb Limited', + url: 'https://sitebulb.com/', + }, }, { - regex: 'Monsidobot', + includes: 'Monsidobot', name: 'Monsidobot', category: 'Crawler', url: 'https://monsido.com/bot-html', - producer: { name: 'Monsido LLC', url: 'https://monsido.com/' }, + producer: { + name: 'Monsido LLC', + url: 'https://monsido.com/', + }, }, { - regex: 'AccompanyBot', + includes: 'AccompanyBot', name: 'AccompanyBot', category: 'Crawler', url: 'https://www.accompany.com/', - producer: { name: 'Accompani, Inc', url: 'https://www.accompany.com/' }, + producer: { + name: 'Accompani, Inc', + url: 'https://www.accompany.com/', + }, }, { - regex: 'Ghost Inspector', + includes: 'Ghost Inspector', name: 'Ghost Inspector', category: 'Site Monitor', url: 'https://docs.ghostinspector.com/faq/#how-do-i-detect-ghost-inspector-test-runner-traffic-on-my-site', @@ -4122,27 +5384,33 @@ const bots = [ }, }, { - regex: 'Google-Apps-Script', + includes: 'Google-Apps-Script', name: 'Google Apps Script', category: 'Service Agent', url: 'https://www.google.com/script/start/', }, { - regex: 'SiteOne-Crawler', + includes: 'SiteOne-Crawler', name: 'SiteOne Crawler', category: 'Crawler', url: 'https://crawler.siteone.io/bot/', - producer: { name: 'SiteOne s.r.o.', url: 'https://www.siteone.io/' }, + producer: { + name: 'SiteOne s.r.o.', + url: 'https://www.siteone.io/', + }, }, { - regex: 'Detectify', + includes: 'Detectify', name: 'Detectify', category: 'Security Checker', url: 'https://support.detectify.com/support/solutions/articles/48001049001-how-to-allow-detectify-to-access-your-site', - producer: { name: 'Detectify AB', url: 'https://detectify.com/' }, + producer: { + name: 'Detectify AB', + url: 'https://detectify.com/', + }, }, { - regex: 'DomCopBot', + includes: 'DomCopBot', name: 'DomCop Bot', category: 'Crawler', url: 'https://www.domcop.com/bot', @@ -4152,26 +5420,29 @@ const bots = [ }, }, { - regex: 'Paqlebot', + includes: 'Paqlebot', name: 'Paqlebot', category: 'Crawler', url: 'https://www.paqle.dk/about/paqlebot', - producer: { name: 'Paqle A/S', url: 'https://www.paqle.dk/' }, + producer: { + name: 'Paqle A/S', + url: 'https://www.paqle.dk/', + }, }, { - regex: 'Wibybot', + includes: 'Wibybot', name: 'Wibybot', category: 'Crawler', url: 'https://www.wiby.me/', }, { - regex: 'Synapse', + includes: 'Synapse', name: 'Synapse', category: 'Crawler', url: 'https://github.com/matrix-org/synapse', }, { - regex: 'OSZKbot', + includes: 'OSZKbot', name: 'OSZKbot', category: 'Crawler', url: 'http://mekosztaly.oszk.hu/mia/', @@ -4181,73 +5452,112 @@ const bots = [ }, }, { - regex: 'ZoomBot', + includes: 'ZoomBot', name: 'ZoomBot', category: 'Crawler', url: 'https://suite.seozoom.it/bot.html', - producer: { name: 'SEO Cube S.r.l.', url: 'https://www.seocube.it/' }, + producer: { + name: 'SEO Cube S.r.l.', + url: 'https://www.seocube.it/', + }, }, { - regex: 'RavenCrawler', + includes: 
'RavenCrawler', name: 'RavenCrawler', category: 'Crawler', url: 'https://raventools.com/site-auditor/', - producer: { name: 'TapClicks, Inc.', url: 'https://www.tapclicks.com/' }, + producer: { + name: 'TapClicks, Inc.', + url: 'https://www.tapclicks.com/', + }, }, { - regex: 'KadoBot', + includes: 'KadoBot', name: 'KadoBot', category: 'Crawler', url: 'https://www.kadolijst.nl/bot', - producer: { name: 'Kadolijst', url: 'https://www.kadolijst.nl/' }, + producer: { + name: 'Kadolijst', + url: 'https://www.kadolijst.nl/', + }, }, { - regex: 'Dubbotbot', + includes: 'Dubbotbot', name: 'Dubbotbot', category: 'Crawler', url: 'https://help.dubbot.com/en/articles/6746594-example-custom-user-agent', - producer: { name: 'DubBot', url: 'https://dubbot.com/' }, + producer: { + name: 'DubBot', + url: 'https://dubbot.com/', + }, }, { - regex: 'Swiftbot', + includes: 'Swiftbot', name: 'Swiftbot', category: 'Crawler', url: 'https://swiftype.com/swiftbot', - producer: { name: 'Elasticsearch, B.V.', url: 'https://www.elastic.co/' }, + producer: { + name: 'Elasticsearch, B.V.', + url: 'https://www.elastic.co/', + }, }, { - regex: 'EyeMonIT', + includes: 'EyeMonIT', name: 'EyeMonit', category: 'Site Monitor', url: 'https://eyemonit.com/', - producer: { name: 'EyeMonit', url: 'https://eyemonit.com/' }, + producer: { + name: 'EyeMonit', + url: 'https://eyemonit.com/', + }, }, { - regex: 'ThousandEyes', + includes: 'ThousandEyes', name: 'ThousandEyes', category: 'Site Monitor', url: 'https://www.thousandeyes.com/', - producer: { name: 'Cisco Systems, Inc.', url: 'https://www.cisco.com/' }, + producer: { + name: 'Cisco Systems, Inc.', + url: 'https://www.cisco.com/', + }, + }, + { + includes: 'OmtrBot', + name: 'OmtrBot', + category: 'Site Monitor', + }, + { + includes: 'WebMon', + name: 'WebMon', + category: 'Site Monitor', + }, + { + includes: 'AdsTxtCrawlerTP', + name: 'AdsTxtCrawlerTP', + category: 'Crawler', }, - { regex: 'OmtrBot', name: 'OmtrBot', category: 'Site Monitor' }, - { regex: 'WebMon', name: 'WebMon', category: 'Site Monitor' }, - { regex: 'AdsTxtCrawlerTP', name: 'AdsTxtCrawlerTP', category: 'Crawler' }, { - regex: 'fragFINN', + includes: 'fragFINN', name: 'fragFINN', category: 'Crawler', url: 'https://www.fragfinn.de/', - producer: { name: 'fragFINN e.V.', url: 'https://www.fragfinn.de/' }, + producer: { + name: 'fragFINN e.V.', + url: 'https://www.fragfinn.de/', + }, }, { - regex: 'Clickagy', + includes: 'Clickagy', name: 'Clickagy', category: 'Crawler', url: 'https://www.clickagy.com/', - producer: { name: 'Clickagy, LLC', url: 'https://www.clickagy.com/' }, + producer: { + name: 'Clickagy, LLC', + url: 'https://www.clickagy.com/', + }, }, { - regex: 'kiwitcms-gitops', + includes: 'kiwitcms-gitops', name: 'Kiwi TCMS GitOps', category: 'Service Agent', url: 'https://kiwitcms.org', @@ -4257,20 +5567,23 @@ const bots = [ }, }, { - regex: 'webtru_crawler', + includes: 'webtru_crawler', name: 'webtru', category: 'Crawler', url: 'https://webtru.io/', - producer: { name: 'DataSign Inc.', url: 'https://datasign.jp/' }, + producer: { + name: 'DataSign Inc.', + url: 'https://datasign.jp/', + }, }, { - regex: 'URLSuMaBot', + includes: 'URLSuMaBot', name: 'URLSuMaBot', category: 'Crawler', url: 'https://www.urlsuma.de/', }, { - regex: '360JK yunjiankong', + includes: '360JK yunjiankong', name: '360JK', category: 'Site Monitor', url: 'http://jk.cloud.360.cn/', @@ -4280,7 +5593,7 @@ const bots = [ }, }, { - regex: 'UCSBNetworkMeasurement', + includes: 'UCSBNetworkMeasurement', name: 'UCSB Network 
Measurement', category: 'Crawler', url: 'https://www.it.ucsb.edu/', @@ -4290,7 +5603,7 @@ const bots = [ }, }, { - regex: 'Plesk screenshot bot', + includes: 'Plesk screenshot bot', name: 'Plesk Screenshot Service', category: 'Service Agent', url: 'https://support.plesk.com/hc/en-us/articles/13302778306199-What-is-Plesk-Screenshot-Service', @@ -4306,7 +5619,7 @@ const bots = [ url: 'https://who.is/', }, { - regex: 'Probely', + includes: 'Probely', name: 'Probely', category: 'Security Checker', url: 'https://probely.com/sos/', @@ -4316,35 +5629,47 @@ const bots = [ }, }, { - regex: 'Uptimia', + includes: 'Uptimia', name: 'Uptimia', category: 'Site Monitor', url: 'https://www.uptimia.com/', - producer: { name: 'JJ Online GmbH', url: 'https://www.uptimia.com/' }, + producer: { + name: 'JJ Online GmbH', + url: 'https://www.uptimia.com/', + }, }, { - regex: '2GDPR', + includes: '2GDPR', name: '2GDPR', category: 'Service Agent', url: 'https://2gdpr.com/tos', - producer: { name: '2GDPR', url: 'https://2gdpr.com/' }, + producer: { + name: '2GDPR', + url: 'https://2gdpr.com/', + }, }, { regex: 'abuse\\.xmco\\.fr', name: 'Serenety', category: 'Security Checker', url: 'https://abuse.xmco.fr/', - producer: { name: 'XMCO, SASU', url: 'https://www.xmco.fr/' }, + producer: { + name: 'XMCO, SASU', + url: 'https://www.xmco.fr/', + }, }, { - regex: 'CheckHost', + includes: 'CheckHost', name: 'CheckHost', category: 'Site Monitor', url: 'https://check-host.net/', - producer: { name: 'CheckHost', url: 'https://check-host.net/' }, + producer: { + name: 'CheckHost', + url: 'https://check-host.net/', + }, }, { - regex: 'LAC_IAHarvester', + includes: 'LAC_IAHarvester', name: 'LAC IA Harvester', category: 'Crawler', url: 'https://library-archives.canada.ca/eng/services/government-canada/web-social-media-preservation-program/Pages/web-archive.aspx', @@ -4354,25 +5679,34 @@ const bots = [ }, }, { - regex: 'InsytfulBot', + includes: 'InsytfulBot', name: 'InsytfulBot', category: 'Crawler', url: 'https://www.insytful.com/', - producer: { name: 'Zengenti Limited', url: 'https://www.zengenti.com/' }, + producer: { + name: 'Zengenti Limited', + url: 'https://www.zengenti.com/', + }, }, { regex: 'statista\\.com', name: 'Statista', category: 'Crawler', url: 'https://www.statista.com/', - producer: { name: 'Statista, Inc.', url: 'https://www.statista.com/' }, + producer: { + name: 'Statista, Inc.', + url: 'https://www.statista.com/', + }, }, { - regex: 'SubstackContentFetch', + includes: 'SubstackContentFetch', name: 'Substack Content Fetch', category: 'Crawler', url: 'https://substack.com/', - producer: { name: 'Substack, Inc.', url: 'https://substack.com/' }, + producer: { + name: 'Substack, Inc.', + url: 'https://substack.com/', + }, }, { regex: '^ds9', @@ -4389,30 +5723,39 @@ const bots = [ name: 'LiveJournal', url: 'https://www.livejournal.com/', category: 'Feed Fetcher', - producer: { name: 'ООО "СИМ"', url: 'https://www.livejournal.com/' }, + producer: { + name: 'ООО "СИМ"', + url: 'https://www.livejournal.com/', + }, }, { - regex: 'bitdiscovery', + includes: 'bitdiscovery', name: 'Tenable.asm', category: 'Security Checker', url: 'https://bitdiscovery.com/', - producer: { name: 'Tenable, Inc.', url: 'https://www.tenable.com/' }, + producer: { + name: 'Tenable, Inc.', + url: 'https://www.tenable.com/', + }, }, { - regex: 'Castopod', + includes: 'Castopod', name: 'Castopod', category: 'Crawler', url: 'https://www.castopod.org/', }, { - regex: 'Elastic/Synthetics', + includes: 'Elastic/Synthetics', name: 'Elastic 
Synthetics', category: 'Site Monitor', url: 'https://github.com/elastic/synthetics', - producer: { name: 'Elasticsearch B.V.', url: 'https://www.elastic.co/' }, + producer: { + name: 'Elasticsearch B.V.', + url: 'https://www.elastic.co/', + }, }, { - regex: 'WDG_Validator', + includes: 'WDG_Validator', name: 'WDG HTML Validator', category: 'Validator', url: 'http://www.htmlhelp.com/tools/validator/', @@ -4424,38 +5767,41 @@ const bots = [ url: 'https://web.archive.org/web/20180910002802/http://www.aegis.network/', }, { - regex: 'CrawlyProjectCrawler', + includes: 'CrawlyProjectCrawler', name: 'Crawly Project', category: 'Crawler', url: 'https://web.archive.org/web/20240326141952/https://crawlyproject.digitaldragon.dev/', }, { - regex: 'BDFetch', + includes: 'BDFetch', name: 'BDFetch', category: 'Crawler', url: 'https://web.archive.org/web/20130821043949/http://www.branddimensions.com/', }, { - regex: 'PunkMap', + includes: 'PunkMap', name: 'Punk Map', category: 'Security Checker', url: 'https://github.com/openeasm/punkmap', }, { - regex: 'GenomeCrawlerd', + includes: 'GenomeCrawlerd', name: 'Deepfield Genome', category: 'Crawler', url: 'https://www.nokia.com/networks/ip-networks/deepfield/genome/', - producer: { name: 'Nokia Corporation', url: 'https://www.nokia.com/' }, + producer: { + name: 'Nokia Corporation', + url: 'https://www.nokia.com/', + }, }, { - regex: 'Gaisbot', + includes: 'Gaisbot', name: 'Gaisbot', category: 'Crawler', url: 'https://web.archive.org/web/20090604121511/https://gais.cs.ccu.edu.tw/robot.php', }, { - regex: 'FAST-WebCrawler', + includes: 'FAST-WebCrawler', name: 'AlltheWeb', category: 'Crawler', url: 'https://web.archive.org/web/20041020050801/http://www.alltheweb.com/help/webmaster/crawler', @@ -4467,7 +5813,7 @@ const bots = [ url: 'https://ducks.party/', }, { - regex: 'DepSpid', + includes: 'DepSpid', name: 'DepSpid', category: 'Crawler', url: 'https://web.archive.org/web/20080321224033/http://about.depspid.net/', @@ -4477,48 +5823,66 @@ const bots = [ name: 'Website-info', category: 'Crawler', url: 'https://website-info.net/robot', - producer: { name: 'Meins und Vogel GmbH', url: 'https://muv.com/' }, + producer: { + name: 'Meins und Vogel GmbH', + url: 'https://muv.com/', + }, }, { - regex: 'RedekenBot', + includes: 'RedekenBot', name: 'RedekenBot', category: 'Crawler', url: 'https://www.redeken.com/en/help/bot.html', - producer: { name: 'Redeken', url: 'https://www.redeken.com/' }, + producer: { + name: 'Redeken', + url: 'https://www.redeken.com/', + }, }, { - regex: 'semaltbot', + includes: 'semaltbot', name: 'semaltbot', category: 'Crawler', url: 'https://semalt.net/', - producer: { name: 'Semalt LP', url: 'https://semalt.net/' }, + producer: { + name: 'Semalt LP', + url: 'https://semalt.net/', + }, }, { - regex: 'MakeMerryBot', + includes: 'MakeMerryBot', name: 'MakeMerryBot', category: 'Crawler', url: 'https://makemerry.app/bots', }, { - regex: 'Timpibot', + includes: 'Timpibot', name: 'Timpibot', category: 'Crawler', url: 'https://timpi.io/', - producer: { name: 'Timpi Inc.', url: 'https://timpi.io/' }, + producer: { + name: 'Timpi Inc.', + url: 'https://timpi.io/', + }, }, { - regex: 'Validbot', + includes: 'Validbot', name: 'ValidBot', category: 'Crawler', url: 'https://www.validbot.com/', - producer: { name: 'Jake Olefsky LLC', url: 'https://www.validbot.com/' }, + producer: { + name: 'Jake Olefsky LLC', + url: 'https://www.validbot.com/', + }, }, { - regex: 'NPBot', + includes: 'NPBot', name: 'NameProtectBot', category: 'Crawler', url: 
'https://www.cscglobal.com/cscglobal/home/', - producer: { name: 'NameProtect, Inc.', url: 'https://www.cscglobal.com/' }, + producer: { + name: 'NameProtect, Inc.', + url: 'https://www.cscglobal.com/', + }, }, { regex: 'domaincodex\\.com', @@ -4531,11 +5895,14 @@ const bots = [ }, }, { - regex: 'Swisscows Favicons', + includes: 'Swisscows Favicons', name: 'Swisscows Favicons', category: 'Crawler', url: 'https://swisscows.com/', - producer: { name: 'Swisscows AG', url: 'https://swisscows.com/' }, + producer: { + name: 'Swisscows AG', + url: 'https://swisscows.com/', + }, }, { regex: 'leak\\.info', @@ -4544,42 +5911,57 @@ const bots = [ url: 'http://www.leak.info/', }, { - regex: 'workona', + includes: 'workona', name: 'Workona', category: 'Crawler', url: 'https://workona.com/', - producer: { name: 'Workona, Inc.', url: 'https://workona.com/' }, + producer: { + name: 'Workona, Inc.', + url: 'https://workona.com/', + }, }, { - regex: 'Bloglines', + includes: 'Bloglines', name: 'Bloglines', category: 'Crawler', url: 'https://web.archive.org/web/20140309033202/http://www.bloglines.com/', - producer: { name: 'Reply!, Inc.', url: 'https://www.reply.com/' }, + producer: { + name: 'Reply!, Inc.', + url: 'https://www.reply.com/', + }, }, { - regex: 'heritrix', + includes: 'heritrix', name: 'Heritrix', category: 'Crawler', url: 'https://webarchive.jira.com/wiki/display/Heritrix/Heritrix', - producer: { name: 'The Internet Archive', url: 'https://archive.org' }, + producer: { + name: 'The Internet Archive', + url: 'https://archive.org', + }, }, { regex: 'search\\.marginalia\\.nu', name: 'Marginalia', category: 'Crawler', url: 'https://www.marginalia.nu/marginalia-search/for-webmasters/', - producer: { name: 'Marginalia', url: 'https://www.marginalia.nu/' }, + producer: { + name: 'Marginalia', + url: 'https://www.marginalia.nu/', + }, }, { - regex: 'vu-server-health-scanner', + includes: 'vu-server-health-scanner', name: 'VU Server Health Scanner', category: 'Security Checker', url: 'https://130.37.198.75/index.html', - producer: { name: 'VU Amsterdam', url: 'https://vu.nl/en' }, + producer: { + name: 'VU Amsterdam', + url: 'https://vu.nl/en', + }, }, { - regex: 'Functionize', + includes: 'Functionize', name: 'Functionize', category: 'Crawler', url: 'https://www.functionize.com/', @@ -4589,35 +5971,47 @@ const bots = [ }, }, { - regex: 'Prerender', + includes: 'Prerender', name: 'Prerender', category: 'Crawler', url: 'https://docs.prerender.io/docs/33-overview-of-prerender-crawlers', - producer: { name: 'saas.group Inc.', url: 'https://saas.group/' }, + producer: { + name: 'saas.group Inc.', + url: 'https://saas.group/', + }, }, { regex: 'bl\\.uk_ldfc_bot', name: 'The British Library Legal Deposit Bot', category: 'Crawler', url: 'https://www.bl.uk/', - producer: { name: 'The British Library', url: 'https://www.bl.uk/' }, + producer: { + name: 'The British Library', + url: 'https://www.bl.uk/', + }, }, { regex: 'Miniature\\.io', name: 'Miniature.io', category: 'Service Agent', url: 'https://miniature.io/', - producer: { name: 'LCX Ventures Ltd', url: 'https://www.lcxventures.com/' }, + producer: { + name: 'LCX Ventures Ltd', + url: 'https://www.lcxventures.com/', + }, }, { - regex: 'Convertify', + includes: 'Convertify', name: 'Convertify', category: 'Service Agent', url: 'https://www.convertify.app/', - producer: { name: 'Convertify', url: 'https://www.convertify.app/' }, + producer: { + name: 'Convertify', + url: 'https://www.convertify.app/', + }, }, { - regex: 'ZoteroTranslationServer', + includes: 
'ZoteroTranslationServer', name: 'Zotero Translation Server', category: 'Service Agent', url: 'https://github.com/wikimedia/mediawiki-services-zotero', @@ -4627,20 +6021,23 @@ const bots = [ }, }, { - regex: 'MuckRack', + includes: 'MuckRack', name: 'MuckRack', category: 'Crawler', url: 'https://muckrack.com/', - producer: { name: 'Muck Rack, LLC', url: 'https://muckrack.com/' }, + producer: { + name: 'Muck Rack, LLC', + url: 'https://muckrack.com/', + }, }, { - regex: 'Golfe', + includes: 'Golfe', name: 'Golfe', category: 'Crawler', url: 'http://www.goo-olfe.ae/bot.html', }, { - regex: 'SpiderLing', + includes: 'SpiderLing', name: 'SpiderLing', category: 'Crawler', url: 'https://nlp.fi.muni.cz/projects/biwec/', @@ -4650,20 +6047,23 @@ const bots = [ }, }, { - regex: 'Bravebot', + includes: 'Bravebot', name: 'Bravebot', category: 'Search bot', url: 'https://search.brave.com/help/brave-search-crawler', - producer: { name: 'Brave Software, Inc.', url: 'https://brave.com/' }, + producer: { + name: 'Brave Software, Inc.', + url: 'https://brave.com/', + }, }, { - regex: '1001FirmsBot', + includes: '1001FirmsBot', name: '1001FirmsBot', category: 'Crawler', url: 'https://www.1001firms.com/1001firmsbot.php', }, { - regex: 'SteamChatURLLookup', + includes: 'SteamChatURLLookup', name: 'Steam Chat URL Lookup', category: 'Service Agent', url: 'https://help.steampowered.com/en/faqs/view/595C-42F4-3B66-E02F', @@ -4677,37 +6077,49 @@ const bots = [ name: 'Oh Dear', category: 'Site Monitor', url: 'https://ohdear.app/docs/faq/what-is-the-oh-dear-crawler-doing-in-my-logs', - producer: { name: 'Immutable, SNC', url: 'https://ohdear.app/' }, + producer: { + name: 'Immutable, SNC', + url: 'https://ohdear.app/', + }, }, { - regex: 'Inspici', + includes: 'Inspici', name: 'Inspici', category: 'Crawler', url: 'https://www.inspici.com/', - producer: { name: 'Inspici, LLC', url: 'https://www.inspici.com/' }, + producer: { + name: 'Inspici, LLC', + url: 'https://www.inspici.com/', + }, }, { - regex: 'peer39_crawler', + includes: 'peer39_crawler', name: 'Peer39', category: 'Crawler', url: 'https://www.peer39.com/crawler-notice', - producer: { name: 'Peer39 Tech, LLC', url: 'https://www.peer39.com/' }, + producer: { + name: 'Peer39 Tech, LLC', + url: 'https://www.peer39.com/', + }, }, { - regex: 'Pandalytics', + includes: 'Pandalytics', name: 'Pandalytics', category: 'Crawler', url: 'https://www.domainsbot.com/business-intelligence/', - producer: { name: 'DomainsBot, Inc.', url: 'https://www.domainsbot.com/' }, + producer: { + name: 'DomainsBot, Inc.', + url: 'https://www.domainsbot.com/', + }, }, { - regex: 'CloudServerMarketSpider', + includes: 'CloudServerMarketSpider', name: 'CloudServerMarketSpider', category: 'Crawler', url: 'https://web.archive.org/web/20151228225429/https://cloudservermarket.com/spider.html', }, { - regex: 'Pigafetta', + includes: 'Pigafetta', name: 'Pigafetta', category: 'Crawler', url: 'https://visual-seo.com/Pigafetta-Bot', @@ -4717,7 +6129,7 @@ const bots = [ }, }, { - regex: 'Cotoyogi', + includes: 'Cotoyogi', name: 'Cotoyogi', category: 'Crawler', url: 'https://ds.rois.ac.jp/center8/crawler/', @@ -4727,33 +6139,39 @@ const bots = [ }, }, { - regex: 'SuggestBot', + includes: 'SuggestBot', name: 'SuggestBot', category: 'Crawler', url: 'https://github.com/nettrom/suggestbot', }, { - regex: 'cms-experiment', + includes: 'cms-experiment', name: 'CMS Experiment', category: 'Security Checker', url: 'https://securitee.org/cms-experiment-fall2024/', }, { - regex: 'SiteCheckerBotCrawler', + 
includes: 'SiteCheckerBotCrawler', name: 'SiteCheckerBotCrawler', category: 'Crawler', url: 'https://sitechecker.pro/', - producer: { name: 'Cyber Circus Limited', url: 'https://sitechecker.pro/' }, + producer: { + name: 'Cyber Circus Limited', + url: 'https://sitechecker.pro/', + }, }, { - regex: 'SBIder', + includes: 'SBIder', name: 'SBIder', category: 'Crawler', url: 'https://www.sitesell.com/sbider.html', - producer: { name: 'SiteSell Inc.', url: 'https://www.sitesell.com/' }, + producer: { + name: 'SiteSell Inc.', + url: 'https://www.sitesell.com/', + }, }, { - regex: 'LightspeedSystemsCrawler', + includes: 'LightspeedSystemsCrawler', name: 'LightspeedSystemsCrawler', category: 'Crawler', url: 'https://www.lightspeedsystems.com/', @@ -4763,7 +6181,7 @@ const bots = [ }, }, { - regex: 'Research JLU', + includes: 'Research JLU', name: 'Research JLU', category: 'Crawler', url: 'https://www.uni-giessen.de/en/research', @@ -4783,24 +6201,30 @@ const bots = [ }, }, { - regex: 'WPMU DEV', + includes: 'WPMU DEV', name: 'WPMU DEV', category: 'Crawler', url: 'https://wpmudev.com/docs/wpmu-dev-plugins/broken-link-checker/#broken-link-checker-user-agent', - producer: { name: 'Incsub, LLC.', url: 'https://incsub.com/' }, + producer: { + name: 'Incsub, LLC.', + url: 'https://incsub.com/', + }, }, { - regex: 'SnoopSecInspect', + includes: 'SnoopSecInspect', name: 'SnoopSecInspect', category: 'Security Checker', url: 'https://web.archive.org/web/20241206193253/https://snoopsec.us.to/', }, { - regex: 'ModatScanner', + includes: 'ModatScanner', name: 'ModatScanner', category: 'Security Checker', url: 'https://www.modat.io/scanning', - producer: { name: 'Modat B.V.', url: 'https://www.modat.io/' }, + producer: { + name: 'Modat B.V.', + url: 'https://www.modat.io/', + }, }, { regex: 'researchcyber\\.net', @@ -4809,7 +6233,7 @@ const bots = [ url: 'https://web.archive.org/web/20241219082407/https://researchcyber.net/', }, { - regex: 'CrystalSemanticsBot', + includes: 'CrystalSemanticsBot', name: 'CrystalSemanticsBot', category: 'Crawler', url: 'https://web.archive.org/web/20121230203310/http://www.crystalsemantics.com/user-agent/', @@ -4825,7 +6249,7 @@ const bots = [ url: 'https://najdu.s.holubem.eu/', }, { - regex: 'VORTEX/', + includes: 'VORTEX/', name: 'VORTEX', category: 'Crawler', url: 'https://marty.anstey.ca/robots/vortex', @@ -4837,7 +6261,7 @@ const bots = [ url: 'https://github.com/babycoff/xtate', }, { - regex: 'FediList Agent/', + includes: 'FediList Agent/', name: 'FediList', category: 'Social Media Agent', url: 'https://fedilist.com/', @@ -4847,10 +6271,13 @@ const bots = [ name: 'Grafana', category: 'Site Monitor', url: 'https://github.com/grafana/grafana', - producer: { name: 'Grafana Labs', url: 'https://grafana.com/' }, + producer: { + name: 'Grafana Labs', + url: 'https://grafana.com/', + }, }, { - regex: 'github-camo', + includes: 'github-camo', name: 'Github Camo', category: 'Crawler', url: 'https://github.com/atmos/camo', @@ -4860,21 +6287,27 @@ const bots = [ }, }, { - regex: 'Bluesky', + includes: 'Bluesky', name: 'Bluesky', category: 'Social Media Agent', url: 'https://bsky.app', - producer: { name: 'Bluesky Social PBC', url: 'https://bsky.app' }, + producer: { + name: 'Bluesky Social PBC', + url: 'https://bsky.app', + }, }, { regex: 'OpenGraph\\.io', name: 'OpenGraph.io', category: 'Crawler', url: 'https://www.opengraph.io', - producer: { name: 'OpenGraph.io', url: 'https://www.opengraph.io' }, + producer: { + name: 'OpenGraph.io', + url: 'https://www.opengraph.io', + }, }, { - 
regex: 'microsoft-flow/', + includes: 'microsoft-flow/', name: 'Microsoft Power Automate', category: 'Service Agent', url: 'https://www.microsoft.com/en-us/power-platform/products/power-automate', @@ -4883,6 +6316,42 @@ const bots = [ url: 'https://www.microsoft.com/', }, }, + { + includes: 'Simbiat Software', + name: 'Simbiat Software', + category: 'Crawler', + url: 'https://www.simbiat.eu', + producer: { + name: 'Simbiat Software', + url: 'https://www.simbiat.eu', + }, + }, + { + includes: 'IbouBot', + name: 'IbouBot', + category: 'Search bot', + url: 'https://ibou.io/iboubot.html', + }, + { + includes: 'AddSearchBot', + name: 'AddSearchBot', + category: 'Crawler', + url: 'https://www.addsearch.com/docs/indexing/whitelisting-addsearch-bot/', + producer: { + name: 'AddSearch Oy', + url: 'https://www.addsearch.com/', + }, + }, + { + includes: 'TerraCotta', + name: 'TerraCotta', + category: 'Crawler', + url: 'https://github.com/CeramicTeam/CeramicTerracotta', + producer: { + name: 'Ceramic, Inc.', + url: 'https://ceramic.ai/', + }, + }, { regex: 'nuhk|grub-client|Download Demon|SearchExpress|Microsoft URL Control|borg|altavista|dataminr\\.com|teoma|oegp|http%20client|htdig|mogimogi|larbin|scrubby|searchsight|semanticdiscovery|snappy|zeal(?!ot)|dataparksearch|findlinks|BrowserMob|URL2PNG|ZooShot|GomezA|Google SketchUp|Read%20Later|7Siters|centuryb\\.o\\.t9|InterNaetBoten|EasyBib AutoCite|Bidtellect|tomnomnom/meg|cortex|Re-re Studio|adreview|AHC/|NameOfAgent|Request-Promise|ALittle Client|Hello,? world|wp_is_mobile|0xAbyssalDoesntExist|Anarchy99|^revolt|nvd0rz|xfa1|Hakai|gbrmss|fuck-your-hp|IDBTE4M CODE87|Antoine|Insomania|Hells-Net|b3astmode|Linux Gnu \\(cow\\)|Test Certificate Info|iplabel|Magellan|TheSafex?Internetx?Search|Searcherx?web|kirkland-signature|LinkChain|survey-security-dot-txt|infrawatch|Time/|r00ts3c-owned-you|nvdorz|Root Slut|NiggaBalls|BotPoke|GlobalWebSearch|xx032_bo9vs83_2a|sslshed|geckotrail|Wordup|Keydrop|\\(compatible\\)|John Recon|SPARK COMMIT|masjesu|Komaru_The_Cat|Jesus Christ of Nazareth is LORD|Kowai|Hakai|LoliSec|LMAO|^xenu|^(?:chrome|firefox|Abcd|Dark|KvshClient|Node.js|Report Runner|url|Zeus|ZmEu)$|OnlyScans|TheInternetSearchx', @@ -4890,7 +6359,7 @@ const bots = [ }, { regex: - '[a-z0-9_-]*(?:(? 
{ + if ('regex' in bot) { + return { + ...bot, + compiledRegex: new RegExp(bot.regex), + }; + } + return bot; +}); + +const regexBots = compiledBots.filter((bot) => 'compiledRegex' in bot); +const includesBots = compiledBots.filter((bot) => 'includes' in bot); + +// Common legitimate browser patterns - if UA matches these, it's very likely a real browser +// This provides ultra-fast early exit for ~95% of real traffic +const legitimateBrowserPatterns = [ + 'Mozilla/5.0', // Nearly all modern browsers + 'Chrome/', // Chrome/Chromium browsers + 'Safari/', // Safari and Chrome-based browsers + 'Firefox/', // Firefox + 'Edg/', // Edge +]; + +const mobilePatterns = ['iPhone', 'Android', 'iPad']; + +const desktopOSPatterns = ['Windows NT', 'Macintosh', 'X11; Linux']; + export function isBot(ua: string) { - const res = bots.find((bot) => { - if (new RegExp(bot.regex).test(ua)) { - return true; + // Ultra-fast early exit: check if this looks like a legitimate browser + // Real browsers typically have Mozilla/5.0 + browser name + OS + if (ua.includes('Mozilla/5.0')) { + // Check for browser signature + const hasBrowser = legitimateBrowserPatterns.some((pattern) => + ua.includes(pattern), + ); + + // Check for OS signature (mobile or desktop) + const hasOS = + mobilePatterns.some((pattern) => ua.includes(pattern)) || + desktopOSPatterns.some((pattern) => ua.includes(pattern)); + + // If it has Mozilla/5.0, a known browser, and an OS, it's very likely legitimate + if (hasBrowser && hasOS) { + return null; + } + } + + // Check simple string patterns first (fast) + for (const bot of includesBots) { + if (ua.includes(bot.includes)) { + return { + name: bot.name, + type: 'category' in bot ? bot.category : 'Unknown', + }; } - return false; - }); + } - if (!res) { - return null; + // Check regex patterns (slower) + for (const bot of regexBots) { + if (bot.compiledRegex.test(ua)) { + return { + name: bot.name, + type: 'category' in bot ? bot.category : 'Unknown', + }; + } } - return { - name: res.name, - type: 'category' in res ? res.category : 'Unknown', - }; + return null; } diff --git a/apps/api/src/controllers/event.controller.ts b/apps/api/src/controllers/event.controller.ts index b758a5b1c..916e4cf95 100644 --- a/apps/api/src/controllers/event.controller.ts +++ b/apps/api/src/controllers/event.controller.ts @@ -3,10 +3,9 @@ import type { FastifyReply, FastifyRequest } from 'fastify'; import { generateDeviceId, parseUserAgent } from '@openpanel/common/server'; import { getSalts } from '@openpanel/db'; -import { eventsGroupQueue } from '@openpanel/queue'; +import { getEventsGroupQueueShard } from '@openpanel/queue'; import type { PostEventPayload } from '@openpanel/sdk'; -import { checkDuplicatedEvent } from '@/utils/deduplicate'; import { generateId } from '@openpanel/common'; import { getGeoLocation } from '@openpanel/geo'; import { getStringHeaders, getTimestamp } from './track.controller'; @@ -45,28 +44,22 @@ export async function postEvent( ua, }); - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - timestamp, - previousDeviceId, - currentDeviceId, - }, - projectId, - }) - ) { - return; - } - const uaInfo = parseUserAgent(ua, request.body?.properties); const groupId = uaInfo.isServer ? request.body?.profileId ? 
`${projectId}:${request.body?.profileId}` : `${projectId}:${generateId()}` : currentDeviceId; - await eventsGroupQueue.add({ + const jobId = [ + request.body.name, + timestamp, + projectId, + currentDeviceId, + groupId, + ] + .filter(Boolean) + .join('-'); + await getEventsGroupQueueShard(groupId).add({ orderMs: new Date(timestamp).getTime(), data: { projectId, @@ -76,11 +69,13 @@ export async function postEvent( timestamp, isTimestampFromThePast, }, + uaInfo, geo, currentDeviceId, previousDeviceId, }, groupId, + jobId, }); reply.status(202).send('ok'); diff --git a/apps/api/src/controllers/live.controller.ts b/apps/api/src/controllers/live.controller.ts index 32b263097..29931aa84 100644 --- a/apps/api/src/controllers/live.controller.ts +++ b/apps/api/src/controllers/live.controller.ts @@ -4,7 +4,7 @@ import superjson from 'superjson'; import type { WebSocket } from '@fastify/websocket'; import { eventBuffer, - getProfileByIdCached, + getProfileById, transformMinimalEvent, } from '@openpanel/db'; import { setSuperJson } from '@openpanel/json'; @@ -92,10 +92,7 @@ export async function wsProjectEvents( type, async (event) => { if (event.projectId === params.projectId) { - const profile = await getProfileByIdCached( - event.profileId, - event.projectId, - ); + const profile = await getProfileById(event.profileId, event.projectId); socket.send( superjson.stringify( access diff --git a/apps/api/src/controllers/misc.controller.ts b/apps/api/src/controllers/misc.controller.ts index e441b4842..8f1a64c43 100644 --- a/apps/api/src/controllers/misc.controller.ts +++ b/apps/api/src/controllers/misc.controller.ts @@ -129,7 +129,7 @@ async function processImage( ): Promise { // If it's an ICO file, just return it as-is (no conversion needed) if (originalUrl && isIcoFile(originalUrl, contentType)) { - logger.info('Serving ICO file directly', { + logger.debug('Serving ICO file directly', { originalUrl, bufferSize: buffer.length, }); @@ -137,7 +137,7 @@ async function processImage( } if (originalUrl && isSvgFile(originalUrl, contentType)) { - logger.info('Serving SVG file directly', { + logger.debug('Serving SVG file directly', { originalUrl, bufferSize: buffer.length, }); @@ -146,7 +146,7 @@ async function processImage( // If buffer isnt to big just return it as well if (buffer.length < 5000) { - logger.info('Serving image directly without processing', { + logger.debug('Serving image directly without processing', { originalUrl, bufferSize: buffer.length, }); @@ -190,7 +190,7 @@ async function processOgImage( ): Promise { // If buffer is small enough, return it as-is if (buffer.length < 10000) { - logger.info('Serving OG image directly without processing', { + logger.debug('Serving OG image directly without processing', { originalUrl, bufferSize: buffer.length, }); diff --git a/apps/api/src/controllers/profile.controller.ts b/apps/api/src/controllers/profile.controller.ts index ed11d2291..af95999f6 100644 --- a/apps/api/src/controllers/profile.controller.ts +++ b/apps/api/src/controllers/profile.controller.ts @@ -2,7 +2,6 @@ import { getClientIp } from '@/utils/get-client-ip'; import type { FastifyReply, FastifyRequest } from 'fastify'; import { assocPath, pathOr } from 'ramda'; -import { checkDuplicatedEvent, isDuplicatedEvent } from '@/utils/deduplicate'; import { parseUserAgent } from '@openpanel/common/server'; import { getProfileById, upsertProfile } from '@openpanel/db'; import { getGeoLocation } from '@openpanel/geo'; @@ -17,41 +16,39 @@ export async function updateProfile( }>, reply: FastifyReply, 
) { - const { profileId, properties, ...rest } = request.body; + const payload = request.body; const projectId = request.client!.projectId; if (!projectId) { return reply.status(400).send('No projectId'); } const ip = getClientIp(request)!; const ua = request.headers['user-agent']!; - const uaInfo = parseUserAgent(ua, properties); + const uaInfo = parseUserAgent(ua, payload.properties); const geo = await getGeoLocation(ip); - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - }, - projectId, - }) - ) { - return; - } - await upsertProfile({ - id: profileId, + ...payload, + id: payload.profileId, isExternal: true, projectId, properties: { - ...(properties ?? {}), - ...(ip ? geo : {}), - ...uaInfo, + ...(payload.properties ?? {}), + country: geo.country, + city: geo.city, + region: geo.region, + longitude: geo.longitude, + latitude: geo.latitude, + os: uaInfo.os, + os_version: uaInfo.osVersion, + browser: uaInfo.browser, + browser_version: uaInfo.browserVersion, + device: uaInfo.device, + brand: uaInfo.brand, + model: uaInfo.model, }, - ...rest, }); - reply.status(202).send(profileId); + reply.status(202).send(payload.profileId); } export async function incrementProfileProperty( @@ -66,18 +63,6 @@ export async function incrementProfileProperty( return reply.status(400).send('No projectId'); } - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - }, - projectId, - }) - ) { - return; - } - const profile = await getProfileById(profileId, projectId); if (!profile) { return reply.status(404).send('Not found'); @@ -120,18 +105,6 @@ export async function decrementProfileProperty( return reply.status(400).send('No projectId'); } - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - }, - projectId, - }) - ) { - return; - } - const profile = await getProfileById(profileId, projectId); if (!profile) { return reply.status(404).send('Not found'); diff --git a/apps/api/src/controllers/track.controller.ts b/apps/api/src/controllers/track.controller.ts index b40b10f9c..79b1bbe16 100644 --- a/apps/api/src/controllers/track.controller.ts +++ b/apps/api/src/controllers/track.controller.ts @@ -1,13 +1,15 @@ import { getClientIp } from '@/utils/get-client-ip'; import type { FastifyReply, FastifyRequest } from 'fastify'; -import { path, assocPath, pathOr, pick } from 'ramda'; +import { assocPath, pathOr, pick } from 'ramda'; -import { checkDuplicatedEvent } from '@/utils/deduplicate'; +import { logger } from '@/utils/logger'; import { generateId } from '@openpanel/common'; import { generateDeviceId, parseUserAgent } from '@openpanel/common/server'; import { getProfileById, getSalts, upsertProfile } from '@openpanel/db'; import { type GeoLocation, getGeoLocation } from '@openpanel/geo'; -import { eventsGroupQueue } from '@openpanel/queue'; +import type { ILogger } from '@openpanel/logger'; +import { getEventsGroupQueueShard } from '@openpanel/queue'; +import { getRedisCache } from '@openpanel/redis'; import type { DecrementPayload, IdentifyPayload, @@ -38,10 +40,10 @@ export function getStringHeaders(headers: FastifyRequest['headers']) { } function getIdentity(body: TrackHandlerPayload): IdentifyPayload | undefined { - const identity = path( - ['properties', '__identify'], - body.payload, - ); + const identity = + 'properties' in body.payload + ? 
(body.payload?.properties?.__identify as IdentifyPayload | undefined) + : undefined; return ( identity || @@ -57,27 +59,28 @@ export function getTimestamp( timestamp: FastifyRequest['timestamp'], payload: TrackHandlerPayload['payload'], ) { - const safeTimestamp = new Date(timestamp || Date.now()).toISOString(); - const userDefinedTimestamp = path( - ['properties', '__timestamp'], - payload, - ); + const safeTimestamp = timestamp || Date.now(); + const userDefinedTimestamp = + 'properties' in payload + ? (payload?.properties?.__timestamp as string | undefined) + : undefined; if (!userDefinedTimestamp) { return { timestamp: safeTimestamp, isTimestampFromThePast: false }; } const clientTimestamp = new Date(userDefinedTimestamp); + const clientTimestampNumber = clientTimestamp.getTime(); if ( - Number.isNaN(clientTimestamp.getTime()) || - clientTimestamp > new Date(safeTimestamp) + Number.isNaN(clientTimestampNumber) || + clientTimestampNumber > safeTimestamp ) { return { timestamp: safeTimestamp, isTimestampFromThePast: false }; } return { - timestamp: clientTimestamp.toISOString(), + timestamp: clientTimestampNumber, isTimestampFromThePast: true, }; } @@ -90,18 +93,19 @@ export async function handler( ) { const timestamp = getTimestamp(request.timestamp, request.body.payload); const ip = - path(['properties', '__ip'], request.body.payload) || - getClientIp(request)!; + 'properties' in request.body.payload && + request.body.payload.properties?.__ip + ? (request.body.payload.properties.__ip as string) + : getClientIp(request)!; const ua = request.headers['user-agent']!; const projectId = request.client?.projectId; if (!projectId) { - reply.status(400).send({ + return reply.status(400).send({ status: 400, error: 'Bad Request', message: 'Missing projectId', }); - return; } const identity = getIdentity(request.body); @@ -133,33 +137,7 @@ export async function handler( }) : ''; - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - timestamp, - previousDeviceId, - currentDeviceId, - }, - projectId, - }) - ) { - return; - } - - const promises = [ - track({ - payload: request.body.payload, - currentDeviceId, - previousDeviceId, - projectId, - geo, - headers: getStringHeaders(request.headers), - timestamp: timestamp.timestamp, - isTimestampFromThePast: timestamp.isTimestampFromThePast, - }), - ]; + const promises = []; // If we have more than one property in the identity object, we should identify the user // Otherwise its only a profileId and we should not identify the user @@ -174,23 +152,24 @@ export async function handler( ); } + promises.push( + track({ + log: request.log.info, + payload: request.body.payload, + currentDeviceId, + previousDeviceId, + projectId, + geo, + headers: getStringHeaders(request.headers), + timestamp: timestamp.timestamp, + isTimestampFromThePast: timestamp.isTimestampFromThePast, + }), + ); + await Promise.all(promises); break; } case 'identify': { - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - timestamp, - }, - projectId, - }) - ) { - return; - } - const geo = await getGeoLocation(ip); await identify({ payload: request.body.payload, @@ -201,27 +180,13 @@ export async function handler( break; } case 'alias': { - reply.status(400).send({ + return reply.status(400).send({ status: 400, error: 'Bad Request', message: 'Alias is not supported', }); - break; } case 'increment': { - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - timestamp, - }, - projectId, - }) - ) { - return; - } - 
await increment({ payload: request.body.payload, projectId, @@ -229,19 +194,6 @@ export async function handler( break; } case 'decrement': { - if ( - await checkDuplicatedEvent({ - reply, - payload: { - ...request.body, - timestamp, - }, - projectId, - }) - ) { - return; - } - await decrement({ payload: request.body.payload, projectId, @@ -249,12 +201,11 @@ export async function handler( break; } default: { - reply.status(400).send({ + return reply.status(400).send({ status: 400, error: 'Bad Request', message: 'Invalid type', }); - break; } } @@ -270,6 +221,7 @@ async function track({ headers, timestamp, isTimestampFromThePast, + log, }: { payload: TrackPayload; currentDeviceId: string; @@ -277,8 +229,9 @@ async function track({ projectId: string; geo: GeoLocation; headers: Record; - timestamp: string; + timestamp: number; isTimestampFromThePast: boolean; + log: any; }) { const uaInfo = parseUserAgent(headers['user-agent'], payload.properties); const groupId = uaInfo.isServer @@ -286,8 +239,30 @@ async function track({ ? `${projectId}:${payload.profileId}` : `${projectId}:${generateId()}` : currentDeviceId; - await eventsGroupQueue.add({ - orderMs: new Date(timestamp).getTime(), + const jobId = [payload.name, timestamp, projectId, currentDeviceId, groupId] + .filter(Boolean) + .join('-'); + await getRedisCache().incr('track:counter'); + log('track handler', { + jobId: jobId, + groupId: groupId, + timestamp: timestamp, + data: { + projectId, + headers, + event: { + ...payload, + timestamp, + isTimestampFromThePast, + }, + uaInfo, + geo, + currentDeviceId, + previousDeviceId, + }, + }); + await getEventsGroupQueueShard(groupId).add({ + orderMs: timestamp, data: { projectId, headers, @@ -296,11 +271,13 @@ async function track({ timestamp, isTimestampFromThePast, }, + uaInfo, geo, currentDeviceId, previousDeviceId, }, groupId, + jobId, }); } @@ -323,8 +300,18 @@ async function identify({ projectId, properties: { ...(payload.properties ?? {}), - ...(geo ?? {}), - ...uaInfo, + country: geo.country, + city: geo.city, + region: geo.region, + longitude: geo.longitude, + latitude: geo.latitude, + os: uaInfo.os, + os_version: uaInfo.osVersion, + browser: uaInfo.browser, + browser_version: uaInfo.browserVersion, + device: uaInfo.device, + brand: uaInfo.brand, + model: uaInfo.model, }, }); } diff --git a/apps/api/src/hooks/duplicate.hook.ts b/apps/api/src/hooks/duplicate.hook.ts new file mode 100644 index 000000000..f5976f10b --- /dev/null +++ b/apps/api/src/hooks/duplicate.hook.ts @@ -0,0 +1,21 @@ +import { isDuplicatedEvent } from '@/utils/deduplicate'; +import type { PostEventPayload, TrackHandlerPayload } from '@openpanel/sdk'; +import type { FastifyReply, FastifyRequest } from 'fastify'; + +export async function duplicateHook( + req: FastifyRequest<{ + Body: PostEventPayload | TrackHandlerPayload; + }>, + reply: FastifyReply, +) { + const isDuplicate = await isDuplicatedEvent({ + ip: req.clientIp ?? '', + origin: req.headers.origin ?? 
'', + payload: req.body, + projectId: (req.headers['openpanel-client-id'] as string) || '', + }); + + if (isDuplicate) { + return reply.status(200).send('Duplicate event'); + } +} diff --git a/apps/api/src/hooks/fix.hook.ts b/apps/api/src/hooks/fix.hook.ts deleted file mode 100644 index f19d7d161..000000000 --- a/apps/api/src/hooks/fix.hook.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { FastifyRequest } from 'fastify'; - -export async function fixHook(request: FastifyRequest) { - const ua = request.headers['user-agent']; - // Swift SDK issue: https://github.com/Openpanel-dev/swift-sdk/commit/d588fa761a36a33f3b78eb79d83bfd524e3c7144 - if (ua) { - const regex = /OpenPanel\/(\d+\.\d+\.\d+)\sOpenPanel\/(\d+\.\d+\.\d+)/; - const match = ua.match(regex); - if (match) { - request.headers['user-agent'] = ua.replace( - regex, - `OpenPanel/${match[1]}`, - ); - } - } -} diff --git a/apps/api/src/hooks/ip.hook.ts b/apps/api/src/hooks/ip.hook.ts index a1fdcbe64..5dc149e77 100644 --- a/apps/api/src/hooks/ip.hook.ts +++ b/apps/api/src/hooks/ip.hook.ts @@ -1,9 +1,5 @@ import { getClientIp } from '@/utils/get-client-ip'; -import type { - FastifyReply, - FastifyRequest, - HookHandlerDoneFunction, -} from 'fastify'; +import type { FastifyRequest } from 'fastify'; export async function ipHook(request: FastifyRequest) { const ip = getClientIp(request); diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 4d2fd8720..a95c7885a 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -28,7 +28,6 @@ import { liveness, readiness, } from './controllers/healthcheck.controller'; -import { fixHook } from './hooks/fix.hook'; import { ipHook } from './hooks/ip.hook'; import { requestIdHook } from './hooks/request-id.hook'; import { requestLoggingHook } from './hooks/request-logging.hook'; @@ -125,7 +124,6 @@ const startServer = async () => { fastify.addHook('onRequest', requestIdHook); fastify.addHook('onRequest', timestampHook); fastify.addHook('onRequest', ipHook); - fastify.addHook('onRequest', fixHook); fastify.addHook('onResponse', requestLoggingHook); fastify.register(compress, { diff --git a/apps/api/src/routes/event.router.ts b/apps/api/src/routes/event.router.ts index 2a3a169c5..5efa52add 100644 --- a/apps/api/src/routes/event.router.ts +++ b/apps/api/src/routes/event.router.ts @@ -2,9 +2,11 @@ import * as controller from '@/controllers/event.controller'; import type { FastifyPluginCallback } from 'fastify'; import { clientHook } from '@/hooks/client.hook'; +import { duplicateHook } from '@/hooks/duplicate.hook'; import { isBotHook } from '@/hooks/is-bot.hook'; const eventRouter: FastifyPluginCallback = async (fastify) => { + fastify.addHook('preValidation', duplicateHook); fastify.addHook('preHandler', clientHook); fastify.addHook('preHandler', isBotHook); diff --git a/apps/api/src/utils/auth.ts b/apps/api/src/utils/auth.ts index 8fda2d13d..afa24d439 100644 --- a/apps/api/src/utils/auth.ts +++ b/apps/api/src/utils/auth.ts @@ -3,6 +3,7 @@ import type { FastifyRequest, RawRequestDefaultExpression } from 'fastify'; import { verifyPassword } from '@openpanel/common/server'; import type { IServiceClientWithProject } from '@openpanel/db'; import { ClientType, getClientByIdCached } from '@openpanel/db'; +import { getCache } from '@openpanel/redis'; import type { PostEventPayload, TrackHandlerPayload } from '@openpanel/sdk'; import type { IProjectFilterIp, @@ -135,7 +136,13 @@ export async function validateSdkRequest( } if (client.secret && clientSecret) { - if (await verifyPassword(clientSecret, 
client.secret)) { + const isVerified = await getCache( + `client:auth:${clientId}:${clientSecret.slice(0, 5)}`, + 60 * 5, + async () => await verifyPassword(clientSecret, client.secret!), + true, + ); + if (isVerified) { return client; } } diff --git a/apps/api/src/utils/deduplicate.ts b/apps/api/src/utils/deduplicate.ts index f6ca99a08..d45ac9ba5 100644 --- a/apps/api/src/utils/deduplicate.ts +++ b/apps/api/src/utils/deduplicate.ts @@ -1,11 +1,14 @@ import { getLock } from '@openpanel/redis'; import fastJsonStableHash from 'fast-json-stable-hash'; -import type { FastifyReply } from 'fastify'; export async function isDuplicatedEvent({ + ip, + origin, payload, projectId, }: { + ip: string; + origin: string; payload: Record; projectId: string; }) { @@ -13,6 +16,8 @@ export async function isDuplicatedEvent({ `fastify:deduplicate:${fastJsonStableHash.hash( { ...payload, + ip, + origin, projectId, }, 'md5', @@ -27,24 +32,3 @@ export async function isDuplicatedEvent({ return true; } - -export async function checkDuplicatedEvent({ - reply, - payload, - projectId, -}: { - reply: FastifyReply; - payload: Record; - projectId: string; -}) { - if (await isDuplicatedEvent({ payload, projectId })) { - reply.log.info('duplicated event', { - payload, - projectId, - }); - reply.status(200).send('duplicated'); - return true; - } - - return false; -} diff --git a/apps/api/src/utils/graceful-shutdown.ts b/apps/api/src/utils/graceful-shutdown.ts index 1f8a3c597..276762ae6 100644 --- a/apps/api/src/utils/graceful-shutdown.ts +++ b/apps/api/src/utils/graceful-shutdown.ts @@ -1,7 +1,7 @@ import { ch, db } from '@openpanel/db'; import { cronQueue, - eventsGroupQueue, + eventsGroupQueues, miscQueue, notificationQueue, sessionsQueue, @@ -71,7 +71,7 @@ export async function shutdown( // Step 6: Close Bull queues (graceful shutdown of queue state) try { await Promise.all([ - eventsGroupQueue.close(), + ...eventsGroupQueues.map((queue) => queue.close()), sessionsQueue.close(), cronQueue.close(), miscQueue.close(), diff --git a/apps/start/package.json b/apps/start/package.json index b9de5ab54..f9de3927e 100644 --- a/apps/start/package.json +++ b/apps/start/package.json @@ -20,7 +20,6 @@ }, "dependencies": { "@ai-sdk/react": "^1.2.5", - "@clickhouse/client": "^1.2.0", "@dnd-kit/core": "^6.3.1", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", diff --git a/apps/start/src/components/realtime/realtime-geo.tsx b/apps/start/src/components/realtime/realtime-geo.tsx index 41c2a2968..f10d485db 100644 --- a/apps/start/src/components/realtime/realtime-geo.tsx +++ b/apps/start/src/components/realtime/realtime-geo.tsx @@ -75,7 +75,7 @@ export function RealtimeGeo({ projectId }: RealtimeGeoProps) { }, { name: 'Events', - width: '84px', + width: '60px', render(item) { return (
@@ -86,6 +86,19 @@ export function RealtimeGeo({ projectId }: RealtimeGeoProps) { ); }, }, + { + name: 'Sessions', + width: '82px', + render(item) { + return ( +
+ + {number.short(item.unique_sessions)} + +
+ ); + }, + }, ]} />
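The track controller above now hands each event to getEventsGroupQueueShard(groupId) instead of a single eventsGroupQueue, and graceful-shutdown.ts closes the whole eventsGroupQueues array; the shard helper itself lives in @openpanel/queue and is not part of this diff. A minimal sketch of what such a helper could look like, assuming the only contract is a deterministic groupId-to-shard mapping (the env var name below is invented for the sketch):

```ts
// Hypothetical sketch only: the real getEventsGroupQueueShard is in @openpanel/queue
// and is not shown in this diff. All it has to guarantee is that the same groupId
// (device/session) always maps to the same shard, so per-group ordering survives.
import { createHash } from 'node:crypto';

export const EVENTS_GROUP_QUEUES_SHARDS = Number.parseInt(
  process.env.EVENT_QUEUE_SHARDS || '4', // assumed env var, not taken from the diff
  10,
);

export function getShardIndex(groupId: string): number {
  // md5 is used purely as a cheap, stable hash here, not for security.
  const digest = createHash('md5').update(groupId).digest();
  return digest.readUInt32BE(0) % EVENTS_GROUP_QUEUES_SHARDS;
}

// getEventsGroupQueueShard(groupId) would then return
// eventsGroupQueues[getShardIndex(groupId)].
```

The jobId the controller builds from [name, timestamp, projectId, currentDeviceId, groupId] can then presumably act as an idempotency key within a shard, which fits the removal of the per-request checkDuplicatedEvent calls in favour of the new preValidation duplicate hook.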
diff --git a/apps/start/src/components/realtime/realtime-paths.tsx b/apps/start/src/components/realtime/realtime-paths.tsx index 0c79d8233..4046730ab 100644 --- a/apps/start/src/components/realtime/realtime-paths.tsx +++ b/apps/start/src/components/realtime/realtime-paths.tsx @@ -82,7 +82,7 @@ export function RealtimePaths({ projectId }: RealtimePathsProps) { }, { name: 'Events', - width: '84px', + width: '60px', render(item) { return (
@@ -93,6 +93,19 @@ export function RealtimePaths({ projectId }: RealtimePathsProps) { ); }, }, + { + name: 'Sessions', + width: '82px', + render(item) { + return ( +
+ + {number.short(item.unique_sessions)} + +
+ ); + }, + }, ]} />
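Further up, validateSdkRequest now memoises the argon2 secret check through getCache from @openpanel/redis, keyed by the client id plus the first five characters of the presented secret and cached for five minutes. That helper's implementation is not included in this diff; a rough read-through sketch of the idea, written against ioredis directly and ignoring the extra boolean argument seen at the call site:

```ts
// Hypothetical read-through cache in the spirit of the getCache call in auth.ts.
// Key layout and TTL come from the call site; everything else here is assumed.
import Redis from 'ioredis';

const redis = new Redis(process.env.REDIS_URL || 'redis://localhost:6379');

export async function getCacheSketch<T>(
  key: string,
  ttlSeconds: number,
  loader: () => Promise<T>,
): Promise<T> {
  const hit = await redis.get(key);
  if (hit !== null) {
    return JSON.parse(hit) as T;
  }
  const value = await loader();
  // Cache the serialised result with an expiry so stale results age out.
  await redis.set(key, JSON.stringify(value), 'EX', ttlSeconds);
  return value;
}
```

Caching only the boolean verification result keeps argon2 off the hot path for repeated SDK requests without persisting the full secret in Redis.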
diff --git a/apps/start/src/components/realtime/realtime-referrals.tsx b/apps/start/src/components/realtime/realtime-referrals.tsx index ac28fb2a2..1c7e7e97e 100644 --- a/apps/start/src/components/realtime/realtime-referrals.tsx +++ b/apps/start/src/components/realtime/realtime-referrals.tsx @@ -65,7 +65,7 @@ export function RealtimeReferrals({ projectId }: RealtimeReferralsProps) { }, { name: 'Events', - width: '84px', + width: '60px', render(item) { return (
@@ -76,6 +76,19 @@ export function RealtimeReferrals({ projectId }: RealtimeReferralsProps) { ); }, }, + { + name: 'Sessions', + width: '82px', + render(item) { + return ( +
+ + {number.short(item.unique_sessions)} + +
+ ); + }, + }, ]} />
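The remaining frontend hunks wire column visibility into the events queries: data-table-hooks.tsx gains a useReadColumnVisibility hook (its generic parameter was lost in this paste; a Record of booleans is assumed below), re-writes the initial visibility on mount, and each events route only enables its infinite query once that value has hydrated. Condensed, the pattern those routes apply looks roughly like this:

```ts
// Condensed sketch of the pattern used in the route hunks that follow.
// useReadLocalStorage (usehooks-ts) returns null while the key is absent, which is
// presumably why useDataTableColumnVisibility writes the initial state back on mount.
import { useReadLocalStorage } from 'usehooks-ts';

export const useReadColumnVisibility = (persistentKey: string) =>
  useReadLocalStorage<Record<string, boolean>>(
    `@op:${persistentKey}-column-visibility`,
  );

// In a route component (simplified):
// const columnVisibility = useReadColumnVisibility('events');
// const query = useInfiniteQuery(
//   trpc.event.events.infiniteQueryOptions(
//     { projectId, columnVisibility: columnVisibility ?? {} },
//     { enabled: columnVisibility !== null, getNextPageParam: (p) => p.meta.next },
//   ),
// );
```

Gating on columnVisibility !== null keeps the query from firing with an empty visibility map before localStorage has been read.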
diff --git a/apps/start/src/components/ui/data-table/data-table-hooks.tsx b/apps/start/src/components/ui/data-table/data-table-hooks.tsx index a76a91893..a7fde2e5f 100644 --- a/apps/start/src/components/ui/data-table/data-table-hooks.tsx +++ b/apps/start/src/components/ui/data-table/data-table-hooks.tsx @@ -4,8 +4,8 @@ import type { VisibilityState, } from '@tanstack/react-table'; import { parseAsInteger, useQueryState } from 'nuqs'; -import { useState } from 'react'; -import { useLocalStorage } from 'usehooks-ts'; +import { useEffect, useState } from 'react'; +import { useLocalStorage, useReadLocalStorage } from 'usehooks-ts'; export const useDataTablePagination = (pageSize = 10) => { const [page, setPage] = useQueryState( @@ -22,6 +22,12 @@ export const useDataTablePagination = (pageSize = 10) => { return { page, setPage, state }; }; +export const useReadColumnVisibility = (persistentKey: string) => { + return useReadLocalStorage>( + `@op:${persistentKey}-column-visibility`, + ); +}; + export const useDataTableColumnVisibility = ( columns: ColumnDef[], persistentKey: string, @@ -43,6 +49,13 @@ export const useDataTableColumnVisibility = ( }, {} as VisibilityState), ); + // somewhat hack + // Set initial column visibility, + // otherwise will not useReadColumnVisibility be updated + useEffect(() => { + setColumnVisibility(columnVisibility); + }, []); + const [columnOrder, setColumnOrder] = useLocalStorage( `@op:${persistentKey}-column-order`, columns.map((column) => column.id!), diff --git a/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.conversions.tsx b/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.conversions.tsx index 5125067e6..167cfaa61 100644 --- a/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.conversions.tsx +++ b/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.conversions.tsx @@ -1,4 +1,5 @@ import { EventsTable } from '@/components/events/table'; +import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks'; import { useTRPC } from '@/integrations/trpc/react'; import { useInfiniteQuery } from '@tanstack/react-query'; import { createFileRoute } from '@tanstack/react-router'; @@ -18,12 +19,14 @@ function Component() { parseAsIsoDateTime, ); const [endDate, setEndDate] = useQueryState('endDate', parseAsIsoDateTime); + const columnVisibility = useReadColumnVisibility('events'); const query = useInfiniteQuery( trpc.event.conversions.infiniteQueryOptions( { projectId, startDate: startDate || undefined, endDate: endDate || undefined, + columnVisibility: columnVisibility ?? 
{}, }, { getNextPageParam: (lastPage) => lastPage.meta.next, diff --git a/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.events.tsx b/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.events.tsx index 095f3f4ee..0861022e4 100644 --- a/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.events.tsx +++ b/apps/start/src/routes/_app.$organizationId.$projectId_.events._tabs.events.tsx @@ -1,4 +1,5 @@ import { EventsTable } from '@/components/events/table'; +import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks'; import { useEventQueryFilters, useEventQueryNamesFilter, @@ -21,6 +22,8 @@ function Component() { const [startDate] = useQueryState('startDate', parseAsIsoDateTime); const [endDate] = useQueryState('endDate', parseAsIsoDateTime); const [eventNames] = useEventQueryNamesFilter(); + const columnVisibility = useReadColumnVisibility('events'); + const query = useInfiniteQuery( trpc.event.events.infiniteQueryOptions( { @@ -30,8 +33,10 @@ function Component() { profileId: '', startDate: startDate || undefined, endDate: endDate || undefined, + columnVisibility: columnVisibility ?? {}, }, { + enabled: columnVisibility !== null, getNextPageParam: (lastPage) => lastPage.meta.next, }, ), diff --git a/apps/start/src/routes/_app.$organizationId.$projectId_.profiles.$profileId._tabs.events.tsx b/apps/start/src/routes/_app.$organizationId.$projectId_.profiles.$profileId._tabs.events.tsx index 2343b520d..38f9c058e 100644 --- a/apps/start/src/routes/_app.$organizationId.$projectId_.profiles.$profileId._tabs.events.tsx +++ b/apps/start/src/routes/_app.$organizationId.$projectId_.profiles.$profileId._tabs.events.tsx @@ -1,4 +1,5 @@ import { EventsTable } from '@/components/events/table'; +import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks'; import { useEventQueryFilters, useEventQueryNamesFilter, @@ -21,6 +22,7 @@ function Component() { const [startDate] = useQueryState('startDate', parseAsIsoDateTime); const [endDate] = useQueryState('endDate', parseAsIsoDateTime); const [eventNames] = useEventQueryNamesFilter(); + const columnVisibility = useReadColumnVisibility('events'); const query = useInfiniteQuery( trpc.event.events.infiniteQueryOptions( { @@ -30,8 +32,10 @@ function Component() { startDate: startDate || undefined, endDate: endDate || undefined, events: eventNames, + columnVisibility: columnVisibility ?? 
{}, }, { + enabled: columnVisibility !== null, getNextPageParam: (lastPage) => lastPage.meta.next, }, ), diff --git a/apps/start/src/routes/_app.$organizationId.$projectId_.sessions_.$sessionId.tsx b/apps/start/src/routes/_app.$organizationId.$projectId_.sessions_.$sessionId.tsx index f795cbf5a..c9d9f8255 100644 --- a/apps/start/src/routes/_app.$organizationId.$projectId_.sessions_.$sessionId.tsx +++ b/apps/start/src/routes/_app.$organizationId.$projectId_.sessions_.$sessionId.tsx @@ -3,12 +3,11 @@ import FullPageLoadingState from '@/components/full-page-loading-state'; import { PageContainer } from '@/components/page-container'; import { PageHeader } from '@/components/page-header'; import { SerieIcon } from '@/components/report-chart/common/serie-icon'; -import { useDataTablePagination } from '@/components/ui/data-table/data-table-hooks'; +import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks'; import { useEventQueryFilters, useEventQueryNamesFilter, } from '@/hooks/use-event-query-filters'; -import { useSearchQueryState } from '@/hooks/use-search-query-state'; import { useTRPC } from '@/integrations/trpc/react'; import { createProjectTitle } from '@/utils/title'; import { useInfiniteQuery, useSuspenseQuery } from '@tanstack/react-query'; @@ -46,8 +45,6 @@ function Component() { const trpc = useTRPC(); const LIMIT = 50; - const { page } = useDataTablePagination(LIMIT); - const { debouncedSearch } = useSearchQueryState(); const { data: session } = useSuspenseQuery( trpc.session.byId.queryOptions({ @@ -60,7 +57,7 @@ function Component() { const [startDate] = useQueryState('startDate', parseAsIsoDateTime); const [endDate] = useQueryState('endDate', parseAsIsoDateTime); const [eventNames] = useEventQueryNamesFilter(); - + const columnVisibility = useReadColumnVisibility('events'); const query = useInfiniteQuery( trpc.event.events.infiniteQueryOptions( { @@ -70,8 +67,10 @@ function Component() { events: eventNames, startDate: startDate || undefined, endDate: endDate || undefined, + columnVisibility: columnVisibility ?? 
{}, }, { + enabled: columnVisibility !== null, getNextPageParam: (lastPage) => lastPage.meta.next, }, ), diff --git a/apps/worker/package.json b/apps/worker/package.json index c9b548785..6b6247936 100644 --- a/apps/worker/package.json +++ b/apps/worker/package.json @@ -12,8 +12,8 @@ "gen:referrers": "jiti scripts/get-referrers.ts && biome format --write ./src/referrers/index.ts" }, "dependencies": { - "@bull-board/api": "6.13.1", - "@bull-board/express": "6.13.1", + "@bull-board/api": "6.14.0", + "@bull-board/express": "6.14.0", "@openpanel/common": "workspace:*", "@openpanel/db": "workspace:*", "@openpanel/email": "workspace:*", @@ -22,9 +22,9 @@ "@openpanel/logger": "workspace:*", "@openpanel/queue": "workspace:*", "@openpanel/redis": "workspace:*", - "bullmq": "^5.8.7", + "bullmq": "^5.63.0", "express": "^4.18.2", - "groupmq": "1.0.0-next.19", + "groupmq": "1.1.0-next.5", "prom-client": "^15.1.3", "ramda": "^0.29.1", "source-map-support": "^0.5.21", diff --git a/apps/worker/scripts/cleanup-old-event-buffer-keys.ts b/apps/worker/scripts/cleanup-old-event-buffer-keys.ts new file mode 100644 index 000000000..8d4fff79a --- /dev/null +++ b/apps/worker/scripts/cleanup-old-event-buffer-keys.ts @@ -0,0 +1,283 @@ +#!/usr/bin/env tsx +/** + * Cleanup script for old event buffer architecture Redis keys + * + * This script removes Redis keys from the previous complex event buffer implementation: + * - event_buffer:sessions_sorted (sorted set) + * - event_buffer:ready_sessions (sorted set) + * - event_buffer:session:{sessionId} (lists) + * - event_buffer:regular_queue (old queue key, now event_buffer:queue) + * + * The new simplified architecture uses: + * - event_buffer:queue (new queue key) + * - event_buffer:last_screen_view:session:{sessionId} + * - event_buffer:last_screen_view:profile:{projectId}:{profileId} + * - event_buffer:total_count + */ + +import { createLogger } from '@openpanel/logger'; +import { getRedisCache } from '@openpanel/redis'; + +const redis = getRedisCache(); +const logger = createLogger({ name: 'cleanup-event-buffer' }); + +interface CleanupStats { + sessionsSorted: number; + readySessions: number; + sessionLists: number; + regularQueue: number; + totalEventsMigrated: number; + totalKeysDeleted: number; + errors: number; +} + +async function cleanupOldEventBufferKeys(): Promise { + const stats: CleanupStats = { + sessionsSorted: 0, + readySessions: 0, + sessionLists: 0, + regularQueue: 0, + totalEventsMigrated: 0, + totalKeysDeleted: 0, + errors: 0, + }; + + logger.info('Starting cleanup of old event buffer keys...'); + + try { + // 1. 
Get all session IDs from both sorted sets + const sessionsSortedKey = 'event_buffer:sessions_sorted'; + const readySessionsKey = 'event_buffer:ready_sessions'; + + const [sessionsSortedExists, readySessionsExists] = await Promise.all([ + redis.exists(sessionsSortedKey), + redis.exists(readySessionsKey), + ]); + + let allSessionIds: string[] = []; + + // Collect session IDs from sessions_sorted + if (sessionsSortedExists) { + const sessionIds = await redis.zrange(sessionsSortedKey, 0, -1); + stats.sessionsSorted = sessionIds.length; + allSessionIds = sessionIds; + logger.info(`Found ${sessionIds.length} sessions in sessions_sorted`); + } else { + logger.info(`${sessionsSortedKey} does not exist (already cleaned up)`); + } + + // Also check ready_sessions (might have additional sessions) + if (readySessionsExists) { + const readySessionIds = await redis.zrange(readySessionsKey, 0, -1); + stats.readySessions = readySessionIds.length; + logger.info(`Found ${readySessionIds.length} sessions in ready_sessions`); + + // Merge with allSessionIds (avoid duplicates) + const uniqueReadySessions = readySessionIds.filter( + (id) => !allSessionIds.includes(id), + ); + if (uniqueReadySessions.length > 0) { + logger.info( + `Found ${uniqueReadySessions.length} additional sessions in ready_sessions`, + ); + allSessionIds = [...allSessionIds, ...uniqueReadySessions]; + } + } else { + logger.info(`${readySessionsKey} does not exist (already cleaned up)`); + } + + // 2. Migrate events from session lists to new queue + if (allSessionIds.length > 0) { + logger.info( + `Migrating events from ${allSessionIds.length} session lists to new queue...`, + ); + const newQueueKey = 'event_buffer:queue'; + let totalEventsMigrated = 0; + + // Process in batches + const batchSize = 100; + for (let i = 0; i < allSessionIds.length; i += batchSize) { + const batchIds = allSessionIds.slice(i, i + batchSize); + + for (const sessionId of batchIds) { + const sessionKey = `event_buffer:session:${sessionId}`; + const events = await redis.lrange(sessionKey, 0, -1); + + if (events.length > 0) { + // Move events to new queue + await redis.rpush(newQueueKey, ...events); + // Update buffer counter + await redis.incrby('event_buffer:total_count', events.length); + totalEventsMigrated += events.length; + stats.totalEventsMigrated += events.length; + } + + // Delete the session list + await redis.del(sessionKey); + stats.sessionLists++; + stats.totalKeysDeleted++; + } + + logger.info( + `Processed batch ${Math.floor(i / batchSize) + 1}: ${batchIds.length} sessions, ${totalEventsMigrated} total events migrated`, + ); + } + + logger.info( + `✅ Migrated ${totalEventsMigrated} events from session lists to new queue`, + ); + } + + // 3. Delete the sorted sets + const keysToDelete: string[] = []; + if (sessionsSortedExists) keysToDelete.push(sessionsSortedKey); + if (readySessionsExists) keysToDelete.push(readySessionsKey); + + if (keysToDelete.length > 0) { + await redis.del(...keysToDelete); + stats.totalKeysDeleted += keysToDelete.length; + logger.info(`Deleted sorted sets: ${keysToDelete.join(', ')}`); + } + + // 4. 
Check and handle regular_queue (old queue key) + const regularQueueKey = 'event_buffer:regular_queue'; + const regularQueueExists = await redis.exists(regularQueueKey); + if (regularQueueExists) { + const queueLength = await redis.llen(regularQueueKey); + stats.regularQueue = queueLength; + + if (queueLength > 0) { + logger.info(`Found ${queueLength} events in old regular_queue`); + logger.warn('WARNING: Old regular_queue has pending events!'); + logger.info('Moving events from old queue to new queue...'); + + // Move events from old queue to new queue + const newQueueKey = 'event_buffer:queue'; + let movedCount = 0; + while (true) { + const event = await redis.rpoplpush(regularQueueKey, newQueueKey); + if (!event) break; + movedCount++; + if (movedCount % 1000 === 0) { + logger.info(`Moved ${movedCount} events...`); + } + } + logger.info(`Moved ${movedCount} events from old queue to new queue`); + stats.totalEventsMigrated += movedCount; + } + + // Delete the old queue key + await redis.del(regularQueueKey); + logger.info(`Deleted ${regularQueueKey}`); + stats.totalKeysDeleted++; + } else { + logger.info(`${regularQueueKey} does not exist (already cleaned up)`); + } + + // 5. Scan for any remaining event_buffer:session:* keys that might have been missed + logger.info('Scanning for any remaining session keys...'); + let cursor = '0'; + let remainingSessionKeys = 0; + + do { + const [newCursor, keys] = await redis.scan( + cursor, + 'MATCH', + 'event_buffer:session:*', + 'COUNT', + 100, + ); + cursor = newCursor; + + if (keys.length > 0) { + const deleted = await redis.del(...keys); + remainingSessionKeys += deleted; + stats.totalKeysDeleted += deleted; + logger.info(`Found and deleted ${deleted} remaining session keys`); + } + } while (cursor !== '0'); + + if (remainingSessionKeys > 0) { + logger.info(`Cleaned up ${remainingSessionKeys} remaining session keys`); + } else { + logger.info('No remaining session keys found'); + } + + logger.info('Cleanup completed successfully!', stats); + return stats; + } catch (error) { + stats.errors++; + logger.error('Error during cleanup:', error); + throw error; + } +} + +async function main() { + try { + logger.info('Event Buffer Cleanup Script'); + logger.info('==========================='); + logger.info( + 'This script will remove old event buffer Redis keys from the previous architecture.', + ); + logger.info(''); + + // Check current state + const sessionsSortedExists = await redis.exists( + 'event_buffer:sessions_sorted', + ); + const readySessionsExists = await redis.exists( + 'event_buffer:ready_sessions', + ); + const regularQueueExists = await redis.exists('event_buffer:regular_queue'); + + if (!sessionsSortedExists && !readySessionsExists && !regularQueueExists) { + logger.info( + '✅ No old keys found. 
System appears to be already cleaned up!', + ); + process.exit(0); + } + + logger.info('Found old keys to clean up:'); + if (sessionsSortedExists) logger.info(' - event_buffer:sessions_sorted ✓'); + if (readySessionsExists) logger.info(' - event_buffer:ready_sessions ✓'); + if (regularQueueExists) logger.info(' - event_buffer:regular_queue ✓'); + logger.info(''); + + // Perform cleanup + const stats = await cleanupOldEventBufferKeys(); + + // Summary + logger.info(''); + logger.info('Cleanup Summary'); + logger.info('==============='); + logger.info(`Sessions sorted entries: ${stats.sessionsSorted}`); + logger.info(`Ready sessions entries: ${stats.readySessions}`); + logger.info(`Session list keys deleted: ${stats.sessionLists}`); + logger.info(`Regular queue events: ${stats.regularQueue} (migrated)`); + logger.info(`Total events migrated: ${stats.totalEventsMigrated}`); + logger.info(`Total keys deleted: ${stats.totalKeysDeleted}`); + logger.info(`Errors: ${stats.errors}`); + logger.info(''); + + if (stats.errors === 0) { + logger.info('✅ Cleanup completed successfully!'); + } else { + logger.warn(`⚠️ Cleanup completed with ${stats.errors} errors`); + } + + // Close Redis connection + await redis.quit(); + process.exit(0); + } catch (error) { + logger.error('Fatal error during cleanup:', error); + await redis.quit(); + process.exit(1); + } +} + +// Run if executed directly +if (require.main === module) { + main(); +} + +export { cleanupOldEventBufferKeys }; diff --git a/apps/worker/scripts/migrate-delayed-jobs.ts b/apps/worker/scripts/migrate-delayed-jobs.ts new file mode 100644 index 000000000..e66090327 --- /dev/null +++ b/apps/worker/scripts/migrate-delayed-jobs.ts @@ -0,0 +1,206 @@ +/** + * Migration Script: Migrate Delayed Jobs to New Queue Names + * + * This script migrates delayed jobs from old queue names (e.g., "sessions") + * to new queue names with hash tags (e.g., "{sessions}"). + * + * Active/waiting jobs are ignored - only delayed jobs are migrated. + * + * Usage: + * npx tsx apps/worker/scripts/migrate-delayed-jobs.ts + * + * Options: + * --dry-run Show what would be migrated without actually doing it + * --queue Migrate specific queue only (sessions, cron, notification, misc) + * + * # Dry run (recommended first) + * npx tsx apps/worker/scripts/migrate-delayed-jobs.ts --dry-run + * + * Migrate all queues + * npx tsx apps/worker/scripts/migrate-delayed-jobs.ts + * + * Migrate specific queue only + * npx tsx apps/worker/scripts/migrate-delayed-jobs.ts --queue=sessions + * npx tsx apps/worker/scripts/migrate-delayed-jobs.ts --queue=misc + * + */ + +import type { + CronQueuePayload, + MiscQueuePayload, + NotificationQueuePayload, + SessionsQueuePayload, +} from '@openpanel/queue'; +import { getRedisQueue } from '@openpanel/redis'; +import { Queue } from 'bullmq'; + +interface MigrationStats { + queue: string; + total: number; + migrated: number; + failed: number; + skipped: number; +} + +const isDryRun = process.argv.includes('--dry-run'); +const specificQueue = process.argv + .find((arg) => arg.startsWith('--queue=')) + ?.split('=')[1]; + +console.log('🚀 Starting delayed jobs migration'); +console.log( + `Mode: ${isDryRun ? 
'DRY RUN (no changes will be made)' : 'LIVE MIGRATION'}`, +); +console.log(`Queue filter: ${specificQueue || 'all queues'}`); +console.log('---\n'); + +async function migrateDelayedJobs( + oldQueueName: string, + newQueueName: string, +): Promise { + const stats: MigrationStats = { + queue: oldQueueName, + total: 0, + migrated: 0, + failed: 0, + skipped: 0, + }; + + const connection = getRedisQueue(); + const oldQueue = new Queue(oldQueueName, { connection }); + const newQueue = new Queue(newQueueName, { connection }); + + try { + console.log(`\n📦 Processing queue: ${oldQueueName} → ${newQueueName}`); + + // Get all delayed jobs from old queue + const delayedJobs = await oldQueue.getDelayed(); + stats.total = delayedJobs.length; + + console.log(` Found ${stats.total} delayed jobs`); + + if (stats.total === 0) { + console.log(' ✓ No delayed jobs to migrate'); + return stats; + } + + for (const job of delayedJobs) { + try { + const delay = job.opts.delay || 0; + const remainingDelay = Math.max(0, job.timestamp + delay - Date.now()); + + console.log( + ` - Job ${job.id}: ${job.name}, delay: ${Math.round(remainingDelay / 1000)}s remaining`, + ); + + if (!isDryRun) { + // Add to new queue with remaining delay + await newQueue.add(job.name || 'migrated-job', job.data, { + ...job.opts, + delay: remainingDelay, + jobId: job.id, // Preserve job ID if possible + attempts: job.opts.attempts, + backoff: job.opts.backoff, + }); + + // Remove from old queue + await job.remove(); + + stats.migrated++; + console.log(' ✓ Migrated'); + } else { + stats.migrated++; + console.log(' ✓ Would migrate (dry run)'); + } + } catch (error) { + stats.failed++; + console.error( + ` ✗ Failed to migrate job ${job.id}:`, + error instanceof Error ? error.message : error, + ); + } + } + + console.log(`\n Summary for ${oldQueueName}:`); + console.log(` - Total: ${stats.total}`); + console.log(` - Migrated: ${stats.migrated}`); + console.log(` - Failed: ${stats.failed}`); + console.log(` - Skipped: ${stats.skipped}`); + } catch (error) { + console.error(` ✗ Error processing queue ${oldQueueName}:`, error); + } finally { + await oldQueue.close(); + await newQueue.close(); + } + + return stats; +} + +async function main() { + const queuesToMigrate: Array<{ old: string; new: string }> = [ + { old: 'sessions', new: '{sessions}' }, + { old: 'misc', new: '{misc}' }, + ]; + + // Filter to specific queue if requested + const filtered = specificQueue + ? queuesToMigrate.filter((q) => q.old === specificQueue) + : queuesToMigrate; + + if (filtered.length === 0) { + console.error( + `❌ Queue "${specificQueue}" not found. Valid queues: sessions, cron, notification, misc`, + ); + process.exit(1); + } + + const allStats: MigrationStats[] = []; + + for (const { old: oldName, new: newName } of filtered) { + const stats = await migrateDelayedJobs(oldName, newName); + allStats.push(stats); + } + + // Print summary + console.log(`\n${'='.repeat(50)}`); + console.log('📊 MIGRATION SUMMARY'); + console.log(`${'='.repeat(50)}\n`); + + let totalJobs = 0; + let totalMigrated = 0; + let totalFailed = 0; + + for (const stats of allStats) { + totalJobs += stats.total; + totalMigrated += stats.migrated; + totalFailed += stats.failed; + } + + console.log(`Total jobs found: ${totalJobs}`); + console.log(`Successfully migrated: ${totalMigrated}`); + console.log(`Failed: ${totalFailed}`); + console.log( + `\n${isDryRun ? 
'⚠️ This was a DRY RUN - no changes were made' : '✅ Migration complete!'}`, + ); + + if (totalFailed > 0) { + console.log( + '\n⚠️ Some jobs failed to migrate. Check the logs above for details.', + ); + process.exit(1); + } + + if (isDryRun && totalMigrated > 0) { + console.log('\n💡 Run without --dry-run to perform the actual migration'); + } +} + +main() + .then(() => { + console.log('\n✨ Done!'); + process.exit(0); + }) + .catch((error) => { + console.error('\n❌ Migration failed:', error); + process.exit(1); + }); diff --git a/apps/worker/src/boot-cron.ts b/apps/worker/src/boot-cron.ts index 30661e063..59e22e46c 100644 --- a/apps/worker/src/boot-cron.ts +++ b/apps/worker/src/boot-cron.ts @@ -1,6 +1,7 @@ import type { CronQueueType } from '@openpanel/queue'; import { cronQueue } from '@openpanel/queue'; +import { getLock } from '@openpanel/redis'; import { logger } from './utils/logger'; export async function bootCron() { @@ -44,43 +45,40 @@ export async function bootCron() { }); } - // Add repeatable jobs - for (const job of jobs) { - await cronQueue.add( - job.name, - { - type: job.type, - payload: undefined, - }, - { - jobId: job.type, - repeat: - typeof job.pattern === 'number' - ? { - every: job.pattern, - } - : { - pattern: job.pattern, - }, - }, - ); + const lock = await getLock('cron:lock', '1', 1000 * 60 * 5); + + if (lock) { + logger.info('Cron lock acquired'); + } else { + logger.info('Cron lock not acquired'); } - // Remove outdated repeatable jobs - const repeatableJobs = await cronQueue.getRepeatableJobs(); - for (const repeatableJob of repeatableJobs) { - const match = jobs.find( - (job) => `${job.name}:${job.type}:::${job.pattern}` === repeatableJob.key, - ); - if (match) { - logger.info('Repeatable job exists', { - key: repeatableJob.key, - }); - } else { - logger.info('Removing repeatable job', { - key: repeatableJob.key, - }); + if (lock) { + logger.info('Updating cron jobs'); + // TODO: Switch to getJobSchedulers + const repeatableJobs = await cronQueue.getRepeatableJobs(); + for (const repeatableJob of repeatableJobs) { cronQueue.removeRepeatableByKey(repeatableJob.key); } + + // Add repeatable jobs + for (const job of jobs) { + await cronQueue.upsertJobScheduler( + job.type, + typeof job.pattern === 'number' + ? 
{ + every: job.pattern, + } + : { + pattern: job.pattern, + }, + { + data: { + type: job.type, + payload: undefined, + }, + }, + ); + } } } diff --git a/apps/worker/src/boot-workers.ts b/apps/worker/src/boot-workers.ts index d99e14b3f..3362aa497 100644 --- a/apps/worker/src/boot-workers.ts +++ b/apps/worker/src/boot-workers.ts @@ -2,68 +2,183 @@ import type { Queue, WorkerOptions } from 'bullmq'; import { Worker } from 'bullmq'; import { + EVENTS_GROUP_QUEUES_SHARDS, type EventsQueuePayloadIncomingEvent, cronQueue, - eventsGroupQueue, + eventsGroupQueues, miscQueue, notificationQueue, queueLogger, sessionsQueue, } from '@openpanel/queue'; -import { getRedisQueue } from '@openpanel/redis'; +import { getLock, getRedisQueue } from '@openpanel/redis'; import { performance } from 'node:perf_hooks'; import { setTimeout as sleep } from 'node:timers/promises'; import { Worker as GroupWorker } from 'groupmq'; import { cronJob } from './jobs/cron'; -import { eventsJob } from './jobs/events'; import { incomingEventPure } from './jobs/events.incoming-event'; import { miscJob } from './jobs/misc'; import { notificationJob } from './jobs/notification'; import { sessionsJob } from './jobs/sessions'; +import { eventsGroupJobDuration } from './metrics'; import { logger } from './utils/logger'; const workerOptions: WorkerOptions = { connection: getRedisQueue(), }; -export async function bootWorkers() { - const eventsGroupWorker = new GroupWorker< - EventsQueuePayloadIncomingEvent['payload'] - >({ - concurrency: Number.parseInt(process.env.EVENT_JOB_CONCURRENCY || '1', 10), - logger: queueLogger, - queue: eventsGroupQueue, - handler: async (job) => { - logger.info('processing event (group queue)', { - groupId: job.groupId, - timestamp: job.data.event.timestamp, - }); - await incomingEventPure(job.data); - }, +type QueueName = string; // Can be: events, events_N (where N is 0 to shards-1), sessions, cron, notification, misc + +/** + * Parses the ENABLED_QUEUES environment variable and returns an array of queue names to start. + * If no env var is provided, returns all queues. + * + * Supported queue names: + * - events - All event shards (events_0, events_1, ..., events_N) + * - events_N - Individual event shard (where N is 0 to EVENTS_GROUP_QUEUES_SHARDS-1) + * - sessions, cron, notification, misc + */ +function getEnabledQueues(): QueueName[] { + const enabledQueuesEnv = process.env.ENABLED_QUEUES?.trim(); + + if (!enabledQueuesEnv) { + logger.info('No ENABLED_QUEUES specified, starting all queues', { + totalEventShards: EVENTS_GROUP_QUEUES_SHARDS, + }); + return ['events', 'sessions', 'cron', 'notification', 'misc']; + } + + const queues = enabledQueuesEnv + .split(',') + .map((q) => q.trim()) + .filter(Boolean); + + logger.info('Starting queues from ENABLED_QUEUES', { + queues, + totalEventShards: EVENTS_GROUP_QUEUES_SHARDS, }); - eventsGroupWorker.run(); - const sessionsWorker = new Worker( - sessionsQueue.name, - sessionsJob, - workerOptions, - ); - const cronWorker = new Worker(cronQueue.name, cronJob, workerOptions); - const notificationWorker = new Worker( - notificationQueue.name, - notificationJob, - workerOptions, - ); - const miscWorker = new Worker(miscQueue.name, miscJob, workerOptions); + return queues; +} + +/** + * Gets the concurrency setting for a queue from environment variables. 
+ * Env var format: {QUEUE_NAME}_CONCURRENCY (e.g., EVENTS_0_CONCURRENCY=32) + */ +function getConcurrencyFor(queueName: string, defaultValue = 1): number { + const envKey = `${queueName.toUpperCase().replace(/[^A-Z0-9]/g, '_')}_CONCURRENCY`; + const value = process.env[envKey]; + + if (value) { + const parsed = Number.parseInt(value, 10); + if (!Number.isNaN(parsed) && parsed > 0) { + return parsed; + } + } + + return defaultValue; +} + +export async function bootWorkers() { + const enabledQueues = getEnabledQueues(); + + const workers: (Worker | GroupWorker)[] = []; + + // Start event workers based on enabled queues + const eventQueuesToStart: number[] = []; + + if (enabledQueues.includes('events')) { + // Start all event shards + for (let i = 0; i < EVENTS_GROUP_QUEUES_SHARDS; i++) { + eventQueuesToStart.push(i); + } + } else { + // Start specific event shards (events_0, events_1, etc.) + for (let i = 0; i < EVENTS_GROUP_QUEUES_SHARDS; i++) { + if (enabledQueues.includes(`events_${i}`)) { + eventQueuesToStart.push(i); + } + } + } - const workers = [ - sessionsWorker, - cronWorker, - notificationWorker, - miscWorker, - eventsGroupWorker, - ]; + for (const index of eventQueuesToStart) { + const queue = eventsGroupQueues[index]; + if (!queue) continue; + + const queueName = `events_${index}`; + const concurrency = getConcurrencyFor( + queueName, + Number.parseInt(process.env.EVENT_JOB_CONCURRENCY || '10', 10), + ); + + const worker = new GroupWorker({ + queue, + concurrency, + logger: queueLogger, + blockingTimeoutSec: Number.parseFloat( + process.env.EVENT_BLOCKING_TIMEOUT_SEC || '1', + ), + handler: async (job) => { + return await incomingEventPure(job.data); + }, + }); + + worker.run(); + workers.push(worker); + logger.info(`Started worker for ${queueName}`, { concurrency }); + } + + // Start sessions worker + if (enabledQueues.includes('sessions')) { + const concurrency = getConcurrencyFor('sessions'); + const sessionsWorker = new Worker(sessionsQueue.name, sessionsJob, { + ...workerOptions, + concurrency, + }); + workers.push(sessionsWorker); + logger.info('Started worker for sessions', { concurrency }); + } + + // Start cron worker + if (enabledQueues.includes('cron')) { + const concurrency = getConcurrencyFor('cron'); + const cronWorker = new Worker(cronQueue.name, cronJob, { + ...workerOptions, + concurrency, + }); + workers.push(cronWorker); + logger.info('Started worker for cron', { concurrency }); + } + + // Start notification worker + if (enabledQueues.includes('notification')) { + const concurrency = getConcurrencyFor('notification'); + const notificationWorker = new Worker( + notificationQueue.name, + notificationJob, + { ...workerOptions, concurrency }, + ); + workers.push(notificationWorker); + logger.info('Started worker for notification', { concurrency }); + } + + // Start misc worker + if (enabledQueues.includes('misc')) { + const concurrency = getConcurrencyFor('misc'); + const miscWorker = new Worker(miscQueue.name, miscJob, { + ...workerOptions, + concurrency, + }); + workers.push(miscWorker); + logger.info('Started worker for misc', { concurrency }); + } + + if (workers.length === 0) { + logger.warn( + 'No workers started. 
Check ENABLED_QUEUES environment variable.', + ); + } workers.forEach((worker) => { (worker as Worker).on('error', (error) => { @@ -87,6 +202,13 @@ export async function bootWorkers() { (worker as Worker).on('failed', (job) => { if (job) { + if (job.processedOn && job.finishedOn) { + const duration = job.finishedOn - job.processedOn; + eventsGroupJobDuration.observe( + { queue_shard: worker.name, status: 'failed' }, + duration, + ); + } logger.error('job failed', { jobId: job.id, worker: worker.name, @@ -99,15 +221,13 @@ export async function bootWorkers() { (worker as Worker).on('completed', (job) => { if (job) { - logger.info('job completed', { - jobId: job.id, - worker: worker.name, - data: job.data, - elapsed: - job.processedOn && job.finishedOn - ? job.finishedOn - job.processedOn - : undefined, - }); + if (job.processedOn && job.finishedOn) { + const duration = job.finishedOn - job.processedOn; + eventsGroupJobDuration.observe( + { queue_shard: worker.name, status: 'success' }, + duration, + ); + } } }); @@ -128,8 +248,14 @@ export async function bootWorkers() { }); try { const time = performance.now(); - await waitForQueueToEmpty(cronQueue); + + // Wait for cron queue to empty if it's running + if (enabledQueues.includes('cron')) { + await waitForQueueToEmpty(cronQueue); + } + await Promise.all(workers.map((worker) => worker.close())); + logger.info('workers closed successfully', { elapsed: performance.now() - time, }); diff --git a/apps/worker/src/index.ts b/apps/worker/src/index.ts index bb7793d7d..f91784b06 100644 --- a/apps/worker/src/index.ts +++ b/apps/worker/src/index.ts @@ -4,7 +4,7 @@ import { ExpressAdapter } from '@bull-board/express'; import { createInitialSalts } from '@openpanel/db'; import { cronQueue, - eventsGroupQueue, + eventsGroupQueues, miscQueue, notificationQueue, sessionsQueue, @@ -31,9 +31,11 @@ async function start() { if (process.env.DISABLE_BULLBOARD === undefined) { const serverAdapter = new ExpressAdapter(); serverAdapter.setBasePath('/'); - ({ + createBullBoard({ queues: [ - new BullBoardGroupMQAdapter(eventsGroupQueue) as any, + ...eventsGroupQueues.map( + (queue) => new BullBoardGroupMQAdapter(queue) as any, + ), new BullMQAdapter(sessionsQueue), new BullMQAdapter(cronQueue), new BullMQAdapter(notificationQueue), diff --git a/apps/worker/src/jobs/cron.delete-projects.ts b/apps/worker/src/jobs/cron.delete-projects.ts index abb8f6848..eab4f84c5 100644 --- a/apps/worker/src/jobs/cron.delete-projects.ts +++ b/apps/worker/src/jobs/cron.delete-projects.ts @@ -54,7 +54,7 @@ export async function deleteProjects(job: Job) { await ch.command({ query, clickhouse_settings: { - lightweight_deletes_sync: 0, + lightweight_deletes_sync: '0', }, }); } diff --git a/apps/worker/src/jobs/events.create-session-end.ts b/apps/worker/src/jobs/events.create-session-end.ts index c8d370a43..dde407a04 100644 --- a/apps/worker/src/jobs/events.create-session-end.ts +++ b/apps/worker/src/jobs/events.create-session-end.ts @@ -1,13 +1,13 @@ import type { Job } from 'bullmq'; import { logger as baseLogger } from '@/utils/logger'; -import { getTime } from '@openpanel/common'; import { type IClickhouseSession, type IServiceCreateEventPayload, type IServiceEvent, TABLE_NAMES, checkNotificationRulesForSessionEnd, + convertClickhouseDateToJs, createEvent, eventBuffer, formatClickhouseDate, @@ -68,7 +68,7 @@ export async function createSessionEnd( reqId: payload.properties?.__reqId ?? 
'unknown', }); - logger.info('Processing session end job'); + logger.debug('Processing session end job'); const session = await sessionBuffer.getExistingSession(payload.sessionId); @@ -77,7 +77,7 @@ export async function createSessionEnd( } try { - handleSessionEndNotifications({ + await handleSessionEndNotifications({ session, payload, }); @@ -103,7 +103,9 @@ export async function createSessionEnd( name: 'session_end', duration: session.duration ?? 0, path: lastScreenView?.path ?? '', - createdAt: new Date(getTime(session.ended_at) + 1000), + createdAt: new Date( + convertClickhouseDateToJs(session.ended_at).getTime() + 100, + ), profileId: lastScreenView?.profileId || payload.profileId, }); } diff --git a/apps/worker/src/jobs/events.incoming-event.ts b/apps/worker/src/jobs/events.incoming-event.ts index ecd28a550..ae5d3dd32 100644 --- a/apps/worker/src/jobs/events.incoming-event.ts +++ b/apps/worker/src/jobs/events.incoming-event.ts @@ -15,7 +15,7 @@ import { } from '@openpanel/db'; import type { ILogger } from '@openpanel/logger'; import type { EventsQueuePayloadIncomingEvent } from '@openpanel/queue'; -import { getLock } from '@openpanel/redis'; +import { getLock, getRedisCache } from '@openpanel/redis'; import { DelayedError, type Job } from 'bullmq'; import { omit } from 'ramda'; import * as R from 'ramda'; @@ -54,6 +54,7 @@ export async function incomingEventPure( job?: Job, token?: string, ) { + await getRedisCache().incr('queue:counter'); const { geo, event: body, @@ -61,6 +62,7 @@ export async function incomingEventPure( projectId, currentDeviceId, previousDeviceId, + uaInfo: _uaInfo, } = jobPayload; const properties = body.properties ?? {}; const reqId = headers['request-id'] ?? 'unknown'; @@ -91,7 +93,8 @@ export async function incomingEventPure( const userAgent = headers['user-agent']; const sdkName = headers['openpanel-sdk-name']; const sdkVersion = headers['openpanel-sdk-version']; - const uaInfo = parseUserAgent(userAgent, properties); + // TODO: Remove both user-agent and parseUserAgent + const uaInfo = _uaInfo ?? 
parseUserAgent(userAgent, properties); const baseEvent = { name: body.name, diff --git a/apps/worker/src/metrics.ts b/apps/worker/src/metrics.ts index eb88523b7..2861a585f 100644 --- a/apps/worker/src/metrics.ts +++ b/apps/worker/src/metrics.ts @@ -2,23 +2,32 @@ import client from 'prom-client'; import { botBuffer, - db, eventBuffer, profileBuffer, sessionBuffer, } from '@openpanel/db'; -import { cronQueue, eventsGroupQueue, sessionsQueue } from '@openpanel/queue'; +import { cronQueue, eventsGroupQueues, sessionsQueue } from '@openpanel/queue'; const Registry = client.Registry; export const register = new Registry(); -const queues = [sessionsQueue, cronQueue, eventsGroupQueue]; +const queues = [sessionsQueue, cronQueue, ...eventsGroupQueues]; + +// Histogram to track job processing time for eventsGroupQueues +export const eventsGroupJobDuration = new client.Histogram({ + name: 'events_group_job_duration_ms', + help: 'Duration of job processing in eventsGroupQueues (in ms)', + labelNames: ['queue_shard', 'status'], + buckets: [10, 25, 50, 100, 250, 500, 750, 1000, 2000, 5000, 10000, 30000], // 10ms to 30s +}); + +register.registerMetric(eventsGroupJobDuration); queues.forEach((queue) => { register.registerMetric( new client.Gauge({ - name: `${queue.name}_active_count`, + name: `${queue.name.replace(/[\{\}]/g, '')}_active_count`, help: 'Active count', async collect() { const metric = await queue.getActiveCount(); @@ -29,7 +38,7 @@ queues.forEach((queue) => { register.registerMetric( new client.Gauge({ - name: `${queue.name}_delayed_count`, + name: `${queue.name.replace(/[\{\}]/g, '')}_delayed_count`, help: 'Delayed count', async collect() { const metric = await queue.getDelayedCount(); @@ -40,7 +49,7 @@ queues.forEach((queue) => { register.registerMetric( new client.Gauge({ - name: `${queue.name}_failed_count`, + name: `${queue.name.replace(/[\{\}]/g, '')}_failed_count`, help: 'Failed count', async collect() { const metric = await queue.getFailedCount(); @@ -51,7 +60,7 @@ queues.forEach((queue) => { register.registerMetric( new client.Gauge({ - name: `${queue.name}_completed_count`, + name: `${queue.name.replace(/[\{\}]/g, '')}_completed_count`, help: 'Completed count', async collect() { const metric = await queue.getCompletedCount(); @@ -62,7 +71,7 @@ queues.forEach((queue) => { register.registerMetric( new client.Gauge({ - name: `${queue.name}_waiting_count`, + name: `${queue.name.replace(/[\{\}]/g, '')}_waiting_count`, help: 'Waiting count', async collect() { const metric = await queue.getWaitingCount(); diff --git a/apps/worker/src/utils/session-handler.ts b/apps/worker/src/utils/session-handler.ts index f55121053..9415dc6f1 100644 --- a/apps/worker/src/utils/session-handler.ts +++ b/apps/worker/src/utils/session-handler.ts @@ -113,7 +113,7 @@ export async function getSessionEndJob(args: { } | null> { const state = await job.getState(); if (state !== 'delayed') { - logger.info(`[session-handler] Session end job is in "${state}" state`, { + logger.debug(`[session-handler] Session end job is in "${state}" state`, { state, retryCount, jobTimestamp: new Date(job.timestamp).toISOString(), diff --git a/docker-compose.yml b/docker-compose.yml index 59e0965d2..00211d05b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,4 @@ -version: '3' +version: "3" services: op-db: @@ -12,12 +12,25 @@ services: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres + op-df: + image: docker.dragonflydb.io/dragonflydb/dragonfly:latest + container_name: op-df + restart: always + ports: + - 
"6380:6379" + ulimits: + memlock: -1 + nofile: 65535 + command: + - "--cluster_mode=emulated" + - "--lock_on_hashtags" + op-kv: image: redis:7.2.5-alpine restart: always volumes: - ./docker/data/op-kv-data:/data - command: [ 'redis-server', '--maxmemory-policy', 'noeviction' ] + command: ["redis-server", "--maxmemory-policy", "noeviction"] ports: - 6379:6379 diff --git a/packages/common/package.json b/packages/common/package.json index a8c914f7c..bfec3e2ae 100644 --- a/packages/common/package.json +++ b/packages/common/package.json @@ -10,6 +10,7 @@ "dependencies": { "@openpanel/constants": "workspace:*", "date-fns": "^3.3.1", + "lru-cache": "^11.2.2", "luxon": "^3.6.1", "mathjs": "^12.3.2", "nanoid": "^5.0.7", diff --git a/packages/common/server/parser-user-agent.ts b/packages/common/server/parser-user-agent.ts index b84b9768c..dbc3c296e 100644 --- a/packages/common/server/parser-user-agent.ts +++ b/packages/common/server/parser-user-agent.ts @@ -1,3 +1,4 @@ +import { LRUCache } from 'lru-cache'; import { UAParser } from 'ua-parser-js'; const parsedServerUa = { @@ -11,8 +12,30 @@ const parsedServerUa = { model: '', } as const; +// Pre-compile all regex patterns for better performance const IPHONE_MODEL_REGEX = /(iPhone|iPad)\s*([0-9,]+)/i; const IOS_MODEL_REGEX = /(iOS)\s*([0-9\.]+)/i; +const IPAD_OS_VERSION_REGEX = /iPadOS\s*([0-9_]+)/i; +const SINGLE_NAME_VERSION_REGEX = /^[^\/]+\/[\d.]+$/; + +// Device detection regexes +const SAMSUNG_MOBILE_REGEX = /SM-[ABDEFGJMNRWZ][0-9]+/i; +const SAMSUNG_TABLET_REGEX = /SM-T[0-9]+/i; +const LG_MOBILE_REGEX = /LG-[A-Z0-9]+/i; +const MOBILE_REGEX_1 = + /(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i; +const MOBILE_REGEX_2 = + /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw-(n|u)|c55\/|capi|ccwa|cdm-|cell|chtm|cldc|cmd-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc-s|devi|dica|dmob|do(c|p)o|ds(12|-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(-|_)|g1 u|g560|gene|gf-5|g-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd-(m|p|t)|hei-|hi(pt|ta)|hp( i|ip)|hs-c|ht(c(-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i-(20|go|ma)|i230|iac( |-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|-[a-w])|libw|lynx|m1-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|-([1-8]|c))|phil|pire|pl(ay|uc)|pn-2|po(ck|rt|se)|prox|psio|pt-g|qa-a|qc(07|12|21|32|60|-[2-7]|i-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h-|oo|p-)|sdk\/|se(c(-|0|1)|47|mc|nd|ri)|sgh-|shar|sie(-|m)|sk-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h-|v-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl-|tdg-|tel(i|m)|tim-|t-mo|to(pl|sh)|ts(70|m-|m3|m5)|tx-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas-|your|zeto|zte-/i; +const TABLET_REGEX = 
/tablet|ipad|xoom|sch-i800|kindle|silk|playbook/i; +const ANDROID_REGEX = /android/i; +const MOBILE_KEYWORD_REGEX = /mobile/i; + +// Cache for parsed results - stores up to 1000 unique user agents +const parseCache = new LRUCache({ + ttl: 1000 * 60 * 5, + ttlAutopurge: true, + max: 1000, +}); const isIphone = (ua: string) => { const model = ua.match(IPHONE_MODEL_REGEX); @@ -27,6 +50,12 @@ const isIphone = (ua: string) => { }; const parse = (ua: string): UAParser.IResult => { + // Check cache first + const cached = parseCache.get(ua); + if (cached) { + return cached; + } + const parser = new UAParser(ua); const res = parser.getResult(); @@ -35,7 +64,7 @@ const parse = (ua: string): UAParser.IResult => { if (!res.device.model && !res.os.name) { const iphone = isIphone(ua); if (iphone) { - return { + const result = { ...res, device: { ...res.device, @@ -48,26 +77,33 @@ const parse = (ua: string): UAParser.IResult => { version: iphone.osVersion, }, }; + parseCache.set(ua, result); + return result; } } // Mozilla/5.0 (iPad; iPadOS 18_0; like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/18.0 if (res.device.model === 'iPad' && !res.os.version) { - const osVersion = ua.match(/iPadOS\s*([0-9_]+)/i); + const osVersion = ua.match(IPAD_OS_VERSION_REGEX); if (osVersion) { - return { + const result = { ...res, os: { ...res.os, version: osVersion[1]!.replace('_', '.'), }, }; + parseCache.set(ua, result); + return result; } } + // Cache the result + parseCache.set(ua, res); return res; }; +export type UserAgentResult = ReturnType; export function parseUserAgent( ua?: string | null, overrides?: Record, @@ -80,13 +116,14 @@ export function parseUserAgent( } return { - os: overrides?.__os || res.os.name, - osVersion: overrides?.__osVersion || res.os.version, - browser: overrides?.__browser || res.browser.name, - browserVersion: overrides?.__browserVersion || res.browser.version, - device: overrides?.__device || res.device.type || getDevice(ua), - brand: overrides?.__brand || res.device.vendor, - model: overrides?.__model || res.device.model, + os: (overrides?.__os || res.os.name) as string, + osVersion: (overrides?.__osVersion || res.os.version) as string, + browser: (overrides?.__browser || res.browser.name) as string, + browserVersion: (overrides?.__browserVersion || + res.browser.version) as string, + device: (overrides?.__device || res.device.type || getDevice(ua)) as string, + brand: (overrides?.__brand || res.device.vendor) as string, + model: (overrides?.__model || res.device.model) as string, isServer: false, } as const; } @@ -94,8 +131,7 @@ export function parseUserAgent( function isServer(res: UAParser.IResult) { // Matches user agents like "Go-http-client/1.0" or "Go Http Client/1.0" // It should just match the first name (with optional spaces) and version - const isSingleNameWithVersion = !!res.ua.match(/^[^\/]+\/[\d.]+$/); - if (isSingleNameWithVersion) { + if (SINGLE_NAME_VERSION_REGEX.test(res.ua)) { return true; } @@ -110,39 +146,39 @@ function isServer(res: UAParser.IResult) { export function getDevice(ua: string) { // Samsung mobile devices use SM-[A,G,N,etc]XXX pattern - if (/SM-[ABDEFGJMNRWZ][0-9]+/i.test(ua)) { + const isSamsungMobile = SAMSUNG_MOBILE_REGEX.test(ua); + if (isSamsungMobile) { return 'mobile'; } // Samsung tablets use SM-TXXX pattern - if (/SM-T[0-9]+/i.test(ua)) { + if (SAMSUNG_TABLET_REGEX.test(ua)) { return 'tablet'; } // LG mobile devices use LG-XXXX pattern - if (/LG-[A-Z0-9]+/i.test(ua)) { + const isLGMobile = LG_MOBILE_REGEX.test(ua); + if (isLGMobile) 
{ return 'mobile'; } - const mobile1 = - /(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test( - ua, - ); - const mobile2 = - /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw-(n|u)|c55\/|capi|ccwa|cdm-|cell|chtm|cldc|cmd-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc-s|devi|dica|dmob|do(c|p)o|ds(12|-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(-|_)|g1 u|g560|gene|gf-5|g-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd-(m|p|t)|hei-|hi(pt|ta)|hp( i|ip)|hs-c|ht(c(-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i-(20|go|ma)|i230|iac( |-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|-[a-w])|libw|lynx|m1-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|-([1-8]|c))|phil|pire|pl(ay|uc)|pn-2|po(ck|rt|se)|prox|psio|pt-g|qa-a|qc(07|12|21|32|60|-[2-7]|i-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h-|oo|p-)|sdk\/|se(c(-|0|1)|47|mc|nd|ri)|sgh-|shar|sie(-|m)|sk-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h-|v-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl-|tdg-|tel(i|m)|tim-|t-mo|to(pl|sh)|ts(70|m-|m3|m5)|tx-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas-|your|zeto|zte-/i.test( - ua.slice(0, 4), - ); - const tablet = - /tablet|ipad|xoom|sch-i800|kindle|silk|playbook/i.test(ua) || - (/android/i.test(ua) && - !/mobile/i.test(ua) && - !/SM-[ABDEFGJMNRWZ][0-9]+/i.test(ua) && - !/LG-[A-Z0-9]+/i.test(ua)); + // Check for mobile patterns + const mobile1 = MOBILE_REGEX_1.test(ua); + const mobile2 = MOBILE_REGEX_2.test(ua.slice(0, 4)); if (mobile1 || mobile2) { return 'mobile'; } + // Check for tablet patterns + // Note: We already checked for Samsung mobile/tablet and LG mobile above + const isAndroid = ANDROID_REGEX.test(ua); + const hasMobileKeyword = MOBILE_KEYWORD_REGEX.test(ua); + + const tablet = + TABLET_REGEX.test(ua) || + (isAndroid && !hasMobileKeyword && !isSamsungMobile && !isLGMobile); + if (tablet) { return 'tablet'; } diff --git a/packages/db/package.json b/packages/db/package.json index 56826516f..284b6d963 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -13,7 +13,7 @@ "with-env": "dotenv -e ../../.env -c --" }, "dependencies": { - "@clickhouse/client": "^1.2.0", + "@clickhouse/client": "^1.12.1", "@openpanel/common": "workspace:*", "@openpanel/constants": "workspace:*", "@openpanel/json": "workspace:*", diff --git a/packages/db/src/buffers/base-buffer.ts b/packages/db/src/buffers/base-buffer.ts index f87cc9037..e49c30c8f 100644 --- a/packages/db/src/buffers/base-buffer.ts +++ b/packages/db/src/buffers/base-buffer.ts @@ -8,18 +8,21 @@ export class BaseBuffer { lockKey: string; lockTimeout = 60; onFlush: () => void; + enableParallelProcessing: boolean; protected bufferCounterKey: string; constructor(options: 
{ name: string; onFlush: () => Promise; + enableParallelProcessing?: boolean; }) { this.logger = createLogger({ name: options.name }); this.name = options.name; this.lockKey = `lock:${this.name}`; this.onFlush = options.onFlush; this.bufferCounterKey = `${this.name}:buffer:count`; + this.enableParallelProcessing = options.enableParallelProcessing ?? false; } protected chunks(items: T[], size: number) { @@ -91,6 +94,26 @@ export class BaseBuffer { async tryFlush() { const now = performance.now(); + + // Parallel mode: No locking, multiple workers can process simultaneously + if (this.enableParallelProcessing) { + try { + this.logger.debug('Processing buffer (parallel mode)...'); + await this.onFlush(); + this.logger.debug('Flush completed (parallel mode)', { + elapsed: performance.now() - now, + }); + } catch (error) { + this.logger.error('Failed to process buffer (parallel mode)', { + error, + }); + // In parallel mode, we can't safely reset counter as other workers might be active + // Counter will be resynced automatically by the periodic job + } + return; + } + + // Sequential mode: Use lock to ensure only one worker processes at a time const lockId = generateSecureId('lock'); const acquired = await getRedisCache().set( this.lockKey, @@ -101,7 +124,7 @@ export class BaseBuffer { ); if (acquired === 'OK') { try { - this.logger.info('Acquired lock. Processing buffer...', { + this.logger.debug('Acquired lock. Processing buffer...', { lockId, }); await this.onFlush(); @@ -117,7 +140,7 @@ export class BaseBuffer { } } finally { await this.releaseLock(lockId); - this.logger.info('Flush completed', { + this.logger.debug('Flush completed', { elapsed: performance.now() - now, lockId, }); diff --git a/packages/db/src/buffers/bot-buffer.ts b/packages/db/src/buffers/bot-buffer.ts index b98f68b7d..766b234e9 100644 --- a/packages/db/src/buffers/bot-buffer.ts +++ b/packages/db/src/buffers/bot-buffer.ts @@ -71,7 +71,7 @@ export class BotBuffer extends BaseBuffer { .decrby(this.bufferCounterKey, events.length) .exec(); - this.logger.info('Processed bot events', { + this.logger.debug('Processed bot events', { count: events.length, }); } catch (error) { diff --git a/packages/db/src/buffers/event-buffer.test.ts b/packages/db/src/buffers/event-buffer.test.ts index 0df5b4aea..95852bd29 100644 --- a/packages/db/src/buffers/event-buffer.test.ts +++ b/packages/db/src/buffers/event-buffer.test.ts @@ -1,13 +1,5 @@ import { getRedisCache } from '@openpanel/redis'; -import { - afterAll, - beforeAll, - beforeEach, - describe, - expect, - it, - vi, -} from 'vitest'; +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { ch } from '../clickhouse/client'; // Mock transformEvent to avoid circular dependency with buffers -> services -> buffers @@ -61,709 +53,472 @@ afterAll(async () => { } catch {} }); -describe('EventBuffer with real Redis', () => { +describe('EventBuffer', () => { let eventBuffer: EventBuffer; beforeEach(() => { eventBuffer = new EventBuffer(); }); - it('keeps a single screen_view pending until a subsequent event arrives', async () => { - const screenView = { + it('adds regular event directly to buffer queue', async () => { + const event = { project_id: 'p1', profile_id: 'u1', - session_id: 'session_a', - name: 'screen_view', + name: 'custom_event', created_at: new Date().toISOString(), } as any; - await eventBuffer.add(screenView); + // Get initial count + const initialCount = await eventBuffer.getBufferSize(); - // Not eligible for processing yet (only 1 event in session) 
- await eventBuffer.processBuffer(); + // Add event + await eventBuffer.add(event); - const sessionKey = `event_buffer:session:${screenView.session_id}`; - const events = await redis.lrange(sessionKey, 0, -1); - expect(events.length).toBe(1); - expect(JSON.parse(events[0]!)).toMatchObject({ - session_id: 'session_a', - name: 'screen_view', - }); + // Buffer counter should increase by 1 + const newCount = await eventBuffer.getBufferSize(); + expect(newCount).toBe(initialCount + 1); }); - it('processes two screen_view events and leaves only the last one pending', async () => { + it('adds multiple screen_views - moves previous to buffer with duration', async () => { const t0 = Date.now(); - const first = { + const sessionId = 'session_1'; + + const view1 = { project_id: 'p1', profile_id: 'u1', - session_id: 'session_b', + session_id: sessionId, name: 'screen_view', created_at: new Date(t0).toISOString(), } as any; - const second = { + + const view2 = { project_id: 'p1', profile_id: 'u1', - session_id: 'session_b', + session_id: sessionId, name: 'screen_view', created_at: new Date(t0 + 1000).toISOString(), } as any; - await eventBuffer.add(first); - await eventBuffer.add(second); + const view3 = { + project_id: 'p1', + profile_id: 'u1', + session_id: sessionId, + name: 'screen_view', + created_at: new Date(t0 + 3000).toISOString(), + } as any; - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); + // Add first screen_view + const count1 = await eventBuffer.getBufferSize(); + await eventBuffer.add(view1); - await eventBuffer.processBuffer(); + // Should be stored as "last" but NOT in queue yet + const count2 = await eventBuffer.getBufferSize(); + expect(count2).toBe(count1); // No change in buffer - // First screen_view should be flushed to ClickHouse, second should remain pending in Redis - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [ - { - ...first, - duration: 1000, - }, - ], + // Last screen_view should be retrievable + const last1 = await eventBuffer.getLastScreenView({ + projectId: 'p1', + sessionId: sessionId, }); + expect(last1).not.toBeNull(); + expect(last1!.createdAt.toISOString()).toBe(view1.created_at); - const sessionKey = `event_buffer:session:${first.session_id}`; - const storedEvents = await redis.lrange(sessionKey, 0, -1); - expect(storedEvents.length).toBe(1); - const remaining = JSON.parse(storedEvents[0]!); - expect(remaining).toMatchObject({ - session_id: 'session_b', - name: 'screen_view', - created_at: second.created_at, - }); - }); + // Add second screen_view + await eventBuffer.add(view2); - it('clears session when a session_end event arrives', async () => { - const t0 = Date.now(); - const first = { - project_id: 'p1', - profile_id: 'u1', - session_id: 'session_c', - name: 'screen_view', - created_at: new Date(t0).toISOString(), - } as any; - const end = { - project_id: 'p1', - profile_id: 'u1', - session_id: 'session_c', - name: 'session_end', - created_at: new Date(t0 + 1000).toISOString(), - } as any; + // Now view1 should be in buffer + const count3 = await eventBuffer.getBufferSize(); + expect(count3).toBe(count1 + 1); - await eventBuffer.add(first); - await eventBuffer.add(end); + // view2 should now be the "last" + const last2 = await eventBuffer.getLastScreenView({ + projectId: 'p1', + sessionId: sessionId, + }); + expect(last2!.createdAt.toISOString()).toBe(view2.created_at); - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValue(undefined as any); + // Add 
third screen_view + await eventBuffer.add(view3); - await eventBuffer.processBuffer(); + // Now view2 should also be in buffer + const count4 = await eventBuffer.getBufferSize(); + expect(count4).toBe(count1 + 2); - // Both events should be flushed, leaving no pending session events - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [first, end], + // view3 should now be the "last" + const last3 = await eventBuffer.getLastScreenView({ + projectId: 'p1', + sessionId: sessionId, }); - const sessionKey = `event_buffer:session:${first.session_id}`; - const storedEvents = await redis.lrange(sessionKey, 0, -1); - expect(storedEvents.length).toBe(0); + expect(last3!.createdAt.toISOString()).toBe(view3.created_at); }); - it('queues and processes non-session events in regular queue', async () => { - const event = { + it('adds session_end - moves last screen_view and session_end to buffer', async () => { + const t0 = Date.now(); + const sessionId = 'session_2'; + + const view = { project_id: 'p2', - name: 'custom_event', - created_at: new Date().toISOString(), + profile_id: 'u2', + session_id: sessionId, + name: 'screen_view', + created_at: new Date(t0).toISOString(), } as any; - await eventBuffer.add(event); + const sessionEnd = { + project_id: 'p2', + profile_id: 'u2', + session_id: sessionId, + name: 'session_end', + created_at: new Date(t0 + 5000).toISOString(), + } as any; - // Should be in regular queue - const regularQueueKey = 'event_buffer:regular_queue'; - expect(await redis.llen(regularQueueKey)).toBe(1); + // Add screen_view + const count1 = await eventBuffer.getBufferSize(); + await eventBuffer.add(view); - // Buffer counter should reflect outstanding = 1 - expect(await eventBuffer.getBufferSize()).toBe(1); + // Should be stored as "last", not in buffer yet + const count2 = await eventBuffer.getBufferSize(); + expect(count2).toBe(count1); - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - await eventBuffer.processBuffer(); + // Add session_end + await eventBuffer.add(sessionEnd); - // Regular queue should be trimmed - expect(await redis.llen(regularQueueKey)).toBe(0); - expect(insertSpy).toHaveBeenCalled(); + // Both should now be in buffer (+2) + const count3 = await eventBuffer.getBufferSize(); + expect(count3).toBe(count1 + 2); - // Buffer counter back to 0 - expect(await eventBuffer.getBufferSize()).toBe(0); + // Last screen_view should be cleared + const last = await eventBuffer.getLastScreenView({ + projectId: 'p2', + sessionId: sessionId, + }); + expect(last).toBeNull(); }); - it('adds session to ready set at 2 events and removes it when < 2 events remain', async () => { - const s = 'session_ready'; - const e1 = { + it('session_end with no previous screen_view - only adds session_end to buffer', async () => { + const sessionId = 'session_3'; + + const sessionEnd = { project_id: 'p3', profile_id: 'u3', - session_id: s, - name: 'screen_view', + session_id: sessionId, + name: 'session_end', created_at: new Date().toISOString(), } as any; - const e2 = { - ...e1, - created_at: new Date(Date.now() + 1000).toISOString(), - } as any; - - await eventBuffer.add(e1); - - // One event -> not ready - expect(await redis.zscore('event_buffer:ready_sessions', s)).toBeNull(); - await eventBuffer.add(e2); + const count1 = await eventBuffer.getBufferSize(); + await eventBuffer.add(sessionEnd); - // Two events -> ready - expect(await redis.zscore('event_buffer:ready_sessions', s)).not.toBeNull(); - - const 
insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - await eventBuffer.processBuffer(); - - // After processing with one pending left, session should be REMOVED from ready set - // It will be re-added when the next event arrives - expect(await redis.zscore('event_buffer:ready_sessions', s)).toBeNull(); - expect(insertSpy).toHaveBeenCalled(); - - // But the session and its data should still exist - const sessionKey = `event_buffer:session:${s}`; - const remaining = await redis.lrange(sessionKey, 0, -1); - expect(remaining.length).toBe(1); // One pending event - expect( - await redis.zscore('event_buffer:sessions_sorted', s), - ).not.toBeNull(); // Still in sorted set + // Only session_end should be in buffer (+1) + const count2 = await eventBuffer.getBufferSize(); + expect(count2).toBe(count1 + 1); }); - it('sets last screen_view key and clears it on session_end', async () => { - const projectId = 'p4'; - const profileId = 'u4'; - const sessionId = 'session_last'; - const lastKey = `session:last_screen_view:${projectId}:${profileId}`; - + it('gets last screen_view by profileId', async () => { const view = { - project_id: projectId, - profile_id: profileId, - session_id: sessionId, + project_id: 'p4', + profile_id: 'u4', + session_id: 'session_4', name: 'screen_view', + path: '/home', created_at: new Date().toISOString(), } as any; await eventBuffer.add(view); - // Should be set in Redis - expect(await redis.get(lastKey)).not.toBeNull(); - - const end = { - project_id: projectId, - profile_id: profileId, - session_id: sessionId, - name: 'session_end', - created_at: new Date(Date.now() + 1000).toISOString(), - } as any; - - await eventBuffer.add(end); - - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - await eventBuffer.processBuffer(); + // Query by profileId + const result = await eventBuffer.getLastScreenView({ + projectId: 'p4', + profileId: 'u4', + }); - // Key should be deleted by session_end - expect(await redis.get(lastKey)).toBeNull(); - expect(insertSpy).toHaveBeenCalled(); + expect(result).not.toBeNull(); + expect(result!.name).toBe('screen_view'); + expect(result!.path).toBe('/home'); }); - it('getLastScreenView works for profile and session queries', async () => { - const projectId = 'p5'; - const profileId = 'u5'; - const sessionId = 'session_glsv'; - + it('gets last screen_view by sessionId', async () => { + const sessionId = 'session_5'; const view = { - project_id: projectId, - profile_id: profileId, + project_id: 'p5', + profile_id: 'u5', session_id: sessionId, name: 'screen_view', + path: '/about', created_at: new Date().toISOString(), } as any; await eventBuffer.add(view); - const byProfile = await eventBuffer.getLastScreenView({ - projectId, - profileId, + // Query by sessionId + const result = await eventBuffer.getLastScreenView({ + projectId: 'p5', + sessionId: sessionId, }); - if (!byProfile) { - throw new Error('byProfile is null'); - } - - expect(byProfile.name).toBe('screen_view'); + expect(result).not.toBeNull(); + expect(result!.name).toBe('screen_view'); + expect(result!.path).toBe('/about'); + }); - const bySession = await eventBuffer.getLastScreenView({ - projectId, - sessionId, + it('returns null for non-existent last screen_view', async () => { + const result = await eventBuffer.getLastScreenView({ + projectId: 'p_nonexistent', + profileId: 'u_nonexistent', }); - if (!bySession) { - throw new Error('bySession is null'); - } - - expect(bySession.name).toBe('screen_view'); + 
expect(result).toBeNull(); }); - it('buffer counter reflects pending after processing 2 screen_view events', async () => { - const sessionId = 'session_counter'; - const a = { + it('gets buffer count correctly', async () => { + // Initially 0 + expect(await eventBuffer.getBufferSize()).toBe(0); + + // Add regular event + await eventBuffer.add({ + project_id: 'p6', + name: 'event1', + created_at: new Date().toISOString(), + } as any); + + expect(await eventBuffer.getBufferSize()).toBe(1); + + // Add another regular event + await eventBuffer.add({ + project_id: 'p6', + name: 'event2', + created_at: new Date().toISOString(), + } as any); + + expect(await eventBuffer.getBufferSize()).toBe(2); + + // Add screen_view (not counted until flushed) + await eventBuffer.add({ project_id: 'p6', profile_id: 'u6', - session_id: sessionId, + session_id: 'session_6', name: 'screen_view', created_at: new Date().toISOString(), - } as any; - const b = { - ...a, - created_at: new Date(Date.now() + 1000).toISOString(), - } as any; - - await eventBuffer.add(a); - await eventBuffer.add(b); + } as any); - // Counter counts enqueued items - expect(await eventBuffer.getBufferSize()).toBeGreaterThanOrEqual(2); + // Still 2 (screen_view is pending) + expect(await eventBuffer.getBufferSize()).toBe(2); - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - await eventBuffer.processBuffer(); + // Add another screen_view (first one gets flushed) + await eventBuffer.add({ + project_id: 'p6', + profile_id: 'u6', + session_id: 'session_6', + name: 'screen_view', + created_at: new Date(Date.now() + 1000).toISOString(), + } as any); - // One pending screen_view left -> counter should be 1 - expect(await eventBuffer.getBufferSize()).toBe(1); - expect(insertSpy).toHaveBeenCalled(); + // Now 3 (2 regular + 1 flushed screen_view) + expect(await eventBuffer.getBufferSize()).toBe(3); }); - it('inserts in chunks according to EVENT_BUFFER_CHUNK_SIZE', async () => { - const prev = process.env.EVENT_BUFFER_CHUNK_SIZE; - process.env.EVENT_BUFFER_CHUNK_SIZE = '1'; - const eb = new EventBuffer(); - - const e1 = { - project_id: 'pc', - name: 'ev1', - created_at: new Date().toISOString(), + it('processes buffer and inserts events into ClickHouse', async () => { + const event1 = { + project_id: 'p7', + name: 'event1', + created_at: new Date(Date.now()).toISOString(), } as any; - const e2 = { - project_id: 'pc', - name: 'ev2', - created_at: new Date(Date.now() + 1).toISOString(), + + const event2 = { + project_id: 'p7', + name: 'event2', + created_at: new Date(Date.now() + 1000).toISOString(), } as any; - await eb.add(e1); - await eb.add(e2); + await eventBuffer.add(event1); + await eventBuffer.add(event2); + + expect(await eventBuffer.getBufferSize()).toBe(2); const insertSpy = vi .spyOn(ch, 'insert') - .mockResolvedValue(undefined as any); - - await eb.processBuffer(); - - // With chunk size 1 and two events, insert should be called twice - expect(insertSpy.mock.calls.length).toBeGreaterThanOrEqual(2); + .mockResolvedValueOnce(undefined as any); - // Restore env - if (prev === undefined) delete process.env.EVENT_BUFFER_CHUNK_SIZE; - else process.env.EVENT_BUFFER_CHUNK_SIZE = prev; - }); + await eventBuffer.processBuffer(); - it('counts active visitors after adding an event with profile', async () => { - const e = { - project_id: 'p7', - profile_id: 'u7', - name: 'custom', - created_at: new Date().toISOString(), - } as any; + // Should insert both events + expect(insertSpy).toHaveBeenCalled(); + const 
callArgs = insertSpy.mock.calls[0]![0]; + expect(callArgs.format).toBe('JSONEachRow'); + expect(callArgs.table).toBe('events'); + expect(Array.isArray(callArgs.values)).toBe(true); - await eventBuffer.add(e); + // Buffer should be empty after processing + expect(await eventBuffer.getBufferSize()).toBe(0); - const count = await eventBuffer.getActiveVisitorCount('p7'); - expect(count).toBeGreaterThanOrEqual(1); + insertSpy.mockRestore(); }); - it('batches pending session updates (respects cap) during processBuffer', async () => { - const prev = process.env.EVENT_BUFFER_UPDATE_PENDING_SESSIONS_BATCH_SIZE; - process.env.EVENT_BUFFER_UPDATE_PENDING_SESSIONS_BATCH_SIZE = '3'; + it('processes buffer with chunking', async () => { + const prev = process.env.EVENT_BUFFER_CHUNK_SIZE; + process.env.EVENT_BUFFER_CHUNK_SIZE = '2'; const eb = new EventBuffer(); - // Create many sessions each with 2 screen_view events → leaves 1 pending per session - const numSessions = 10; - const base = Date.now(); - - for (let i = 0; i < numSessions; i++) { - const sid = `batch_s_${i}`; - const e1 = { + // Add 4 events + for (let i = 0; i < 4; i++) { + await eb.add({ project_id: 'p8', - profile_id: `u${i}`, - session_id: sid, - name: 'screen_view', - created_at: new Date(base + i * 10).toISOString(), - } as any; - const e2 = { - ...e1, - created_at: new Date(base + i * 10 + 1).toISOString(), - } as any; - await eb.add(e1); - await eb.add(e2); + name: `event${i}`, + created_at: new Date(Date.now() + i).toISOString(), + } as any); } const insertSpy = vi .spyOn(ch, 'insert') .mockResolvedValue(undefined as any); - const evalSpy = vi.spyOn(redis as any, 'eval'); await eb.processBuffer(); - // Only consider eval calls for batchUpdateSessionsScript (3 keys now: ready, sorted, counter) - const batchEvalCalls = evalSpy.mock.calls.filter( - (call) => call[1] === 3 && call[4] === 'event_buffer:total_count', - ); - - const expectedCalls = Math.ceil(numSessions / 3); - expect(batchEvalCalls.length).toBeGreaterThanOrEqual(expectedCalls); - - function countSessionsInEvalCall(args: any[]): number { - let idx = 5; // ARGV starts after: script, numKeys, key1, key2, key3 - let count = 0; - while (idx < args.length) { - if (idx + 3 >= args.length) break; - const pendingCount = Number.parseInt(String(args[idx + 3]), 10); - idx += 4 + Math.max(0, pendingCount); - count += 1; - } - return count; - } - - for (const call of batchEvalCalls) { - expect(call[1]).toBe(3); - expect(call[2]).toBe('event_buffer:ready_sessions'); - expect(call[3]).toBe('event_buffer:sessions_sorted'); - expect(call[4]).toBe('event_buffer:total_count'); - - const sessionsInThisCall = countSessionsInEvalCall(call.slice(0)); - expect(sessionsInThisCall).toBeLessThanOrEqual(3); - expect(sessionsInThisCall).toBeGreaterThan(0); - } + // With chunk size 2 and 4 events, should be called twice + expect(insertSpy).toHaveBeenCalledTimes(2); + const call1Values = insertSpy.mock.calls[0]![0].values as any[]; + const call2Values = insertSpy.mock.calls[1]![0].values as any[]; + expect(call1Values.length).toBe(2); + expect(call2Values.length).toBe(2); - expect(insertSpy).toHaveBeenCalled(); - - // Restore env - if (prev === undefined) - delete process.env.EVENT_BUFFER_UPDATE_PENDING_SESSIONS_BATCH_SIZE; - else process.env.EVENT_BUFFER_UPDATE_PENDING_SESSIONS_BATCH_SIZE = prev; + // Restore + if (prev === undefined) delete process.env.EVENT_BUFFER_CHUNK_SIZE; + else process.env.EVENT_BUFFER_CHUNK_SIZE = prev; - evalSpy.mockRestore(); insertSpy.mockRestore(); }); - it('flushes a lone 
session_end and clears the session list', async () => { - const s = 'session_only_end'; - const end = { + it('tracks active visitors', async () => { + const event = { project_id: 'p9', profile_id: 'u9', - session_id: s, - name: 'session_end', + name: 'custom', created_at: new Date().toISOString(), } as any; - const eb = new EventBuffer(); - await eb.add(end); - - // Should be considered ready even though only 1 event (session_end) - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - - await eb.processBuffer(); - - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [end], - }); - - const sessionKey = `event_buffer:session:${s}`; - const remaining = await redis.lrange(sessionKey, 0, -1); - expect(remaining.length).toBe(0); + await eventBuffer.add(event); - insertSpy.mockRestore(); + const count = await eventBuffer.getActiveVisitorCount('p9'); + expect(count).toBeGreaterThanOrEqual(1); }); - it('flushes ALL screen_views when session_end arrives (no pending events)', async () => { + it('handles multiple sessions independently', async () => { const t0 = Date.now(); - const s = 'session_multi_end'; - const view1 = { + + // Session 1 + const view1a = { project_id: 'p10', profile_id: 'u10', - session_id: s, + session_id: 'session_10a', name: 'screen_view', created_at: new Date(t0).toISOString(), } as any; - const view2 = { - ...view1, + + const view1b = { + project_id: 'p10', + profile_id: 'u10', + session_id: 'session_10a', + name: 'screen_view', created_at: new Date(t0 + 1000).toISOString(), } as any; - const view3 = { - ...view1, - created_at: new Date(t0 + 2000).toISOString(), - } as any; - const end = { - ...view1, - name: 'session_end', - created_at: new Date(t0 + 3000).toISOString(), - } as any; - - const eb = new EventBuffer(); - await eb.add(view1); - await eb.add(view2); - await eb.add(view3); - await eb.add(end); - - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - - await eb.processBuffer(); - // All 4 events should be flushed (3 screen_views + session_end) - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [view1, view2, view3, end], - }); - - // Session should be completely empty and removed - const sessionKey = `event_buffer:session:${s}`; - const remaining = await redis.lrange(sessionKey, 0, -1); - expect(remaining.length).toBe(0); - - // Session should be removed from both sorted sets - expect(await redis.zscore('event_buffer:sessions_sorted', s)).toBeNull(); - expect(await redis.zscore('event_buffer:ready_sessions', s)).toBeNull(); - - insertSpy.mockRestore(); - }); - - it('re-adds session to ready_sessions when new event arrives after processing', async () => { - const t0 = Date.now(); - const s = 'session_continued'; - const view1 = { - project_id: 'p11', + // Session 2 + const view2a = { + project_id: 'p10', profile_id: 'u11', - session_id: s, + session_id: 'session_10b', name: 'screen_view', created_at: new Date(t0).toISOString(), } as any; - const view2 = { - ...view1, - created_at: new Date(t0 + 1000).toISOString(), - } as any; - - const eb = new EventBuffer(); - await eb.add(view1); - await eb.add(view2); - - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValue(undefined as any); - - // First processing: flush view1, keep view2 pending - await eb.processBuffer(); - - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [{ ...view1, duration: 1000 }], - 
}); - - // Session should be REMOVED from ready_sessions (only 1 event left) - expect(await redis.zscore('event_buffer:ready_sessions', s)).toBeNull(); - // Add a third screen_view - this should re-add to ready_sessions - const view3 = { - ...view1, + const view2b = { + project_id: 'p10', + profile_id: 'u11', + session_id: 'session_10b', + name: 'screen_view', created_at: new Date(t0 + 2000).toISOString(), } as any; - await eb.add(view3); - // NOW it should be back in ready_sessions (2 events again) - expect(await redis.zscore('event_buffer:ready_sessions', s)).not.toBeNull(); + await eventBuffer.add(view1a); + await eventBuffer.add(view2a); + await eventBuffer.add(view1b); // Flushes view1a + await eventBuffer.add(view2b); // Flushes view2a - insertSpy.mockClear(); + // Should have 2 events in buffer (one from each session) + expect(await eventBuffer.getBufferSize()).toBe(2); - // Second processing: should process view2 (now has duration), keep view3 pending - await eb.processBuffer(); + // Each session should have its own "last" screen_view + const last1 = await eventBuffer.getLastScreenView({ + projectId: 'p10', + sessionId: 'session_10a', + }); + expect(last1!.createdAt.toISOString()).toBe(view1b.created_at); - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [{ ...view2, duration: 1000 }], + const last2 = await eventBuffer.getLastScreenView({ + projectId: 'p10', + sessionId: 'session_10b', }); + expect(last2!.createdAt.toISOString()).toBe(view2b.created_at); + }); - // Session should be REMOVED again (only 1 event left) - expect(await redis.zscore('event_buffer:ready_sessions', s)).toBeNull(); + it('screen_view without session_id goes directly to buffer', async () => { + const view = { + project_id: 'p11', + profile_id: 'u11', + name: 'screen_view', + created_at: new Date().toISOString(), + } as any; - const sessionKey = `event_buffer:session:${s}`; - const remaining = await redis.lrange(sessionKey, 0, -1); - expect(remaining.length).toBe(1); - expect(JSON.parse(remaining[0]!)).toMatchObject({ - session_id: s, - created_at: view3.created_at, - }); + const count1 = await eventBuffer.getBufferSize(); + await eventBuffer.add(view); - insertSpy.mockRestore(); + // Should go directly to buffer (no session_id) + const count2 = await eventBuffer.getBufferSize(); + expect(count2).toBe(count1 + 1); }); - it('removes session from ready_sessions only when completely empty', async () => { + it('updates last screen_view when new one arrives from same profile but different session', async () => { const t0 = Date.now(); - const s = 'session_complete'; - const view = { + + const view1 = { project_id: 'p12', profile_id: 'u12', - session_id: s, + session_id: 'session_12a', name: 'screen_view', + path: '/page1', created_at: new Date(t0).toISOString(), } as any; - const end = { - ...view, - name: 'session_end', + + const view2 = { + project_id: 'p12', + profile_id: 'u12', + session_id: 'session_12b', // Different session! 
+ name: 'screen_view', + path: '/page2', created_at: new Date(t0 + 1000).toISOString(), } as any; - const eb = new EventBuffer(); - await eb.add(view); - await eb.add(end); - - const insertSpy = vi - .spyOn(ch, 'insert') - .mockResolvedValueOnce(undefined as any); - - await eb.processBuffer(); + await eventBuffer.add(view1); + await eventBuffer.add(view2); - // Both events flushed, session empty - expect(insertSpy).toHaveBeenCalledWith({ - format: 'JSONEachRow', - table: 'events', - values: [view, end], + // Both sessions should have their own "last" + const lastSession1 = await eventBuffer.getLastScreenView({ + projectId: 'p12', + sessionId: 'session_12a', }); + expect(lastSession1!.path).toBe('/page1'); - // NOW it should be removed from ready_sessions (because it's empty) - expect(await redis.zscore('event_buffer:ready_sessions', s)).toBeNull(); - expect(await redis.zscore('event_buffer:sessions_sorted', s)).toBeNull(); - - insertSpy.mockRestore(); - }); - - it('getBufferSizeHeavy correctly counts events across many sessions in batches', async () => { - const eb = new EventBuffer(); - const numSessions = 250; // More than batch size (100) to test batching - const eventsPerSession = 3; - const numRegularEvents = 50; - - // Add session events (3 events per session) - for (let i = 0; i < numSessions; i++) { - const sessionId = `batch_session_${i}`; - for (let j = 0; j < eventsPerSession; j++) { - await eb.add({ - project_id: 'p_batch', - profile_id: `u_${i}`, - session_id: sessionId, - name: 'screen_view', - created_at: new Date(Date.now() + i * 100 + j * 10).toISOString(), - } as any); - } - } - - // Add regular queue events - for (let i = 0; i < numRegularEvents; i++) { - await eb.add({ - project_id: 'p_batch', - name: 'custom_event', - created_at: new Date().toISOString(), - } as any); - } - - // Get buffer size using heavy method - const bufferSize = await eb.getBufferSizeHeavy(); - - // Should count all events: (250 sessions × 3 events) + 50 regular events - const expectedSize = numSessions * eventsPerSession + numRegularEvents; - expect(bufferSize).toBe(expectedSize); - - // Verify sessions are properly tracked - const sessionCount = await redis.zcard('event_buffer:sessions_sorted'); - expect(sessionCount).toBe(numSessions); - - const regularQueueCount = await redis.llen('event_buffer:regular_queue'); - expect(regularQueueCount).toBe(numRegularEvents); - }); - - it('getBufferSizeHeavy handles empty buffer correctly', async () => { - const eb = new EventBuffer(); - - const bufferSize = await eb.getBufferSizeHeavy(); - - expect(bufferSize).toBe(0); - }); - - it('getBufferSizeHeavy handles only regular queue events', async () => { - const eb = new EventBuffer(); - const numEvents = 10; - - for (let i = 0; i < numEvents; i++) { - await eb.add({ - project_id: 'p_regular', - name: 'custom_event', - created_at: new Date().toISOString(), - } as any); - } - - const bufferSize = await eb.getBufferSizeHeavy(); - - expect(bufferSize).toBe(numEvents); - }); - - it('getBufferSizeHeavy handles only session events', async () => { - const eb = new EventBuffer(); - const numSessions = 5; - const eventsPerSession = 2; - - for (let i = 0; i < numSessions; i++) { - for (let j = 0; j < eventsPerSession; j++) { - await eb.add({ - project_id: 'p_sessions', - profile_id: `u_${i}`, - session_id: `session_${i}`, - name: 'screen_view', - created_at: new Date(Date.now() + i * 100 + j * 10).toISOString(), - } as any); - } - } - - const bufferSize = await eb.getBufferSizeHeavy(); + const lastSession2 = await 
eventBuffer.getLastScreenView({ + projectId: 'p12', + sessionId: 'session_12b', + }); + expect(lastSession2!.path).toBe('/page2'); - expect(bufferSize).toBe(numSessions * eventsPerSession); + // Profile should have the latest one + const lastProfile = await eventBuffer.getLastScreenView({ + projectId: 'p12', + profileId: 'u12', + }); + expect(lastProfile!.path).toBe('/page2'); }); }); diff --git a/packages/db/src/buffers/event-buffer.ts b/packages/db/src/buffers/event-buffer.ts index e6793ca78..d305372aa 100644 --- a/packages/db/src/buffers/event-buffer.ts +++ b/packages/db/src/buffers/event-buffer.ts @@ -4,7 +4,6 @@ import { getRedisCache, getRedisPub, publishEvent, - runEvery, } from '@openpanel/redis'; import { ch } from '../clickhouse/client'; import { @@ -15,257 +14,144 @@ import { import { BaseBuffer } from './base-buffer'; /** + * Simplified Event Buffer * - * Usuful redis commands: - * --------------------- - * - * Add empty session - * ZADD event_buffer:sessions_sorted 1710831600000 "test_empty_session" - * - * Get session events - * LRANGE event_buffer:session:test_empty_session 0 -1 - * - * Get session events count - * LLEN event_buffer:session:test_empty_session - * - * Get regular queue events - * LRANGE event_buffer:regular_queue 0 -1 - * - * Get regular queue count - * LLEN event_buffer:regular_queue - * + * Rules: + * 1. All events go into a single list buffer (event_buffer:queue) + * 2. screen_view events are handled specially: + * - Store current screen_view as "last" for the session + * - When a new screen_view arrives, flush the previous one with calculated duration + * 3. session_end events: + * - Retrieve the last screen_view (don't modify it) + * - Push both screen_view and session_end to buffer + * 4. Flush: Simply process all events from the list buffer */ export class EventBuffer extends BaseBuffer { // Configurable limits - // How many days to keep buffered session metadata before cleanup - private daysToKeep = process.env.EVENT_BUFFER_DAYS_TO_KEEP - ? Number.parseFloat(process.env.EVENT_BUFFER_DAYS_TO_KEEP) - : 3; - // How many events we attempt to FETCH per flush cycle (split across sessions/non-sessions) - // Prefer new env EVENT_BUFFER_BATCH_SIZE; fallback to legacy EVENT_BUFFER_BATCH_SIZE private batchSize = process.env.EVENT_BUFFER_BATCH_SIZE ? Number.parseInt(process.env.EVENT_BUFFER_BATCH_SIZE, 10) : 4000; - // How many events per insert chunk we send to ClickHouse (insert batch size) private chunkSize = process.env.EVENT_BUFFER_CHUNK_SIZE ? Number.parseInt(process.env.EVENT_BUFFER_CHUNK_SIZE, 10) : 1000; - private updatePendingSessionsBatchSize = process.env - .EVENT_BUFFER_UPDATE_PENDING_SESSIONS_BATCH_SIZE - ? Number.parseInt( - process.env.EVENT_BUFFER_UPDATE_PENDING_SESSIONS_BATCH_SIZE, - 10, - ) - : 300; - - // Cap of how many ready sessions to scan per flush cycle (configurable via env) - private maxSessionsPerFlush = process.env.EVENT_BUFFER_MAX_SESSIONS_PER_FLUSH - ? Number.parseInt(process.env.EVENT_BUFFER_MAX_SESSIONS_PER_FLUSH, 10) - : 500; - - // Soft time budget per flush (ms) to avoid long lock holds - private flushTimeBudgetMs = process.env.EVENT_BUFFER_FLUSH_TIME_BUDGET_MS - ? 
Number.parseInt(process.env.EVENT_BUFFER_FLUSH_TIME_BUDGET_MS, 10) - : 1000; - - private minEventsInSession = 2; private activeVisitorsExpiration = 60 * 5; // 5 minutes - private sessionEvents = ['screen_view', 'session_end']; - - // LIST - Stores events without sessions - private regularQueueKey = 'event_buffer:regular_queue'; - - // SORTED SET - Tracks all active session IDs with their timestamps - private sessionSortedKey = 'event_buffer:sessions_sorted'; // sorted set of session IDs - - // SORTED SET - Tracks sessions that are ready for processing (have >= minEvents) - private readySessionsKey = 'event_buffer:ready_sessions'; + // LIST - Stores all events ready to be flushed + private queueKey = 'event_buffer:queue'; // STRING - Tracks total buffer size incrementally protected bufferCounterKey = 'event_buffer:total_count'; - private readonly sessionKeyPrefix = 'event_buffer:session:'; - // LIST - Stores events for a given session - private getSessionKey(sessionId: string) { - return `${this.sessionKeyPrefix}${sessionId}`; - } - /** - * Optimized Lua script that processes ready sessions efficiently. - * Only fetches from sessions known to have >= minEvents. - * Limits the number of events fetched per session to avoid huge payloads. - */ - private readonly processReadySessionsScript = ` -local readySessionsKey = KEYS[1] -local sessionPrefix = KEYS[2] -local sessionsSortedKey = KEYS[3] -local maxSessions = tonumber(ARGV[1]) -local maxEventsPerSession = tonumber(ARGV[2]) -local startOffset = tonumber(ARGV[3]) or 0 - -local result = {} -local sessionsToRemove = {} - --- Get up to maxSessions ready sessions from window [startOffset, startOffset+maxSessions-1] -local stopIndex = startOffset + maxSessions - 1 -local sessionIds = redis.call('ZRANGE', readySessionsKey, startOffset, stopIndex) -local resultIndex = 1 - -for i, sessionId in ipairs(sessionIds) do - local sessionKey = sessionPrefix .. sessionId - local eventCount = redis.call('LLEN', sessionKey) - - if eventCount == 0 then - -- Session is empty, remove from both sets - table.insert(sessionsToRemove, sessionId) - else - -- Fetch limited number of events to avoid huge payloads - local eventsToFetch = math.min(eventCount, maxEventsPerSession) - local events = redis.call('LRANGE', sessionKey, 0, eventsToFetch - 1) - - result[resultIndex] = { - sessionId = sessionId, - events = events, - totalEventCount = eventCount - } - resultIndex = resultIndex + 1 - end -end + // Script SHAs for loaded Lua scripts + private scriptShas: { + addScreenView?: string; + addSessionEnd?: string; + } = {}; --- Clean up empty sessions from both ready set and sorted set -if #sessionsToRemove > 0 then - redis.call('ZREM', readySessionsKey, unpack(sessionsToRemove)) - redis.call('ZREM', sessionsSortedKey, unpack(sessionsToRemove)) - -- Also delete the empty session keys - for i, sessionId in ipairs(sessionsToRemove) do - redis.call('DEL', sessionPrefix .. sessionId) - end -end + // Hash key for storing last screen_view per session + private getLastScreenViewKeyBySession(sessionId: string) { + return `event_buffer:last_screen_view:session:${sessionId}`; + } -return cjson.encode(result) -`; + // Hash key for storing last screen_view per profile + private getLastScreenViewKeyByProfile(projectId: string, profileId: string) { + return `event_buffer:last_screen_view:profile:${projectId}:${profileId}`; + } /** - * Optimized atomic Lua script to update a session's list with pending events. - * Also manages the ready_sessions set and buffer counter. 
+ * Lua script for handling screen_view addition - RACE-CONDITION SAFE without GroupMQ * - * KEYS[1] = session key - * KEYS[2] = ready sessions key - * KEYS[3] = sessions sorted key + * Strategy: Use Redis GETDEL (atomic get-and-delete) to ensure only ONE thread + * can process the "last" screen_view at a time. + * + * KEYS[1] = last screen_view key (by session) - stores both event and timestamp as JSON + * KEYS[2] = last screen_view key (by profile, may be empty) + * KEYS[3] = queue key * KEYS[4] = buffer counter key - * ARGV[1] = sessionId - * ARGV[2] = snapshotCount (number of events that were present in our snapshot) - * ARGV[3] = pendingCount (number of pending events) - * ARGV[4] = minEventsInSession - * ARGV[5..(4+pendingCount)] = the pending event strings + * ARGV[1] = new event with timestamp as JSON: {"event": {...}, "ts": 123456} + * ARGV[2] = TTL for last screen_view (1 hour) */ - private readonly updateSessionScript = ` + private readonly addScreenViewScript = ` local sessionKey = KEYS[1] -local readySessionsKey = KEYS[2] -local sessionsSortedKey = KEYS[3] -local bufferCounterKey = KEYS[4] -local sessionId = ARGV[1] -local snapshotCount = tonumber(ARGV[2]) -local pendingCount = tonumber(ARGV[3]) -local minEventsInSession = tonumber(ARGV[4]) - --- Trim the list to remove the processed (snapshot) events. -redis.call("LTRIM", sessionKey, snapshotCount, -1) - --- Re-insert the pending events at the head in their original order. -for i = pendingCount, 1, -1 do - redis.call("LPUSH", sessionKey, ARGV[i+4]) -end - -local newLength = redis.call("LLEN", sessionKey) - --- Update ready sessions set based on new length -if newLength == 0 then - -- Session is now empty, remove from both sets and delete key - redis.call("ZREM", readySessionsKey, sessionId) - redis.call("ZREM", sessionsSortedKey, sessionId) - redis.call("DEL", sessionKey) -elseif newLength >= minEventsInSession then - -- Session has enough events, keep/add it in ready_sessions - redis.call("ZADD", readySessionsKey, redis.call("TIME")[1], sessionId) -else - -- Session has events but < minEvents, remove from ready_sessions - -- It will be re-added when a new event arrives (via addEventScript) - redis.call("ZREM", readySessionsKey, sessionId) +local profileKey = KEYS[2] +local queueKey = KEYS[3] +local counterKey = KEYS[4] +local newEventData = ARGV[1] +local ttl = tonumber(ARGV[2]) + +-- GETDEL is atomic: get previous and delete in one operation +-- This ensures only ONE thread gets the previous event +local previousEventData = redis.call("GETDEL", sessionKey) + +-- Store new screen_view as last for session +redis.call("SET", sessionKey, newEventData, "EX", ttl) + +-- Store new screen_view as last for profile (if key provided) +if profileKey and profileKey ~= "" then + redis.call("SET", profileKey, newEventData, "EX", ttl) end --- Update buffer counter (decrement by processed events, increment by pending) -local counterChange = pendingCount - snapshotCount -if counterChange ~= 0 then - redis.call("INCRBY", bufferCounterKey, counterChange) +-- If there was a previous screen_view, add it to queue with calculated duration +if previousEventData then + local prev = cjson.decode(previousEventData) + local curr = cjson.decode(newEventData) + + -- Calculate duration (ensure non-negative to handle clock skew) + if prev.ts and curr.ts then + prev.event.duration = math.max(0, curr.ts - prev.ts) + end + + redis.call("RPUSH", queueKey, cjson.encode(prev.event)) + redis.call("INCR", counterKey) + return 1 end -return newLength +return 0 `; 
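Editor's note, for illustration only (not part of the diff): a minimal TypeScript sketch of the per-session flow that addScreenViewScript performs, assuming an ioredis-style client that exposes getdel. The helper name, key parameters, and the { event, ts } wrapper are hypothetical stand-ins mirroring eventWithTimestamp; the profile key handling is omitted, and the real race safety comes from running the whole sequence as a single Lua script in Redis rather than as separate client calls.

```ts
// Illustrative, non-atomic TypeScript equivalent of addScreenViewScript.
import Redis from 'ioredis';

type BufferedEvent = { name: string; created_at: string; duration?: number };

async function addScreenViewSketch(
  redis: Redis,
  keys: { sessionKey: string; queueKey: string; counterKey: string },
  event: BufferedEvent,
) {
  const ts = new Date(event.created_at).getTime();

  // GETDEL returns the previous "last screen_view" and deletes it in one step,
  // so only one caller can ever observe (and flush) a given previous event.
  const prevJson = await redis.getdel(keys.sessionKey);

  // The new screen_view becomes the pending "last" for this session (1h TTL).
  await redis.set(keys.sessionKey, JSON.stringify({ event, ts }), 'EX', 3600);

  if (prevJson) {
    const prev = JSON.parse(prevJson) as { event: BufferedEvent; ts: number };
    // Duration is the gap between consecutive screen_views, clamped at 0
    // to tolerate clock skew, then the previous view is queued for flushing.
    prev.event.duration = Math.max(0, ts - prev.ts);
    await redis.rpush(keys.queueKey, JSON.stringify(prev.event));
    await redis.incr(keys.counterKey);
  }
}
```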
/** - * Optimized batch update script with counter and ready sessions management. - * KEYS[1] = ready sessions key - * KEYS[2] = sessions sorted key - * KEYS[3] = buffer counter key - * ARGV format: [minEventsInSession, sessionKey1, sessionId1, snapshotCount1, pendingCount1, pending1...., sessionKey2, ...] + * Lua script for handling session_end - RACE-CONDITION SAFE + * + * Uses GETDEL to atomically retrieve and delete the last screen_view + * + * KEYS[1] = last screen_view key (by session) + * KEYS[2] = last screen_view key (by profile, may be empty) + * KEYS[3] = queue key + * KEYS[4] = buffer counter key + * ARGV[1] = session_end event JSON */ - private readonly batchUpdateSessionsScript = ` -local readySessionsKey = KEYS[1] -local sessionsSortedKey = KEYS[2] -local bufferCounterKey = KEYS[3] -local minEventsInSession = tonumber(ARGV[1]) -local totalCounterChange = 0 - -local i = 2 -while i <= #ARGV do - local sessionKey = ARGV[i] - local sessionId = ARGV[i + 1] - local snapshotCount = tonumber(ARGV[i + 2]) - local pendingCount = tonumber(ARGV[i + 3]) - - -- Trim the list to remove processed events - redis.call("LTRIM", sessionKey, snapshotCount, -1) - - -- Re-insert pending events at the head in original order - if pendingCount > 0 then - -- Reinsert in original order: LPUSH requires reverse iteration - for j = pendingCount, 1, -1 do - redis.call("LPUSH", sessionKey, ARGV[i + 3 + j]) - end - end - - local newLength = redis.call("LLEN", sessionKey) - - -- Update ready sessions set based on new length - if newLength == 0 then - -- Session is now empty, remove from both sets and delete key - redis.call("ZREM", readySessionsKey, sessionId) - redis.call("ZREM", sessionsSortedKey, sessionId) - redis.call("DEL", sessionKey) - elseif newLength >= minEventsInSession then - -- Session has enough events, keep/add it in ready_sessions - redis.call("ZADD", readySessionsKey, redis.call("TIME")[1], sessionId) - else - -- Session has events but < minEvents, remove from ready_sessions - -- It will be re-added when a new event arrives (via addEventScript) - redis.call("ZREM", readySessionsKey, sessionId) - end - - -- Track counter change - totalCounterChange = totalCounterChange + (pendingCount - snapshotCount) - - i = i + 4 + pendingCount + private readonly addSessionEndScript = ` +local sessionKey = KEYS[1] +local profileKey = KEYS[2] +local queueKey = KEYS[3] +local counterKey = KEYS[4] +local sessionEndJson = ARGV[1] + +-- GETDEL is atomic: only ONE thread gets the last screen_view +local previousEventData = redis.call("GETDEL", sessionKey) +local added = 0 + +-- If there was a previous screen_view, add it to queue +if previousEventData then + local prev = cjson.decode(previousEventData) + redis.call("RPUSH", queueKey, cjson.encode(prev.event)) + redis.call("INCR", counterKey) + added = added + 1 end --- Update buffer counter once -if totalCounterChange ~= 0 then - redis.call("INCRBY", bufferCounterKey, totalCounterChange) +-- Add session_end to queue +redis.call("RPUSH", queueKey, sessionEndJson) +redis.call("INCR", counterKey) +added = added + 1 + +-- Delete profile key +if profileKey and profileKey ~= "" then + redis.call("DEL", profileKey) end -return "OK" +return added `; constructor() { @@ -273,9 +159,34 @@ return "OK" name: 'event', onFlush: async () => { await this.processBuffer(); - await this.tryCleanup(); }, }); + // Load Lua scripts into Redis on startup + this.loadScripts(); + } + + /** + * Load Lua scripts into Redis and cache their SHAs. 
+ * This avoids sending the entire script on every call. + */ + private async loadScripts() { + try { + const redis = getRedisCache(); + const [screenViewSha, sessionEndSha] = await Promise.all([ + redis.script('LOAD', this.addScreenViewScript), + redis.script('LOAD', this.addSessionEndScript), + ]); + + this.scriptShas.addScreenView = screenViewSha as string; + this.scriptShas.addSessionEnd = sessionEndSha as string; + + this.logger.info('Loaded Lua scripts into Redis', { + addScreenView: this.scriptShas.addScreenView, + addSessionEnd: this.scriptShas.addSessionEnd, + }); + } catch (error) { + this.logger.error('Failed to load Lua scripts', { error }); + } } bulkAdd(events: IClickhouseEvent[]) { @@ -288,68 +199,12 @@ return "OK" } /** - * Optimized Lua script for adding events with counter management. - * KEYS[1] = session key (if session event) - * KEYS[2] = regular queue key - * KEYS[3] = sessions sorted key - * KEYS[4] = ready sessions key - * KEYS[5] = buffer counter key - * KEYS[6] = last event key (if screen_view) - * ARGV[1] = event JSON - * ARGV[2] = session_id - * ARGV[3] = event_name - * ARGV[4] = score (timestamp) - * ARGV[5] = minEventsInSession - * ARGV[6] = last event TTL (if screen_view) - */ - private readonly addEventScript = ` -local sessionKey = KEYS[1] -local regularQueueKey = KEYS[2] -local sessionsSortedKey = KEYS[3] -local readySessionsKey = KEYS[4] -local bufferCounterKey = KEYS[5] -local lastEventKey = KEYS[6] - -local eventJson = ARGV[1] -local sessionId = ARGV[2] -local eventName = ARGV[3] -local score = tonumber(ARGV[4]) -local minEventsInSession = tonumber(ARGV[5]) -local lastEventTTL = tonumber(ARGV[6] or 0) - -local counterIncrement = 1 - -if sessionId and sessionId ~= "" and (eventName == "screen_view" or eventName == "session_end") then - -- Add to session - redis.call("RPUSH", sessionKey, eventJson) - redis.call("ZADD", sessionsSortedKey, "NX", score, sessionId) - - -- Check if session is now ready for processing - local sessionLength = redis.call("LLEN", sessionKey) - if sessionLength >= minEventsInSession or eventName == "session_end" then - redis.call("ZADD", readySessionsKey, score, sessionId) - end - - -- Handle screen_view specific logic - if eventName == "screen_view" and lastEventKey ~= "" then - redis.call("SET", lastEventKey, eventJson, "EX", lastEventTTL) - elseif eventName == "session_end" and lastEventKey ~= "" then - redis.call("DEL", lastEventKey) - end -else - -- Add to regular queue - redis.call("RPUSH", regularQueueKey, eventJson) -end - --- Increment buffer counter -redis.call("INCR", bufferCounterKey) - -return "OK" -`; - - /** - * Add an event into Redis. - * Uses optimized Lua script to reduce round trips and manage counters. + * Add an event into Redis buffer. + * + * Logic: + * - screen_view: Store as "last" for session, flush previous if exists + * - session_end: Flush last screen_view + session_end + * - Other events: Add directly to queue */ async add(event: IClickhouseEvent, _multi?: ReturnType) { try { @@ -357,46 +212,59 @@ return "OK" const eventJson = JSON.stringify(event); const multi = _multi || redis.multi(); - const isSessionEvent = - event.session_id && this.sessionEvents.includes(event.name); - - if (isSessionEvent) { - const sessionKey = this.getSessionKey(event.session_id); - const score = new Date(event.created_at || Date.now()).getTime(); - const lastEventKey = - event.name === 'screen_view' - ? 
this.getLastEventKey({ - projectId: event.project_id, - profileId: event.profile_id, - }) - : event.name === 'session_end' - ? this.getLastEventKey({ - projectId: event.project_id, - profileId: event.profile_id, - }) - : ''; - - multi.eval( - this.addEventScript, - 6, + if (event.session_id && event.name === 'screen_view') { + // Handle screen_view + const sessionKey = this.getLastScreenViewKeyBySession(event.session_id); + const profileKey = event.profile_id + ? this.getLastScreenViewKeyByProfile( + event.project_id, + event.profile_id, + ) + : ''; + const timestamp = new Date(event.created_at || Date.now()).getTime(); + + // Combine event and timestamp into single JSON for atomic operations + const eventWithTimestamp = JSON.stringify({ + event: event, + ts: timestamp, + }); + + this.evalScript( + multi, + 'addScreenView', + this.addScreenViewScript, + 4, sessionKey, - this.regularQueueKey, - this.sessionSortedKey, - this.readySessionsKey, + profileKey, + this.queueKey, + this.bufferCounterKey, + eventWithTimestamp, + '3600', // 1 hour TTL + ); + } else if (event.session_id && event.name === 'session_end') { + // Handle session_end + const sessionKey = this.getLastScreenViewKeyBySession(event.session_id); + const profileKey = event.profile_id + ? this.getLastScreenViewKeyByProfile( + event.project_id, + event.profile_id, + ) + : ''; + + this.evalScript( + multi, + 'addSessionEnd', + this.addSessionEndScript, + 4, + sessionKey, + profileKey, + this.queueKey, this.bufferCounterKey, - lastEventKey, eventJson, - event.session_id, - event.name, - score.toString(), - this.minEventsInSession.toString(), - '3600', // 1 hour TTL for last event ); } else { - // Non-session events go to regular queue - multi - .rpush(this.regularQueueKey, eventJson) - .incr(this.bufferCounterKey); + // All other events go directly to queue + multi.rpush(this.queueKey, eventJson).incr(this.bufferCounterKey); } if (event.profile_id) { @@ -417,185 +285,79 @@ return "OK" } } - private async getEligibleSessions( - startOffset: number, - maxEventsPerSession: number, - sessionsPerPage: number, + /** + * Execute a Lua script using EVALSHA (cached) or fallback to EVAL. + * This avoids sending the entire script on every call. 
+ */ + private evalScript( + multi: ReturnType, + scriptName: keyof typeof this.scriptShas, + scriptContent: string, + numKeys: number, + ...args: (string | number)[] ) { - const sessionsSorted = await getRedisCache().eval( - this.processReadySessionsScript, - 3, // number of KEYS - this.readySessionsKey, - this.sessionKeyPrefix, - this.sessionSortedKey, - sessionsPerPage.toString(), - maxEventsPerSession.toString(), - startOffset.toString(), - ); - - const parsed = getSafeJson< - Array<{ - sessionId: string; - events: string[]; - totalEventCount: number; - }> - >(sessionsSorted as string); - - const sessions: Record< - string, - { - events: IClickhouseEvent[]; - totalEventCount: number; - } - > = {}; - - if (!parsed || !Array.isArray(parsed)) { - return sessions; - } - - for (const session of parsed) { - const events = session.events - .map((e) => getSafeJson(e)) - .filter((e): e is IClickhouseEvent => e !== null); - - sessions[session.sessionId] = { - events, - totalEventCount: session.totalEventCount, - }; + const sha = this.scriptShas[scriptName]; + + if (sha) { + // Use EVALSHA with cached SHA + multi.evalsha(sha, numKeys, ...args); + } else { + // Fallback to EVAL and try to reload script + multi.eval(scriptContent, numKeys, ...args); + this.logger.warn(`Script ${scriptName} not loaded, using EVAL fallback`); + // Attempt to reload scripts in background + this.loadScripts(); } - - return sessions; } /** - * Process the Redis buffer. - * - * 1. Fetch events from two sources in parallel: - * - Pick events from regular queue (batchSize / 2) - * - Pick events from sessions (batchSize / 2). - * This only have screen_view and session_end events - * - * 2. Process session events: - * - For screen_view events, calculate duration if next event exists - * - Last screen_view of each session remains pending - * - All other events are marked for flushing + * Process the Redis buffer - simplified version. * - * 3. Process regular queue events: - * - Inherit path/origin from last screen_view of same session if exists - * - * 4. Insert all flushable events into ClickHouse in chunks and publish notifications - * - * 5. Clean up processed events: - * - For regular queue: LTRIM processed events - * - For sessions: Update lists atomically via Lua script, preserving pending events + * Simply: + * 1. Fetch events from the queue (up to batchSize) + * 2. Parse and sort them + * 3. Insert into ClickHouse in chunks + * 4. Publish saved events + * 5. 
Clean up processed events from queue */ async processBuffer() { const redis = getRedisCache(); - const eventsToClickhouse: IClickhouseEvent[] = []; - const pendingUpdates: Array<{ - sessionId: string; - snapshotCount: number; - pending: IClickhouseEvent[]; - }> = []; - const timer = { - fetchUnprocessedEvents: 0, - processSessionEvents: 0, - processRegularQueueEvents: 0, - insertEvents: 0, - updatePendingSessions: 0, - }; try { - let now = performance.now(); - // (A) Fetch no-session events once per run - const regularQueueEvents = await redis.lrange( - this.regularQueueKey, + // Fetch events from queue + const queueEvents = await redis.lrange( + this.queueKey, 0, - Math.floor(this.batchSize / 2) - 1, + this.batchSize - 1, ); - // (A2) Page through ready sessions within time and budget - let sessionBudget = Math.floor(this.batchSize / 2); - let startOffset = 0; - let totalSessionEventsFetched = 0; - while (sessionBudget > 0) { - if (performance.now() - now > this.flushTimeBudgetMs) { - this.logger.debug('Stopping session paging due to time budget'); - break; - } - - const sessionsPerPage = Math.min( - this.maxSessionsPerFlush, - Math.max(1, Math.floor(sessionBudget / 2)), - ); - const perSessionBudget = Math.max( - 2, - Math.floor(sessionBudget / sessionsPerPage), - ); - - const sessionsPage = await this.getEligibleSessions( - startOffset, - perSessionBudget, - sessionsPerPage, - ); - const sessionIds = Object.keys(sessionsPage); - if (sessionIds.length === 0) { - break; - } - - for (const sessionId of sessionIds) { - const sessionData = sessionsPage[sessionId]!; - const { flush, pending } = this.processSessionEvents( - sessionData.events, - ); - - if (flush.length > 0) { - eventsToClickhouse.push(...flush); - } - - pendingUpdates.push({ - sessionId, - snapshotCount: sessionData.events.length, - pending, - }); - - // Decrease budget by fetched events for this session window - sessionBudget -= sessionData.events.length; - totalSessionEventsFetched += sessionData.events.length; - if (sessionBudget <= 0) { - break; - } - } - startOffset += sessionsPerPage; + if (queueEvents.length === 0) { + this.logger.debug('No events to process'); + return; } - timer.processSessionEvents = performance.now() - now; - now = performance.now(); - - // (B) Process no-session events - for (const eventStr of regularQueueEvents) { + // Parse events + const eventsToClickhouse: IClickhouseEvent[] = []; + for (const eventStr of queueEvents) { const event = getSafeJson(eventStr); if (event) { eventsToClickhouse.push(event); } } - timer.processRegularQueueEvents = performance.now() - now; - now = performance.now(); - if (eventsToClickhouse.length === 0) { - this.logger.debug('No events to process'); + this.logger.debug('No valid events to process'); return; } - // (C) Sort events by creation time. + // Sort events by creation time eventsToClickhouse.sort( (a, b) => new Date(a.created_at || 0).getTime() - new Date(b.created_at || 0).getTime(), ); - // (D) Insert events into ClickHouse in chunks + // Insert events into ClickHouse in chunks this.logger.info('Inserting events into ClickHouse', { totalEvents: eventsToClickhouse.length, chunks: Math.ceil(eventsToClickhouse.length / this.chunkSize), @@ -609,40 +371,23 @@ return "OK" }); } - timer.insertEvents = performance.now() - now; - now = performance.now(); - - // (E) Publish "saved" events. 
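// Illustrative sketch (not part of this diff): the "insert into ClickHouse in chunks"
// step above, written standalone. createClient is the real @clickhouse/client API;
// the table name, chunk size and row shape are placeholders, and the buffer classes
// use their own this.chunks() helper with the same behaviour as chunks() here.
import { createClient } from '@clickhouse/client';

const ch = createClient({ url: process.env.CLICKHOUSE_URL });

function* chunks<T>(items: T[], size: number): Generator<T[]> {
  for (let i = 0; i < items.length; i += size) {
    yield items.slice(i, i + size);
  }
}

async function insertInChunks(rows: Record<string, unknown>[], chunkSize = 1000) {
  for (const chunk of chunks(rows, chunkSize)) {
    await ch.insert({
      table: 'events',
      values: chunk,
      format: 'JSONEachRow',
    });
  }
}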
+ // Publish "saved" events const pubMulti = getRedisPub().multi(); for (const event of eventsToClickhouse) { await publishEvent('events', 'saved', transformEvent(event), pubMulti); } await pubMulti.exec(); - // (F) Only after successful processing, update Redis - const multi = redis.multi(); - - // Clean up no-session events and update counter - if (regularQueueEvents.length > 0) { - multi - .ltrim(this.regularQueueKey, regularQueueEvents.length, -1) - .decrby(this.bufferCounterKey, regularQueueEvents.length); - } - - await multi.exec(); - - // Process pending sessions in batches - await this.processPendingSessionsInBatches(redis, pendingUpdates); - - timer.updatePendingSessions = performance.now() - now; + // Clean up processed events from queue + await redis + .multi() + .ltrim(this.queueKey, queueEvents.length, -1) + .decrby(this.bufferCounterKey, queueEvents.length) + .exec(); this.logger.info('Processed events from Redis buffer', { batchSize: this.batchSize, - eventsToClickhouse: eventsToClickhouse.length, - pendingSessionUpdates: pendingUpdates.length, - sessionEventsFetched: totalSessionEventsFetched, - regularEvents: regularQueueEvents.length, - timer, + eventsProcessed: eventsToClickhouse.length, }); } catch (error) { this.logger.error('Error processing Redis buffer', { error }); @@ -650,280 +395,49 @@ return "OK" } /** - * Process a session's events. - * - * For each event in the session (in order): - * - If it is a screen_view, look for a subsequent event (screen_view or session_end) - * to calculate its duration. If found, flush it; if not, leave it pending. - * - * Returns an object with two arrays: - * flush: events to be sent to ClickHouse. - * pending: events that remain in the Redis session list. - */ - private processSessionEvents(events: IClickhouseEvent[]): { - flush: IClickhouseEvent[]; - pending: IClickhouseEvent[]; - } { - // Ensure events are sorted by created_at - events.sort( - (a, b) => - new Date(a.created_at || 0).getTime() - - new Date(b.created_at || 0).getTime(), - ); - - const flush: IClickhouseEvent[] = []; - const pending: IClickhouseEvent[] = []; - - // Check if session has ended - if so, flush everything - const hasSessionEnd = events.some((e) => e.name === 'session_end'); - - if (hasSessionEnd) { - flush.push(...events); - return { flush, pending: [] }; - } - - const findNextScreenView = (events: IClickhouseEvent[]) => { - return events.find((e) => e.name === 'screen_view'); - }; - - for (let i = 0; i < events.length; i++) { - const event = events[i]!; - // For screen_view events, look for next event - const next = findNextScreenView(events.slice(i + 1)); - if (next) { - event.duration = - new Date(next.created_at).getTime() - - new Date(event.created_at).getTime(); - flush.push(event); - } else { - // Last screen_view with no next event - keep pending - pending.push(event); - } - } - - return { flush, pending }; - } - - async tryCleanup() { - try { - await runEvery({ - interval: 60 * 60 * 24, - fn: this.cleanup.bind(this), - key: `${this.name}-cleanup`, - }); - } catch (error) { - this.logger.error('Failed to run cleanup', { error }); - } - } - - /** - * Cleanup old events from Redis. - * For each key (no-session and per-session), remove events older than the cutoff date. 
+ * Retrieve the latest screen_view event for a given session or profile */ - async cleanup() { - const redis = getRedisCache(); - const cutoffTime = Date.now() - 1000 * 60 * 60 * 24 * this.daysToKeep; - - try { - const sessionCount = await redis.zcard(this.sessionSortedKey); - const batchSize = 1000; - let offset = 0; - let totalCleaned = 0; - - this.logger.info('Starting cleanup of stale sessions', { - cutoffTime: new Date(cutoffTime), - totalSessions: sessionCount, - }); - - while (offset < sessionCount) { - // Get batch of session IDs with scores - const sessionIdsWithScores = await redis.zrange( - this.sessionSortedKey, - offset, - offset + batchSize - 1, - 'WITHSCORES', - ); - - if (sessionIdsWithScores.length === 0) break; - - const pipeline = redis.pipeline(); - let staleSessions = 0; - - // Process pairs of [sessionId, score] - for (let i = 0; i < sessionIdsWithScores.length; i += 2) { - const sessionId = sessionIdsWithScores[i]; - const score = Number.parseInt(sessionIdsWithScores[i + 1] || '0', 10); - - if (sessionId && score < cutoffTime) { - staleSessions++; - // Remove from both sorted sets and delete the session key - pipeline.zrem(this.sessionSortedKey, sessionId); - pipeline.zrem(this.readySessionsKey, sessionId); - pipeline.del(this.getSessionKey(sessionId)); - } - } - - if (staleSessions > 0) { - await pipeline.exec(); - totalCleaned += staleSessions; - this.logger.info('Cleaned batch of stale sessions', { - batch: Math.floor(offset / batchSize) + 1, - cleanedInBatch: staleSessions, - totalCleaned, - }); + public async getLastScreenView( + params: + | { + sessionId: string; } + | { + projectId: string; + profileId: string; + }, + ): Promise { + const redis = getRedisCache(); - offset += batchSize; - } - - this.logger.info('Cleanup completed', { totalCleaned }); - } catch (error) { - this.logger.error('Failed to cleanup stale sessions', { error }); - } - } - - /** - * Retrieve the latest screen_view event for a given project/profile or project/session - */ - public async getLastScreenView({ - projectId, - ...rest - }: - | { - projectId: string; - profileId: string; - } - | { - projectId: string; - sessionId: string; - }): Promise { - if ('profileId' in rest) { - const redis = getRedisCache(); - const eventStr = await redis.get( - this.getLastEventKey({ projectId, profileId: rest.profileId }), - ); - if (eventStr) { - const parsed = getSafeJson(eventStr); - if (parsed) { - return transformEvent(parsed); - } - } - } - - if ('sessionId' in rest) { - const redis = getRedisCache(); - const sessionKey = this.getSessionKey(rest.sessionId); - const lastEvent = await redis.lindex(sessionKey, -1); - if (lastEvent) { - const parsed = getSafeJson(lastEvent); - if (parsed) { - return transformEvent(parsed); - } - } - } - - return null; - } - - private getLastEventKey({ - projectId, - profileId, - }: { - projectId: string; - profileId: string; - }) { - return `session:last_screen_view:${projectId}:${profileId}`; - } - - private async processPendingSessionsInBatches( - redis: ReturnType, - pendingUpdates: Array<{ - sessionId: string; - snapshotCount: number; - pending: IClickhouseEvent[]; - }>, - ) { - for (const batch of this.chunks( - pendingUpdates, - this.updatePendingSessionsBatchSize, - )) { - const batchArgs: string[] = [this.minEventsInSession.toString()]; - - for (const { sessionId, snapshotCount, pending } of batch) { - const sessionKey = this.getSessionKey(sessionId); - batchArgs.push( - sessionKey, - sessionId, - snapshotCount.toString(), - pending.length.toString(), - 
...pending.map((e) => JSON.stringify(e)), - ); - } - - await redis.eval( - this.batchUpdateSessionsScript, - 3, // KEYS: ready sessions, sessions sorted, buffer counter - this.readySessionsKey, - this.sessionSortedKey, - this.bufferCounterKey, - ...batchArgs, + let lastScreenViewKey: string; + if ('sessionId' in params) { + lastScreenViewKey = this.getLastScreenViewKeyBySession(params.sessionId); + } else { + lastScreenViewKey = this.getLastScreenViewKeyByProfile( + params.projectId, + params.profileId, ); } - } - - public async getBufferSizeHeavy() { - // Fallback method for when counter is not available - const redis = getRedisCache(); - - // Get regular queue count - const regularQueueCount = await redis.llen(this.regularQueueKey); - - // Get total number of sessions - const sessionCount = await redis.zcard(this.sessionSortedKey); - if (sessionCount === 0) { - return regularQueueCount; - } + const eventDataStr = await redis.get(lastScreenViewKey); - // Process sessions in batches to avoid memory spikes - const batchSize = 1000; - let totalSessionEvents = 0; - let offset = 0; - - while (offset < sessionCount) { - // Get batch of session IDs - const sessionIds = await redis.zrange( - this.sessionSortedKey, - offset, - offset + batchSize - 1, + if (eventDataStr) { + const eventData = getSafeJson<{ event: IClickhouseEvent; ts: number }>( + eventDataStr, ); - - if (sessionIds.length === 0) break; - - // Queue up LLEN commands for this batch - const sessionPipeline = redis.pipeline(); - for (const sessionId of sessionIds) { - sessionPipeline.llen(this.getSessionKey(sessionId)); + if (eventData?.event) { + return transformEvent(eventData.event); } - - // Execute pipeline for this batch - const sessionCounts = (await sessionPipeline.exec()) as [any, any][]; - - // Sum up counts from this batch - for (const [err, count] of sessionCounts) { - if (!err) { - totalSessionEvents += count; - } - } - - offset += batchSize; } - return regularQueueCount + totalSessionEvents; + return null; } public async getBufferSize() { - return this.getBufferSizeWithCounter(() => this.getBufferSizeHeavy()); + return this.getBufferSizeWithCounter(async () => { + const redis = getRedisCache(); + return await redis.llen(this.queueKey); + }); } private async incrementActiveVisitorCount( diff --git a/packages/db/src/buffers/profile-buffer.ts b/packages/db/src/buffers/profile-buffer.ts index 92581e13a..7bfb9c809 100644 --- a/packages/db/src/buffers/profile-buffer.ts +++ b/packages/db/src/buffers/profile-buffer.ts @@ -12,12 +12,12 @@ export class ProfileBuffer extends BaseBuffer { private batchSize = process.env.PROFILE_BUFFER_BATCH_SIZE ? Number.parseInt(process.env.PROFILE_BUFFER_BATCH_SIZE, 10) : 200; - private daysToKeep = process.env.PROFILE_BUFFER_DAYS_TO_KEEP - ? Number.parseInt(process.env.PROFILE_BUFFER_DAYS_TO_KEEP, 10) - : 7; private chunkSize = process.env.PROFILE_BUFFER_CHUNK_SIZE ? Number.parseInt(process.env.PROFILE_BUFFER_CHUNK_SIZE, 10) : 1000; + private ttlInSeconds = process.env.PROFILE_BUFFER_TTL_IN_SECONDS + ? 
Number.parseInt(process.env.PROFILE_BUFFER_TTL_IN_SECONDS, 10) + : 60 * 60; private readonly redisKey = 'profile-buffer'; private readonly redisProfilePrefix = 'profile-cache:'; @@ -49,7 +49,7 @@ export class ProfileBuffer extends BaseBuffer { profileId: profile.id, projectId: profile.project_id, }); - return (await getRedisCache().exists(cacheKey)) === 1; + return (await this.redis.exists(cacheKey)) === 1; } async add(profile: IClickhouseProfile, isFromEvent = false) { @@ -90,9 +90,6 @@ export class ProfileBuffer extends BaseBuffer { profile, }); - const cacheTtl = profile.is_external - ? 60 * 60 * 24 * this.daysToKeep - : 60 * 60; // 1 hour for internal profiles const cacheKey = this.getProfileCacheKey({ profileId: profile.id, projectId: profile.project_id, @@ -100,7 +97,7 @@ export class ProfileBuffer extends BaseBuffer { const result = await this.redis .multi() - .set(cacheKey, JSON.stringify(mergedProfile), 'EX', cacheTtl) + .set(cacheKey, JSON.stringify(mergedProfile), 'EX', this.ttlInSeconds) .rpush(this.redisKey, JSON.stringify(mergedProfile)) .incr(this.bufferCounterKey) .llen(this.redisKey) @@ -120,7 +117,6 @@ export class ProfileBuffer extends BaseBuffer { batchSize: this.batchSize, }); if (bufferLength >= this.batchSize) { - this.logger.info('Buffer full, initiating flush'); await this.tryFlush(); } } catch (error) { @@ -137,18 +133,33 @@ export class ProfileBuffer extends BaseBuffer { projectId: profile.project_id, }); - const existingProfile = await getRedisCache().get(cacheKey); + const existingProfile = await this.fetchFromCache( + profile.id, + profile.project_id, + ); if (existingProfile) { - const parsedProfile = getSafeJson(existingProfile); - if (parsedProfile) { - logger.debug('Profile found in Redis'); - return parsedProfile; - } + logger.debug('Profile found in Redis'); + return existingProfile; } return this.fetchFromClickhouse(profile, logger); } + public async fetchFromCache( + profileId: string, + projectId: string, + ): Promise { + const cacheKey = this.getProfileCacheKey({ + profileId, + projectId, + }); + const existingProfile = await this.redis.get(cacheKey); + if (!existingProfile) { + return null; + } + return getSafeJson(existingProfile); + } + private async fetchFromClickhouse( profile: IClickhouseProfile, logger: ILogger, @@ -176,7 +187,7 @@ export class ProfileBuffer extends BaseBuffer { async processBuffer() { try { - this.logger.info('Starting profile buffer processing'); + this.logger.debug('Starting profile buffer processing'); const profiles = await this.redis.lrange( this.redisKey, 0, @@ -188,7 +199,7 @@ export class ProfileBuffer extends BaseBuffer { return; } - this.logger.info(`Processing ${profiles.length} profiles in buffer`); + this.logger.debug(`Processing ${profiles.length} profiles in buffer`); const parsedProfiles = profiles.map((p) => getSafeJson(p), ); @@ -208,7 +219,7 @@ export class ProfileBuffer extends BaseBuffer { .decrby(this.bufferCounterKey, profiles.length) .exec(); - this.logger.info('Successfully completed profile processing', { + this.logger.debug('Successfully completed profile processing', { totalProfiles: profiles.length, }); } catch (error) { diff --git a/packages/db/src/buffers/session-buffer.ts b/packages/db/src/buffers/session-buffer.ts index db30bb053..346721b63 100644 --- a/packages/db/src/buffers/session-buffer.ts +++ b/packages/db/src/buffers/session-buffer.ts @@ -12,6 +12,9 @@ export class SessionBuffer extends BaseBuffer { private batchSize = process.env.SESSION_BUFFER_BATCH_SIZE ? 
Number.parseInt(process.env.SESSION_BUFFER_BATCH_SIZE, 10) : 1000; + private chunkSize = process.env.SESSION_BUFFER_CHUNK_SIZE + ? Number.parseInt(process.env.SESSION_BUFFER_CHUNK_SIZE, 10) + : 1000; private readonly redisKey = 'session-buffer'; private redis: Redis; @@ -209,7 +212,7 @@ export class SessionBuffer extends BaseBuffer { }; }); - for (const chunk of this.chunks(sessions, 1000)) { + for (const chunk of this.chunks(sessions, this.chunkSize)) { // Insert to ClickHouse await ch.insert({ table: TABLE_NAMES.sessions, @@ -225,7 +228,7 @@ export class SessionBuffer extends BaseBuffer { .decrby(this.bufferCounterKey, events.length); await multi.exec(); - this.logger.info('Processed sessions', { + this.logger.debug('Processed sessions', { count: events.length, }); } catch (error) { diff --git a/packages/db/src/clickhouse/client.ts b/packages/db/src/clickhouse/client.ts index b363cb44e..de267bbb3 100644 --- a/packages/db/src/clickhouse/client.ts +++ b/packages/db/src/clickhouse/client.ts @@ -56,14 +56,15 @@ export const TABLE_NAMES = { event_property_values_mv: 'event_property_values_mv', cohort_events_mv: 'cohort_events_mv', sessions: 'sessions', + events_imports: 'events_imports', }; export const CLICKHOUSE_OPTIONS: NodeClickHouseClientConfigOptions = { max_open_connections: 30, - request_timeout: 60000, + request_timeout: 300000, keep_alive: { enabled: true, - idle_socket_ttl: 8000, + idle_socket_ttl: 60000, }, compression: { request: true, @@ -132,7 +133,27 @@ export const ch = new Proxy(originalCh, { const value = Reflect.get(target, property, receiver); if (property === 'insert') { - return (...args: any[]) => withRetry(() => value.apply(target, args)); + return (...args: any[]) => + withRetry(() => { + args[0].clickhouse_settings = { + // Allow bigger HTTP payloads/time to stream rows + async_insert: 1, + wait_for_async_insert: 1, + // Increase insert timeouts and buffer sizes for large batches + max_execution_time: 300, + max_insert_block_size: '500000', + max_http_get_redirects: '0', + // Ensure JSONEachRow stays efficient + input_format_parallel_parsing: 1, + // Keep long-running inserts/queries from idling out at proxies by sending progress headers + send_progress_in_http_headers: 1, + http_headers_progress_interval_ms: '50000', + // Ensure server holds the connection until the query is finished + wait_end_of_query: 1, + ...args[0].clickhouse_settings, + }; + return value.apply(target, args); + }); } return value; diff --git a/packages/db/src/services/clients.service.ts b/packages/db/src/services/clients.service.ts index 374102e9c..b098fc484 100644 --- a/packages/db/src/services/clients.service.ts +++ b/packages/db/src/services/clients.service.ts @@ -34,4 +34,4 @@ export async function getClientById( }); } -export const getClientByIdCached = cacheable(getClientById, 60 * 60 * 24); +export const getClientByIdCached = cacheable(getClientById, 60 * 60 * 24, true); diff --git a/packages/db/src/services/event.service.ts b/packages/db/src/services/event.service.ts index 195fc8875..404526ea7 100644 --- a/packages/db/src/services/event.service.ts +++ b/packages/db/src/services/event.service.ts @@ -19,12 +19,9 @@ import type { EventMeta, Prisma } from '../prisma-client'; import { db } from '../prisma-client'; import { type SqlBuilderObject, createSqlBuilder } from '../sql-builder'; import { getEventFiltersWhereClause } from './chart.service'; -import { getOrganizationByProjectIdCached } from './organization.service'; import type { IServiceProfile, IServiceUpsertProfile } from 
'./profile.service'; import { getProfileById, - getProfileByIdCached, - getProfiles, getProfilesCached, upsertProfile, } from './profile.service'; @@ -391,6 +388,7 @@ export interface GetEventListOptions { endDate?: Date; select?: SelectHelper; custom?: (sb: SqlBuilderObject) => void; + dateIntervalInDays?: number; } export async function getEventList(options: GetEventListOptions) { @@ -404,19 +402,12 @@ export async function getEventList(options: GetEventListOptions) { filters, startDate, endDate, - select: incomingSelect, custom, + select: incomingSelect, + dateIntervalInDays = 0.5, } = options; const { sb, getSql, join } = createSqlBuilder(); - const organization = await getOrganizationByProjectIdCached(projectId); - // This will speed up the query quite a lot for big organizations - const dateIntervalInDays = - organization?.subscriptionPeriodEventsLimit && - organization?.subscriptionPeriodEventsLimit > 1_000_000 - ? 1 - : 7; - if (typeof cursor === 'number') { sb.offset = Math.max(0, (cursor ?? 0) * take); } else if (cursor instanceof Date) { @@ -449,6 +440,9 @@ export async function getEventList(options: GetEventListOptions) { incomingSelect ?? {}, ); + sb.select.createdAt = 'created_at'; + sb.select.projectId = 'project_id'; + if (select.id) { sb.select.id = 'id'; } @@ -470,9 +464,6 @@ export async function getEventList(options: GetEventListOptions) { if (select.properties) { sb.select.properties = 'properties'; } - if (select.createdAt) { - sb.select.createdAt = 'created_at'; - } if (select.country) { sb.select.country = 'country'; } @@ -570,8 +561,6 @@ export async function getEventList(options: GetEventListOptions) { custom(sb); } - console.log('getSql()', getSql()); - const data = await getEvents(getSql(), { profile: select.profile ?? true, meta: select.meta ?? 
true, @@ -581,10 +570,7 @@ export async function getEventList(options: GetEventListOptions) { if (data.length === 0 && sb.where.cursorWindow) { return getEventList({ ...options, - custom(sb) { - options.custom?.(sb); - delete sb.where.cursorWindow; - }, + dateIntervalInDays: dateIntervalInDays * 2, }); } @@ -909,7 +895,7 @@ class EventService { ]); if (event?.profileId) { - const profile = await getProfileByIdCached(event?.profileId, projectId); + const profile = await getProfileById(event?.profileId, projectId); if (profile) { event.profile = profile; } diff --git a/packages/db/src/services/notification.service.ts b/packages/db/src/services/notification.service.ts index 71cbbc3b1..d7d54eed4 100644 --- a/packages/db/src/services/notification.service.ts +++ b/packages/db/src/services/notification.service.ts @@ -13,7 +13,7 @@ import type { IServiceCreateEventPayload, IServiceEvent, } from './event.service'; -import { getProfileById, getProfileByIdCached } from './profile.service'; +import { getProfileById } from './profile.service'; import { getProjectByIdCached } from './project.service'; type ICreateNotification = Pick< @@ -264,10 +264,7 @@ export async function checkNotificationRulesForEvent( payload.profileId && rules.some((rule) => rule.template?.match(/{{profile\.[^}]*}}/)) ) { - const profile = await getProfileByIdCached( - payload.profileId, - payload.projectId, - ); + const profile = await getProfileById(payload.profileId, payload.projectId); if (profile) { (payload as any).profile = profile; } diff --git a/packages/db/src/services/profile.service.ts b/packages/db/src/services/profile.service.ts index 73d2eb897..f7e02827f 100644 --- a/packages/db/src/services/profile.service.ts +++ b/packages/db/src/services/profile.service.ts @@ -106,6 +106,11 @@ export async function getProfileById(id: string, projectId: string) { return null; } + const cachedProfile = await profileBuffer.fetchFromCache(id, projectId); + if (cachedProfile) { + return transformProfile(cachedProfile); + } + const [profile] = await chQuery( `SELECT id, @@ -127,8 +132,6 @@ export async function getProfileById(id: string, projectId: string) { return transformProfile(profile); } -export const getProfileByIdCached = cacheable(getProfileById, 60 * 30); - interface GetProfileListOptions { projectId: string; take: number; @@ -306,10 +309,5 @@ export async function upsertProfile( is_external: isExternal, }; - if (!isFromEvent) { - // Save to cache directly since the profile might be used before its saved in clickhouse - getProfileByIdCached.set(id, projectId)(transformProfile(profile)); - } - return profileBuffer.add(profile, isFromEvent); } diff --git a/packages/db/src/services/salt.service.ts b/packages/db/src/services/salt.service.ts index c35134295..a63e8e34d 100644 --- a/packages/db/src/services/salt.service.ts +++ b/packages/db/src/services/salt.service.ts @@ -1,6 +1,6 @@ import { generateSalt } from '@openpanel/common/server'; -import { getRedisCache } from '@openpanel/redis'; +import { cacheable, getRedisCache } from '@openpanel/redis'; import { db } from '../prisma-client'; export async function getCurrentSalt() { @@ -17,36 +17,34 @@ export async function getCurrentSalt() { return salt.salt; } -export async function getSalts() { - const cache = await getRedisCache().get('op:salt'); - if (cache) { - return JSON.parse(cache); - } - - const [curr, prev] = await db.salt.findMany({ - orderBy: { - createdAt: 'desc', - }, - take: 2, - }); +export const getSalts = cacheable( + 'op:salt', + async () => { + const [curr, prev] 
= await db.salt.findMany({ + orderBy: { + createdAt: 'desc', + }, + take: 2, + }); - if (!curr) { - throw new Error('No salt found'); - } - - if (!prev) { - throw new Error('No salt found'); - } + if (!curr) { + throw new Error('No salt found'); + } - const salts = { - current: curr.salt, - previous: prev.salt, - }; + if (!prev) { + throw new Error('No salt found'); + } - await getRedisCache().set('op:salt', JSON.stringify(salts), 'EX', 60 * 10); + const salts = { + current: curr.salt, + previous: prev.salt, + }; - return salts; -} + return salts; + }, + 60 * 10, + true, +); export async function createInitialSalts() { const MAX_RETRIES = 5; diff --git a/packages/geo/package.json b/packages/geo/package.json index 65d6dcbda..9149e22cd 100644 --- a/packages/geo/package.json +++ b/packages/geo/package.json @@ -7,14 +7,15 @@ "codegen": "jiti scripts/download.ts" }, "dependencies": { - "@maxmind/geoip2-node": "^6.1.0" + "@maxmind/geoip2-node": "^6.1.0", + "lru-cache": "^11.2.2" }, "devDependencies": { "@openpanel/tsconfig": "workspace:*", "@types/node": "catalog:", "fast-extract": "^1.4.3", + "jiti": "^2.4.1", "tar": "^7.4.3", - "typescript": "catalog:", - "jiti": "^2.4.1" + "typescript": "catalog:" } } diff --git a/packages/geo/src/geo.ts b/packages/geo/src/geo.ts index bd2ac7f9a..a0ed73b3c 100644 --- a/packages/geo/src/geo.ts +++ b/packages/geo/src/geo.ts @@ -7,6 +7,7 @@ const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); import type { ReaderModel } from '@maxmind/geoip2-node'; import { Reader } from '@maxmind/geoip2-node'; +import { LRUCache } from 'lru-cache'; const filename = 'GeoLite2-City.mmdb'; // From api or worker package @@ -50,11 +51,22 @@ const DEFAULT_GEO: GeoLocation = { const ignore = ['127.0.0.1', '::1']; +const cache = new LRUCache({ + max: 1000, + ttl: 1000 * 60 * 5, + ttlAutopurge: true, +}); + export async function getGeoLocation(ip?: string): Promise { if (!ip || ignore.includes(ip)) { return DEFAULT_GEO; } + const cached = cache.get(ip); + if (cached) { + return cached; + } + if (!reader) { await loadDatabase(dbPath); } diff --git a/packages/logger/index.ts b/packages/logger/index.ts index 25132adc1..5a65c6785 100644 --- a/packages/logger/index.ts +++ b/packages/logger/index.ts @@ -6,9 +6,12 @@ export { winston }; export type ILogger = winston.Logger; const logLevel = process.env.LOG_LEVEL ?? 'info'; +const silent = process.env.LOG_SILENT === 'true'; export function createLogger({ name }: { name: string }): ILogger { - const service = `${name}-${process.env.NODE_ENV ?? 'dev'}`; + const service = [process.env.LOG_PREFIX, name, process.env.NODE_ENV ?? 
'dev'] + .filter(Boolean) + .join('-'); const prettyError = (error: Error) => ({ ...error, @@ -85,7 +88,7 @@ export function createLogger({ name }: { name: string }): ILogger { level: logLevel, format, transports, - silent: process.env.NODE_ENV === 'test', + silent, // Add ISO levels of logging from PINO levels: Object.assign( { fatal: 0, warn: 4, trace: 7 }, diff --git a/packages/queue/package.json b/packages/queue/package.json index 75a96f917..391ecb7d0 100644 --- a/packages/queue/package.json +++ b/packages/queue/package.json @@ -10,8 +10,8 @@ "@openpanel/db": "workspace:*", "@openpanel/logger": "workspace:*", "@openpanel/redis": "workspace:*", - "bullmq": "^5.8.7", - "groupmq": "1.0.0-next.19" + "bullmq": "^5.63.0", + "groupmq": "1.1.0-next.5" }, "devDependencies": { "@openpanel/sdk": "workspace:*", diff --git a/packages/queue/src/queues.ts b/packages/queue/src/queues.ts index 7511cd09f..a55144955 100644 --- a/packages/queue/src/queues.ts +++ b/packages/queue/src/queues.ts @@ -1,5 +1,6 @@ import { Queue, QueueEvents } from 'bullmq'; +import { createHash } from 'node:crypto'; import type { IServiceCreateEventPayload, IServiceEvent, @@ -10,6 +11,18 @@ import { getRedisGroupQueue, getRedisQueue } from '@openpanel/redis'; import type { TrackPayload } from '@openpanel/sdk'; import { Queue as GroupQueue } from 'groupmq'; +export const EVENTS_GROUP_QUEUES_SHARDS = Number.parseInt( + process.env.EVENTS_GROUP_QUEUES_SHARDS || '1', + 10, +); + +function pickShard(projectId: string) { + const h = createHash('sha1').update(projectId).digest(); // 20 bytes + // take first 4 bytes as unsigned int + const x = h.readUInt32BE(0); + return x % EVENTS_GROUP_QUEUES_SHARDS; // 0..n-1 +} + export const queueLogger = createLogger({ name: 'queue' }); export interface EventsQueuePayloadIncomingEvent { @@ -17,9 +30,30 @@ export interface EventsQueuePayloadIncomingEvent { payload: { projectId: string; event: TrackPayload & { - timestamp: string; + timestamp: string | number; isTimestampFromThePast: boolean; }; + uaInfo: + | { + readonly isServer: true; + readonly device: 'server'; + readonly os: ''; + readonly osVersion: ''; + readonly browser: ''; + readonly browserVersion: ''; + readonly brand: ''; + readonly model: ''; + } + | { + readonly os: string | undefined; + readonly osVersion: string | undefined; + readonly browser: string | undefined; + readonly browserVersion: string | undefined; + readonly device: string; + readonly brand: string | undefined; + readonly model: string | undefined; + readonly isServer: false; + }; geo: { country: string | undefined; city: string | undefined; @@ -93,55 +127,65 @@ export type MiscQueuePayload = MiscQueuePayloadTrialEndingSoon; export type CronQueueType = CronQueuePayload['type']; -const orderingWindowMs = Number.parseInt( - process.env.ORDERING_WINDOW_MS || '50', +const orderingDelayMs = Number.parseInt( + process.env.ORDERING_DELAY_MS || '100', 10, ); -const orderingGracePeriodDecay = Number.parseFloat( - process.env.ORDERING_GRACE_PERIOD_DECAY || '0.9', -); -const orderingMaxWaitMultiplier = Number.parseInt( - process.env.ORDERING_MAX_WAIT_MULTIPLIER || '8', + +const autoBatchMaxWaitMs = Number.parseInt( + process.env.AUTO_BATCH_MAX_WAIT_MS || '0', 10, ); +const autoBatchSize = Number.parseInt(process.env.AUTO_BATCH_SIZE || '0', 10); + +export const eventsGroupQueues = Array.from({ + length: EVENTS_GROUP_QUEUES_SHARDS, +}).map( + (_, index) => + new GroupQueue({ + logger: queueLogger, + namespace: `{group_events_${index}}`, + redis: getRedisGroupQueue(), + 
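// Illustrative sketch (not part of this diff): the shard-selection scheme used by
// pickShard() above, standalone. Hashing the projectId with SHA-1 and taking the
// first 4 bytes modulo the shard count gives a stable, roughly uniform mapping, so
// all events for a project land on the same GroupQueue shard, and the braced
// `{group_events_N}` namespaces keep each shard's keys in one Redis Cluster hash
// slot. The SHARDS constant stands in for EVENTS_GROUP_QUEUES_SHARDS.
import { createHash } from 'node:crypto';

const SHARDS = 4; // placeholder shard count

function shardFor(projectId: string): number {
  const digest = createHash('sha1').update(projectId).digest();
  return digest.readUInt32BE(0) % SHARDS;
}

// Same project always maps to the same shard:
console.log(shardFor('proj_123') === shardFor('proj_123')); // true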
keepCompleted: 1_000, + keepFailed: 10_000, + orderingDelayMs: orderingDelayMs, + autoBatch: + autoBatchMaxWaitMs && autoBatchSize + ? { + maxWaitMs: autoBatchMaxWaitMs, + size: autoBatchSize, + } + : undefined, + }), +); -export const eventsGroupQueue = new GroupQueue< - EventsQueuePayloadIncomingEvent['payload'] ->({ - logger: queueLogger, - namespace: 'group_events', - // @ts-expect-error - TODO: Fix this in groupmq - redis: getRedisGroupQueue(), - orderingMethod: 'in-memory', - orderingWindowMs, - orderingGracePeriodDecay, - orderingMaxWaitMultiplier, - keepCompleted: 10, - keepFailed: 10_000, -}); +export const getEventsGroupQueueShard = (groupId: string) => { + const shard = pickShard(groupId); + const queue = eventsGroupQueues[shard]; + if (!queue) { + throw new Error(`Queue not found for group ${groupId}`); + } + return queue; +}; -export const sessionsQueue = new Queue('sessions', { - // @ts-ignore +export const sessionsQueue = new Queue('{sessions}', { connection: getRedisQueue(), defaultJobOptions: { removeOnComplete: 10, }, }); -export const sessionsQueueEvents = new QueueEvents('sessions', { - // @ts-ignore +export const sessionsQueueEvents = new QueueEvents('{sessions}', { connection: getRedisQueue(), }); -export const cronQueue = new Queue('cron', { - // @ts-ignore +export const cronQueue = new Queue('{cron}', { connection: getRedisQueue(), defaultJobOptions: { removeOnComplete: 10, }, }); -export const miscQueue = new Queue('misc', { - // @ts-ignore +export const miscQueue = new Queue('{misc}', { connection: getRedisQueue(), defaultJobOptions: { removeOnComplete: 10, @@ -156,9 +200,8 @@ export type NotificationQueuePayload = { }; export const notificationQueue = new Queue( - 'notification', + '{notification}', { - // @ts-ignore connection: getRedisQueue(), defaultJobOptions: { removeOnComplete: 10, diff --git a/packages/redis/cachable.test.ts b/packages/redis/cachable.test.ts index 4caade0a5..d628f4391 100644 --- a/packages/redis/cachable.test.ts +++ b/packages/redis/cachable.test.ts @@ -446,12 +446,6 @@ describe('cachable', () => { expect(cached).toBe(JSON.stringify(payload)); }); - it('should throw error when function is not provided', () => { - expect(() => { - cacheable('test', 3600); - }).toThrow('fn is not a function'); - }); - it('should throw error when expire time is not provided', () => { const fn = async (arg1: string, arg2: string) => ({}); expect(() => { diff --git a/packages/redis/cachable.ts b/packages/redis/cachable.ts index 55221d33f..a49279401 100644 --- a/packages/redis/cachable.ts +++ b/packages/redis/cachable.ts @@ -1,17 +1,34 @@ +import { LRUCache } from 'lru-cache'; import { getRedisCache } from './redis'; export const deleteCache = async (key: string) => { return getRedisCache().del(key); }; +// Global LRU cache for getCache function +const globalLruCache = new LRUCache({ + max: 5000, // Store up to 5000 entries + ttl: 1000 * 60, // 1 minutes default TTL +}); + export async function getCache( key: string, expireInSec: number, fn: () => Promise, + useLruCache?: boolean, ): Promise { + // L1 Cache: Check global LRU cache first (in-memory, instant) + if (useLruCache) { + const lruHit = globalLruCache.get(key); + if (lruHit !== undefined) { + return lruHit as T; + } + } + + // L2 Cache: Check Redis cache (shared across instances) const hit = await getRedisCache().get(key); if (hit) { - return JSON.parse(hit, (_, value) => { + const parsed = JSON.parse(hit, (_, value) => { if ( typeof value === 'string' && 
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z$/.test(value) @@ -20,13 +37,49 @@ export async function getCache( } return value; }); + + // Store in LRU cache for next time + if (useLruCache) { + globalLruCache.set(key, parsed, { + ttl: expireInSec * 1000, // Use the same TTL as Redis + }); + } + + return parsed; } + // Cache miss: Execute function const data = await fn(); - await getRedisCache().setex(key, expireInSec, JSON.stringify(data)); + + // Store in both caches + if (useLruCache) { + globalLruCache.set(key, data, { + ttl: expireInSec * 1000, + }); + } + // Fire and forget Redis write for better performance + getRedisCache().setex(key, expireInSec, JSON.stringify(data)); + return data; } +// Helper functions for managing global LRU cache +export function clearGlobalLruCache(key?: string) { + if (key) { + return globalLruCache.delete(key); + } + globalLruCache.clear(); + return true; +} + +export function getGlobalLruCacheStats() { + return { + size: globalLruCache.size, + max: globalLruCache.max, + calculatedSize: globalLruCache.calculatedSize, + }; +} + function stringify(obj: unknown): string { if (obj === null) return 'null'; if (obj === undefined) return 'undefined'; @@ -75,10 +128,39 @@ function hasResult(result: unknown): boolean { return true; } +// Overload 1: cacheable(fn, expireInSec, lruCache?) +export function cacheable any>( + fn: T, + expireInSec: number, + lruCache?: boolean, +): T & { + getKey: (...args: Parameters) => string; + clear: (...args: Parameters) => Promise; + set: ( + ...args: Parameters + ) => (payload: Awaited>) => Promise<'OK'>; +}; + +// Overload 2: cacheable(name, fn, expireInSec, lruCache?) +export function cacheable any>( + name: string, + fn: T, + expireInSec: number, + lruCache?: boolean, +): T & { + getKey: (...args: Parameters) => string; + clear: (...args: Parameters) => Promise; + set: ( + ...args: Parameters + ) => (payload: Awaited>) => Promise<'OK'>; +}; + +// Implementation export function cacheable any>( fnOrName: T | string, fnOrExpireInSec: number | T, - _expireInSec?: number, + _expireInSecOrLruCache?: number | boolean, + _lruCache?: boolean, ) { const name = typeof fnOrName === 'string' ? fnOrName : fnOrName.name; const fn = @@ -87,12 +169,26 @@ export function cacheable any>( : typeof fnOrExpireInSec === 'function' ? fnOrExpireInSec : null; - const expireInSec = - typeof fnOrExpireInSec === 'number' - ? fnOrExpireInSec - : typeof _expireInSec === 'number' - ? _expireInSec + + let expireInSec: number | null = null; + let useLruCache = false; + + // Parse parameters based on function signature + if (typeof fnOrName === 'function') { + // Overload 1: cacheable(fn, expireInSec, lruCache?) + expireInSec = typeof fnOrExpireInSec === 'number' ? fnOrExpireInSec : null; + useLruCache = + typeof _expireInSecOrLruCache === 'boolean' + ? _expireInSecOrLruCache + : false; + } else { + // Overload 2: cacheable(name, fn, expireInSec, lruCache?) + expireInSec = + typeof _expireInSecOrLruCache === 'number' + ? _expireInSecOrLruCache : null; + useLruCache = typeof _lruCache === 'boolean' ? _lruCache : false; + } if (typeof fn !== 'function') { throw new Error('fn is not a function'); @@ -105,11 +201,29 @@ export function cacheable any>( const cachePrefix = `cachable:${name}`; const getKey = (...args: Parameters) => `${cachePrefix}:${stringify(args)}`; + + // Create function-specific LRU cache if enabled + const functionLruCache = useLruCache + ? 
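// Illustrative sketch (not part of this diff): the two-level read path that
// getCache()/cacheable() above follow, shown standalone - check the in-process LRU
// first, then Redis, and populate both on a miss, with a fire-and-forget Redis write.
// Uses lru-cache and ioredis as elsewhere in this package; JSON (de)serialization
// and date revival are left out for brevity.
import { LRUCache } from 'lru-cache';
import Redis from 'ioredis';

const l1 = new LRUCache<string, string>({ max: 5000, ttl: 60_000 });
const l2 = new Redis();

async function cachedGet(
  key: string,
  ttlSec: number,
  load: () => Promise<string>,
): Promise<string> {
  const fromL1 = l1.get(key);
  if (fromL1 !== undefined) return fromL1;

  const fromL2 = await l2.get(key);
  if (fromL2 !== null) {
    l1.set(key, fromL2, { ttl: ttlSec * 1000 });
    return fromL2;
  }

  const fresh = await load();
  l1.set(key, fresh, { ttl: ttlSec * 1000 });
  // Fire and forget: don't block the caller on the Redis write.
  void l2.setex(key, ttlSec, fresh);
  return fresh;
}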
new LRUCache({ + max: 1000, + ttl: expireInSec * 1000, // Convert seconds to milliseconds for LRU + }) + : null; + const cachedFn = async ( ...args: Parameters ): Promise>> => { - // JSON.stringify here is not bullet proof since ordering of object keys matters etc const key = getKey(...args); + + // L1 Cache: Check LRU cache first (in-memory, instant) + if (functionLruCache) { + const lruHit = functionLruCache.get(key); + if (lruHit !== undefined && hasResult(lruHit)) { + return lruHit; + } + } + + // L2 Cache: Check Redis cache (shared across instances) const cached = await getRedisCache().get(key); if (cached) { try { @@ -123,15 +237,26 @@ export function cacheable any>( return value; }); if (hasResult(parsed)) { + // Store in LRU cache for next time + if (functionLruCache) { + functionLruCache.set(key, parsed); + } return parsed; } } catch (e) { console.error('Failed to parse cache', e); } } + + // Cache miss: Execute function const result = await fn(...(args as any)); if (hasResult(result)) { + // Store in both caches + if (functionLruCache) { + functionLruCache.set(key, result); + } + // Don't await Redis write - fire and forget for better performance getRedisCache().setex(key, expireInSec, JSON.stringify(result)); } @@ -141,12 +266,20 @@ export function cacheable any>( cachedFn.getKey = getKey; cachedFn.clear = async (...args: Parameters) => { const key = getKey(...args); + // Clear both LRU and Redis caches + if (functionLruCache) { + functionLruCache.delete(key); + } return getRedisCache().del(key); }; cachedFn.set = (...args: Parameters) => async (payload: Awaited>) => { const key = getKey(...args); + // Set in both caches + if (functionLruCache) { + functionLruCache.set(key, payload); + } return getRedisCache().setex(key, expireInSec, JSON.stringify(payload)); }; diff --git a/packages/redis/package.json b/packages/redis/package.json index f958f1d73..79c1e951e 100644 --- a/packages/redis/package.json +++ b/packages/redis/package.json @@ -8,7 +8,8 @@ }, "dependencies": { "@openpanel/json": "workspace:*", - "ioredis": "^5.7.0" + "ioredis": "5.8.2", + "lru-cache": "^11.2.2" }, "devDependencies": { "@openpanel/db": "workspace:*", diff --git a/packages/trpc/src/routers/chart.ts b/packages/trpc/src/routers/chart.ts index bd4afa33e..afbaa2530 100644 --- a/packages/trpc/src/routers/chart.ts +++ b/packages/trpc/src/routers/chart.ts @@ -212,7 +212,6 @@ export const chartRouter = createTRPCRouter({ 'origin', 'referrer', 'referrer_name', - 'duration', 'created_at', 'country', 'city', diff --git a/packages/trpc/src/routers/event.ts b/packages/trpc/src/routers/event.ts index 0506fe6c7..46c38cf1f 100644 --- a/packages/trpc/src/routers/event.ts +++ b/packages/trpc/src/routers/event.ts @@ -127,23 +127,20 @@ export const eventRouter = createTRPCRouter({ startDate: z.date().optional(), endDate: z.date().optional(), events: z.array(z.string()).optional(), + columnVisibility: z.record(z.string(), z.boolean()).optional(), }), ) - .query(async ({ input }) => { + .query(async ({ input: { columnVisibility, ...input } }) => { const items = await getEventList({ ...input, take: 50, cursor: input.cursor ? new Date(input.cursor) : undefined, select: { - profile: true, - properties: true, - sessionId: true, - deviceId: true, - profileId: true, - referrerName: true, - referrerType: true, - referrer: true, - origin: true, + ...columnVisibility, + city: columnVisibility?.country ?? true, + path: columnVisibility?.name ?? true, + duration: columnVisibility?.name ?? 
true, + projectId: false, }, }); @@ -190,9 +187,10 @@ export const eventRouter = createTRPCRouter({ cursor: z.string().optional(), startDate: z.date().optional(), endDate: z.date().optional(), + columnVisibility: z.record(z.string(), z.boolean()).optional(), }), ) - .query(async ({ input }) => { + .query(async ({ input: { columnVisibility, ...input } }) => { const conversions = await getConversionEventNames(input.projectId); if (conversions.length === 0) { @@ -209,15 +207,11 @@ export const eventRouter = createTRPCRouter({ take: 50, cursor: input.cursor ? new Date(input.cursor) : undefined, select: { - profile: true, - properties: true, - sessionId: true, - deviceId: true, - profileId: true, - referrerName: true, - referrerType: true, - referrer: true, - origin: true, + ...columnVisibility, + city: columnVisibility?.country ?? true, + path: columnVisibility?.name ?? true, + duration: columnVisibility?.name ?? true, + projectId: false, }, custom: (sb) => { sb.where.name = `name IN (${conversions.map((event) => sqlstring.escape(event.name)).join(',')})`; diff --git a/packages/trpc/src/routers/profile.ts b/packages/trpc/src/routers/profile.ts index f8bdb4f82..64159f493 100644 --- a/packages/trpc/src/routers/profile.ts +++ b/packages/trpc/src/routers/profile.ts @@ -6,7 +6,7 @@ import { TABLE_NAMES, chQuery, createSqlBuilder, - getProfileByIdCached, + getProfileById, getProfileList, getProfileListCount, getProfileMetrics, @@ -19,7 +19,7 @@ export const profileRouter = createTRPCRouter({ byId: protectedProcedure .input(z.object({ profileId: z.string(), projectId: z.string() })) .query(async ({ input: { profileId, projectId } }) => { - return getProfileByIdCached(profileId, projectId); + return getProfileById(profileId, projectId); }), metrics: protectedProcedure diff --git a/packages/trpc/src/routers/realtime.ts b/packages/trpc/src/routers/realtime.ts index 336f541eb..62229681e 100644 --- a/packages/trpc/src/routers/realtime.ts +++ b/packages/trpc/src/routers/realtime.ts @@ -62,10 +62,12 @@ export const realtimeRouter = createTRPCRouter({ path: string; count: number; avg_duration: number; + unique_sessions: number; }>([ 'origin', 'path', 'COUNT(*) as count', + 'COUNT(DISTINCT session_id) as unique_sessions', 'round(avg(duration)/1000, 2) as avg_duration', ]) .from(TABLE_NAMES.events) @@ -91,9 +93,11 @@ export const realtimeRouter = createTRPCRouter({ referrer_name: string; count: number; avg_duration: number; + unique_sessions: number; }>([ 'referrer_name', 'COUNT(*) as count', + 'COUNT(DISTINCT session_id) as unique_sessions', 'round(avg(duration)/1000, 2) as avg_duration', ]) .from(TABLE_NAMES.events) @@ -120,10 +124,12 @@ export const realtimeRouter = createTRPCRouter({ city: string; count: number; avg_duration: number; + unique_sessions: number; }>([ 'country', 'city', 'COUNT(*) as count', + 'COUNT(DISTINCT session_id) as unique_sessions', 'round(avg(duration)/1000, 2) as avg_duration', ]) .from(TABLE_NAMES.events) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f15819bd0..d55fa4020 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -78,20 +78,20 @@ importers: specifier: ^1.3.12 version: 1.3.12(zod@3.24.2) '@fastify/compress': - specifier: ^8.0.1 - version: 8.0.1 + specifier: ^8.1.0 + version: 8.1.0 '@fastify/cookie': specifier: ^11.0.2 version: 11.0.2 '@fastify/cors': - specifier: ^11.0.0 - version: 11.0.0 + specifier: ^11.1.0 + version: 11.1.0 '@fastify/rate-limit': - specifier: ^10.2.2 - version: 10.2.2 + specifier: ^10.3.0 + version: 10.3.0 '@fastify/websocket': - specifier: ^11.0.2 
- version: 11.0.2 + specifier: ^11.2.0 + version: 11.2.0 '@node-rs/argon2': specifier: ^2.0.2 version: 2.0.2 @@ -144,17 +144,17 @@ importers: specifier: ^1.0.3 version: 1.0.3 fastify: - specifier: ^5.2.1 - version: 5.2.1 + specifier: ^5.6.1 + version: 5.6.1 fastify-metrics: specifier: ^12.1.0 - version: 12.1.0(fastify@5.2.1) + version: 12.1.0(fastify@5.6.1) fastify-raw-body: specifier: ^5.0.0 version: 5.0.0 groupmq: - specifier: 1.0.0-next.19 - version: 1.0.0-next.19(ioredis@5.8.2) + specifier: 1.1.0-next.5 + version: 1.1.0-next.5(ioredis@5.8.2) jsonwebtoken: specifier: ^9.0.2 version: 9.0.2 @@ -343,9 +343,6 @@ importers: '@ai-sdk/react': specifier: ^1.2.5 version: 1.2.5(react@19.1.1)(zod@3.24.2) - '@clickhouse/client': - specifier: ^1.2.0 - version: 1.2.0 '@dnd-kit/core': specifier: ^6.3.1 version: 6.3.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -801,11 +798,11 @@ importers: apps/worker: dependencies: '@bull-board/api': - specifier: 6.13.1 - version: 6.13.1(@bull-board/ui@6.13.1) + specifier: 6.14.0 + version: 6.14.0(@bull-board/ui@6.14.0) '@bull-board/express': - specifier: 6.13.1 - version: 6.13.1 + specifier: 6.14.0 + version: 6.14.0 '@openpanel/common': specifier: workspace:* version: link:../../packages/common @@ -831,14 +828,14 @@ importers: specifier: workspace:* version: link:../../packages/redis bullmq: - specifier: ^5.8.7 - version: 5.8.7 + specifier: ^5.63.0 + version: 5.63.0 express: specifier: ^4.18.2 version: 4.18.2 groupmq: - specifier: 1.0.0-next.19 - version: 1.0.0-next.19(ioredis@5.8.2) + specifier: 1.1.0-next.5 + version: 1.1.0-next.5(ioredis@5.8.2) prom-client: specifier: ^15.1.3 version: 15.1.3 @@ -983,6 +980,9 @@ importers: date-fns: specifier: ^3.3.1 version: 3.3.1 + lru-cache: + specifier: ^11.2.2 + version: 11.2.2 luxon: specifier: ^3.6.1 version: 3.6.1 @@ -1048,8 +1048,8 @@ importers: packages/db: dependencies: '@clickhouse/client': - specifier: ^1.2.0 - version: 1.2.0 + specifier: ^1.12.1 + version: 1.12.1 '@openpanel/common': specifier: workspace:* version: link:../common @@ -1166,6 +1166,9 @@ importers: '@maxmind/geoip2-node': specifier: ^6.1.0 version: 6.1.0 + lru-cache: + specifier: ^11.2.2 + version: 11.2.2 devDependencies: '@openpanel/tsconfig': specifier: workspace:* @@ -1295,11 +1298,11 @@ importers: specifier: workspace:* version: link:../redis bullmq: - specifier: ^5.8.7 - version: 5.8.7 + specifier: ^5.63.0 + version: 5.63.0 groupmq: - specifier: 1.0.0-next.19 - version: 1.0.0-next.19(ioredis@5.8.2) + specifier: 1.1.0-next.5 + version: 1.1.0-next.5(ioredis@5.8.2) devDependencies: '@openpanel/sdk': specifier: workspace:* @@ -1320,8 +1323,11 @@ importers: specifier: workspace:* version: link:../json ioredis: - specifier: ^5.7.0 - version: 5.7.0 + specifier: 5.8.2 + version: 5.8.2 + lru-cache: + specifier: ^11.2.2 + version: 11.2.2 devDependencies: '@openpanel/db': specifier: workspace:* @@ -2696,25 +2702,25 @@ packages: cpu: [x64] os: [win32] - '@bull-board/api@6.13.1': - resolution: {integrity: sha512-L9Ukfd/gxg8VIUb+vXRcU31yJsAaLLKG2qU/OMXQJ5EoXm2JhWBat+26YgrH/oKIb9zbZsg8xwHyqxa7sHEkVg==} + '@bull-board/api@6.14.0': + resolution: {integrity: sha512-oMDwXwoPn0RsdZ3Y68/bOErZ/qGZE5H97vgE/Pc8Uul/OHajlvajKW4NV+ZGTix82liUfH9CkjYx7PpwvBWhxg==} peerDependencies: - '@bull-board/ui': 6.13.1 + '@bull-board/ui': 6.14.0 - '@bull-board/express@6.13.1': - resolution: {integrity: sha512-wipvCsdeMdcgWVc77qrs858OjyGo7IAjJxuuWd4q5dvciFmTU1fmfZddWuZ1jDWpq5P7KdcpGxjzF1vnd2GaUw==} + '@bull-board/express@6.14.0': + resolution: {integrity: 
sha512-3H1ame2G1+eVnqqSsw6KfzTGYAWSpVsIx6EPwg9vPSP2eKfNAm12Cm4zvL6ZkwAvTCkAByt5PPDRWbbwWB6HHQ==} - '@bull-board/ui@6.13.1': - resolution: {integrity: sha512-DzPjCFzjEbDukhfSd7nLdTLVKIv5waARQuAXETSRqiKTN4vSA1KNdaJ8p72YwHujKO19yFW1zWjNKrzsa8DCIg==} + '@bull-board/ui@6.14.0': + resolution: {integrity: sha512-5yqfS9CwWR8DBxpReIbqv/VSPFM/zT4KZ75keyApMiejasRC2joaHqEzYWlMCjkMycbNNCvlQNlTbl+C3dE/dg==} '@capsizecss/unpack@2.4.0': resolution: {integrity: sha512-GrSU71meACqcmIUxPYOJvGKF0yryjN/L1aCuE9DViCTJI7bfkjgYDPD1zbNDcINJwSSP6UaBZY9GAbYDO7re0Q==} - '@clickhouse/client-common@1.2.0': - resolution: {integrity: sha512-VfA/C/tVJ2eNe72CaQ7eXmai+yqFEvZjQZiNtvJoOMLP+Vtb6DzqH9nfkgsiHHMhUhhclvt2mFh6+euk1Ea5wA==} + '@clickhouse/client-common@1.12.1': + resolution: {integrity: sha512-ccw1N6hB4+MyaAHIaWBwGZ6O2GgMlO99FlMj0B0UEGfjxM9v5dYVYql6FpP19rMwrVAroYs/IgX2vyZEBvzQLg==} - '@clickhouse/client@1.2.0': - resolution: {integrity: sha512-zMp2EhMfp1IrFKr/NjDwNiLsf7nq68nW8lGKszwFe7Iglc6Z5PY9ZA9Hd0XqAk75Q1NmFrkGCP1r3JCM1Nm1Bw==} + '@clickhouse/client@1.12.1': + resolution: {integrity: sha512-7ORY85rphRazqHzImNXMrh4vsaPrpetFoTWpZYueCO2bbO6PXYDXp/GQ4DgxnGIqbWB/Di1Ai+Xuwq2o7DJ36A==} engines: {node: '>=16'} '@cloudflare/kv-asset-handler@0.4.0': @@ -3979,14 +3985,14 @@ packages: '@fastify/ajv-compiler@4.0.2': resolution: {integrity: sha512-Rkiu/8wIjpsf46Rr+Fitd3HRP+VsxUFDDeag0hs9L0ksfnwx2g7SPQQTFL0E8Qv+rfXzQOxBJnjUB9ITUDjfWQ==} - '@fastify/compress@8.0.1': - resolution: {integrity: sha512-yWNfKhvL4orfN45LKCHCo8Fcsbj1kdNgwyShw2xpdHfzPf4A3MESmgSfUm3TCKQwgqDdrPnLfy1E+3I/DVP+BQ==} + '@fastify/compress@8.1.0': + resolution: {integrity: sha512-wX3I5u/SYQXxbqjG7CysvzeaCe4Sv8y13MnvnaGTpqfKkJbTLpwvdIDgqrwp/+UGvXOW7OLDLoTAQCDMJJRjDQ==} '@fastify/cookie@11.0.2': resolution: {integrity: sha512-GWdwdGlgJxyvNv+QcKiGNevSspMQXncjMZ1J8IvuDQk0jvkzgWWZFNC2En3s+nHndZBGV8IbLwOI/sxCZw/mzA==} - '@fastify/cors@11.0.0': - resolution: {integrity: sha512-41Bx0LVGr2a6DnnhDN/SgfDlTRNZtEs8niPxyoymV6Hw09AIdz/9Rn/0Fpu+pBOs6kviwS44JY2mB8NcU2qSAA==} + '@fastify/cors@11.1.0': + resolution: {integrity: sha512-sUw8ed8wP2SouWZTIbA7V2OQtMNpLj2W6qJOYhNdcmINTu6gsxVYXjQiM9mdi8UUDlcoDDJ/W2syPo1WB2QjYA==} '@fastify/error@4.0.0': resolution: {integrity: sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA==} @@ -4003,11 +4009,11 @@ packages: '@fastify/proxy-addr@5.0.0': resolution: {integrity: sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA==} - '@fastify/rate-limit@10.2.2': - resolution: {integrity: sha512-45vXZImiYthKlMohF4XoHXYiBXCyRYY+zmtjLZuQrGraW0Zj9hYPYNOIa47012+5A65M0KJQxIVbzYCNP90hcg==} + '@fastify/rate-limit@10.3.0': + resolution: {integrity: sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q==} - '@fastify/websocket@11.0.2': - resolution: {integrity: sha512-1oyJkNSZNJGjo/A5fXvlpEcm1kTBD91nRAN9lA7RNVsVNsyC5DuhOXdNL9/4UawVe7SKvzPT/QVI4RdtE9ylnA==} + '@fastify/websocket@11.2.0': + resolution: {integrity: sha512-3HrDPbAG1CzUCqnslgJxppvzaAZffieOVbLp1DAy1huCSynUWPifSvfdEDUR8HlJLp3sp1A36uOM2tJogADS8w==} '@floating-ui/core@1.6.0': resolution: {integrity: sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==} @@ -4178,9 +4184,6 @@ packages: resolution: {integrity: sha512-R7Gsg6elpuqdn55fBH2y9oYzrU/yKrSmIsDX4ROT51vohrECFzTf2zw9BfUbOW8xjfmM2QbVoVYdTwhrtEKWSQ==} engines: {node: '>=18'} - '@ioredis/commands@1.3.0': - resolution: {integrity: 
sha512-M/T6Zewn7sDaBQEqIZ8Rb+i9y8qfGmq+5SDFSf9sA2lUZTmdDLVdOiQaeDp+Q4wElZ9HG1GAX5KhDaidp6LQsQ==} - '@ioredis/commands@1.4.0': resolution: {integrity: sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ==} @@ -4641,10 +4644,6 @@ packages: resolution: {integrity: sha512-uIX52NnTM0iBh84MShlpouI7UKqkZ7MrUszTmaypHBu4r7NofznSnQRfJ+uUeDtQDj6w8eFGg5KBLDAwAPz1+A==} engines: {node: '>=14'} - '@opentelemetry/api@1.8.0': - resolution: {integrity: sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==} - engines: {node: '>=8.0.0'} - '@opentelemetry/api@1.9.0': resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} engines: {node: '>=8.0.0'} @@ -5270,6 +5269,10 @@ packages: resolution: {integrity: sha512-TtxJSRD8Ohxp6bKkhrm27JRHAxPczQA7idtcTOMYI+wQRRrfgqxHv1cFbCApcSnNjtXkmzFozn6jQtFrOmbjPQ==} engines: {node: '>=14'} + '@opentelemetry/semantic-conventions@1.37.0': + resolution: {integrity: sha512-JD6DerIKdJGmRp4jQyX5FlrQjA4tjOw1cvfsPAZXfOOEErMUHjPcPSICS+6WnM0nB0efSFARh0KAZss+bvExOA==} + engines: {node: '>=14'} + '@opentelemetry/sql-common@0.40.1': resolution: {integrity: sha512-nSDlnHSqzC3pXn/wZEZVLuAuJ1MYMXPBwtv2qAbCa3847SaHItdE7SzUq/Jtb0KZmh1zfAbNi3AAMjztTT4Ugg==} engines: {node: '>=14'} @@ -8567,12 +8570,6 @@ packages: '@types/nlcst@2.0.3': resolution: {integrity: sha512-vSYNSDe6Ix3q+6Z7ri9lyWqgGhJTmzRjZRqyq15N0Z/1/UnVsno9G/N40NBijoYx2seFDIl0+B2mgAb9mezUCA==} - '@types/node@20.14.8': - resolution: {integrity: sha512-DO+2/jZinXfROG7j7WKFn/3C6nFwxy2lLpgLjEXJz+0XKphZlTLJ14mo8Vfg8X5BWN6XjyESXq+LcYdT7tR3bA==} - - '@types/node@22.18.0': - resolution: {integrity: sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ==} - '@types/node@24.7.1': resolution: {integrity: sha512-CmyhGZanP88uuC5GpWU9q+fI61j2SkhO3UGMUdfYRE6Bcy0ccyzn1Rqj9YAB/ZY4kOXmNf0ocah5GtphmLMP6Q==} @@ -8585,6 +8582,9 @@ packages: '@types/pg-pool@2.0.6': resolution: {integrity: sha512-TaAUE5rq2VQYxab5Ts7WZhKNmuN78Q6PiFonTDdpbx8a1H0M1vhy3rhiMjl+e2iHmogyMw7jZF4FrE6eJUy5HQ==} + '@types/pg@8.15.5': + resolution: {integrity: sha512-LF7lF6zWEKxuT3/OR8wAZGzkg4ENGXFNyiV/JeOt9z5B+0ZVwbql9McqX5c/WStFq1GaGso7H1AzP/qSzmlCKQ==} + '@types/pg@8.6.1': resolution: {integrity: sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==} @@ -8594,9 +8594,6 @@ packages: '@types/promise.allsettled@1.0.6': resolution: {integrity: sha512-wA0UT0HeT2fGHzIFV9kWpYz5mdoyLxKrTgMdZQM++5h6pYAFH73HXcQhefg24nD1yivUFEn5KU+EF4b+CXJ4Wg==} - '@types/prop-types@15.7.11': - resolution: {integrity: sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==} - '@types/qs@6.9.11': resolution: {integrity: sha512-oGk0gmhnEJK4Yyk+oI7EfXsLayXatCWPHary1MtcmbAifkobT9cM9yutG/hZKIseOU0MqbIwQ/u2nn/Gb+ltuQ==} @@ -8629,9 +8626,6 @@ packages: '@types/react-syntax-highlighter@15.5.11': resolution: {integrity: sha512-ZqIJl+Pg8kD+47kxUjvrlElrraSUrYa4h0dauY/U/FTUuprSCqvUj+9PNQNQzVc6AJgIWUUxn87/gqsMHNbRjw==} - '@types/react@18.3.12': - resolution: {integrity: sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==} - '@types/react@19.1.11': resolution: {integrity: sha512-lr3jdBw/BGj49Eps7EvqlUaoeA0xpj3pc0RoJkHpYaCHkVK7i28dKyImLQb3JVlqs3aYSXf7qYuWOW/fgZnTXQ==} @@ -9297,8 +9291,8 @@ packages: builtins@1.0.3: resolution: {integrity: 
sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==} - bullmq@5.8.7: - resolution: {integrity: sha512-IdAgB9WvJHRAcZtamRLj6fbjMyuIogEa1cjOTWM1pkVoHUOpO34q6FzNMX1R8VOeUhkvkOkWcxI5ENgFLh+TVA==} + bullmq@5.63.0: + resolution: {integrity: sha512-HT1iM3Jt4bZeg3Ru/MxrOy2iIItxcl1Pz5Ync1Vrot70jBpVguMxFEiSaDU57BwYwR4iwnObDnzct2lirKkX5A==} bundle-require@4.0.2: resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} @@ -10468,10 +10462,6 @@ packages: es-array-method-boxes-properly@1.0.0: resolution: {integrity: sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==} - es-define-property@1.0.0: - resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==} - engines: {node: '>= 0.4'} - es-define-property@1.0.1: resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} engines: {node: '>= 0.4'} @@ -10486,10 +10476,6 @@ packages: es-module-lexer@1.7.0: resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} - es-object-atoms@1.0.0: - resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} - engines: {node: '>= 0.4'} - es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} @@ -10810,8 +10796,8 @@ packages: resolution: {integrity: sha512-2qfoaQ3BQDhZ1gtbkKZd6n0kKxJISJGM6u/skD9ljdWItAscjXrtZ1lnjr7PavmXX9j4EyCPmBDiIsLn07d5vA==} engines: {node: '>= 10'} - fastify@5.2.1: - resolution: {integrity: sha512-rslrNBF67eg8/Gyn7P2URV8/6pz8kSAscFL4EThZJ8JBMaXacVdVE4hmUcnPNKERl5o/xTiBSLfdowBRhVF1WA==} + fastify@5.6.1: + resolution: {integrity: sha512-WjjlOciBF0K8pDUPZoGPhqhKrQJ02I8DKaDIfO51EL0kbSMwQFl85cRwhOvmSDWoukNOdTo27gLN549pLCcH7Q==} fastq@1.17.1: resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} @@ -11243,9 +11229,6 @@ packages: peerDependencies: csstype: ^3.0.10 - gopd@1.0.1: - resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} - gopd@1.2.0: resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} engines: {node: '>= 0.4'} @@ -11267,8 +11250,8 @@ packages: resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} engines: {node: '>=6.0'} - groupmq@1.0.0-next.19: - resolution: {integrity: sha512-2iF80iNtvQ/yl8b46JRfNQIkrR+k4VaVtlg+lliPn+fn5IRMEeFaS1cFbGPxYjMtkPvMsi0G526pj1OAYefsFg==} + groupmq@1.1.0-next.5: + resolution: {integrity: sha512-bUsphvHY3tznr9+izuFpTdeLPUYY4tMl4cbg3zWYDa8HeyOggHETzeyN3Ox7ox5/asI8VYyzpU+PV+w7/UIcXA==} engines: {node: '>=18'} peerDependencies: ioredis: '>=5' @@ -11319,10 +11302,6 @@ packages: resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==} engines: {node: '>= 0.4'} - has-symbols@1.0.3: - resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} - engines: {node: '>= 0.4'} - has-symbols@1.1.0: resolution: {integrity: 
sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} engines: {node: '>= 0.4'} @@ -11467,10 +11446,6 @@ packages: resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} engines: {node: '>= 6'} - https-proxy-agent@7.0.5: - resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==} - engines: {node: '>= 14'} - https-proxy-agent@7.0.6: resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} engines: {node: '>= 14'} @@ -11594,10 +11569,6 @@ packages: invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} - ioredis@5.7.0: - resolution: {integrity: sha512-NUcA93i1lukyXU+riqEyPtSEkyFq8tX90uL659J+qpCZ3rEdViB/APC58oAhIh3+bJln2hzdlZbBZsGNrlsR8g==} - engines: {node: '>=12.22.0'} - ioredis@5.8.2: resolution: {integrity: sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q==} engines: {node: '>=12.22.0'} @@ -12384,13 +12355,13 @@ packages: lowlight@1.20.0: resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==} - lru-cache@10.2.0: - resolution: {integrity: sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==} - engines: {node: 14 || >=16.14} - lru-cache@10.4.3: resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + lru-cache@11.2.2: + resolution: {integrity: sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==} + engines: {node: 20 || >=22} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -12877,9 +12848,6 @@ packages: resolution: {integrity: sha512-V6DDh3v8tfZFWbeH6fsL5uBIlWL7SvRgGDaAZWFC5kjQ2xP5dl/mLpWwJQ1Ho6ZbEKVp/351QF1JXYTAmeZ/zA==} engines: {node: '>=10', npm: '>=6'} - mnemonist@0.40.0: - resolution: {integrity: sha512-kdd8AFNig2AD5Rkih7EPCXhu/iMvwevQFX/uEiGhZyPZi7fHqOoF4V4kHLpCfysxXMgQ4B52kdPMCwARshKvEg==} - mock-require-lazy@1.0.17: resolution: {integrity: sha512-P8nKtCgmnX9flup2Ywv6eoHIH7qjnpF0nQ8tRIG2qqy7UyeqLH8/VtHhTSP00hgTM/VkHDUS23mFPLacEfnmSQ==} engines: {node: '>=0.8'} @@ -12921,8 +12889,8 @@ packages: resolution: {integrity: sha512-SdzXp4kD/Qf8agZ9+iTu6eql0m3kWm1A2y1hkpTeVNENutaB0BwHlSvAIaMxwntmRUAUjon2V4L8Z/njd0Ct8A==} hasBin: true - msgpackr@1.10.1: - resolution: {integrity: sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ==} + msgpackr@1.11.5: + resolution: {integrity: sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==} multimatch@5.0.0: resolution: {integrity: sha512-ypMKuglUrZUD99Tk2bUQ+xNQj43lPEfAeX2o9cTteAmShXy2VHDJpuwu1o0xqoKCt9jLVAvwyFKdLTPXKAfJyA==} @@ -13210,9 +13178,6 @@ packages: resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} engines: {node: '>= 6'} - object-inspect@1.13.1: - resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} - object-inspect@1.13.4: resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} engines: 
{node: '>= 0.4'} @@ -13225,9 +13190,6 @@ packages: resolution: {integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==} engines: {node: '>= 0.4'} - obliterator@2.0.4: - resolution: {integrity: sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ==} - ofetch@1.4.1: resolution: {integrity: sha512-QZj2DfGplQAr2oj9KzceK9Hwz6Whxazmn85yYeVuS3u9XTMOGMRx0kO95MQ+vLsj/S/NwBDMMLU5hpxvI6Tklw==} @@ -13737,6 +13699,9 @@ packages: process-warning@4.0.1: resolution: {integrity: sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==} + process-warning@5.0.0: + resolution: {integrity: sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==} + process@0.11.10: resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} engines: {node: '>= 0.6.0'} @@ -14664,8 +14629,8 @@ packages: secure-json-parse@2.7.0: resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} - secure-json-parse@3.0.2: - resolution: {integrity: sha512-H6nS2o8bWfpFEV6U38sOSjS7bTbdgbCGU9wEM6W14P5H0QOsz94KCusifV44GpHDTu2nqZbuDNhTzu+mjDSw1w==} + secure-json-parse@4.1.0: + resolution: {integrity: sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==} seedrandom@3.0.5: resolution: {integrity: sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==} @@ -15633,12 +15598,6 @@ packages: unctx@2.4.1: resolution: {integrity: sha512-AbaYw0Nm4mK4qjhns67C+kgxR2YWiwlDBPzxrN8h8C6VtAdCgditAY5Dezu3IJy4XVqAnbrXt9oQJvsn3fyozg==} - undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - - undici-types@6.21.0: - resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} - undici-types@7.14.0: resolution: {integrity: sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==} @@ -15987,6 +15946,10 @@ packages: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} + uuid@11.1.0: + resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==} + hasBin: true + uuid@7.0.3: resolution: {integrity: sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==} hasBin: true @@ -17905,23 +17868,23 @@ snapshots: '@biomejs/cli-win32-x64@1.9.4': optional: true - '@bull-board/api@6.13.1(@bull-board/ui@6.13.1)': + '@bull-board/api@6.14.0(@bull-board/ui@6.14.0)': dependencies: - '@bull-board/ui': 6.13.1 + '@bull-board/ui': 6.14.0 redis-info: 3.1.0 - '@bull-board/express@6.13.1': + '@bull-board/express@6.14.0': dependencies: - '@bull-board/api': 6.13.1(@bull-board/ui@6.13.1) - '@bull-board/ui': 6.13.1 + '@bull-board/api': 6.14.0(@bull-board/ui@6.14.0) + '@bull-board/ui': 6.14.0 ejs: 3.1.10 express: 4.21.2 transitivePeerDependencies: - supports-color - '@bull-board/ui@6.13.1': + '@bull-board/ui@6.14.0': dependencies: - '@bull-board/api': 6.13.1(@bull-board/ui@6.13.1) + '@bull-board/api': 6.14.0(@bull-board/ui@6.14.0) '@capsizecss/unpack@2.4.0': dependencies: @@ -17931,11 +17894,11 @@ snapshots: transitivePeerDependencies: - encoding - 
'@clickhouse/client-common@1.2.0': {} + '@clickhouse/client-common@1.12.1': {} - '@clickhouse/client@1.2.0': + '@clickhouse/client@1.12.1': dependencies: - '@clickhouse/client-common': 1.2.0 + '@clickhouse/client-common': 1.12.1 '@cloudflare/kv-asset-handler@0.4.0': dependencies: @@ -18906,11 +18869,11 @@ snapshots: ajv-formats: 3.0.1(ajv@8.12.0) fast-uri: 3.0.6 - '@fastify/compress@8.0.1': + '@fastify/compress@8.1.0': dependencies: '@fastify/accept-negotiator': 2.0.1 fastify-plugin: 5.0.1 - mime-db: 1.52.0 + mime-db: 1.54.0 minipass: 7.1.2 peek-stream: 1.1.3 pump: 3.0.0 @@ -18922,10 +18885,10 @@ snapshots: cookie: 1.0.2 fastify-plugin: 5.0.1 - '@fastify/cors@11.0.0': + '@fastify/cors@11.1.0': dependencies: fastify-plugin: 5.0.1 - mnemonist: 0.40.0 + toad-cache: 3.7.0 '@fastify/error@4.0.0': {} @@ -18944,17 +18907,17 @@ snapshots: '@fastify/forwarded': 3.0.0 ipaddr.js: 2.2.0 - '@fastify/rate-limit@10.2.2': + '@fastify/rate-limit@10.3.0': dependencies: '@lukeed/ms': 2.0.2 fastify-plugin: 5.0.1 toad-cache: 3.7.0 - '@fastify/websocket@11.0.2': + '@fastify/websocket@11.2.0': dependencies: duplexify: 4.1.3 fastify-plugin: 5.0.1 - ws: 8.17.1 + ws: 8.18.3 transitivePeerDependencies: - bufferutil - utf-8-validate @@ -19008,13 +18971,13 @@ snapshots: dependencies: react-hook-form: 7.50.1(react@19.1.1) - '@hyperdx/instrumentation-exception@0.1.0(@opentelemetry/api@1.8.0)': + '@hyperdx/instrumentation-exception@0.1.0(@opentelemetry/api@1.9.0)': dependencies: - '@hyperdx/instrumentation-sentry-node': 0.1.0(@opentelemetry/api@1.8.0) - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@hyperdx/instrumentation-sentry-node': 0.1.0(@opentelemetry/api@1.9.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 '@sentry/core': 8.30.0 '@sentry/types': 8.30.0 '@sentry/utils': 8.30.0 @@ -19024,12 +18987,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@hyperdx/instrumentation-sentry-node@0.1.0(@opentelemetry/api@1.8.0)': + '@hyperdx/instrumentation-sentry-node@0.1.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 json-stringify-safe: 5.0.1 shimmer: 1.2.1 tslib: 2.7.0 @@ -19038,24 +19001,24 @@ snapshots: '@hyperdx/node-opentelemetry@0.8.1': dependencies: - '@hyperdx/instrumentation-exception': 0.1.0(@opentelemetry/api@1.8.0) - '@hyperdx/instrumentation-sentry-node': 0.1.0(@opentelemetry/api@1.8.0) - '@opentelemetry/api': 1.8.0 + '@hyperdx/instrumentation-exception': 0.1.0(@opentelemetry/api@1.9.0) + '@hyperdx/instrumentation-sentry-node': 0.1.0(@opentelemetry/api@1.9.0) + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/auto-instrumentations-node': 0.46.1(@opentelemetry/api@1.8.0) - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-logs-otlp-http': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-metrics-otlp-proto': 
0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-trace-otlp-proto': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-http': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-runtime-node': 0.4.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-metrics': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-node': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/auto-instrumentations-node': 0.46.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-logs-otlp-http': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-proto': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-http': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-runtime-node': 0.4.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-node': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.36.0 cli-spinners: 2.9.2 json-stringify-safe: 5.0.1 lodash.isobject: 3.0.2 @@ -19150,8 +19113,6 @@ snapshots: '@inquirer/figures@1.0.4': {} - '@ioredis/commands@1.3.0': {} - '@ioredis/commands@1.4.0': {} '@isaacs/cliui@8.0.2': @@ -19567,81 +19528,74 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api@1.8.0': {} - '@opentelemetry/api@1.9.0': {} - '@opentelemetry/auto-instrumentations-node@0.46.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-amqplib': 0.37.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-aws-lambda': 0.41.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-aws-sdk': 0.41.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-bunyan': 0.38.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-cassandra-driver': 0.38.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-connect': 0.36.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-cucumber': 0.6.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-dataloader': 0.9.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-dns': 0.36.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-express': 0.39.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-fastify': 0.36.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-fs': 0.12.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-generic-pool': 0.36.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-graphql': 0.40.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-grpc': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-hapi': 0.38.0(@opentelemetry/api@1.8.0) - 
'@opentelemetry/instrumentation-http': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-ioredis': 0.40.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-knex': 0.36.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-koa': 0.40.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-lru-memoizer': 0.37.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-memcached': 0.36.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-mongodb': 0.43.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-mongoose': 0.38.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-mysql': 0.38.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-mysql2': 0.38.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-nestjs-core': 0.37.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-net': 0.36.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-pg': 0.41.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-pino': 0.39.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-redis': 0.39.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-redis-4': 0.39.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-restify': 0.38.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-router': 0.37.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-socket.io': 0.39.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-tedious': 0.10.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-undici': 0.2.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation-winston': 0.37.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resource-detector-alibaba-cloud': 0.28.10(@opentelemetry/api@1.8.0) - '@opentelemetry/resource-detector-aws': 1.6.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resource-detector-azure': 0.2.11(@opentelemetry/api@1.8.0) - '@opentelemetry/resource-detector-container': 0.3.11(@opentelemetry/api@1.8.0) - '@opentelemetry/resource-detector-gcp': 0.29.11(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-node': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/auto-instrumentations-node@0.46.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-amqplib': 0.37.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-aws-lambda': 0.41.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-aws-sdk': 0.41.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-bunyan': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-cassandra-driver': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-connect': 0.36.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-cucumber': 0.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-dataloader': 0.9.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-dns': 0.36.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-express': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-fastify': 0.36.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-fs': 0.12.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-generic-pool': 0.36.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-graphql': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-grpc': 0.51.1(@opentelemetry/api@1.9.0) + 
'@opentelemetry/instrumentation-hapi': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-http': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-ioredis': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-knex': 0.36.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-koa': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-lru-memoizer': 0.37.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-memcached': 0.36.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mongodb': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mongoose': 0.38.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql': 0.38.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql2': 0.38.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-nestjs-core': 0.37.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-net': 0.36.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-pg': 0.41.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-pino': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-redis': 0.39.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-redis-4': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-restify': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-router': 0.37.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-socket.io': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-tedious': 0.10.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-undici': 0.2.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-winston': 0.37.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-alibaba-cloud': 0.28.10(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-aws': 1.6.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-azure': 0.2.11(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-container': 0.3.11(@opentelemetry/api@1.9.0) + '@opentelemetry/resource-detector-gcp': 0.29.11(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-node': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - encoding - supports-color - '@opentelemetry/context-async-hooks@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/context-async-hooks@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/core@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.24.1 - '@opentelemetry/core@1.26.0(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -19652,78 +19606,78 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.28.0 - '@opentelemetry/exporter-logs-otlp-http@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/exporter-logs-otlp-http@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/core': 
1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0) + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-metrics-otlp-http@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/exporter-metrics-otlp-http@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-metrics-otlp-proto@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/exporter-metrics-otlp-proto@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-metrics-otlp-http': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-proto-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-trace-otlp-grpc@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/exporter-trace-otlp-grpc@0.51.1(@opentelemetry/api@1.9.0)': dependencies: '@grpc/grpc-js': 1.11.2 - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-grpc-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - - '@opentelemetry/exporter-trace-otlp-http@0.51.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0) - 
'@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - - '@opentelemetry/exporter-trace-otlp-proto@0.51.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-proto-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - - '@opentelemetry/exporter-zipkin@1.24.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-http@0.51.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-proto@0.51.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-zipkin@1.24.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.24.1 - '@opentelemetry/instrumentation-amqplib@0.37.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-amqplib@0.37.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19732,54 +19686,54 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + 
'@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-aws-lambda@0.41.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-aws-lambda@0.41.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-aws-xray': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-aws-xray': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 '@types/aws-lambda': 8.10.122 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-aws-sdk@0.41.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-aws-sdk@0.41.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagation-utils': 0.30.11(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagation-utils': 0.30.11(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-bunyan@0.38.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-bunyan@0.38.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) '@types/bunyan': 1.8.9 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-cassandra-driver@0.38.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-cassandra-driver@0.38.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-connect@0.36.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-connect@0.36.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 '@types/connect': 3.4.36 transitivePeerDependencies: - supports-color @@ -19789,16 +19743,16 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + 
'@opentelemetry/semantic-conventions': 1.37.0 '@types/connect': 3.4.38 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-cucumber@0.6.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-cucumber@0.6.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19809,28 +19763,28 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-dataloader@0.9.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-dataloader@0.9.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-dns@0.36.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-dns@0.36.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 semver: 7.7.2 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-express@0.39.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-express@0.39.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19839,24 +19793,24 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-fastify@0.36.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-fastify@0.36.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-fs@0.12.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-fs@0.12.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + 
'@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -19868,11 +19822,11 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-generic-pool@0.36.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-generic-pool@0.36.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19883,10 +19837,10 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-graphql@0.40.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-graphql@0.40.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -19897,20 +19851,20 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-grpc@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-grpc@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.24.1 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-hapi@0.38.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-hapi@0.38.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19919,15 +19873,15 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-http@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-http@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.24.1 semver: 7.6.3 transitivePeerDependencies: @@ -19944,12 +19898,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-ioredis@0.40.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-ioredis@0.40.0(@opentelemetry/api@1.9.0)': dependencies: 
- '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19958,7 +19912,7 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19966,15 +19920,15 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-knex@0.36.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-knex@0.36.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -19982,16 +19936,16 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-koa@0.40.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-koa@0.40.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 '@types/koa': 2.14.0 '@types/koa__router': 12.0.3 transitivePeerDependencies: @@ -20002,14 +19956,14 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-lru-memoizer@0.37.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-lru-memoizer@0.37.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -20020,21 +19974,21 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-memcached@0.36.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-memcached@0.36.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 
1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 '@types/memcached': 2.2.10 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mongodb@0.43.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-mongodb@0.43.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-metrics': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -20042,16 +19996,16 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mongoose@0.38.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-mongoose@0.38.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -20060,16 +20014,16 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mysql2@0.38.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-mysql2@0.38.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 + '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -20077,16 +20031,16 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mysql@0.38.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-mysql@0.38.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 
'@types/mysql': 2.15.22 transitivePeerDependencies: - supports-color @@ -20095,33 +20049,33 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 '@types/mysql': 2.15.26 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-nestjs-core@0.37.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-nestjs-core@0.37.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-net@0.36.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-net@0.36.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-pg@0.41.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-pg@0.41.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 + '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) '@types/pg': 8.6.1 '@types/pg-pool': 2.0.4 transitivePeerDependencies: @@ -20132,26 +20086,26 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) '@types/pg': 8.6.1 '@types/pg-pool': 2.0.6 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-pino@0.39.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-pino@0.39.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-redis-4@0.39.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-redis-4@0.39.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color @@ -20160,56 +20114,56 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 
0.57.2(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-redis@0.39.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-redis@0.39.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-restify@0.38.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-restify@0.38.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-router@0.37.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-router@0.37.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-runtime-node@0.4.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-runtime-node@0.4.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-socket.io@0.39.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-socket.io@0.39.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-tedious@0.10.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-tedious@0.10.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 '@types/tedious': 4.0.14 transitivePeerDependencies: - supports-color @@ -20218,7 +20172,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + 
'@opentelemetry/semantic-conventions': 1.37.0 '@types/tedious': 4.0.14 transitivePeerDependencies: - supports-color @@ -20231,25 +20185,25 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-undici@0.2.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-undici@0.2.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-winston@0.37.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation-winston@0.37.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/instrumentation@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 '@types/shimmer': 1.2.0 import-in-the-middle: 1.7.4 @@ -20271,104 +20225,104 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/otlp-exporter-base@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/otlp-exporter-base@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-grpc-exporter-base@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/otlp-grpc-exporter-base@0.51.1(@opentelemetry/api@1.9.0)': dependencies: '@grpc/grpc-js': 1.11.2 - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) protobufjs: 7.4.0 - '@opentelemetry/otlp-proto-exporter-base@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/otlp-proto-exporter-base@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.51.1(@opentelemetry/api@1.9.0) protobufjs: 7.4.0 - '@opentelemetry/otlp-transformer@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/otlp-transformer@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/core': 
1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) - '@opentelemetry/propagation-utils@0.30.11(@opentelemetry/api@1.8.0)': + '@opentelemetry/propagation-utils@0.30.11(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 - '@opentelemetry/propagator-aws-xray@1.26.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/propagator-aws-xray@1.26.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) - '@opentelemetry/propagator-b3@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/propagator-b3@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) - '@opentelemetry/propagator-jaeger@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/propagator-jaeger@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common@0.36.2': {} - '@opentelemetry/resource-detector-alibaba-cloud@0.28.10(@opentelemetry/api@1.8.0)': + '@opentelemetry/resource-detector-alibaba-cloud@0.28.10(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 - '@opentelemetry/resource-detector-aws@1.6.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/resource-detector-aws@1.6.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 - '@opentelemetry/resource-detector-azure@0.2.11(@opentelemetry/api@1.8.0)': + '@opentelemetry/resource-detector-azure@0.2.11(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 - '@opentelemetry/resource-detector-container@0.3.11(@opentelemetry/api@1.8.0)': + '@opentelemetry/resource-detector-container@0.3.11(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/resources': 
1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 - '@opentelemetry/resource-detector-gcp@0.29.11(@opentelemetry/api@1.8.0)': + '@opentelemetry/resource-detector-gcp@0.29.11(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/semantic-conventions': 1.27.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.37.0 gcp-metadata: 6.1.0 transitivePeerDependencies: - encoding - supports-color - '@opentelemetry/resources@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/resources@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.24.1 - '@opentelemetry/resources@1.26.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/resources@1.26.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/resources@1.30.1(@opentelemetry/api@1.9.0)': @@ -20377,57 +20331,57 @@ snapshots: '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.28.0 - '@opentelemetry/sdk-logs@0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-logs@0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-metrics@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-metrics@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) lodash.merge: 4.6.2 - '@opentelemetry/sdk-metrics@1.26.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-metrics@1.26.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-node@0.51.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-node@0.51.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.51.1 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-trace-otlp-grpc': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-trace-otlp-http': 0.51.1(@opentelemetry/api@1.8.0) - 
'@opentelemetry/exporter-trace-otlp-proto': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/exporter-zipkin': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-node': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-zipkin': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-node': 1.24.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.24.1 transitivePeerDependencies: - supports-color - '@opentelemetry/sdk-trace-base@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-trace-base@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.24.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.24.1 - '@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) - '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0)': @@ -20437,14 +20391,14 @@ snapshots: '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.28.0 - '@opentelemetry/sdk-trace-node@1.24.1(@opentelemetry/api@1.8.0)': + '@opentelemetry/sdk-trace-node@1.24.1(@opentelemetry/api@1.9.0)': dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/context-async-hooks': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-b3': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/propagator-jaeger': 1.24.1(@opentelemetry/api@1.8.0) - '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.8.0) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-b3': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-jaeger': 1.24.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.24.1(@opentelemetry/api@1.9.0) semver: 
7.7.2 '@opentelemetry/semantic-conventions@1.24.1': {} @@ -20455,15 +20409,12 @@ snapshots: '@opentelemetry/semantic-conventions@1.36.0': {} - '@opentelemetry/sql-common@0.40.1(@opentelemetry/api@1.8.0)': - dependencies: - '@opentelemetry/api': 1.8.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.8.0) + '@opentelemetry/semantic-conventions@1.37.0': {} '@opentelemetry/sql-common@0.40.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@orama/orama@3.0.1': {} @@ -22849,7 +22800,7 @@ snapshots: '@sentry/core@9.46.0': {} - '@sentry/node-core@9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0))(@opentelemetry/resources@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.36.0)': + '@sentry/node-core@9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0))(@opentelemetry/resources@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/context-async-hooks': 1.30.1(@opentelemetry/api@1.9.0) @@ -22857,9 +22808,9 @@ snapshots: '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 '@sentry/core': 9.46.0 - '@sentry/opentelemetry': 9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.36.0) + '@sentry/opentelemetry': 9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0) import-in-the-middle: 1.14.2 '@sentry/node@9.46.0': @@ -22892,23 +22843,23 @@ snapshots: '@opentelemetry/instrumentation-undici': 0.10.1(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 '@prisma/instrumentation': 6.11.1(@opentelemetry/api@1.9.0) '@sentry/core': 9.46.0 - '@sentry/node-core': 9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0))(@opentelemetry/resources@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.36.0) - '@sentry/opentelemetry': 
9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.36.0) + '@sentry/node-core': 9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0))(@opentelemetry/resources@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0) + '@sentry/opentelemetry': 9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0) import-in-the-middle: 1.14.2 minimatch: 9.0.5 transitivePeerDependencies: - supports-color - '@sentry/opentelemetry@9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.36.0)': + '@sentry/opentelemetry@9.46.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/context-async-hooks': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.37.0 '@sentry/core': 9.46.0 '@sentry/react@9.46.0(react@19.1.1)': @@ -23741,7 +23692,7 @@ snapshots: '@types/accepts@1.3.7': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/acorn@4.0.6': dependencies: @@ -23775,19 +23726,19 @@ snapshots: '@types/body-parser@1.19.5': dependencies: '@types/connect': 3.4.38 - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/bunyan@1.8.9': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/connect@3.4.36': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/connect@3.4.38': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/content-disposition@0.5.8': {} @@ -23798,7 +23749,7 @@ snapshots: '@types/connect': 3.4.38 '@types/express': 4.17.21 '@types/keygrip': 1.0.6 - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/cors@2.8.17': dependencies: @@ -23956,14 +23907,14 @@ snapshots: '@types/express-serve-static-core@4.17.43': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/qs': 6.9.11 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 '@types/express-serve-static-core@5.1.0': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/qs': 6.9.11 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 @@ -24032,7 +23983,7 @@ snapshots: '@types/jsonwebtoken@9.0.9': dependencies: '@types/ms': 0.7.34 - '@types/node': 20.14.8 + '@types/node': 24.7.1 '@types/katex@0.16.7': {} @@ -24051,7 +24002,7 @@ snapshots: '@types/http-errors': 2.0.4 '@types/keygrip': 1.0.6 '@types/koa-compose': 3.2.8 - '@types/node': 22.18.0 + 
'@types/node': 24.7.1 '@types/koa__router@12.0.3': dependencies: @@ -24085,7 +24036,7 @@ snapshots: '@types/memcached@2.2.10': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/mime@1.3.5': {} @@ -24097,24 +24048,16 @@ snapshots: '@types/mysql@2.15.22': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/mysql@2.15.26': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/nlcst@2.0.3': dependencies: '@types/unist': 3.0.2 - '@types/node@20.14.8': - dependencies: - undici-types: 5.26.5 - - '@types/node@22.18.0': - dependencies: - undici-types: 6.21.0 - '@types/node@24.7.1': dependencies: undici-types: 7.14.0 @@ -24127,11 +24070,17 @@ snapshots: '@types/pg-pool@2.0.6': dependencies: - '@types/pg': 8.6.1 + '@types/pg': 8.15.5 + + '@types/pg@8.15.5': + dependencies: + '@types/node': 24.7.1 + pg-protocol: 1.6.1 + pg-types: 2.2.0 '@types/pg@8.6.1': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 pg-protocol: 1.6.1 pg-types: 2.2.0 @@ -24141,8 +24090,6 @@ snapshots: '@types/promise.allsettled@1.0.6': {} - '@types/prop-types@15.7.11': {} - '@types/qs@6.9.11': {} '@types/ramda@0.29.10': @@ -24176,16 +24123,11 @@ snapshots: '@types/d3-geo': 2.0.7 '@types/d3-zoom': 2.0.7 '@types/geojson': 7946.0.14 - '@types/react': 18.3.12 + '@types/react': 19.1.11 '@types/react-syntax-highlighter@15.5.11': dependencies: - '@types/react': 18.3.12 - - '@types/react@18.3.12': - dependencies: - '@types/prop-types': 15.7.11 - csstype: 3.1.3 + '@types/react': 19.1.11 '@types/react@19.1.11': dependencies: @@ -24193,7 +24135,7 @@ snapshots: '@types/request-ip@0.0.41': dependencies: - '@types/node': 20.14.8 + '@types/node': 24.7.1 '@types/resolve@1.20.2': {} @@ -24204,13 +24146,13 @@ snapshots: '@types/send@0.17.4': dependencies: '@types/mime': 1.3.5 - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/serve-static@1.15.5': dependencies: '@types/http-errors': 2.0.4 '@types/mime': 3.0.4 - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/shimmer@1.2.0': {} @@ -24224,7 +24166,7 @@ snapshots: '@types/tedious@4.0.14': dependencies: - '@types/node': 22.18.0 + '@types/node': 24.7.1 '@types/through@0.0.33': dependencies: @@ -24252,7 +24194,7 @@ snapshots: '@types/ws@8.5.14': dependencies: - '@types/node': 20.14.8 + '@types/node': 24.7.1 '@types/yargs-parser@21.0.3': {} @@ -24598,7 +24540,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.3 es-array-method-boxes-properly: 1.0.0 - es-object-atoms: 1.0.0 + es-object-atoms: 1.1.1 is-string: 1.0.7 arraybuffer.prototype.slice@1.0.3: @@ -24608,7 +24550,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.22.4 es-errors: 1.3.0 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 is-array-buffer: 3.0.4 is-shared-array-buffer: 1.0.2 @@ -25075,15 +25017,15 @@ snapshots: builtins@1.0.3: {} - bullmq@5.8.7: + bullmq@5.63.0: dependencies: cron-parser: 4.9.0 - ioredis: 5.7.0 - msgpackr: 1.10.1 + ioredis: 5.8.2 + msgpackr: 1.11.5 node-abort-controller: 3.1.1 - semver: 7.6.0 - tslib: 2.6.2 - uuid: 9.0.1 + semver: 7.7.2 + tslib: 2.7.0 + uuid: 11.1.0 transitivePeerDependencies: - supports-color @@ -25166,10 +25108,10 @@ snapshots: call-bind@1.0.7: dependencies: - es-define-property: 1.0.0 + es-define-property: 1.0.1 es-errors: 1.3.0 function-bind: 1.1.2 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 set-function-length: 1.2.1 call-bound@1.0.4: @@ -25948,9 +25890,9 @@ snapshots: define-data-property@1.1.4: dependencies: - es-define-property: 1.0.0 + es-define-property: 1.0.1 es-errors: 1.3.0 - gopd: 1.0.1 + gopd: 1.2.0 
define-lazy-prop@2.0.0: {} @@ -26266,18 +26208,18 @@ snapshots: arraybuffer.prototype.slice: 1.0.3 available-typed-arrays: 1.0.6 call-bind: 1.0.7 - es-define-property: 1.0.0 + es-define-property: 1.0.1 es-errors: 1.3.0 es-set-tostringtag: 2.0.2 es-to-primitive: 1.2.1 function.prototype.name: 1.1.6 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 get-symbol-description: 1.0.2 globalthis: 1.0.3 - gopd: 1.0.1 + gopd: 1.2.0 has-property-descriptors: 1.0.2 has-proto: 1.0.3 - has-symbols: 1.0.3 + has-symbols: 1.1.0 hasown: 2.0.2 internal-slot: 1.0.7 is-array-buffer: 3.0.4 @@ -26288,7 +26230,7 @@ snapshots: is-string: 1.0.7 is-typed-array: 1.1.13 is-weakref: 1.0.2 - object-inspect: 1.13.1 + object-inspect: 1.13.4 object-keys: 1.1.1 object.assign: 4.1.5 regexp.prototype.flags: 1.5.2 @@ -26313,19 +26255,19 @@ snapshots: data-view-buffer: 1.0.1 data-view-byte-length: 1.0.1 data-view-byte-offset: 1.0.0 - es-define-property: 1.0.0 + es-define-property: 1.0.1 es-errors: 1.3.0 - es-object-atoms: 1.0.0 + es-object-atoms: 1.1.1 es-set-tostringtag: 2.0.3 es-to-primitive: 1.2.1 function.prototype.name: 1.1.6 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 get-symbol-description: 1.0.2 globalthis: 1.0.3 - gopd: 1.0.1 + gopd: 1.2.0 has-property-descriptors: 1.0.2 has-proto: 1.0.3 - has-symbols: 1.0.3 + has-symbols: 1.1.0 hasown: 2.0.2 internal-slot: 1.0.7 is-array-buffer: 3.0.4 @@ -26337,7 +26279,7 @@ snapshots: is-string: 1.0.7 is-typed-array: 1.1.13 is-weakref: 1.0.2 - object-inspect: 1.13.1 + object-inspect: 1.13.4 object-keys: 1.1.1 object.assign: 4.1.5 regexp.prototype.flags: 1.5.2 @@ -26355,10 +26297,6 @@ snapshots: es-array-method-boxes-properly@1.0.0: {} - es-define-property@1.0.0: - dependencies: - get-intrinsic: 1.2.4 - es-define-property@1.0.1: {} es-errors@1.3.0: {} @@ -26366,8 +26304,8 @@ snapshots: es-get-iterator@1.1.3: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 - has-symbols: 1.0.3 + get-intrinsic: 1.3.0 + has-symbols: 1.1.0 is-arguments: 1.1.1 is-map: 2.0.2 is-set: 2.0.2 @@ -26377,23 +26315,19 @@ snapshots: es-module-lexer@1.7.0: {} - es-object-atoms@1.0.0: - dependencies: - es-errors: 1.3.0 - es-object-atoms@1.1.1: dependencies: es-errors: 1.3.0 es-set-tostringtag@2.0.2: dependencies: - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 has-tostringtag: 1.0.2 hasown: 2.0.2 es-set-tostringtag@2.0.3: dependencies: - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 has-tostringtag: 1.0.2 hasown: 2.0.2 @@ -27028,9 +26962,9 @@ snapshots: dependencies: strnum: 1.0.5 - fastify-metrics@12.1.0(fastify@5.2.1): + fastify-metrics@12.1.0(fastify@5.6.1): dependencies: - fastify: 5.2.1 + fastify: 5.6.1 fastify-plugin: 5.0.1 prom-client: 15.1.3 @@ -27042,7 +26976,7 @@ snapshots: raw-body: 3.0.0 secure-json-parse: 2.7.0 - fastify@5.2.1: + fastify@5.6.1: dependencies: '@fastify/ajv-compiler': 4.0.2 '@fastify/error': 4.0.0 @@ -27054,10 +26988,10 @@ snapshots: find-my-way: 9.2.0 light-my-request: 6.6.0 pino: 9.6.0 - process-warning: 4.0.1 + process-warning: 5.0.0 rfdc: 1.3.1 - secure-json-parse: 3.0.2 - semver: 7.6.3 + secure-json-parse: 4.1.0 + semver: 7.7.2 toad-cache: 3.7.0 fastq@1.17.1: @@ -27413,7 +27347,7 @@ snapshots: gaxios@6.7.1: dependencies: extend: 3.0.2 - https-proxy-agent: 7.0.5 + https-proxy-agent: 7.0.6 is-stream: 2.0.1 node-fetch: 2.7.0 uuid: 9.0.1 @@ -27446,7 +27380,7 @@ snapshots: es-errors: 1.3.0 function-bind: 1.1.2 has-proto: 1.0.1 - has-symbols: 1.0.3 + has-symbols: 1.1.0 hasown: 2.0.2 get-intrinsic@1.3.0: @@ -27487,7 +27421,7 @@ snapshots: dependencies: call-bind: 1.0.7 es-errors: 1.3.0 - 
get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 get-tsconfig@4.10.1: dependencies: @@ -27604,10 +27538,6 @@ snapshots: dependencies: csstype: 3.1.3 - gopd@1.0.1: - dependencies: - get-intrinsic: 1.2.4 - gopd@1.2.0: {} graceful-fs@4.2.11: {} @@ -27626,7 +27556,7 @@ snapshots: section-matter: 1.0.0 strip-bom-string: 1.0.0 - groupmq@1.0.0-next.19(ioredis@5.8.2): + groupmq@1.1.0-next.5(ioredis@5.8.2): dependencies: cron-parser: 4.9.0 ioredis: 5.8.2 @@ -27678,19 +27608,17 @@ snapshots: has-property-descriptors@1.0.2: dependencies: - es-define-property: 1.0.0 + es-define-property: 1.0.1 has-proto@1.0.1: {} has-proto@1.0.3: {} - has-symbols@1.0.3: {} - has-symbols@1.1.0: {} has-tostringtag@1.0.2: dependencies: - has-symbols: 1.0.3 + has-symbols: 1.1.0 hash-string@1.0.0: {} @@ -27957,13 +27885,6 @@ snapshots: transitivePeerDependencies: - supports-color - https-proxy-agent@7.0.5: - dependencies: - agent-base: 7.1.1 - debug: 4.4.1 - transitivePeerDependencies: - - supports-color - https-proxy-agent@7.0.6: dependencies: agent-base: 7.1.4 @@ -28082,7 +28003,7 @@ snapshots: dependencies: es-errors: 1.3.0 hasown: 2.0.2 - side-channel: 1.0.5 + side-channel: 1.1.0 internmap@1.0.1: {} @@ -28092,20 +28013,6 @@ snapshots: dependencies: loose-envify: 1.4.0 - ioredis@5.7.0: - dependencies: - '@ioredis/commands': 1.3.0 - cluster-key-slot: 1.1.2 - debug: 4.4.0 - denque: 2.1.0 - lodash.defaults: 4.2.0 - lodash.isarguments: 3.1.0 - redis-errors: 1.2.0 - redis-parser: 3.0.0 - standard-as-callback: 2.1.0 - transitivePeerDependencies: - - supports-color - ioredis@5.8.2: dependencies: '@ioredis/commands': 1.4.0 @@ -28157,7 +28064,7 @@ snapshots: is-array-buffer@3.0.4: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 is-arrayish@0.2.1: {} @@ -28301,7 +28208,7 @@ snapshots: is-symbol@1.0.4: dependencies: - has-symbols: 1.0.3 + has-symbols: 1.1.0 is-typed-array@1.1.13: dependencies: @@ -28867,10 +28774,10 @@ snapshots: fault: 1.0.4 highlight.js: 10.7.3 - lru-cache@10.2.0: {} - lru-cache@10.4.3: {} + lru-cache@11.2.2: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 @@ -29738,10 +29645,6 @@ snapshots: mmdb-lib@2.2.0: {} - mnemonist@0.40.0: - dependencies: - obliterator: 2.0.4 - mock-require-lazy@1.0.17: dependencies: get-caller-file: 2.0.5 @@ -29782,7 +29685,7 @@ snapshots: '@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.2 optional: true - msgpackr@1.10.1: + msgpackr@1.11.5: optionalDependencies: msgpackr-extract: 3.0.2 @@ -30156,8 +30059,6 @@ snapshots: object-hash@3.0.0: {} - object-inspect@1.13.1: {} - object-inspect@1.13.4: {} object-keys@1.1.1: {} @@ -30166,11 +30067,9 @@ snapshots: dependencies: call-bind: 1.0.7 define-properties: 1.2.1 - has-symbols: 1.0.3 + has-symbols: 1.1.0 object-keys: 1.1.1 - obliterator@2.0.4: {} - ofetch@1.4.1: dependencies: destr: 2.0.5 @@ -30436,7 +30335,7 @@ snapshots: path-scurry@1.11.1: dependencies: - lru-cache: 10.2.0 + lru-cache: 10.4.3 minipass: 7.1.2 path-to-regexp@0.1.12: {} @@ -30686,6 +30585,8 @@ snapshots: process-warning@4.0.1: {} + process-warning@5.0.0: {} + process@0.11.10: {} progress-stream@2.0.0: @@ -30702,7 +30603,7 @@ snapshots: prom-client@15.1.3: dependencies: - '@opentelemetry/api': 1.8.0 + '@opentelemetry/api': 1.9.0 tdigest: 0.1.2 promise-inflight@1.0.1: {} @@ -30757,7 +30658,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 22.18.0 + '@types/node': 24.7.1 long: 5.2.3 proxy-addr@2.0.7: @@ -31879,15 +31780,15 @@ snapshots: safe-array-concat@1.1.0: dependencies: call-bind: 
1.0.7 - get-intrinsic: 1.2.4 - has-symbols: 1.0.3 + get-intrinsic: 1.3.0 + has-symbols: 1.1.0 isarray: 2.0.5 safe-array-concat@1.1.2: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 - has-symbols: 1.0.3 + get-intrinsic: 1.3.0 + has-symbols: 1.1.0 isarray: 2.0.5 safe-buffer@5.1.2: {} @@ -31936,7 +31837,7 @@ snapshots: secure-json-parse@2.7.0: {} - secure-json-parse@3.0.2: {} + secure-json-parse@4.1.0: {} seedrandom@3.0.5: {} @@ -32070,8 +31971,8 @@ snapshots: define-data-property: 1.1.4 es-errors: 1.3.0 function-bind: 1.1.2 - get-intrinsic: 1.2.4 - gopd: 1.0.1 + get-intrinsic: 1.3.0 + gopd: 1.2.0 has-property-descriptors: 1.0.2 set-function-name@2.0.1: @@ -32182,8 +32083,8 @@ snapshots: dependencies: call-bind: 1.0.7 es-errors: 1.3.0 - get-intrinsic: 1.2.4 - object-inspect: 1.13.1 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 side-channel@1.1.0: dependencies: @@ -32401,7 +32302,7 @@ snapshots: call-bind: 1.0.7 define-properties: 1.2.1 es-abstract: 1.23.3 - es-object-atoms: 1.0.0 + es-object-atoms: 1.1.1 string.prototype.trimend@1.0.7: dependencies: @@ -32413,7 +32314,7 @@ snapshots: dependencies: call-bind: 1.0.7 define-properties: 1.2.1 - es-object-atoms: 1.0.0 + es-object-atoms: 1.1.1 string.prototype.trimstart@1.0.7: dependencies: @@ -32425,7 +32326,7 @@ snapshots: dependencies: call-bind: 1.0.7 define-properties: 1.2.1 - es-object-atoms: 1.0.0 + es-object-atoms: 1.1.1 string_decoder@1.1.1: dependencies: @@ -32957,7 +32858,7 @@ snapshots: dependencies: call-bind: 1.0.7 for-each: 0.3.3 - gopd: 1.0.1 + gopd: 1.2.0 has-proto: 1.0.3 is-typed-array: 1.1.13 @@ -32974,7 +32875,7 @@ snapshots: available-typed-arrays: 1.0.7 call-bind: 1.0.7 for-each: 0.3.3 - gopd: 1.0.1 + gopd: 1.2.0 has-proto: 1.0.3 is-typed-array: 1.1.13 @@ -32988,7 +32889,7 @@ snapshots: dependencies: call-bind: 1.0.7 for-each: 0.3.3 - gopd: 1.0.1 + gopd: 1.2.0 has-proto: 1.0.3 is-typed-array: 1.1.13 possible-typed-array-names: 1.0.0 @@ -33019,7 +32920,7 @@ snapshots: dependencies: call-bind: 1.0.7 has-bigints: 1.0.2 - has-symbols: 1.0.3 + has-symbols: 1.1.0 which-boxed-primitive: 1.0.2 unbzip2-stream@1.4.3: @@ -33045,10 +32946,6 @@ snapshots: magic-string: 0.30.19 unplugin: 2.3.8 - undici-types@5.26.5: {} - - undici-types@6.21.0: {} - undici-types@7.14.0: {} undici@7.14.0: {} @@ -33335,6 +33232,8 @@ snapshots: utils-merge@1.0.1: {} + uuid@11.1.0: {} + uuid@7.0.3: {} uuid@8.3.2: {} @@ -33593,7 +33492,7 @@ snapshots: available-typed-arrays: 1.0.6 call-bind: 1.0.7 for-each: 0.3.3 - gopd: 1.0.1 + gopd: 1.2.0 has-tostringtag: 1.0.2 which-typed-array@1.1.15: @@ -33601,7 +33500,7 @@ snapshots: available-typed-arrays: 1.0.7 call-bind: 1.0.7 for-each: 0.3.3 - gopd: 1.0.1 + gopd: 1.2.0 has-tostringtag: 1.0.2 which@1.3.1: