From 4818b7d759bd055a7a74b24f6f2c929e17204f37 Mon Sep 17 00:00:00 2001 From: Laurin Quast Date: Wed, 19 Nov 2025 14:47:01 +0100 Subject: [PATCH 1/7] feat: move logger property to top level instead of having it in agent object --- packages/libraries/core/src/client/agent.ts | 6 +- packages/libraries/core/src/client/client.ts | 4 +- packages/libraries/core/src/client/types.ts | 8 + packages/libraries/core/tests/usage.spec.ts | 146 +++++++++++++++++++ 4 files changed, 161 insertions(+), 3 deletions(-) diff --git a/packages/libraries/core/src/client/agent.ts b/packages/libraries/core/src/client/agent.ts index 90cce54a79b..f9e17a1f5b4 100644 --- a/packages/libraries/core/src/client/agent.ts +++ b/packages/libraries/core/src/client/agent.ts @@ -67,7 +67,11 @@ export interface AgentOptions { */ maxSize?: number; /** - * Custom logger (defaults to console) + * Custom logger. + * + * Default: console based logger + * + * @deprecated Instead, provide a logger for the root Hive SDK. If a logger is provided on the root Hive SDK, this one is ignored. */ logger?: Logger; /** diff --git a/packages/libraries/core/src/client/client.ts b/packages/libraries/core/src/client/client.ts index 4c37eadd226..b7c84666db8 100644 --- a/packages/libraries/core/src/client/client.ts +++ b/packages/libraries/core/src/client/client.ts @@ -14,9 +14,9 @@ import { createHiveLogger, isLegacyAccessToken } from './utils.js'; export function createHive(options: HivePluginOptions): HiveClient { const logger = createHiveLogger( - options?.agent?.logger ?? console, + options?.logger ?? options?.agent?.logger ?? console, '[hive]', - options.debug ?? false, + options.debug, ); let enabled = options.enabled ?? 
true; diff --git a/packages/libraries/core/src/client/types.ts b/packages/libraries/core/src/client/types.ts index 85374d3cfb1..d2b1febe253 100644 --- a/packages/libraries/core/src/client/types.ts +++ b/packages/libraries/core/src/client/types.ts @@ -210,8 +210,16 @@ export type HivePluginOptions = OptionalWhenFalse< * Debugging mode * * Default: false + * + * @deprecated Use the {logger} property instead. */ debug?: boolean; + /** + * Custom logger. + * + * Default: console based logger + */ + logger?: Logger; /** * Access Token for usage reporting */ diff --git a/packages/libraries/core/tests/usage.spec.ts b/packages/libraries/core/tests/usage.spec.ts index 74f844a5dc5..732947997e8 100644 --- a/packages/libraries/core/tests/usage.spec.ts +++ b/packages/libraries/core/tests/usage.spec.ts @@ -885,3 +885,149 @@ test('constructs URL with usage.target (hvu1/)', async ({ expect }) => { expect(url).toEqual('http://localhost/the-guild/graphql-hive/staging'); await hive.dispose(); }); + +test('no debug property -> logger.debug is invoked', async ({ expect }) => { + const logger = createHiveTestingLogger(); + const token = 'hvu1/brrrrt'; + + const hive = createHive({ + enabled: true, + agent: { + timeout: 500, + maxRetries: 0, + sendInterval: 1, + maxSize: 1, + async fetch() { + return new Response('', { + status: 200, + }); + }, + logger, + }, + token, + selfHosting: { + graphqlEndpoint: 'http://localhost:2/graphql', + applicationUrl: 'http://localhost:1', + usageEndpoint: 'http://localhost', + }, + usage: { + target: 'the-guild/graphql-hive/staging', + }, + }); + + await hive.collectUsage()( + { + schema, + document: op, + operationName: 'asd', + }, + {}, + ); + + await hive.dispose(); + expect(logger.getLogs()).toMatchInlineSnapshot(` + [DBG] [hive][usage][agent] Disposing + [DBG] [hive][usage][agent] Sending immediately + [DBG] [hive][usage][agent] Sending report (queue 1) + [DBG] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging 
(x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent] Report sent! + `); +}); + +test('debug: false -> logger.debug is not invoked', async ({ expect }) => { + const logger = createHiveTestingLogger(); + const token = 'hvu1/brrrrt'; + + const hive = createHive({ + enabled: true, + debug: false, + agent: { + timeout: 500, + maxRetries: 0, + sendInterval: 1, + maxSize: 1, + async fetch() { + return new Response('', { + status: 200, + }); + }, + logger, + }, + token, + selfHosting: { + graphqlEndpoint: 'http://localhost:2/graphql', + applicationUrl: 'http://localhost:1', + usageEndpoint: 'http://localhost', + }, + usage: { + target: 'the-guild/graphql-hive/staging', + }, + }); + + await hive.collectUsage()( + { + schema, + document: op, + operationName: 'asd', + }, + {}, + ); + + await hive.dispose(); + expect(logger.getLogs()).toMatchInlineSnapshot(``); +}); + +test('debug: true and missing logger.debug method -> logger.info is invoked (to cover legacy logger implementation)', async ({ + expect, +}) => { + const logger = createHiveTestingLogger(); + // @ts-expect-error + logger.debug = undefined; + const token = 'hvu1/brrrrt'; + + const hive = createHive({ + enabled: true, + debug: true, + agent: { + timeout: 500, + maxRetries: 0, + sendInterval: 1, + maxSize: 1, + async fetch() { + return new Response('', { + status: 200, + }); + }, + logger, + }, + token, + selfHosting: { + graphqlEndpoint: 'http://localhost:2/graphql', + applicationUrl: 'http://localhost:1', + usageEndpoint: 'http://localhost', + }, + usage: { + target: 'the-guild/graphql-hive/staging', + }, + }); + + await hive.collectUsage()( + { + schema, + document: op, + operationName: 'asd', + }, + {}, + ); + + await hive.dispose(); + expect(logger.getLogs()).toMatchInlineSnapshot(` + [INF] [hive][usage][agent] 
Disposing + [INF] [hive][usage][agent] Sending immediately + [INF] [hive][usage][agent] Sending report (queue 1) + [INF] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [INF] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] [hive][usage][agent] Report sent! + `); +}); From b50d7880917532e4522a0df6cc774c041ced4930 Mon Sep 17 00:00:00 2001 From: Laurin Quast Date: Wed, 19 Nov 2025 15:01:48 +0100 Subject: [PATCH 2/7] wow --- packages/libraries/core/src/client/agent.ts | 2 +- packages/libraries/core/src/client/client.ts | 53 ++++++++++++++++++-- packages/libraries/core/src/client/types.ts | 4 +- packages/libraries/core/src/client/utils.ts | 6 +-- 4 files changed, 56 insertions(+), 9 deletions(-) diff --git a/packages/libraries/core/src/client/agent.ts b/packages/libraries/core/src/client/agent.ts index f9e17a1f5b4..8e5adfe7e1e 100644 --- a/packages/libraries/core/src/client/agent.ts +++ b/packages/libraries/core/src/client/agent.ts @@ -123,7 +123,7 @@ export function createAgent( ? null : pluginOptions.circuitBreaker, }; - const logger = createHiveLogger(pluginOptions.logger ?? console, '[agent]', pluginOptions.debug); + const logger = createHiveLogger(pluginOptions.logger ?? 
console, '[agent]'); let circuitBreaker: CircuitBreakerInterface< Parameters, diff --git a/packages/libraries/core/src/client/client.ts b/packages/libraries/core/src/client/client.ts index b7c84666db8..b4888d869fd 100644 --- a/packages/libraries/core/src/client/client.ts +++ b/packages/libraries/core/src/client/client.ts @@ -10,14 +10,61 @@ import { createPersistedDocuments } from './persisted-documents.js'; import { createReporting } from './reporting.js'; import type { HiveClient, HiveInternalPluginOptions, HivePluginOptions } from './types.js'; import { createUsage } from './usage.js'; -import { createHiveLogger, isLegacyAccessToken } from './utils.js'; +import { createHiveLogger, HiveLogger, isLegacyAccessToken } from './utils.js'; -export function createHive(options: HivePluginOptions): HiveClient { - const logger = createHiveLogger( +function chooseDefaultLogger(options: HivePluginOptions): HiveLogger { + if (options.logger === 'debug') { + return createHiveLogger( + { + debug(...args) { + console.debug(...args); + }, + info(...args) { + console.info(...args); + }, + error(...args) { + console.error(...args); + }, + }, + '[hive]', + ); + } + if (options.logger === 'info') { + return createHiveLogger( + { + debug() {}, + info(...args) { + console.info(...args); + }, + error(...args) { + console.error(...args); + }, + }, + '[hive]', + ); + } + if (options.logger === 'error') { + return createHiveLogger( + { + debug() {}, + info() {}, + error(...args) { + console.error(...args); + }, + }, + '[hive]', + ); + } + + return createHiveLogger( options?.logger ?? options?.agent?.logger ?? console, '[hive]', options.debug, ); +} + +export function createHive(options: HivePluginOptions): HiveClient { + const logger = chooseDefaultLogger(options); let enabled = options.enabled ?? 
true; if (enabled === false && !options.experimental__persistedDocuments) { diff --git a/packages/libraries/core/src/client/types.ts b/packages/libraries/core/src/client/types.ts index d2b1febe253..28181c4a7da 100644 --- a/packages/libraries/core/src/client/types.ts +++ b/packages/libraries/core/src/client/types.ts @@ -217,9 +217,9 @@ export type HivePluginOptions = OptionalWhenFalse< /** * Custom logger. * - * Default: console based logger + * Default: 'info' */ - logger?: Logger; + logger?: Logger | 'error' | 'info' | 'debug'; /** * Access Token for usage reporting */ diff --git a/packages/libraries/core/src/client/utils.ts b/packages/libraries/core/src/client/utils.ts index 3ee19bbcf1a..55c4c3867ca 100644 --- a/packages/libraries/core/src/client/utils.ts +++ b/packages/libraries/core/src/client/utils.ts @@ -210,9 +210,6 @@ export function createHiveLogger(baseLogger: Logger, prefix: string, debug = tru return { [hiveSymbol]: context, - info: (message: string) => { - logger.info(printPath(path) + message); - }, error: (error: any, ...data: any[]) => { if (error.stack) { const pth = printPath(path); @@ -223,6 +220,9 @@ export function createHiveLogger(baseLogger: Logger, prefix: string, debug = tru logger.error(printPath(path) + String(error), ...data); } }, + info: (message: string) => { + logger.info(printPath(path) + message); + }, debug: (message: string) => { if (!context.debug) { return; From c8b689224885e37c7dd3d14ed648fbafdae72b4c Mon Sep 17 00:00:00 2001 From: Laurin Quast Date: Wed, 19 Nov 2025 15:08:45 +0100 Subject: [PATCH 3/7] fix --- packages/libraries/core/tests/usage.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/libraries/core/tests/usage.spec.ts b/packages/libraries/core/tests/usage.spec.ts index 732947997e8..dc49dee59c8 100644 --- a/packages/libraries/core/tests/usage.spec.ts +++ b/packages/libraries/core/tests/usage.spec.ts @@ -982,7 +982,7 @@ test('debug: true and missing logger.debug method -> logger.info 
is invoked (to
   expect,
 }) => {
   const logger = createHiveTestingLogger();
-  // @ts-expect-error
+  // @ts-expect-error we remove this property to emulate logger without it
   logger.debug = undefined;
   const token = 'hvu1/brrrrt';
 

From 876c5a5b4cd000423aaf30921615e9a3f8946a57 Mon Sep 17 00:00:00 2001
From: Laurin Quast
Date: Mon, 24 Nov 2025 11:50:45 +0100
Subject: [PATCH 4/7] changeset

---
 .changeset/big-pigs-help.md | 69 +++++++++++++++++++++++++++++++++++++
 1 file changed, 69 insertions(+)
 create mode 100644 .changeset/big-pigs-help.md

diff --git a/.changeset/big-pigs-help.md b/.changeset/big-pigs-help.md
new file mode 100644
index 00000000000..bbc9b819b19
--- /dev/null
+++ b/.changeset/big-pigs-help.md
@@ -0,0 +1,69 @@
+---
+'@graphql-hive/core': minor
+'@graphql-hive/apollo': minor
+'@graphql-hive/envelop': minor
+'@graphql-hive/yoga': minor
+---
+
+Add support for providing a logger object via `HivePluginOptions`.
+
+It is possible to provide the following options:
+
+- **`'error'`** log errors
+- **`'info'`** log errors and informational logs
+- **`'debug'`** log errors, informational and debug logs
+
+```ts
+import { createHive } from '@graphql-hive/core'
+
+const client = createHive({
+  logger: 'info'
+})
+```
+
+In addition to that, it is also possible to provide a logger instance, where you can
+customize how logs are forwarded.
+
+```ts
+import { createHive } from '@graphql-hive/core'
+
+const client = createHive({
+  logger: {
+    info() {},
+    error() {},
+    debug() {}
+  }
+})
+```
+
+Deprecates the `HivePluginOptions.debug` option. Instead, please provide a logger with a `debug`
+method.
+
+```diff
+ import { createHive } from '@graphql-hive/core'
+
+ const client = createHive({
+-  debug: process.env.DEBUG === "1",
++  logger: process.env.DEBUG === "1" ? "debug" : "info",
+ })
+```
+
+Deprecates the `HivePluginOptions.agent.logger` option. Instead, please provide
+`HivePluginOptions.logger`.
+ +```diff + import { createHive } from '@graphql-hive/core' + + const logger = { + info() {}, + error() {}, + debug() {}, + }; + + const client = createHive({ + agent: { +- logger, + }, ++ logger, + }) +``` From 4c1cc849694155cc19f1459dac15c6acad9457a8 Mon Sep 17 00:00:00 2001 From: Laurin Quast Date: Tue, 25 Nov 2025 17:49:30 +0100 Subject: [PATCH 5/7] feat: hive-logger for JS SDK (#7307) Co-authored-by: Denis Badurina --- .../libraries/apollo/tests/apollo.spec.ts | 6 +- .../cli/src/commands/artifact/fetch.ts | 23 +-- packages/libraries/core/package.json | 1 + packages/libraries/core/src/client/agent.ts | 10 +- packages/libraries/core/src/client/client.ts | 64 ++------ .../libraries/core/src/client/gateways.ts | 4 +- .../libraries/core/src/client/http-client.ts | 93 ++++++----- .../core/src/client/persisted-documents.ts | 4 +- .../libraries/core/src/client/reporting.ts | 4 +- .../libraries/core/src/client/supergraph.ts | 5 +- packages/libraries/core/src/client/types.ts | 9 +- packages/libraries/core/src/client/usage.ts | 3 +- packages/libraries/core/src/client/utils.ts | 146 ++++++++++-------- packages/libraries/core/src/index.ts | 2 +- packages/libraries/core/tests/enabled.spec.ts | 4 +- .../libraries/core/tests/http-client.spec.ts | 3 - packages/libraries/core/tests/info.spec.ts | 4 +- .../libraries/core/tests/reporting.spec.ts | 89 ++++++----- packages/libraries/core/tests/usage.spec.ts | 91 ++++++----- packages/libraries/yoga/package.json | 1 + packages/libraries/yoga/src/index.ts | 38 ++++- pnpm-lock.yaml | 63 ++++++-- 22 files changed, 368 insertions(+), 299 deletions(-) diff --git a/packages/libraries/apollo/tests/apollo.spec.ts b/packages/libraries/apollo/tests/apollo.spec.ts index c5ff433944a..48db56c073c 100644 --- a/packages/libraries/apollo/tests/apollo.spec.ts +++ b/packages/libraries/apollo/tests/apollo.spec.ts @@ -102,9 +102,9 @@ test('should not interrupt the process', async () => { await waitFor(200); await apollo.stop(); clean(); - 
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][info]')); - expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][usage]')); - expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting]')); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][info]')); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][usage]')); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting]')); }, 1_000); test('should capture client name and version headers', async () => { diff --git a/packages/libraries/cli/src/commands/artifact/fetch.ts b/packages/libraries/cli/src/commands/artifact/fetch.ts index 5fac9d31f97..196b04122ca 100644 --- a/packages/libraries/cli/src/commands/artifact/fetch.ts +++ b/packages/libraries/cli/src/commands/artifact/fetch.ts @@ -69,18 +69,19 @@ export default class ArtifactsFetch extends Command { retry: { retries: 3, }, - logger: { - info: (...args) => { - if (this.flags.debug) { - console.info(...args); + logger: this.flags.debug + ? { + info: (...args: Array) => { + this.logInfo(...args); + }, + error: (...args: Array) => { + this.logFailure(...args); + }, + debug: (...args: Array) => { + this.logInfo(...args); + }, } - }, - error: (...args) => { - if (this.flags.debug) { - console.error(...args); - } - }, - }, + : undefined, }); } catch (e: any) { const sourceError = e?.cause ?? 
e; diff --git a/packages/libraries/core/package.json b/packages/libraries/core/package.json index dcc4136ebfe..3667cd126a8 100644 --- a/packages/libraries/core/package.json +++ b/packages/libraries/core/package.json @@ -46,6 +46,7 @@ "graphql": "^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" }, "dependencies": { + "@graphql-hive/logger": "^1.0.9", "@graphql-hive/signal": "^2.0.0", "@graphql-tools/utils": "^10.0.0", "@whatwg-node/fetch": "^0.10.13", diff --git a/packages/libraries/core/src/client/agent.ts b/packages/libraries/core/src/client/agent.ts index 8e5adfe7e1e..aaf974751c9 100644 --- a/packages/libraries/core/src/client/agent.ts +++ b/packages/libraries/core/src/client/agent.ts @@ -1,8 +1,8 @@ import CircuitBreaker from '../circuit-breaker/circuit.js'; import { version } from '../version.js'; import { http } from './http-client.js'; -import type { Logger } from './types.js'; -import { createHiveLogger } from './utils.js'; +import type { LegacyLogger } from './types.js'; +import { chooseLogger } from './utils.js'; type ReadOnlyResponse = Pick; @@ -73,7 +73,7 @@ export interface AgentOptions { * * @deprecated Instead, provide a logger for the root Hive SDK. If a logger is provided on the root Hive SDK, this one is ignored. */ - logger?: Logger; + logger?: LegacyLogger; /** * Circuit Breaker Configuration. * true -> Use default configuration @@ -123,13 +123,13 @@ export function createAgent( ? null : pluginOptions.circuitBreaker, }; - const logger = createHiveLogger(pluginOptions.logger ?? 
console, '[agent]'); + const logger = chooseLogger(pluginOptions.logger).child('[agent]'); let circuitBreaker: CircuitBreakerInterface< Parameters, ReturnType >; - const breakerLogger = createHiveLogger(logger, '[circuit breaker]'); + const breakerLogger = logger.child('[circuit breaker]'); const enabled = options.enabled !== false; let timeoutID: ReturnType | null = null; diff --git a/packages/libraries/core/src/client/client.ts b/packages/libraries/core/src/client/client.ts index b4888d869fd..314e9ef27c9 100644 --- a/packages/libraries/core/src/client/client.ts +++ b/packages/libraries/core/src/client/client.ts @@ -4,67 +4,31 @@ import { type GraphQLSchema, type subscribe as SubscribeImplementation, } from 'graphql'; +import { Logger } from '@graphql-hive/logger'; import { version } from '../version.js'; import { http } from './http-client.js'; import { createPersistedDocuments } from './persisted-documents.js'; import { createReporting } from './reporting.js'; import type { HiveClient, HiveInternalPluginOptions, HivePluginOptions } from './types.js'; import { createUsage } from './usage.js'; -import { createHiveLogger, HiveLogger, isLegacyAccessToken } from './utils.js'; - -function chooseDefaultLogger(options: HivePluginOptions): HiveLogger { - if (options.logger === 'debug') { - return createHiveLogger( - { - debug(...args) { - console.debug(...args); - }, - info(...args) { - console.info(...args); - }, - error(...args) { - console.error(...args); - }, - }, - '[hive]', - ); - } - if (options.logger === 'info') { - return createHiveLogger( - { - debug() {}, - info(...args) { - console.info(...args); - }, - error(...args) { - console.error(...args); - }, - }, - '[hive]', - ); +import { chooseLogger, isLegacyAccessToken } from './utils.js'; + +function resolveLoggerFromConfigOptions(options: HivePluginOptions): Logger { + if (typeof options.logger == 'string') { + return new Logger({ + level: options.logger, + }); } - if (options.logger === 'error') { - return 
createHiveLogger( - { - debug() {}, - info() {}, - error(...args) { - console.error(...args); - }, - }, - '[hive]', - ); + + if (options.logger instanceof Logger) { + return options.logger; } - return createHiveLogger( - options?.logger ?? options?.agent?.logger ?? console, - '[hive]', - options.debug, - ); + return chooseLogger(options.logger ?? options.agent?.logger, options.debug); } export function createHive(options: HivePluginOptions): HiveClient { - const logger = chooseDefaultLogger(options); + const logger = resolveLoggerFromConfigOptions(options).child('[hive]'); let enabled = options.enabled ?? true; if (enabled === false && !options.experimental__persistedDocuments) { @@ -111,7 +75,7 @@ export function createHive(options: HivePluginOptions): HiveClient { ? options.printTokenInfo === true || (!!options.debug && options.printTokenInfo !== false) : false; - const infoLogger = createHiveLogger(logger, '[info]'); + const infoLogger = logger.child('[info]'); const info = printTokenInfo ? async () => { diff --git a/packages/libraries/core/src/client/gateways.ts b/packages/libraries/core/src/client/gateways.ts index 6fe2cb5363d..c71aee1c2f7 100644 --- a/packages/libraries/core/src/client/gateways.ts +++ b/packages/libraries/core/src/client/gateways.ts @@ -1,7 +1,7 @@ import { version } from '../version.js'; import { http } from './http-client.js'; import type { SchemaFetcherOptions, ServicesFetcherOptions } from './types.js'; -import { createHash, createHiveLogger, joinUrl } from './utils.js'; +import { chooseLogger, createHash, joinUrl } from './utils.js'; interface Schema { sdl: string; @@ -10,7 +10,7 @@ interface Schema { } function createFetcher(options: SchemaFetcherOptions & ServicesFetcherOptions) { - const logger = createHiveLogger(options.logger ?? console, ''); + const logger = chooseLogger(options.logger ?? 
console); let cacheETag: string | null = null; let cached: { id: string; diff --git a/packages/libraries/core/src/client/http-client.ts b/packages/libraries/core/src/client/http-client.ts index 7512c7deee9..d7b037810a7 100644 --- a/packages/libraries/core/src/client/http-client.ts +++ b/packages/libraries/core/src/client/http-client.ts @@ -1,7 +1,8 @@ import asyncRetry from 'async-retry'; +import { Logger } from '@graphql-hive/logger'; import { abortSignalAny } from '@graphql-hive/signal'; import { crypto, fetch, URL } from '@whatwg-node/fetch'; -import { Logger } from './types'; +import type { LegacyLogger } from './types'; interface SharedConfig { headers: Record; @@ -15,7 +16,7 @@ interface SharedConfig { /** custom fetch implementation. */ fetchImplementation?: typeof fetch; /** Logger for HTTP info and request errors. Uses `console` by default. */ - logger?: Logger; + logger?: LegacyLogger; /** * Function for determining whether the request response is okay. * You can override it if you want to accept other status codes as well. @@ -58,6 +59,39 @@ export const http = { post, }; +function chooseLogger(logger: SharedConfig['logger']): Logger { + if (!logger) { + return new Logger({ + writers: [{ write() {} }], + }); + } + + if (logger instanceof Logger) { + return logger; + } + + return new Logger({ + level: 'debug', + writers: [ + { + write(level, _attrs, msg) { + if (level === 'debug' && logger.debug && msg) { + logger.debug(msg); + return; + } + if (level === 'info' && msg) { + logger.info(msg); + return; + } + if (level === 'error' && msg) { + logger.error(msg); + } + }, + }, + ], + }); +} + export async function makeFetchCall( endpoint: URL | string, config: { @@ -74,7 +108,7 @@ export async function makeFetchCall( /** custom fetch implementation. */ fetchImplementation?: typeof fetch; /** Logger for HTTP info and request errors. Uses `console` by default. 
*/ - logger?: Logger; + logger?: LegacyLogger; /** * Function for determining whether the request response is okay. * You can override it if you want to accept other status codes as well. @@ -85,7 +119,7 @@ export async function makeFetchCall( signal?: AbortSignal; }, ): Promise { - const logger = config.logger; + const logger = chooseLogger(config.logger); const isRequestOk: ResponseAssertFunction = config.isRequestOk ?? (response => response.ok); let retries = 0; let minTimeout = 200; @@ -107,7 +141,7 @@ export async function makeFetchCall( const isFinalAttempt = attempt > retries; const requestId = crypto.randomUUID(); - logger?.debug?.( + logger.debug( `${config.method} ${endpoint} (x-request-id=${requestId})` + (retries > 0 ? ' ' + getAttemptMessagePart(attempt, retries + 1) : ''), ); @@ -126,33 +160,16 @@ export async function makeFetchCall( }, signal, }).catch((error: unknown) => { - const logErrorMessage = () => { - const msg = - `${config.method} ${endpoint} (x-request-id=${requestId}) failed ${getDuration()}. ` + - getErrorMessage(error); - - if (isFinalAttempt) { - logger?.error(msg); - return; - } - logger?.debug?.(msg); - }; - - if (isAggregateError(error)) { - for (const err of error.errors) { - if (isFinalAttempt) { - logger?.error(err); - continue; - } - logger?.debug?.(String(err)); - } - - logErrorMessage(); - throw new Error(`Unexpected HTTP error. (x-request-id=${requestId})`, { cause: error }); + const msg = + `${config.method} ${endpoint} (x-request-id=${requestId}) failed ${getDuration()}. ` + + getErrorMessage(error); + + if (isFinalAttempt) { + logger.error({ error }, msg); + } else { + logger.debug({ error }, msg); } - logger?.error(error); - logErrorMessage(); throw new Error(`Unexpected HTTP error. 
(x-request-id=${requestId})`, { cause: error }); }); @@ -171,14 +188,14 @@ export async function makeFetchCall( } if (isFinalAttempt) { - logger?.error( + logger.error( `${config.method} ${endpoint} (x-request-id=${requestId}) failed with status ${response.status} ${getDuration()}: ${(await response.text()) || ''}`, ); - logger?.error( + logger.error( `${config.method} ${endpoint} (x-request-id=${requestId}) retry limit exceeded after ${attempt} attempts.`, ); } else { - logger?.debug?.( + logger.debug( `${config.method} ${endpoint} (x-request-id=${requestId}) failed with status ${response.status} ${getDuration()}: ${(await response.text()) || ''}`, ); } @@ -189,7 +206,7 @@ export async function makeFetchCall( if (response.status >= 400 && response.status < 500) { if (retries > 0) { - logger?.error(`Abort retry because of status code ${response.status}.`); + logger.error(`Abort retry because of status code ${response.status}.`); } bail(error); } @@ -247,12 +264,4 @@ function formatTimestamp(timestamp: number): string { return parts.join(':'); } -interface AggregateError extends Error { - errors: Error[]; -} - -function isAggregateError(error: unknown): error is AggregateError { - return !!error && typeof error === 'object' && 'errors' in error && Array.isArray(error.errors); -} - export { URL }; diff --git a/packages/libraries/core/src/client/persisted-documents.ts b/packages/libraries/core/src/client/persisted-documents.ts index 90a3287e43b..3ab53a9b743 100644 --- a/packages/libraries/core/src/client/persisted-documents.ts +++ b/packages/libraries/core/src/client/persisted-documents.ts @@ -1,8 +1,8 @@ import type { PromiseOrValue } from 'graphql/jsutils/PromiseOrValue.js'; import LRU from 'tiny-lru'; +import { Logger } from '@graphql-hive/logger'; import { http } from './http-client.js'; import type { PersistedDocumentsConfiguration } from './types'; -import type { HiveLogger } from './utils.js'; type HeadersObject = { get(name: string): string | null; @@ -10,7 
+10,7 @@ type HeadersObject = { export function createPersistedDocuments( config: PersistedDocumentsConfiguration & { - logger: HiveLogger; + logger: Logger; fetch?: typeof fetch; }, ): null | { diff --git a/packages/libraries/core/src/client/reporting.ts b/packages/libraries/core/src/client/reporting.ts index ec0923a7e0e..078be999a02 100644 --- a/packages/libraries/core/src/client/reporting.ts +++ b/packages/libraries/core/src/client/reporting.ts @@ -12,7 +12,7 @@ import { version } from '../version.js'; import type { SchemaPublishMutation } from './__generated__/types.js'; import { http } from './http-client.js'; import type { HiveInternalPluginOptions } from './types.js'; -import { createHiveLogger, logIf } from './utils.js'; +import { logIf } from './utils.js'; export interface SchemaReporter { report(args: { schema: GraphQLSchema }): void; @@ -30,7 +30,7 @@ export function createReporting(pluginOptions: HiveInternalPluginOptions): Schem const token = pluginOptions.token; const selfHostingOptions = pluginOptions.selfHosting; const reportingOptions = pluginOptions.reporting; - const logger = createHiveLogger(pluginOptions.logger, '[reporting]'); + const logger = pluginOptions.logger.child('[reporting]'); logIf( typeof reportingOptions.author !== 'string' || reportingOptions.author.length === 0, diff --git a/packages/libraries/core/src/client/supergraph.ts b/packages/libraries/core/src/client/supergraph.ts index c2a0e395ea1..d4465d3b3b3 100644 --- a/packages/libraries/core/src/client/supergraph.ts +++ b/packages/libraries/core/src/client/supergraph.ts @@ -1,12 +1,13 @@ +import { Logger } from '@graphql-hive/logger'; import { version } from '../version.js'; import { http } from './http-client.js'; -import type { Logger } from './types.js'; +import type { LegacyLogger } from './types.js'; import { createHash, joinUrl } from './utils.js'; export interface SupergraphSDLFetcherOptions { endpoint: string; key: string; - logger?: Logger; + logger?: LegacyLogger | 
Logger; fetchImplementation?: typeof fetch; name?: string; version?: string; diff --git a/packages/libraries/core/src/client/types.ts b/packages/libraries/core/src/client/types.ts index 28181c4a7da..3e4c0995d29 100644 --- a/packages/libraries/core/src/client/types.ts +++ b/packages/libraries/core/src/client/types.ts @@ -1,9 +1,9 @@ import type { ExecutionArgs } from 'graphql'; import type { PromiseOrValue } from 'graphql/jsutils/PromiseOrValue.js'; +import { LogLevel as HiveLoggerLevel, Logger } from '@graphql-hive/logger'; import type { AgentOptions } from './agent.js'; import type { autoDisposeSymbol, hiveClientSymbol } from './client.js'; import type { SchemaReporter } from './reporting.js'; -import { HiveLogger } from './utils.js'; type HeadersObject = { get(name: string): string | null; @@ -68,7 +68,8 @@ export interface ClientInfo { version: string; } -export interface Logger { +/** @deprecated Instead provide a logger instance from `@graphql-hive/logger`. */ +export interface LegacyLogger { info(msg: string): void; error(error: any, ...data: any[]): void; debug?(msg: string): void; @@ -219,7 +220,7 @@ export type HivePluginOptions = OptionalWhenFalse< * * Default: 'info' */ - logger?: Logger | 'error' | 'info' | 'debug'; + logger?: Logger | HiveLoggerLevel; /** * Access Token for usage reporting */ @@ -269,7 +270,7 @@ export type HivePluginOptions = OptionalWhenFalse< >; export type HiveInternalPluginOptions = HivePluginOptions & { - logger: HiveLogger; + logger: Logger; }; export type Maybe = null | undefined | T; diff --git a/packages/libraries/core/src/client/usage.ts b/packages/libraries/core/src/client/usage.ts index d00b40ba991..2e59495e046 100644 --- a/packages/libraries/core/src/client/usage.ts +++ b/packages/libraries/core/src/client/usage.ts @@ -23,7 +23,6 @@ import type { import { cache, cacheDocumentKey, - createHiveLogger, isLegacyAccessToken, logIf, measureDuration, @@ -74,7 +73,7 @@ export function createUsage(pluginOptions: 
HiveInternalPluginOptions): UsageColl const options = typeof pluginOptions.usage === 'boolean' ? ({} as HiveUsagePluginOptions) : pluginOptions.usage; const selfHostingOptions = pluginOptions.selfHosting; - const logger = createHiveLogger(pluginOptions.logger, '[usage]'); + const logger = pluginOptions.logger.child('[usage]'); const collector = memo(createCollector, arg => arg.schema); const excludeSet = new Set(options.exclude ?? []); diff --git a/packages/libraries/core/src/client/utils.ts b/packages/libraries/core/src/client/utils.ts index 55c4c3867ca..fb80a12a46f 100644 --- a/packages/libraries/core/src/client/utils.ts +++ b/packages/libraries/core/src/client/utils.ts @@ -1,6 +1,7 @@ +import { Attributes, Logger } from '@graphql-hive/logger'; import { crypto, TextEncoder } from '@whatwg-node/fetch'; import { hiveClientSymbol } from './client.js'; -import type { HiveClient, HivePluginOptions, Logger } from './types.js'; +import type { HiveClient, HivePluginOptions, LegacyLogger } from './types.js'; async function digest(algo: 'SHA-256' | 'SHA-1', output: 'hex' | 'base64', data: string) { const buffer = await crypto.subtle.digest(algo, new TextEncoder().encode(data)); @@ -176,68 +177,6 @@ export function joinUrl(url: string, subdirectory: string) { return normalizedUrl + '/' + normalizedSubdirectory; } -const hiveSymbol = Symbol('hive-logger'); - -export type HiveLogger = { - info(message: string): void; - debug(message: string): void; - error(error: any, ...data: any[]): void; - [hiveSymbol]: { - path: string; - debug: boolean; - logger: Logger; - }; -}; - -function printPath(path: string) { - if (path.length) { - return path + ' '; - } - return path; -} - -export function createHiveLogger(baseLogger: Logger, prefix: string, debug = true): HiveLogger { - const context: HiveLogger[typeof hiveSymbol] = { - path: '', - logger: baseLogger, - debug, - // @ts-expect-error internal stuff - ...baseLogger?.[hiveSymbol], - }; - context.path = context.path + prefix; - - 
const { logger, path } = context; - - return { - [hiveSymbol]: context, - error: (error: any, ...data: any[]) => { - if (error.stack) { - const pth = printPath(path); - for (const stack of error.stack.split('\n')) { - logger.error(pth + stack); - } - } else { - logger.error(printPath(path) + String(error), ...data); - } - }, - info: (message: string) => { - logger.info(printPath(path) + message); - }, - debug: (message: string) => { - if (!context.debug) { - return; - } - - const msg = printPath(path) + message; - if (!logger.debug) { - logger.info(msg); - return; - } - logger.debug(msg); - }, - }; -} - export function isLegacyAccessToken(accessToken: string): boolean { if ( !accessToken.startsWith('hvo1/') && @@ -249,3 +188,84 @@ export function isLegacyAccessToken(accessToken: string): boolean { return false; } + +export function chooseLogger(logger: LegacyLogger | Logger | undefined, debug?: boolean): Logger { + if (!logger) { + return new Logger({ + writers: [{ write() {} }], + }); + } + + if (logger instanceof Logger) { + return logger; + } + + return new Logger({ + level: 'debug', + writers: [ + { + write(level, attrs, msg) { + const errors = getErrorsFromAttrs(attrs); + + if (level === 'debug' && msg) { + if (logger.debug) { + if (errors) { + for (const error of errors) { + logger.debug(error); + } + } + logger.debug(msg); + return; + } + if (debug === true) { + if (errors) { + for (const error of errors) { + logger.info(error); + } + } + logger.info(msg); + return; + } + + return; + } + if (level === 'info' && msg) { + if (errors) { + for (const error of errors) { + logger.info(error); + } + } + logger.info(msg); + return; + } + if (level === 'error' && msg) { + if (errors) { + for (const error of errors) { + logger.error(error); + } + } + logger.error(msg); + } + }, + }, + ], + }); +} + +function getErrorsFromAttrs(attrs: Attributes | null | undefined): Array | null { + if (!attrs || Array.isArray(attrs)) { + return null; + } + + const error = 
attrs?.error; + + if (!error) { + return null; + } + + if (error?.errors) { + return error.errors.map((error: any) => `${error.name ?? error.class}: ${error.message}`); + } + + return [`${error.name ?? error.class}: ${error.message}`]; +} diff --git a/packages/libraries/core/src/index.ts b/packages/libraries/core/src/index.ts index 988c6c2cc92..f908d25959b 100644 --- a/packages/libraries/core/src/index.ts +++ b/packages/libraries/core/src/index.ts @@ -4,7 +4,7 @@ export type { HivePluginOptions, HiveClient, CollectUsageCallback, - Logger, + LegacyLogger as Logger, } from './client/types.js'; export { createSchemaFetcher, createServicesFetcher } from './client/gateways.js'; export { createHive, autoDisposeSymbol } from './client/client.js'; diff --git a/packages/libraries/core/tests/enabled.spec.ts b/packages/libraries/core/tests/enabled.spec.ts index 84534357940..616d1a3b850 100644 --- a/packages/libraries/core/tests/enabled.spec.ts +++ b/packages/libraries/core/tests/enabled.spec.ts @@ -19,9 +19,7 @@ test("should log that it's not enabled", async () => { .then(() => 'OK') .catch(() => 'ERROR'); - expect(logger.info).toHaveBeenCalledWith( - expect.stringContaining(`[hive] Plugin is not enabled.`), - ); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive]Plugin is not enabled.`)); expect(result).toBe('OK'); }); diff --git a/packages/libraries/core/tests/http-client.spec.ts b/packages/libraries/core/tests/http-client.spec.ts index a77bab01f73..30eb37dcb31 100644 --- a/packages/libraries/core/tests/http-client.spec.ts +++ b/packages/libraries/core/tests/http-client.spec.ts @@ -17,7 +17,6 @@ test('HTTP call without retries and system level error', async () => { expect(logger.getLogs()).toMatchInlineSnapshot(` [DBG] GET https://ap.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [ERR] Error: getaddrinfo ENOTFOUND ap.localhost.noop [ERR] GET https://ap.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed 
(666ms). getaddrinfo ENOTFOUND ap.localhost.noop `); }); @@ -36,10 +35,8 @@ test('HTTP with retries and system', async () => { expect(logger.getLogs()).toMatchInlineSnapshot(` [DBG] GET https://ap.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) - [ERR] Error: getaddrinfo ENOTFOUND ap.localhost.noop [DBG] GET https://ap.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). getaddrinfo ENOTFOUND ap.localhost.noop [DBG] GET https://ap.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (2/2) - [ERR] Error: getaddrinfo ENOTFOUND ap.localhost.noop [ERR] GET https://ap.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). getaddrinfo ENOTFOUND ap.localhost.noop `); }); diff --git a/packages/libraries/core/tests/info.spec.ts b/packages/libraries/core/tests/info.spec.ts index 2608637b2b4..5679803d7ce 100644 --- a/packages/libraries/core/tests/info.spec.ts +++ b/packages/libraries/core/tests/info.spec.ts @@ -23,7 +23,7 @@ test('should not leak the exception', async () => { .then(() => 'OK') .catch(() => 'ERROR'); - expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`[hive][info] Error`)); + expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`[hive][info]Error`)); expect(result).toBe('OK'); }); @@ -80,7 +80,7 @@ test('should use selfHosting.graphqlEndpoint if provided', async () => { .then(() => 'OK') .catch(() => 'ERROR'); - expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive][info] Token details`)); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive][info]Token details`)); expect(logger.info).toHaveBeenCalledWith(expect.stringMatching(/Token name: \s+ My Token/)); expect(logger.info).toHaveBeenCalledWith( expect.stringMatching(/Organization: \s+ Org \s+ http:\/\/localhost\/org-id/), diff --git a/packages/libraries/core/tests/reporting.spec.ts 
b/packages/libraries/core/tests/reporting.spec.ts index 1596c1c2803..2a9a350f051 100644 --- a/packages/libraries/core/tests/reporting.spec.ts +++ b/packages/libraries/core/tests/reporting.spec.ts @@ -49,11 +49,10 @@ test('should not leak the exception', async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [ERR] [hive][reporting] Error: connect ECONNREFUSED 127.0.0.1:55404 - [ERR] [hive][reporting] at TCPConnectWrap.afterConnect [as oncomplete] (node:net:666:666) - [DBG] [hive][reporting] POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). connect ECONNREFUSED 127.0.0.1:55404 + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] Error: connect ECONNREFUSED 127.0.0.1:55404 + [DBG] [hive][reporting]POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). connect ECONNREFUSED 127.0.0.1:55404 `); }); @@ -124,10 +123,10 @@ test('should send data to Hive', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Published schema + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [INF] [hive][reporting]Published schema `); }); @@ -197,10 +196,10 @@ test('should send data to Hive (deprecated endpoint)', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Published schema + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] [hive][reporting]Published schema `); expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); @@ -272,10 +271,10 @@ test('should send data to app.graphql-hive.com/graphql by default', async () => http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Published schema + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [INF] [hive][reporting]Published schema `); expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); @@ -345,13 +344,13 @@ test('should send data to Hive immediately', async () => { `), }); - expect(logger.getLogs()).toMatchInlineSnapshot(`[INF] [hive][reporting] Publish schema`); + expect(logger.getLogs()).toMatchInlineSnapshot(`[INF] [hive][reporting]Publish schema`); logger.clear(); await waitFor(50); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Successfully published schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] [hive][reporting]Successfully published schema `); expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); expect(body.variables.input.author).toBe(author); @@ -362,9 +361,9 @@ test('should send data to Hive immediately', async () => { await waitFor(100); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Successfully published schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [INF] [hive][reporting]Successfully published schema `); await hive.dispose(); @@ -432,10 +431,10 @@ test('should send original schema of a federated (v1) service', async () => { await hive.dispose(); const logs = logger.getLogs(); expect(logs).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Published schema + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] [hive][reporting]Published schema `); http.done(); }); @@ -501,10 +500,10 @@ test('should send original schema of a federated (v2) service', async () => { await hive.dispose(); const logs = logger.getLogs(); expect(logs).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting] Published schema + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [INF] [hive][reporting]Published schema `); http.done(); }); @@ -562,10 +561,10 @@ test('should display SchemaPublishMissingServiceError', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [ERR] [hive][reporting] Failed to report schema: Service name is not defined + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [ERR] [hive][reporting]Failed to report schema: Service name is not defined `); }); @@ -623,10 +622,10 @@ test('should display SchemaPublishMissingUrlError', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [ERR] [hive][reporting] Failed to report schema: Service url is not defined + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [ERR] [hive][reporting]Failed to report schema: Service url is not defined `); expect(logger.getLogs()).toContain( @@ -676,10 +675,10 @@ test('retry on non-200', async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting] Publish schema - [DBG] [hive][reporting] POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting] Error: connect ECONNREFUSED ::1:80 - [DBG] [hive][reporting] Error: connect ECONNREFUSED 127.0.0.1:80 - [DBG] [hive][reporting] POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). + [INF] [hive][reporting]Publish schema + [DBG] [hive][reporting]POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] Error: connect ECONNREFUSED ::1:80 + [DBG] Error: connect ECONNREFUSED 127.0.0.1:80 + [DBG] [hive][reporting]POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). `); }); diff --git a/packages/libraries/core/tests/usage.spec.ts b/packages/libraries/core/tests/usage.spec.ts index dc49dee59c8..3944e384e37 100644 --- a/packages/libraries/core/tests/usage.spec.ts +++ b/packages/libraries/core/tests/usage.spec.ts @@ -165,11 +165,11 @@ test('should send data to Hive', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Disposing - [DBG] [hive][usage][agent] Sending report (queue 1) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent] Report sent! 
+ [DBG] [hive][usage][agent]Disposing + [DBG] [hive][usage][agent]Sending report (queue 1) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent]Report sent! `); // Map @@ -275,11 +275,11 @@ test('should send data to Hive (deprecated endpoint)', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Disposing - [DBG] [hive][usage][agent] Sending report (queue 1) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent] Report sent! + [DBG] [hive][usage][agent]Disposing + [DBG] [hive][usage][agent]Sending report (queue 1) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent]Report sent! `); // Map @@ -366,11 +366,11 @@ test('should not leak the exception', { retry: 3 }, async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Sending report (queue 1) - [DBG] [hive][usage][agent] POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) - [ERR] [hive][usage][agent] Error: getaddrinfo ENOTFOUND 404.localhost.noop - [DBG] [hive][usage][agent] POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). 
getaddrinfo ENOTFOUND 404.localhost.noop - [DBG] [hive][usage][agent] Disposing + [DBG] [hive][usage][agent]Sending report (queue 1) + [DBG] [hive][usage][agent]POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) + [DBG] Error: getaddrinfo ENOTFOUND 404.localhost.noop + [DBG] [hive][usage][agent]POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). getaddrinfo ENOTFOUND 404.localhost.noop + [DBG] [hive][usage][agent]Disposing `); }); @@ -536,11 +536,11 @@ test('should send data to Hive at least once when using atLeastOnceSampler', asy http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Disposing - [DBG] [hive][usage][agent] Sending report (queue 2) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent] Report sent! + [DBG] [hive][usage][agent]Disposing + [DBG] [hive][usage][agent]Sending report (queue 2) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent]Report sent! `); // Map @@ -640,11 +640,11 @@ test('should not send excluded operation name data to Hive', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Disposing - [DBG] [hive][usage][agent] Sending report (queue 2) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
- [DBG] [hive][usage][agent] Report sent! + [DBG] [hive][usage][agent]Disposing + [DBG] [hive][usage][agent]Sending report (queue 2) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent]Report sent! `); // Map @@ -741,10 +741,10 @@ test('retry on non-200', async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Sending report (queue 1) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) - [DBG] [hive][usage][agent] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed with status 500 (666ms): No no no - [DBG] [hive][usage][agent] Disposing + [DBG] [hive][usage][agent]Sending report (queue 1) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) + [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed with status 500 (666ms): No no no + [DBG] [hive][usage][agent]Disposing `); }); @@ -926,12 +926,12 @@ test('no debug property -> logger.debug is invoked', async ({ expect }) => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent] Disposing - [DBG] [hive][usage][agent] Sending immediately - [DBG] [hive][usage][agent] Sending report (queue 1) - [DBG] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent] Report sent! 
+ [DBG] [hive][usage][agent]Disposing + [DBG] [hive][usage][agent]Sending immediately + [DBG] [hive][usage][agent]Sending report (queue 1) + [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent]Report sent! `); }); @@ -975,7 +975,14 @@ test('debug: false -> logger.debug is not invoked', async ({ expect }) => { ); await hive.dispose(); - expect(logger.getLogs()).toMatchInlineSnapshot(``); + expect(logger.getLogs()).toMatchInlineSnapshot(` + [DBG] [hive][usage][agent]Disposing + [DBG] [hive][usage][agent]Sending immediately + [DBG] [hive][usage][agent]Sending report (queue 1) + [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] [hive][usage][agent]Report sent! + `); }); test('debug: true and missing logger.debug method -> logger.info is invoked (to cover legacy logger implementation)', async ({ @@ -1023,11 +1030,11 @@ test('debug: true and missing logger.debug method -> logger.info is invoked (to await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][usage][agent] Disposing - [INF] [hive][usage][agent] Sending immediately - [INF] [hive][usage][agent] Sending report (queue 1) - [INF] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [INF] [hive][usage][agent] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][usage][agent] Report sent! 
+ [INF] [hive][usage][agent]Disposing + [INF] [hive][usage][agent]Sending immediately + [INF] [hive][usage][agent]Sending report (queue 1) + [INF] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [INF] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] [hive][usage][agent]Report sent! `); }); diff --git a/packages/libraries/yoga/package.json b/packages/libraries/yoga/package.json index 2e3de272dcb..a6430cb6ced 100644 --- a/packages/libraries/yoga/package.json +++ b/packages/libraries/yoga/package.json @@ -48,6 +48,7 @@ }, "dependencies": { "@graphql-hive/core": "workspace:*", + "@graphql-hive/logger": "^1.0.9", "@graphql-yoga/plugin-persisted-operations": "^3.9.0" }, "devDependencies": { diff --git a/packages/libraries/yoga/src/index.ts b/packages/libraries/yoga/src/index.ts index d48efa98c6f..984d4ae2d91 100644 --- a/packages/libraries/yoga/src/index.ts +++ b/packages/libraries/yoga/src/index.ts @@ -9,6 +9,7 @@ import { isAsyncIterable, isHiveClient, } from '@graphql-hive/core'; +import { Logger } from '@graphql-hive/logger'; import { usePersistedOperations } from '@graphql-yoga/plugin-persisted-operations'; import { version } from './version.js'; @@ -49,9 +50,19 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin let hive: HiveClient; let yoga: YogaServer; + let onYogaInit: () => void; + let onYogaInitDefered: Promise | null = new Promise( + res => + (onYogaInit = () => { + res(); + onYogaInitDefered = null; + }), + ); + return { onYogaInit(payload) { yoga = payload.yoga; + onYogaInit(); }, onSchemaChange({ schema }) { hive.reportSchema({ schema }); @@ -179,13 +190,30 @@ export function useHive(clientOrOptions: HiveClient | HivePluginOptions): Plugin ? clientOrOptions : createHive({ ...clientOrOptions, + logger: + clientOrOptions.logger ?? 
+ new Logger({ + writers: [ + { + write(level, attrs, msg) { + level = level === 'trace' ? 'debug' : level; + if (!onYogaInitDefered) { + yoga.logger[level](msg, attrs); + + return; + } + // Defer logs until yoga instance is initialized + // Ideally, onPluginInit would provide us access to the logger instance + // See https://github.com/graphql-hive/graphql-yoga/issues/4048#issuecomment-3576258603 + void onYogaInitDefered.then(() => { + yoga?.logger[level](msg, attrs); + }); + }, + }, + ], + }), agent: clientOrOptions.agent ? { - logger: { - // Hive Plugin should respect the given Yoga logger - error: (...args) => yoga.logger.error(...args), - info: (...args) => yoga.logger.info(...args), - }, // Hive Plugin should respect the given FetchAPI, note that this is not `yoga.fetch` fetch: (...args) => yoga.fetchAPI.fetch(...args), ...clientOrOptions.agent, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5186565db61..5fd639a77a8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -513,6 +513,9 @@ importers: packages/libraries/core: dependencies: + '@graphql-hive/logger': + specifier: ^1.0.9 + version: 1.0.9 '@graphql-hive/signal': specifier: ^2.0.0 version: 2.0.0 @@ -613,6 +616,9 @@ importers: '@graphql-hive/core': specifier: workspace:* version: link:../core/dist + '@graphql-hive/logger': + specifier: ^1.0.9 + version: 1.0.9 '@graphql-yoga/plugin-persisted-operations': specifier: ^3.9.0 version: 3.9.0(@graphql-tools/utils@10.10.3(graphql@16.9.0))(graphql-yoga@5.13.3(graphql@16.9.0))(graphql@16.9.0) @@ -1705,6 +1711,21 @@ importers: specifier: 3.25.76 version: 3.25.76 + packages/services/workflows: + dependencies: + '@graphql-hive/logger': + specifier: 1.0.9 + version: 1.0.9 + '@openworkflow/backend-postgres': + specifier: 0.3.0 + version: 0.3.0(openworkflow@0.3.0) + openworkflow: + specifier: 0.3.0 + version: 0.3.0 + zod: + specifier: 3.25.76 + version: 3.25.76 + packages/web/app: devDependencies: '@date-fns/utc': @@ -7241,6 +7262,11 @@ packages: peerDependencies: 
'@opentelemetry/api': ^1.1.0 + '@openworkflow/backend-postgres@0.3.0': + resolution: {integrity: sha512-h7uE/+xrQpGpXeI0IaAy1Q+FN2SILIYX166R5kk47TleEYhRBF1JZ8jmZkmkqUapODxFp+BUZmTVmg3SctIIFg==} + peerDependencies: + openworkflow: ^0.3.0 + '@pagefind/darwin-arm64@1.3.0': resolution: {integrity: sha512-365BEGl6ChOsauRjyVpBjXybflXAOvoMROw3TucAROHIcdBvXk9/2AmEvGFU0r75+vdQI4LJdJdpH4Y6Yqaj4A==} cpu: [arm64] @@ -15706,6 +15732,10 @@ packages: resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} engines: {node: '>=0.10'} + openworkflow@0.3.0: + resolution: {integrity: sha512-eP3W7bvmcdllRZp3Xawh0iB2VKR4eyUML5D2yi87f2GDyFcrKMHCddM1tVxUgjaXBYa6zpTeJasbcSgrVTRsAQ==} + engines: {node: '>=20'} + optionator@0.9.3: resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==} engines: {node: '>= 0.8.0'} @@ -16244,6 +16274,10 @@ packages: postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} + postgres@3.4.7: + resolution: {integrity: sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw==} + engines: {node: '>=12'} + prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -19361,8 +19395,8 @@ snapshots: dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sso-oidc': 3.596.0(@aws-sdk/client-sts@3.596.0) - '@aws-sdk/client-sts': 3.596.0 + '@aws-sdk/client-sso-oidc': 3.596.0 + '@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0) '@aws-sdk/core': 3.592.0 '@aws-sdk/credential-provider-node': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0) '@aws-sdk/middleware-host-header': 3.577.0 @@ -19514,11 +19548,11 @@ snapshots: 
transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso-oidc@3.596.0(@aws-sdk/client-sts@3.596.0)': + '@aws-sdk/client-sso-oidc@3.596.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.596.0 + '@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0) '@aws-sdk/core': 3.592.0 '@aws-sdk/credential-provider-node': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0) '@aws-sdk/middleware-host-header': 3.577.0 @@ -19557,7 +19591,6 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.8.1 transitivePeerDependencies: - - '@aws-sdk/client-sts' - aws-crt '@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0)': @@ -19777,11 +19810,11 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.596.0': + '@aws-sdk/client-sts@3.596.0(@aws-sdk/client-sso-oidc@3.596.0)': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sso-oidc': 3.596.0(@aws-sdk/client-sts@3.596.0) + '@aws-sdk/client-sso-oidc': 3.596.0 '@aws-sdk/core': 3.592.0 '@aws-sdk/credential-provider-node': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0) '@aws-sdk/middleware-host-header': 3.577.0 @@ -19820,6 +19853,7 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.8.1 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/client-sts@3.723.0': @@ -20051,7 +20085,7 @@ snapshots: '@aws-sdk/credential-provider-ini@3.596.0(@aws-sdk/client-sso-oidc@3.596.0)(@aws-sdk/client-sts@3.596.0)': dependencies: - '@aws-sdk/client-sts': 3.596.0 + '@aws-sdk/client-sts': 3.596.0(@aws-sdk/client-sso-oidc@3.596.0) '@aws-sdk/credential-provider-env': 3.587.0 '@aws-sdk/credential-provider-http': 3.596.0 '@aws-sdk/credential-provider-process': 3.587.0 @@ -20298,7 +20332,7 @@ snapshots: '@aws-sdk/credential-provider-web-identity@3.587.0(@aws-sdk/client-sts@3.596.0)': dependencies: - '@aws-sdk/client-sts': 3.596.0 + '@aws-sdk/client-sts': 
3.596.0(@aws-sdk/client-sso-oidc@3.596.0) '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.1.11 '@smithy/types': 3.7.2 @@ -20687,7 +20721,7 @@ snapshots: '@aws-sdk/token-providers@3.587.0(@aws-sdk/client-sso-oidc@3.596.0)': dependencies: - '@aws-sdk/client-sso-oidc': 3.596.0(@aws-sdk/client-sts@3.596.0) + '@aws-sdk/client-sso-oidc': 3.596.0 '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.1.11 '@smithy/shared-ini-file-loader': 3.1.12 @@ -27358,6 +27392,11 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0) + '@openworkflow/backend-postgres@0.3.0(openworkflow@0.3.0)': + dependencies: + openworkflow: 0.3.0 + postgres: 3.4.7 + '@pagefind/darwin-arm64@1.3.0': optional: true @@ -38132,6 +38171,8 @@ snapshots: opentracing@0.14.7: {} + openworkflow@0.3.0: {} + optionator@0.9.3: dependencies: '@aashutoshrathi/word-wrap': 1.2.6 @@ -38706,6 +38747,8 @@ snapshots: postgres-range@1.1.3: {} + postgres@3.4.7: {} + prelude-ls@1.2.1: {} prettier-plugin-pkg@0.18.0(prettier@3.4.2): From c278810c74636e6f9ee5acfa9b60a445e62852e3 Mon Sep 17 00:00:00 2001 From: Laurin Quast Date: Tue, 25 Nov 2025 18:02:30 +0100 Subject: [PATCH 6/7] update changeset --- .changeset/big-pigs-help.md | 40 ++++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/.changeset/big-pigs-help.md b/.changeset/big-pigs-help.md index bbc9b819b19..bfe0fa428c8 100644 --- a/.changeset/big-pigs-help.md +++ b/.changeset/big-pigs-help.md @@ -9,9 +9,11 @@ Add support for providing a logger object via `HivePluginOptions`. 
It is possible to provide the following options: -- **`'error'`** log errors -- **`'info'`** log errors and informal logs -- **`'debug'`** log errors, informal and debug logs +- **'trace'** +- **'debug'** +- **'info'** default +- **'warn'** +- **'error'** ```ts import { createHive } from '@graphql-hive/core' @@ -21,23 +23,23 @@ const client = createHive({ }) ``` -In addition to that, it is also possible to provide a error logging instance, where you can -customize how logs are forwarded. +In addition to that, it is also possible to provide a Hive Logger instance, that allows more control over how you want to log and forward logs. ```ts import { createHive } from '@graphql-hive/core' +import { Logger } from '@graphql-hive/logger' const client = createHive({ - logger: { - info() {}, - error() {}, - debug() {} - } + logger: new Logger() }) ``` -Deprecates the `HivePluginOptions.debug` option. Instead, please provide a logger with a `debug` -method. +Head to our [Hive Logger documentation](https://the-guild.dev/graphql/hive/docs/logger) to learn more. + +___ + +**The `HivePluginOptions.debug` option is now deprecated.** Instead, please provide the option `debug` +instead for the logger. ```diff import { createHive } from '@graphql-hive/core' @@ -48,17 +50,17 @@ method. }) ``` -Deprecate the `HivePluginOptions.agent.logger` option. Instead, please provide +**Note**: If the `logger` property is provided, the `debug` option is ignored. + +___ + +**The `HivePluginOptions.agent.logger` option is now deprecated.** Instead, please provide `HivePluginOptions.logger`. ```diff import { createHive } from '@graphql-hive/core' - const logger = { - info() {}, - error() {}, - debug() {}, - }; + const logger = new Logger() const client = createHive({ agent: { @@ -67,3 +69,5 @@ Deprecate the `HivePluginOptions.agent.logger` option. Instead, please provide + logger, }) ``` + +**Note**: If both options are provided, the `agent` option is ignored. 
From fc321d0d3513a7f609c2b23b5f4291cd0952c219 Mon Sep 17 00:00:00 2001 From: Laurin Quast Date: Wed, 26 Nov 2025 11:06:25 +0100 Subject: [PATCH 7/7] use object prefix --- .../libraries/apollo/tests/apollo.spec.ts | 13 +- packages/libraries/core/src/client/agent.ts | 13 +- packages/libraries/core/src/client/client.ts | 26 +-- .../libraries/core/src/client/reporting.ts | 2 +- packages/libraries/core/src/client/usage.ts | 2 +- packages/libraries/core/tests/enabled.spec.ts | 2 +- packages/libraries/core/tests/info.spec.ts | 6 +- .../libraries/core/tests/reporting.spec.ts | 82 +++---- packages/libraries/core/tests/test-utils.ts | 21 +- packages/libraries/core/tests/usage.spec.ts | 202 ++++++++++++++---- packages/libraries/yoga/tests/yoga.spec.ts | 12 +- 11 files changed, 237 insertions(+), 144 deletions(-) diff --git a/packages/libraries/apollo/tests/apollo.spec.ts b/packages/libraries/apollo/tests/apollo.spec.ts index 48db56c073c..06064eae9a4 100644 --- a/packages/libraries/apollo/tests/apollo.spec.ts +++ b/packages/libraries/apollo/tests/apollo.spec.ts @@ -13,6 +13,7 @@ import { startStandaloneServer } from '@apollo/server/standalone'; import { expressMiddleware } from '@as-integrations/express4'; import { http } from '@graphql-hive/core'; import { makeExecutableSchema } from '@graphql-tools/schema'; +import { createHiveTestingLogger } from '../../core/tests/test-utils'; import { createHive, useHive } from '../src'; function createLogger() { @@ -60,11 +61,9 @@ function handleProcess() { }; } -test('should not interrupt the process', async () => { - const logger = { - error: vi.fn(), - info: vi.fn(), - }; +test('should not interrupt the process', async ({ expect }) => { + const logger = createHiveTestingLogger(); + const clean = handleProcess(); const apollo = new ApolloServer({ typeDefs, @@ -102,9 +101,7 @@ test('should not interrupt the process', async () => { await waitFor(200); await apollo.stop(); clean(); - 
expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][info]')); - expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][usage]')); - expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('[hive][reporting]')); + expect(logger.getLogs()).toContain(`[DBG] Disposing`); }, 1_000); test('should capture client name and version headers', async () => { diff --git a/packages/libraries/core/src/client/agent.ts b/packages/libraries/core/src/client/agent.ts index aaf974751c9..1d939a212ec 100644 --- a/packages/libraries/core/src/client/agent.ts +++ b/packages/libraries/core/src/client/agent.ts @@ -123,13 +123,12 @@ export function createAgent( ? null : pluginOptions.circuitBreaker, }; - const logger = chooseLogger(pluginOptions.logger).child('[agent]'); + const logger = chooseLogger(pluginOptions.logger).child({ module: 'hive-agent' }); let circuitBreaker: CircuitBreakerInterface< Parameters, ReturnType >; - const breakerLogger = logger.child('[circuit breaker]'); const enabled = options.enabled !== false; let timeoutID: ReturnType | null = null; @@ -270,14 +269,12 @@ export function createAgent( circuitBreaker = circuitBreakerInstance; circuitBreakerInstance.on('open', () => - breakerLogger.error('circuit opened - backend seems unreachable.'), + logger.error('circuit opened - backend seems unreachable.'), ); circuitBreakerInstance.on('halfOpen', () => - breakerLogger.info('circuit half open - testing backend connectivity'), - ); - circuitBreakerInstance.on('close', () => - breakerLogger.info('circuit closed - backend recovered '), + logger.info('circuit half open - testing backend connectivity'), ); + circuitBreakerInstance.on('close', () => logger.info('circuit closed - backend recovered ')); } else { circuitBreaker = { getSignal() { @@ -293,7 +290,7 @@ export function createAgent( return await circuitBreaker.fire(...args); } catch (err: unknown) { if (err instanceof Error && 'code' in err && err.code === 'EOPENBREAKER') { - 
breakerLogger.info('circuit open - sending report skipped'); + logger.info('circuit open - sending report skipped'); return null; } diff --git a/packages/libraries/core/src/client/client.ts b/packages/libraries/core/src/client/client.ts index 314e9ef27c9..143f48eed81 100644 --- a/packages/libraries/core/src/client/client.ts +++ b/packages/libraries/core/src/client/client.ts @@ -28,7 +28,7 @@ function resolveLoggerFromConfigOptions(options: HivePluginOptions): Logger { } export function createHive(options: HivePluginOptions): HiveClient { - const logger = resolveLoggerFromConfigOptions(options).child('[hive]'); + const logger = resolveLoggerFromConfigOptions(options).child({ module: 'hive' }); let enabled = options.enabled ?? true; if (enabled === false && !options.experimental__persistedDocuments) { @@ -75,8 +75,6 @@ export function createHive(options: HivePluginOptions): HiveClient { ? options.printTokenInfo === true || (!!options.debug && options.printTokenInfo !== false) : false; - const infoLogger = logger.child('[info]'); - const info = printTokenInfo ? 
async () => { try { @@ -120,7 +118,7 @@ export function createHive(options: HivePluginOptions): HiveClient { } `; - infoLogger.info('Fetching token details...'); + logger.info('Fetching token details...'); const clientVersionForDetails = options.agent?.version || version; const response = await http.post( @@ -139,7 +137,7 @@ export function createHive(options: HivePluginOptions): HiveClient { }, timeout: 30_000, fetchImplementation: options?.agent?.fetch, - logger: infoLogger, + logger, }, ); @@ -171,7 +169,7 @@ export function createHive(options: HivePluginOptions): HiveClient { const projectUrl = `${organizationUrl}/${project.slug}`; const targetUrl = `${projectUrl}/${target.slug}`; - infoLogger.info( + logger.info( [ 'Token details', '', @@ -187,21 +185,17 @@ export function createHive(options: HivePluginOptions): HiveClient { ].join('\n'), ); } else if (result.data?.tokenInfo.message) { - infoLogger.error(`Token not found. Reason: ${result.data?.tokenInfo.message}`); - infoLogger.info( - `How to create a token? https://docs.graphql-hive.com/features/tokens`, - ); + logger.error(`Token not found. Reason: ${result.data?.tokenInfo.message}`); + logger.info(`How to create a token? https://docs.graphql-hive.com/features/tokens`); } else { - infoLogger.error(`${result.errors![0].message}`); - infoLogger.info( - `How to create a token? https://docs.graphql-hive.com/features/tokens`, - ); + logger.error(`${result.errors![0].message}`); + logger.info(`How to create a token? https://docs.graphql-hive.com/features/tokens`); } } else { - infoLogger.error(`Error ${response.status}: ${response.statusText}`); + logger.error(`Error ${response.status}: ${response.statusText}`); } } catch (error) { - infoLogger.error(`Error ${(error as Error)?.message ?? error}`); + logger.error(`Error ${(error as Error)?.message ?? 
error}`); } } : () => {}; diff --git a/packages/libraries/core/src/client/reporting.ts b/packages/libraries/core/src/client/reporting.ts index 078be999a02..751c6939149 100644 --- a/packages/libraries/core/src/client/reporting.ts +++ b/packages/libraries/core/src/client/reporting.ts @@ -30,7 +30,7 @@ export function createReporting(pluginOptions: HiveInternalPluginOptions): Schem const token = pluginOptions.token; const selfHostingOptions = pluginOptions.selfHosting; const reportingOptions = pluginOptions.reporting; - const logger = pluginOptions.logger.child('[reporting]'); + const logger = pluginOptions.logger.child({ module: 'hive-reporting' }); logIf( typeof reportingOptions.author !== 'string' || reportingOptions.author.length === 0, diff --git a/packages/libraries/core/src/client/usage.ts b/packages/libraries/core/src/client/usage.ts index 2e59495e046..cbdaf910ab9 100644 --- a/packages/libraries/core/src/client/usage.ts +++ b/packages/libraries/core/src/client/usage.ts @@ -73,7 +73,7 @@ export function createUsage(pluginOptions: HiveInternalPluginOptions): UsageColl const options = typeof pluginOptions.usage === 'boolean' ? ({} as HiveUsagePluginOptions) : pluginOptions.usage; const selfHostingOptions = pluginOptions.selfHosting; - const logger = pluginOptions.logger.child('[usage]'); + const logger = pluginOptions.logger.child({ module: 'hive-usage' }); const collector = memo(createCollector, arg => arg.schema); const excludeSet = new Set(options.exclude ?? 
[]); diff --git a/packages/libraries/core/tests/enabled.spec.ts b/packages/libraries/core/tests/enabled.spec.ts index 616d1a3b850..280870b2c9b 100644 --- a/packages/libraries/core/tests/enabled.spec.ts +++ b/packages/libraries/core/tests/enabled.spec.ts @@ -19,7 +19,7 @@ test("should log that it's not enabled", async () => { .then(() => 'OK') .catch(() => 'ERROR'); - expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive]Plugin is not enabled.`)); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`Plugin is not enabled.`)); expect(result).toBe('OK'); }); diff --git a/packages/libraries/core/tests/info.spec.ts b/packages/libraries/core/tests/info.spec.ts index 5679803d7ce..bd408914b9d 100644 --- a/packages/libraries/core/tests/info.spec.ts +++ b/packages/libraries/core/tests/info.spec.ts @@ -23,7 +23,9 @@ test('should not leak the exception', async () => { .then(() => 'OK') .catch(() => 'ERROR'); - expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`[hive][info]Error`)); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining(`Error Unexpected HTTP error.`), + ); expect(result).toBe('OK'); }); @@ -80,7 +82,7 @@ test('should use selfHosting.graphqlEndpoint if provided', async () => { .then(() => 'OK') .catch(() => 'ERROR'); - expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`[hive][info]Token details`)); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining(`Token details`)); expect(logger.info).toHaveBeenCalledWith(expect.stringMatching(/Token name: \s+ My Token/)); expect(logger.info).toHaveBeenCalledWith( expect.stringMatching(/Organization: \s+ Org \s+ http:\/\/localhost\/org-id/), diff --git a/packages/libraries/core/tests/reporting.spec.ts b/packages/libraries/core/tests/reporting.spec.ts index 2a9a350f051..05668c502a6 100644 --- a/packages/libraries/core/tests/reporting.spec.ts +++ b/packages/libraries/core/tests/reporting.spec.ts @@ -49,10 +49,10 @@ test('should not 
leak the exception', async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [INF] Publish schema + [DBG] POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) [DBG] Error: connect ECONNREFUSED 127.0.0.1:55404 - [DBG] [hive][reporting]POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). connect ECONNREFUSED 127.0.0.1:55404 + [DBG] POST http://127.0.0.1:55404 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). connect ECONNREFUSED 127.0.0.1:55404 `); }); @@ -123,10 +123,10 @@ test('should send data to Hive', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting]Published schema + [INF] Publish schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Published schema `); }); @@ -196,10 +196,10 @@ test('should send data to Hive (deprecated endpoint)', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
- [INF] [hive][reporting]Published schema + [INF] Publish schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Published schema `); expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); @@ -271,10 +271,10 @@ test('should send data to app.graphql-hive.com/graphql by default', async () => http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting]Published schema + [INF] Publish schema + [DBG] POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST https://app.graphql-hive.com/graphql (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Published schema `); expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); @@ -344,13 +344,13 @@ test('should send data to Hive immediately', async () => { `), }); - expect(logger.getLogs()).toMatchInlineSnapshot(`[INF] [hive][reporting]Publish schema`); + expect(logger.getLogs()).toMatchInlineSnapshot(`[INF] Publish schema`); logger.clear(); await waitFor(50); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
- [INF] [hive][reporting]Successfully published schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Successfully published schema `); expect(body.variables.input.sdl).toBe(`type Query{foo:String}`); expect(body.variables.input.author).toBe(author); @@ -361,9 +361,9 @@ test('should send data to Hive immediately', async () => { await waitFor(100); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting]Successfully published schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Successfully published schema `); await hive.dispose(); @@ -431,10 +431,10 @@ test('should send original schema of a federated (v1) service', async () => { await hive.dispose(); const logs = logger.getLogs(); expect(logs).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting]Published schema + [INF] Publish schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [INF] Published schema `); http.done(); }); @@ -500,10 +500,10 @@ test('should send original schema of a federated (v2) service', async () => { await hive.dispose(); const logs = logger.getLogs(); expect(logs).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][reporting]Published schema + [INF] Publish schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Published schema `); http.done(); }); @@ -561,10 +561,10 @@ test('should display SchemaPublishMissingServiceError', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [ERR] [hive][reporting]Failed to report schema: Service name is not defined + [INF] Publish schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). 
+ [ERR] Failed to report schema: Service name is not defined `); }); @@ -622,10 +622,10 @@ test('should display SchemaPublishMissingUrlError', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) - [DBG] [hive][reporting]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [ERR] [hive][reporting]Failed to report schema: Service url is not defined + [INF] Publish schema + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [ERR] Failed to report schema: Service url is not defined `); expect(logger.getLogs()).toContain( @@ -675,10 +675,10 @@ test('retry on non-200', async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][reporting]Publish schema - [DBG] [hive][reporting]POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) + [INF] Publish schema + [DBG] POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/6) [DBG] Error: connect ECONNREFUSED ::1:80 [DBG] Error: connect ECONNREFUSED 127.0.0.1:80 - [DBG] [hive][reporting]POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). + [DBG] POST http://localhost/registry (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). 
`); }); diff --git a/packages/libraries/core/tests/test-utils.ts b/packages/libraries/core/tests/test-utils.ts index d69eb0797ee..cf233590631 100644 --- a/packages/libraries/core/tests/test-utils.ts +++ b/packages/libraries/core/tests/test-utils.ts @@ -9,15 +9,7 @@ function getLogLines(calls: Array>) { return calls.map(log => { let msg: string; if (typeof log[1] === 'string') { - msg = maskRequestId( - log[1] - // Replace milliseconds with static value - .replace(/\(\d{1,4}ms\)/, '(666ms)') - // Replace stack trace line numbers with static value - .replace(/\(node:net:\d+:\d+\)/, '(node:net:666:666)') - .replace(/\(node:dns:\d+:\d+\)/, '(node:dns:666:666)'), - // request UUIDsu - ); + msg = normalizeLogMessage(log[1]); } else { msg = String(log[1]); } @@ -48,6 +40,17 @@ export function maskRequestId(errorMessage: string) { ); } +export function normalizeLogMessage(msg: string) { + return maskRequestId( + msg + // Replace milliseconds with static value + .replace(/\(\d{1,4}ms\)/, '(666ms)') + // Replace stack trace line numbers with static value + .replace(/\(node:net:\d+:\d+\)/, '(node:net:666:666)') + .replace(/\(node:dns:\d+:\d+\)/, '(node:dns:666:666)'), + ); +} + export function fastFetchError(input: URL | RequestInfo, _init?: RequestInit) { let url: URL; if (typeof input === 'string') { diff --git a/packages/libraries/core/tests/usage.spec.ts b/packages/libraries/core/tests/usage.spec.ts index 3944e384e37..8cc5c49c42e 100644 --- a/packages/libraries/core/tests/usage.spec.ts +++ b/packages/libraries/core/tests/usage.spec.ts @@ -1,10 +1,16 @@ import { buildSchema, parse } from 'graphql'; import nock from 'nock'; +import { Logger, MemoryLogWriter } from '@graphql-hive/logger'; import { createHive } from '../src/client/client'; import { atLeastOnceSampler } from '../src/client/samplers'; import type { Report } from '../src/client/usage'; import { version } from '../src/version'; -import { createHiveTestingLogger, fastFetchError, waitFor } from './test-utils'; 
+import { + createHiveTestingLogger, + fastFetchError, + normalizeLogMessage, + waitFor, +} from './test-utils'; const headers = { 'Content-Type': 'application/json', @@ -165,11 +171,11 @@ test('should send data to Hive', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Disposing - [DBG] [hive][usage][agent]Sending report (queue 1) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent]Report sent! + [DBG] Disposing + [DBG] Sending report (queue 1) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] Report sent! `); // Map @@ -275,11 +281,11 @@ test('should send data to Hive (deprecated endpoint)', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Disposing - [DBG] [hive][usage][agent]Sending report (queue 1) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent]Report sent! + [DBG] Disposing + [DBG] Sending report (queue 1) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] Report sent! 
`); // Map @@ -366,11 +372,11 @@ test('should not leak the exception', { retry: 3 }, async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Sending report (queue 1) - [DBG] [hive][usage][agent]POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) + [DBG] Sending report (queue 1) + [DBG] POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) [DBG] Error: getaddrinfo ENOTFOUND 404.localhost.noop - [DBG] [hive][usage][agent]POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). getaddrinfo ENOTFOUND 404.localhost.noop - [DBG] [hive][usage][agent]Disposing + [DBG] POST http://404.localhost.noop (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed (666ms). getaddrinfo ENOTFOUND 404.localhost.noop + [DBG] Disposing `); }); @@ -536,11 +542,11 @@ test('should send data to Hive at least once when using atLeastOnceSampler', asy http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Disposing - [DBG] [hive][usage][agent]Sending report (queue 2) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent]Report sent! + [DBG] Disposing + [DBG] Sending report (queue 2) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] Report sent! 
`); // Map @@ -640,11 +646,11 @@ test('should not send excluded operation name data to Hive', async () => { http.done(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Disposing - [DBG] [hive][usage][agent]Sending report (queue 2) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent]Report sent! + [DBG] Disposing + [DBG] Sending report (queue 2) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] Report sent! `); // Map @@ -741,10 +747,10 @@ test('retry on non-200', async () => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Sending report (queue 1) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) - [DBG] [hive][usage][agent]POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed with status 500 (666ms): No no no - [DBG] [hive][usage][agent]Disposing + [DBG] Sending report (queue 1) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) Attempt (1/2) + [DBG] POST http://localhost/200 (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) failed with status 500 (666ms): No no no + [DBG] Disposing `); }); @@ -926,12 +932,12 @@ test('no debug property -> logger.debug is invoked', async ({ expect }) => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Disposing - [DBG] [hive][usage][agent]Sending immediately - [DBG] [hive][usage][agent]Sending report (queue 1) - [DBG] [hive][usage][agent]POST 
http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent]Report sent! + [DBG] Disposing + [DBG] Sending immediately + [DBG] Sending report (queue 1) + [DBG] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] Report sent! `); }); @@ -976,12 +982,12 @@ test('debug: false -> logger.debug is not invoked', async ({ expect }) => { await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [DBG] [hive][usage][agent]Disposing - [DBG] [hive][usage][agent]Sending immediately - [DBG] [hive][usage][agent]Sending report (queue 1) - [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [DBG] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [DBG] [hive][usage][agent]Report sent! + [DBG] Disposing + [DBG] Sending immediately + [DBG] Sending report (queue 1) + [DBG] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [DBG] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [DBG] Report sent! 
`); }); @@ -1030,11 +1036,113 @@ test('debug: true and missing logger.debug method -> logger.info is invoked (to await hive.dispose(); expect(logger.getLogs()).toMatchInlineSnapshot(` - [INF] [hive][usage][agent]Disposing - [INF] [hive][usage][agent]Sending immediately - [INF] [hive][usage][agent]Sending report (queue 1) - [INF] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) - [INF] [hive][usage][agent]POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). - [INF] [hive][usage][agent]Report sent! + [INF] Disposing + [INF] Sending immediately + [INF] Sending report (queue 1) + [INF] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) + [INF] POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms). + [INF] Report sent! 
+ `); +}); + +test('new logger option', async () => { + const logWriter = new MemoryLogWriter(); + const logger = new Logger({ + writers: [ + { + write(level, attrs, msg) { + if (msg) { + msg = normalizeLogMessage(msg); + } + + logWriter.write(level, attrs, msg); + }, + }, + ], + level: 'debug', + }); + + const token = 'hvu1/brrrrt'; + + const hive = createHive({ + enabled: true, + logger, + agent: { + timeout: 500, + maxRetries: 0, + sendInterval: 1, + maxSize: 1, + async fetch() { + return new Response('', { + status: 200, + }); + }, + }, + token, + selfHosting: { + graphqlEndpoint: 'http://localhost:2/graphql', + applicationUrl: 'http://localhost:1', + usageEndpoint: 'http://localhost', + }, + usage: { + target: 'the-guild/graphql-hive/staging', + }, + }); + + await hive.collectUsage()( + { + schema, + document: op, + operationName: 'asd', + }, + {}, + ); + + await hive.dispose(); + expect(logWriter.logs).toMatchInlineSnapshot(` + [ + { + attrs: { + module: hive-agent, + }, + level: debug, + msg: Disposing, + }, + { + attrs: { + module: hive-agent, + }, + level: debug, + msg: Sending immediately, + }, + { + attrs: { + module: hive-agent, + }, + level: debug, + msg: Sending report (queue 1), + }, + { + attrs: { + module: hive-agent, + }, + level: debug, + msg: POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx), + }, + { + attrs: { + module: hive-agent, + }, + level: debug, + msg: POST http://localhost/the-guild/graphql-hive/staging (x-request-id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) succeeded with status 200 (666ms)., + }, + { + attrs: { + module: hive-agent, + }, + level: debug, + msg: Report sent!, + }, + ] `); }); diff --git a/packages/libraries/yoga/tests/yoga.spec.ts b/packages/libraries/yoga/tests/yoga.spec.ts index 0c2ced1b3dc..f76f1e088e9 100644 --- a/packages/libraries/yoga/tests/yoga.spec.ts +++ b/packages/libraries/yoga/tests/yoga.spec.ts @@ -101,20 +101,12 @@ test('should not interrupt the 
process', async () => { ); await waitFor(50); - const reportingLogs = logger - .getLogs() - .split(`\n`) - .filter(item => item.includes(`[hive][reporting]`)) - .join(`\n`); + const reportingLogs = logger.getLogs().split(`\n`).join(`\n`); expect(reportingLogs).includes('Publish schema'); expect(reportingLogs).includes('POST http://404.localhost.noop/registry'); - const usageLogs = logger - .getLogs() - .split(`\n`) - .filter(item => item.includes(`[hive][usage]`)) - .join(`\n`); + const usageLogs = logger.getLogs().split(`\n`).join(`\n`); expect(usageLogs).includes('POST http://404.localhost.noop/usage');