Skip to content

Commit

Permalink
Merge pull request #1973 from stripe/richardm-usage
Browse files Browse the repository at this point in the history
Add `usage` to X-Stripe-Client-Telemetry
  • Loading branch information
richardm-stripe committed Dec 14, 2023
2 parents a31d032 + 7db57c5 commit ba1226a
Show file tree
Hide file tree
Showing 8 changed files with 253 additions and 150 deletions.
10 changes: 6 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ const stripe = Stripe('sk_test_...', {
| `host` | `'api.stripe.com'` | Host that requests are made to. |
| `port` | 443 | Port that requests are made to. |
| `protocol` | `'https'` | `'https'` or `'http'`. `http` is never appropriate for sending requests to Stripe servers, and we strongly discourage `http`, even in local testing scenarios, as this can result in your credentials being transmitted over an insecure channel. |
| `telemetry` | `true` | Allow Stripe to send latency [telemetry](#request-latency-telemetry). |
| `telemetry` | `true` | Allow Stripe to send [telemetry](#telemetry). |

> **Note**
> Both `maxNetworkRetries` and `timeout` can be overridden on a per-request basis.
Expand Down Expand Up @@ -477,10 +477,12 @@ const allNewCustomers = await stripe.customers
.autoPagingToArray({limit: 10000});
```

### Request latency telemetry
### Telemetry

By default, the library sends request latency telemetry to Stripe. These
numbers help Stripe improve the overall latency of its API for all users.
By default, the library sends telemetry to Stripe about request latency
and feature usage. This data helps Stripe improve the overall latency of
its API for all users, and improve popular features.

You can disable this behavior if you prefer:

Expand Down
43 changes: 35 additions & 8 deletions src/RequestSender.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ export class RequestSender {
*/
_streamingResponseHandler(
requestEvent: RequestEvent,
usage: Array<string>,
callback: RequestCallback
): (res: HttpClientResponseInterface) => RequestCallbackReturn {
return (res: HttpClientResponseInterface): RequestCallbackReturn => {
Expand All @@ -102,7 +103,8 @@ export class RequestSender {
this._stripe._emitter.emit('response', responseEvent);
this._recordRequestMetrics(
this._getRequestId(headers),
responseEvent.elapsed
responseEvent.elapsed,
usage
);
};

Expand All @@ -122,7 +124,11 @@ export class RequestSender {
* parses the JSON and returns it (i.e. passes it to the callback) if there
* is no "error" field. Otherwise constructs/passes an appropriate Error.
*/
_jsonResponseHandler(requestEvent: RequestEvent, callback: RequestCallback) {
_jsonResponseHandler(
requestEvent: RequestEvent,
usage: Array<string>,
callback: RequestCallback
) {
return (res: HttpClientResponseInterface): void => {
const headers = res.getHeaders();
const requestId = this._getRequestId(headers);
Expand Down Expand Up @@ -180,7 +186,7 @@ export class RequestSender {
)
.then(
(jsonResponse) => {
this._recordRequestMetrics(requestId, responseEvent.elapsed);
this._recordRequestMetrics(requestId, responseEvent.elapsed, usage);

// Expose raw response object.
const rawResponse = res.getRawResponse();
Expand Down Expand Up @@ -387,7 +393,11 @@ export class RequestSender {
}
}

_recordRequestMetrics(requestId: string, requestDurationMs: number): void {
_recordRequestMetrics(
requestId: string,
requestDurationMs: number,
usage: Array<string>
): void {
if (this._stripe.getTelemetryEnabled() && requestId) {
if (
this._stripe._prevRequestMetrics.length > this._maxBufferedRequestMetric
Expand All @@ -396,10 +406,18 @@ export class RequestSender {
'Request metrics buffer is full, dropping telemetry message.'
);
} else {
this._stripe._prevRequestMetrics.push({
const m: {
request_id: string;
request_duration_ms: number;
usage?: Array<string>;
} = {
request_id: requestId,
request_duration_ms: requestDurationMs,
});
};
if (usage && usage.length > 0) {
m.usage = usage;
}
this._stripe._prevRequestMetrics.push(m);
}
}
}
Expand All @@ -411,6 +429,7 @@ export class RequestSender {
data: RequestData,
auth: string | null,
options: RequestOptions = {},
usage: Array<string> = [],
callback: RequestCallback,
requestDataProcessor: RequestDataProcessor | null = null
): void {
Expand Down Expand Up @@ -488,9 +507,17 @@ export class RequestSender {
res.getHeaders()['retry-after']
);
} else if (options.streaming && res.getStatusCode() < 400) {
return this._streamingResponseHandler(requestEvent, callback)(res);
return this._streamingResponseHandler(
requestEvent,
usage,
callback
)(res);
} else {
return this._jsonResponseHandler(requestEvent, callback)(res);
return this._jsonResponseHandler(
requestEvent,
usage,
callback
)(res);
}
})
.catch((error: HttpClientResponseError) => {
Expand Down
3 changes: 3 additions & 0 deletions src/StripeResource.ts
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ StripeResource.prototype = {
): RequestOpts {
// Extract spec values with defaults.
const requestMethod = (spec.method || 'GET').toUpperCase();
const usage = spec.usage || [];
const urlParams = spec.urlParams || [];
const encode = spec.encode || ((data): RequestData => data);

Expand Down Expand Up @@ -190,6 +191,7 @@ StripeResource.prototype = {
host: host ?? null,
streaming,
settings: options.settings,
usage,
};
},

Expand Down Expand Up @@ -238,6 +240,7 @@ StripeResource.prototype = {
opts.bodyData,
opts.auth,
{headers, settings, streaming: opts.streaming},
opts.usage,
requestCallback,
this.requestDataProcessor?.bind(this)
);
Expand Down
4 changes: 4 additions & 0 deletions src/Types.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ export type MethodSpec = {
streaming?: boolean;
host?: string;
transformResponseData?: (response: HttpClientResponseInterface) => any;
usage?: Array<string>;
};
export type MultipartRequestData = RequestData | StreamingFile | BufferedFile;
export type RawErrorType =
Expand All @@ -49,6 +50,7 @@ export type RequestEvent = {
method?: string;
path?: string;
request_start_time: number;
usage: Array<string>;
};
export type RequestHeaders = Record<string, string | number | string[]>;
export type RequestOptions = {
Expand All @@ -66,6 +68,7 @@ export type RequestOpts = {
host: string | null;
streaming: boolean;
settings: RequestSettings;
usage: Array<string>;
};
export type RequestSettings = {timeout?: number; maxNetworkRetries?: number};
export type ResponseEvent = {
Expand Down Expand Up @@ -155,6 +158,7 @@ export type RequestSender = {
data: RequestData,
auth: string | null,
options: RequestOptions,
usage: Array<string>,
callback: RequestCallback,
requestDataProcessor: RequestDataProcessor | undefined
): void;
Expand Down
23 changes: 23 additions & 0 deletions test/StripeResource.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -262,4 +262,27 @@ describe('StripeResource', () => {
]);
});
});

describe('usage', () => {
  it('is passed to the request sender', (callback) => {
    // Build a resource subclass whose single method declares static usage tags.
    const ExtendedResource = StripeResource.extend({
      boop: stripeMethod({
        method: 'GET',
        fullPath: '/v1/widgets/{widget}/boop',
        usage: ['llama', 'bufo'],
      }),
    });
    const resource = new ExtendedResource(stripe);

    resource.boop('foo', {bar: 'baz'}, (err, res) => {
      if (err) {
        callback(err);
        return;
      }
      // The mock sender records the last request; its `usage` must carry
      // the tags declared on the method spec, unchanged and in order.
      const sentUsage = stripe._requestSender._stripe.LAST_REQUEST.usage;
      expect(sentUsage).to.deep.equal(['llama', 'bufo']);
      callback();
    });
  });
});
});
4 changes: 2 additions & 2 deletions test/autoPagination.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ describe('auto pagination', () => {

const mockStripe = getMockStripe(
{},
(_1, _2, path, _4, _5, _6, callback) => {
(_1, _2, path, _4, _5, _6, _7, callback) => {
paramsLog.push(path.slice(path.indexOf('?')));
callback(
null,
Expand Down Expand Up @@ -661,7 +661,7 @@ describe('auto pagination', () => {

const mockStripe = getMockStripe(
{},
(_1, _2, path, _4, _5, _6, callback) => {
(_1, _2, path, _4, _5, _6, _7, callback) => {
paramsLog.push(path.slice(path.indexOf('?')));

callback(
Expand Down
Loading

0 comments on commit ba1226a

Please sign in to comment.