Skip to content
This repository has been archived by the owner on Oct 3, 2023. It is now read-only.

Commit

Permalink
Add support for Binary Format serializer for TagMap (#431)
Browse files Browse the repository at this point in the history
* Add support for Binary Format serializer for TagMap

* fix review comments

1. Add comments for MSB and REST constants
2. Remove VERSION_ID_OFFSET -> VERSION_ID_INDEX and b -> currentByte
3. Add backwards-compatible check on versionId

* Add tests for the varint encoding/decoding
  • Loading branch information
mayurkale22 committed Mar 21, 2019
1 parent d3e2d7c commit 04f1fee
Show file tree
Hide file tree
Showing 4 changed files with 374 additions and 0 deletions.
150 changes: 150 additions & 0 deletions packages/opencensus-core/src/tags/propagation/binary-serializer.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@
/**
* Copyright 2019, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
* This module contains the functions for serializing and deserializing
* TagMap (TagContext) with the binary format. It allows tags to propagate
* across requests.
*
* <p>OpenCensus tag context encoding:
*
* <ul>
* <li>Tags are encoded in a single byte sequence. The version 0 format is:
* <li>{@code <version_id><encoded_tags>}
* <li>{@code <version_id> -> a single byte, value 0}
* <li>{@code <encoded_tags> -> (<tag_field_id><tag_encoding>)*}
* <ul>
* <li>{@code <tag_field_id>} -> a single byte, value 0
* <li>{@code <tag_encoding>}:
* <ul>
* <li>{@code <tag_key_len><tag_key><tag_val_len><tag_val>}
* <ul>
* <li>{@code <tag_key_len>} -> varint encoded integer
* <li>{@code <tag_key>} -> tag_key_len bytes comprising tag name
* <li>{@code <tag_val_len>} -> varint encoded integer
* <li>{@code <tag_val>} -> tag_val_len bytes comprising tag value
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* </ul>
*/

import {TagMap} from '../tag-map';
import {TagKey, TagValue} from '../types';
import {DecodeVarint, EncodeVarint} from './variant-encoding';

// This size limit only applies to the bytes representing tag keys and values.
export const TAG_MAP_SERIALIZED_SIZE_LIMIT = 8192;

// Character encoding used when turning decoded bytes back into strings.
const ENCODING = 'utf8';
// Serialization format version, emitted as the first byte of every payload.
const VERSION_ID = 0;
// Field id that precedes every encoded tag; only value 0 is defined so far.
const TAG_FIELD_ID = 0;
// Position of the version byte within a serialized buffer.
const VERSION_ID_INDEX = 0;

/**
 * Serializes a given TagMap to the on-the-wire binary format.
 * @param tagMap The TagMap to serialize.
 * @returns A Buffer: one version byte followed by the encoded tag records.
 * @throws Error when the combined key/value character count exceeds
 *     TAG_MAP_SERIALIZED_SIZE_LIMIT.
 */
export function serializeBinary(tagMap: TagMap): Buffer {
  // Every payload starts with the single-byte version id.
  const bytes: number[] = [VERSION_ID];
  let charCount = 0;
  tagMap.tags.forEach((tagValue: TagValue, tagKey: TagKey) => {
    charCount += tagKey.name.length + tagValue.value.length;
    encodeTag(tagKey, tagValue, bytes);
  });

  // The limit is checked against the character count of keys and values
  // (not the encoded byte count), after all tags have been encoded.
  if (charCount > TAG_MAP_SERIALIZED_SIZE_LIMIT) {
    throw new Error(`Size of TagMap exceeds the maximum serialized size ${
        TAG_MAP_SERIALIZED_SIZE_LIMIT}`);
  }
  return Buffer.from(bytes);
}

/**
 * Deserializes input to TagMap based on the binary format standard.
 * @param buffer The binary buffer to deserialize.
 * @returns The decoded TagMap.
 * @throws Error on an empty buffer or a version id newer than VERSION_ID.
 */
export function deserializeBinary(buffer: Buffer): TagMap {
  if (buffer.length === 0) {
    throw new Error('Input buffer can not be empty.');
  }
  const versionId = buffer.readInt8(VERSION_ID_INDEX);
  // Older (smaller) version ids stay accepted for backwards compatibility;
  // only payloads from an unknown, newer format version are rejected.
  if (versionId <= VERSION_ID) {
    return parseTags(buffer);
  }
  throw new Error(`Wrong Version ID: ${
      versionId}. Currently supports version up to: ${VERSION_ID}`);
}

/**
 * Appends one encoded tag record to `byteArray`.
 * Record layout: <tag_field_id><tag_key_len><tag_key><tag_val_len><tag_val>.
 */
function encodeTag(tagKey: TagKey, tagValue: TagValue, byteArray: number[]) {
  byteArray.push(TAG_FIELD_ID);
  for (const text of [tagKey.name, tagValue.value]) {
    encodeString(text, byteArray);
  }
}

/**
 * Appends a varint length prefix followed by one byte per character of
 * `input` to `byteArray`, and returns `byteArray`.
 *
 * NOTE(review): the length is in UTF-16 code units and each char code is
 * later truncated to one byte by Buffer.from, so this only round-trips for
 * ASCII input — confirm tag keys/values are ASCII.
 */
function encodeString(input: string, byteArray: number[]) {
  byteArray.push(...EncodeVarint(input.length));
  for (const ch of input.split('')) {
    byteArray.push(unicode(ch));
  }
  return byteArray;
}

/**
 * Parses the encoded tag records that follow the version byte into a TagMap.
 *
 * Record layout (see module doc): <tag_field_id><key_len><key><val_len><val>.
 * NOTE(review): the cursor arithmetic below assumes every varint length
 * prefix occupies exactly one byte (decodeString reads text at offset + 1)
 * and that each string's character count equals its byte count — i.e.
 * ASCII-only tags shorter than 128 chars. Confirm inputs satisfy this.
 */
function parseTags(buffer: Buffer): TagMap {
  const tags = new TagMap();
  const limit = buffer.length;
  let totalChars = 0;  // running key+value char count, checked against limit
  let currentIndex = 1;  // start just past the version byte at index 0

  while (currentIndex < limit) {
    const fieldId = buffer.readInt8(currentIndex);
    if (fieldId > TAG_FIELD_ID) {
      // Stop parsing at the first unknown field ID, since there is no way to
      // know its length.
      break;
    }
    currentIndex += 1;  // consume the field id byte
    const key = decodeString(buffer, currentIndex);
    currentIndex += key.length;
    totalChars += key.length;

    currentIndex += 1;  // consume the key's one-byte length prefix
    const val = decodeString(buffer, currentIndex);
    currentIndex += val.length;
    totalChars += val.length;

    currentIndex += 1;  // consume the value's one-byte length prefix
    if (totalChars > TAG_MAP_SERIALIZED_SIZE_LIMIT) {
      throw new Error(`Size of TagMap exceeds the maximum serialized size ${
          TAG_MAP_SERIALIZED_SIZE_LIMIT}`);
    } else {
      tags.set({name: key}, {value: val});
    }
  }
  return tags;
}

/**
 * Decodes the length-prefixed string starting at `offset`.
 * The varint at `offset` gives the string length; the text itself begins
 * immediately after (the length prefix is assumed to occupy one byte).
 */
function decodeString(buffer: Buffer, offset: number): string {
  const numBytes = DecodeVarint(buffer, offset);
  const start = offset + 1;
  return buffer.toString(ENCODING, start, start + numBytes);
}

/** Returns the first UTF-16 code unit of `x` (NaN for an empty string). */
function unicode(x: string) {
  const codeUnit = x.charCodeAt(0);
  return codeUnit;
}
58 changes: 58 additions & 0 deletions packages/opencensus-core/src/tags/propagation/variant-encoding.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/**
* Copyright 2019, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// The MSB (most significant bit) indicates whether we've reached the end of
// the number. Set means there is more than one byte in the varint.
const MSB = 0x80;

// The REST indicates the lower 7 bits of each byte.
const REST = 0x7F;

/**
 * Encodes a number in a variable-length encoding, 7 bits per byte.
 * @param value The input number, treated as an unsigned 32-bit integer.
 * @returns The varint bytes, least-significant 7-bit group first.
 */
export function EncodeVarint(value: number) {
  const ret: number[] = [];
  do {
    // Take the low 7 bits; set MSB on every byte except the last one.
    const bits = value & REST;
    value >>>= 7;  // unsigned shift, so values up to 2^32 - 1 terminate
    const b = bits + ((value !== 0) ? MSB : 0);
    ret.push(b);
  } while (value !== 0);
  return ret;
}

/**
 * Decodes a varint from buffer.
 * @param buffer The source buffer.
 * @param offset The offset within buffer.
 * @returns The decoded non-negative integer.
 * @throws Error if the varint continues past 5 bytes (shift >= 32).
 */
export function DecodeVarint(buffer: Buffer, offset: number) {
  let ret = 0;
  let shift = 0;
  let currentByte;
  let counter = offset;
  do {
    if (shift >= 32) {
      throw new Error('varint too long');
    }
    // readUInt8 keeps the byte in [0, 255]. (readInt8 happened to work too
    // because the bitmasks below operate on the two's-complement bits, but
    // unsigned is the honest type for a raw varint byte.)
    currentByte = buffer.readUInt8(counter++);
    ret |= (currentByte & REST) << shift;
    shift += 7;
  } while ((currentByte & MSB) !== 0);
  // `|=` produces a *signed* 32-bit result, so decoded values >= 2^31 came
  // out negative and broke DecodeVarint(EncodeVarint(n)) === n. `>>> 0`
  // reinterprets the bits as unsigned, covering the full uint32 range.
  return ret >>> 0;
}
116 changes: 116 additions & 0 deletions packages/opencensus-core/test/test-binary-serializer.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
/**
* Copyright 2019, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import * as assert from 'assert';
import {deserializeBinary, serializeBinary, TAG_MAP_SERIALIZED_SIZE_LIMIT} from '../src/tags/propagation/binary-serializer';
import {TagMap} from '../src/tags/tag-map';

// Tag keys and values shared by the test cases below.
const K1 = {name: 'k1'};
const K2 = {name: 'k2'};
const K3 = {name: 'k3'};
const K4 = {name: 'k4'};

const V1 = {value: 'v1'};
const V2 = {value: 'v2'};
const V3 = {value: 'v3'};
const V4 = {value: 'v4'};

describe('Binary Format Serializer', () => {
  const emptyTagMap = new TagMap();

  const singleTagMap = new TagMap();
  singleTagMap.set(K1, V1);

  const multipleTagMap = new TagMap();
  multipleTagMap.set(K1, V1);
  multipleTagMap.set(K2, V2);
  multipleTagMap.set(K3, V3);
  multipleTagMap.set(K4, V4);

  describe('serializeBinary', () => {
    it('should serialize empty tag map', () => {
      const binary = serializeBinary(emptyTagMap);
      // deepStrictEqual instead of the deprecated, loosely-typed deepEqual:
      // it also checks that both sides share the TagMap prototype.
      assert.deepStrictEqual(deserializeBinary(binary), emptyTagMap);
    });

    it('should serialize with one tag map', () => {
      const binary = serializeBinary(singleTagMap);
      assert.deepStrictEqual(deserializeBinary(binary), singleTagMap);
    });

    it('should serialize with multiple tag', () => {
      const binary = serializeBinary(multipleTagMap);
      assert.deepStrictEqual(deserializeBinary(binary), multipleTagMap);
    });

    it('should throw an error when exceeds the max serialized size', () => {
      const tags = new TagMap();
      for (let i = 0; i < TAG_MAP_SERIALIZED_SIZE_LIMIT / 8 - 1; i++) {
        // Each tag will be with format {key : "0123", value : "0123"}, so the
        // length of it is 8.
        const pad = '0000'.substring(0, 4 - `${i}`.length);
        const str = `${pad}${i}`;
        tags.set({name: str}, {value: str});
      }
      // The last tag will be of size 9, so the total size of the TagMap
      // (8193) will be one byte more than limit.
      tags.set({name: 'last'}, {value: 'last1'});

      assert.throws(() => {
        serializeBinary(tags);
      }, /^Error: Size of TagMap exceeds the maximum serialized size 8192/);
    });
  });

  describe('deserializeBinary', () => {
    it('should throw an error when invalid tagKey', () => {
      const buff =
          Buffer.from([0x01, 0x00, 0x02, 0x6b, 0x31, 0x02, 0x76, 0x31]);
      assert.throws(() => {
        deserializeBinary(buff);
      }, /^Error: Wrong Version ID: 1. Currently supports version up to: 0/);
    });

    it('should stop parsing at first unknown field ID', () => {
      const expectedTags = new TagMap();
      expectedTags.set(K1, V1);

      const buff = Buffer.from([
        0x00, 0x00, 0x02, 0x6b, 0x31, 0x02, 0x76, 0x31, 0x01, 0x02, 0x6b, 0x32,
        0x02, 0x76, 0x32
      ]);
      const tags = deserializeBinary(buff);
      // strictEqual / deepStrictEqual replace the deprecated loose variants.
      assert.strictEqual(tags.tags.size, 1);
      assert.deepStrictEqual(tags, expectedTags);
    });
  });
});
50 changes: 50 additions & 0 deletions packages/opencensus-core/test/test-variant-encoding.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/**
* Copyright 2019, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import * as assert from 'assert';
import {DecodeVarint, EncodeVarint} from '../src/tags/propagation/variant-encoding';

// Values spanning one to five varint bytes.
const testCases =
    [0, 1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000];

/** Returns a uniformly distributed random integer in [0, range). */
function randint(range: number) {
  const scaled = Math.random() * range;
  return Math.floor(scaled);
}

describe('variant encoding', () => {
  it('should encode single byte', () => {
    const expected = randint(127);
    // deepStrictEqual replaces the deprecated, loosely-typed deepEqual.
    assert.deepStrictEqual(EncodeVarint(expected), [expected]);
  });

  it('should encode/decode multiple bytes', () => {
    const num = 300;
    const expectedBytes = [0xAC, 0x02];  // [172, 2]

    const variant = EncodeVarint(num);
    assert.deepStrictEqual(variant, expectedBytes);
    const buff = Buffer.from(variant);
    assert.strictEqual(DecodeVarint(buff, 0), num);
  });

  for (const testCase of testCases) {
    it(`should encode and decode ${testCase} correctly`, () => {
      const variant = EncodeVarint(testCase);
      const buff = Buffer.from(variant);
      assert.strictEqual(DecodeVarint(buff, 0), testCase);
    });
  }
});

0 comments on commit 04f1fee

Please sign in to comment.