kafka: add missing unit tests (#14195)
Signed-off-by: Adam Kotwasinski <adam.kotwasinski@gmail.com>
adamkotwasinski committed Nov 28, 2020
1 parent 41de291 commit 39bf75f
Showing 2 changed files with 24 additions and 2 deletions.
13 changes: 13 additions & 0 deletions test/extensions/filters/network/kafka/serialization_test.cc
@@ -101,10 +101,12 @@ TEST(VarUInt32Deserializer, ShouldDeserializeEdgeValues) {
     Buffer::OwnedImpl buffer;

     // when
+    const uint32_t expected_size = encoder.computeCompactSize(values[i]);
     const uint32_t written = encoder.encodeCompact(values[i], buffer);

     // then
     ASSERT_EQ(written, i + 1);
+    ASSERT_EQ(written, expected_size);
     absl::string_view data = {getRawData(buffer), 1024};
     // All bits in lower bytes need to be set.
     for (auto j = 0; j + 1 < i; ++j) {
@@ -434,6 +436,17 @@ TEST(NullableCompactArrayDeserializer, ShouldConsumeNullArray) {
       NullableCompactArrayDeserializer<Int32Deserializer>>(value);
 }

+TEST(NullableCompactArrayDeserializer, ShouldConsumeCorrectAmountOfDataForLargeInput) {
+  std::vector<int32_t> raw;
+  raw.reserve(4096);
+  for (int32_t i = 0; i < 4096; ++i) {
+    raw.push_back(i);
+  }
+  const NullableArray<int32_t> value{raw};
+  serializeCompactThenDeserializeAndCheckEquality<
+      NullableCompactArrayDeserializer<Int32Deserializer>>(value);
+}
+
 // Tagged fields.

 TEST(TaggedFieldDeserializer, ShouldConsumeCorrectAmountOfData) {
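For context on the sizes these new assertions compare: Kafka's compact encoding stores an unsigned integer as a varint, 7 bits per byte with the high bit marking a continuation byte, so a value needs one byte per 7-bit group. A minimal standalone sketch of that contract, assuming that encoding scheme (the function names mirror the encoder's methods for illustration; this is not Envoy's actual implementation):

#include <cstdint>
#include <vector>

// One byte per 7-bit group of the value.
uint32_t computeCompactSize(uint32_t value) {
  uint32_t size = 1;
  while (value >= 0x80) {
    value >>= 7;
    ++size;
  }
  return size;
}

// Writes the varint and returns the number of bytes emitted; the test above
// asserts that this return value matches the precomputed size.
uint32_t encodeCompact(uint32_t value, std::vector<uint8_t>& out) {
  uint32_t written = 0;
  while (value >= 0x80) {
    out.push_back(static_cast<uint8_t>(value | 0x80)); // low 7 bits + continuation bit
    value >>= 7;
    ++written;
  }
  out.push_back(static_cast<uint8_t>(value)); // final byte, high bit clear
  return written + 1;
}

Under this scheme the edge values in the test (the largest values that fit in i + 1 bytes) are exactly the ones whose lower encoded bytes have all bits set, which is what the loop after the assertions verifies.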
13 changes: 11 additions & 2 deletions test/extensions/filters/network/kafka/serialization_utilities.h
@@ -114,12 +114,16 @@ void serializeCompactThenDeserializeAndCheckEqualityInOneGo(AT expected) {

   Buffer::OwnedImpl buffer;
   EncodingContext encoder{-1};
+  const uint32_t expected_written_size = encoder.computeCompactSize(expected);
   const uint32_t written = encoder.encodeCompact(expected, buffer);
+  ASSERT_EQ(written, expected_written_size);
   // Insert garbage after serialized payload.
   const uint32_t garbage_size = encoder.encode(Bytes(10000), buffer);

+  const char* raw_buffer_ptr =
+      reinterpret_cast<const char*>(buffer.linearize(written + garbage_size));
   // Tell parser that there is more data, it should never consume more than written.
-  const absl::string_view orig_data = {getRawData(buffer), written + garbage_size};
+  const absl::string_view orig_data = {raw_buffer_ptr, written + garbage_size};
   absl::string_view data = orig_data;

   // when
@@ -147,11 +151,16 @@ void serializeCompactThenDeserializeAndCheckEqualityWithChunks(AT expected) {

   Buffer::OwnedImpl buffer;
   EncodingContext encoder{-1};
+  const uint32_t expected_written_size = encoder.computeCompactSize(expected);
   const uint32_t written = encoder.encodeCompact(expected, buffer);
+  ASSERT_EQ(written, expected_written_size);
   // Insert garbage after serialized payload.
   const uint32_t garbage_size = encoder.encode(Bytes(10000), buffer);

-  const absl::string_view orig_data = {getRawData(buffer), written + garbage_size};
+  const char* raw_buffer_ptr =
+      reinterpret_cast<const char*>(buffer.linearize(written + garbage_size));
+  // Tell parser that there is more data, it should never consume more than written.
+  const absl::string_view orig_data = {raw_buffer_ptr, written + garbage_size};

   // when
   absl::string_view data = orig_data;
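Two things happen in the hunks above. First, the new assertions pin encodeCompact to computeCompactSize, so a size-precomputation bug fails fast. Second, getRawData is replaced by buffer.linearize(written + garbage_size), which coalesces the first written + garbage_size bytes into one contiguous slice before the view is taken; with the new large (4096-element) input the buffer can span multiple slices, and a view over only the first slice would not cover the whole payload. A hedged sketch of the guard the helper then applies (the deserializer name and the feed behavior are assumptions for illustration, not Envoy's API):

// Payload followed by garbage; the deserializer may see all of it ...
absl::string_view data = orig_data;
HypotheticalDeserializer testee;
testee.feed(data); // assumed to consume bytes from the front of `data`
// ... but must stop exactly at the payload boundary, never inside the garbage.
ASSERT_EQ(orig_data.size() - data.size(), written);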
