Commit 304e75d
fix magic string alignment in file reader, add file reader tests
trxcllnt committed May 13, 2018 · 1 parent 402187e · commit 304e75d
Showing 2 changed files with 22 additions and 19 deletions.
8 changes: 4 additions & 4 deletions js/src/ipc/writer/binary.ts
@@ -19,7 +19,7 @@ import { Table } from '../../table';
 import { DenseUnionData } from '../../data';
 import { RecordBatch } from '../../recordbatch';
 import { VectorVisitor, TypeVisitor } from '../../visitor';
-import { MAGIC, PADDING, magicLength, magicAndPadding } from '../magic';
+import { MAGIC, magicLength, magicAndPadding } from '../magic';
 import { align, getBool, packBools, iterateBits } from '../../util/bit';
 import { Vector, UnionVector, DictionaryVector, NestedVector, ListVector } from '../../vector';
 import { BufferMetadata, FieldMetadata, Footer, FileBlock, Message, RecordBatchMetadata, DictionaryBatch } from '../metadata';
@@ -84,9 +84,9 @@ export function* serializeFile(table: Table) {
     yield buffer;

     // Last, yield the footer length + terminating magic arrow string (aligned)
-    buffer = new Uint8Array(align(magicAndPadding, 8));
-    new Uint32Array(buffer.buffer)[0] = metadataLength;
-    buffer.set(MAGIC, buffer.byteLength - PADDING);
+    buffer = new Uint8Array(magicAndPadding);
+    new DataView(buffer.buffer).setInt32(0, metadataLength, platformIsLittleEndian);
+    buffer.set(MAGIC, buffer.byteLength - magicLength);
     yield buffer;
 }
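The hunk above changes how the writer emits the trailing bytes of an Arrow file: instead of allocating an 8-byte-aligned buffer and placing the magic PADDING bytes from the end, it allocates exactly magicAndPadding bytes, writes the footer length as a little-endian int32 at offset 0, and places the magic magicLength bytes from the end. Below is a minimal sketch of that suffix layout, assuming the usual Arrow values of a 6-byte "ARROW1" magic and a 4-byte length prefix; the constants are spelled out for illustration rather than imported from '../magic'.

// Sketch only: the 10-byte suffix that ends an Arrow file.
const MAGIC = new Uint8Array([0x41, 0x52, 0x52, 0x4f, 0x57, 0x31]); // "ARROW1" (assumed value)
const magicLength = MAGIC.length;         // 6
const magicAndPadding = magicLength + 4;  // 4-byte length prefix + 6-byte magic = 10

function fileSuffix(footerLength: number): Uint8Array {
    const suffix = new Uint8Array(magicAndPadding);
    // The footer length occupies the 4 bytes immediately before the magic,
    // written as a little-endian int32.
    new DataView(suffix.buffer).setInt32(0, footerLength, true);
    // The magic string occupies the last 6 bytes of the file.
    suffix.set(MAGIC, suffix.byteLength - magicLength);
    return suffix;
}

// For example, a 456-byte footer yields the bytes C8 01 00 00 followed by "ARROW1".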

33 changes: 18 additions & 15 deletions js/test/integration/validate-tests.ts
@@ -119,7 +119,8 @@ describe(`Integration`, () => {
             describe(path.join(dir, name), () => {
                 testReaderIntegration(json, arrowBuffer);
                 testTableFromBuffersIntegration(json, arrowBuffer);
-                testTableToBuffersIntegration(json, arrowBuffer);
+                testTableToBuffersIntegration('file')(json, arrowBuffer);
+                testTableToBuffersIntegration('stream')(json, arrowBuffer);
             });
         }
     });
@@ -154,18 +155,20 @@ function testTableFromBuffersIntegration(jsonData: any, arrowBuffer: Uint8Array)
     });
 }

-function testTableToBuffersIntegration(jsonData: any, arrowBuffer: Uint8Array) {
-    test(`serializing json to binary reports the same values as the original binary arrow table`, () => {
-        expect.hasAssertions();
-        const fromJSON = Table.from(jsonData);
-        const serialized = fromJSON.serialize();
-        const jsonTable = Table.from(serialized);
-        const binaryTable = Table.from(arrowBuffer);
-        expect(jsonTable.length).toEqual(binaryTable.length);
-        expect(jsonTable.numCols).toEqual(binaryTable.numCols);
-        for (let i = -1, n = jsonTable.numCols; ++i < n;) {
-            (jsonTable.getColumnAt(i) as any).name = jsonTable.schema.fields[i].name;
-            (expect(jsonTable.getColumnAt(i)) as any).toEqualVector(binaryTable.getColumnAt(i));
-        }
-    });
+function testTableToBuffersIntegration(arrowFormat: 'stream' | 'file') {
+    return function testTableToBuffersIntegration(jsonData: any, arrowBuffer: Uint8Array) {
+        test(`serializing json to binary reports the same values as the original binary arrow table`, () => {
+            expect.hasAssertions();
+            const fromJSON = Table.from(jsonData);
+            const serialized = fromJSON.serialize('binary', arrowFormat === 'stream');
+            const jsonTable = Table.from(serialized);
+            const binaryTable = Table.from(arrowBuffer);
+            expect(jsonTable.length).toEqual(binaryTable.length);
+            expect(jsonTable.numCols).toEqual(binaryTable.numCols);
+            for (let i = -1, n = jsonTable.numCols; ++i < n;) {
+                (jsonTable.getColumnAt(i) as any).name = jsonTable.schema.fields[i].name;
+                (expect(jsonTable.getColumnAt(i)) as any).toEqualVector(binaryTable.getColumnAt(i));
+            }
+        });
+    }
 }
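The test change curries testTableToBuffersIntegration so the same round-trip assertions can be bound once per Arrow serialization format and then applied to every (json, buffer) fixture pair. A stripped-down, hypothetical sketch of that pattern follows; the names and body are illustrative, not the project's code.

type ArrowFormat = 'stream' | 'file';

// Bind the format first, then apply the resulting function to each fixture pair.
function makeRoundTripTest(arrowFormat: ArrowFormat) {
    return function roundTripTest(jsonData: any, arrowBuffer: Uint8Array) {
        const asStream = arrowFormat === 'stream';
        // ...serialize jsonData as the chosen format and compare it to arrowBuffer...
        console.log(`round-trip (${arrowFormat})`, asStream, arrowBuffer.byteLength, jsonData != null);
    };
}

// One bound test per format, mirroring the two calls added in the describe block.
makeRoundTripTest('file')({}, new Uint8Array(0));
makeRoundTripTest('stream')({}, new Uint8Array(0));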
