bugfix reading arrays (#234)
* bugfix reading arrays

* add tests

Co-authored-by: Jon Alm Eriksen <jon.alm.eriksen@novelda.com>
jonalm and Jon Alm Eriksen committed Sep 16, 2021
1 parent 967e3c6 commit cd468ef
Showing 2 changed files with 30 additions and 1 deletion.
src/FlatBuffers/table.jl: 1 addition, 1 deletion
@@ -103,7 +103,7 @@ end
function Array{T}(t::Table, off) where {T}
    a = vector(t, off)
    S = T <: Table ? UOffsetT : T <: Struct ? NTuple{structsizeof(T), UInt8} : T
-   ptr = convert(Ptr{S}, pointer(bytes(t), pos(t) + a + 1))
+   ptr = convert(Ptr{S}, pointer(bytes(t), a + 1))
    data = unsafe_wrap(Base.Array, ptr, vectorlen(t, off))
    return Array{T, S, typeof(t)}(t, a, data)
end
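
The one-line fix removes a double application of the table offset: as the corrected line implies, vector(t, off) already returns a buffer-absolute (0-based) position for the start of the vector data, so adding pos(t) again pointed the wrapped array past the real data whenever the root table did not sit at the start of the buffer. Below is a standalone sketch of the corrected pointer arithmetic; the byte buffer and the value of a are made up for illustration and are not Arrow internals.

# Toy sketch: a is the 0-based, buffer-absolute start of the vector data,
# so the 1-based index into the byte buffer is simply a + 1. Re-adding the
# table position would read past the data whenever that position is nonzero.
bytes = UInt8[0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00, 0x02,0x00,0x00,0x00]
a = 4                                            # vector data starts at byte offset 4
ptr = convert(Ptr{Int32}, pointer(bytes, a + 1))
data = unsafe_wrap(Base.Array, ptr, 2)           # wraps the two Int32 values in place
@assert data == Int32[1, 2]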
test/runtests.jl: 29 additions, 0 deletions
@@ -409,6 +409,35 @@ tbl = Arrow.Table(Arrow.tobuffer(cols))
@test tbl.k1 == cols.k1
@test tbl.k2 == cols.k2

# PR 234
# bugfix parsing primitive arrays
buf = [
    0x14,0x00,0x00,0x00,0x00,0x00,0x0e,0x00,0x14,0x00,0x00,0x00,0x10,0x00,0x0c,0x00,0x08,
    0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x38,0x00,
    0x00,0x00,0x38,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
    0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
    0x03,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x00,
    0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
]

struct TestData <: Arrow.FlatBuffers.Table
    bytes::Vector{UInt8}
    pos::Base.Int
end

function Base.getproperty(x::TestData, field::Symbol)
    if field === :DataInt32
        o = Arrow.FlatBuffers.offset(x, 12)
        o != 0 && return Arrow.FlatBuffers.Array{Int32}(x, o)
    else
        @warn "field $field not supported"
    end
end

d = Arrow.FlatBuffers.getrootas(TestData, buf, 0);
@test d.DataInt32 == UInt32[1,2,3]


end # @testset "misc"

end
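
The regression test builds a raw FlatBuffers buffer whose root table does not sit at offset zero, which exercises the case the old pos(t) + a arithmetic handled incorrectly. A quick sanity check of that layout (a sketch assuming the usual little-endian FlatBuffers encoding, where the first UOffsetT in the buffer is the root table offset):

root = reinterpret(UInt32, buf[1:4])[1]   # first UOffsetT, read on a little-endian host
@assert root == 0x14                      # root table starts 20 bytes into the buffer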
