Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Major change news in Caesar.jl

# Changes in v0.13

- Standardize all Manifolds.jl representations to use `SciML/ArrayPartition` instead of `Manifold/ProductRepr`.
- Add `RobotOS / PyCall` based interface for writing bagfiles with `RosbagWriter`.
- Add `_PCL` export functions to go from `Caesar._PCL.PointCloud` out to `PCLPointCloud2` and `ROS.PointCloud2` types.
- Finally adding a NEWS.md to Caesar.jl.
4 changes: 3 additions & 1 deletion src/3rdParty/_PCL/entities/PCLTypes.jl
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ References
end

Base.convert(::Type{<:_PCL_POINTFIELD_FORMAT}, val::Integer) = (en=instances(_PCL_POINTFIELD_FORMAT); en[findfirst(Int.(en) .== Int.(convert(UInt8, val)))])
# Enum -> integer: return the enum's own numeric value, converted to the requested
# integer type.  Bugfix: the previous implementation returned the `findfirst` index
# into `instances(_PCL_POINTFIELD_FORMAT)` as an `Int`, which (a) violates the
# `convert(::Type{UInt8}, x)::UInt8` contract and (b) equals the serialized PCL value
# only when enum values happen to be the 1-based definition order.  Using `Int(ff)`
# is the exact inverse of the Integer -> enum convert above, which matches on `Int.(en)`.
Base.convert(::Type{I}, ff::_PCL_POINTFIELD_FORMAT) where {I<:Integer} = I(Int(ff))

struct asType{T} end

Expand Down Expand Up @@ -125,7 +126,8 @@ Base.@kwdef struct Header
""" The sequence number """
seq::UInt32 = UInt32(0)
""" A timestamp associated with the time when the data was acquired.
The value represents microseconds since 1970-01-01 00:00:00 (the UNIX epoch). """
The value represents microseconds since 1970-01-01 00:00:00 (the UNIX epoch).
Suggest: making this relative to UTC to account for timezones in future. """
stamp::UInt64 = UInt64(0)
""" Coordinate frame ID. """
frame_id::String = ""
Expand Down
100 changes: 90 additions & 10 deletions src/3rdParty/_PCL/services/PointCloud.jl
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,8 @@ Base.getindex(pc::PointCloud, i) = pc.points[i]
# Vector-like element assignment: forwards to the underlying `points` vector.
Base.setindex!(pc::PointCloud, pt::PointT, idx) = (pc.points[idx] = pt)
# Resize the underlying point storage (new slots are uninitialized, as per `resize!`).
Base.resize!(pc::PointCloud, s::Integer) = resize!(pc.points, s)

# Number of points currently stored in the cloud.
Base.length(pc::PointCloud) = length(pc.points)

## not very Julian translations from C++ above
## ==============================================================================================

Expand Down Expand Up @@ -146,7 +148,7 @@ function createMapping(T,msg_fields::AbstractVector{<:PointField}, field_map::Ms

# Coalesce adjacent fields into single copy where possible
if 1 < length(field_map)
# TODO check accending vs descending order
# TODO check ascending vs descending order
sort!(field_map, by = x->x.serialized_offset)

# something strange with how C++ does and skips the while loop, disabling the coalescing for now
Expand All @@ -167,7 +169,7 @@ function createMapping(T,msg_fields::AbstractVector{<:PointField}, field_map::Ms
# else
# i += 1
# j += 1

# _jmap = field_map[j]
# _jend = field_map[end]
# end
Expand All @@ -177,6 +179,14 @@ function createMapping(T,msg_fields::AbstractVector{<:PointField}, field_map::Ms

return field_map
end
# pc2.fields
# 6-element Vector{Caesar._PCL.PointField}:
# Caesar._PCL.PointField("x", 0x00000000, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("y", 0x00000004, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("z", 0x00000008, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("intensity", 0x00000010, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("timestamp", 0x00000018, Caesar._PCL._PCL_FLOAT64, 0x00000001)
# Caesar._PCL.PointField("ring", 0x00000020, Caesar._PCL._PCL_UINT16, 0x00000001)


# https://pointclouds.org/documentation/conversions_8h_source.html#l00166
Expand All @@ -193,13 +203,14 @@ function PointCloud(
cloudsize = msg.width*msg.height
# cloud_data = Vector{UInt8}(undef, cloudsize)

# NOTE assume all fields use the same data type
# FIXME cannot assume all fields use the same data type
# off script conversion for XYZ_ data only
datatype = asType{msg.fields[1].datatype}()
len = trunc(Int, length(msg.data)/field_map[1].size)
data_ = Vector{datatype}(undef, len)
read!(IOBuffer(msg.data), data_)
mat = reshape(data_, :, cloudsize)
# see createMapping above


# Check if we can copy adjacent points in a single memcpy. We can do so if there
Expand All @@ -214,7 +225,7 @@ function PointCloud(
error("copy of just one field_map not implemented yet")
else
# If not, memcpy each group of contiguous fields separately
@assert msg.height == 1 "only decoding msg.height=1 messages, update converter here."
@assert msg.height == 1 "only decoding msg::PCLPointCloud2.height=1 messages, update converter here."
for row in 1:msg.height
# TODO check might have an off by one error here
# row_data = row * msg.row_step + 1 # msg.data[(row-1) * msg.row_step]
Expand All @@ -223,12 +234,11 @@ function PointCloud(
# the slow way of building the point.data entry
ptdata = zeros(datatype, 4)
for (i,mapping) in enumerate(field_map)
@info "test" i mapping maxlog=3
midx = trunc(Int,mapping.serialized_offset/mapping.size) + 1
# TODO, why the weird index reversal?
# Copy individual points as columns from mat -- i.e. memcpy
# TODO copy only works if all elements have the same type -- suggestion, ptdata::ArrayPartition instead
ptdata[i] = mat[midx, col]
# @info "DO COPY" mapping
# memcpy (cloud_data + mapping.struct_offset, msg_data + mapping.serialized_offset, mapping.size);
# @info "copy" mapping.struct_offset mapping.serialized_offset mapping.size
end
pt = T(;data=SVector(ptdata...))
push!(cloud.points, pt)
Expand All @@ -241,16 +251,86 @@ function PointCloud(
end


"""
$SIGNATURES

Convert a pcl::PointCloud<T> object to a PCLPointCloud2 binary data blob.
In: cloud the input pcl::PointCloud<T>
Out: msg the resultant PCLPointCloud2 binary blob

DevNotes:
- TODO, remove hacks, e.g. fields [x,y,z,intensity,]-only.
"""
function PCLPointCloud2(cloud::PointCloud{T,P,R}; datatype = _PCL_FLOAT32) where {T,P,R}
  # Ease the user's burden on specifying width/height for unorganized datasets
  if (cloud.width == 0 && cloud.height == 0)
    height = 1
    width = length(cloud)
  else
    @assert (length(cloud) == cloud.width * cloud.height) "Input `cloud::PointCloud`'s width*height is not equal to length(cloud)"
    height = cloud.height
    width = cloud.width
  end

  # HACK: hard-coded field layout [x,y,z,intensity], each `count == 1` and of `datatype`
  fields = Vector{PointField}(undef, 4)
  fields[1] = PointField("x", UInt32(0), datatype, UInt32(1))
  fields[2] = PointField("y", UInt32(4), datatype, UInt32(1))
  fields[3] = PointField("z", UInt32(8), datatype, UInt32(1))
  fields[4] = PointField("intensity", UInt32(12), datatype, UInt32(1))
  _nflds = length(fields)

  # NOTE get the last possible byte location of all fields, assume no padding after last used byte
  _fend = fields[end]
  point_step = _fend.offset + sizeof(asType{_fend.datatype}()) # = sizeof (PointT);

  # flatten all point fields as binary
  # bugfix: buffer must hold ALL points (width*height); sizing by `width` alone
  # under-allocated for organized clouds (height > 1) and the loop below would
  # then write out of bounds.
  npts = length(cloud)
  data = Vector{UInt8}(undef, point_step*npts)
  for (i,pt) in enumerate(cloud.points)
    offset = point_step*(i-1) + 1
    # NOTE assume continuous data block in struct (all of same datatype, FIXME)
    data[offset:(offset-1+point_step)] .= reinterpret(UInt8, view(pt.data, 1:_nflds))
  end

  # TODO is_bigendian = ?
  return PCLPointCloud2(;
    header = cloud.header,
    height,
    width,
    fields,
    data,
    point_step,
    # bugfix: the payload above is packed at `point_step` stride, so the row
    # stride must be `point_step*width` (not `sizeof(T)*width`, which need not match).
    row_step = point_step*width,
    is_dense = cloud.is_dense,
  )
end


## =========================================================================================================
## Coordinate transformations using Manifolds.jl
## =========================================================================================================


# 2D, do similar or better for 3D
# FIXME, to optimize, this function will likely be slow
# TODO, consolidate with transformPointcloud(::ScatterAlign,..) function
function apply( M_::typeof(SpecialEuclidean(2)),
rPp::Union{<:ProductRepr,<:Manifolds.ArrayPartition},
pc::PointCloud{T} ) where T
rPp::Union{<:ProductRepr,<:Manifolds.ArrayPartition},
pc::PointCloud{T} ) where T
#

rTp = affine_matrix(M_, rPp)
Expand Down
58 changes: 49 additions & 9 deletions src/3rdParty/_PCL/services/ROSConversions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,16 @@

using ..RobotOS

@rosimport std_msgs.msg: Header
@rosimport sensor_msgs.msg: PointField
@rosimport sensor_msgs.msg: PointCloud2


# RobotOS.Time <-> UInt64 microseconds-since-epoch conversions.
# Bugfix: use exact integer div/mod arithmetic throughout; the previous float
# forms (`tm*1e-6`, `nsecs*1e-3`) with `trunc` can land one unit low near
# second/microsecond boundaries due to binary rounding of 1e-6 / 1e-3.
Base.convert(::Type{UInt64}, rost::RobotOS.Time) = UInt64(rost.secs)*1_000_000 + UInt64(rost.nsecs) ÷ 1000
Base.convert(::Type{RobotOS.Time}, tm::UInt64) = RobotOS.Time(Int32(tm ÷ 1_000_000), Int32((tm % 1_000_000)*1000))

# embedded test to avoid CI requiring all of ROS and PyCall
@assert convert(RobotOS.Time, convert(UInt64, RobotOS.Time(1646305718, 997857000))) == RobotOS.Time(1646305718, 997857000) "conversion to and from RobotOS.Time and UInt64 not consistent"

# Really odd constructor, strong assumption that user FIRST ran @rostypegen in Main BEFORE loading Caesar
# https://docs.ros.org/en/hydro/api/pcl_conversions/html/pcl__conversions_8h_source.html#l00208
Expand All @@ -23,16 +29,50 @@ function PCLPointCloud2(msg::Main.sensor_msgs.msg.PointCloud2)
pfs = PointField[PointField(;name=pf_.name,offset=pf_.offset,datatype=pf_.datatype,count=pf_.count) for pf_ in msg.fields]

endian = msg.is_bigendian ? _PCL_ENDIAN_BIG_BYTE : _PCL_ENDIAN_LITTLE_WORD
pc2 = PCLPointCloud2(;header,
height = msg.height,
width = msg.width,
fields = pfs,
data = msg.data,
is_bigendian= endian,
point_step = msg.point_step,
row_step = msg.row_step,
is_dense = UInt8(msg.is_dense) )
PCLPointCloud2(;header,
height = msg.height,
width = msg.width,
fields = pfs,
data = msg.data,
is_bigendian= endian,
point_step = msg.point_step,
row_step = msg.row_step,
is_dense = UInt8(msg.is_dense)
)
#
end

function toROSPointCloud2(pc2::PCLPointCloud2)
  # rebuild the std_msgs/Header from the PCL header fields
  hdr = Main.std_msgs.msg.Header();
  hdr.seq      = pc2.header.seq
  hdr.stamp    = convert(RobotOS.Time, pc2.header.stamp)
  hdr.frame_id = pc2.header.frame_id

  msg = Main.sensor_msgs.msg.PointCloud2();
  msg.header = hdr
  msg.height = pc2.height
  msg.width  = pc2.width

  # translate each Caesar._PCL.PointField into its sensor_msgs counterpart
  msg.fields = map(pc2.fields) do fld
    rosfld = Main.sensor_msgs.msg.PointField()
    rosfld.name     = fld.name
    rosfld.offset   = fld.offset
    rosfld.datatype = convert(UInt8, fld.datatype)
    rosfld.count    = fld.count
    rosfld
  end

  # remaining binary layout metadata and the raw payload
  msg.is_bigendian = pc2.is_bigendian == _PCL_ENDIAN_BIG_BYTE
  msg.point_step   = pc2.point_step
  msg.row_step     = pc2.row_step
  msg.is_dense     = pc2.is_dense
  msg.data         = pc2.data

  return msg
end

#
25 changes: 25 additions & 0 deletions src/ros/Utils/RosbagSubscriber.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@


export RosbagSubscriber, loop!, getROSPyMsgTimestamp, nanosecond2datetime
export RosbagWriter

## Load rosbag file parser

Expand All @@ -11,6 +12,30 @@ pushfirst!(PyVector(pyimport("sys")."path"), @__DIR__ )
rosbagReaderPy = pyimport("rosbagReader")
RosbagParser = rosbagReaderPy."RosbagParser"

# piggy back writer code here
writeRosbagPy = pyimport("rosbagWriter")

"""
    RosbagWriter(bagfilepath)

Write ROS messages to a bagfile via the rospy/rosbag Python infrastructure
(implemented in `rosbagWriter.py` alongside this file, bridged through PyCall).
NOTE(review): requires PyCall built against a Python with `rosbag` installed,
and `@rosimport`/`rostypegen` run in `Main` before use — confirm against the
RosbagSubscriber setup above.

```julia
# Link with ROSbag infrastructure via rospy
using Pkg
ENV["PYTHON"] = "/usr/bin/python3"
Pkg.build("PyCall")
using PyCall
using RobotOS
@rosimport std_msgs.msg: String
rostypegen()
using Caesar

bagwr = Caesar.RosbagWriter("/tmp/test.bag")
s = std_msgs.msg.StringMsg("test")
bagwr.write_message("/ch1", s)
bagwr.close()
```
"""
RosbagWriter = writeRosbagPy."RosbagWriter"

## Common handler approach

Expand Down
14 changes: 14 additions & 0 deletions src/ros/Utils/rosbagWriter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@

import rosbag
import sys

class RosbagWriter:
    """Thin wrapper around ``rosbag.Bag`` for writing messages to a bagfile.

    The bag is opened for writing immediately on construction; call
    :meth:`close` (or use the instance as a context manager) to flush and
    release the file handle.
    """

    def __init__(self, filename):
        # keep the path for reference; open the bag in write mode right away
        self.filename = filename
        self.bag = rosbag.Bag(filename, 'w')

    def write_message(self, channel, msg):
        """Write a single ROS message to the given channel (topic)."""
        # NOTE(review): no explicit timestamp/header is passed — rosbag supplies
        # one; confirm against rosbag.Bag.write if exact stamps matter.
        self.bag.write(channel, msg)

    def close(self):
        """Flush and close the underlying bagfile."""
        self.bag.close()

    # Context-manager support so `with RosbagWriter(path) as wr: ...`
    # guarantees the bag is closed even when the body raises.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.close()
        return False
20 changes: 20 additions & 0 deletions test/pcl/testPointCloud2.jl
Original file line number Diff line number Diff line change
Expand Up @@ -92,4 +92,24 @@ show(pc)
##
end


@testset "PandarXT test point cloud conversion test" begin
##

# NOTE(review): placeholder testset — no assertions yet.  The serialized sample
# referenced below is intended for a future PCLPointCloud2 -> PointCloud
# round-trip conversion test once the field-mapping converter stabilizes.
@warn "TODO, see testdata/_pandar_PCLPointCloud2.jldat which via `Serialization.serialize` of a `Caesar._PCL.PCLPointCloud2` object, at JL 1.7.3, CJL v0.13.1+"
# Field layout of the serialized sample, for reference:
# pc2.fields
# 6-element Vector{Caesar._PCL.PointField}:
# Caesar._PCL.PointField("x", 0x00000000, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("y", 0x00000004, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("z", 0x00000008, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("intensity", 0x00000010, Caesar._PCL._PCL_FLOAT32, 0x00000001)
# Caesar._PCL.PointField("timestamp", 0x00000018, Caesar._PCL._PCL_FLOAT64, 0x00000001)
# Caesar._PCL.PointField("ring", 0x00000020, Caesar._PCL._PCL_UINT16, 0x00000001)


##
end



#
Binary file added test/testdata/_pandar_PCLPointCloud2.jldat
Binary file not shown.