diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bf398ae2b..45cdc9d5b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,7 +7,7 @@ on: - develop - release** jobs: - test: + basic-functional: name: Julia ${{ matrix.version }} - ${{ matrix.arch }} - ${{ matrix.group }} - ${{ matrix.os }} runs-on: ${{ matrix.os }} env: @@ -16,8 +16,7 @@ jobs: fail-fast: false matrix: version: - - '1.6' - - '1.7' + - '1.8' - 'nightly' os: - ubuntu-latest @@ -25,7 +24,6 @@ jobs: - x64 group: - 'basic_functional_group' - - 'test_cases_group' steps: - uses: actions/checkout@v2 - uses: julia-actions/setup-julia@v1 @@ -55,7 +53,53 @@ jobs: with: file: lcov.info - test-masters: + test-cases: + name: Julia ${{ matrix.version }} - ${{ matrix.arch }} - ${{ matrix.group }} - ${{ matrix.os }} + runs-on: ${{ matrix.os }} + env: + JULIA_PKG_SERVER: "" + strategy: + fail-fast: false + matrix: + version: + - '1.8' + os: + - ubuntu-latest + arch: + - x64 + group: + - 'test_cases_group' + continue-on-error: true + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/setup-julia@v1 + with: + version: ${{ matrix.version }} + arch: ${{ matrix.arch }} + - uses: actions/cache@v1 + env: + cache-name: cache-artifacts + with: + path: ~/.julia/artifacts + key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} + restore-keys: | + ${{ runner.os }}-test-${{ env.cache-name }}- + ${{ runner.os }}-test- + ${{ runner.os }}- + - uses: julia-actions/julia-buildpkg@latest + - run: | + git config --global user.name Tester + git config --global user.email te@st.er + - uses: julia-actions/julia-runtest@latest + continue-on-error: ${{ matrix.version == 'nightly' }} + env: + IIF_TEST_GROUP: ${{ matrix.group }} + # - uses: julia-actions/julia-processcoverage@v1 + # - uses: codecov/codecov-action@v1 + # with: + # file: lcov.info + + test-dev-main: #if: github.ref != 'refs/heads/release**' name: Upstream Dev runs-on: ubuntu-latest @@ -66,7 +110,7 @@ jobs: - uses: julia-actions/setup-julia@v1 with: - version: 1.7 + version: 1.8 arch: x64 - uses: actions/cache@v1 @@ -107,7 +151,7 @@ jobs: - uses: actions/checkout@v2 - uses: julia-actions/setup-julia@v1 with: - version: 1.7 + version: 1.8 - name: 'Docs on ${{ github.head_ref }}' run: | export JULIA_PKG_SERVER="" diff --git a/Project.toml b/Project.toml index 2a89e6c9d..b7af954a4 100644 --- a/Project.toml +++ b/Project.toml @@ -2,10 +2,11 @@ name = "Caesar" uuid = "62eebf14-49bc-5f46-9df9-f7b7ef379406" keywords = ["SLAM", "state-estimation", "MM-iSAM", "MM-iSAMv2", "inference", "robotics", "ROS"] desc = "Non-Gaussian simultaneous localization and mapping" -version = "0.13.4" +version = "0.13.5" [deps] ApproxManifoldProducts = "9bbbb610-88a1-53cd-9763-118ce10c1f89" +Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" Combinatorics = "861a8166-3701-5b0c-9a16-15d98fcdc6aa" CoordinateTransformations = "150eb455-5306-5404-9cee-2592286d6298" DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" @@ -59,7 +60,7 @@ FFTW = "1" FileIO = "1" ImageCore = "0.8, 0.9" ImageMagick = "1" -IncrementalInference = "0.30" +IncrementalInference = "0.30, 0.31" JLD2 = "0.3, 0.4" JSON = "0.20, 0.21" JSON2 = "0.3, 0.4" @@ -71,7 +72,7 @@ Optim = "1" ProgressMeter = "1" Reexport = "1" Requires = "1" -RoME = "0.20" +RoME = "0.20, 0.21" Rotations = "1.1" StaticArrays = "1" StatsBase = "0.33" diff --git a/docs/src/examples/custom_factor_features.md b/docs/src/examples/custom_factor_features.md index f52562ebe..6a0457298 100644 --- 
a/docs/src/examples/custom_factor_features.md +++ b/docs/src/examples/custom_factor_features.md @@ -20,8 +20,8 @@ The MM-iSAMv2 algorithm relies on the Kolmogorov-Criteria as well as uncorrelate At present `cfo` contains three main fields: - `cfo.factor::MyFactor` the factor object as defined in the `struct` definition, -- `cfo.metadata::FactorMetadata`, which is currently under development and likely to change. - - This contains references to the connected variables to the factor and more, and is useful for large data retrieval such as used in Terrain Relative Navigation (TRN). +- `cfo.fullvariables`, the list of variables connected to this factor, which can be used for large data blob retrieval, e.g. in Terrain Relative Navigation (TRN). + - Also see [Stashing and Caching](@ref section_stash_and_cache) - `cfo._sampleIdx` is the index of which computational sample is currently being calculated. diff --git a/docs/src/refs/literature.md b/docs/src/refs/literature.md index fb8f5c8bf..a0d1a197f 100644 --- a/docs/src/refs/literature.md +++ b/docs/src/refs/literature.md @@ -10,7 +10,7 @@ Newly created page to list related references and additional literature pertaini [1.3] Fourie, D., Claassens, S., Pillai, S., Mata, R., Leonard, J.: ["SLAMinDB: Centralized graph databases for mobile robotics"](http://people.csail.mit.edu/spillai/projects/cloud-graphs/2017-icra-cloudgraphs.pdf), IEEE Intl. Conf. on Robotics and Automation (ICRA), Singapore, 2017. -[1.4] Cheung, M., Fourie, D., Rypkema, N., Vaz Teixeira, P., Schmidt, H., and Leonard, J.: ["Non-Gaussian SLAM utilizing Synthetic Aperture Sonar"](https://marinerobotics.mit.edu/sites/default/files/cheung_icra2019.pdf), Intl. Conf. On Robotics and Automation (ICRA), IEEE, Montreal, 2019. +[1.4] Cheung, M., Fourie, D., Rypkema, N., Vaz Teixeira, P., Schmidt, H., and Leonard, J.: ["Non-Gaussian SLAM utilizing Synthetic Aperture Sonar"](https://ieeexplore.ieee.org/document/8793536), Intl. Conf. On Robotics and Automation (ICRA), IEEE, Montreal, 2019. [1.5] Doherty, K., Fourie, D., Leonard, J.: ["Multimodal Semantic SLAM with Probabilistic Data Association"](https://dspace.mit.edu/bitstream/handle/1721.1/137995.2/doherty_icra2019_revised.pdf?sequence=4&isAllowed=y), Intl. Conf. On Robotics and Automation (ICRA), IEEE, Montreal, 2019. @@ -58,7 +58,7 @@ Newly created page to list related references and additional literature pertaini [2.14] Arnborg, S., Corneil, D.G. and Proskurowski, A., 1987. ["Complexity of finding embeddings in a k-tree"](https://epubs.siam.org/doi/pdf/10.1137/0608024). SIAM Journal on Algebraic Discrete Methods, 8(2), pp.277-284. -[2.15a] Sola, J., Deray, J. and Atchuthan, D., 2018. ["A micro Lie theory for state estimation in robotics". arXiv preprint arXiv:1812.01537](https://arxiv.org/pdf/1812.01537), and [tech report](https://upcommons.upc.edu/bitstream/handle/2117/179757/2089-A-micro-Lie-theory-for-state-estimation-in-robotics%20(3).pdf). +[2.15a] Sola, J., Deray, J. and Atchuthan, D., 2018. ["A micro Lie theory for state estimation in robotics". arXiv preprint arXiv:1812.01537](https://arxiv.org/pdf/1812.01537), and [tech report](https://upcommons.upc.edu/bitstream/handle/2117/179757/2089-A-micro-Lie-theory-for-state-estimation-in-robotics%20(3).pdf). And [cheatsheet w/ suspected typos](https://github.com/artivis/manif/blob/devel/paper/Lie_theory_cheat_sheet.pdf). [2.15b] Dellaert F., 2012. [Lie Groups for Beginners](https://raw.githubusercontent.com/devbharat/gtsam/master/doc/LieGroups.pdf).
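For the `cfo.fullvariables` change in `custom_factor_features.md` above, a minimal sketch of how a custom factor can reach its connected variables and a large stored blob. This is a hypothetical illustration: the factor `MyTRNFactor`, its blob key, and the residual math are invented; it only mirrors the `preambleCache`/`getData` pattern that appears later in this patch in `ScatterAlignPose2.jl`.

```julia
using Caesar   # re-exports the IncrementalInference and DistributedFactorGraphs names used below
import IncrementalInference: preambleCache, AbstractManifoldMinimize

# Hypothetical factor carrying the blob key of a large terrain tile stored against the first variable;
# a complete factor would also define getManifold, getSample, packing, etc.
struct MyTRNFactor <: AbstractManifoldMinimize
  tile_key::Symbol
end

# Runs once at addFactor! time; the returned object becomes `cfo.cache`, so the large
# blob is fetched from the store once rather than on every residual evaluation.
function preambleCache(dfg::AbstractDFG, vars::AbstractVector{<:DFGVariable}, fnc::MyTRNFactor)
  _, blob = getData(dfg, getLabel(vars[1]), fnc.tile_key)
  return (; tile=blob)
end

# Residual: `cfo.fullvariables` lists the connected DFGVariable objects,
# `cfo.cache` holds the preambleCache result, `cfo._sampleIdx` the current sample index.
function (cfo::CalcFactor{<:MyTRNFactor})(meas, x1, x2)
  lbls = getLabel.(cfo.fullvariables)   # e.g. [:x0, :x1]
  # ...domain-specific comparison of `meas` against a lookup in cfo.cache.tile...
  return 0.0
end
```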
diff --git a/src/3rdParty/_PCL/_PCL.jl b/src/3rdParty/_PCL/_PCL.jl index 96a20bdf0..a271ebfd5 100644 --- a/src/3rdParty/_PCL/_PCL.jl +++ b/src/3rdParty/_PCL/_PCL.jl @@ -40,6 +40,7 @@ include("services/ICP_Simple.jl") function __init__() @require RobotOS="22415677-39a4-5241-a37a-00beabbbdae8" include("services/ROSConversions.jl") + @require Gadfly="c91e804a-d5a3-530f-b6f0-dfbca275c004" include("services/PlottingUtilsPCL.jl") end diff --git a/src/3rdParty/_PCL/entities/PCLTypes.jl b/src/3rdParty/_PCL/entities/PCLTypes.jl index 1a5fa7d67..f03fc1f15 100644 --- a/src/3rdParty/_PCL/entities/PCLTypes.jl +++ b/src/3rdParty/_PCL/entities/PCLTypes.jl @@ -4,6 +4,7 @@ Point Cloud Library (PCL) - www.pointclouds.org Copyright (c) 2010, Willow Garage, Inc. Copyright (c) 2012-, Open Perception, Inc. + Copyright (c) 2022, NavAbility, Inc. All rights reserved. @@ -277,4 +278,4 @@ end -# \ No newline at end of file +# diff --git a/src/3rdParty/_PCL/services/ConsolidateRigidTransform.jl b/src/3rdParty/_PCL/services/ConsolidateRigidTransform.jl index ec7478faf..ccb025962 100644 --- a/src/3rdParty/_PCL/services/ConsolidateRigidTransform.jl +++ b/src/3rdParty/_PCL/services/ConsolidateRigidTransform.jl @@ -46,16 +46,28 @@ end ## FIXME, not-yet-consolidated rigid transform code that must be deprecated below ## ============================================================ +const _SO3_MANI = SpecialOrthogonal(3) +const _SE3_MANI = SpecialEuclidean(3) + # name euler here is very ambiguous, these are Lie algebra elements # used to manipulate cartesian coordinates in a TranslationGroup(3) space. -function euler_angles_to_linearized_rotation_matrix(α1, α2, α3) - dR = [ 1 -α3 α2 - α3 1 -α1 - -α2 α1 1] +function euler_angles_to_linearized_rotation_matrix(α1, α2, α3, rigid::Bool=true) + dR = if rigid + # TODO likely faster performance by using a retraction instead of expmap + exp_lie(_SO3_MANI, hat(_SO3_MANI, SMatrix{3,3, Float64}(I), SA[α1, α2, α3])) + else + SMatrix{3,3,Float64}(1.0,0,0,0,1,0,0,0,1) + + hat(_SO3_MANI, Identity(_SO3_MANI), SA[α1, α2, α3]) + # [ 1 -α3 α2 + # α3 1 -α1 + # -α2 α1 1] + end end + function create_homogeneous_transformation_matrix(R, t) - H = [R t - zeros(1,3) 1] + H = affine_matrix(_SE3_MANI, ArrayPartition(t, R)) + # H = [R t + # zeros(1,3) 1] end function euler_coord_to_homogeneous_coord(XE) no_points = size(XE, 1) diff --git a/src/3rdParty/_PCL/services/ICP_Simple.jl b/src/3rdParty/_PCL/services/ICP_Simple.jl index 7d2585581..17d13305b 100644 --- a/src/3rdParty/_PCL/services/ICP_Simple.jl +++ b/src/3rdParty/_PCL/services/ICP_Simple.jl @@ -221,7 +221,7 @@ Downloads.download(lidar2_url, io2) X_fix = readdlm(io1) X_mov = readdlm(io2) -H, HX_mov = Caesar._PCL.alignICP_Simple(X_fix, X_mov; verbose=true) +H, HX_mov, stat = Caesar._PCL.alignICP_Simple(X_fix, X_mov; verbose=true) ``` Notes @@ -244,16 +244,17 @@ function alignICP_Simple( max_overlap_distance::Number=Inf, min_change::Number=3, max_iterations::Integer=100, - verbose::Bool=false + verbose::Bool=false, + H = Matrix{Float64}(I,4,4), ) # size(X_fix)[2] == 3 || error(""""X_fix" must have 3 columns""") size(X_mov)[2] == 3 || error(""""X_mov" must have 3 columns""") - correspondences >= 10 || error(""""correspondences" must be >= 10""") - min_planarity >= 0 && min_planarity < 1 || error(""""min_planarity" must be >= 0 and < 1""") - neighbors >= 2 || error(""""neighbors" must be >= 2""") - min_change > 0 || error(""""min_change" must be > 0""") - max_iterations > 0 || error(""""max_iterations" must be > 0""") + 10 <= correspondences || 
error(""""correspondences" must be >= 10""") + 0 <= min_planarity < 1 || error(""""min_planarity" must be >= 0 and < 1""") + 2 <= neighbors || error(""""neighbors" must be >= 2""") + 0 < min_change || error(""""min_change" must be > 0""") + 0 < max_iterations || error(""""max_iterations" must be > 0""") dt = @elapsed begin verbose && @info "Create point cloud objects ..." @@ -276,11 +277,12 @@ function alignICP_Simple( verbose && @info "Estimate normals of selected points ..." estimate_normals!(pcfix, neighbors) - H = Matrix{Float64}(I,4,4) residual_distances = Any[] verbose && @info "Start iterations ..." + count = 0 for i in 1:max_iterations + count += 1 initial_distances = matching!(pcmov, pcfix) reject!(pcmov, pcfix, min_planarity, initial_distances) dH, residuals = estimate_rigid_body_transformation( @@ -290,7 +292,7 @@ function alignICP_Simple( push!(residual_distances, residuals) transform!(pcmov, dH) - H = dH*H + H .= dH*H pcfix.sel = sel_orig if i > 1 if check_convergence_criteria(residual_distances[i], residual_distances[i-1], @@ -324,7 +326,8 @@ function alignICP_Simple( X_mov_transformed = [pcmov.x pcmov.y pcmov.z] - return H, X_mov_transformed + stat = (;residual_mean=mean(residual_distances[end]), correspondences=length(residual_distances[end]),residual_std=std(residual_distances[end]), iterations=count) + return H, X_mov_transformed, stat end diff --git a/src/3rdParty/_PCL/services/PlottingUtilsPCL.jl b/src/3rdParty/_PCL/services/PlottingUtilsPCL.jl new file mode 100644 index 000000000..4a00ed8a8 --- /dev/null +++ b/src/3rdParty/_PCL/services/PlottingUtilsPCL.jl @@ -0,0 +1,16 @@ + +@info "Caesar._PCL is loading tools related to Gadfly.jl." + +# import ..Gadfly as GF + +## ===================================================================================== +## _PCL plotting utils +## ===================================================================================== + +# NOTE moved pointcloud plotting to and see PR Arena.jl#94 +function plotPointCloud(pc::PointCloud) + x = (s->s.data[1]).(pc.points) + y = (s->s.data[2]).(pc.points) + + Main.Gadfly.plot(x=x,y=y, Main.Gadfly.Geom.point) +end \ No newline at end of file diff --git a/src/3rdParty/_PCL/services/PointCloud.jl b/src/3rdParty/_PCL/services/PointCloud.jl index a88d2a1a6..e34597186 100644 --- a/src/3rdParty/_PCL/services/PointCloud.jl +++ b/src/3rdParty/_PCL/services/PointCloud.jl @@ -359,6 +359,21 @@ function PCLPointCloud2(cloud::PointCloud{T,P,R}; datatype = _PCL_FLOAT32) where ) end +## ========================================================================================================= +## Useful utils +## ========================================================================================================= + + +function _filterMinRange( + pts::AbstractVector{<:AbstractVector{<:Real}}, + minrange::Real, + maxrange::Real +) + # filter pointcloud section of interest + msk = findall(x-> minrange < norm(x) < maxrange, pts) + pts[msk] +end + ## ========================================================================================================= ## Custom printing diff --git a/src/Caesar.jl b/src/Caesar.jl index f9215dbe7..596604138 100644 --- a/src/Caesar.jl +++ b/src/Caesar.jl @@ -31,6 +31,7 @@ using CoordinateTransformations, JSON, JSON2, + Base64, FileIO, DataStructures, ProgressMeter, @@ -100,6 +101,7 @@ function __init__() include("images/ScatterAlignPose2.jl") @require Gadfly="c91e804a-d5a3-530f-b6f0-dfbca275c004" include("plotting/ScatterAlignPlotting.jl") + @require 
RobotOS="22415677-39a4-5241-a37a-00beabbbdae8" include("images/ROSConversions.jl") end @require Distributed="8ba89e20-285c-5b6f-9357-94700520ee1b" include("images/DistributedUtils.jl") end diff --git a/src/beamforming/SASBearing2D.jl b/src/beamforming/SASBearing2D.jl index 0d0350cce..4b3ebe408 100644 --- a/src/beamforming/SASBearing2D.jl +++ b/src/beamforming/SASBearing2D.jl @@ -120,14 +120,14 @@ function (cfo::CalcFactor{<:SASBearing2D})( meas, XX... ) # sas2d = cfo.factor - userdata = cfo.metadata + # userdata = cfo.metadata # OUT OF DATE, use CalcFactor.fullvariables idx = cfo._sampleIdx # dx, dy, azi = 0.0, 0.0, 0.0 thread_data = sas2d.threadreuse[Threads.threadid()] res = [0.0;] - if string(userdata.solvefor)[1] == 'l' + if cf.solvefor == 1 # Solving all poses to Landmark # Reference the filter positions locally (first element) @@ -140,8 +140,8 @@ function (cfo::CalcFactor{<:SASBearing2D})( meas, # ccall(:jl_, Void, (Any,), "idx=$(idx)\n") thread_data.oncebackidx[1] = idx #run for eacn new idx - thread_data.workvarlist = userdata.variablelist[2:end] # skip landmark element - thread_data.looelement = findfirst(thread_data.workvarlist .== userdata.solvefor) + thread_data.workvarlist = cfo.variablelist[2:end] # skip landmark element + thread_data.looelement = findfirst(thread_data.workvarlist .== cfo.solvefor) thread_data.elementset = setdiff(Int[1:length(thread_data.workvarlist);], [thread_data.looelement;]) # Reference first or second element diff --git a/src/images/ROSConversions.jl b/src/images/ROSConversions.jl new file mode 100644 index 000000000..88c0d9eeb --- /dev/null +++ b/src/images/ROSConversions.jl @@ -0,0 +1,72 @@ +@info "Caesar.jl is loading tools relating to Images.jl and RobotOS.jl" + +using .RobotOS + +import Unmarshal: unmarshal + +@rosimport std_msgs.msg: Header +@rosimport sensor_msgs.msg: Image + + +function unmarshal( + header::Main.std_msgs.msg.Header +) + # + Dict{String,Any}( + "seq" => header.seq, + "stamp" => Dict{String,Int}( + "secs"=>header.stamp.secs, + "nsecs"=>header.stamp.nsecs + ), + "frame_id" => header.frame_id, + "_type" => "ROS1/std_msgs/Header" + ) +end + +function unmarshal( + msg::Main.sensor_msgs.msg.Image +) + Dict{String,Any}( + "width" => Int(msg.width), + "height" => Int(msg.height), + "step" => Int(msg.step), + "data_b64" => base64encode(msg.data), + "encoding" => msg.encoding, + "is_bigendian" => msg.is_bigendian === 0x01, + "header" => unmarshal(msg.header), + "_type" => "ROS1/sensor_msgs/Image?base64", + "description" => "Caesar.toImage(JSON.parse(jstr))" + ) +end + + +toImage(msg::Main.sensor_msgs.msg.Image) = unmarshal(msg) |> toImage + + +""" + $SIGNATURES + +Convert `Caesar.Image::Dict` type to ROS message `sensor_msgs.msg.Image`. 
+ +See also: [`Caesar.unmarshal`](@ref), [`Caesar.toImage`](@ref), [`Caesar._PCL.toROSPointCloud2`](@ref) +""" +function toROSImage(msgd::Dict{String,Any}) + header = Main.std_msgs.msg.Header(); + header.seq = msgd["header"]["seq"] + header.stamp = RobotOS.Time(msgd["header"]["stamp"]["secs"], msgd["header"]["stamp"]["nsecs"]) + header.frame_id = msgd["header"]["frame_id"] + + msg = Main.sensor_msgs.msg.Image(); + + msg.header = header + msg.height = UInt32(msgd["height"]) + msg.width = UInt32(msgd["width"]) + + msg.is_bigendian = UInt8(msgd["is_bigendian"]) + msg.step = UInt32(msgd["step"]) + msg.data = base64decode(msgd["data_b64"]) + msg.encoding = msgd["encoding"] + + msg +end +# \ No newline at end of file diff --git a/src/images/ScatterAlignPose2.jl b/src/images/ScatterAlignPose2.jl index 448933c76..6c283d2fd 100644 --- a/src/images/ScatterAlignPose2.jl +++ b/src/images/ScatterAlignPose2.jl @@ -1,14 +1,15 @@ -import IncrementalInference: getSample, preambleCache +import IncrementalInference: getSample, preambleCache, _update! import Base: convert, show # import DistributedFactorGraphs: getManifold -import ApproxManifoldProducts: sample +import ApproxManifoldProducts: sample, _update! +using UUIDs using .Images export ScatterAlignPose2, PackedScatterAlignPose2 -export ScatterAlignPose3 +export ScatterAlignPose3, PackedScatterAlignPose3 export overlayScatter, overlayScatterMutate @@ -67,7 +68,7 @@ Base.@kwdef struct ScatterAlign{P, Arg 0 < `rescale` ≤ 1 is also used to rescale the images to lower resolution for speed. """ gridscale::Float64 = 1.0 """ how many heatmap sampled particles to use for mmd alignment """ - sample_count::Int = 100 + sample_count::Int = 500 """ bandwidth to use for mmd """ bw::Float64 = 1.0 """ EXPERIMENTAL, flag whether to use 'stashing' for large point cloud, see [Stash & Cache](@ref section_stash_unstash) """ @@ -196,7 +197,11 @@ getManifold(::IIF.InstanceType{<:ScatterAlignPose2}) = getManifold(Pose2Pose2) getManifold(::IIF.InstanceType{<:ScatterAlignPose3}) = getManifold(Pose3Pose3) # runs once upon addFactor! 
and returns object later used as `cache` -function preambleCache(dfg::AbstractDFG, vars::AbstractVector{<:DFGVariable}, fnc::Union{<:ScatterAlignPose2,<:ScatterAlignPose3}) +function preambleCache( + dfg::AbstractDFG, + vars::AbstractVector{<:DFGVariable}, + fnc::Union{<:ScatterAlignPose2,<:ScatterAlignPose3} +) # M = getManifold(_getPoseType(fnc.align)) e0 = getPointIdentity(M) @@ -205,10 +210,14 @@ function preambleCache(dfg::AbstractDFG, vars::AbstractVector{<:DFGVariable}, fn for (va,de,cl) in zip(vars,[fnc.align.dataEntry_cloud1,fnc.align.dataEntry_cloud2],[fnc.align.cloud1,fnc.align.cloud2]) if fnc.align.useStashing @assert 0 < length(de) "cannot reconstitute ScatterAlignPose2 without necessary data entry, only have $de" - _, db = getData(dfg, getLabel(va), Symbol(de)) # fnc.align.dataStoreHint + _, db = getData(dfg, getLabel(va), UUID(de)) # fnc.align.dataStoreHint + # Assume PackedManifoldKernelDensity cld = convert(SamplableBelief, String(take!(IOBuffer(db)))) - # cld = unpackDistribution(strdstr) - IIF._update!(cl, cld) + # payload = JSON.parse(String(take!(IOBuffer(db)))) + # dstr = unmarshal PackedManifoldKernelDensity + # cld = unpackDistribution(dstr) + # update either a HGD or MKD + _update!(cl, cld) end end @@ -409,24 +418,42 @@ Base.@kwdef struct PackedScatterAlignPose2 <: AbstractPackedFactor dataStoreHint::String = "" end +Base.@kwdef struct PackedScatterAlignPose3 <: AbstractPackedFactor + _type::String = "Caesar.PackedScatterAlignPose3" + cloud1::_PARCHABLE_PACKED_CLOUD + cloud2::_PARCHABLE_PACKED_CLOUD + gridscale::Float64 = 1.0 + sample_count::Int = 50 + bw::Float64 = 0.01 + """ EXPERIMENTAL, flag whether to use 'stashing' for large point cloud, see [Stash & Cache](@ref section_stash_unstash) """ + useStashing::Bool = false + """ DataEntry ID for stash store of cloud 1 & 2 """ + dataEntry_cloud1::String = "" + dataEntry_cloud2::String = "" + """ Data store hint where likely to find the data entries and blobs for reconstructing cloud1 and cloud2""" + dataStoreHint::String = "" +end -function convert(::Type{<:PackedScatterAlignPose2}, arp::ScatterAlignPose2) - +function convert( + ::Type{T}, + arp::Union{<:ScatterAlignPose2,<:ScatterAlignPose3} +) where {T <: Union{<:PackedScatterAlignPose2,<:PackedScatterAlignPose3}} + # cld1 = arp.align.cloud1 cld2 = arp.align.cloud2 # reconstitute full type during the preambleCache step if arp.align.useStashing - @assert length(arp.align.dataEntry_cloud1) !== 0 "packing of ScatterAlignPose2 asked to be `useStashing=true`` yet no `.dataEntry_cloud1` exists for later loading" + @assert length(arp.align.dataEntry_cloud1) !== 0 "packing of ScatterAlignPose asked to be `useStashing=true`` yet no `.dataEntry_cloud1` exists for later loading" cld1 = IIF.parchDistribution(arp.align.cloud1) - @assert length(arp.align.dataEntry_cloud2) !== 0 "packing of ScatterAlignPose2 asked to be `useStashing=true`` yet no `.dataEntry_cloud2` exists for later loading" + @assert length(arp.align.dataEntry_cloud2) !== 0 "packing of ScatterAlignPose asked to be `useStashing=true`` yet no `.dataEntry_cloud2` exists for later loading" cld2 = IIF.parchDistribution(arp.align.cloud2) end cloud1 = packDistribution(cld1) cloud2 = packDistribution(cld2) - PackedScatterAlignPose2(; + T(; cloud1, cloud2, gridscale = arp.align.gridscale, @@ -435,22 +462,29 @@ function convert(::Type{<:PackedScatterAlignPose2}, arp::ScatterAlignPose2) useStashing = arp.align.useStashing, dataEntry_cloud1 = arp.align.dataEntry_cloud1, dataEntry_cloud2 = arp.align.dataEntry_cloud2, - 
dataStoreHint = arp.align.dataStoreHint ) + dataStoreHint = arp.align.dataStoreHint + ) end -function convert(::Type{<:ScatterAlignPose2}, parp::PackedScatterAlignPose2) +function convert( + ::Type{T}, + parp::Union{<:PackedScatterAlignPose2,<:PackedScatterAlignPose3} +) where {T <: Union{<:ScatterAlignPose2,<:ScatterAlignPose3}} # - + _selectPoseT(::Type{<:ScatterAlignPose2}) = Pose2 + _selectPoseT(::Type{<:ScatterAlignPose3}) = Pose3 + function _resizeCloudData!(cl::PackedHeatmapGridDensity) - typ, col, row = typeof(cl.data[1][1]), Int(cl.data[1][2]), Int(cl.data[2][1]) + row=Int(cl.data[2][1]) + typ, col = typeof(cl.data[1][1]), Int(cl.data[1][2]) resize!(cl.data,row) for i in 1:row cl.data[i] = Vector{typ}(undef, col) end nothing end - _resizeCloudData!(cl::ManifoldKernelDensity) = nothing - + _resizeCloudData!(cl::PackedManifoldKernelDensity) = nothing + # prep cloud1.data fields for larger data if parp.useStashing _resizeCloudData!(parp.cloud1) @@ -461,7 +495,8 @@ function convert(::Type{<:ScatterAlignPose2}, parp::PackedScatterAlignPose2) cloud2 = unpackDistribution(parp.cloud2) # and build the final object - ScatterAlignPose2(ScatterAlign{Pose2, typeof(cloud1), typeof(cloud2)}(; + poseT = _selectPoseT(T) + T(ScatterAlign{poseT, typeof(cloud1), typeof(cloud2)}(; cloud1, cloud2, gridscale=parp.gridscale, diff --git a/src/images/images.jl b/src/images/images.jl index 076294224..0bf491a67 100644 --- a/src/images/images.jl +++ b/src/images/images.jl @@ -9,6 +9,22 @@ export imhcatPretty, csmAnimationJoinImgs, csmAnimateSideBySide export makeVideoFromData +function toImage(msgd::Dict{String,Any}) + data = base64decode(msgd["data_b64"]) + h, w = msgd["height"], msgd["width"] + + if msgd["encoding"] == "mono8" + img = Matrix{Gray{N0f8}}(undef, h, w) + # assuming one endian type for now, TODO both little and big endian + for i in 1:h, j in 1:w + img[i,j] = Gray{N0f8}(data[msgd["step"]*(i-1)+j]/255) + end + img + else + error("Conversion for ROS sensor_msgs.Image encoding not implemented yet $(msgd["encoding"])") + end +end + """ $SIGNATURES @@ -17,6 +33,8 @@ Use ffmpeg to write image sequence to video file. 
Notes: - Requires Images.jl - https://discourse.julialang.org/t/creating-a-video-from-a-stack-of-images/646/8 + +Also see: `convert(Matrix{RGB}, pl)` """ function writevideo(fname::AbstractString, imgstack::AbstractArray{<:Colorant,3}; diff --git a/test/ICRA2022_tests/nongaussian_mixture.jl b/test/ICRA2022_tests/nongaussian_mixture.jl index 3578fb698..2077500ff 100644 --- a/test/ICRA2022_tests/nongaussian_mixture.jl +++ b/test/ICRA2022_tests/nongaussian_mixture.jl @@ -18,6 +18,7 @@ using IncrementalInference # Start with an empty factor graph fg = initfg() +getSolverParams(fg).useMsgLikelihoods = true # add the first node addVariable!(fg, :x0, ContinuousScalar) @@ -92,6 +93,8 @@ tree = solveGraph!(fg); ppes = DFG.getPPESuggested.(fg, [:x0;:x1;:x2;:x3]) +# + @test isapprox( getPPESuggested(fg, :x0)[1], 0; atol=1) @test isapprox( getPPESuggested(fg, :x1)[1], 10; atol=1) @test isapprox( getPPESuggested(fg, :x2)[1], 20; atol=2) diff --git a/test/pcl/testPointCloud2.jl b/test/pcl/testPointCloud2.jl index b8e3891a3..2a3618d1d 100644 --- a/test/pcl/testPointCloud2.jl +++ b/test/pcl/testPointCloud2.jl @@ -389,41 +389,77 @@ Caesar._PCL.reject!(icp_fix, icp_mov, min_planarity, initial_distances) ## check alignment -H, pcmovd = Caesar._PCL.alignICP_Simple(X_fix, X_mov) +H, pcmovd, stat = Caesar._PCL.alignICP_Simple(X_fix, X_mov) +## + +# LieAlgebra expontiation based ref: H_ref = [ - 1.00101 5.66661e-5 -0.0349906 0.0994922 - -0.00119442 0.999613 -0.0344838 -0.0498164 - 0.0350353 0.035119 1.00047 -0.0501982 - 0.0 0.0 0.0 1.0 + 0.999392 4.89177e-5 -0.0348611 0.0995209 + -0.00126357 0.999393 -0.0348214 -0.0498342 + 0.0348382 0.0348443 0.998785 -0.0499514 + 0.0 0.0 0.0 1.0 ] +# non-rigid (un-normalized R) +# H_ref = [ +# 1.00101 5.66661e-5 -0.0349906 0.0994922 +# -0.00119442 0.999613 -0.0344838 -0.0498164 +# 0.0350353 0.035119 1.00047 -0.0501982 +# 0.0 0.0 0.0 1.0 +# ] +# with rotation Lie Algebra exp map modification pcmv_ref_10 = [ - 0.635052 0.0413591 -0.430395 - 0.640057 0.0423527 -0.430185 - 0.639056 0.0423539 -0.43022 - 0.639055 0.0403547 -0.43029 - 0.633049 0.0403618 -0.4305 - 0.635051 0.0403594 -0.43043 - 0.637054 0.0413567 -0.430325 - 0.641058 0.0403523 -0.43022 - 0.634051 0.0433595 -0.43036 - 0.631048 0.0423634 -0.4305 + 0.634187 0.0414235 -0.429596 + 0.639184 0.0424166 -0.429387 + 0.638185 0.0424179 -0.429422 + 0.638185 0.0404191 -0.429491 + 0.632188 0.0404267 -0.4297 + 0.634187 0.0404241 -0.429631 + 0.636186 0.041421 -0.429526 + 0.640184 0.0404166 -0.429421 + 0.633188 0.0434236 -0.429561 + 0.63019 0.042428 -0.4297 ] - pcmv_ref_end = [ - -0.441908 0.232918 0.109252 - -0.445912 0.231923 0.109076 - -0.432898 0.2499 0.110164 - -0.433899 0.247902 0.110059 - -0.439904 0.253907 0.110059 - -0.437902 0.256904 0.110235 - -0.435901 0.249904 0.110059 - -0.438903 0.251907 0.110024 - -0.437903 0.237911 0.109567 - -0.447914 0.233925 0.109076 + -0.440997 0.232817 0.109241 + -0.444994 0.231823 0.109067 + -0.432001 0.249795 0.110147 + -0.433001 0.247798 0.110043 + -0.438997 0.253802 0.110043 + -0.436998 0.256797 0.110217 + -0.434999 0.249799 0.110043 + -0.437997 0.251802 0.110008 + -0.436999 0.237809 0.109555 + -0.446993 0.233824 0.109067 ] +# # non-rigid legacy +# pcmv_ref_10 = [ +# 0.635052 0.0413591 -0.430395 +# 0.640057 0.0423527 -0.430185 +# 0.639056 0.0423539 -0.43022 +# 0.639055 0.0403547 -0.43029 +# 0.633049 0.0403618 -0.4305 +# 0.635051 0.0403594 -0.43043 +# 0.637054 0.0413567 -0.430325 +# 0.641058 0.0403523 -0.43022 +# 0.634051 0.0433595 -0.43036 +# 0.631048 0.0423634 -0.4305 +# ] +# pcmv_ref_end = [ 
+# -0.441908 0.232918 0.109252 +# -0.445912 0.231923 0.109076 +# -0.432898 0.2499 0.110164 +# -0.433899 0.247902 0.110059 +# -0.439904 0.253907 0.110059 +# -0.437902 0.256904 0.110235 +# -0.435901 0.249904 0.110059 +# -0.438903 0.251907 0.110024 +# -0.437903 0.237911 0.109567 +# -0.447914 0.233925 0.109076 +# ] + @test isapprox(H_ref, H; atol = 1e-4) @test isapprox(pcmv_ref_10, pcmovd[1:10,:]; atol = 1e-4) diff --git a/test/runtests.jl b/test/runtests.jl index 21276b0db..a98f6a240 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -13,10 +13,11 @@ TEST_GROUP = get(ENV, "IIF_TEST_GROUP", "all") if TEST_GROUP in ["all", "basic_functional_group"] println("Starting tests...") # highly multipackage tests that don't fit well in specific library dependencies. - include("pcl/testPointCloud2.jl") - include("testPose2AprilTag4Corner.jl") + include("testScatterAlignParched.jl") include("testScatterAlignPose2.jl") include("testStashing_SAP.jl") + include("pcl/testPointCloud2.jl") + include("testPose2AprilTag4Corner.jl") include("multilangzmq/runtests.jl") end diff --git a/test/testScatterAlignParched.jl b/test/testScatterAlignParched.jl new file mode 100644 index 000000000..c89bc3163 --- /dev/null +++ b/test/testScatterAlignParched.jl @@ -0,0 +1,142 @@ + +using Test +using Caesar +# import Caesar._PCL as _PCL +using Downloads +using DelimitedFiles +using TensorCast +using Images +using JSON + +## + +@testset "test stashing in factor packing and unpacking" begin +## + +## ~50mb download, blobs that are too big to always store in variables themselves +# lidar1_url = "https://github.com/JuliaRobotics/CaesarTestData.jl/raw/main/data/lidar/simpleICP/terrestrial_lidar1.xyz" +# lidar2_url = "https://github.com/JuliaRobotics/CaesarTestData.jl/raw/main/data/lidar/simpleICP/terrestrial_lidar2.xyz" +# io1 = PipeBuffer() +# io2 = PipeBuffer() +# Downloads.download(lidar1_url, io1) +# Downloads.download(lidar2_url, io2) + +# X_fix = readdlm(io1) +# X_mov = readdlm(io2) + +X_fix = randn(10000,3) +X_mov = randn(10000,3) + +@cast pts_f_[i][d] := X_fix[i,d] +@cast pts_m_[i][d] := X_mov[i,d] + +pts_f = (s->[s...]).(pts_f_) +pts_m = (s->[s...]).(pts_m_) + +# pcf = _PCL.PointCloud(X_fix) +# pcm = _PCL.PointCloud(X_mov) + +## + +bel_f = manikde!(Position3, pts_f; bw=[0.1;0.1;0.1]) +bel_m = manikde!(Position3, pts_m; bw=[0.1;0.1;0.1]) + +## + +fg = initfg() + +storeDir = "/tmp/caesar/localstore" # joinLogPath(fg,"data") +mkpath(storeDir) +datastore = FolderStore{Vector{UInt8}}(:default_folder_store, storeDir) +addBlobStore!(fg, datastore) + +## + +addVariable!(fg, :x0, Pose3) +addVariable!(fg, :x1, Pose3) + +## +# addFactor!(fg, [:x0], MvNormal(zeros(6))) + +## + +data = packDistribution(bel_f) +deb0 = addData!( + fg, + :default_folder_store, + :x0, + :pointcloud, + Vector{UInt8}(JSON.json( data )), + mimeType="application/json/octet-stream" +) +data = packDistribution(bel_m) +deb1 = addData!( + fg, + :default_folder_store, + :x1, + :pointcloud, + Vector{UInt8}(JSON.json( data )), + mimeType="application/json/octet-stream" +) + +## + +sap = Caesar.ScatterAlignPose3(; + cloud1=bel_f, + cloud2=bel_m, + sample_count=100, + useStashing=true, + dataEntry_cloud1=string(deb0[1].id), + dataEntry_cloud2=string(deb1[1].id), +) + +## this line checks blob store access via preambleCache specifically for ScatterAlign which will internally access the blob store + +f1 = addFactor!(fg, [:x0; :x1], sap; graphinit=false); + +## make sure stuff is working before serialization +meas = sampleFactor(fg, getLabel(f1), 1) + +## serialize 
with stashing enabled, see docs here: https://juliarobotics.org/Caesar.jl/latest/concepts/stash_and_cache/ + +pf = DFG.packFactor(fg, getFactor(fg, getLabel(f1))) + +jpf = JSON.json(pf) + +# check that the massive point clouds are not stored in the packed factor object +@test length(jpf) < 1100 + +## now confirm the solver deserialization can also work with the factor pulling data from the blob store + +tfg = initfg() + +# use existing logpath from fg +storeDir = "/tmp/caesar/localstore" # joinLogPath(fg,"data") + +datastore = FolderStore{Vector{UInt8}}(:default_folder_store, storeDir) +addBlobStore!(tfg, datastore) +v0 = addVariable!(tfg, :x0, Pose3) +v1 = addVariable!(tfg, :x1, Pose3) + +# To load from stashed blobs attached to variables, those variables need the associated dataEntries +Caesar.addDataEntry!(v0,deb0[1]) +Caesar.addDataEntry!(v1,deb1[1]) + +# test unpacking of factor pulling data from blobstore during preambleCache +pf_ = Caesar.unpackFactor(tfg,pf) + +## all up test via FileDFG + +saveDFG("/tmp/caesar/test_stashed_data", fg) + +fg_ = initfg() + +# attach existing blob store before loading +storeDir = "/tmp/caesar/localstore" # joinLogPath(fg,"data") +datastore = FolderStore{Vector{UInt8}}(:default_folder_store, storeDir) +addBlobStore!(fg_, datastore) + +loadDFG!(fg_, "/tmp/caesar/test_stashed_data") + +## +end \ No newline at end of file diff --git a/test/testStashing_SAP.jl b/test/testStashing_SAP.jl index 78ab9bff7..e0067192f 100644 --- a/test/testStashing_SAP.jl +++ b/test/testStashing_SAP.jl @@ -63,26 +63,36 @@ addBlobStore!(fg, datastore) addVariable!(fg, :x0, Pose2) addVariable!(fg, :x1, Pose2) -## must store the stashed data entry blobs before the preambleCache function runs on addFactor ## sap = ScatterAlignPose2(bIM1, bIM2, (x,y); sample_count=100, bw=1.0, cvt=(im)->im, - useStashing=true, - dataEntry_cloud1=string(:hgd_stash_x0), - dataEntry_cloud2=string(:hgd_stash_x1), - dataStoreHint = string(:test_stashing_store) ) + useStashing=false) + # dataEntry_cloud1=string(:hgd_stash_x0), + # dataEntry_cloud2=string(:hgd_stash_x1), + # dataStoreHint = string(:test_stashing_store) ) # -## +## must store the stashed data entry blobs before the preambleCache function runs on addFactor db = Vector{UInt8}( convert(String, sap.align.cloud1) ) -addData!(fg, :test_stashing_store, :x0, :hgd_stash_x0, db, mimeType="application/json/octet-stream") +de0, = addData!(fg, :test_stashing_store, :x0, :hgd_stash_x0, db, mimeType="application/json/octet-stream") db = Vector{UInt8}( convert(String,sap.align.cloud2) ) -addData!(fg, :test_stashing_store, :x1, :hgd_stash_x1, db, mimeType="application/json/octet-stream") +de1, = addData!(fg, :test_stashing_store, :x1, :hgd_stash_x1, db, mimeType="application/json/octet-stream") + +## + +sap = ScatterAlignPose2(bIM1, bIM2, (x,y); + sample_count=100, bw=1.0, + cvt=(im)->im, + useStashing=false, + dataEntry_cloud1=string(de0.id), + dataEntry_cloud2=string(de1.id), + dataStoreHint = string(:test_stashing_store) ) +# ##
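As a consolidated reference for the two stashing tests above (`testScatterAlignParched.jl` and `testStashing_SAP.jl`), a condensed sketch of the stash-and-cache round trip end to end. The dummy images, store path, and `useStashing=true` on the final factor are illustrative choices (the last `ScatterAlignPose2` construction in `testStashing_SAP.jl` currently leaves `useStashing=false`); the API calls mirror ones already used elsewhere in this patch.

```julia
using Caesar

# build a graph with a folder blob store attached
fg = initfg()
mkpath("/tmp/caesar/localstore")
datastore = FolderStore{Vector{UInt8}}(:test_stashing_store, "/tmp/caesar/localstore")
addBlobStore!(fg, datastore)
addVariable!(fg, :x0, Pose2)
addVariable!(fg, :x1, Pose2)

# dummy heatmaps standing in for bIM1, bIM2 and their (x,y) domain in the tests
x = y = range(-10, 10; length=64)
bIM1, bIM2 = rand(64, 64), rand(64, 64)

# build once without stashing just to obtain the serializable clouds
sap0 = ScatterAlignPose2(bIM1, bIM2, (x, y); sample_count=100, bw=1.0, cvt=(im)->im)
de0, = addData!(fg, :test_stashing_store, :x0, :hgd_stash_x0,
                Vector{UInt8}(convert(String, sap0.align.cloud1));
                mimeType="application/json/octet-stream")
de1, = addData!(fg, :test_stashing_store, :x1, :hgd_stash_x1,
                Vector{UInt8}(convert(String, sap0.align.cloud2));
                mimeType="application/json/octet-stream")

# rebuild with stashing enabled, pointing the factor at the stored blob entries
sap = ScatterAlignPose2(bIM1, bIM2, (x, y); sample_count=100, bw=1.0, cvt=(im)->im,
                        useStashing=true,
                        dataEntry_cloud1=string(de0.id),
                        dataEntry_cloud2=string(de1.id),
                        dataStoreHint=string(:test_stashing_store))
addFactor!(fg, [:x0; :x1], sap; graphinit=false)

# save and reload; the blob store must be attached before loadDFG! so preambleCache
# can reconstitute the parched clouds from the stash
saveDFG("/tmp/caesar/test_stashed_sap", fg)
fg_ = initfg()
addBlobStore!(fg_, FolderStore{Vector{UInt8}}(:test_stashing_store, "/tmp/caesar/localstore"))
loadDFG!(fg_, "/tmp/caesar/test_stashed_sap")
```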