diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml
index 4ecf94d912..e4b98fe4a8 100644
--- a/.github/workflows/conda.yml
+++ b/.github/workflows/conda.yml
@@ -49,3 +49,12 @@ jobs:
       with:
         name: ${{ matrix.platform }}-conda-package
         path: ./pdal-feedstock/packages/
+
+    - name: Deploy to pdal-master Conda channel
+      if: github.ref == 'refs/heads/master'
+      shell: bash -l {0}
+      env:
+        ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
+      run: |
+        source ../scripts/ci/conda/upload.sh || true
+      working-directory: ./pdal-feedstock
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 83bf66f4e3..e83cebb806 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -7,20 +7,30 @@ on: [push, pull_request, workflow_dispatch]
 jobs:
   containers:
     runs-on: ubuntu-latest
+    env:
+      PUSH_PACKAGES: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
     steps:
     - uses: actions/checkout@v2
+    - name: Lint
+      id: lint
+      run: |
+        echo "are we pushing packages" ${{ env.PUSH_PACKAGES }}
+        echo "event_name" ${{ github.event_name }}
+        echo "ref" ${{ github.ref }}
     - name: Setup Docker Buildx
       id: buildx
       uses: docker/setup-buildx-action@v1
       with:
         version: latest
-    - name: Login to GitHub Container Registry
+    - if: ${{ env.PUSH_PACKAGES == 'true' }}
+      name: Login to GitHub Container Registry
       uses: docker/login-action@v1
       with:
         registry: ghcr.io
         username: ${{ github.repository_owner }}
         password: ${{ secrets.GHCR_TOKEN }}
-    - name: Login to Docker Hub
+    - if: ${{ env.PUSH_PACKAGES == 'true' }}
+      name: Login to Docker Hub
       uses: docker/login-action@v1
       with:
         username: ${{ secrets.DOCKER_USERNAME }}
@@ -37,14 +47,16 @@ jobs:
     - name: Build image
       uses: docker/build-push-action@v2
       with:
-        push: true
+        push: ${{ env.PUSH_PACKAGES == 'true' }}
         builder: ${{ steps.buildx.outputs.name }}
         context: .
         file: ./scripts/docker/ubuntu/Dockerfile
         platforms: linux/amd64
         tags: |
           docker.io/pdal/pdal:${{ steps.prep.outputs.VERSION }}
+          docker.io/pdal/pdal:latest
           ghcr.io/pdal/pdal:${{ steps.prep.outputs.VERSION }}
+          ghcr.io/pdal/pdal:latest
         labels: |
           org.opencontainers.image.title=${{ github.event.repository.name }}
           org.opencontainers.image.description=${{ github.event.repository.description }}
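The `PUSH_PACKAGES` expression above evaluates to a boolean that GitHub Actions renders as the string "true" or "false" when read back through `env`, which is why the steps compare against the string 'true'. A minimal sketch of the same gating pattern in isolation (workflow and step names are illustrative, not part of this changeset):

    name: gated-push
    on: [push, pull_request]
    jobs:
      build:
        runs-on: ubuntu-latest
        env:
          # "true" only for a push event on the master branch
          PUSH_PACKAGES: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
        steps:
          - uses: actions/checkout@v2
          - if: ${{ env.PUSH_PACKAGES == 'true' }}
            name: Login
            run: echo "would log in to the registry here"
          - name: Build
            run: echo "build runs on every event; push=${PUSH_PACKAGES}"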
diff --git a/doc/index.rst b/doc/index.rst
index 3ac156e793..eaee7dc1ba 100755
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -28,10 +28,10 @@ The entire website is available as a single PDF at http://pdal.io/PDAL.pdf
 News
 --------------------------------------------------------------------------------
 
-**03-21-2020**
+**09-09-2020**
 ................................................................................
 
-PDAL 2.1.0 has been released. You can :ref:`download <download>` the source
+PDAL 2.2.0 has been released. You can :ref:`download <download>` the source
 code or follow the :ref:`quickstart <quickstart>` to get going in a hurry with
 Conda.
diff --git a/doc/stages/readers.i3s.rst b/doc/stages/readers.i3s.rst
index 7113092907..553fdcda7b 100755
--- a/doc/stages/readers.i3s.rst
+++ b/doc/stages/readers.i3s.rst
@@ -3,7 +3,9 @@
 readers.i3s
 ===========
 
-`Indexed 3d Scene Layer (I3S)`_ is a specification created by Esri as a format for their 3D Scene Layer and scene services. The I3S reader handles RESTful webservices in an I3S file structure/format.
+`Indexed 3d Scene Layer (I3S)`_ is a specification created by Esri as a format for their
+3D Scene Layer and scene services. The I3S reader handles RESTful webservices in an I3S
+file structure/format.
 
 Example
 --------------------------------------------------------------------------------
@@ -15,7 +17,25 @@ This example will download the Autzen dataset from the ArcGIS scene server and o
 [
     {
         "type": "readers.i3s",
         "filename": "https://tiles.arcgis.com/tiles/8cv2FuXuWSfF0nbL/arcgis/rest/services/AUTZEN_LiDAR/SceneServer",
-        "bounds": "([-123.075542,-123.06196],[44.049719,44.06278])"
+        "obb": {
+            "center": [
+                636590,
+                849216,
+                460
+            ],
+            "halfSize": [
+                590,
+                281,
+                60
+            ],
+            "quaternion":
+            [
+                0,
+                0,
+                0,
+                1
+            ]
+        }
     }
 ]
@@ -23,8 +43,7 @@ This example will download the Autzen dataset from the ArcGIS scene server and o
 
     pdal translate i3s://https://tiles.arcgis.com/tiles/8cv2FuXuWSfF0nbL/arcgis/rest/services/AUTZEN_LiDAR/SceneServer \
         autzen.las \
-        --readers.i3s.threads=64 \
-        --readers.i3s.bounds="([-123.075542,-123.06196],[44.049719,44.06278])"
+        --readers.i3s.threads=64
 
 Options
 --------------------------------------------------------------------------------
@@ -43,10 +62,9 @@ threads
 
     Example: ``--readers.i3s.threads=64``
 
-bounds
-    The bounds refers to the extents of the resource in X, Y, Z coordinates with the Z dimension being optional. This must be input as a string.
-
-    Example: ``readers.i3s.bounds="([xmin,xmax],[ymin,ymax],[zmin,zmax])"``
+obb
+    An oriented bounding box used to filter the data being retrieved. The obb
+    is specified as JSON exactly as described by the `I3S specification`_.
 
 dimensions
     Comma-separated list of dimensions that should be read. Specify the
@@ -74,3 +92,4 @@ min_density and max_density
     Example: ``--readers.i3s.min_density=2 --readers.i3s.max_density=2.5``
 
 .. _Indexed 3d Scene Layer (I3S): https://github.com/Esri/i3s-spec/blob/master/format/Indexed%203d%20Scene%20Layer%20Format%20Specification.md
+.. _I3S specification: https://github.com/Esri/i3s-spec/blob/master/docs/2.0/obb.cmn.md
diff --git a/doc/stages/readers.las.rst b/doc/stages/readers.las.rst
index e32c2371fc..f1f4d1febd 100755
--- a/doc/stages/readers.las.rst
+++ b/doc/stages/readers.las.rst
@@ -103,3 +103,10 @@ compression
     support for the decompressor being requested. The LazPerf decompressor
     doesn't support version 1 LAZ files or version 1.4 of LAS.
     [Default: 'none']
+
+ignore_vlr
+    A comma-separated list of "userid/record_id" pairs specifying VLR records that should
+    not be loaded.
+
+fix_dims
+    Make invalid dimension names valid by converting disallowed characters to '_'. Only
+    applies to names specified in an extra-bytes VLR. [Default: true]
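The two new ``readers.las`` options can be combined in a pipeline. A hypothetical fragment (the userid/record_id pair and file names are made up for illustration; ``fix_dims`` defaults to true):

    [
        {
            "type": "readers.las",
            "filename": "input.las",
            "ignore_vlr": "Merrick/100",
            "fix_dims": false
        },
        "output.las"
    ]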
diff --git a/doc/stages/readers.slpk.rst b/doc/stages/readers.slpk.rst
index 962a96b8ce..c53c191368 100755
--- a/doc/stages/readers.slpk.rst
+++ b/doc/stages/readers.slpk.rst
@@ -20,15 +20,31 @@ through PDAL's command line interface or through the pipeline.
 [
     {
         "type": "readers.slpk",
         "filename": "PDAL/test/data/i3s/SMALL_AUTZEN_LAS_All.slpk",
-        "bounds": "([-123.075542,-123.06196],[44.049719,44.06278])"
+        "obb": {
+            "center": [
+                636590,
+                849216,
+                460
+            ],
+            "halfSize": [
+                590,
+                281,
+                60
+            ],
+            "quaternion":
+            [
+                0,
+                0,
+                0,
+                1
+            ]
+        }
     }
 ]
 
 ::
 
-    pdal traslate PDAL/test/data/i3s/SMALL_AUTZEN_LAS_All.slpk \
-    autzen.las \
-    --readers.slpk.bounds="([-123.075542,-123.06196],[44.049719,44.06278])"``
+    pdal translate PDAL/test/data/i3s/SMALL_AUTZEN_LAS_All.slpk autzen.las
 
 Options
@@ -38,12 +54,9 @@ filename
 
 .. include:: reader_opts.rst
 
-bounds
-    The bounds refers to the extents of the resource in X, Y, Z
-    coordinates with the Z dimension being optional. This must be input
-    as a string.
-
-    Example: ``readers.slpk.bounds="([xmin,xmax],[ymin,ymax],[zmin,zmax])"``
+obb
+    An oriented bounding box used to filter the data being retrieved. The obb
+    is specified as JSON exactly as described by the `I3S specification`_.
 
 dimensions
     Comma-separated list of dimensions that should be read. Specify the
@@ -76,3 +89,4 @@ min_density and max_density
     Example: ``--readers.slpk.min_density=2 --readers.slpk.max_density=2.5``
 
 .. _Scene Layer Packages (SLPK): https://github.com/Esri/i3s-spec/blob/master/format/Indexed%203d%20Scene%20Layer%20Format%20Specification.md#_8_1
+.. _I3S specification: https://github.com/Esri/i3s-spec/blob/master/docs/2.0/obb.cmn.md
diff --git a/doc/tutorial/iowa-entwine.rst b/doc/tutorial/iowa-entwine.rst
index 8301ec76ac..de28c92f80 100755
--- a/doc/tutorial/iowa-entwine.rst
+++ b/doc/tutorial/iowa-entwine.rst
@@ -110,6 +110,12 @@ it a URL to the root of the resource in the ``filename`` option, and we also
 give it a ``bounds`` object to define the window in which we should select
 data from.
 
+.. note::
+
+    The full URL to the EPT root file (``ept.json``) must be given
+    to the filename parameter for PDAL 2.2+. This was a change in
+    behavior of the :ref:`readers.ept` driver.
+
 The ``bounds`` object is in the form ``([minx, maxx], [miny, maxy])``.
 
 .. warning::
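Following the note above, a ``readers.ept`` stage for PDAL 2.2+ should point at the ``ept.json`` file itself rather than the directory that contains it. A sketch (the URL and bounds values here are illustrative):

    {
        "type": "readers.ept",
        "filename": "https://example.com/iowa/ept.json",
        "bounds": "([-10425171.94, -10423171.94], [5164494.71, 5166494.71])"
    }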
diff --git a/filters/StatsFilter.cpp b/filters/StatsFilter.cpp
index bbffe41ea7..070003dc15 100644
--- a/filters/StatsFilter.cpp
+++ b/filters/StatsFilter.cpp
@@ -114,9 +114,8 @@ void Summary::computeGlobalStats()
 {
     auto compute_median = [](std::vector<double> vals)
     {
-        std::nth_element(vals.begin(), vals.begin()+vals.size()/2, vals.end());
-
-        return *(vals.begin()+vals.size()/2);
+        std::nth_element(vals.begin(), vals.begin() + vals.size() / 2, vals.end());
+        return *(vals.begin() + vals.size() / 2);
     };
 
     // TODO add quantiles
@@ -124,11 +123,51 @@ void Summary::computeGlobalStats()
     std::transform(m_data.begin(), m_data.end(), m_data.begin(),
         [this](double v) { return std::fabs(v - this->m_median); });
     m_mad = compute_median(m_data);
+}
 
+// Math comes from https://prod.sandia.gov/techlib-noauth/access-control.cgi/2008/086212.pdf
+// (Pebay paper from Sandia labs, 2008)
+bool Summary::merge(const Summary& s)
+{
+    if ((m_name != s.m_name) || (m_enumerate != s.m_enumerate) || (m_advanced != s.m_advanced))
+        return false;
+
+    double n1 = m_cnt;
+    double n2 = s.m_cnt;
+    double n = n1 + n2;
+    double nsq = n * n;
+    double n1n2 = m_cnt * s.m_cnt;
+    double n1sq = n1 * n1;
+    double n2sq = n2 * n2;
+    double ncube = n * n * n;
+    double deltaMean = s.M1 - M1;
+
+    if (n == 0)
+        return true;
+
+    double m1 = M1 + s.m_cnt * deltaMean / n;
+    double m2 = M2 + s.M2 + n1n2 * std::pow(deltaMean, 2) / n;
+    double m3 = M3 + s.M3 + n1n2 * (n1 - n2) * std::pow(deltaMean, 3) / nsq +
+        3 * (n1 * s.M2 - n2 * M2) * deltaMean / n;
+    double m4 = M4 + s.M4 +
+        n1n2 * (n1sq - n1n2 + n2sq) * std::pow(deltaMean, 4) / ncube +
+        6 * (n1sq * s.M2 + n2sq * M2) * std::pow(deltaMean, 2) / nsq +
+        4 * (n1 * s.M3 - n2 * M3) * deltaMean / n;
+
+    M1 = m1;
+    M2 = m2;
+    M3 = m3;
+    M4 = m4;
+    m_min = (std::min)(m_min, s.m_min);
+    m_max = (std::max)(m_max, s.m_max);
+    m_cnt = s.m_cnt + m_cnt;
+    m_data.insert(m_data.begin(), s.m_data.begin(), s.m_data.end());
+    for (auto p : s.m_values)
+        m_values[p.first] += p.second;
+    return true;
 }
-
 } // namespace stats
 
 using namespace stats;
diff --git a/filters/StatsFilter.hpp b/filters/StatsFilter.hpp
index 892b1d526f..c0fdd09254 100644
--- a/filters/StatsFilter.hpp
+++ b/filters/StatsFilter.hpp
@@ -61,6 +61,9 @@ typedef std::vector<double> DataVector;
         m_name(name), m_enumerate(enumerate), m_advanced(advanced)
     { reset(); }
 
+    // Merge another summary with this one. 'name', 'enumerate' and 'advanced' must match
+    // or false is returned and no merge occurs.
+    bool merge(const Summary& s);
     double minimum() const
         { return m_min; }
     double maximum() const
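The ``Summary::merge()`` added above combines third and fourth moments, but the core of the Pebay-style pairwise update is easiest to see with just count, mean and M2 (the centered sum of squares that yields variance). A self-contained sketch under that simplification (the class and names are illustrative, not PDAL API):

    #include <cassert>
    #include <cmath>
    #include <iostream>

    struct RunningStats
    {
        double n = 0, M1 = 0, M2 = 0;

        void insert(double v)
        {
            n += 1;
            double delta = v - M1;
            M1 += delta / n;
            M2 += delta * (v - M1);   // Welford's online update
        }

        // Combine two partial summaries; mirrors the n1n2 * delta^2 / n term above.
        void merge(const RunningStats& s)
        {
            double total = n + s.n;
            if (total == 0)
                return;
            double delta = s.M1 - M1;             // difference of the two means
            M2 = M2 + s.M2 + n * s.n * delta * delta / total;
            M1 = M1 + s.n * delta / total;
            n = total;
        }

        double populationVariance() const
            { return n ? M2 / n : 0; }
    };

    int main()
    {
        RunningStats a, b, whole;
        for (int i = 0; i < 1000; ++i)
        {
            double v = std::sin(i) * 100;         // arbitrary deterministic data
            (i < 500 ? a : b).insert(v);
            whole.insert(v);
        }
        a.merge(b);
        // Merging the two halves matches a single pass over all the data.
        assert(std::fabs(a.populationVariance() - whole.populationVariance()) < 1e-6);
        std::cout << "merged variance = " << a.populationVariance() << "\n";
    }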
diff --git a/io/GeotiffSupport.cpp b/io/GeotiffSupport.cpp
index 5332c8d802..33d5b43641 100644
--- a/io/GeotiffSupport.cpp
+++ b/io/GeotiffSupport.cpp
@@ -39,6 +39,33 @@
 #include
 #include
 
+namespace pdal
+{
+
+
+// Utility functor with accompanying callback to print a GeoTIFF directory.
+struct geotiff_dir_printer
+{
+    geotiff_dir_printer() {}
+
+    std::string output() const { return m_oss.str(); }
+    std::string::size_type size() const { return m_oss.str().size(); }
+
+    void operator()(char* data, void* /*aux*/)
+    {
+        if (0 != data)
+        {
+            m_oss << data;
+        }
+    }
+
+private:
+    std::stringstream m_oss;
+};
+
+}
+
 PDAL_C_START
 
 // These functions are available from GDAL, but they
@@ -47,13 +74,19 @@
 char PDAL_DLL * GTIFGetOGISDefn(GTIF*, GTIFDefn*);
 int PDAL_DLL GTIFSetFromOGISDefn(GTIF*, const char*);
 void VSIFree(void *data);
 
+int PDALGeoTIFFPrint(char* data, void* aux)
+{
+    pdal::geotiff_dir_printer* printer = reinterpret_cast<pdal::geotiff_dir_printer*>(aux);
+    (*printer)(data, 0);
+    return static_cast<int>(printer->size());
+}
+
 PDAL_C_END
 
 #include
 
 namespace pdal
 {
-
 namespace
 {
@@ -152,6 +185,12 @@ GeotiffSrs::GeotiffSrs(const std::vector<uint8_t>& directoryRec,
             VSIFree(wkt);
         }
     }
+
+    geotiff_dir_printer geotiff_printer;
+    GTIFPrint(ctx.gtiff, PDALGeoTIFFPrint, &geotiff_printer);
+
+    m_gtiff_print_string = geotiff_printer.output();
+
 }
diff --git a/io/GeotiffSupport.hpp b/io/GeotiffSupport.hpp
index 9e9eebb87a..a4b7cb1e73 100644
--- a/io/GeotiffSupport.hpp
+++ b/io/GeotiffSupport.hpp
@@ -59,9 +59,14 @@ class GeotiffSrs
         const std::vector<uint8_t>& asciiRec, LogPtr log);
     SpatialReference srs() const
         { return m_srs; }
+
+    std::string const& gtiffPrintString()
+        { return m_gtiff_print_string; }
+
 private:
     SpatialReference m_srs;
     LogPtr m_log;
+    std::string m_gtiff_print_string;
 
     void validateDirectory(const Entry *ent, size_t numEntries,
         size_t numDoubles, size_t asciiSize);
diff --git a/io/LasHeader.cpp b/io/LasHeader.cpp
index 8dbd265613..02da92f0d7 100644
--- a/io/LasHeader.cpp
+++ b/io/LasHeader.cpp
@@ -291,7 +291,10 @@ void LasHeader::setSrsFromWkt()
     const char *c = vlr->data() + len - 1;
     if (*c == 0)
         len--;
-    m_srs.set(std::string(vlr->data(), len));
+    std::string wkt(vlr->data(), len);
+    // Strip any excess NULL bytes from the WKT.
+    wkt.erase(std::find(wkt.begin(), wkt.end(), '\0'), wkt.end());
+    m_srs.set(wkt);
 }
 
 
@@ -327,6 +330,7 @@ void LasHeader::setSrsFromGeotiff()
     std::vector<uint8_t> asciiRec(data, data + dataLen);
 
     GeotiffSrs geotiff(directoryRec, doublesRec, asciiRec, m_log);
+    m_geotiff_print = geotiff.gtiffPrintString();
     SpatialReference gtiffSrs = geotiff.srs();
     if (!gtiffSrs.empty())
         m_srs = gtiffSrs;
diff --git a/io/LasHeader.hpp b/io/LasHeader.hpp
index 4920056eac..2581908fce 100644
--- a/io/LasHeader.hpp
+++ b/io/LasHeader.hpp
@@ -378,6 +378,9 @@ class PDAL_DLL LasHeader
     SpatialReference srs() const
         { return m_srs; }
 
+    std::string geotiffPrint()
+        { return m_geotiff_print; }
+
     void setSummary(const LasSummaryData& summary);
     bool valid() const;
     Dimension::IdList usedDims() const;
@@ -419,6 +422,7 @@ class PDAL_DLL LasHeader
     std::string m_compressionInfo;
     LogPtr m_log;
     SpatialReference m_srs;
+    std::string m_geotiff_print;
     VlrList m_vlrs;
     VlrList m_eVlrs;
diff --git a/io/LasReader.cpp b/io/LasReader.cpp
index b114a55e26..2d6da560f3 100644
--- a/io/LasReader.cpp
+++ b/io/LasReader.cpp
@@ -48,7 +48,6 @@
 #include
 #include
 
-#include "GeotiffSupport.hpp"
 #include "LasHeader.hpp"
 #include "LasVLR.hpp"
 
@@ -87,6 +86,8 @@ void LasReader::addArgs(ProgramArgs& args)
         m_useEbVlr);
     args.add("ignore_vlr", "VLR userid/recordid to ignore", m_ignoreVLROption);
     args.add("start", "Point at which reading should start (0-indexed).", m_start);
+    args.add("fix_dims", "Make invalid dimension names valid by changing "
+        "invalid characters to '_'", m_fixNames, true);
 }
 
@@ -449,6 +450,9 @@ void LasReader::extractHeaderMetadata(MetadataNode& forward, MetadataNode& m)
         m_header.pointCount(), "This field contains the total "
         "number of point records within the file.");
 
+    m.add("gtiff", m_header.geotiffPrint(),
+        "GTifPrint output of GeoTIFF keys");
+
     // PDAL metadata VLR
     const LasVLR *vlr = m_header.findVlr("PDAL", 12);
     if (vlr)
@@ -592,6 +596,8 @@ void LasReader::addDimensions(PointLayoutPtr layout)
             continue;
         if (dim.m_dimType.m_xform.nonstandard())
             type = Dimension::Type::Double;
+        if (m_fixNames)
+            dim.m_name = Dimension::fixName(dim.m_name);
         dim.m_dimType.m_id = layout->registerOrAssignDim(dim.m_name, type);
     }
 }
diff --git a/io/LasReader.hpp b/io/LasReader.hpp
index 7ce0c34984..06ed1b8796 100644
--- a/io/LasReader.hpp
+++ b/io/LasReader.hpp
@@ -117,6 +117,7 @@ class PDAL_DLL LasReader : public Reader, public Streamable
     StringList m_ignoreVLROption;
     bool m_useEbVlr;
     point_count_t m_start;
+    bool m_fixNames;
 
     virtual void addArgs(ProgramArgs& args);
     virtual void initialize(PointTableRef table)
diff --git a/pdal/SpatialReference.cpp b/pdal/SpatialReference.cpp
index df535939b1..8cce68d26e 100644
--- a/pdal/SpatialReference.cpp
+++ b/pdal/SpatialReference.cpp
@@ -485,7 +485,7 @@ std::string SpatialReference::getWKT1() const
     if (srs)
     {
         char *buf = nullptr;
-        const char* apszOptions[] = { "FORMAT=WKT1_GDAL", nullptr };
+        const char* apszOptions[] = { "FORMAT=WKT1_GDAL", "ALLOW_ELLIPSOIDAL_HEIGHT_AS_VERTICAL_CRS=YES", nullptr };
         srs->exportToWkt(&buf, apszOptions);
 
         if (buf)
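The ``fix_dims`` handling added to ``LasReader::addDimensions()`` renames extra-bytes dimensions whose names contain disallowed characters. A rough standalone approximation of that renaming (this is not the actual ``Dimension::fixName()`` implementation, which may apply additional rules):

    #include <cctype>
    #include <iostream>
    #include <string>

    // Keep letters, digits and underscores; replace anything else with '_'.
    std::string fixNameSketch(std::string name)
    {
        for (char& c : name)
            if (!std::isalnum(static_cast<unsigned char>(c)) && c != '_')
                c = '_';
        return name;
    }

    int main()
    {
        std::cout << fixNameSketch("Reflectance (dB)") << "\n";  // prints "Reflectance__dB_"
    }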
"$PLATFORM"; then + CI_PLAT="linux" fi -if [ "$PLATFORM" == "macos-latest" ]; then - CI_SUPPORT="osx_64_.yaml" +if grep -q "macos" <<< "$PLATFORM"; then + CI_PLAT="osx" fi -conda build recipe --clobber-file recipe/recipe_clobber.yaml --output-folder packages -m .ci_support/$CI_SUPPORT -conda install -c ./packages pdal +conda build recipe --clobber-file recipe/recipe_clobber.yaml --output-folder packages -m ".ci_support/${CI_PLAT}_64_.yaml" +conda create -y -n test -c ./packages python=3.8 pdal +conda deactivate +conda activate test pdal --version +conda deactivate diff --git a/scripts/ci/conda/examples.sh b/scripts/ci/conda/examples.sh index 6bd005fb0d..c6b3807bc0 100755 --- a/scripts/ci/conda/examples.sh +++ b/scripts/ci/conda/examples.sh @@ -2,9 +2,11 @@ export BASE=`pwd` +conda activate test +conda install compilers cmake ninja -y + if [ "$PLATFORM" == "windows-latest" ]; then -conda install compilers ninja -y export CC=cl.exe export CXX=cl.exe where cl diff --git a/scripts/ci/conda/setup.sh b/scripts/ci/conda/setup.sh index 83ab1a55f2..a35bb2438d 100755 --- a/scripts/ci/conda/setup.sh +++ b/scripts/ci/conda/setup.sh @@ -1,7 +1,6 @@ #!/bin/bash -conda update -n base -c defaults conda -conda install conda-build ninja compilers -y +conda install -c conda-forge conda-build anaconda-client -y pwd ls git clone https://github.com/conda-forge/pdal-feedstock.git diff --git a/scripts/ci/conda/upload.sh b/scripts/ci/conda/upload.sh new file mode 100755 index 0000000000..d84dd80115 --- /dev/null +++ b/scripts/ci/conda/upload.sh @@ -0,0 +1,40 @@ +#!/bin/bash + + +if [ -z "${ANACONDA_TOKEN+x}" ] +then + echo "Anaconda token is not set, not uploading" + exit 0; +fi + +ls +pwd +find . + +if [ -z "${ANACONDA_TOKEN}" ] +then + echo "Anaconda token is empty, not uploading" + exit 0; +fi + +export CI_PLAT="" +if grep -q "windows" <<< "$PLATFORM"; then + CI_PLAT="win" +fi + +if grep -q "ubuntu" <<< "$PLATFORM"; then + CI_PLAT="linux" +fi + +if grep -q "macos" <<< "$PLATFORM"; then + CI_PLAT="osx" +fi + + + +echo "Anaconda token is available, attempting to upload" + +conda install -c conda-forge anaconda-client -y + +find . -name "*pdal*.bz2" -exec anaconda -t "$ANACONDA_TOKEN" upload --force --no-progress --user pdal-master {} \; + diff --git a/scripts/docker/ubuntu/Dockerfile b/scripts/docker/ubuntu/Dockerfile index 77c654a193..7e49e97fcf 100644 --- a/scripts/docker/ubuntu/Dockerfile +++ b/scripts/docker/ubuntu/Dockerfile @@ -2,8 +2,6 @@ FROM pdal/ubuntubase:latest as builder MAINTAINER Andrew Bell SHELL ["/bin/bash", "-c"] -ENV CC gcc-6 -ENV CXX g++-6 RUN . /opt/conda/etc/profile.d/conda.sh && \ conda activate pdal && \ @@ -43,7 +41,7 @@ RUN . /opt/conda/etc/profile.d/conda.sh && \ cd pdal/build && \ ninja install -RUN apt-get -y remove unzip bzip2 gcc-6 g++-6 ca-certificates && \ +RUN apt-get -y remove unzip bzip2 gcc g++ ca-certificates && \ apt-get -y autoremove RUN . 
diff --git a/scripts/docker/ubuntu/Dockerfile b/scripts/docker/ubuntu/Dockerfile
index 77c654a193..7e49e97fcf 100644
--- a/scripts/docker/ubuntu/Dockerfile
+++ b/scripts/docker/ubuntu/Dockerfile
@@ -2,8 +2,6 @@ FROM pdal/ubuntubase:latest as builder
 MAINTAINER Andrew Bell <andrew.bell.ia@gmail.com>
 
 SHELL ["/bin/bash", "-c"]
-ENV CC gcc-6
-ENV CXX g++-6
 
 RUN . /opt/conda/etc/profile.d/conda.sh && \
     conda activate pdal && \
@@ -43,7 +41,7 @@ RUN . /opt/conda/etc/profile.d/conda.sh && \
     cd pdal/build && \
     ninja install
 
-RUN apt-get -y remove unzip bzip2 gcc-6 g++-6 ca-certificates && \
+RUN apt-get -y remove unzip bzip2 gcc g++ ca-certificates && \
     apt-get -y autoremove
 
 RUN . /opt/conda/etc/profile.d/conda.sh && \
diff --git a/test/unit/filters/StatsFilterTest.cpp b/test/unit/filters/StatsFilterTest.cpp
index f8a30e47c9..a6e16af6bd 100644
--- a/test/unit/filters/StatsFilterTest.cpp
+++ b/test/unit/filters/StatsFilterTest.cpp
@@ -34,6 +34,8 @@
 
 #include
 
+#include <random>
+
 #include
 #include
 #include
@@ -435,3 +437,70 @@ TEST(Stats, global)
     EXPECT_DOUBLE_EQ(statsZ.maximum(), 1000.0);
 }
 
+
+TEST(Stats, merge)
+{
+    std::mt19937 gen(314159);
+    {
+        std::uniform_real_distribution<> dis(0, 100000);
+        using SummaryPtr = std::unique_ptr<stats::Summary>;
+        std::array<SummaryPtr, 10> parts;
+
+        for (SummaryPtr& part : parts)
+            part.reset(new stats::Summary("test", stats::Summary::NoEnum, true));
+        stats::Summary whole("test", stats::Summary::NoEnum, true);
+
+        stats::Summary* part = parts[0].get();
+        for (size_t i = 0; i < 10000; ++i)
+        {
+            stats::Summary& part = *(parts[i / 1000].get());
+
+            double d = dis(gen);
+            whole.insert(d);
+            part.insert(d);
+        }
+
+        for (size_t i = 1; i < 10; ++i)
+            parts[0]->merge(*parts[i]);
+
+        stats::Summary& p = *parts[0];
+
+        EXPECT_DOUBLE_EQ(whole.minimum(), p.minimum());
+        EXPECT_DOUBLE_EQ(whole.maximum(), p.maximum());
+        EXPECT_FLOAT_EQ((float)whole.average(), (float)p.average());
+        EXPECT_FLOAT_EQ((float)whole.populationVariance(), (float)p.populationVariance());
+        EXPECT_FLOAT_EQ((float)whole.skewness(), (float)p.skewness());
+        EXPECT_FLOAT_EQ((float)whole.kurtosis(), (float)p.kurtosis());
+    }
+
+    {
+        std::uniform_int_distribution<> dis(0, 100);
+        using SummaryPtr = std::unique_ptr<stats::Summary>;
+        std::array<SummaryPtr, 10> parts;
+
+        for (SummaryPtr& part : parts)
+            part.reset(new stats::Summary("test", stats::Summary::Enumerate, false));
+        stats::Summary whole("test", stats::Summary::Enumerate, false);
+
+        stats::Summary* part = parts[0].get();
+        for (size_t i = 0; i < 10000; ++i)
+        {
+            stats::Summary& part = *(parts[i / 1000].get());
+
+            double d = dis(gen);
+            whole.insert(d);
+            part.insert(d);
+        }
+
+        for (size_t i = 1; i < 10; ++i)
+            parts[0]->merge(*parts[i]);
+
+        stats::Summary& p = *parts[0];
+
+        stats::Summary::EnumMap wm = whole.values();
+        stats::Summary::EnumMap pm = p.values();
+        EXPECT_EQ(wm.size(), pm.size());
+        for (size_t i = 0; i < 100; ++i)
+            EXPECT_EQ(wm[(double)i], pm[(double)i]);
+    }
+}