diff --git a/CMakeLists.txt b/CMakeLists.txt index be2f74bcd4..6169750b20 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -114,7 +114,6 @@ include(${PDAL_CMAKE_DIR}/geos.cmake) include(${PDAL_CMAKE_DIR}/geotiff.cmake) # Optional (not really) include(${PDAL_CMAKE_DIR}/lazperf.cmake) # Optional include(${PDAL_CMAKE_DIR}/laszip.cmake) # Optional -include(${PDAL_CMAKE_DIR}/pdaljni.cmake) # Optional include(${PDAL_CMAKE_DIR}/threads.cmake) include(${PDAL_CMAKE_DIR}/zlib.cmake) include(${PDAL_CMAKE_DIR}/lzma.cmake)
diff --git a/HOWTORELEASE.txt b/HOWTORELEASE.txt index de114efc2b..8b50666ea3 100644 --- a/HOWTORELEASE.txt +++ b/HOWTORELEASE.txt @@ -137,45 +137,3 @@ Release Process :: http://upload.osgeo.org/cgi-bin/osgeo4w-promote.sh - - -13) Publish JNI Bindings - What you need: - - an account on sonatype (https://issues.sonatype.org/secure/Signup!default.jspa) - - a ~/.sbt/1.0/sonatype.sbt file with the following content: - credentials += Credentials("Sonatype Nexus Repository Manager", - "oss.sonatype.org", - "", - "") - - The description of the Sonatype publishing process (everything described below is run from the java dir: cd PDAL/java): - - Publishing snapshots: - A snapshot can be published without a PGP signature; it is published to the snapshot repo and allows immediate snapshot updates. - To publish everything into a local repo use: - - ./scripts/publish-local.sh (publishes the scala 2.11 version) - - ./scripts/publish-local-212.sh (publishes the scala 2.12 version) - To publish everything into the sonatype snapshot repo use: - - ./scripts/publish-all.sh - Summary: - - Run ./scripts/publish-all.sh and everything is available in the snapshot repository - - Publishing releases: - To publish everything into the sonatype staging repo use: - - ./scripts/publish-all.sh --suffix="" --signed - `suffix` defines the version suffix (for example `--suffix="-RC1"`) - `signed` means that the jars will be uploaded into the sonatype staging repo with a PGP signature - Staging is a special repository holding artifacts in a pre-release state. - - Go to the staging repos panel: https://oss.sonatype.org/#stagingRepositories (log in using your sonatype user / pwd) - - Filter by package name (pdal in our case) and select the staging repo - - Press the Close button at the top of the repos table. It runs package - validation and closes the staging repo on success - - After a successful close, press the Release button.
It is then published immediately into the sonatype releases repo, - and synced with maven central in ~10 minutes; in ~2 hours it is indexed here: - http://search.maven.org/#search%7Cga%7C1%7Cio.pdal - Full official guide: http://central.sonatype.org/pages/ossrh-guide.html - Official sbt doc on deploying to sonatype: http://www.scala-sbt.org/release/docs/Using-Sonatype.html - The official sonatype guide has screenshots of (https://oss.sonatype.org/#stagingRepositories) and answers the question of what - to do after the jars are published into the staging repo (in our case, after the ./scripts/publish-all.sh --suffix="" --signed step) - Summary: - - Run ./scripts/publish-all.sh --suffix="" --signed to publish everything into the staging repo - - Go to the sonatype panel https://oss.sonatype.org/#stagingRepositories and release the jars - - Wait ~10 minutes for the jars to be published to maven central
diff --git a/cmake/pdaljni.cmake b/cmake/pdaljni.cmake deleted file mode 100644 index 45dab880b4..0000000000 --- a/cmake/pdaljni.cmake +++ /dev/null @@ -1,7 +0,0 @@ -option(WITH_PDAL_JNI - "Build PDAL JNI Bindings" FALSE) - -if (WITH_PDAL_JNI) - set(PDAL_BUILD TRUE) - add_subdirectory(${ROOT_DIR}/java/native/src) -endif()
diff --git a/java/.gitignore b/java/.gitignore deleted file mode 100644 index d3aff5f2f3..0000000000 --- a/java/.gitignore +++ /dev/null @@ -1,43 +0,0 @@ -# Operating System Files - -# *.DS_Store -Thumbs.db - -# Build Files - -bin -target -build/ -.gradle - -# Eclipse Project Files - -.classpath -.project -.settings - -# IntelliJ IDEA Files - -*.iml -*.ipr -*.iws -*.idea - -# Spring Bootstrap artifacts - -dependency-reduced-pom.xml -README.html - -# Sublime files - -*.sublime-workspace - -# Test data files # - -java/data - -# Compiled libs # - -java/*.dylib -java/*.so -java/*dll
diff --git a/java/README.md b/java/README.md deleted file mode 100644 index a327cc40ba..0000000000 --- a/java/README.md +++ /dev/null @@ -1,122 +0,0 @@ -# PDAL Java bindings - -[![Join the chat at https://gitter.im/PDAL/PDAL](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/PDAL/PDAL?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) - -Java bindings to use PDAL on the JVM (supports PDAL >= 1.4). - -## Using PDAL JNI with SBT - -```scala -// pdal is published to maven central, but you can use the following repos in addition -resolvers ++= Seq( - Resolver.sonatypeRepo("releases"), - Resolver.sonatypeRepo("snapshots") // for snapshots -) - -libraryDependencies ++= Seq( - "io.pdal" %% "pdal" % "1.6.0" -) -``` - -It's required to have the native JNI binary in `java.library.path`: - -```scala -// Mac OS X example with manual JNI installation -// Though it's strongly recommended to use WITH_PDAL_JNI during the PDAL build -// cp -f native/target/resource_managed/main/native/x86_64-darwin/libpdaljni.1.4.dylib /usr/local/lib/libpdaljni.1.4.dylib -// place the built binary into /usr/local/lib, and pass java.library.path to your JVM -javaOptions += "-Djava.library.path=/usr/local/lib" -``` - -## PDAL-Scala - -A Scala API to build pipeline expressions instead of writing raw JSON. - -```scala -libraryDependencies ++= Seq( - "io.pdal" %% "pdal-scala" % "1.6.0" -) -``` - -The Scala API covers PDAL 1.6.0 but is compatible with PDAL >= 1.4; for any custom DSL -that is not covered by the current Scala API you can use the `RawExpr` type to build a `Pipeline -Expression`.
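Beyond constructing the JSON (see the code examples below), a constructed pipeline can be executed directly through the JNI `Pipeline` class. This is a minimal sketch based on the API in this diff; the paths are placeholders, and PDAL plus the native `pdaljni` library are assumed to be installed and discoverable:

```scala
import io.pdal._
import io.pdal.pipeline._

// Build the pipeline with the DSL and hand it to the native PDAL runtime.
// `toPipeline` serialises the expression list to JSON and initialises the JNI Pipeline.
val pipeline = (LasRead("/path/to/las") ~ CropFilter() ~ LasWrite("/path/to/new/las")).toPipeline

pipeline.execute()                             // run the underlying PDAL pipeline
val metadata: String = pipeline.getMetadata()  // PDAL metadata as a JSON string
pipeline.dispose()                             // release the native handle when finished
```

`dispose()` frees the native handle held by the JNI wrapper, so call it once the results are no longer needed.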
- -### Code examples - -```scala -// To construct the expected json -val expected = - """ - |{ - | "pipeline" : [ - | { - | "filename" : "/path/to/las", - | "type" : "readers.las" - | }, - | { - | "type" : "filters.crop" - | }, - | { - | "filename" : "/path/to/new/las", - | "type" : "writers.las" - | } - | ] - |} - """.stripMargin - -// The same, but using the scala DSL -val pc: PipelineConstructor = LasRead("/path/to/las") ~ CropFilter() ~ LasWrite("/path/to/new/las") - -// The same, but using RawExpr, to support PDAL Pipeline API features not covered by the DSL -// RawExpr accepts a circe.Json type, which can be a json object of any desired complexity -val pcWithRawExpr = LasRead("/path/to/las") ~ RawExpr(Map("type" -> "filters.crop").asJson) ~ LasWrite("/path/to/new/las") -``` - -## How to compile - -For development purposes (including binaries): - 1. Install PDAL (using brew / package managers (unix) / build from sources / etc) _without_ the `-DWITH_PDAL_JNI=ON` flag - 2. Build the native libs with `./sbt native/nativeCompile` (optional: the binaries are also built during the test run) - 3. Run `./sbt core/test` to run the PDAL tests - -For Java-only development purposes: - 1. Provide `$LD_LIBRARY_PATH` or `$DYLD_LIBRARY_PATH` - 2. If you don't want to set a global variable you can pass `-Djava.library.path=` to sbt: - `./sbt -Djava.library.path=` - 3. Set `PDAL_DEPEND_ON_NATIVE=false` (to disable the `native` project build) - 4. Run `PDAL_DEPEND_ON_NATIVE=false ./sbt` - -Finally, the possible commands to launch sbt and build the PDAL JNI bindings are: - -```bash -# Including the binaries build -# WARN: PDAL should be built without the `-DWITH_PDAL_JNI=ON` flag -./sbt -``` - -```bash -# Java-side development without the binaries build -# WARN: PDAL should be built with the `-DWITH_PDAL_JNI=ON` flag -PDAL_DEPEND_ON_NATIVE=false ./sbt -Djava.library.path= -``` - -### Possible issues and solutions - -1. If PDAL is not installed globally, change [this](./java/native/src/CMakeLists.txt#L25) line to: - - ```cmake - set(CMAKE_CXX_FLAGS "$ENV{PDAL_LD_FLAGS} $ENV{PDAL_CXX_FLAGS} -std=c++11") - ``` - In this case, launch sbt as follows: - - ```bash - PDAL_LD_FLAGS=`pdal-config --libs` PDAL_CXX_FLAGS=`pdal-config --includes` ./sbt - ``` - -2.
Sometimes a bad dynamic linking issue can occur (e.g. a spoiled environment); - the quick workaround is to replace [this](./java/native/src/CMakeLists.txt#L25) line with: - - ```cmake - set(CMAKE_CXX_FLAGS "-L -std=c++11") - ```
diff --git a/java/build.sbt b/java/build.sbt deleted file mode 100644 index e4bec53c3a..0000000000 --- a/java/build.sbt +++ /dev/null @@ -1,81 +0,0 @@ -name := "pdal-jni" - -lazy val commonSettings = Seq( - version := "1.7.0" + Environment.versionSuffix, - scalaVersion := "2.11.11", - crossScalaVersions := Seq("2.12.4", "2.11.11"), - organization := "io.pdal", - description := "PDAL JNI bindings", - licenses := Seq("BSD" -> url("https://github.com/PDAL/PDAL/blob/master/LICENSE.txt")), - homepage := Some(url("http://www.pdal.io")), - publishMavenStyle := true, - pomIncludeRepository := { _ => false }, - scalacOptions ++= Seq( - "-deprecation", - "-unchecked", - "-language:implicitConversions", - "-language:reflectiveCalls", - "-language:higherKinds", - "-language:postfixOps", - "-language:existentials", - "-feature" - ), - test in assembly := {}, - shellPrompt := { s => Project.extract(s).currentProject.id + " > " }, - commands ++= Seq( - Commands.processJavastyleCommand("publish"), - Commands.processJavastyleCommand("publishSigned") - ), - publishArtifact in Test := false, - publishTo := { - val nexus = "https://oss.sonatype.org/" - if (isSnapshot.value) - Some("snapshots" at nexus + "content/repositories/snapshots") - else - Some("releases" at nexus + "service/local/staging/deploy/maven2") - }, - pomExtra := ( - <scm> - <url>git@github.com:PDAL/PDAL.git</url> - <connection>scm:git:git@github.com:PDAL/PDAL.git</connection> - </scm> - <developers> - <developer> - <id>pomadchin</id> - <name>Grigory Pomadchin</name> - <url>http://github.com/pomadchin/</url> - </developer> - </developers> - ) -) - -lazy val root = (project in file(".")) - .settings(commonSettings: _*) - .aggregate(`core-scala`, core, native) - -lazy val `core-scala` = project - .settings(commonSettings: _*) - .settings(name := "pdal-scala") - .settings(target in javah := (sourceDirectory in nativeCompile in native).value / "include") - .settings(libraryDependencies ++= Seq( - Dependencies.circeCore, - Dependencies.circeGeneric, - Dependencies.circeGenericExtras, - Dependencies.circeParser, - Dependencies.jtsCore, - Dependencies.scalaTest % Test - )) - .settings(headerLicense := Some(HeaderLicense.ALv2("2017", "Azavea"))) - .settings(licenses := Seq("Apache-2.0" -> url("https://www.apache.org/licenses/LICENSE-2.0.html"))) - .dependsOn(core) - -lazy val core = project - .settings(commonSettings: _*) - .settings(name := "pdal") - .settings(target in javah := (sourceDirectory in nativeCompile in native).value / "include") - .settings(libraryDependencies += Dependencies.scalaTest % Test) - .dependsOn(Environment.dependOnNative(native % Runtime): _*) - -lazy val native = project - .settings(sourceDirectory in nativeCompile := sourceDirectory.value) - .enablePlugins(JniNative)
diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/ExprType.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/ExprType.scala deleted file mode 100644 index 74d3684fd6..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/ExprType.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.pdal.pipeline - -import scala.util.Try - -trait ExprType { - val `type`: String - lazy val name = s"${`type`}.${this.getClass.getName.split("\\$").last}" - - override def toString = name -} - -object ExprType { - def fromName(name: String): ExprType = - Try(FilterTypes.fromName(name)) - .getOrElse(Try(ReaderTypes.fromName(name)) - .getOrElse(Try(WriterTypes.fromName(name)) - .getOrElse(throw new Exception(s"ExprType $name is not supported.")))) -} diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/FilterTypes.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/FilterTypes.scala deleted file mode 100644 index 7c3b401812..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/FilterTypes.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.pdal.pipeline - -sealed trait FilterType extends ExprType { val `type` = "filters" } - -object FilterTypes { - case object approximatecoplanar extends FilterType - case object chipper extends FilterType - case object cluster extends FilterType - case object colorinterp extends FilterType - case object colorization extends FilterType - case object computerange extends FilterType - case object cpd extends FilterType - case object crop extends FilterType - case object decimation extends FilterType - case object divider extends FilterType - case object eigenvalues extends FilterType - case object estimaterank extends FilterType - case object ferry extends FilterType - case object greedyprojection extends FilterType - case object gridprojection extends FilterType - case object groupby extends FilterType - case object hag extends FilterType - case object head extends FilterType - case object hexbin extends FilterType - case object icp extends FilterType - case object iqr extends FilterType - case object kdistance extends FilterType - case object locate extends FilterType - case object lof extends FilterType - case object mad extends FilterType - case object matlab extends FilterType - case object merge extends FilterType - case object mongus extends FilterType - case object mortonorder extends FilterType - case object movingleastsquares extends FilterType - case object normal extends FilterType - case object overlay extends FilterType - case object outlier extends FilterType - case object pclblock extends FilterType - case object pmf extends FilterType - case object poisson extends FilterType - case object python extends FilterType - case object radialdensity extends FilterType - case object range extends FilterType - case object randomize extends FilterType - case object reprojection extends FilterType - case object sample extends FilterType - case object smrf extends FilterType - case object sort extends FilterType - case object splitter extends FilterType - case object stats extends FilterType - case object tail extends FilterType - case object transformation extends FilterType - case object voxelcenternearestneighbor extends FilterType - case object voxelcentroidnearestneighbor extends FilterType - case object voxelgrid extends FilterType - - lazy val all = List( - approximatecoplanar, chipper, cluster, colorinterp, colorization, computerange, crop, - cpd, decimation, divider, eigenvalues, estimaterank, ferry, greedyprojection, gridprojection, groupby, - hag, head, hexbin, icp, iqr, kdistance, locate, lof, mad, matlab, merge, mongus, mortonorder, movingleastsquares, - normal, outlier, overlay, pclblock, pmf, poisson, python, radialdensity, randomize, range, reprojection, - sample, smrf, sort, splitter, stats, transformation, voxelcenternearestneighbor, voxelcentroidnearestneighbor, - voxelgrid - ) - - def fromName(name: String): FilterType = - all.find(_.name == name).getOrElse(throw new Exception(s"FilterType $name is not supported.")) -} \ No newline at end of file diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/Implicits.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/Implicits.scala deleted file mode 100644 index f9567eb046..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/Implicits.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.pdal.pipeline - -import io.pdal.PointCloud -import com.vividsolutions.jts.geom.Coordinate - -object Implicits extends Implicits - -trait Implicits extends Serializable { - implicit class withPointCloudMethods(pointCloud: PointCloud) { - def getCoordinate(i: Int) = - new Coordinate(pointCloud.getX(i), pointCloud.getY(i), pointCloud.getZ(i)) - } -} diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/PipelineExpressions.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/PipelineExpressions.scala deleted file mode 100644 index 74091c9f9b..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/PipelineExpressions.scala +++ /dev/null @@ -1,844 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.pdal.pipeline - -import io.pdal.Pipeline - -import io.circe.Json -import io.circe.generic.extras.ConfiguredJsonCodec - -@ConfiguredJsonCodec -sealed trait PipelineExpr { - def ~(other: PipelineExpr): PipelineConstructor = this :: other :: Nil - - def ~(other: Option[PipelineExpr]): PipelineConstructor = - other.fold(this :: Nil)(o => this :: o :: Nil) - - def toPipeline: Pipeline = (this :: Nil).toPipeline -} - -@ConfiguredJsonCodec -case class RawExpr(json: Json) extends PipelineExpr - -@ConfiguredJsonCodec -case class Read( - filename: String, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: Option[ReaderType] = None // usually auto derived by pdal -) extends PipelineExpr - -@ConfiguredJsonCodec -case class FauxRead( - numPoints: Int, - mode: String, // constant | random | ramp | uniform | normal - stdevX: Option[Int] = None, // [default: 1] - stdevY: Option[Int] = None, // [default: 1] - stdevZ: Option[Int] = None, // [default: 1] - meanX: Option[Int] = None, // [default: 0] - meanY: Option[Int] = None, // [default: 0] - meanZ: Option[Int] = None, // [default: 0] - bounds: Option[String] = None, // [default: unit cube] - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.faux -) extends PipelineExpr - -object GdalRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.gdal)) -} - -@ConfiguredJsonCodec -case class GeoWaveRead( - zookeeperUrl: String, - instanceName: String, - username: String, - password: String, - tableNamespace: String, - featureTypeName: Option[String] = None, // [default: PDAL_Point] - dataAdapter: Option[String] = None, // [default: FeatureCollectionDataAdapter] 
- pointsPerEntry: Option[String] = None, // [default: 5000u] - bounds: Option[String] = None, // [default: unit cube] - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.geowave -) extends PipelineExpr - -@ConfiguredJsonCodec -case class GreyhoundRead( - url: String, - bounds: Option[String] = None, // [default: the entire resource] - depthBegin: Option[Int] = None, // [default: 0] - depthEnd: Option[Int] = None, // [default: 0] - tilePath: Option[String] = None, - filter: Option[Json] = None, - threads: Option[Int] = None, // [default: 4] - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.greyhound -) extends PipelineExpr - -@ConfiguredJsonCodec -case class Ilvis2Read( - filename: String, - mapping: Option[String] = None, - metadata: Option[String] = None, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.ilvis2 -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MatlabRead( - filename: String, - struct: Option[String] = None, // [default: PDAL] - `type`: ReaderType = ReaderTypes.mbio -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MbioRead( - filename: String, - format: String, - `type`: ReaderType = ReaderTypes.mbio -) extends PipelineExpr - -@ConfiguredJsonCodec -case class LasRead( - filename: String, - extraDims: Option[String] = None, - compression: Option[String] = None, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.las -) extends PipelineExpr - -object MrsidRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.mrsid)) -} - -object NitfRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.nitf)) -} - -@ConfiguredJsonCodec -case class OciRead( - connection: String, - query: String, - xmlSchemaDump: Option[String] = None, - populatePointsourceid: Option[String] = None, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.oci -) extends PipelineExpr - -object OptechRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.optech)) -} - -object OsgRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.osg)) -} - -object PcdRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.pcd)) -} - -@ConfiguredJsonCodec -case class PgpointcloudRead( - connection: String, - table: String, - schema: Option[String] = None, // [default: public] - column: Option[String] = None, // [default: pa] - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.pgpointcloud -) extends PipelineExpr - -object PlyRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.ply)) -} - -object PtsRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: 
Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.pts)) -} - -@ConfiguredJsonCodec -case class QfitRead( - filename: String, - flipCoordinates: Option[Boolean] = None, - scaleZ: Option[Double] = None, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.qfit -) extends PipelineExpr - -@ConfiguredJsonCodec -case class RxpRead( - filename: String, - rdtp: Option[Boolean] = None, - syncToPps: Option[Boolean] = None, - minimal: Option[Boolean] = None, - reflectanceAsIntensity: Option[Boolean] = None, - minReflectance: Option[Double] = None, - maxReflectance: Option[Double] = None, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.rxp -) extends PipelineExpr - -object SbetRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.sbet)) -} - -@ConfiguredJsonCodec -case class SqliteRead( - connection: String, - query: String, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.sqlite -) extends PipelineExpr - -object TextRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.text)) -} - -@ConfiguredJsonCodec -case class TindexRead( - filename: String, - layerName: Option[String] = None, - srsColumn: Option[String] = None, - tindexName: Option[String] = None, - sql: Option[String] = None, - wkt: Option[String] = None, - boundary: Option[String] = None, - tSrs: Option[String] = None, - filterSrs: Option[String] = None, - where: Option[String] = None, - dialect: Option[String] = None, - spatialreference: Option[String] = None, - tag: Option[String] = None, - `type`: ReaderType = ReaderTypes.tindex -) extends PipelineExpr - -object TerrasolidRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.terrasolid)) -} - -object IceBridgeRead { - def apply(filename: String, spatialreference: Option[String] = None, tag: Option[String] = None): Read = - Read(filename, spatialreference, tag, Some(ReaderTypes.icebridge)) -} - -@ConfiguredJsonCodec -case class ApproximateCoplanarFilter( - knn: Option[Int] = None, // [default: 8] - thresh1: Option[Int] = None, // [default: 25] - thresh2: Option[Int] = None, // [default: 6] - `type`: FilterType = FilterTypes.approximatecoplanar -) extends PipelineExpr - -@ConfiguredJsonCodec -case class ChipperFilter( - capacity: Option[Int] = None, // [default: 5000] - `type`: FilterType = FilterTypes.chipper -) extends PipelineExpr - -@ConfiguredJsonCodec -case class ClusterFilter( - minPoints: Option[Int] = None, // [default: 1] - maxPoints: Option[Int] = None, // [default: UINT64_MAX] - tolerance: Option[Double] = None, // [default: 1.0] - `type`: FilterType = FilterTypes.cluster -) - -@ConfiguredJsonCodec -case class ColorinterpFilter( - ramp: Option[String] = None, // [default: pestel_shades] - dimension: Option[String] = None, // [default: Z] - minimum: Option[String] = None, - maximum: Option[String] = None, - invert: Option[Boolean] = None, // [default: false] - k: Option[Double] = None, - mad: Option[Boolean] = None, - madMultiplier: Option[Double] = None, - `type`: FilterType = FilterTypes.colorinterp -) extends PipelineExpr - 
-@ConfiguredJsonCodec -case class ColorizationFilter( - raster: String, - dimensions: Option[String] = None, - `type`: FilterType = FilterTypes.colorization -) extends PipelineExpr - -@ConfiguredJsonCodec -case class ComputerangeFilter( - `type`: FilterType = FilterTypes.computerange -) extends PipelineExpr - -@ConfiguredJsonCodec -case class CpdFilter( - method: Option[String] = None, - `type`: FilterType = FilterTypes.cpd -) extends PipelineExpr - -@ConfiguredJsonCodec -case class CropFilter( - bounds: Option[String] = None, - polygon: Option[String] = None, - outside: Option[String] = None, - point: Option[String] = None, - radius: Option[String] = None, - `type`: FilterType = FilterTypes.crop -) extends PipelineExpr - -@ConfiguredJsonCodec -case class DecimationFilter( - step: Option[Int] = None, - offset: Option[Int] = None, - limit: Option[Int] = None, - `type`: FilterType = FilterTypes.decimation -) extends PipelineExpr - -@ConfiguredJsonCodec -case class DividerFilter( - mode: Option[String] = None, - count: Option[Int] = None, - capacity: Option[Int] = None, - `type`: FilterType = FilterTypes.divider -) extends PipelineExpr - -@ConfiguredJsonCodec -case class EigenValuesFilter( - knn: Option[Int] = None, - `type`: FilterType = FilterTypes.eigenvalues -) extends PipelineExpr - -@ConfiguredJsonCodec -case class EstimateRankFilter( - knn: Option[Int] = None, - thresh: Option[Double] = None, - `type`: FilterType = FilterTypes.estimaterank -) extends PipelineExpr - -@ConfiguredJsonCodec -case class FerryFilter( - dimensions: String, - `type`: FilterType = FilterTypes.ferry -) extends PipelineExpr - -@ConfiguredJsonCodec -case class GreedyProjectionFilter( - `type`: FilterType = FilterTypes.greedyprojection -) extends PipelineExpr - -@ConfiguredJsonCodec -case class GridProjectionFilter( - `type`: FilterType = FilterTypes.gridprojection -) extends PipelineExpr - -@ConfiguredJsonCodec -case class GroupByFilter( - dimension: String, - `type`: FilterType = FilterTypes.groupby -) - -@ConfiguredJsonCodec -case class HagFilter( - `type`: FilterType = FilterTypes.hag -) extends PipelineExpr - -@ConfiguredJsonCodec -case class HeadFilter( - count: Option[Int] = None, // [default: 10] - `type`: FilterType = FilterTypes.head -) extends PipelineExpr - -@ConfiguredJsonCodec -case class HexbinFilter( - edgeSize: Option[Int] = None, - sampleSize: Option[Int] = None, - threshold: Option[Int] = None, - precision: Option[Int] = None, - `type`: FilterType = FilterTypes.hexbin -) extends PipelineExpr - -@ConfiguredJsonCodec -case class IcpFilter( - `type`: FilterType = FilterTypes.icp -) extends PipelineExpr - -@ConfiguredJsonCodec -case class IqrFilter( - dimension: String, - k: Option[Double] = None, - `type`: FilterType = FilterTypes.iqr -) extends PipelineExpr - -@ConfiguredJsonCodec -case class KDistanceFilter( - k: Option[Int] = None, - `type`: FilterType = FilterTypes.kdistance -) extends PipelineExpr - -@ConfiguredJsonCodec -case class LocateFilter( - dimension: String, - minmax: String, - `type`: FilterType = FilterTypes.locate -) extends PipelineExpr - -@ConfiguredJsonCodec -case class LofFilter( - minpts: Option[Int] = None, - `type`: FilterType = FilterTypes.lof -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MadFilter( - dimension: String, - k: Option[Double] = None, - `type`: FilterType = FilterTypes.mad -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MatlabFilter( - script: String, - source: String, - addDimenstion: Option[String] = None, - struct: Option[String] = 
None, // [default: PDAL] - `type`: FilterType = FilterTypes.matlab -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MergeFilter( - inputs: List[String], - tag: Option[String] = None, - `type`: FilterType = FilterTypes.merge -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MongusFilter( - cell: Option[Double] = None, - classify: Option[Boolean] = None, - extract: Option[Boolean] = None, - k: Option[Double] = None, - l: Option[Int] = None, - `type`: FilterType = FilterTypes.mongus -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MortnOrderFilter( - `type`: FilterType = FilterTypes.mortonorder -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MovingLeastSquaresFilter( - `type`: FilterType = FilterTypes.movingleastsquares -) extends PipelineExpr - -@ConfiguredJsonCodec -case class NormalFilter( - knn: Option[Int] = None, - `type`: FilterType = FilterTypes.normal -) extends PipelineExpr - -@ConfiguredJsonCodec -case class OutlierFilter( - method: Option[String] = None, - minK: Option[Int] = None, - radius: Option[Double] = None, - meanK: Option[Int] = None, - multiplier: Option[Double] = None, - `type`: FilterType = FilterTypes.outlier -) extends PipelineExpr - -@ConfiguredJsonCodec -case class OverlayFilter( - dimension: Option[String] = None, // [default: none] - datasource: Option[String] = None, // [default: none] - column: Option[String] = None, // [default: none] - query: Option[String] = None, // [default: first column] - layer: Option[String] = None, // [default: first layer] - `type`: FilterType = FilterTypes.overlay -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PclBlockFilter( - filename: String, - methods: Option[List[String]] = None, - `type`: FilterType = FilterTypes.pclblock -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PmfFilter( - maxWindowSize: Option[Int] = None, - slope: Option[Double] = None, - maxDistance: Option[Double] = None, - initialDistance: Option[Double] = None, - cellSize: Option[Int] = None, - exponential: Option[Boolean] = None, // [default: true] - `type`: FilterType = FilterTypes.pmf -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PoissonFilter( - depth: Option[Int] = None, - pointWeight: Option[Double] = None, - `type`: FilterType = FilterTypes.poisson -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PythonFilter( - script: String, - module: String, - function: String, - source: String, - addDimenstion: Option[String] = None, - pdalargs: Option[String] = None, - `type`: FilterType = FilterTypes.python -) extends PipelineExpr - -@ConfiguredJsonCodec -case class RadialDensityFilter( - radius: Option[Double] = None, - `type`: FilterType = FilterTypes.radialdensity -) extends PipelineExpr - -@ConfiguredJsonCodec -case class RandomizeFilter( - `type`: FilterType = FilterTypes.randomize -) extends PipelineExpr - -@ConfiguredJsonCodec -case class RangeFilter( - limits: Option[String] = None, - `type`: FilterType = FilterTypes.range -) extends PipelineExpr - -@ConfiguredJsonCodec -case class ReprojectionFilter( - outSrs: String, - inSrs: Option[String] = None, - tag: Option[String] = None, - `type`: FilterType = FilterTypes.reprojection -) extends PipelineExpr - -@ConfiguredJsonCodec -case class SampleFilter( - radius: Option[Double] = None, - `type`: FilterType = FilterTypes.sample -) extends PipelineExpr - -@ConfiguredJsonCodec -case class SmrfFilter( - cell: Option[Double] = None, - classify: Option[Boolean] = None, - cut: Option[Double] = None, - extract: Option[Boolean] 
= None, - slope: Option[Double] = None, - threshold: Option[Double] = None, - window: Option[Double] = None, - `type`: FilterType = FilterTypes.smrf -) extends PipelineExpr - -@ConfiguredJsonCodec -case class SortFilter( - dimension: String, - `type`: FilterType = FilterTypes.sort -) extends PipelineExpr - -@ConfiguredJsonCodec -case class SplitterFilter( - length: Option[Int] = None, - originX: Option[Double] = None, - originY: Option[Double] = None, - `type`: FilterType = FilterTypes.splitter -) extends PipelineExpr - -@ConfiguredJsonCodec -case class StatsFilter( - dimenstions: Option[String] = None, - enumerate: Option[String] = None, - count: Option[Int] = None, - `type`: FilterType = FilterTypes.stats -) extends PipelineExpr - -@ConfiguredJsonCodec -case class TailFilter( - count: Option[Int] = None, // [default: 10] - `type`: FilterType = FilterTypes.tail -) extends PipelineExpr - -@ConfiguredJsonCodec -case class TransformationFilter( - matrix: String, - `type`: FilterType = FilterTypes.transformation -) extends PipelineExpr - -@ConfiguredJsonCodec -case class VoxelCenterNearestNeighborFilter( - cell: Option[Double] = None, // [default: 1.0] - `type`: FilterType = FilterTypes.voxelcenternearestneighbor -) extends PipelineExpr - -@ConfiguredJsonCodec -case class VoxelCentroidNearestNeighbor( - cell: Option[Double] = None, // [default: 1.0] - `type`: FilterType = FilterTypes.voxelcentroidnearestneighbor -) extends PipelineExpr - -@ConfiguredJsonCodec -case class VoxelGridFilter( - leafX: Option[Double] = None, - leafY: Option[Double] = None, - leafZ: Option[Double] = None, - `type`: FilterType = FilterTypes.voxelgrid -) extends PipelineExpr - -@ConfiguredJsonCodec -case class Write( - filename: String, - `type`: Option[WriterType] = None // usually auto derived by pdal -) extends PipelineExpr - -@ConfiguredJsonCodec -case class BpfWrite( - filename: String, - compression: Option[Boolean] = None, - format: Option[String] = None, - bundledfile: Option[String] = None, - headerData: Option[String] = None, - coordId: Option[Int] = None, - scaleX: Option[Double] = None, - scaleY: Option[Double] = None, - scaleZ: Option[Double] = None, - offsetX: Option[String] = None, - offsetY: Option[String] = None, - offsetZ: Option[String] = None, - outputDims: Option[String] = None, - `type`: WriterType = WriterTypes.bpf -) extends PipelineExpr - -@ConfiguredJsonCodec -case class GdalWrite( - filename: String, - resoultion: Int, - radius: Double, - gdaldriver: Option[String] = None, - gdalopts: Option[String] = None, - outputType: Option[String] = None, - windowSize: Option[Int] = None, - dimension: Option[String] = None, - `type`: WriterType = WriterTypes.gdal -) extends PipelineExpr - -@ConfiguredJsonCodec -case class GeoWaveWrite( - zookeeperUrl: String, - instanceName: String, - username: String, - password: String, - tableNamespace: String, - featureTypeName: Option[String] = None, - dataAdapter: Option[String] = None, - pointsPerEntry: Option[String] = None, // [default: 5000u] - `type`: WriterType = WriterTypes.geowave -) extends PipelineExpr - -@ConfiguredJsonCodec -case class LasWrite( - filename: String, - forward: Option[String] = None, - minorVersion: Option[Int] = None, - softwareId: Option[String] = None, - creationDoy: Option[Int] = None, - creationYear: Option[Int] = None, - dataformatId: Option[Int] = None, - systemId: Option[String] = None, - aSrs: Option[String] = None, - globalEncoding: Option[String] = None, - projectId: Option[String] = None, - compression: Option[String] = None, 
- scaleX: Option[Double] = None, - scaleY: Option[Double] = None, - scaleZ: Option[Double] = None, - offsetX: Option[String] = None, - offsetY: Option[String] = None, - offsetZ: Option[String] = None, - filesourceId: Option[Int] = None, - discardHighReturnNumbers: Option[Boolean] = None, - `type`: WriterType = WriterTypes.las -) extends PipelineExpr - -@ConfiguredJsonCodec -case class MatlabWrite( - filename: String, - outputDims: Option[String] = None, - `type`: WriterType = WriterTypes.matlab -) extends PipelineExpr - -@ConfiguredJsonCodec -case class NitfWrite( - filename: String, - clevel: Option[String] = None, - stype: Option[String] = None, - ostaid: Option[String] = None, - ftitle: Option[String] = None, - fscalas: Option[String] = None, - oname: Option[String] = None, - ophone: Option[String] = None, - fsctlh: Option[String] = None, - fsclsy: Option[String] = None, - idatim: Option[String] = None, - iid2: Option[String] = None, - fscltx: Option[String] = None, - aimidb: Option[String] = None, - acftb: Option[String] = None, - `type`: WriterType = WriterTypes.nitf -) extends PipelineExpr - -@ConfiguredJsonCodec -case class NullWrite( - `type`: WriterType = WriterTypes.`null` -) extends PipelineExpr - -@ConfiguredJsonCodec -case class OciWrite( - connection: String, - is3d: Option[Boolean] = None, - solid: Option[Boolean] = None, - overwrite: Option[Boolean] = None, - verbose: Option[Boolean] = None, - srid: Option[Int] = None, - capacity: Option[Int] = None, - streamOutputPrecision: Option[Int] = None, - cloudId: Option[Int] = None, - blockTableName: Option[String] = None, - blockTablePartitionValue: Option[Int] = None, - baseTableName: Option[String] = None, - cloudColumnName: Option[String] = None, - baseTableAuxColumns: Option[String] = None, - baseTableAuxValues: Option[String] = None, - baseTableBoundaryColumn: Option[String] = None, - baseTableBoundaryWkt: Option[String] = None, - preBlockSql: Option[String] = None, - preSql: Option[String] = None, - postBlockSql: Option[String] = None, - baseTableBounds: Option[String] = None, - pcId: Option[Int] = None, - packIgnoredFields: Option[Boolean] = None, - streamChunks: Option[Boolean] = None, - blobChunkCount: Option[Int] = None, - scaleX: Option[Double] = None, - scaleY: Option[Double] = None, - scaleZ: Option[Double] = None, - offsetX: Option[Double] = None, - offsetY: Option[Double] = None, - offsetZ: Option[Double] = None, - outputDims: Option[String] = None, - `type`: WriterType = WriterTypes.oci -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PcdWrite( - filename: String, - compression: Option[Boolean] = None, - `type`: WriterType = WriterTypes.pcd -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PgpointcloudWrite( - connection: String, - table: String, - schema: Option[String] = None, - column: Option[String] = None, - compression: Option[String] = None, - overwrite: Option[Boolean] = None, - srid: Option[Int] = None, - pcid: Option[Int] = None, - preSql: Option[String] = None, - postSql: Option[String] = None, - scaleX: Option[Double] = None, - scaleY: Option[Double] = None, - scaleZ: Option[Double] = None, - offsetX: Option[Double] = None, - offsetY: Option[Double] = None, - offsetZ: Option[Double] = None, - outputDims: Option[String] = None, - `type`: WriterType = WriterTypes.pgpointcloud -) extends PipelineExpr - -@ConfiguredJsonCodec -case class PlyWrite( - filename: String, - storageMode: Option[String] = None, - `type`: WriterType = WriterTypes.ply -) extends PipelineExpr - 
-@ConfiguredJsonCodec -case class RialtoWrite( - filename: String, - maxLevels: Option[Int] = None, - overwrite: Option[Boolean] = None, - `type`: WriterType = WriterTypes.rialto -) extends PipelineExpr - -@ConfiguredJsonCodec -case class SqliteWrite( - filename: String, - cloudTableName: String, - blockTableName: String, - cloudColumnName: Option[String] = None, - compression: Option[String] = None, - overwrite: Option[Boolean] = None, - preSql: Option[String] = None, - postSql: Option[String] = None, - scaleX: Option[Double] = None, - scaleY: Option[Double] = None, - scaleZ: Option[Double] = None, - offsetX: Option[Double] = None, - offsetY: Option[Double] = None, - offsetZ: Option[Double] = None, - outputDims: Option[String] = None, - `type`: WriterType = WriterTypes.sqlite -) extends PipelineExpr - -@ConfiguredJsonCodec -case class TextWrite( - filename: String, - format: Option[String] = None, - order: Option[String] = None, - keepUnspecified: Option[Boolean] = None, - jscallback: Option[String] = None, - quoteHeader: Option[String] = None, - newline: Option[String] = None, - delimiter: Option[String] = None, - `type`: WriterType = WriterTypes.text -) extends PipelineExpr diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/ReaderTypes.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/ReaderTypes.scala deleted file mode 100644 index 561eeed3eb..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/ReaderTypes.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.pdal.pipeline - -sealed trait ReaderType extends ExprType { val `type` = "readers" } - -object ReaderTypes { - case object bpf extends ReaderType - case object buffer extends ReaderType - case object faux extends ReaderType - case object gdal extends ReaderType - case object geowave extends ReaderType - case object greyhound extends ReaderType - case object ilvis2 extends ReaderType - case object las extends ReaderType - case object matlab extends ReaderType - case object mbio extends ReaderType - case object mrsid extends ReaderType - case object nitf extends ReaderType - case object oci extends ReaderType - case object optech extends ReaderType - case object osg extends ReaderType - case object pcd extends ReaderType - case object pgpointcloud extends ReaderType - case object ply extends ReaderType - case object pts extends ReaderType - case object qfit extends ReaderType - case object rxp extends ReaderType - case object sbet extends ReaderType - case object sqlite extends ReaderType - case object text extends ReaderType - case object tindex extends ReaderType - case object terrasolid extends ReaderType - case object icebridge extends ReaderType - - lazy val all = List( - bpf, buffer, faux, gdal, geowave, greyhound, ilvis2, las, matlab, mbio, mrsid, nitf, - oci, optech, osg, pcd, pgpointcloud, ply, pts, qfit, rxp, sbet, sqlite, text, - tindex, terrasolid, icebridge - ) - - def fromName(name: String): ReaderType = - all.find(_.name == name).getOrElse(throw new Exception(s"ReaderType $name is not supported.")) -} \ No newline at end of file diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/WriterTypes.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/WriterTypes.scala deleted file mode 100644 index 25805b445b..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/WriterTypes.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.pdal.pipeline - -sealed trait WriterType extends ExprType { val `type` = "writers" } - -object WriterTypes { - case object bpf extends WriterType - case object gdal extends WriterType - case object geowave extends WriterType - case object las extends WriterType - case object matlab extends WriterType - case object nitf extends WriterType - case object `null` extends WriterType - case object oci extends WriterType - case object optech extends WriterType - case object pcd extends WriterType - case object pgpointcloud extends WriterType - case object pclvisualizer extends WriterType - case object ply extends WriterType - case object rialto extends WriterType - case object sbet extends WriterType - case object sqlite extends WriterType - case object text extends WriterType - - lazy val all = List( - bpf, gdal, geowave, las, matlab, nitf, oci, optech, - pcd, pgpointcloud, pclvisualizer, ply, rialto, sbet, sqlite, text - ) - - def fromName(name: String): WriterType = - all.find(_.name == name).getOrElse(throw new Exception(s"WriterType $name is not supported.")) -} \ No newline at end of file diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/json/Implicits.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/json/Implicits.scala deleted file mode 100644 index 32d4bde0d5..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/json/Implicits.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.pdal.pipeline.json - -import io.pdal.pipeline._ - -import io.circe.{Decoder, Encoder, Json, Printer} -import io.circe.generic.extras.Configuration -import io.circe.syntax._ -import cats.syntax.either._ - -object Implicits extends Implicits - -trait Implicits extends Serializable { - implicit val customConfig: Configuration = - Configuration.default.withSnakeCaseKeys.withDiscriminator("class_type") - - val pipelinePrettyPrinter: Printer = Printer.spaces2.copy(dropNullKeys = true) - - implicit def exprTypeEncoder[T <: ExprType]: Encoder[T] = Encoder.instance { _.toString.asJson } - implicit def exprTypeDecoder[T <: ExprType]: Decoder[T] = Decoder.decodeString.emap { str => - Either.catchNonFatal(ExprType.fromName(str).asInstanceOf[T]).leftMap(_ => "ExprType") - } - - implicit val pipelineConstructorEncoder: Encoder[PipelineConstructor] = Encoder.instance { constructor => - Json.obj( - "pipeline" -> constructor - .flatMap { - _.flatMap { - case RawExpr(json) => json.asObject - case expr => expr.asJson.asObject - }.map { - _.remove("class_type") // remove type - .filter { case (_, value) => !value.isNull } // cleanup options - } - }.asJson - ) - } - implicit val pipelineConstructorDecoder: Decoder[PipelineConstructor] = Decoder.instance { - _.downField("pipeline").as[PipelineConstructor] - } - - implicit val rawExprEncoder: Encoder[RawExpr] = Encoder.instance { _.json } - implicit val rawExprDecoder: Decoder[RawExpr] = Decoder.decodeJson.emap { json => - Either.catchNonFatal(RawExpr(json)).leftMap(_ => "RawExpr") - } -} diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/json/package.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/json/package.scala deleted file mode 100644 index d0404ac89b..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/json/package.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.pdal.pipeline - -package object json extends json.Implicits diff --git a/java/core-scala/src/main/scala/io/pdal/pipeline/package.scala b/java/core-scala/src/main/scala/io/pdal/pipeline/package.scala deleted file mode 100644 index 0242c3e063..0000000000 --- a/java/core-scala/src/main/scala/io/pdal/pipeline/package.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.pdal - -import io.circe.Json -import io.circe.syntax._ - -/** - * There is no implicit PipelineExprToString function to avoid - * implicit casts in places where PipelineConstructor should be used. - */ - -package object pipeline extends json.Implicits with Implicits with Serializable { - type PipelineConstructor = List[PipelineExpr] - - implicit class withPipelineConstructor(list: PipelineConstructor) { - def ~(e: PipelineExpr): PipelineConstructor = list :+ e - def ~(e: Option[PipelineExpr]): PipelineConstructor = e.fold(list)(el => list :+ el) - def map[B](f: PipelineExpr => B): List[B] = list.map(f) - def toPipeline = Pipeline(list.asJson.noSpaces) - } - - implicit def pipelineExprToConstructor[T <: PipelineExpr](expr: T): PipelineConstructor = expr :: Nil - implicit def pipelineExprToJson(expr: PipelineExpr): Json = expr.asJson - implicit def pipelineConstructorToJson(expr: PipelineConstructor): Json = expr.asJson - implicit def pipelineConstructorToString(expr: PipelineConstructor): String = expr.asJson.noSpaces -} diff --git a/java/core-scala/src/test/scala/io/pdal/pipeline/PipelineExpressionsSpec.scala b/java/core-scala/src/test/scala/io/pdal/pipeline/PipelineExpressionsSpec.scala deleted file mode 100644 index a1b364ba5c..0000000000 --- a/java/core-scala/src/test/scala/io/pdal/pipeline/PipelineExpressionsSpec.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2017 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.pdal.pipeline - -import io.circe._ -import io.circe.syntax._ -import io.circe.parser._ - -import org.scalatest._ - -class PipelineExpressionsSpec extends FunSpec with Matchers with BeforeAndAfterAll { - describe("Pipeline Expressions spec") { - it("should print a correct json, using DSL") { - val expected = - """ - |{ - | "pipeline" : [ - | { - | "filename" : "/path/to/las", - | "type" : "readers.las" - | }, - | { - | "type" : "filters.crop" - | }, - | { - | "filename" : "/path/to/new/las", - | "type" : "writers.las" - | } - | ] - |} - """.stripMargin - - - val pc: PipelineConstructor = LasRead("/path/to/las") ~ CropFilter() ~ LasWrite("/path/to/new/las") - val pipelineJson: Json = LasRead("/path/to/las") ~ CropFilter() ~ LasWrite("/path/to/new/las") - - parse(expected) match { - case Right(r) => pipelineJson shouldBe r - case Left(e) => throw e - } - } - - it("should print a correct json, using RAW JSON") { - val expected = - """ - |{ - | "pipeline" : [ - | { - | "filename" : "/path/to/las", - | "type" : "readers.las" - | }, - | { - | "type" : "filters.crop" - | }, - | { - | "filename" : "/path/to/new/las", - | "type" : "writers.las" - | } - | ] - |} - """.stripMargin - - val pipelineJson: Json = LasRead("/path/to/las") ~ RawExpr(Map("type" -> "filters.crop").asJson) ~ LasWrite("/path/to/new/las") - - parse(expected) match { - case Right(r) => pipelineJson shouldBe r - case Left(e) => throw e - } - } - } -} diff --git a/java/core/src/main/scala/io/pdal/DimType.scala b/java/core/src/main/scala/io/pdal/DimType.scala deleted file mode 100644 index a0b40b2c0d..0000000000 --- a/java/core/src/main/scala/io/pdal/DimType.scala +++ /dev/null @@ -1,49 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -case class DimType(id: String, `type`: String, scale: Double = 1, offset: Double = 0) - -object DimType { - object Id { - val Unknown = "Unknown" - val X = "X" - val Y = "Y" - val Z = "Z" - } - - def X = DimType(Id.X, "double") - def Y = DimType(Id.Y, "double") - def Z = DimType(Id.Z, "double") -} diff --git a/java/core/src/main/scala/io/pdal/Native.scala b/java/core/src/main/scala/io/pdal/Native.scala deleted file mode 100644 index 949b0dd533..0000000000 --- a/java/core/src/main/scala/io/pdal/Native.scala +++ /dev/null @@ -1,40 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - ****************************************************************************/ - -package io.pdal - -trait Native { - protected var nativeHandle = 0l // C++ pointer - def ptr(): Long = nativeHandle - def dispose(): Unit -} diff --git a/java/core/src/main/scala/io/pdal/Pipeline.scala b/java/core/src/main/scala/io/pdal/Pipeline.scala deleted file mode 100644 index c6882c0530..0000000000 --- a/java/core/src/main/scala/io/pdal/Pipeline.scala +++ /dev/null @@ -1,56 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. 
nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - ****************************************************************************/ - -package io.pdal - -import ch.jodersky.jni.nativeLoader - -class Pipeline(val json: String) extends Native { - Pipeline // reference the object so that the nativeLoader will load the JNI native libraries - - @native def initialise(): Unit - @native def execute(): Unit - @native def getPointViews(): PointViewIterator - @native def dispose(): Unit - @native def getMetadata(): String - @native def getSchema(): String - @native def validate(): Boolean - @native def setLogLevel(i: Int): Unit - @native def getLogLevel(): Int - @native def getLog(): String -} - -@nativeLoader("pdaljni.1.4") -object Pipeline { - def apply(json: String): Pipeline = { val p = new Pipeline(json); p.initialise(); p } -} diff --git a/java/core/src/main/scala/io/pdal/PointCloud.scala b/java/core/src/main/scala/io/pdal/PointCloud.scala deleted file mode 100644 index c743fcca13..0000000000 --- a/java/core/src/main/scala/io/pdal/PointCloud.scala +++ /dev/null @@ -1,134 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - ****************************************************************************/ - -package io.pdal - -import java.nio.{ByteBuffer, ByteOrder} -import java.util - -import scala.collection.JavaConversions._ - -/** - * PointCloud abstraction to work with packed point(s) in JVM memory. - * SizedDimType contains size and offset for a particular packed point with the current set of dims. - **/ -case class PointCloud(bytes: Array[Byte], dimTypes: util.Map[String, SizedDimType]) { - val pointSize: Int = dimTypes.values.map(_.size).sum.toInt - val length: Int = bytes.length / pointSize - val isPoint: Boolean = length == pointSize - - def dimSize(dim: SizedDimType) = dimTypes(dim.dimType.id).size - def dimSize(dim: DimType) = dimTypes(dim.id).size - def dimSize(dim: String) = dimTypes(dim).size - def findDimType(dim: String) = dimTypes(dim).dimType - def findSizedDimType(dim: String) = dimTypes(dim) - - /** - * Reads a packed point by point id from a set of packed points. - */ - def get(i: Int): Array[Byte] = { - if (isPoint) bytes - else { - val from = i * pointSize - val result = new Array[Byte](pointSize) - var j = 0 - while(j < pointSize) { - result(j) = bytes(from + j) - j += 1 - } - result - } - } - - def getDouble(idx: Int, dim: SizedDimType): Double = getDouble(idx, dim.dimType) - def getDouble(idx: Int, dim: DimType): Double = getDouble(idx, dim.id) - def getDouble(idx: Int, dim: String): Double = get(idx, dim).getDouble - - def getFloat(idx: Int, dim: SizedDimType): Float = getFloat(idx, dim.dimType.id) - def getFloat(idx: Int, dim: DimType): Float = getFloat(idx, dim.id) - def getFloat(idx: Int, dim: String): Float = get(idx, dim).getFloat - - def getLong(idx: Int, dim: SizedDimType): Long = getLong(idx, dim.dimType.id) - def getLong(idx: Int, dim: DimType): Long = getLong(idx, dim.id) - def getLong(idx: Int, dim: String): Long = get(idx, dim).getLong - - def getInt(idx: Int, dim: SizedDimType): Int = getInt(idx, dim.dimType.id) - def getInt(idx: Int, dim: DimType): Int = getInt(idx, dim.id) - def getInt(idx: Int, dim: String): Int = get(idx, dim).getInt - - def getShort(idx: Int, dim: SizedDimType): Short = getShort(idx, dim.dimType.id) - def getShort(idx: Int, dim: DimType): Short = getShort(idx, dim.id) - def getShort(idx: Int, dim: String): Short = get(idx, dim).getShort - - def getChar(idx: Int, dim: SizedDimType): Char = getChar(idx, dim.dimType.id) - def getChar(idx: Int, dim: DimType): Char = getChar(idx, dim.id) - def getChar(idx: Int, dim: String): Char = get(idx, dim).getChar - - def getByte(idx: Int, dim: SizedDimType): Byte = getByte(idx, dim.dimType.id) - def getByte(idx: Int, dim: DimType): Byte = getByte(idx, dim.id) - def getByte(idx: Int, dim: String): Byte = get(idx, dim).get() - - def get(idx: Int, dim: SizedDimType): ByteBuffer = get(idx, dim.dimType.id) - def get(idx: Int, dim: DimType): ByteBuffer = get(idx, dim.id) - def get(idx: Int, dim: String): ByteBuffer = ByteBuffer.wrap(get(get(idx), dim)).order(ByteOrder.nativeOrder()) - 
- def get(idx: Int, dims: Array[SizedDimType]): ByteBuffer = get(idx, dims.map(_.dimType.id)) - def get(idx: Int, dims: Array[DimType]): ByteBuffer = get(idx, dims.map(_.id)) - def get(idx: Int, dims: Array[String]): ByteBuffer = ByteBuffer.wrap(get(get(idx), dims)).order(ByteOrder.nativeOrder()) - - def getX(idx: Int): Double = getDouble(idx, DimType.Id.X) - def getY(idx: Int): Double = getDouble(idx, DimType.Id.Y) - def getZ(idx: Int): Double = getDouble(idx, DimType.Id.Z) - - /** - * Reads dim from a packed point. - */ - def get(packedPoint: Array[Byte], dim: String): Array[Byte] = { - val sdt = dimTypes(dim) - val from = sdt.offset.toInt - val dimSize = sdt.size.toInt - val result = new Array[Byte](dimSize) - var j = 0 - while(j < dimSize) { - result(j) = packedPoint(from + j) - j += 1 - } - result - } - - /** - * Reads dims from a packed point. - */ - private def get(packedPoint: Array[Byte], dims: Array[String]): Array[Byte] = - dims.map(get(bytes, _)).fold(Array[Byte]())(_ ++ _) -} diff --git a/java/core/src/main/scala/io/pdal/PointLayout.scala b/java/core/src/main/scala/io/pdal/PointLayout.scala deleted file mode 100644 index 1585dece02..0000000000 --- a/java/core/src/main/scala/io/pdal/PointLayout.scala +++ /dev/null @@ -1,67 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -import java.util -import scala.collection.JavaConversions._ - -class PointLayout extends Native { - def dimSize(dimType: DimType): Long = dimSize(dimType.id) - def dimPackedOffset(dimType: DimType): Long = dimPackedOffset(dimType.id) - - def sizedDimTypes(): util.Map[String, SizedDimType] = toSizedDimTypes(dimTypes()) - def toSizedDimTypes(dimTypes: Array[DimType]): util.Map[String, SizedDimType] = { - var (i, offset, length) = (0, 0l, dimTypes.length) - val result = new util.HashMap[String, SizedDimType]() - while(i < length) { - val dt = dimTypes(i) - val size = dimSize(dt) - result += dt.id -> SizedDimType(dt, size, offset) - offset += size - i += 1 - } - result - } - - @native def dimTypes(): Array[DimType] - @native def findDimType(name: String): DimType - @native def dimSize(id: String): Long - /** - * Offset of a dim in a packed points byte array calculated as a sum of previous dim sizes. - * Valid for a point with all dims. - */ - @native def dimPackedOffset(id: String): Long - @native def pointSize(): Long - @native def dispose(): Unit -} diff --git a/java/core/src/main/scala/io/pdal/PointView.scala b/java/core/src/main/scala/io/pdal/PointView.scala deleted file mode 100644 index 5d62256654..0000000000 --- a/java/core/src/main/scala/io/pdal/PointView.scala +++ /dev/null @@ -1,156 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -import java.nio.{ByteBuffer, ByteOrder} - -class PointView extends Native { - def getPointCloud(idx: Long): PointCloud = getPointCloud(idx, layout.dimTypes()) - def getPointCloud(idx: Long, dims: Array[DimType]): PointCloud = - PointCloud( - bytes = getPackedPoint(idx, dims), - dimTypes = layout.toSizedDimTypes(dims) - ) - - def getPointCloud(): PointCloud = getPointCloud(layout.dimTypes()) - def getPointCloud(dims: Array[DimType]): PointCloud = - PointCloud( - bytes = getPackedPoints(dims), - dimTypes = layout.toSizedDimTypes(dims) - ) - - def getPackedPoint(idx: Long): Array[Byte] = getPackedPoint(idx, layout.dimTypes()) - def getPackedPoints(): Array[Byte] = getPackedPoints(layout.dimTypes()) - def findDimType(name: String): DimType = layout.findDimType(name) - def length(): Int = size() - def getCrsWKT(): String = getCrsWKT(pretty = false) - - /** - * Reads a packed point by point id from a set of packed points. - */ - def get(idx: Int, packedPoints: Array[Byte]): Array[Byte] = get(idx, packedPoints, layout.dimTypes()) - def get(idx: Int, packedPoints: Array[Byte], dims: Array[DimType]): Array[Byte] = { - val pointSize = dims.map(layout.dimSize(_)).sum.toInt - val from = idx * pointSize - val result = new Array[Byte](pointSize) - var j = 0 - while(j < pointSize) { - result(j) = packedPoints(from + j) - j += 1 - } - result - } - - /** - * Reads dim from a packed point, point should contain all layout dims. - */ - def get(packedPoint: Array[Byte], dim: DimType): ByteBuffer = { - val from = layout.dimPackedOffset(dim).toInt - val dimSize = layout.dimSize(dim).toInt - val result = new Array[Byte](dimSize) - var j = 0 - while(j < dimSize) { - result(j) = packedPoint(from + j) - j += 1 - } - ByteBuffer.wrap(result).order(ByteOrder.nativeOrder()) - } - - def getDouble(packedPoint: Array[Byte], dim: String): Double = getDouble(packedPoint, findDimType(dim)) - def getDouble(packedPoint: Array[Byte], dim: DimType): Double = get(packedPoint, dim).getDouble - - def getFloat(packedPoint: Array[Byte], dim: String): Float = getFloat(packedPoint, findDimType(dim)) - def getFloat(packedPoint: Array[Byte], dim: DimType): Float = get(packedPoint, dim).getFloat - - def getLong(packedPoint: Array[Byte], dim: String): Long = getLong(packedPoint, findDimType(dim)) - def getLong(packedPoint: Array[Byte], dim: DimType): Long = get(packedPoint, dim).getLong - - def getInt(packedPoint: Array[Byte], dim: String): Int = getInt(packedPoint, findDimType(dim)) - def getInt(packedPoint: Array[Byte], dim: DimType): Int = get(packedPoint, dim).getInt - - def getShort(packedPoint: Array[Byte], dim: String): Short = getShort(packedPoint, findDimType(dim)) - def getShort(packedPoint: Array[Byte], dim: DimType): Short = get(packedPoint, dim).getShort - - def getChar(packedPoint: Array[Byte], dim: String): Char = getChar(packedPoint, findDimType(dim)) - def getChar(packedPoint: Array[Byte], dim: DimType): Char = get(packedPoint, dim).getChar - - def getByte(packedPoint: Array[Byte], dim: String): Byte = getByte(packedPoint, findDimType(dim)) - def getByte(packedPoint: Array[Byte], dim: DimType): Byte = get(packedPoint, dim).get() - - /** - * One dimension read; for multiple dims custom logic required. 
- */ - - def getDouble(idx: Int, dim: String): Double = getDouble(idx, findDimType(dim)) - def getDouble(idx: Int, dim: DimType): Double = get(idx, dim).getDouble - - def getFloat(idx: Int, dim: String): Float = getFloat(idx, findDimType(dim)) - def getFloat(idx: Int, dim: DimType): Float = get(idx, dim).getFloat - - def getLong(idx: Int, dim: String): Long = getLong(idx, findDimType(dim)) - def getLong(idx: Int, dim: DimType): Long = get(idx, dim).getLong - - def getInt(idx: Int, dim: String): Int = getInt(idx, findDimType(dim)) - def getInt(idx: Int, dim: DimType): Int = get(idx, dim).getInt - - def getShort(idx: Int, dim: String): Short = getShort(idx, findDimType(dim)) - def getShort(idx: Int, dim: DimType): Short = get(idx, dim).getShort - - def getChar(idx: Int, dim: String): Char = getChar(idx, findDimType(dim)) - def getChar(idx: Int, dim: DimType): Char = get(idx, dim).getChar - - def getByte(idx: Int, dim: String): Byte = getByte(idx, findDimType(dim)) - def getByte(idx: Int, dim: DimType): Byte = get(idx, dim).get() - - def get(idx: Int, dim: String): ByteBuffer = get(idx, findDimType(dim)) - def get(idx: Int, dim: DimType): ByteBuffer = - ByteBuffer.wrap(getPackedPoint(idx, Array(dim))).order(ByteOrder.nativeOrder()) - - def getX(idx: Int): Double = getDouble(idx, DimType.X) - def getY(idx: Int): Double = getDouble(idx, DimType.Y) - def getZ(idx: Int): Double = getDouble(idx, DimType.Z) - - def getX(packedPoint: Array[Byte]): Double = getDouble(packedPoint, DimType.X) - def getY(packedPoint: Array[Byte]): Double = getDouble(packedPoint, DimType.Y) - def getZ(packedPoint: Array[Byte]): Double = getDouble(packedPoint, DimType.Z) - - @native def layout(): PointLayout - @native def size(): Int - @native def empty(): Boolean - @native def getCrsProj4(): String - @native def getCrsWKT(pretty: Boolean): String - @native def getPackedPoint(idx: Long, dims: Array[DimType]): Array[Byte] - @native def getPackedPoints(dims: Array[DimType]): Array[Byte] - @native def dispose(): Unit -} diff --git a/java/core/src/main/scala/io/pdal/PointViewIterator.scala b/java/core/src/main/scala/io/pdal/PointViewIterator.scala deleted file mode 100644 index 3b2a2ce212..0000000000 --- a/java/core/src/main/scala/io/pdal/PointViewIterator.scala +++ /dev/null @@ -1,42 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - ****************************************************************************/ - -package io.pdal - -import java.util - -class PointViewIterator extends util.Iterator[PointView] with Native { - @native def hasNext: Boolean - @native def next(): PointView - @native def dispose(): Unit -} diff --git a/java/core/src/main/scala/io/pdal/SizedDimType.scala b/java/core/src/main/scala/io/pdal/SizedDimType.scala deleted file mode 100644 index a1b3d46c2c..0000000000 --- a/java/core/src/main/scala/io/pdal/SizedDimType.scala +++ /dev/null @@ -1,36 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -case class SizedDimType(dimType: DimType, size: Long, offset: Long) diff --git a/java/core/src/test/resources/las.json b/java/core/src/test/resources/las.json deleted file mode 100644 index ae8ac352af..0000000000 --- a/java/core/src/test/resources/las.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "pipeline":[ - { - "filename":"../test/data/las/1.2-with-color.las", - "spatialreference":"EPSG:2993" - } - ] -} diff --git a/java/core/src/test/scala/io/pdal/PipelineSpec.scala b/java/core/src/test/scala/io/pdal/PipelineSpec.scala deleted file mode 100644 index a7d5eaefa6..0000000000 --- a/java/core/src/test/scala/io/pdal/PipelineSpec.scala +++ /dev/null @@ -1,177 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -import java.nio.{ByteBuffer, ByteOrder} - -import scala.collection.JavaConverters._ - -class PipelineSpec extends TestEnvironmentSpec { - describe("Pipeline execution") { - it("should validate as incorrect json (bad json passed)") { - val badPipeline = Pipeline(badJson) - badPipeline.validate() should be (false) - badPipeline.dispose() - badPipeline.ptr should be (0) - } - - it("should validate json") { - pipeline.validate() should be (true) - } - - it("should execute pipeline") { - pipeline.execute() - } - - it("should create pointViews iterator") { - val pvi = pipeline.getPointViews() - pvi.asScala.length should be (1) - pvi.dispose() - } - - it("should have a valid point view size") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.length should be (1065) - pvi.hasNext should be (false) - pv.dispose() - pvi.dispose() - } - - it("should read a valid (X, Y, Z) data") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.getX(0) should be (637012.24) - pv.getY(0) should be (849028.31) - pv.getZ(0) should be (431.66) - pv.dispose() - pvi.dispose() - } - - it("should read a valid packed data") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val layout = pv.layout - val arr = pv.getPackedPoint(0, Array(DimType.X, DimType.Y)) - val (xarr, yarr) = arr.take(layout.dimSize(DimType.X).toInt) -> arr.drop(layout.dimSize(DimType.Y).toInt) - - ByteBuffer.wrap(xarr).order(ByteOrder.nativeOrder()).getDouble should be (pv.getX(0)) - ByteBuffer.wrap(yarr).order(ByteOrder.nativeOrder()).getDouble should be (pv.getY(0)) - - layout.dispose() - pv.dispose() - pvi.dispose() - } - - it("should read the whole packed point and grab only one dim") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val arr = pv.getPackedPoint(0) - pv.get(arr, DimType.Y).getDouble should be (pv.getY(0)) - pv.dispose() - pvi.dispose() - } - - it("should read all packed points and grab only one point out of it") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.get(3, pv.getPackedPoints) should be (pv.getPackedPoint(3)) - pv.dispose() - pvi.dispose() - } - - it("should read a valid value by name") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.getByte(0, "ReturnNumber") should be (1) - pv.dispose() - pvi.dispose() - } - - it("should read correctly data as a packed point") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val layout = pv.layout - val arr = pv.getPackedPoint(0) - layout.dimTypes().foreach { dt => pv.get(0, dt).array() should be(pv.get(arr, dt).array())} - layout.dispose() - pv.dispose() - pvi.dispose() - } - - it("layout should have a valid number of dims") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.layout.dimTypes().length should be (16) - pv.dispose() - pvi.dispose() - } - - it("should find a dim by name") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.findDimType("Red") should be (DimType("Red", "uint16_t")) - pv.dispose() - pvi.dispose() - } - - it("dim sizes should be of a valid size") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val layout = pv.layout - layout.dimTypes().map(pv.layout.dimSize(_)).sum should be (layout.pointSize()) - layout.dispose() - pv.dispose() - pvi.dispose() - } - - it("should read all packed points valid") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val layout = pv.layout - pv.getPackedPoints.length should be 
(pv.length * layout.pointSize()) - layout.dispose() - pv.dispose() - pvi.dispose() - } - - it("should read crs correct") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.getCrsProj4 should be (proj4String) - pv.dispose() - pvi.dispose() - } - } -} diff --git a/java/core/src/test/scala/io/pdal/PointCloudSpec.scala b/java/core/src/test/scala/io/pdal/PointCloudSpec.scala deleted file mode 100644 index ab8844d2d8..0000000000 --- a/java/core/src/test/scala/io/pdal/PointCloudSpec.scala +++ /dev/null @@ -1,183 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -import java.nio.{ByteBuffer, ByteOrder} - -import scala.collection.JavaConversions._ - -class PointCloudSpec extends TestEnvironmentSpec { - var packedPoints: PointCloud = _ - - describe("PointCloud in JVM memory operations") { - it("should init PackedPoints") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - - packedPoints = pv.getPointCloud - - pv.dispose() - pvi.dispose() - } - - it("should have a valid point view size") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.length should be (packedPoints.length) - pv.dispose() - pvi.dispose() - } - - it("should read a valid (X, Y, Z) data") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.getX(0) should be (packedPoints.getX(0)) - pv.getY(0) should be (packedPoints.getY(0)) - pv.getZ(0) should be (packedPoints.getZ(0)) - pv.dispose() - pvi.dispose() - } - - it("should read a valid packed data") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val layout = pv.layout - val arr = pv.getPackedPoint(0, Array(DimType.X, DimType.Y)) - val (xarr, yarr) = arr.take(layout.dimSize(DimType.X).toInt) -> arr.drop(layout.dimSize(DimType.Y).toInt) - - val marr = packedPoints.get(0, Array(DimType.X, DimType.Y)) - val (xmarr, ymarr) = arr.take(packedPoints.dimSize(DimType.X).toInt) -> arr.drop(packedPoints.dimSize(DimType.Y).toInt) - - xarr should be (xmarr) - yarr should be (ymarr) - ByteBuffer.wrap(xmarr).order(ByteOrder.nativeOrder()).getDouble should be (pv.getX(0)) - ByteBuffer.wrap(ymarr).order(ByteOrder.nativeOrder()).getDouble should be (pv.getY(0)) - - layout.dispose() - pv.dispose() - pvi.dispose() - } - - it("should read the whole packed point and grab only one dim") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - packedPoints.get(0, DimType.Y).getDouble should be (pv.getY(0)) - pv.dispose() - pvi.dispose() - } - - it("should read all packed points and grab only one point out of it") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.get(3, pv.getPackedPoints) should be (packedPoints.get(3)) - pv.dispose() - pvi.dispose() - } - - it("should read a valid value by name") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.getByte(0, "ReturnNumber") should be (packedPoints.getByte(0, "ReturnNumber")) - pv.dispose() - pvi.dispose() - } - - it("should read correctly data as a packed point") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - packedPoints.dimTypes.foreach { case (_, sdt) => - pv.get(0, sdt.dimType) should be (packedPoints.get(0, sdt)) - } - pv.dispose() - pvi.dispose() - } - - it("layout should have a valid number of dims") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.layout.dimTypes().length should be (packedPoints.dimTypes.size) - pv.dispose() - pvi.dispose() - } - - it("should find a dim by name") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - pv.findDimType("Red") should be (packedPoints.findDimType("Red")) - pv.dispose() - pvi.dispose() - } - - it("dim sizes should be of a valid size") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val layout = pv.layout - layout.dimTypes().map(pv.layout.dimSize(_)).sum should be (packedPoints.pointSize) - layout.dispose() - pv.dispose() - pvi.dispose() - } - - it("should read all packed points valid") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val length = packedPoints.bytes.length - pv.getPackedPoints.length 
should be (length) - pv.dispose() - pvi.dispose() - } - - it("should get correct points and all values") { - val pvi = pipeline.getPointViews() - val pv = pvi.next() - val length = pv.length - val dimTypes = packedPoints.dimTypes.values().map(_.dimType) - for (i <- 0 until length) { - packedPoints.get(i) should be (pv.getPackedPoint(i)) - packedPoints.getX(i) should be (pv.getX(i)) - packedPoints.getY(i) should be (pv.getY(i)) - packedPoints.getZ(i) should be (pv.getZ(i)) - dimTypes.foreach { dt => - packedPoints.get(i, dt).array() should be (pv.get(i, dt).array()) - } - } - pv.dispose() - pvi.dispose() - } - } - - override def beforeAll() = { - pipeline.execute() - } -} diff --git a/java/core/src/test/scala/io/pdal/TestEnvironmentSpec.scala b/java/core/src/test/scala/io/pdal/TestEnvironmentSpec.scala deleted file mode 100644 index 23c51ead77..0000000000 --- a/java/core/src/test/scala/io/pdal/TestEnvironmentSpec.scala +++ /dev/null @@ -1,68 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. 
- ****************************************************************************/ - -package io.pdal - -import org.scalatest._ - -trait TestEnvironmentSpec extends FunSpec with Matchers with BeforeAndAfterAll { - def getJson(resource: String): String = { - val stream = getClass.getResourceAsStream(resource) - val lines = scala.io.Source.fromInputStream(stream).getLines - val json = lines.mkString(" ") - stream.close() - json - } - - val json = getJson("/las.json") - val badJson = - """ - |{ - | "pipeline": [ - | "nofile.las", - | { - | "type": "filters.sort", - | "dimension": "X" - | } - | ] - |} - """.stripMargin - - val proj4String = "+proj=lcc +lat_1=43 +lat_2=45.5 +lat_0=41.75 +lon_0=-120.5 +x_0=400000 +y_0=0 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs" - - val pipeline: Pipeline = Pipeline(json) - - override def afterAll() = { - pipeline.dispose() - } -} diff --git a/java/native/src/Accessors.cpp b/java/native/src/Accessors.cpp deleted file mode 100644 index ab46ecaee4..0000000000 --- a/java/native/src/Accessors.cpp +++ /dev/null @@ -1,41 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. 
-****************************************************************************/ - -#include "Accessors.hpp" - -jfieldID getHandleField(JNIEnv *env, jobject obj) -{ - jclass c = env->GetObjectClass(obj); - // J is the type signature for long: - return env->GetFieldID(c, "nativeHandle", "J"); -} diff --git a/java/native/src/CMakeLists.txt b/java/native/src/CMakeLists.txt deleted file mode 100644 index e21647841b..0000000000 --- a/java/native/src/CMakeLists.txt +++ /dev/null @@ -1,71 +0,0 @@ -cmake_minimum_required(VERSION 2.8.0) - -set(ignoreMe "${SBT}") # sbt-jni defines -DSBT -set(MAKE_COLOR_MAKEFILE ON) - -# Define project and related variables -# (required by sbt-jni) please use semantic versioning -# - -project (pdaljni) -set(PROJECT_VERSION_MAJOR 1) -set(PROJECT_VERSION_MINOR 4) -set(PROJECT_VERSION_PATCH 0) - -set(PDAL_LIB_NAME pdalcpp) - -if (APPLE) - set(CMAKE_MACOSX_RPATH ON) - SET(CMAKE_SKIP_BUILD_RPATH TRUE) - SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) - SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) -endif () - -if (NOT PDAL_BUILD) - set(CMAKE_CXX_FLAGS "-std=c++11") -endif() - -# Setup JNI -find_package(JNI REQUIRED) -if (JNI_FOUND) - message (STATUS "JNI include directories: ${JNI_INCLUDE_DIRS}") -endif() - -if (NOT PDAL_BUILD) - find_package(PDAL 1.0.0 REQUIRED CONFIG) -endif() - -# Include directories -include_directories(.) -include_directories(include) -include_directories(${JNI_INCLUDE_DIRS}) -if (APPLE) - include_directories(/usr/local/opt/libxml2/include/libxml2) -else () - include_directories(/usr/include/libxml2) -endif () - -# Sources -file(GLOB LIB_SRC - "*.c" - "*.cc" - "*.cpp" -) - -# Setup installation targets -# (required by sbt-jni) major version should always be appended to library name -# -set (LIB_NAME ${PROJECT_NAME}.${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}) -if (PDAL_BUILD) - PDAL_ADD_LIBRARY(${LIB_NAME} ${LIB_SRC}) - target_link_libraries(${LIB_NAME} PUBLIC - ${PDAL_BASE_LIB_NAME} - ${PDAL_UTIL_LIB_NAME}) - target_include_directories(${LIB_NAME} PRIVATE - ${PROJECT_BINARY_DIR}/../../../include) - install(TARGETS ${LIB_NAME} LIBRARY DESTINATION ${PDAL_LIB_INSTALL_DIR}) -else () - add_library(${LIB_NAME} SHARED ${LIB_SRC}) - install(TARGETS ${LIB_NAME} LIBRARY DESTINATION . OPTIONAL) - target_link_libraries(${LIB_NAME} PRIVATE ${PDAL_LIB_NAME}) -endif () diff --git a/java/native/src/JavaPipeline.cpp b/java/native/src/JavaPipeline.cpp deleted file mode 100644 index d39277c5ac..0000000000 --- a/java/native/src/JavaPipeline.cpp +++ /dev/null @@ -1,83 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. 
-* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. -****************************************************************************/ - -#include "JavaPipeline.hpp" -#ifdef PDAL_HAVE_LIBXML2 -#include -#endif - -using pdal::PointViewSet; - -namespace libpdaljava -{ - -Pipeline::Pipeline(std::string const& json) - : m_executor(json) -{ - -} - -Pipeline::~Pipeline() -{ -} - -void Pipeline::setLogLevel(int level) -{ - m_executor.setLogLevel(level); -} - -int Pipeline::getLogLevel() const -{ - return static_cast(m_executor.getLogLevel()); -} - -int64_t Pipeline::execute() -{ - - int64_t count = m_executor.execute(); - return count; -} - -bool Pipeline::validate() -{ - return m_executor.validate(); -} - -PointViewSet Pipeline::getPointViews() const -{ - if (!m_executor.executed()) - throw java_error("call execute() before fetching arrays"); - - return m_executor.getManagerConst().views(); -} -} //namespace libpdaljava diff --git a/java/native/src/PointViewRawPtr.cpp b/java/native/src/PointViewRawPtr.cpp deleted file mode 100644 index 8350e33585..0000000000 --- a/java/native/src/PointViewRawPtr.cpp +++ /dev/null @@ -1,46 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. 
-****************************************************************************/ - -#include "PointViewRawPtr.hpp" - -using pdal::PointViewPtr; - -namespace libpdaljava -{ -PointViewRawPtr::PointViewRawPtr(PointViewPtr p) - : shared_pointer{p} -{ } - -PointViewRawPtr::~PointViewRawPtr() -{ } -} diff --git a/java/native/src/include/Accessors.hpp b/java/native/src/include/Accessors.hpp deleted file mode 100644 index 22f63a4b37..0000000000 --- a/java/native/src/include/Accessors.hpp +++ /dev/null @@ -1,55 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. -****************************************************************************/ - -#include -#include - -#ifndef _ACCESSORS_H_INCLUDED_ -#define _ACCESSORS_H_INCLUDED_ - -jfieldID getHandleField(JNIEnv *, jobject); - -template -T *getHandle(JNIEnv *env, jobject obj) -{ - jlong handle = env->GetLongField(obj, getHandleField(env, obj)); - return reinterpret_cast(handle); -} - -template -void setHandle(JNIEnv *env, jobject obj, T *t) -{ - jlong handle = reinterpret_cast(t); - env->SetLongField(obj, getHandleField(env, obj), handle); -} -#endif diff --git a/java/native/src/include/JavaIterator.hpp b/java/native/src/include/JavaIterator.hpp deleted file mode 100644 index 0ba678ef7f..0000000000 --- a/java/native/src/include/JavaIterator.hpp +++ /dev/null @@ -1,77 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. 
-* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. -****************************************************************************/ - -#include -#include -#include -#include "JavaPipeline.hpp" - -#ifndef _JAVAITERATOR_H_INCLUDED_ -#define _JAVAITERATOR_H_INCLUDED_ - -using pdal::PointViewLess; -using pdal::PointViewPtr; - -namespace libpdaljava -{ -template -class JavaIterator { -public: - JavaIterator() {} - JavaIterator(const std::set set) - : container{set}, curr_pos{0} - { } - JavaIterator(const std::set *set) - : container{*set}, curr_pos{0} - { } - bool hasNext() const { - return curr_pos < container.size(); - } - K next() { - if(!hasNext()) - throw java_error("iterator is out of bounds"); - - return *std::next(container.begin(), curr_pos++); - } - int size() const { - return container.size(); - } - -private: - std::set container; - unsigned int curr_pos; -}; - -typedef JavaIterator PointViewIterator; -} -#endif diff --git a/java/native/src/include/JavaPipeline.hpp b/java/native/src/include/JavaPipeline.hpp deleted file mode 100644 index 229652a680..0000000000 --- a/java/native/src/include/JavaPipeline.hpp +++ /dev/null @@ -1,91 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. -****************************************************************************/ - -#pragma once - -#include -#include -#include -#include - -#include -#include -#undef toupper -#undef tolower -#undef isspace - -namespace libpdaljava -{ - -class java_error : public std::runtime_error -{ -public: - inline java_error(std::string const& msg) : std::runtime_error(msg) - {} -}; - -class Pipeline { -public: - Pipeline(std::string const& xml); - ~Pipeline(); - - int64_t execute(); - bool validate(); - inline std::string getPipeline() const - { - return m_executor.getPipeline(); - } - inline std::string getMetadata() const - { - return m_executor.getMetadata(); - } - inline std::string getSchema() const - { - return m_executor.getSchema(); - } - inline std::string getLog() const - { - return m_executor.getLog(); - } - pdal::PointViewSet getPointViews() const; - - void setLogLevel(int level); - int getLogLevel() const; - -private: - - pdal::PipelineExecutor m_executor; - -}; - -} diff --git a/java/native/src/include/PointViewRawPtr.hpp b/java/native/src/include/PointViewRawPtr.hpp deleted file mode 100644 index 23ed2f6b81..0000000000 --- a/java/native/src/include/PointViewRawPtr.hpp +++ /dev/null @@ -1,55 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. 
-****************************************************************************/ - -#include "JavaPipeline.hpp" - -#ifndef _POINTVIEWRAWPTR_H_INCLUDED_ -#define _POINTVIEWRAWPTR_H_INCLUDED_ - -using pdal::PointViewPtr; - -/** - * PointView wrapper for safer work with PointView shared_pointer as with a common pointer - */ -namespace libpdaljava -{ -class PointViewRawPtr -{ -public: - PointViewPtr shared_pointer; - - PointViewRawPtr(PointViewPtr); - ~PointViewRawPtr(); -}; -} -#endif diff --git a/java/native/src/include/io_pdal_Pipeline.h b/java/native/src/include/io_pdal_Pipeline.h deleted file mode 100644 index 2c8c5ad0ef..0000000000 --- a/java/native/src/include/io_pdal_Pipeline.h +++ /dev/null @@ -1,93 +0,0 @@ -/* DO NOT EDIT THIS FILE - it is machine generated */ -#include -/* Header for class io_pdal_Pipeline */ - -#ifndef _Included_io_pdal_Pipeline -#define _Included_io_pdal_Pipeline -#ifdef __cplusplus -extern "C" { -#endif -/* - * Class: io_pdal_Pipeline - * Method: initialise - * Signature: ()V - */ -JNIEXPORT void JNICALL Java_io_pdal_Pipeline_initialise - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: execute - * Signature: ()V - */ -JNIEXPORT void JNICALL Java_io_pdal_Pipeline_execute - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: getPointViews - * Signature: ()Lio/pdal/PointViewIterator; - */ -JNIEXPORT jobject JNICALL Java_io_pdal_Pipeline_getPointViews - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: dispose - * Signature: ()V - */ -JNIEXPORT void JNICALL Java_io_pdal_Pipeline_dispose - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: getMetadata - * Signature: ()Ljava/lang/String; - */ -JNIEXPORT jstring JNICALL Java_io_pdal_Pipeline_getMetadata - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: getSchema - * Signature: ()Ljava/lang/String; - */ -JNIEXPORT jstring JNICALL Java_io_pdal_Pipeline_getSchema - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: validate - * Signature: ()Z - */ -JNIEXPORT jboolean JNICALL Java_io_pdal_Pipeline_validate - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: setLogLevel - * Signature: (I)V - */ -JNIEXPORT void JNICALL Java_io_pdal_Pipeline_setLogLevel - (JNIEnv *, jobject, jint); - -/* - * Class: io_pdal_Pipeline - * Method: getLogLevel - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_io_pdal_Pipeline_getLogLevel - (JNIEnv *, jobject); - -/* - * Class: io_pdal_Pipeline - * Method: getLog - * Signature: ()Ljava/lang/String; - */ -JNIEXPORT jstring JNICALL Java_io_pdal_Pipeline_getLog - (JNIEnv *, jobject); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/java/native/src/include/io_pdal_PointLayout.h b/java/native/src/include/io_pdal_PointLayout.h deleted file mode 100644 index bba5277433..0000000000 --- a/java/native/src/include/io_pdal_PointLayout.h +++ /dev/null @@ -1,61 +0,0 @@ -/* DO NOT EDIT THIS FILE - it is machine generated */ -#include -/* Header for class io_pdal_PointLayout */ - -#ifndef _Included_io_pdal_PointLayout -#define _Included_io_pdal_PointLayout -#ifdef __cplusplus -extern "C" { -#endif -/* - * Class: io_pdal_PointLayout - * Method: dimTypes - * Signature: ()[Lio/pdal/DimType; - */ -JNIEXPORT jobjectArray JNICALL Java_io_pdal_PointLayout_dimTypes - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointLayout - * Method: findDimType - * Signature: (Ljava/lang/String;)Lio/pdal/DimType; - */ -JNIEXPORT jobject JNICALL Java_io_pdal_PointLayout_findDimType - (JNIEnv *, jobject, jstring); - 
-/* - * Class: io_pdal_PointLayout - * Method: dimSize - * Signature: (Ljava/lang/String;)J - */ -JNIEXPORT jlong JNICALL Java_io_pdal_PointLayout_dimSize - (JNIEnv *, jobject, jstring); - -/* - * Class: io_pdal_PointLayout - * Method: dimPackedOffset - * Signature: (Ljava/lang/String;)J - */ -JNIEXPORT jlong JNICALL Java_io_pdal_PointLayout_dimPackedOffset - (JNIEnv *, jobject, jstring); - -/* - * Class: io_pdal_PointLayout - * Method: pointSize - * Signature: ()J - */ -JNIEXPORT jlong JNICALL Java_io_pdal_PointLayout_pointSize - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointLayout - * Method: dispose - * Signature: ()V - */ -JNIEXPORT void JNICALL Java_io_pdal_PointLayout_dispose - (JNIEnv *, jobject); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/java/native/src/include/io_pdal_PointView.h b/java/native/src/include/io_pdal_PointView.h deleted file mode 100644 index 0d26817d5a..0000000000 --- a/java/native/src/include/io_pdal_PointView.h +++ /dev/null @@ -1,77 +0,0 @@ -/* DO NOT EDIT THIS FILE - it is machine generated */ -#include -/* Header for class io_pdal_PointView */ - -#ifndef _Included_io_pdal_PointView -#define _Included_io_pdal_PointView -#ifdef __cplusplus -extern "C" { -#endif -/* - * Class: io_pdal_PointView - * Method: layout - * Signature: ()Lio/pdal/PointLayout; - */ -JNIEXPORT jobject JNICALL Java_io_pdal_PointView_layout - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointView - * Method: size - * Signature: ()I - */ -JNIEXPORT jint JNICALL Java_io_pdal_PointView_size - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointView - * Method: empty - * Signature: ()Z - */ -JNIEXPORT jboolean JNICALL Java_io_pdal_PointView_empty - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointView - * Method: getCrsProj4 - * Signature: ()Ljava/lang/String; - */ -JNIEXPORT jstring JNICALL Java_io_pdal_PointView_getCrsProj4 - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointView - * Method: getCrsWKT - * Signature: (IZ)Ljava/lang/String; - */ -JNIEXPORT jstring JNICALL Java_io_pdal_PointView_getCrsWKT - (JNIEnv *, jobject, jboolean); - -/* - * Class: io_pdal_PointView - * Method: getPackedPoint - * Signature: (J[Lio/pdal/DimType;)[B - */ -JNIEXPORT jbyteArray JNICALL Java_io_pdal_PointView_getPackedPoint - (JNIEnv *, jobject, jlong, jobjectArray); - -/* - * Class: io_pdal_PointView - * Method: getPackedPoints - * Signature: ([Lio/pdal/DimType;)[B - */ -JNIEXPORT jbyteArray JNICALL Java_io_pdal_PointView_getPackedPoints - (JNIEnv *, jobject, jobjectArray); - -/* - * Class: io_pdal_PointView - * Method: dispose - * Signature: ()V - */ -JNIEXPORT void JNICALL Java_io_pdal_PointView_dispose - (JNIEnv *, jobject); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/java/native/src/include/io_pdal_PointViewIterator.h b/java/native/src/include/io_pdal_PointViewIterator.h deleted file mode 100644 index 72518b24bc..0000000000 --- a/java/native/src/include/io_pdal_PointViewIterator.h +++ /dev/null @@ -1,37 +0,0 @@ -/* DO NOT EDIT THIS FILE - it is machine generated */ -#include -/* Header for class io_pdal_PointViewIterator */ - -#ifndef _Included_io_pdal_PointViewIterator -#define _Included_io_pdal_PointViewIterator -#ifdef __cplusplus -extern "C" { -#endif -/* - * Class: io_pdal_PointViewIterator - * Method: hasNext - * Signature: ()Z - */ -JNIEXPORT jboolean JNICALL Java_io_pdal_PointViewIterator_hasNext - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointViewIterator - * Method: next - * Signature: ()Lio/pdal/PointView; - */ -JNIEXPORT jobject JNICALL 
Java_io_pdal_PointViewIterator_next - (JNIEnv *, jobject); - -/* - * Class: io_pdal_PointViewIterator - * Method: dispose - * Signature: ()V - */ -JNIEXPORT void JNICALL Java_io_pdal_PointViewIterator_dispose - (JNIEnv *, jobject); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/java/native/src/io_pdal_Pipeline.cpp b/java/native/src/io_pdal_Pipeline.cpp deleted file mode 100644 index 2184d30ce8..0000000000 --- a/java/native/src/io_pdal_Pipeline.cpp +++ /dev/null @@ -1,144 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. 
-****************************************************************************/
-
-#include
-#include <iostream>
-#include <string>
-
-#include "io_pdal_Pipeline.h"
-#include "JavaPipeline.hpp"
-#include "JavaIterator.hpp"
-#include "Accessors.hpp"
-
-using libpdaljava::Pipeline;
-using libpdaljava::PointViewIterator;
-
-using pdal::PointViewSet;
-using pdal::PointView;
-using pdal::PointViewLess;
-using pdal::PointViewPtr;
-using pdal::pdal_error;
-
-JNIEXPORT void JNICALL Java_io_pdal_Pipeline_initialise
-  (JNIEnv *env, jobject obj)
-{
-    jclass c = env->GetObjectClass(obj);
-    jfieldID fid = env->GetFieldID(c, "json", "Ljava/lang/String;");
-    jstring jstr = reinterpret_cast<jstring>(env->GetObjectField(obj, fid));
-    setHandle(env, obj, new Pipeline(std::string(env->GetStringUTFChars(jstr, 0))));
-}
-
-JNIEXPORT void JNICALL Java_io_pdal_Pipeline_dispose
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    setHandle(env, obj, 0);
-    delete p;
-}
-
-JNIEXPORT void JNICALL Java_io_pdal_Pipeline_execute
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    p->execute();
-}
-
-JNIEXPORT jstring JNICALL Java_io_pdal_Pipeline_getMetadata
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    return env->NewStringUTF(p->getMetadata().c_str());
-}
-
-JNIEXPORT jstring JNICALL Java_io_pdal_Pipeline_getSchema
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    return env->NewStringUTF(p->getSchema().c_str());
-}
-
-JNIEXPORT jboolean JNICALL Java_io_pdal_Pipeline_validate
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    bool result;
-    try
-    {
-        result = p->validate();
-    }
-    catch(const pdal_error& pe)
-    {
-        std::cerr << "Runtime error: " << pe.what() << std::endl;
-        result = false;
-    }
-
-    return result;
-}
-
-JNIEXPORT void JNICALL Java_io_pdal_Pipeline_setLogLevel
-  (JNIEnv *env, jobject obj, jint i)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    p->setLogLevel(i);
-}
-
-JNIEXPORT jint JNICALL Java_io_pdal_Pipeline_getLogLevel
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    return p->getLogLevel();
-}
-
-JNIEXPORT jstring JNICALL Java_io_pdal_Pipeline_getLog
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    return env->NewStringUTF(p->getLog().c_str());
-}
-
-JNIEXPORT jobject JNICALL Java_io_pdal_Pipeline_getPointViews
-  (JNIEnv *env, jobject obj)
-{
-    Pipeline *p = getHandle<Pipeline>(env, obj);
-    PointViewSet pvset = p->getPointViews();
-
-    jclass pviClass = env->FindClass("io/pdal/PointViewIterator");
-    jmethodID pviCtor = env->GetMethodID(pviClass, "<init>", "()V");
-    jobject pvi = env->NewObject(pviClass, pviCtor);
-
-    PointViewIterator *it = new PointViewIterator(pvset);
-
-    setHandle(env, pvi, it);
-
-    return pvi;
-}
diff --git a/java/native/src/io_pdal_PointLayout.cpp b/java/native/src/io_pdal_PointLayout.cpp
deleted file mode 100644
index 8ee09f5494..0000000000
--- a/java/native/src/io_pdal_PointLayout.cpp
+++ /dev/null
@@ -1,133 +0,0 @@
-/******************************************************************************
-* Copyright (c) 2016, hobu Inc. (info@hobu.co)
-*
-* All rights reserved.
-*
-* Redistribution and use in source and binary forms, with or without
-* modification, are permitted provided that the following
-* conditions are met:
-*
-* * Redistributions of source code must retain the above copyright
-* notice, this list of conditions and the following disclaimer.
-* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. -****************************************************************************/ - -#include -#include -#include "io_pdal_PointLayout.h" -#include "JavaPipeline.hpp" -#include "Accessors.hpp" - -using pdal::PointLayout; -using pdal::DimTypeList; -using pdal::DimType; - -JNIEXPORT jobjectArray JNICALL Java_io_pdal_PointLayout_dimTypes - (JNIEnv *env, jobject obj) -{ - PointLayout *pl = getHandle(env, obj); - DimTypeList dimTypes = pl->dimTypes(); - - jclass dtClass = env->FindClass("io/pdal/DimType"); - jmethodID dtCtor = env->GetMethodID(dtClass, "", "(Ljava/lang/String;Ljava/lang/String;DD)V"); - - jobjectArray result = env->NewObjectArray(dimTypes.size(), dtClass, NULL); - - for (long i = 0; i < static_cast(dimTypes.size()); i++) - { - auto dt = dimTypes.at(i); - jstring id = env->NewStringUTF(pdal::Dimension::name(dt.m_id).c_str()); - jstring type = env->NewStringUTF(pdal::Dimension::interpretationName(dt.m_type).c_str()); - jobject element = env->NewObject(dtClass, dtCtor, id, type, dt.m_xform.m_scale.m_val, dt.m_xform.m_offset.m_val); - - env->SetObjectArrayElement(result, i, element); - - env->DeleteLocalRef(element); - env->DeleteLocalRef(type); - env->DeleteLocalRef(id); - } - - return result; -} - -JNIEXPORT jobject JNICALL Java_io_pdal_PointLayout_findDimType - (JNIEnv *env, jobject obj, jstring jstr) -{ - std::string fid = std::string(env->GetStringUTFChars(jstr, 0)); - PointLayout *pl = getHandle(env, obj); - DimType dt = pl->findDimType(fid); - jstring id = env->NewStringUTF(pdal::Dimension::name(dt.m_id).c_str()); - jstring type = env->NewStringUTF(pdal::Dimension::interpretationName(dt.m_type).c_str()); - - jclass dtClass = env->FindClass("io/pdal/DimType"); - jmethodID dtCtor = env->GetMethodID(dtClass, "", "(Ljava/lang/String;Ljava/lang/String;DD)V"); - jobject result = env->NewObject(dtClass, dtCtor, id, type, dt.m_xform.m_scale.m_val, dt.m_xform.m_offset.m_val); - - return result; -} - -JNIEXPORT jlong JNICALL Java_io_pdal_PointLayout_dimSize - (JNIEnv *env, jobject obj, jstring jstr) -{ - std::string fid = std::string(env->GetStringUTFChars(jstr, 0)); - PointLayout *pl = getHandle(env, obj); - - return pl->dimSize(pl->findDim(fid)); -} - -JNIEXPORT jlong JNICALL Java_io_pdal_PointLayout_dimPackedOffset - (JNIEnv *env, jobject obj, jstring jstr) -{ - std::string fid = 
std::string(env->GetStringUTFChars(jstr, 0)); - PointLayout *pl = getHandle(env, obj); - DimType dimType = pl->findDimType(fid); - DimTypeList dims = pl->dimTypes(); - - auto it = std::find_if(dims.begin(), dims.end(), [&dimType](const DimType& dt) { - return pdal::Dimension::name(dt.m_id) == pdal::Dimension::name(dimType.m_id); - }); - auto index = std::distance(dims.begin(), it); - long offset = 0; - - for(int i = 0; i < index; i++) - { - offset += pl->dimSize(dims.at(i).m_id); - } - - return offset; -} - -JNIEXPORT jlong JNICALL Java_io_pdal_PointLayout_pointSize - (JNIEnv *env, jobject obj) -{ - PointLayout *pl = getHandle(env, obj); - return pl->pointSize(); -} - -JNIEXPORT void JNICALL Java_io_pdal_PointLayout_dispose - (JNIEnv *env, jobject obj) -{ - // A bit unclear why we can't remove this pointer, probably wrapping here makes sense as well - // PointLayout *pl = getHandle(env, obj); - setHandle(env, obj, 0); - // delete pl; -} diff --git a/java/native/src/io_pdal_PointView.cpp b/java/native/src/io_pdal_PointView.cpp deleted file mode 100644 index ff87288c7b..0000000000 --- a/java/native/src/io_pdal_PointView.cpp +++ /dev/null @@ -1,213 +0,0 @@ -/****************************************************************************** -* Copyright (c) 2016, hobu Inc. (info@hobu.co) -* -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following -* conditions are met: -* -* * Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* * Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in -* the documentation and/or other materials provided -* with the distribution. -* * Neither the name of Hobu, Inc. nor the names of its -* contributors may be used to endorse or promote products derived -* from this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY -* OF SUCH DAMAGE. 
-****************************************************************************/ - -#include -#include -#include "io_pdal_PointView.h" -#include "JavaPipeline.hpp" -#include "PointViewRawPtr.hpp" -#include "Accessors.hpp" - -using libpdaljava::Pipeline; -using libpdaljava::PointViewRawPtr; - -using pdal::PointView; -using pdal::PointViewPtr; -using pdal::PointLayoutPtr; -using pdal::Dimension::Type; -using pdal::Dimension::Id; -using pdal::PointId; -using pdal::DimTypeList; -using pdal::SpatialReference; -using pdal::DimType; - -/// Converts JavaArray of DimTypes (In Java interpretation DimType is a pair of strings) -/// into DimTypeList (vector of DimTypes), puts dim size into bufSize -/// \param[in] env JNI environment -/// \param[in] dims JavaArray of DimTypes -/// \param[in] bufSize Dims sum size -/// \param[in] dimTypes Vector of DimTypes -void convertDimTypeJavaArrayToVector(JNIEnv *env, jobjectArray dims, std::size_t *pointSize, DimTypeList *dimTypes) { - for (jint i = 0; i < env->GetArrayLength(dims); i++) { - jobject jDimType = reinterpret_cast(env->GetObjectArrayElement(dims, i)); - jclass cDimType = env->GetObjectClass(jDimType); - jfieldID fid = env->GetFieldID(cDimType, "id", "Ljava/lang/String;"); - jfieldID ftype = env->GetFieldID(cDimType, "type", "Ljava/lang/String;"); - jfieldID fscale = env->GetFieldID(cDimType, "scale", "D"); - jfieldID foffset = env->GetFieldID(cDimType, "offset", "D"); - - jstring jid = reinterpret_cast(env->GetObjectField(jDimType, fid)); - jstring jtype = reinterpret_cast(env->GetObjectField(jDimType, ftype)); - jdouble jscale = env->GetDoubleField(jDimType, fscale); - jdouble joffset = env->GetDoubleField(jDimType, foffset); - - Id id = pdal::Dimension::id(std::string(env->GetStringUTFChars(jid, 0))); - Type type = pdal::Dimension::type(std::string(env->GetStringUTFChars(jtype, 0))); - - *pointSize += pdal::Dimension::size(type); - dimTypes->insert(dimTypes->begin() + i, DimType(id, type, jscale, joffset)); - } -} - -/// Fill a buffer with point data specified by the dimension list, accounts index -/// Using this functions it is possible to pack all points into one buffer -/// \param[in] pv PointView pointer. -/// \param[in] dims List of dimensions/types to retrieve. -/// \param[in] idx Index of point to get. -/// \param[in] buf Pointer to buffer to fill. 
-void appendPackedPoint(PointViewPtr pv, const DimTypeList& dims, PointId idx, std::size_t pointSize, char *buf) -{ - std::size_t from = idx * pointSize; - if(from >= pv->size() * pointSize) return; - buf += from; - pv->getPackedPoint(dims, idx, buf); -} - -JNIEXPORT jobject JNICALL Java_io_pdal_PointView_layout - (JNIEnv *env, jobject obj) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - PointLayoutPtr pl = pv->layout(); - - jclass pvlClass = env->FindClass("io/pdal/PointLayout"); - jmethodID pvlCtor = env->GetMethodID(pvlClass, "", "()V"); - jobject pvl = env->NewObject(pvlClass, pvlCtor); - - setHandle(env, pvl, pl); - - return pvl; -} - -JNIEXPORT jint JNICALL Java_io_pdal_PointView_size - (JNIEnv *env, jobject obj) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - return pv->size(); -} - -JNIEXPORT jboolean JNICALL Java_io_pdal_PointView_empty - (JNIEnv *env, jobject obj) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - return pv->empty(); -} - -JNIEXPORT jstring JNICALL Java_io_pdal_PointView_getCrsProj4 - (JNIEnv *env, jobject obj) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - return env->NewStringUTF(pv->spatialReference().getProj4().c_str()); -} - -JNIEXPORT jstring JNICALL Java_io_pdal_PointView_getCrsWKT - (JNIEnv *env, jobject obj, jboolean pretty) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - - std::string wkt = pv->spatialReference().getWKT(); - - if(pretty) wkt = SpatialReference::prettyWkt(wkt); - - return env->NewStringUTF(wkt.c_str()); -} - -JNIEXPORT jbyteArray JNICALL Java_io_pdal_PointView_getPackedPoint - (JNIEnv *env, jobject obj, jlong idx, jobjectArray dims) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - - PointLayoutPtr pl = pv->layout(); - - // we need to calculate buffer size - std::size_t pointSize = 0; - DimTypeList dimTypes; - - // calculate result buffer length (for one point) and get dimTypes - convertDimTypeJavaArrayToVector(env, dims, &pointSize, &dimTypes); - - char *buf = new char[pointSize]; - - pv->getPackedPoint(dimTypes, idx, buf); - - jbyteArray array = env->NewByteArray(pointSize); - env->SetByteArrayRegion (array, 0, pointSize, reinterpret_cast(buf)); - - delete[] buf; - - return array; -} - -JNIEXPORT jbyteArray JNICALL Java_io_pdal_PointView_getPackedPoints - (JNIEnv *env, jobject obj, jobjectArray dims) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - PointViewPtr pv = pvrp->shared_pointer; - - PointLayoutPtr pl = pv->layout(); - - // we need to calculate buffer size - std::size_t pointSize = 0; - DimTypeList dimTypes; - - // calculate result buffer length (for one point) and get dimTypes - convertDimTypeJavaArrayToVector(env, dims, &pointSize, &dimTypes); - - // reading all points - std::size_t bufSize = pointSize * pv->size(); - char *buf = new char[bufSize]; - - for (PointId idx = 0; idx < pv->size(); idx++) { - appendPackedPoint(pv, dimTypes, idx, pointSize, buf); - } - - jbyteArray array = env->NewByteArray(bufSize); - env->SetByteArrayRegion (array, 0, bufSize, reinterpret_cast(buf)); - - delete[] buf; - - return array; -} - -JNIEXPORT void JNICALL Java_io_pdal_PointView_dispose - (JNIEnv *env, jobject obj) -{ - PointViewRawPtr *pvrp = getHandle(env, obj); - setHandle(env, obj, 0); - delete pvrp; -} diff --git a/java/native/src/io_pdal_PointViewIterator.cpp 
b/java/native/src/io_pdal_PointViewIterator.cpp
deleted file mode 100644
index 1a007dd7a3..0000000000
--- a/java/native/src/io_pdal_PointViewIterator.cpp
+++ /dev/null
@@ -1,75 +0,0 @@
-/******************************************************************************
-* Copyright (c) 2016, hobu Inc. (info@hobu.co)
-*
-* All rights reserved.
-*
-* Redistribution and use in source and binary forms, with or without
-* modification, are permitted provided that the following
-* conditions are met:
-*
-* * Redistributions of source code must retain the above copyright
-* notice, this list of conditions and the following disclaimer.
-* * Redistributions in binary form must reproduce the above copyright
-* notice, this list of conditions and the following disclaimer in
-* the documentation and/or other materials provided
-* with the distribution.
-* * Neither the name of Hobu, Inc. nor the names of its
-* contributors may be used to endorse or promote products derived
-* from this software without specific prior written permission.
-*
-* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
-* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
-* OF SUCH DAMAGE.
-****************************************************************************/
-
-#include
-#include "io_pdal_PointViewIterator.h"
-#include "JavaPipeline.hpp"
-#include "JavaIterator.hpp"
-#include "PointViewRawPtr.hpp"
-#include "Accessors.hpp"
-
-using libpdaljava::PointViewIterator;
-using libpdaljava::PointViewRawPtr;
-
-JNIEXPORT jboolean JNICALL Java_io_pdal_PointViewIterator_hasNext
-  (JNIEnv *env, jobject obj)
-{
-    PointViewIterator *it = getHandle<PointViewIterator>(env, obj);
-    return it->hasNext();
-}
-
-JNIEXPORT jobject JNICALL Java_io_pdal_PointViewIterator_next
-  (JNIEnv *env, jobject obj)
-{
-    PointViewIterator *it = getHandle<PointViewIterator>(env, obj);
-
-    PointViewPtr pvptr = it->next();
-
-    jclass jpvClass = env->FindClass("io/pdal/PointView");
-    jmethodID jpvCtor = env->GetMethodID(jpvClass, "<init>", "()V");
-    jobject jpv = env->NewObject(jpvClass, jpvCtor);
-
-    PointViewRawPtr *pvrp = new PointViewRawPtr(pvptr);
-
-    setHandle(env, jpv, pvrp);
-
-    return jpv;
-}
-
-JNIEXPORT void JNICALL Java_io_pdal_PointViewIterator_dispose
-  (JNIEnv *env, jobject obj)
-{
-    PointViewIterator *it = getHandle<PointViewIterator>(env, obj);
-    setHandle(env, obj, 0);
-    delete it;
-}
diff --git a/java/project/Commands.scala b/java/project/Commands.scala
deleted file mode 100644
index 732712e9ec..0000000000
--- a/java/project/Commands.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/******************************************************************************
- * Copyright (c) 2016, hobu Inc. (info@hobu.co)
- *
- * All rights reserved.
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - ****************************************************************************/ - -import sbt._ -import sbt.Keys._ - -object Commands { - def processJavastyleCommand(commandProcess: String) = { - Command.command(s"${commandProcess}Javastyle")((state: State) => { - val extracted = Project extract state - import extracted._ - commandProcess :: append(Seq(crossPaths := false), state) - }) - } -} diff --git a/java/project/Dependencies.scala b/java/project/Dependencies.scala deleted file mode 100644 index 5065febdef..0000000000 --- a/java/project/Dependencies.scala +++ /dev/null @@ -1,10 +0,0 @@ -import sbt._ - -object Dependencies { - val circeCore = "io.circe" %% "circe-core" % Version.circe - val circeGeneric = "io.circe" %% "circe-generic" % Version.circe - val circeGenericExtras = "io.circe" %% "circe-generic-extras" % Version.circe - val circeParser = "io.circe" %% "circe-parser" % Version.circe - val jtsCore = "com.vividsolutions" % "jts-core" % Version.jtsCore - val scalaTest = "org.scalatest" %% "scalatest" % Version.scalaTest -} diff --git a/java/project/Environment.scala b/java/project/Environment.scala deleted file mode 100644 index c627c934dd..0000000000 --- a/java/project/Environment.scala +++ /dev/null @@ -1,47 +0,0 @@ -/****************************************************************************** - * Copyright (c) 2016, hobu Inc. (info@hobu.co) - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following - * conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Hobu, Inc. 
nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT - * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - * OF SUCH DAMAGE. - ****************************************************************************/ - -import sbt.ClasspathDependency - -import scala.util.Properties - -object Environment { - def either(environmentVariable: String, default: String): String = - Properties.envOrElse(environmentVariable, default) - - def dependOnNative(native: ClasspathDependency) = - if(pdalDependOnNative == "true") Seq(native) else Seq.empty - - lazy val versionSuffix = either("PDAL_VERSION_SUFFIX", "-SNAPSHOT") - lazy val pdalDependOnNative = either("PDAL_DEPEND_ON_NATIVE", "true") -} \ No newline at end of file diff --git a/java/project/Version.scala b/java/project/Version.scala deleted file mode 100644 index 37a4c541d1..0000000000 --- a/java/project/Version.scala +++ /dev/null @@ -1,5 +0,0 @@ -object Version { - val circe = "0.8.0" - val scalaTest = "3.0.4" - val jtsCore = "1.14.0" -} \ No newline at end of file diff --git a/java/project/build.properties b/java/project/build.properties deleted file mode 100644 index b7dd3cb2ae..0000000000 --- a/java/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.0.2 diff --git a/java/project/plugins.sbt b/java/project/plugins.sbt deleted file mode 100644 index 0498b1f1bb..0000000000 --- a/java/project/plugins.sbt +++ /dev/null @@ -1,5 +0,0 @@ -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5") -addSbtPlugin("ch.jodersky" % "sbt-jni" % "1.3.0") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0") -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.1") -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "3.0.1") diff --git a/java/sbt b/java/sbt deleted file mode 100755 index cc8453721e..0000000000 --- a/java/sbt +++ /dev/null @@ -1,578 +0,0 @@ -#!/usr/bin/env bash -# -# A more capable sbt runner, coincidentally also called sbt. 
-# Author: Paul Phillips - -set -o pipefail - -declare -r sbt_release_version="0.13.16" -declare -r sbt_unreleased_version="0.13.16" - -declare -r latest_213="2.13.0-M2" -declare -r latest_212="2.12.3" -declare -r latest_211="2.11.11" -declare -r latest_210="2.10.6" -declare -r latest_29="2.9.3" -declare -r latest_28="2.8.2" - -declare -r buildProps="project/build.properties" - -declare -r sbt_launch_ivy_release_repo="http://repo.typesafe.com/typesafe/ivy-releases" -declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" -declare -r sbt_launch_mvn_release_repo="http://repo.scala-sbt.org/scalasbt/maven-releases" -declare -r sbt_launch_mvn_snapshot_repo="http://repo.scala-sbt.org/scalasbt/maven-snapshots" - -declare -r default_jvm_opts_common="-Xms512m -Xmx1536m -Xss2m" -declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" - -declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new -declare sbt_explicit_version -declare verbose noshare batch trace_level -declare debugUs - -declare java_cmd="java" -declare sbt_launch_dir="$HOME/.sbt/launchers" -declare sbt_launch_repo - -# pull -J and -D options to give to java. -declare -a java_args scalac_args sbt_commands residual_args - -# args to jvm/sbt via files or environment variables -declare -a extra_jvm_opts extra_sbt_opts - -echoerr () { echo >&2 "$@"; } -vlog () { [[ -n "$verbose" ]] && echoerr "$@"; } -die () { echo "Aborting: $@" ; exit 1; } - -setTrapExit () { - # save stty and trap exit, to ensure echo is re-enabled if we are interrupted. - export SBT_STTY="$(stty -g 2>/dev/null)" - - # restore stty settings (echo in particular) - onSbtRunnerExit() { - [ -t 0 ] || return - vlog "" - vlog "restoring stty: $SBT_STTY" - stty "$SBT_STTY" - } - - vlog "saving stty: $SBT_STTY" - trap onSbtRunnerExit EXIT -} - -# this seems to cover the bases on OSX, and someone will -# have to tell me about the others. -get_script_path () { - local path="$1" - [[ -L "$path" ]] || { echo "$path" ; return; } - - local target="$(readlink "$path")" - if [[ "${target:0:1}" == "/" ]]; then - echo "$target" - else - echo "${path%/*}/$target" - fi -} - -declare -r script_path="$(get_script_path "$BASH_SOURCE")" -declare -r script_name="${script_path##*/}" - -init_default_option_file () { - local overriding_var="${!1}" - local default_file="$2" - if [[ ! -r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then - local envvar_file="${BASH_REMATCH[1]}" - if [[ -r "$envvar_file" ]]; then - default_file="$envvar_file" - fi - fi - echo "$default_file" -} - -declare sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)" -declare jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)" - -build_props_sbt () { - [[ -r "$buildProps" ]] && \ - grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }' -} - -update_build_props_sbt () { - local ver="$1" - local old="$(build_props_sbt)" - - [[ -r "$buildProps" ]] && [[ "$ver" != "$old" ]] && { - perl -pi -e "s/^sbt\.version\b.*\$/sbt.version=${ver}/" "$buildProps" - grep -q '^sbt.version[ =]' "$buildProps" || printf "\nsbt.version=%s\n" "$ver" >> "$buildProps" - - vlog "!!!" - vlog "!!! Updated file $buildProps setting sbt.version to: $ver" - vlog "!!! Previous value was: $old" - vlog "!!!" 
- } -} - -set_sbt_version () { - sbt_version="${sbt_explicit_version:-$(build_props_sbt)}" - [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version - export sbt_version -} - -url_base () { - local version="$1" - - case "$version" in - 0.7.*) echo "http://simple-build-tool.googlecode.com" ;; - 0.10.* ) echo "$sbt_launch_ivy_release_repo" ;; - 0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;; - 0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" - echo "$sbt_launch_ivy_snapshot_repo" ;; - 0.*) echo "$sbt_launch_ivy_release_repo" ;; - *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" - echo "$sbt_launch_mvn_snapshot_repo" ;; - *) echo "$sbt_launch_mvn_release_repo" ;; - esac -} - -make_url () { - local version="$1" - - local base="${sbt_launch_repo:-$(url_base "$version")}" - - case "$version" in - 0.7.*) echo "$base/files/sbt-launch-0.7.7.jar" ;; - 0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; - 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; - 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; - *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; - esac -} - -addJava () { vlog "[addJava] arg = '$1'" ; java_args+=("$1"); } -addSbt () { vlog "[addSbt] arg = '$1'" ; sbt_commands+=("$1"); } -addScalac () { vlog "[addScalac] arg = '$1'" ; scalac_args+=("$1"); } -addResidual () { vlog "[residual] arg = '$1'" ; residual_args+=("$1"); } - -addResolver () { addSbt "set resolvers += $1"; } -addDebugger () { addJava "-Xdebug" ; addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } -setThisBuild () { - vlog "[addBuild] args = '$@'" - local key="$1" && shift - addSbt "set $key in ThisBuild := $@" -} -setScalaVersion () { - [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")' - addSbt "++ $1" -} -setJavaHome () { - java_cmd="$1/bin/java" - setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))" - export JAVA_HOME="$1" - export JDK_HOME="$1" - export PATH="$JAVA_HOME/bin:$PATH" -} - -getJavaVersion() { "$1" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d \"; } - -checkJava() { - # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME - - [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" - [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" - - if [[ -n "$java" ]]; then - pathJavaVersion=$(getJavaVersion java) - homeJavaVersion=$(getJavaVersion "$java") - if [[ "$pathJavaVersion" != "$homeJavaVersion" ]]; then - echoerr "Warning: Java version mismatch between PATH and JAVA_HOME/JDK_HOME, sbt will use the one in PATH" - echoerr " Either: fix your PATH, remove JAVA_HOME/JDK_HOME or use -java-home" - echoerr " java version from PATH: $pathJavaVersion" - echoerr " java version from JAVA_HOME/JDK_HOME: $homeJavaVersion" - fi - fi -} - -java_version () { - local version=$(getJavaVersion "$java_cmd") - vlog "Detected Java version: $version" - echo "${version:2:1}" -} - -# MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+ -default_jvm_opts () { - local v="$(java_version)" - if [[ $v -ge 8 ]]; then - echo "$default_jvm_opts_common" - else - echo "-XX:MaxPermSize=384m $default_jvm_opts_common" - fi -} - -build_props_scala () { - if [[ -r "$buildProps" ]]; then - versionLine="$(grep '^build.scala.versions' "$buildProps")" - 
versionString="${versionLine##build.scala.versions=}" - echo "${versionString%% .*}" - fi -} - -execRunner () { - # print the arguments one to a line, quoting any containing spaces - vlog "# Executing command line:" && { - for arg; do - if [[ -n "$arg" ]]; then - if printf "%s\n" "$arg" | grep -q ' '; then - printf >&2 "\"%s\"\n" "$arg" - else - printf >&2 "%s\n" "$arg" - fi - fi - done - vlog "" - } - - setTrapExit - - if [[ -n "$batch" ]]; then - "$@" < /dev/null - else - "$@" - fi -} - -jar_url () { make_url "$1"; } - -is_cygwin () [[ "$(uname -a)" == "CYGWIN"* ]] - -jar_file () { - is_cygwin \ - && echo "$(cygpath -w $sbt_launch_dir/"$1"/sbt-launch.jar)" \ - || echo "$sbt_launch_dir/$1/sbt-launch.jar" -} - -download_url () { - local url="$1" - local jar="$2" - - echoerr "Downloading sbt launcher for $sbt_version:" - echoerr " From $url" - echoerr " To $jar" - - mkdir -p "${jar%/*}" && { - if which curl >/dev/null; then - curl --fail --silent --location "$url" --output "$jar" - elif which wget >/dev/null; then - wget -q -O "$jar" "$url" - fi - } && [[ -r "$jar" ]] -} - -acquire_sbt_jar () { - { - sbt_jar="$(jar_file "$sbt_version")" - [[ -r "$sbt_jar" ]] - } || { - sbt_jar="$HOME/.ivy2/local/org.scala-sbt/sbt-launch/$sbt_version/jars/sbt-launch.jar" - [[ -r "$sbt_jar" ]] - } || { - sbt_jar="$(jar_file "$sbt_version")" - download_url "$(make_url "$sbt_version")" "$sbt_jar" - } -} - -usage () { - set_sbt_version - cat < display stack traces with a max of frames (default: -1, traces suppressed) - -debug-inc enable debugging log for the incremental compiler - -no-colors disable ANSI color codes - -sbt-create start sbt even if current directory contains no sbt project - -sbt-dir path to global settings/plugins directory (default: ~/.sbt/) - -sbt-boot path to shared boot directory (default: ~/.sbt/boot in 0.11+) - -ivy path to local Ivy repository (default: ~/.ivy2) - -no-share use all local caches; no sharing - -offline put sbt in offline mode - -jvm-debug Turn on JVM debugging, open at the given port. 
- -batch Disable interactive mode - -prompt Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted - -script Run the specified file as a scala script - - # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version) - -sbt-force-latest force the use of the latest release of sbt: $sbt_release_version - -sbt-version use the specified version of sbt (default: $sbt_release_version) - -sbt-dev use the latest pre-release version of sbt: $sbt_unreleased_version - -sbt-jar use the specified jar as the sbt launcher - -sbt-launch-dir directory to hold sbt launchers (default: $sbt_launch_dir) - -sbt-launch-repo repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version")) - - # scala version (default: as chosen by sbt) - -28 use $latest_28 - -29 use $latest_29 - -210 use $latest_210 - -211 use $latest_211 - -212 use $latest_212 - -213 use $latest_213 - -scala-home use the scala build at the specified directory - -scala-version use the specified version of scala - -binary-version use the specified scala version when searching for dependencies - - # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) - -java-home alternate JAVA_HOME - - # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution - # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found - $(default_jvm_opts) - JVM_OPTS environment variable holding either the jvm args directly, or - the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts') - Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument. - -jvm-opts file containing jvm args (if not given, .jvmopts in project root is used if present) - -Dkey=val pass -Dkey=val directly to the jvm - -J-X pass option -X directly to the jvm (-J is stripped) - - # passing options to sbt, OR to this runner - SBT_OPTS environment variable holding either the sbt args directly, or - the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts') - Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument. 
- -sbt-opts file containing sbt args (if not given, .sbtopts in project root is used if present) - -S-X add -X to sbt's scalacOptions (-S is stripped) -EOM -} - -process_args () { - require_arg () { - local type="$1" - local opt="$2" - local arg="$3" - - if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then - die "$opt requires <$type> argument" - fi - } - while [[ $# -gt 0 ]]; do - case "$1" in - -h|-help) usage; exit 0 ;; - -v) verbose=true && shift ;; - -d) addSbt "--debug" && shift ;; - -w) addSbt "--warn" && shift ;; - -q) addSbt "--error" && shift ;; - -x) debugUs=true && shift ;; - -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;; - -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; - -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;; - -no-share) noshare=true && shift ;; - -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; - -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;; - -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; - -offline) addSbt "set offline in Global := true" && shift ;; - -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;; - -batch) batch=true && shift ;; - -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;; - -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;; - - -sbt-create) sbt_create=true && shift ;; - -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; - -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;; - -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;; - -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;; - -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;; - -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;; - -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;; - -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; - -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; - -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; - -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; - -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; - - -D*) addJava "$1" && shift ;; - -J*) addJava "${1:2}" && shift ;; - -S*) addScalac "${1:2}" && shift ;; - -28) setScalaVersion "$latest_28" && shift ;; - -29) setScalaVersion "$latest_29" && shift ;; - -210) setScalaVersion "$latest_210" && shift ;; - -211) setScalaVersion "$latest_211" && shift ;; - -212) setScalaVersion "$latest_212" && shift ;; - -213) setScalaVersion "$latest_213" && shift ;; - new) sbt_new=true && : ${sbt_explicit_version:=$sbt_release_version} && addResidual "$1" && shift ;; - *) addResidual "$1" && shift ;; - esac - done -} - -# process the direct command line arguments -process_args "$@" - -# skip #-styled comments and blank lines -readConfigFile() { - local end=false - until $end; do - read || end=true - [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY" - done < "$1" -} - -# if there are file/environment sbt_opts, process again so we -# can supply args to this runner -if [[ -r "$sbt_opts_file" ]]; then - vlog "Using sbt options defined in file $sbt_opts_file" - while read 
opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file") -elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then - vlog "Using sbt options defined in variable \$SBT_OPTS" - extra_sbt_opts=( $SBT_OPTS ) -else - vlog "No extra sbt options have been defined" -fi - -[[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}" - -# reset "$@" to the residual args -set -- "${residual_args[@]}" -argumentCount=$# - -# set sbt version -set_sbt_version - -checkJava - -# only exists in 0.12+ -setTraceLevel() { - case "$sbt_version" in - "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;; - *) setThisBuild traceLevel $trace_level ;; - esac -} - -# set scalacOptions if we were given any -S opts -[[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[@]}\"" - -# Update build.properties on disk to set explicit version - sbt gives us no choice -[[ -n "$sbt_explicit_version" && -z "$sbt_new" ]] && update_build_props_sbt "$sbt_explicit_version" -vlog "Detected sbt version $sbt_version" - -if [[ -n "$sbt_script" ]]; then - residual_args=( $sbt_script ${residual_args[@]} ) -else - # no args - alert them there's stuff in here - (( argumentCount > 0 )) || { - vlog "Starting $script_name: invoke with -help for other options" - residual_args=( shell ) - } -fi - -# verify this is an sbt dir, -create was given or user attempts to run a scala script -[[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" || -n "$sbt_new" ]] || { - cat <