GEOMESA-3356 Support for rootless docker in docker-based unit tests
elahrvivaz committed Apr 19, 2024
1 parent c24be75 · commit c3c70b1
Showing 5 changed files with 10 additions and 13 deletions.
Empty file.
@@ -37,7 +37,7 @@ class FileSystemRDDProviderTest extends Specification with LazyLogging {
   var sc: SQLContext = _

   lazy val path = s"${HadoopSharedCluster.Container.getHdfsUrl}/${getClass.getSimpleName}/"
-  lazy val params = Map("fs.path" -> path)
+  lazy val params = Map("fs.path" -> path, "fs.config.xml" -> HadoopSharedCluster.ContainerConfig)
   lazy val ds: DataStore = DataStoreFinder.getDataStore(params.asJava)

   val formats = Seq("orc", "parquet")
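
The test above now passes the shared Hadoop test container's settings to the FileSystem data store through the "fs.config.xml" parameter instead of relying on "fs.path" alone. As a rough, hypothetical sketch only (the real HadoopSharedCluster.ContainerConfig is not shown in this commit, and buildConfigXml below is an invented helper), a Hadoop Configuration can be serialized into the kind of <configuration> XML string such a parameter accepts:

    import java.io.StringWriter

    import org.apache.hadoop.conf.Configuration

    // Hypothetical helper: serialize container-specific Hadoop settings into a
    // <configuration> XML document, e.g. for a parameter like "fs.config.xml".
    // The property name set below is illustrative, not taken from this commit.
    def buildConfigXml(hdfsUrl: String): String = {
      val conf = new Configuration(false) // start empty, skip the default resources
      conf.set("fs.defaultFS", hdfsUrl)
      val writer = new StringWriter()
      conf.writeXml(writer) // writes a standard Hadoop <configuration> document
      writer.toString
    }
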
@@ -41,9 +41,6 @@ class FsManageMetadataCommandTest extends Specification {
     ScalaSimpleFeature.create(sft, "2", "name1", "2022-01-01T00:00:00.000Z", "POINT (0 0)")
   )

-  val gzipXml =
-    "<configuration><property><name>parquet.compression</name><value>GZIP</value></property></configuration>"
-
   val counter = new AtomicInteger(0)

   def nextPath(): String =
@@ -52,7 +49,7 @@
   "ManageMetadata command" should {
     "find file inconsistencies" in {
       val dir = nextPath()
-      val dsParams = Map("fs.path" -> dir, "fs.config.xml" -> gzipXml)
+      val dsParams = Map("fs.path" -> dir, "fs.config.xml" -> HadoopSharedCluster.ContainerConfig)
       WithClose(DataStoreFinder.getDataStore(dsParams.asJava).asInstanceOf[FileSystemDataStore]) { ds =>
         ds.createSchema(SimpleFeatureTypes.copy(sft))
         WithClose(ds.getFeatureWriterAppend(sft.getTypeName, Transaction.AUTO_COMMIT)) { writer =>
@@ -73,7 +70,7 @@
       val command = new FsManageMetadataCommand.CheckConsistencyCommand()
       command.params.path = dir
       command.params.featureName = sft.getTypeName
-      command.params.configuration = Collections.singletonList(s"fs.config.xml=$gzipXml")
+      command.params.configuration = Collections.singletonList(s"fs.config.xml=${HadoopSharedCluster.ContainerConfig}")
       command.params.repair = true
       command.execute()

@@ -89,7 +86,7 @@
     }
     "rebuild metadata from scratch" in {
       val dir = nextPath()
-      val dsParams = Map("fs.path" -> dir, "fs.config.xml" -> gzipXml)
+      val dsParams = Map("fs.path" -> dir, "fs.config.xml" -> HadoopSharedCluster.ContainerConfig)
       WithClose(DataStoreFinder.getDataStore(dsParams.asJava).asInstanceOf[FileSystemDataStore]) { ds =>
         ds.createSchema(SimpleFeatureTypes.copy(sft))
         WithClose(ds.getFeatureWriterAppend(sft.getTypeName, Transaction.AUTO_COMMIT)) { writer =>
@@ -114,7 +111,7 @@
       val command = new FsManageMetadataCommand.CheckConsistencyCommand()
       command.params.path = dir
       command.params.featureName = sft.getTypeName
-      command.params.configuration = Collections.singletonList(s"fs.config.xml=$gzipXml")
+      command.params.configuration = Collections.singletonList(s"fs.config.xml=${HadoopSharedCluster.ContainerConfig}")
       command.params.rebuild = true
       command.execute()
@@ -113,7 +113,7 @@ class RedisDataStoreTest extends Specification with LazyLogging {
     "expire features based on ingest time" in {
       val sft = featureType("ingest", Some("2 seconds"))

-      RedisSystemProperties.AgeOffInterval.threadLocalValue.set("500 ms")
+      RedisSystemProperties.AgeOffInterval.threadLocalValue.set("100 ms")
       val ds = try { DataStoreFinder.getDataStore(params.asJava).asInstanceOf[RedisDataStore] } finally {
         RedisSystemProperties.AgeOffInterval.threadLocalValue.remove()
       }
@@ -141,7 +141,7 @@ class RedisDataStoreTest extends Specification with LazyLogging {

       foreach(filters) { filter =>
         val query = new Query(sft.getTypeName, filter)
-        eventually(40, 100.millis) {
+        eventually(10, 1000.millis) {
           val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
           result must beEmpty
         }
@@ -161,7 +161,7 @@ class RedisDataStoreTest extends Specification with LazyLogging {

       val sft = featureType("time", Some(s"dtg($time ms)"))

-      RedisSystemProperties.AgeOffInterval.threadLocalValue.set("500 ms")
+      RedisSystemProperties.AgeOffInterval.threadLocalValue.set("100 ms")
       val ds = try { DataStoreFinder.getDataStore(params.asJava).asInstanceOf[RedisDataStore] } finally {
         RedisSystemProperties.AgeOffInterval.threadLocalValue.remove()
       }
@@ -191,7 +191,7 @@ class RedisDataStoreTest extends Specification with LazyLogging {
       foreach(filters) { filter =>
         val expected = features.drop(1).filter(filter.evaluate)
         val query = new Query(sft.getTypeName, filter)
-        eventually(40, 100.millis) {
+        eventually(10, 1000.millis) {
           val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
           result must containTheSameElementsAs(expected)
         }
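
In the Redis tests, the age-off interval drops from 500 ms to 100 ms so expiration runs more often, while the polling loop changes from eventually(40, 100.millis) to eventually(10, 1000.millis): fewer but longer waits, and a larger overall retry window (roughly 10 s of sleep instead of 4 s). A minimal, self-contained sketch of that specs2 polling pattern, assuming the same Specification setup as these tests and using a made-up counter in place of the Redis query:

    import java.util.concurrent.atomic.AtomicInteger

    import org.specs2.mutable.Specification

    import scala.concurrent.duration.DurationInt

    class EventuallyExampleSpec extends Specification {

      // Made-up stand-in for an asynchronous effect such as Redis age-off:
      // each evaluation moves the system one step closer to the expected state.
      private val attempts = new AtomicInteger(0)

      "a slow condition" should {
        "be retried until it holds" in {
          // Retry the block up to 10 times, sleeping 1000 ms between attempts,
          // so a transiently unmet expectation does not fail the test immediately.
          eventually(10, 1000.millis) {
            attempts.incrementAndGet() must beGreaterThan(3)
          }
        }
      }
    }
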
pom.xml (2 changes: 1 addition, 1 deletion)
@@ -119,7 +119,7 @@
     <junit.version>4.13.2</junit.version>
     <junit.jupiter.version>5.9.3</junit.jupiter.version>
     <testcontainers.version>1.19.7</testcontainers.version>
-    <testcontainers.accumulo.version>1.3.0</testcontainers.accumulo.version>
+    <testcontainers.accumulo.version>1.4.0</testcontainers.accumulo.version>
     <!-- needs to track hadoop: https://github.com/apache/hadoop/blob/release-3.3.6-RC1/hadoop-project/pom.xml#L1279C18-L1279C24 -->
     <hadoop.minicluster.mockito.version>2.28.2</hadoop.minicluster.mockito.version>
