diff --git a/docs/configuration/extensions.md b/docs/configuration/extensions.md index 575ad25d6551..29356ebc05cc 100644 --- a/docs/configuration/extensions.md +++ b/docs/configuration/extensions.md @@ -44,11 +44,11 @@ Core extensions are maintained by Druid committers. |druid-google-extensions|Google Cloud Storage deep storage.|[link](../development/extensions-core/google.md)| |druid-hdfs-storage|HDFS deep storage.|[link](../development/extensions-core/hdfs.md)| |druid-histogram|Approximate histograms and quantiles aggregator. Deprecated, please use the [DataSketches quantiles aggregator](../development/extensions-core/datasketches-quantiles.md) from the `druid-datasketches` extension instead.|[link](../development/extensions-core/approximate-histograms.md)| -|druid-kafka-extraction-namespace|Apache Kafka-based namespaced lookup. Requires namespace lookup extension.|[link](../development/extensions-core/kafka-extraction-namespace.md)| +|druid-kafka-extraction-namespace|Apache Kafka-based namespaced lookup. Requires namespace lookup extension.|[link](../querying/kafka-extraction-namespace.md)| |druid-kafka-indexing-service|Supervised exactly-once Apache Kafka ingestion for the indexing service.|[link](../ingestion/kafka-ingestion.md)| |druid-kinesis-indexing-service|Supervised exactly-once Kinesis ingestion for the indexing service.|[link](../ingestion/kinesis-ingestion.md)| |druid-kerberos|Kerberos authentication for druid processes.|[link](../development/extensions-core/druid-kerberos.md)| -|druid-lookups-cached-global|A module for [lookups](../querying/lookups.md) providing a jvm-global eager caching for lookups. It provides JDBC and URI implementations for fetching lookup data.|[link](../development/extensions-core/lookups-cached-global.md)| +|druid-lookups-cached-global|A module for [lookups](../querying/lookups.md) providing a jvm-global eager caching for lookups. It provides JDBC and URI implementations for fetching lookup data.|[link](../querying/lookups-cached-global.md)| |druid-lookups-cached-single| Per lookup caching module to support the use cases where a lookup need to be isolated from the global pool of lookups |[link](../development/extensions-core/druid-lookups.md)| |druid-multi-stage-query| Support for the multi-stage query architecture for Apache Druid and the multi-stage query task engine.|[link](../multi-stage-query/index.md)| |druid-orc-extensions|Support for data in Apache ORC data format.|[link](../development/extensions-core/orc.md)| diff --git a/docs/configuration/index.md b/docs/configuration/index.md index f3d189931b8b..f8583b958411 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -627,7 +627,7 @@ the [HTTP input source](../ingestion/input-sources.md#http-input-source). You can use the following properties to specify permissible JDBC options for: - [SQL input source](../ingestion/input-sources.md#sql-input-source) -- [globally cached JDBC lookups](../development/extensions-core/lookups-cached-global.md#jdbc-lookup) +- [globally cached JDBC lookups](../querying/lookups-cached-global.md#jdbc-lookup) - [JDBC Data Fetcher for per-lookup caching](../development/extensions-core/druid-lookups.md#data-fetcher-layer). These properties do not apply to metadata storage connections. 
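To make the connection between this JDBC allow-list and the relinked lookup pages concrete, the sketch below shows the `connectorConfig` block that a globally cached JDBC lookup passes; its `connectURI`, `user`, and `password` fields are the ones documented in the lookups-cached-global page touched later in this patch. The host, database name, credentials, and the `ssl`/`sslmode` URI parameters are placeholders, and any parameters you attach to `connectURI` must be permitted by the JDBC security configuration described above.

```json
{
  "connectorConfig": {
    "connectURI": "jdbc:postgresql://lookup-db.example.com:5432/lookups?ssl=true&sslmode=require",
    "user": "druid",
    "password": "diurd"
  }
}
```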
diff --git a/docs/development/extensions-core/kafka-extraction-namespace.md b/docs/querying/kafka-extraction-namespace.md similarity index 92% rename from docs/development/extensions-core/kafka-extraction-namespace.md rename to docs/querying/kafka-extraction-namespace.md index 2d841dfc9436..1cfa91aac554 100644 --- a/docs/development/extensions-core/kafka-extraction-namespace.md +++ b/docs/querying/kafka-extraction-namespace.md @@ -22,7 +22,7 @@ title: "Apache Kafka Lookups" ~ under the License. --> -To use this Apache Druid extension, [include](../../configuration/extensions.md#loading-extensions) `druid-lookups-cached-global` and `druid-kafka-extraction-namespace` in the extensions load list. +To use this Apache Druid extension, [include](../configuration/extensions.md#loading-extensions) `druid-lookups-cached-global` and `druid-kafka-extraction-namespace` in the extensions load list. If you need updates to populate as promptly as possible, it is possible to plug into a Kafka topic whose key is the old value and message is the desired new value (both in UTF-8) as a LookupExtractorFactory. @@ -41,13 +41,13 @@ If you need updates to populate as promptly as possible, it is possible to plug | `kafkaTopic` | The Kafka topic to read the data from | Yes || | `kafkaProperties` | Kafka consumer properties (`bootstrap.servers` must be specified) | Yes || | `connectTimeout` | How long to wait for an initial connection | No | `0` (do not wait) | -| `isOneToOne` | The map is a one-to-one (see [Lookup DimensionSpecs](../../querying/dimensionspecs.md)) | No | `false` | +| `isOneToOne` | Whether the map is one-to-one (see [Lookup DimensionSpecs](./dimensionspecs.md)) | No | `false` | The extension `kafka-extraction-namespace` enables reading from an [Apache Kafka](https://kafka.apache.org/) topic which has name/key pairs to allow renaming of dimension values. An example use case would be to rename an ID to a human-readable format. ## How it Works -The extractor works by consuming the configured Kafka topic from the beginning, and appending every record to an internal map. The key of the Kafka record is used as they key of the map, and the payload of the record is used as the value. At query time, a lookup can be used to transform the key into the associated value. See [lookups](../../querying/lookups.md) for how to configure and use lookups in a query. Keys and values are both stored as strings by the lookup extractor. +The extractor works by consuming the configured Kafka topic from the beginning, and appending every record to an internal map. The key of the Kafka record is used as the key of the map, and the payload of the record is used as the value. At query time, a lookup can be used to transform the key into the associated value. See [lookups](./lookups.md) for how to configure and use lookups in a query. Keys and values are both stored as strings by the lookup extractor. The extractor remains subscribed to the topic, so new records are added to the lookup map as they appear. This allows for lookup values to be updated in near-realtime. If two records are added to the topic with the same key, the record with the larger offset will replace the previous record in the lookup map. A record with a `null` payload will be treated as a tombstone record, and the associated key will be removed from the lookup map.
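Putting the parameter table and the "How it Works" description above together, a Kafka lookup extractor factory spec might look like the sketch below. It assumes the factory's registered type name is `kafka`; the topic name and broker addresses are placeholders, and `connectTimeout` and `isOneToOne` are shown at their documented defaults.

```json
{
  "type": "kafka",
  "kafkaTopic": "lookup-updates",
  "kafkaProperties": {
    "bootstrap.servers": "kafka01.example.com:9092,kafka02.example.com:9092"
  },
  "connectTimeout": 0,
  "isOneToOne": false
}
```

With such a spec in place, each record published to the placeholder `lookup-updates` topic adds an entry to the in-memory map (or removes one, when the payload is `null`), exactly as described in the "How it Works" section.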
diff --git a/docs/development/extensions-core/lookups-cached-global.md b/docs/querying/lookups-cached-global.md similarity index 93% rename from docs/development/extensions-core/lookups-cached-global.md rename to docs/querying/lookups-cached-global.md index 5cfcbea01c24..72c4189c2dad 100644 --- a/docs/development/extensions-core/lookups-cached-global.md +++ b/docs/querying/lookups-cached-global.md @@ -22,12 +22,12 @@ title: "Globally Cached Lookups" ~ under the License. --> -To use this Apache Druid extension, [include](../../configuration/extensions.md#loading-extensions) `druid-lookups-cached-global` in the extensions load list. +To use this Apache Druid extension, [include](../configuration/extensions.md#loading-extensions) `druid-lookups-cached-global` in the extensions load list. ## Configuration :::info Static configuration is no longer supported. Lookups can be configured through - [dynamic configuration](../../querying/lookups.md#configuration). + [dynamic configuration](./lookups.md#configuration). ::: Globally cached lookups are appropriate for lookups which are not possible to pass at query time due to their size, @@ -36,7 +36,7 @@ and are small enough to reasonably populate in-memory. This usually means tens t Globally cached lookups all draw from the same cache pool, allowing each process to have a fixed cache pool that can be used by cached lookups. -Globally cached lookups can be specified as part of the [cluster wide config for lookups](../../querying/lookups.md) as a type of `cachedNamespace` +Globally cached lookups can be specified as part of the [cluster wide config for lookups](./lookups.md) as a type of `cachedNamespace` ```json { @@ -84,7 +84,7 @@ The parameters are as follows |--------|-----------|--------|-------| |`extractionNamespace`|Specifies how to populate the local cache. See below|Yes|-| |`firstCacheTimeout`|How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait|No|`0` (do not wait)| -|`injective`|If the underlying map is [injective](../../querying/lookups.md#query-rewrites) (keys and values are unique) then optimizations can occur internally by setting this to `true`|No|`false`| +|`injective`|If the underlying map is [injective](./lookups.md#query-rewrites) (keys and values are unique) then optimizations can occur internally by setting this to `true`|No|`false`| If `firstCacheTimeout` is set to a non-zero value, it should be less than `druid.manager.lookups.hostUpdateTimeout`. If `firstCacheTimeout` is NOT set, then management is essentially asynchronous and does not know if a lookup succeeded or failed in starting. In such a case logs from the processes using lookups should be monitored for repeated failures. @@ -93,7 +93,7 @@ Proper functionality of globally cached lookups requires the following extension ## Example configuration -In a simple case where only one [tier](../../querying/lookups.md#dynamic-configuration) exists (`realtime_customer2`) with one `cachedNamespace` lookup called `country_code`, the resulting configuration JSON looks similar to the following: +In a simple case where only one [tier](./lookups.md#dynamic-configuration) exists (`realtime_customer2`) with one `cachedNamespace` lookup called `country_code`, the resulting configuration JSON looks similar to the following: ```json { @@ -170,7 +170,7 @@ It's highly recommended that `druid.lookup.namespace.numBufferedEntries` is set ## Supported lookups -For additional lookups, please see our [extensions list](../../configuration/extensions.md). 
+For additional lookups, please see our [extensions list](../configuration/extensions.md). ### URI lookup @@ -345,7 +345,7 @@ The JDBC lookups will poll a database to populate its local cache. If the `tsCol |Parameter|Description|Required|Default| |---------|-----------|--------|-------| -|`connectorConfig`|The connector config to use. You can set `connectURI`, `user` and `password`. You can selectively allow JDBC properties in `connectURI`. See [JDBC connections security config](../../configuration/index.md#jdbc-connections-to-external-databases) for more details.|Yes|| +|`connectorConfig`|The connector config to use. You can set `connectURI`, `user` and `password`. You can selectively allow JDBC properties in `connectURI`. See [JDBC connections security config](../configuration/index.md#jdbc-connections-to-external-databases) for more details.|Yes|| |`table`|The table which contains the key value pairs|Yes|| |`keyColumn`|The column in `table` which contains the keys|Yes|| |`valueColumn`|The column in `table` which contains the values|Yes|| @@ -377,7 +377,7 @@ The JDBC lookups will poll a database to populate its local cache. If the `tsCol :::info If using JDBC, you will need to add your database's client JAR files to the extension's directory. For Postgres, the connector JAR is already included. - See the MySQL extension documentation for instructions to obtain [MySQL](./mysql.md#installing-the-mysql-connector-library) or [MariaDB](./mysql.md#alternative-installing-the-mariadb-connector-library) connector libraries. + See the MySQL extension documentation for instructions to obtain [MySQL](../development/extensions-core/mysql.md#installing-the-mysql-connector-library) or [MariaDB](../development/extensions-core/mysql.md#alternative-installing-the-mariadb-connector-library) connector libraries. The connector JAR should reside in the classpath of Druid's main class loader. To add the connector JAR to the classpath, you can copy the downloaded file to `lib/` under the distribution root directory. Alternatively, create a symbolic link to the connector in the `lib` directory. ::: diff --git a/docs/querying/lookups.md b/docs/querying/lookups.md index bbc1b03faca8..a22fbf03928c 100644 --- a/docs/querying/lookups.md +++ b/docs/querying/lookups.md @@ -24,7 +24,7 @@ title: "Lookups" Lookups are a concept in Apache Druid where dimension values are (optionally) replaced with new values, allowing join-like functionality. Applying lookups in Druid is similar to joining a dimension table in a data warehouse. See -[dimension specs](../querying/dimensionspecs.md) for more information. For the purpose of these documents, a "key" +[dimension specs](./dimensionspecs.md) for more information. For the purpose of these documents, a "key" refers to a dimension value to match, and a "value" refers to its replacement. So if you wanted to map `appid-12345` to `Super Mega Awesome App` then the key would be `appid-12345` and the value would be `Super Mega Awesome App`. @@ -43,12 +43,12 @@ and such data belongs in the raw denormalized data for use in Druid. Lookups are generally preloaded in-memory on all servers. But very small lookups (on the order of a few dozen to a few hundred entries) can also be passed inline in native queries time using the "map" lookup type. Refer to the -[dimension specs](dimensionspecs.md) documentation for details. +[dimension specs](./dimensionspecs.md) documentation for details. 
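As a concrete illustration of the inline "map" lookup mentioned in the paragraph just above, the sketch below shows a lookup dimension spec that applies the `appid-12345` → `Super Mega Awesome App` mapping from the earlier key/value example. The `appid` dimension and `appName` output name are hypothetical, and the full field set is defined in the dimension specs documentation rather than here.

```json
{
  "type": "lookup",
  "dimension": "appid",
  "outputName": "appName",
  "lookup": {
    "type": "map",
    "map": {
      "appid-12345": "Super Mega Awesome App"
    }
  }
}
```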
Other lookup types are available as extensions, including: -- Globally cached lookups from local files, remote URIs, or JDBC through [lookups-cached-global](../development/extensions-core/lookups-cached-global.md). -- Globally cached lookups from a Kafka topic through [kafka-extraction-namespace](../development/extensions-core/kafka-extraction-namespace.md). +- Globally cached lookups from local files, remote URIs, or JDBC through [lookups-cached-global](./lookups-cached-global.md). +- Globally cached lookups from a Kafka topic through [kafka-extraction-namespace](./kafka-extraction-namespace.md). Query Syntax ------------ @@ -213,7 +213,7 @@ Injective lookups are eligible for the largest set of query rewrites. Injective function may encounter null input values. To determine whether a lookup is injective, Druid relies on an `injective` property that you can set in the -[lookup definition](../development/extensions-core/lookups-cached-global.md). In general, you should set +[lookup definition](./lookups-cached-global.md). In general, you should set `injective: true` for any lookup that satisfies the required properties, to allow Druid to run your queries as fast as possible. diff --git a/extensions-core/azure-extensions/pom.xml b/extensions-core/azure-extensions/pom.xml index 494b246094d1..2955d88c406c 100644 --- a/extensions-core/azure-extensions/pom.xml +++ b/extensions-core/azure-extensions/pom.xml @@ -143,8 +143,23 @@ - junit - junit + org.junit.jupiter + junit-jupiter + test + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params test diff --git a/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/ICloudSpecificObjectToCloudObjectLocationConverter.java b/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/ICloudSpecificObjectToCloudObjectLocationConverter.java deleted file mode 100644 index cc5ce4f2fb75..000000000000 --- a/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/ICloudSpecificObjectToCloudObjectLocationConverter.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.azure; - -import org.apache.druid.data.input.impl.CloudObjectLocation; - -/** - * Interface for converting between from some object, T, and a {@link CloudObjectLocation} object - * @param The object to convert to a {@link CloudObjectLocation} object - */ -public interface ICloudSpecificObjectToCloudObjectLocationConverter -{ - CloudObjectLocation createCloudObjectLocation(T cloudSpecificImpl); -} diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureEntityTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureEntityTest.java index 8453b71b4b70..540f7e8f5bac 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureEntityTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureEntityTest.java @@ -28,14 +28,16 @@ import org.apache.druid.storage.azure.AzureUtils; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.io.InputStream; import java.net.URI; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; + public class AzureEntityTest extends EasyMockSupport { private static final String STORAGE_ACCOUNT_NAME = "storageAccount"; @@ -49,8 +51,6 @@ public class AzureEntityTest extends EasyMockSupport private CloudObjectLocation location; private AzureByteSourceFactory byteSourceFactory; private AzureByteSource byteSource; - - private AzureEntity azureEntity; private AzureStorage azureStorage; static { @@ -62,7 +62,7 @@ public class AzureEntityTest extends EasyMockSupport } } - @Before + @BeforeEach public void setup() { location = createMock(CloudObjectLocation.class); @@ -80,13 +80,12 @@ public void test_getUri_returnsLocationUri() EasyMock.expect(location.toUri(AzureInputSource.SCHEME)).andReturn(ENTITY_URI); replayAll(); - azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); + final AzureEntity azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); URI actualUri = azureEntity.getUri(); - Assert.assertEquals(ENTITY_URI, actualUri); + assertEquals(ENTITY_URI, actualUri); verifyAll(); - } @Test @@ -95,20 +94,19 @@ public void test_getUri_returnsLocationUri_azureStorageScheme() EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity( + final AzureEntity azureEntity = new AzureEntity( new CloudObjectLocation(STORAGE_ACCOUNT_NAME, CONTAINER_NAME + "/" + BLOB_NAME), azureStorage, AzureStorageAccountInputSource.SCHEME, byteSourceFactory ); - Assert.assertEquals( + assertEquals( URI.create(AzureStorageAccountInputSource.SCHEME + "://" + STORAGE_ACCOUNT_NAME + "/" + CONTAINER_NAME + "/" + BLOB_NAME), azureEntity.getUri() ); verifyAll(); - } @Test @@ -120,10 +118,10 @@ public void test_readFromStart_returnsExpectedStream() throws Exception EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); + final AzureEntity azureEntity = new AzureEntity(location, azureStorage, 
AzureInputSource.SCHEME, byteSourceFactory); InputStream actualInputStream = azureEntity.readFrom(0); - Assert.assertSame(INPUT_STREAM, actualInputStream); + assertSame(INPUT_STREAM, actualInputStream); } @Test @@ -135,10 +133,10 @@ public void test_readFrom_returnsExpectedStream() throws Exception EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); + final AzureEntity azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); InputStream actualInputStream = azureEntity.readFrom(OFFSET); - Assert.assertSame(INPUT_STREAM, actualInputStream); + assertSame(INPUT_STREAM, actualInputStream); } @Test @@ -151,7 +149,7 @@ public void test_readFrom_throwsIOException_propogatesError() EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); + final AzureEntity azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); azureEntity.readFrom(OFFSET); } catch (IOException e) { @@ -167,10 +165,10 @@ public void test_getPath_returnsLocationPath() EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); + final AzureEntity azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); String actualPath = azureEntity.getPath(); - Assert.assertEquals(BLOB_NAME, actualPath); + assertEquals(BLOB_NAME, actualPath); verifyAll(); } @@ -180,20 +178,18 @@ public void test_getPath_azureStorageScheme() EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity( + final AzureEntity azureEntity = new AzureEntity( new CloudObjectLocation(STORAGE_ACCOUNT_NAME, CONTAINER_NAME + "/" + BLOB_NAME), azureStorage, AzureStorageAccountInputSource.SCHEME, byteSourceFactory ); - Assert.assertEquals( - CONTAINER_NAME + "/" + BLOB_NAME, - azureEntity.getPath() - ); + assertEquals(CONTAINER_NAME + "/" + BLOB_NAME, azureEntity.getPath()); verifyAll(); } + @Test public void test_getRetryCondition_returnsExpectedRetryCondition() { @@ -202,8 +198,8 @@ public void test_getRetryCondition_returnsExpectedRetryCondition() EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_NAME, azureStorage)).andReturn(byteSource); replayAll(); - azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); + final AzureEntity azureEntity = new AzureEntity(location, azureStorage, AzureInputSource.SCHEME, byteSourceFactory); Predicate actualRetryCondition = azureEntity.getRetryCondition(); - Assert.assertSame(AzureUtils.AZURE_RETRY, actualRetryCondition); + assertSame(AzureUtils.AZURE_RETRY, actualRetryCondition); } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceSerdeTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceSerdeTest.java index e11ce8ba5480..1fc572c3fd7d 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceSerdeTest.java +++ 
b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceSerdeTest.java @@ -32,39 +32,44 @@ import org.apache.druid.storage.azure.AzureStorage; import org.apache.druid.storage.azure.AzureStorageDruidModule; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.net.URI; import java.util.Collections; import java.util.List; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + public class AzureInputSourceSerdeTest extends EasyMockSupport { - private static final String JSON_WITH_URIS = "{\n" - + " \"type\": \"azure\",\n" - + " \"uris\": [\"azure://datacontainer2/wikipedia.json\"]\n" - + "}"; - - private static final String JSON_WITH_PREFIXES = "{\n" - + " \"type\": \"azure\",\n" - + " \"prefixes\": [\"azure://datacontainer2\"]\n" - + "}"; - - private static final String JSON_WITH_OBJECTS = "{\n" - + " \"type\": \"azure\",\n" - + " \"objects\": [\n" - + " { \"bucket\": \"container1\", \"path\": \"bar/file1.json\"},\n" - + " { \"bucket\": \"conatiner2\", \"path\": \"foo/file2.json\"}\n" - + " ]\n" - + " }"; + private static final String JSON_WITH_URIS = + "{\n" + + " \"type\": \"azure\",\n" + + " \"uris\": [\"azure://datacontainer2/wikipedia.json\"]\n" + + "}"; + + private static final String JSON_WITH_PREFIXES = + "{\n" + + " \"type\": \"azure\",\n" + + " \"prefixes\": [\"azure://datacontainer2\"]\n" + + "}"; + + private static final String JSON_WITH_OBJECTS = + "{\n" + + " \"type\": \"azure\",\n" + + " \"objects\": [\n" + + " { \"bucket\": \"container1\", \"path\": \"bar/file1.json\"},\n" + + " { \"bucket\": \"conatiner2\", \"path\": \"foo/file2.json\"}\n" + + " ]\n" + + "}"; private static final String JSON_WITH_URIS_AND_SYSFIELDS = "{\n" - + " \"type\": \"azure\",\n" - + " \"uris\": [\"azure://datacontainer2/wikipedia.json\"],\n" - + " \"systemFields\": [\"__file_uri\"]\n" + + " \"type\": \"azure\",\n" + + " \"uris\": [\"azure://datacontainer2/wikipedia.json\"],\n" + + " \"systemFields\": [\"__file_uri\"]\n" + "}"; private static final List EXPECTED_URIS; @@ -77,7 +82,6 @@ public class AzureInputSourceSerdeTest extends EasyMockSupport private AzureInputDataConfig inputDataConfig; private AzureAccountConfig accountConfig; - static { try { EXPECTED_URIS = ImmutableList.of(new URI("azure://datacontainer2/wikipedia.json")); @@ -92,7 +96,7 @@ public class AzureInputSourceSerdeTest extends EasyMockSupport } } - @Before + @BeforeEach public void setup() { azureStorage = createMock(AzureStorage.class); @@ -117,7 +121,6 @@ public void test_uriSerde_constructsProperAzureInputSource() throws Exception objectMapper.writeValueAsBytes(inputSource), AzureInputSource.class); verifyInputSourceWithUris(roundTripInputSource); - } @Test @@ -129,12 +132,12 @@ public void test_uriAndSystemFieldsSerde_constructsProperAzureInputSource() thro objectMapper.setInjectableValues(injectableValues); final AzureInputSource inputSource = objectMapper.readValue(JSON_WITH_URIS_AND_SYSFIELDS, AzureInputSource.class); - Assert.assertEquals(Collections.singleton(SystemField.URI), inputSource.getConfiguredSystemFields()); + assertEquals(Collections.singleton(SystemField.URI), inputSource.getConfiguredSystemFields()); final AzureInputSource roundTripInputSource = objectMapper.readValue( objectMapper.writeValueAsBytes(inputSource), AzureInputSource.class); - 
Assert.assertEquals(Collections.singleton(SystemField.URI), roundTripInputSource.getConfiguredSystemFields()); + assertEquals(Collections.singleton(SystemField.URI), roundTripInputSource.getConfiguredSystemFields()); } @Test @@ -153,7 +156,6 @@ public void test_prefixSerde_constructsProperAzureInputSource() throws Exception objectMapper.writeValueAsBytes(inputSource), AzureInputSource.class); verifyInputSourceWithPrefixes(roundTripInputSource); - } @Test @@ -186,24 +188,22 @@ private InjectableValues.Std initInjectableValues() private static void verifyInputSourceWithUris(final AzureInputSource inputSource) { - - Assert.assertEquals(EXPECTED_URIS, inputSource.getUris()); - Assert.assertNull(inputSource.getPrefixes()); - Assert.assertNull(inputSource.getObjects()); + assertEquals(EXPECTED_URIS, inputSource.getUris()); + assertNull(inputSource.getPrefixes()); + assertNull(inputSource.getObjects()); } private static void verifyInputSourceWithPrefixes(final AzureInputSource inputSource) { - - Assert.assertNull(inputSource.getUris()); - Assert.assertEquals(EXPECTED_PREFIXES, inputSource.getPrefixes()); - Assert.assertNull(inputSource.getObjects()); + assertNull(inputSource.getUris()); + assertEquals(EXPECTED_PREFIXES, inputSource.getPrefixes()); + assertNull(inputSource.getObjects()); } private static void verifyInputSourceWithObjects(final AzureInputSource inputSource) { - Assert.assertNull(inputSource.getUris()); - Assert.assertNull(inputSource.getPrefixes()); - Assert.assertEquals(EXPECTED_CLOUD_OBJECTS, inputSource.getObjects()); + assertNull(inputSource.getUris()); + assertNull(inputSource.getPrefixes()); + assertEquals(EXPECTED_CLOUD_OBJECTS, inputSource.getObjects()); } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceTest.java index a7cb7c708c6b..531fdf144109 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceTest.java @@ -42,10 +42,9 @@ import org.apache.druid.storage.azure.blob.CloudBlobHolder; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.net.URI; import java.nio.file.FileSystems; @@ -57,6 +56,11 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class AzureInputSourceTest extends EasyMockSupport { private static final String CONTAINER_NAME = "container"; @@ -84,12 +88,10 @@ public class AzureInputSourceTest extends EasyMockSupport private AzureInputDataConfig inputDataConfig; private InputSplit> inputSplit; - private AzureEntity azureEntity1; - private CloudBlobHolder cloudBlobDruid1; + private AzureEntity azureEntity; + private CloudBlobHolder cloudBlobDruid; private AzureCloudBlobIterable azureCloudBlobIterable; - private AzureInputSource azureInputSource; - static { try { PREFIX_URI = new URI(AzureInputSource.SCHEME + "://" + 
CONTAINER_NAME + "/" + BLOB_NAME); @@ -99,45 +101,51 @@ public class AzureInputSourceTest extends EasyMockSupport } } - @Before + @BeforeEach public void setup() { storage = createMock(AzureStorage.class); entityFactory = createMock(AzureEntityFactory.class); inputSplit = createMock(InputSplit.class); - azureEntity1 = createMock(AzureEntity.class); + azureEntity = createMock(AzureEntity.class); azureCloudBlobIterableFactory = createMock(AzureCloudBlobIterableFactory.class); inputDataConfig = createMock(AzureInputDataConfig.class); - cloudBlobDruid1 = createMock(CloudBlobHolder.class); + cloudBlobDruid = createMock(CloudBlobHolder.class); azureCloudBlobIterable = createMock(AzureCloudBlobIterable.class); } - @Test(expected = IllegalArgumentException.class) + @Test public void test_constructor_emptyUrisEmptyPrefixesEmptyObjects_throwsIllegalArgumentException() { replayAll(); - azureInputSource = new AzureInputSource( - storage, - entityFactory, - azureCloudBlobIterableFactory, - inputDataConfig, - EMPTY_URIS, - EMPTY_PREFIXES, - EMPTY_OBJECTS, - null, - null + + //noinspection ResultOfObjectAllocationIgnored + assertThrows( + IllegalArgumentException.class, + () -> new AzureInputSource( + storage, + entityFactory, + azureCloudBlobIterableFactory, + inputDataConfig, + EMPTY_URIS, + EMPTY_PREFIXES, + EMPTY_OBJECTS, + null, + null + ) ); } @Test public void test_createEntity_returnsExpectedEntity() { - EasyMock.expect(entityFactory.create(CLOUD_OBJECT_LOCATION_1, storage, AzureInputSource.SCHEME)).andReturn(azureEntity1); + EasyMock.expect(entityFactory.create(CLOUD_OBJECT_LOCATION_1, storage, AzureInputSource.SCHEME)) + .andReturn(azureEntity); EasyMock.expect(inputSplit.get()).andReturn(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)).times(2); replayAll(); List objects = ImmutableList.of(CLOUD_OBJECT_LOCATION_1); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -149,9 +157,9 @@ public void test_createEntity_returnsExpectedEntity() null ); - Assert.assertEquals(1, inputSplit.get().size()); + assertEquals(1, inputSplit.get().size()); AzureEntity actualAzureEntity = azureInputSource.createEntity(inputSplit.get().get(0)); - Assert.assertSame(azureEntity1, actualAzureEntity); + assertSame(azureEntity, actualAzureEntity); verifyAll(); } @@ -160,18 +168,18 @@ public void test_createSplits_successfullyCreatesCloudLocation_returnsExpectedLo { List prefixes = ImmutableList.of(PREFIX_URI); List> expectedCloudLocations = ImmutableList.of(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)); - List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid1); + List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid); Iterator expectedCloudBlobsIterator = expectedCloudBlobs.iterator(); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(prefixes, MAX_LISTING_LENGTH, storage)).andReturn( - azureCloudBlobIterable); + EasyMock.expect(azureCloudBlobIterableFactory.create(prefixes, MAX_LISTING_LENGTH, storage)) + .andReturn(azureCloudBlobIterable); EasyMock.expect(azureCloudBlobIterable.iterator()).andReturn(expectedCloudBlobsIterator); - EasyMock.expect(cloudBlobDruid1.getContainerName()).andReturn(CONTAINER).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getName()).andReturn(BLOB_PATH).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getBlobLength()).andReturn(100L).anyTimes(); + 
EasyMock.expect(cloudBlobDruid.getContainerName()).andReturn(CONTAINER).anyTimes(); + EasyMock.expect(cloudBlobDruid.getName()).andReturn(BLOB_PATH).anyTimes(); + EasyMock.expect(cloudBlobDruid.getBlobLength()).andReturn(100L).anyTimes(); replayAll(); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -191,7 +199,7 @@ public void test_createSplits_successfullyCreatesCloudLocation_returnsExpectedLo List> actualCloudLocationList = cloudObjectStream.map(InputSplit::get) .collect(Collectors.toList()); verifyAll(); - Assert.assertEquals(expectedCloudLocations, actualCloudLocationList); + assertEquals(expectedCloudLocations, actualCloudLocationList); } @Test @@ -199,7 +207,7 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL { List prefixes = ImmutableList.of(PREFIX_URI); List> expectedCloudLocations = ImmutableList.of(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)); - List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid1); + List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid); Iterator expectedCloudBlobsIterator = expectedCloudBlobs.iterator(); String objectGlob = "**.csv"; @@ -211,16 +219,16 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL ); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(prefixes, MAX_LISTING_LENGTH, storage)).andReturn( - azureCloudBlobIterable); + EasyMock.expect(azureCloudBlobIterableFactory.create(prefixes, MAX_LISTING_LENGTH, storage)) + .andReturn(azureCloudBlobIterable); EasyMock.expect(azureCloudBlobIterable.iterator()).andReturn(expectedCloudBlobsIterator); - EasyMock.expect(cloudBlobDruid1.getBlobLength()).andReturn(100L).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getContainerName()).andReturn(CONTAINER).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getName()).andReturn(BLOB_PATH).anyTimes(); + EasyMock.expect(cloudBlobDruid.getBlobLength()).andReturn(100L).anyTimes(); + EasyMock.expect(cloudBlobDruid.getContainerName()).andReturn(CONTAINER).anyTimes(); + EasyMock.expect(cloudBlobDruid.getName()).andReturn(BLOB_PATH).anyTimes(); replayAll(); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -240,7 +248,7 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL List> actualCloudLocationList = cloudObjectStream.map(InputSplit::get) .collect(Collectors.toList()); verifyAll(); - Assert.assertEquals(expectedCloudLocations, actualCloudLocationList); + assertEquals(expectedCloudLocations, actualCloudLocationList); } @Test @@ -250,7 +258,7 @@ public void test_withSplit_constructsExpectedInputSource() EasyMock.expect(inputSplit.get()).andReturn(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)); replayAll(); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -263,7 +271,7 @@ public void test_withSplit_constructsExpectedInputSource() ); SplittableInputSource> newInputSource = azureInputSource.withSplit(inputSplit); - Assert.assertTrue(newInputSource.isSplittable()); + assertTrue(newInputSource.isSplittable()); verifyAll(); } @@ -271,7 +279,7 @@ public void test_withSplit_constructsExpectedInputSource() public void 
test_toString_returnsExpectedString() { List prefixes = ImmutableList.of(PREFIX_URI); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -284,7 +292,7 @@ public void test_toString_returnsExpectedString() ); String actualToString = azureInputSource.toString(); - Assert.assertEquals( + assertEquals( "AzureInputSource{uris=[], prefixes=[azure://container/blob], objects=[], objectGlob=null}", actualToString ); @@ -294,7 +302,7 @@ public void test_toString_returnsExpectedString() public void test_toString_withAllSystemFields_returnsExpectedString() { List prefixes = ImmutableList.of(PREFIX_URI); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -307,7 +315,7 @@ public void test_toString_withAllSystemFields_returnsExpectedString() ); String actualToString = azureInputSource.toString(); - Assert.assertEquals( + assertEquals( "AzureInputSource{" + "uris=[], " + "prefixes=[azure://container/blob], " @@ -323,7 +331,7 @@ public void test_toString_withAllSystemFields_returnsExpectedString() public void test_getTypes_returnsExpectedTypes() { List prefixes = ImmutableList.of(PREFIX_URI); - azureInputSource = new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -334,13 +342,13 @@ public void test_getTypes_returnsExpectedTypes() null, null ); - Assert.assertEquals(ImmutableSet.of(AzureInputSource.SCHEME), azureInputSource.getTypes()); + assertEquals(ImmutableSet.of(AzureInputSource.SCHEME), azureInputSource.getTypes()); } @Test public void test_systemFields() { - azureInputSource = (AzureInputSource) new AzureInputSource( + final AzureInputSource azureInputSource = new AzureInputSource( storage, entityFactory, azureCloudBlobIterableFactory, @@ -352,7 +360,7 @@ public void test_systemFields() new SystemFields(EnumSet.of(SystemField.URI, SystemField.BUCKET, SystemField.PATH)) ); - Assert.assertEquals( + assertEquals( EnumSet.of(SystemField.URI, SystemField.BUCKET, SystemField.PATH), azureInputSource.getConfiguredSystemFields() ); @@ -364,9 +372,9 @@ public void test_systemFields() (containerName, blobPath, storage) -> null ); - Assert.assertEquals("azure://foo/bar", azureInputSource.getSystemFieldValue(entity, SystemField.URI)); - Assert.assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); - Assert.assertEquals("bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); + assertEquals("azure://foo/bar", azureInputSource.getSystemFieldValue(entity, SystemField.URI)); + assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); + assertEquals("bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); } @Test @@ -375,7 +383,11 @@ public void abidesEqualsContract() EqualsVerifier.forClass(AzureInputSource.class) .usingGetClass() .withPrefabValues(Logger.class, new Logger(AzureStorage.class), new Logger(AzureStorage.class)) - .withPrefabValues(BlobContainerClient.class, new BlobContainerClientBuilder().buildClient(), new BlobContainerClientBuilder().buildClient()) + .withPrefabValues( + BlobContainerClient.class, + new BlobContainerClientBuilder().buildClient(), + new BlobContainerClientBuilder().buildClient() + ) .withPrefabValues(AzureStorage.class, new AzureStorage(null, null), new AzureStorage(null, 
null)) .withNonnullFields("storage") .withNonnullFields("entityFactory") @@ -386,7 +398,7 @@ public void abidesEqualsContract() .verify(); } - @After + @AfterEach public void cleanup() { resetAll(); diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java index 8d17d9ba01e6..d50472ed9194 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java @@ -45,10 +45,9 @@ import org.apache.druid.storage.azure.blob.CloudBlobHolder; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.net.URI; import java.nio.file.FileSystems; @@ -57,23 +56,28 @@ import java.util.EnumSet; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class AzureStorageAccountInputSourceTest extends EasyMockSupport { private static final String BLOB_NAME = "blob"; private static final URI PREFIX_URI; - private final List EMPTY_URIS = ImmutableList.of(); - private final List EMPTY_PREFIXES = ImmutableList.of(); - private final List EMPTY_OBJECTS = ImmutableList.of(); private static final String STORAGE_ACCOUNT = "STORAGE_ACCOUNT"; private static final String DEFAULT_STORAGE_ACCOUNT = "DEFAULT_STORAGE_ACCOUNT"; private static final String CONTAINER = "CONTAINER"; private static final String BLOB_PATH = "BLOB_PATH.csv"; - private static final CloudObjectLocation CLOUD_OBJECT_LOCATION_1 = new CloudObjectLocation(STORAGE_ACCOUNT, CONTAINER + "/" + BLOB_PATH); + private static final CloudObjectLocation CLOUD_OBJECT_LOCATION_1 = new CloudObjectLocation( + STORAGE_ACCOUNT, + CONTAINER + "/" + BLOB_PATH + ); private static final int MAX_LISTING_LENGTH = 10; - private static final InputFormat INPUT_FORMAT = new JsonInputFormat( new JSONPathSpec(true, null), null, @@ -82,72 +86,85 @@ public class AzureStorageAccountInputSourceTest extends EasyMockSupport null ); + private final List EMPTY_URIS = ImmutableList.of(); + private final List EMPTY_PREFIXES = ImmutableList.of(); + private final List EMPTY_OBJECTS = ImmutableList.of(); private AzureStorage storage; private AzureEntityFactory entityFactory; private AzureCloudBlobIterableFactory azureCloudBlobIterableFactory; private AzureInputDataConfig inputDataConfig; private AzureStorageAccountInputSourceConfig azureStorageAccountInputSourceConfig; private AzureAccountConfig azureAccountConfig; - private InputSplit> inputSplit; - private AzureEntity azureEntity1; - private CloudBlobHolder cloudBlobDruid1; + private AzureEntity azureEntity; + private CloudBlobHolder cloudBlobDruid; private AzureCloudBlobIterable azureCloudBlobIterable; - private AzureStorageAccountInputSource azureInputSource; - static { try { - 
PREFIX_URI = new URI(AzureStorageAccountInputSource.SCHEME + "://" + STORAGE_ACCOUNT + "/" + CONTAINER + "/" + BLOB_NAME); + PREFIX_URI = new URI( + AzureStorageAccountInputSource.SCHEME + "://" + STORAGE_ACCOUNT + "/" + CONTAINER + "/" + BLOB_NAME + ); } catch (Exception e) { throw new RuntimeException(e); } } - @Before + @BeforeEach public void setup() { storage = createMock(AzureStorage.class); entityFactory = createMock(AzureEntityFactory.class); inputSplit = createMock(InputSplit.class); - azureEntity1 = createMock(AzureEntity.class); + azureEntity = createMock(AzureEntity.class); azureCloudBlobIterableFactory = createMock(AzureCloudBlobIterableFactory.class); inputDataConfig = createMock(AzureInputDataConfig.class); - cloudBlobDruid1 = createMock(CloudBlobHolder.class); + cloudBlobDruid = createMock(CloudBlobHolder.class); azureCloudBlobIterable = createMock(AzureCloudBlobIterable.class); azureStorageAccountInputSourceConfig = createMock(AzureStorageAccountInputSourceConfig.class); azureAccountConfig = createMock(AzureAccountConfig.class); EasyMock.expect(azureAccountConfig.getAccount()).andReturn(DEFAULT_STORAGE_ACCOUNT).anyTimes(); } - @Test(expected = IllegalArgumentException.class) + @Test public void test_constructor_emptyUrisEmptyPrefixesEmptyObjects_throwsIllegalArgumentException() { replayAll(); - azureInputSource = new AzureStorageAccountInputSource( - entityFactory, - azureCloudBlobIterableFactory, - inputDataConfig, - azureAccountConfig, - EMPTY_URIS, - EMPTY_PREFIXES, - EMPTY_OBJECTS, - null, - azureStorageAccountInputSourceConfig, - null + + //noinspection ResultOfObjectAllocationIgnored + assertThrows( + IllegalArgumentException.class, + () -> new AzureStorageAccountInputSource( + entityFactory, + azureCloudBlobIterableFactory, + inputDataConfig, + azureAccountConfig, + EMPTY_URIS, + EMPTY_PREFIXES, + EMPTY_OBJECTS, + null, + azureStorageAccountInputSourceConfig, + null + ) ); } @Test public void test_createEntity_returnsExpectedEntity() { - EasyMock.expect(entityFactory.create(EasyMock.eq(CLOUD_OBJECT_LOCATION_1), EasyMock.anyObject(AzureStorage.class), EasyMock.eq(AzureStorageAccountInputSource.SCHEME))).andReturn(azureEntity1); + EasyMock.expect( + entityFactory.create( + EasyMock.eq(CLOUD_OBJECT_LOCATION_1), + EasyMock.anyObject(AzureStorage.class), + EasyMock.eq(AzureStorageAccountInputSource.SCHEME) + ) + ).andReturn(azureEntity); EasyMock.expect(inputSplit.get()).andReturn(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)).times(2); replayAll(); List objects = ImmutableList.of(CLOUD_OBJECT_LOCATION_1); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -160,9 +177,9 @@ public void test_createEntity_returnsExpectedEntity() null ); - Assert.assertEquals(1, inputSplit.get().size()); + assertEquals(1, inputSplit.get().size()); AzureEntity actualAzureEntity = azureInputSource.createEntity(inputSplit.get().get(0)); - Assert.assertSame(azureEntity1, actualAzureEntity); + assertSame(azureEntity, actualAzureEntity); verifyAll(); } @@ -171,19 +188,24 @@ public void test_createSplits_successfullyCreatesCloudLocation_returnsExpectedLo { List prefixes = ImmutableList.of(PREFIX_URI); List> expectedCloudLocations = ImmutableList.of(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)); - List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid1); + List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid); Iterator 
expectedCloudBlobsIterator = expectedCloudBlobs.iterator(); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(EasyMock.eq(prefixes), EasyMock.eq(MAX_LISTING_LENGTH), EasyMock.anyObject(AzureStorage.class))).andReturn( - azureCloudBlobIterable); + EasyMock.expect( + azureCloudBlobIterableFactory.create( + EasyMock.eq(prefixes), + EasyMock.eq(MAX_LISTING_LENGTH), + EasyMock.anyObject(AzureStorage.class) + ) + ).andReturn(azureCloudBlobIterable); EasyMock.expect(azureCloudBlobIterable.iterator()).andReturn(expectedCloudBlobsIterator); - EasyMock.expect(cloudBlobDruid1.getStorageAccount()).andReturn(STORAGE_ACCOUNT).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getContainerName()).andReturn(CONTAINER).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getName()).andReturn(BLOB_PATH).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getBlobLength()).andReturn(100L).anyTimes(); + EasyMock.expect(cloudBlobDruid.getStorageAccount()).andReturn(STORAGE_ACCOUNT).anyTimes(); + EasyMock.expect(cloudBlobDruid.getContainerName()).andReturn(CONTAINER).anyTimes(); + EasyMock.expect(cloudBlobDruid.getName()).andReturn(BLOB_PATH).anyTimes(); + EasyMock.expect(cloudBlobDruid.getBlobLength()).andReturn(100L).anyTimes(); replayAll(); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -201,10 +223,11 @@ public void test_createSplits_successfullyCreatesCloudLocation_returnsExpectedLo new MaxSizeSplitHintSpec(null, 1) ); - List> actualCloudLocationList = cloudObjectStream.map(InputSplit::get) + List> actualCloudLocationList = cloudObjectStream + .map(InputSplit::get) .collect(Collectors.toList()); verifyAll(); - Assert.assertEquals(expectedCloudLocations, actualCloudLocationList); + assertEquals(expectedCloudLocations, actualCloudLocationList); } @Test @@ -212,7 +235,7 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL { List prefixes = ImmutableList.of(PREFIX_URI); List> expectedCloudLocations = ImmutableList.of(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)); - List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid1); + List expectedCloudBlobs = ImmutableList.of(cloudBlobDruid); Iterator expectedCloudBlobsIterator = expectedCloudBlobs.iterator(); String objectGlob = "**.csv"; @@ -224,17 +247,22 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL ); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(EasyMock.eq(prefixes), EasyMock.eq(MAX_LISTING_LENGTH), EasyMock.anyObject(AzureStorage.class))).andReturn( - azureCloudBlobIterable); + EasyMock.expect( + azureCloudBlobIterableFactory.create( + EasyMock.eq(prefixes), + EasyMock.eq(MAX_LISTING_LENGTH), + EasyMock.anyObject(AzureStorage.class) + ) + ).andReturn(azureCloudBlobIterable); EasyMock.expect(azureCloudBlobIterable.iterator()).andReturn(expectedCloudBlobsIterator); - EasyMock.expect(cloudBlobDruid1.getStorageAccount()).andReturn(STORAGE_ACCOUNT).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getBlobLength()).andReturn(100L).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getContainerName()).andReturn(CONTAINER).anyTimes(); - EasyMock.expect(cloudBlobDruid1.getName()).andReturn(BLOB_PATH).anyTimes(); + 
EasyMock.expect(cloudBlobDruid.getStorageAccount()).andReturn(STORAGE_ACCOUNT).anyTimes(); + EasyMock.expect(cloudBlobDruid.getBlobLength()).andReturn(100L).anyTimes(); + EasyMock.expect(cloudBlobDruid.getContainerName()).andReturn(CONTAINER).anyTimes(); + EasyMock.expect(cloudBlobDruid.getName()).andReturn(BLOB_PATH).anyTimes(); replayAll(); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -252,10 +280,11 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL new MaxSizeSplitHintSpec(null, 1) ); - List> actualCloudLocationList = cloudObjectStream.map(InputSplit::get) + List> actualCloudLocationList = cloudObjectStream + .map(InputSplit::get) .collect(Collectors.toList()); verifyAll(); - Assert.assertEquals(expectedCloudLocations, actualCloudLocationList); + assertEquals(expectedCloudLocations, actualCloudLocationList); } @Test @@ -265,7 +294,7 @@ public void test_withSplit_constructsExpectedInputSource() EasyMock.expect(inputSplit.get()).andReturn(ImmutableList.of(CLOUD_OBJECT_LOCATION_1)); replayAll(); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -279,30 +308,33 @@ public void test_withSplit_constructsExpectedInputSource() ); SplittableInputSource> newInputSource = azureInputSource.withSplit(inputSplit); - Assert.assertTrue(newInputSource.isSplittable()); + assertTrue(newInputSource.isSplittable()); verifyAll(); } @Test public void test_toString_returnsExpectedString() { - List prefixes = ImmutableList.of(PREFIX_URI); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, azureAccountConfig, EMPTY_URIS, - prefixes, + ImmutableList.of(PREFIX_URI), EMPTY_OBJECTS, null, azureStorageAccountInputSourceConfig, null ); - String azureStorageAccountInputSourceString = azureInputSource.toString(); - Assert.assertEquals( - "AzureStorageAccountInputSource{uris=[], prefixes=[azureStorage://STORAGE_ACCOUNT/CONTAINER/blob], objects=[], objectGlob=null, azureStorageAccountInputSourceConfig=" + azureStorageAccountInputSourceConfig + "}", - azureStorageAccountInputSourceString + + assertEquals( + String.format( + Locale.ENGLISH, + "AzureStorageAccountInputSource{uris=[], prefixes=[azureStorage://STORAGE_ACCOUNT/CONTAINER/blob], objects=[], objectGlob=null, azureStorageAccountInputSourceConfig=%s}", + azureStorageAccountInputSourceConfig + ), + azureInputSource.toString() ); } @@ -310,7 +342,7 @@ public void test_toString_returnsExpectedString() public void test_toString_withAllSystemFields_returnsExpectedString() { List prefixes = ImmutableList.of(PREFIX_URI); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -325,15 +357,15 @@ public void test_toString_withAllSystemFields_returnsExpectedString() String azureStorageAccountInputSourceString = azureInputSource.toString(); - Assert.assertEquals( + assertEquals( "AzureStorageAccountInputSource{" - + "uris=[], " - + "prefixes=[azureStorage://STORAGE_ACCOUNT/CONTAINER/blob], " - 
+ "objects=[], " - + "objectGlob=null, " - + "azureStorageAccountInputSourceConfig=" + azureStorageAccountInputSourceConfig + ", " - + "systemFields=[__file_uri, __file_bucket, __file_path]" - + "}", + + "uris=[], " + + "prefixes=[azureStorage://STORAGE_ACCOUNT/CONTAINER/blob], " + + "objects=[], " + + "objectGlob=null, " + + "azureStorageAccountInputSourceConfig=" + azureStorageAccountInputSourceConfig + ", " + + "systemFields=[__file_uri, __file_bucket, __file_path]" + + "}", azureStorageAccountInputSourceString ); } @@ -342,7 +374,7 @@ public void test_toString_withAllSystemFields_returnsExpectedString() public void test_getTypes_returnsExpectedTypes() { List prefixes = ImmutableList.of(PREFIX_URI); - azureInputSource = new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -354,13 +386,13 @@ public void test_getTypes_returnsExpectedTypes() azureStorageAccountInputSourceConfig, null ); - Assert.assertEquals(ImmutableSet.of(AzureStorageAccountInputSource.SCHEME), azureInputSource.getTypes()); + assertEquals(ImmutableSet.of(AzureStorageAccountInputSource.SCHEME), azureInputSource.getTypes()); } @Test public void test_systemFields() { - azureInputSource = (AzureStorageAccountInputSource) new AzureStorageAccountInputSource( + final AzureStorageAccountInputSource azureInputSource = new AzureStorageAccountInputSource( entityFactory, azureCloudBlobIterableFactory, inputDataConfig, @@ -373,7 +405,7 @@ public void test_systemFields() new SystemFields(EnumSet.of(SystemField.URI, SystemField.BUCKET, SystemField.PATH)) ); - Assert.assertEquals( + assertEquals( EnumSet.of(SystemField.URI, SystemField.BUCKET, SystemField.PATH), azureInputSource.getConfiguredSystemFields() ); @@ -385,19 +417,31 @@ public void test_systemFields() (containerName, blobPath, storage) -> null ); - Assert.assertEquals("azureStorage://foo/container/bar", azureInputSource.getSystemFieldValue(entity, SystemField.URI)); - Assert.assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); - Assert.assertEquals("container/bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); + assertEquals( + "azureStorage://foo/container/bar", + azureInputSource.getSystemFieldValue(entity, SystemField.URI) + ); + assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); + assertEquals("container/bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); } @Test public void abidesEqualsContract() { - EqualsVerifier.forClass(AzureStorageAccountInputSource.class) + EqualsVerifier + .forClass(AzureStorageAccountInputSource.class) .usingGetClass() .withPrefabValues(Logger.class, new Logger(AzureStorage.class), new Logger(AzureStorage.class)) - .withPrefabValues(BlobContainerClient.class, new BlobContainerClientBuilder().buildClient(), new BlobContainerClientBuilder().buildClient()) - .withPrefabValues(AzureIngestClientFactory.class, new AzureIngestClientFactory(null, null), new AzureIngestClientFactory(null, null)) + .withPrefabValues( + BlobContainerClient.class, + new BlobContainerClientBuilder().buildClient(), + new BlobContainerClientBuilder().buildClient() + ) + .withPrefabValues( + AzureIngestClientFactory.class, + new AzureIngestClientFactory(null, null), + new AzureIngestClientFactory(null, null) + ) .withIgnoredFields("entityFactory") .withIgnoredFields("azureCloudBlobIterableFactory") 
.withNonnullFields("inputDataConfig") @@ -415,9 +459,8 @@ public void test_getContainerAndPathFromObjectLocation() Pair storageLocation = AzureStorageAccountInputSource.getContainerAndPathFromObjectLocation( CLOUD_OBJECT_LOCATION_1 ); - Assert.assertEquals(CONTAINER, storageLocation.lhs); - Assert.assertEquals(BLOB_PATH, storageLocation.rhs); - + assertEquals(CONTAINER, storageLocation.lhs); + assertEquals(BLOB_PATH, storageLocation.rhs); } @Test @@ -426,12 +469,11 @@ public void test_getContainerAndPathFromObjectLocatio_nullpath() Pair storageLocation = AzureStorageAccountInputSource.getContainerAndPathFromObjectLocation( new CloudObjectLocation(STORAGE_ACCOUNT, CONTAINER) ); - Assert.assertEquals(CONTAINER, storageLocation.lhs); - Assert.assertEquals("", storageLocation.rhs); - + assertEquals(CONTAINER, storageLocation.lhs); + assertEquals("", storageLocation.rhs); } - @After + @AfterEach public void cleanup() { resetAll(); diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureAccountConfigTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureAccountConfigTest.java index d22f112198ee..27328e2a97bc 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureAccountConfigTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureAccountConfigTest.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; public class AzureAccountConfigTest { @@ -35,8 +36,8 @@ public void test_getBlobStorageEndpoint_endpointSuffixNullAndStorageAccountEndpo { AzureAccountConfig config = new AzureAccountConfig(); AzureAccountConfig configSerde = MAPPER.readValue("{}", AzureAccountConfig.class); - Assert.assertEquals(configSerde, config); - Assert.assertEquals(AzureUtils.AZURE_STORAGE_HOST_ADDRESS, config.getBlobStorageEndpoint()); + assertEquals(configSerde, config); + assertEquals(AzureUtils.AZURE_STORAGE_HOST_ADDRESS, config.getBlobStorageEndpoint()); } @Test @@ -51,8 +52,8 @@ public void test_getBlobStorageEndpoint_endpointSuffixNotNullAndStorageAccountEn + "\"endpointSuffix\": \"" + endpointSuffix + "\"" + "}", AzureAccountConfig.class); - Assert.assertEquals(configSerde, config); - Assert.assertEquals(AzureUtils.BLOB + "." + endpointSuffix, config.getBlobStorageEndpoint()); + assertEquals(configSerde, config); + assertEquals(AzureUtils.BLOB + "." + endpointSuffix, config.getBlobStorageEndpoint()); } @Test @@ -70,8 +71,8 @@ public void test_getBlobStorageEndpoint_endpointSuffixNotNullAndStorageAccountEn + " \"storageAccountEndpointSuffix\": \"" + storageAccountEndpointSuffix + "\"" + "}", AzureAccountConfig.class); - Assert.assertEquals(configSerde, config); - Assert.assertEquals(AzureUtils.BLOB + "." + endpointSuffix, config.getBlobStorageEndpoint()); + assertEquals(configSerde, config); + assertEquals(AzureUtils.BLOB + "." 
+ endpointSuffix, config.getBlobStorageEndpoint()); } @Test @@ -86,8 +87,8 @@ public void test_getBlobStorageEndpoint_endpointSuffixNullAndStorageAccountEndpo + "\"storageAccountEndpointSuffix\": \"" + storageAccountEndpointSuffix + "\"" + "}", AzureAccountConfig.class); - Assert.assertEquals(configSerde, config); - Assert.assertEquals(storageAccountEndpointSuffix, config.getBlobStorageEndpoint()); + assertEquals(configSerde, config); + assertEquals(storageAccountEndpointSuffix, config.getBlobStorageEndpoint()); } @Test @@ -102,7 +103,7 @@ public void test_getManagedIdentityClientId_withValueForManagedIdentityClientId_ + "\"managedIdentityClientId\": \"" + managedIdentityClientId + "\"" + "}", AzureAccountConfig.class); - Assert.assertEquals(configSerde, config); - Assert.assertEquals("blah", config.getManagedIdentityClientId()); + assertEquals(configSerde, config); + assertEquals("blah", config.getManagedIdentityClientId()); } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java index f8c7af2470a0..53983e9b9f58 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java @@ -23,11 +23,13 @@ import com.azure.storage.blob.models.BlobStorageException; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.io.InputStream; +import static org.junit.jupiter.api.Assertions.assertThrows; + public class AzureByteSourceTest extends EasyMockSupport { private static final long NO_OFFSET = 0L; @@ -71,8 +73,8 @@ public void test_openStream_withOffset_succeeds() throws IOException, BlobStorag verifyAll(); } - @Test(expected = IOException.class) - public void openStreamWithRecoverableErrorTest() throws BlobStorageException, IOException + @Test + public void openStreamWithRecoverableErrorTest() throws BlobStorageException { final String containerName = "container"; final String blobPath = "/path/to/file"; @@ -80,19 +82,14 @@ public void openStreamWithRecoverableErrorTest() throws BlobStorageException, IO HttpResponse httpResponse = createMock(HttpResponse.class); EasyMock.expect(httpResponse.getStatusCode()).andReturn(500).anyTimes(); EasyMock.replay(httpResponse); - EasyMock.expect(azureStorage.getBlockBlobInputStream(NO_OFFSET, containerName, blobPath)).andThrow( - new BlobStorageException( - "", - httpResponse, - null - ) - ); + EasyMock.expect(azureStorage.getBlockBlobInputStream(NO_OFFSET, containerName, blobPath)) + .andThrow(new BlobStorageException("", httpResponse, null)); EasyMock.replay(azureStorage); AzureByteSource byteSource = new AzureByteSource(azureStorage, containerName, blobPath); - byteSource.openStream(); + assertThrows(IOException.class, byteSource::openStream); verifyAll(); } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureClientFactoryTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureClientFactoryTest.java index 795f00542243..5c4d1c433c84 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureClientFactoryTest.java +++ 
b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureClientFactoryTest.java @@ -25,8 +25,7 @@ import com.azure.storage.common.StorageSharedKeyCredential; import com.google.common.collect.ImmutableMap; import org.apache.druid.java.util.common.concurrent.Execs; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.net.MalformedURLException; import java.net.URL; @@ -35,6 +34,10 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + public class AzureClientFactoryTest { private AzureClientFactory azureClientFactory; @@ -46,7 +49,7 @@ public void test_blobServiceClient_accountName() AzureAccountConfig config = new AzureAccountConfig(); azureClientFactory = new AzureClientFactory(config); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); - Assert.assertEquals(ACCOUNT, blobServiceClient.getAccountName()); + assertEquals(ACCOUNT, blobServiceClient.getAccountName()); } @Test @@ -59,12 +62,14 @@ public void test_blobServiceClientBuilder_key() throws MalformedURLException StorageSharedKeyCredential storageSharedKeyCredential = StorageSharedKeyCredential.getSharedKeyCredentialFromPipeline( blobServiceClient.getHttpPipeline() ); - Assert.assertNotNull(storageSharedKeyCredential); + assertNotNull(storageSharedKeyCredential); // Azure doesn't let us look at the key in the StorageSharedKeyCredential so make sure the authorization header generated is what we expect. - Assert.assertEquals( - new StorageSharedKeyCredential(ACCOUNT, "key").generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()), - storageSharedKeyCredential.generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()) + assertEquals( + new StorageSharedKeyCredential(ACCOUNT, "key") + .generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()), + storageSharedKeyCredential + .generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()) ); } @@ -82,7 +87,7 @@ public void test_blobServiceClientBuilder_sasToken() } } - Assert.assertNotNull(azureSasCredentialPolicy); + assertNotNull(azureSasCredentialPolicy); } @Test @@ -99,7 +104,7 @@ public void test_blobServiceClientBuilder_useDefaultCredentialChain() } } - Assert.assertNotNull(bearerTokenAuthenticationPolicy); + assertNotNull(bearerTokenAuthenticationPolicy); } @Test @@ -110,7 +115,7 @@ public void test_blobServiceClientBuilder_useCachedClient() azureClientFactory = new AzureClientFactory(config); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); BlobServiceClient blobServiceClient2 = azureClientFactory.getBlobServiceClient(null, ACCOUNT); - Assert.assertEquals(blobServiceClient, blobServiceClient2); + assertEquals(blobServiceClient, blobServiceClient2); } @Test @@ -121,7 +126,7 @@ public void test_blobServiceClientBuilder_useNewClientForDifferentRetryCount() azureClientFactory = new AzureClientFactory(config); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); BlobServiceClient blobServiceClient2 = azureClientFactory.getBlobServiceClient(1, ACCOUNT); - Assert.assertNotEquals(blobServiceClient, blobServiceClient2); + assertNotEquals(blobServiceClient, 
blobServiceClient2); } @Test @@ -132,7 +137,7 @@ public void test_blobServiceClientBuilder_useAzureAccountConfig_asDefaultMaxTrie azureClientFactory = new AzureClientFactory(config); BlobServiceClient expectedBlobServiceClient = azureClientFactory.getBlobServiceClient(AzureAccountConfig.DEFAULT_MAX_TRIES, ACCOUNT); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); - Assert.assertEquals(expectedBlobServiceClient, blobServiceClient); + assertEquals(expectedBlobServiceClient, blobServiceClient); } @Test @@ -146,7 +151,7 @@ public void test_blobServiceClientBuilder_useAzureAccountConfigWithNonDefaultEnd URL expectedAccountUrl = new URL(AzureAccountConfig.DEFAULT_PROTOCOL, ACCOUNT + "." + AzureUtils.BLOB + "." + endpointSuffix, ""); azureClientFactory = new AzureClientFactory(config); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); - Assert.assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); + assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); } @Test @@ -162,7 +167,7 @@ public void test_blobServiceClientBuilder_useAzureAccountConfigWithStorageAccoun URL expectedAccountUrl = new URL(AzureAccountConfig.DEFAULT_PROTOCOL, ACCOUNT + "." + AzureUtils.BLOB + "." + endpointSuffix, ""); azureClientFactory = new AzureClientFactory(config); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); - Assert.assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); + assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); } @Test @@ -176,7 +181,7 @@ public void test_blobServiceClientBuilder_useAzureAccountConfigWithStorageAccoun URL expectedAccountUrl = new URL(AzureAccountConfig.DEFAULT_PROTOCOL, ACCOUNT + "." 
+ storageAccountEndpointSuffix, ""); azureClientFactory = new AzureClientFactory(config); BlobServiceClient blobServiceClient = azureClientFactory.getBlobServiceClient(null, ACCOUNT); - Assert.assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); + assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); } @Test @@ -213,14 +218,17 @@ private void concurrentAzureClientFactoryGets() throws Exception latch.countDown(); latch.await(); BlobServiceClient blobServiceClient = localAzureClientFactory.getBlobServiceClient(retry, ACCOUNT); - Assert.assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); + assertEquals(expectedAccountUrl.toString(), blobServiceClient.getAccountUrl()); } catch (Exception e) { failureException.compareAndSet(null, e); } }); } + + //noinspection ResultOfMethodCallIgnored executorService.awaitTermination(1000, TimeUnit.MICROSECONDS); + if (failureException.get() != null) { throw failureException.get(); } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureCloudBlobIteratorTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureCloudBlobIteratorTest.java index 996028377ed5..a77c6d104138 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureCloudBlobIteratorTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureCloudBlobIteratorTest.java @@ -30,53 +30,54 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.storage.azure.blob.CloudBlobHolder; import org.easymock.EasyMock; -import org.easymock.EasyMockRunner; +import org.easymock.EasyMockExtension; import org.easymock.EasyMockSupport; import org.easymock.Mock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + import java.net.URI; +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; import java.util.stream.Collectors; -@RunWith(EasyMockRunner.class) +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; + +@ExtendWith(EasyMockExtension.class) public class AzureCloudBlobIteratorTest extends EasyMockSupport { @Mock private AzureStorage storage; - private AzureCloudBlobIterator azureCloudBlobIterator; private final AzureAccountConfig config = new AzureAccountConfig(); private final Integer MAX_TRIES = 3; private final Integer MAX_LISTING_LENGTH = 10; private final String CONTAINER = "container"; - private final String STORAGE_ACCOUNT = "storageAccount"; private final String DEFAULT_STORAGE_ACCOUNT = "defaultStorageAccount"; - - @Before + @BeforeEach public void setup() { config.setMaxTries(MAX_TRIES); config.setAccount(DEFAULT_STORAGE_ACCOUNT); - } @Test public void test_hasNext_noBlobs_returnsFalse() { - azureCloudBlobIterator = new AzureCloudBlobIterator( + final AzureCloudBlobIterator azureCloudBlobIterator = new AzureCloudBlobIterator( storage, config, ImmutableList.of(), 1 ); boolean hasNext = azureCloudBlobIterator.hasNext(); - Assert.assertFalse(hasNext); + assertFalse(hasNext); } @Test @@ -103,7 +104,7 @@ public void 
test_next_prefixesWithMultipleBlobsAndSomeDirectories_returnsExpecte .andReturn(pagedIterable2); replayAll(); - azureCloudBlobIterator = new AzureCloudBlobIterator( + final AzureCloudBlobIterator azureCloudBlobIterator = new AzureCloudBlobIterator( storage, config, prefixes, @@ -118,16 +119,16 @@ public void test_next_prefixesWithMultipleBlobsAndSomeDirectories_returnsExpecte new CloudBlobHolder(blobItem, CONTAINER, DEFAULT_STORAGE_ACCOUNT), new CloudBlobHolder(blobItem2, CONTAINER, DEFAULT_STORAGE_ACCOUNT) ); - Assert.assertEquals(expectedBlobItems.size(), actualBlobItems.size()); - Assert.assertEquals( + assertEquals(expectedBlobItems.size(), actualBlobItems.size()); + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()) ); @@ -150,7 +151,7 @@ public void test_next_prefixesWithMultipleBlobsAndOneDirectory_returnsExpectedBl replayAll(); - azureCloudBlobIterator = new AzureCloudBlobIterator( + final AzureCloudBlobIterator azureCloudBlobIterator = new AzureCloudBlobIterator( storage, config, prefixes, @@ -165,34 +166,36 @@ public void test_next_prefixesWithMultipleBlobsAndOneDirectory_returnsExpectedBl new CloudBlobHolder(blobItem, CONTAINER, DEFAULT_STORAGE_ACCOUNT), new CloudBlobHolder(blobItem2, CONTAINER, DEFAULT_STORAGE_ACCOUNT) ); - Assert.assertEquals(expectedBlobItems.size(), actualBlobItems.size()); - Assert.assertEquals( + assertEquals(expectedBlobItems.size(), actualBlobItems.size()); + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()) ); } @Test - public void test_next_prefixesWithMultipleBlobsAndSomeDirectories_returnsExpectedBlobs_azureStorage() throws Exception + public void test_next_prefixesWithMultipleBlobsAndSomeDirectories_returnsExpectedBlobs_azureStorage() + throws URISyntaxException { + final String storageAccount = "storageAccount"; List prefixes = ImmutableList.of( - new URI(StringUtils.format("azureStorage://%s/%s/dir1", STORAGE_ACCOUNT, CONTAINER)), - new URI(StringUtils.format("azureStorage://%s/%s/dir2", STORAGE_ACCOUNT, CONTAINER)) + new URI(StringUtils.format("azureStorage://%s/%s/dir1", storageAccount, CONTAINER)), + new URI(StringUtils.format("azureStorage://%s/%s/dir2", storageAccount, CONTAINER)) ); BlobItem blobItem = new BlobItem().setName("blobName").setProperties(new BlobItemProperties().setContentLength(10L)); SettableSupplier> supplier = new SettableSupplier<>(); supplier.set(new 
TestPagedResponse<>(ImmutableList.of(blobItem))); PagedIterable pagedIterable = new PagedIterable<>(supplier); - EasyMock.expect(storage.listBlobsWithPrefixInContainerSegmented(STORAGE_ACCOUNT, CONTAINER, "dir1", MAX_LISTING_LENGTH, MAX_TRIES)) + EasyMock.expect(storage.listBlobsWithPrefixInContainerSegmented(storageAccount, CONTAINER, "dir1", MAX_LISTING_LENGTH, MAX_TRIES)) .andReturn(pagedIterable); BlobItem blobPrefixItem = new BlobItem().setIsPrefix(true).setName("subdir").setProperties(new BlobItemProperties()); @@ -200,11 +203,11 @@ public void test_next_prefixesWithMultipleBlobsAndSomeDirectories_returnsExpecte SettableSupplier> supplier2 = new SettableSupplier<>(); supplier2.set(new TestPagedResponse<>(ImmutableList.of(blobPrefixItem, blobItem2))); PagedIterable pagedIterable2 = new PagedIterable<>(supplier2); - EasyMock.expect(storage.listBlobsWithPrefixInContainerSegmented(STORAGE_ACCOUNT, CONTAINER, "dir2", MAX_LISTING_LENGTH, MAX_TRIES)) + EasyMock.expect(storage.listBlobsWithPrefixInContainerSegmented(storageAccount, CONTAINER, "dir2", MAX_LISTING_LENGTH, MAX_TRIES)) .andReturn(pagedIterable2); replayAll(); - azureCloudBlobIterator = new AzureCloudBlobIterator( + final AzureCloudBlobIterator azureCloudBlobIterator = new AzureCloudBlobIterator( storage, config, prefixes, @@ -215,20 +218,21 @@ public void test_next_prefixesWithMultipleBlobsAndSomeDirectories_returnsExpecte actualBlobItems.add(azureCloudBlobIterator.next()); } verifyAll(); + List expectedBlobItems = ImmutableList.of( - new CloudBlobHolder(blobItem, CONTAINER, STORAGE_ACCOUNT), - new CloudBlobHolder(blobItem2, CONTAINER, STORAGE_ACCOUNT) + new CloudBlobHolder(blobItem, CONTAINER, storageAccount), + new CloudBlobHolder(blobItem2, CONTAINER, storageAccount) ); - Assert.assertEquals(expectedBlobItems.size(), actualBlobItems.size()); - Assert.assertEquals( + assertEquals(expectedBlobItems.size(), actualBlobItems.size()); + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()) ); @@ -251,7 +255,7 @@ public void test_next_emptyObjects_skipEmptyObjects() throws Exception .andReturn(pagedIterable); replayAll(); - azureCloudBlobIterator = new AzureCloudBlobIterator( + final AzureCloudBlobIterator azureCloudBlobIterator = new AzureCloudBlobIterator( storage, config, prefixes, @@ -265,34 +269,35 @@ public void test_next_emptyObjects_skipEmptyObjects() throws Exception List expectedBlobItems = ImmutableList.of( new CloudBlobHolder(blobItem, CONTAINER, DEFAULT_STORAGE_ACCOUNT) ); - Assert.assertEquals(expectedBlobItems.size(), actualBlobItems.size()); - Assert.assertEquals( + assertEquals(expectedBlobItems.size(), actualBlobItems.size()); + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getName).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( 
expectedBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getStorageAccount).collect(Collectors.toSet()) ); - Assert.assertEquals( + assertEquals( expectedBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()), actualBlobItems.stream().map(CloudBlobHolder::getContainerName).collect(Collectors.toSet()) ); } - @Test(expected = NoSuchElementException.class) + @Test public void test_next_emptyPrefixes_throwsNoSuchElementException() { - azureCloudBlobIterator = new AzureCloudBlobIterator( + final AzureCloudBlobIterator azureCloudBlobIterator = new AzureCloudBlobIterator( storage, config, ImmutableList.of(), MAX_LISTING_LENGTH ); - azureCloudBlobIterator.next(); + + assertThrows(NoSuchElementException.class, azureCloudBlobIterator::next); } - @Test(expected = RE.class) + @Test public void test_fetchNextBatch_moreThanMaxTriesRetryableExceptionsThrownInStorage_throwsREException() throws Exception { List prefixes = ImmutableList.of( @@ -305,19 +310,25 @@ public void test_fetchNextBatch_moreThanMaxTriesRetryableExceptionsThrownInStora EasyMock.anyString(), EasyMock.anyInt(), EasyMock.anyInt() - )).andThrow(new BlobStorageException("", null, null)).times(3); + )).andThrow(new BlobStorageException("", null, null)); replayAll(); - azureCloudBlobIterator = new AzureCloudBlobIterator( - storage, - config, - prefixes, - MAX_LISTING_LENGTH + + //noinspection ResultOfObjectAllocationIgnored + assertThrows( + RE.class, + () -> new AzureCloudBlobIterator( + storage, + config, + prefixes, + MAX_LISTING_LENGTH + ) ); + verifyAll(); } - @Test(expected = RE.class) + @Test public void test_fetchNextBatch_nonRetryableExceptionThrownInStorage_throwsREException() throws Exception { List prefixes = ImmutableList.of( @@ -330,13 +341,20 @@ public void test_fetchNextBatch_nonRetryableExceptionThrownInStorage_throwsREExc EasyMock.anyInt(), EasyMock.anyInt() )).andThrow(new RuntimeException("")); + replayAll(); - azureCloudBlobIterator = new AzureCloudBlobIterator( - storage, - config, - prefixes, - MAX_LISTING_LENGTH + + //noinspection ResultOfObjectAllocationIgnored + assertThrows( + RE.class, + () -> new AzureCloudBlobIterator( + storage, + config, + prefixes, + MAX_LISTING_LENGTH + ) ); + verifyAll(); } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java index 43844c0c7cb0..40be9737d8d1 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java @@ -29,13 +29,12 @@ import org.apache.druid.segment.loading.SegmentLoadingException; import org.apache.druid.storage.azure.blob.CloudBlobHolder; import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.NoneShardSpec; +import org.apache.druid.timeline.partition.LinearShardSpec; import org.easymock.Capture; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.net.URI; @@ -44,6 +43,10 @@ import java.util.HashSet; import java.util.List; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class AzureDataSegmentKillerTest extends EasyMockSupport { private static final String CONTAINER_NAME = "container"; @@ -70,7 +73,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH), null, null, - NoneShardSpec.instance(), + new LinearShardSpec(0), 0, 1 ); @@ -82,7 +85,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH_2), null, null, - NoneShardSpec.instance(), + new LinearShardSpec(0), 0, 1 ); @@ -93,7 +96,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport private AzureStorage azureStorage; private AzureCloudBlobIterableFactory azureCloudBlobIterableFactory; - @Before + @BeforeEach public void before() { segmentConfig = createMock(AzureDataSegmentConfig.class); @@ -106,7 +109,6 @@ public void before() @Test public void killTest() throws SegmentLoadingException, BlobStorageException { - List deletedFiles = new ArrayList<>(); final String dirPath = Paths.get(BLOB_PATH).getParent().toString(); @@ -114,47 +116,67 @@ public void killTest() throws SegmentLoadingException, BlobStorageException replayAll(); - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); + final AzureDataSegmentKiller killer = new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); killer.kill(DATA_SEGMENT); verifyAll(); } - @Test(expected = SegmentLoadingException.class) + @Test public void test_kill_StorageExceptionExtendedErrorInformationNull_throwsException() - throws SegmentLoadingException, BlobStorageException { + String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - common_test_kill_StorageExceptionExtendedError_throwsException(); - } + EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)) + .andThrow(new BlobStorageException("", null, null)); - @Test(expected = SegmentLoadingException.class) - public void test_kill_StorageExceptionExtendedErrorInformationNotNull_throwsException() - throws SegmentLoadingException, BlobStorageException - { + replayAll(); + + final AzureDataSegmentKiller killer = new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); - common_test_kill_StorageExceptionExtendedError_throwsException(); + assertThrows( + SegmentLoadingException.class, + () -> killer.kill(DATA_SEGMENT) + ); + + verifyAll(); } - @Test(expected = RuntimeException.class) + @Test public void test_kill_runtimeException_throwsException() - throws SegmentLoadingException, BlobStorageException { + final String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - - EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andThrow( - new RuntimeException( - "" - ) - ); + EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)) + .andThrow(new RuntimeException("")); replayAll(); - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); + final AzureDataSegmentKiller killer = 
new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); - killer.kill(DATA_SEGMENT); + assertThrows( + RuntimeException.class, + () -> killer.kill(DATA_SEGMENT) + ); verifyAll(); } @@ -182,7 +204,7 @@ public void test_killAll_segmentConfigWithNullContainerAndPrefix_throwsISEExcept thrownISEException = true; } - Assert.assertTrue(thrownISEException); + assertTrue(thrownISEException); EasyMock.verify(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); } @@ -194,35 +216,35 @@ public void test_killAll_noException_deletesAllSegments() throws Exception EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS); EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - CloudBlobHolder object1 = AzureTestUtils.newCloudBlobHolder(CONTAINER, KEY_1, TIME_0); - CloudBlobHolder object2 = AzureTestUtils.newCloudBlobHolder(CONTAINER, KEY_2, TIME_1); + CloudBlobHolder blob1 = AzureTestUtils.newCloudBlobHolder(CONTAINER, KEY_1, TIME_0); + CloudBlobHolder blob2 = AzureTestUtils.newCloudBlobHolder(CONTAINER, KEY_2, TIME_1); AzureCloudBlobIterable azureCloudBlobIterable = AzureTestUtils.expectListObjects( azureCloudBlobIterableFactory, MAX_KEYS, PREFIX_URI, - ImmutableList.of(object1, object2), + ImmutableList.of(blob1, blob2), azureStorage ); - EasyMock.replay(object1, object2); + EasyMock.replay(blob1, blob2); AzureTestUtils.expectDeleteObjects( azureStorage, - ImmutableList.of(object1, object2), + ImmutableList.of(blob1, blob2), ImmutableMap.of(), MAX_TRIES ); EasyMock.replay(segmentConfig, inputDataConfig, accountConfig, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage); AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); killer.killAll(); - EasyMock.verify(segmentConfig, inputDataConfig, accountConfig, object1, object2, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage); + EasyMock.verify(segmentConfig, inputDataConfig, accountConfig, blob1, blob2, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage); } @Test public void test_killAll_nonrecoverableExceptionWhenListingObjects_deletesAllSegments() { boolean ioExceptionThrown = false; - CloudBlobHolder object1 = null; + CloudBlobHolder cloudBlob = null; AzureCloudBlobIterable azureCloudBlobIterable = null; try { EasyMock.expect(segmentConfig.getContainer()).andReturn(CONTAINER).atLeastOnce(); @@ -230,21 +252,21 @@ public void test_killAll_nonrecoverableExceptionWhenListingObjects_deletesAllSeg EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS); EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - object1 = AzureTestUtils.newCloudBlobHolder(CONTAINER, KEY_1, TIME_0); + cloudBlob = AzureTestUtils.newCloudBlobHolder(CONTAINER, KEY_1, TIME_0); azureCloudBlobIterable = AzureTestUtils.expectListObjects( azureCloudBlobIterableFactory, MAX_KEYS, PREFIX_URI, - ImmutableList.of(object1), + ImmutableList.of(cloudBlob), azureStorage ); - EasyMock.replay(object1); + EasyMock.replay(cloudBlob); AzureTestUtils.expectDeleteObjects( azureStorage, ImmutableList.of(), - ImmutableMap.of(object1, NON_RECOVERABLE_EXCEPTION), + ImmutableMap.of(cloudBlob, NON_RECOVERABLE_EXCEPTION), MAX_TRIES ); EasyMock.replay( @@ -268,41 +290,19 @@ public void test_killAll_nonrecoverableExceptionWhenListingObjects_deletesAllSeg 
ioExceptionThrown = true; } - Assert.assertTrue(ioExceptionThrown); + assertTrue(ioExceptionThrown); EasyMock.verify( segmentConfig, inputDataConfig, accountConfig, - object1, + cloudBlob, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage ); } - private void common_test_kill_StorageExceptionExtendedError_throwsException() - throws SegmentLoadingException, BlobStorageException - { - String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - - EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andThrow( - new BlobStorageException( - "", - null, - null - ) - ); - - replayAll(); - - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); - - killer.kill(DATA_SEGMENT); - - verifyAll(); - } - @Test public void killBatchTest() throws SegmentLoadingException, BlobStorageException { @@ -321,42 +321,53 @@ public void killBatchTest() throws SegmentLoadingException, BlobStorageException verifyAll(); - Assert.assertEquals( - ImmutableSet.of(BLOB_PATH, BLOB_PATH_2), - new HashSet<>(deletedFilesCapture.getValue()) - ); + assertEquals(ImmutableSet.of(BLOB_PATH, BLOB_PATH_2), new HashSet<>(deletedFilesCapture.getValue())); } - @Test(expected = RuntimeException.class) + @Test public void test_killBatch_runtimeException() - throws SegmentLoadingException, BlobStorageException { - EasyMock.expect(azureStorage.batchDeleteFiles(CONTAINER_NAME, ImmutableList.of(BLOB_PATH, BLOB_PATH_2), null)) .andThrow(new RuntimeException("")); replayAll(); - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); + final AzureDataSegmentKiller killer = new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); - killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)); + assertThrows( + RuntimeException.class, + () -> killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)) + ); verifyAll(); } - @Test(expected = SegmentLoadingException.class) + @Test public void test_killBatch_SegmentLoadingExceptionOnError() - throws SegmentLoadingException, BlobStorageException { - EasyMock.expect(azureStorage.batchDeleteFiles(CONTAINER_NAME, ImmutableList.of(BLOB_PATH, BLOB_PATH_2), null)) .andReturn(false); replayAll(); - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); + AzureDataSegmentKiller killer = new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); - killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)); + assertThrows( + SegmentLoadingException.class, + () -> killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)) + ); verifyAll(); } @@ -364,7 +375,6 @@ public void test_killBatch_SegmentLoadingExceptionOnError() @Test public void killBatch_emptyList() throws SegmentLoadingException, BlobStorageException { - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); killer.kill(ImmutableList.of()); } @@ -372,7 +382,6 @@ public void killBatch_emptyList() throws SegmentLoadingException, BlobStorageExc @Test public void killBatch_singleSegment() throws SegmentLoadingException, BlobStorageException { - List deletedFiles = new 
ArrayList<>(); final String dirPath = Paths.get(BLOB_PATH).getParent().toString(); diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java index ebcefd79571d..a53e5bba405d 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -25,26 +25,32 @@ import org.apache.druid.segment.loading.SegmentLoadingException; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public class AzureDataSegmentPullerTest extends EasyMockSupport { - - private static final String SEGMENT_FILE_NAME = "segment"; private static final String CONTAINER_NAME = "container"; private static final String BLOB_PATH = "path/to/storage/index.zip"; - private static final String BLOB_PATH_HADOOP = AzureUtils.AZURE_STORAGE_HOST_ADDRESS + "/path/to/storage/index.zip"; private AzureStorage azureStorage; private AzureByteSourceFactory byteSourceFactory; - @Before + @BeforeEach public void before() { azureStorage = createMock(AzureStorage.class); @@ -52,136 +58,135 @@ public void before() } @Test - public void test_getSegmentFiles_success() - throws SegmentLoadingException, BlobStorageException, IOException + public void test_getSegmentFiles_success(@TempDir Path sourcePath, @TempDir Path targetPath) + throws IOException, SegmentLoadingException { + final String segmentFileName = "segment"; final String value = "bucket"; - final File pulledFile = AzureTestUtils.createZipTempFile(SEGMENT_FILE_NAME, value); - final File toDir = FileUtils.createTempDir(); - try { - final InputStream zipStream = new FileInputStream(pulledFile); - final AzureAccountConfig config = new AzureAccountConfig(); - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); + final File pulledFile = createZipTempFile(sourcePath, segmentFileName, value); + + final InputStream zipStream = Files.newInputStream(pulledFile.toPath()); + final AzureAccountConfig config = new AzureAccountConfig(); - replayAll(); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + 
replayAll(); - FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, toDir); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - File expected = new File(toDir, SEGMENT_FILE_NAME); - Assert.assertEquals(value.length(), result.size()); - Assert.assertTrue(expected.exists()); - Assert.assertEquals(value.length(), expected.length()); + FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, targetPath.toFile()); - verifyAll(); - } - finally { - pulledFile.delete(); - FileUtils.deleteDirectory(toDir); - } + File expected = new File(targetPath.toFile(), segmentFileName); + assertEquals(value.length(), result.size()); + assertTrue(expected.exists()); + assertEquals(value.length(), expected.length()); + + verifyAll(); } @Test - public void test_getSegmentFiles_blobPathIsHadoop_success() - throws SegmentLoadingException, BlobStorageException, IOException + public void test_getSegmentFiles_blobPathIsHadoop_success(@TempDir Path sourcePath, @TempDir Path targetPath) + throws IOException, SegmentLoadingException { + final String segmentFileName = "segment"; final String value = "bucket"; - final File pulledFile = AzureTestUtils.createZipTempFile(SEGMENT_FILE_NAME, value); - final File toDir = FileUtils.createTempDir(); - try { - final InputStream zipStream = new FileInputStream(pulledFile); - final AzureAccountConfig config = new AzureAccountConfig(); - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); + final File pulledFile = createZipTempFile(sourcePath, segmentFileName, value); - replayAll(); + final InputStream zipStream = Files.newInputStream(pulledFile.toPath()); + final AzureAccountConfig config = new AzureAccountConfig(); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); - FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH_HADOOP, toDir); + replayAll(); - File expected = new File(toDir, SEGMENT_FILE_NAME); - Assert.assertEquals(value.length(), result.size()); - Assert.assertTrue(expected.exists()); - Assert.assertEquals(value.length(), expected.length()); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - verifyAll(); - } - finally { - pulledFile.delete(); - FileUtils.deleteDirectory(toDir); - } + final String blobPathHadoop = AzureUtils.AZURE_STORAGE_HOST_ADDRESS + "/path/to/storage/index.zip"; + FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, blobPathHadoop, targetPath.toFile()); + + File expected = new File(targetPath.toFile(), segmentFileName); + assertEquals(value.length(), result.size()); + assertTrue(expected.exists()); + assertEquals(value.length(), expected.length()); + + verifyAll(); } - @Test(expected = RuntimeException.class) - public void test_getSegmentFiles_nonRecoverableErrorRaisedWhenPullingSegmentFiles_doNotDeleteOutputDirectory() - throws IOException, BlobStorageException, SegmentLoadingException + @Test + public void 
test_getSegmentFiles_nonRecoverableErrorRaisedWhenPullingSegmentFiles_doNotDeleteOutputDirectory( + @TempDir Path tempPath + ) { final AzureAccountConfig config = new AzureAccountConfig(); - final File outDir = FileUtils.createTempDir(); - try { - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow( - new RuntimeException( - "error" - ) - ); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)) + .andThrow(new RuntimeException("error")); - replayAll(); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + replayAll(); - puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir); - } - catch (Exception e) { - Assert.assertTrue(outDir.exists()); - verifyAll(); - throw e; - } - finally { - FileUtils.deleteDirectory(outDir); - } + assertThrows( + RuntimeException.class, + () -> puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, tempPath.toFile()) + ); + assertTrue(tempPath.toFile().exists()); + + verifyAll(); } - @Test(expected = SegmentLoadingException.class) - public void test_getSegmentFiles_recoverableErrorRaisedWhenPullingSegmentFiles_deleteOutputDirectory() - throws IOException, BlobStorageException, SegmentLoadingException + @Test + public void test_getSegmentFiles_recoverableErrorRaisedWhenPullingSegmentFiles_deleteOutputDirectory( + @TempDir Path tempPath + ) { final AzureAccountConfig config = new AzureAccountConfig(); - final File outDir = FileUtils.createTempDir(); - try { - HttpResponse httpResponse = createMock(HttpResponse.class); - EasyMock.expect(httpResponse.getStatusCode()).andReturn(500).anyTimes(); - EasyMock.replay(httpResponse); - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow( - new BlobStorageException("", httpResponse, null) - ).atLeastOnce(); + final HttpResponse httpResponse = createMock(HttpResponse.class); + EasyMock.expect(httpResponse.getStatusCode()).andReturn(500).anyTimes(); + EasyMock.replay(httpResponse); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow( + new BlobStorageException("", httpResponse, null) + ).atLeastOnce(); - EasyMock.replay(azureStorage); - EasyMock.replay(byteSourceFactory); + EasyMock.replay(azureStorage); + EasyMock.replay(byteSourceFactory); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir); + assertThrows( + SegmentLoadingException.class, + () -> puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, tempPath.toFile()) + ); - Assert.assertFalse(outDir.exists()); + 
assertFalse(tempPath.toFile().exists()); + verifyAll(); + } - verifyAll(); - } - catch (Exception e) { - Assert.assertFalse(outDir.exists()); - verifyAll(); - throw e; - } - finally { - FileUtils.deleteDirectory(outDir); + @SuppressWarnings("SameParameterValue") + private static File createZipTempFile( + final Path tempPath, + final String entry, + final String entryValue + ) throws IOException + { + final File zipFile = Files.createFile(tempPath.resolve("index.zip")).toFile(); + + try (ZipOutputStream zipStream = new ZipOutputStream(Files.newOutputStream(zipFile.toPath()))) { + zipStream.putNextEntry(new ZipEntry(entry)); + zipStream.write(entryValue.getBytes(StandardCharsets.UTF_8)); } + + return zipFile; } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java index 2c8e357b2c5a..cc1bd8bd5725 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -26,28 +26,29 @@ import org.apache.druid.java.util.common.MapUtils; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.NoneShardSpec; +import org.apache.druid.timeline.partition.LinearShardSpec; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.io.IOException; +import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class AzureDataSegmentPusherTest extends EasyMockSupport { - @Rule - public final TemporaryFolder tempFolder = new TemporaryFolder(); - private static final String ACCOUNT = "account"; private static final String CONTAINER_NAME = "container"; private static final String PREFIX = "prefix"; @@ -59,7 +60,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH), null, null, - NoneShardSpec.instance(), + new LinearShardSpec(0), 0, 1 ); @@ -77,7 +78,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport new HashMap<>(), new ArrayList<>(), new ArrayList<>(), - NoneShardSpec.instance(), + new LinearShardSpec(0), 0, DATA.length ); @@ -87,7 +88,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport private AzureDataSegmentConfig segmentConfigWithPrefix; private AzureDataSegmentConfig segmentConfigWithoutPrefix; - @Before + @BeforeEach public void before() { azureStorage = createMock(AzureStorage.class); @@ -104,15 +105,14 @@ public void before() } @Test - public void test_push_nonUniquePathNoPrefix_succeeds() throws Exception + public void test_push_nonUniquePathNoPrefix_succeeds(@TempDir Path tempPath) throws Exception { 
boolean useUniquePath = false; AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithoutPrefix ); // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); - + File tmp = tempPath.resolve("version.bin").toFile(); Files.write(DATA, tmp); String azurePath = pusher.getAzurePath(SEGMENT_TO_PUSH, useUniquePath); @@ -121,28 +121,29 @@ public void test_push_nonUniquePathNoPrefix_succeeds() throws Exception replayAll(); - DataSegment segment = pusher.push(tempFolder.getRoot(), SEGMENT_TO_PUSH, useUniquePath); + DataSegment segment = pusher.push(tempPath.toFile(), SEGMENT_TO_PUSH, useUniquePath); - Assert.assertTrue( - segment.getLoadSpec().get("blobPath").toString(), - Pattern.compile(NON_UNIQUE_NO_PREFIX_MATCHER).matcher(segment.getLoadSpec().get("blobPath").toString()).matches() + assertTrue( + Pattern.compile(NON_UNIQUE_NO_PREFIX_MATCHER) + .matcher(segment.getLoadSpec().get("blobPath").toString()) + .matches(), + segment.getLoadSpec().get("blobPath").toString() ); - Assert.assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); + assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); verifyAll(); } @Test - public void test_push_nonUniquePathWithPrefix_succeeds() throws Exception + public void test_push_nonUniquePathWithPrefix_succeeds(@TempDir Path tempPath) throws Exception { boolean useUniquePath = false; AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix ); // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); - + File tmp = tempPath.resolve("version.bin").toFile(); Files.write(DATA, tmp); String azurePath = pusher.getAzurePath(SEGMENT_TO_PUSH, useUniquePath); @@ -156,26 +157,24 @@ public void test_push_nonUniquePathWithPrefix_succeeds() throws Exception replayAll(); - DataSegment segment = pusher.push(tempFolder.getRoot(), SEGMENT_TO_PUSH, useUniquePath); + DataSegment segment = pusher.push(tempPath.toFile(), SEGMENT_TO_PUSH, useUniquePath); - Assert.assertTrue( - segment.getLoadSpec().get("blobPath").toString(), - Pattern.compile(NON_UNIQUE_WITH_PREFIX_MATCHER).matcher(segment.getLoadSpec().get("blobPath").toString()).matches() - ); + assertTrue(Pattern.compile(NON_UNIQUE_WITH_PREFIX_MATCHER).matcher(segment.getLoadSpec().get("blobPath").toString()).matches(), + segment.getLoadSpec().get("blobPath").toString()); - Assert.assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); + assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); verifyAll(); } @Test - public void test_push_uniquePathNoPrefix_succeeds() throws Exception + public void test_push_uniquePathNoPrefix_succeeds(@TempDir Path tempPath) throws Exception { boolean useUniquePath = true; AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithoutPrefix); // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); + File tmp = tempPath.resolve("version.bin").toFile(); Files.write(DATA, tmp); @@ -190,26 +189,27 @@ public void test_push_uniquePathNoPrefix_succeeds() throws Exception replayAll(); - DataSegment segment = pusher.push(tempFolder.getRoot(), SEGMENT_TO_PUSH, useUniquePath); + DataSegment segment = pusher.push(tempPath.toFile(), SEGMENT_TO_PUSH, useUniquePath); - Assert.assertTrue( - segment.getLoadSpec().get("blobPath").toString(), - Pattern.compile(UNIQUE_MATCHER_NO_PREFIX).matcher(segment.getLoadSpec().get("blobPath").toString()).matches() - ); + assertTrue( + 
Pattern.compile(UNIQUE_MATCHER_NO_PREFIX) + .matcher(segment.getLoadSpec().get("blobPath").toString()) + .matches(), + segment.getLoadSpec().get("blobPath").toString()); - Assert.assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); + assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); verifyAll(); } @Test - public void test_push_uniquePath_succeeds() throws Exception + public void test_push_uniquePath_succeeds(@TempDir Path tempPath) throws Exception { boolean useUniquePath = true; AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); + File tmp = tempPath.resolve("version.bin").toFile(); Files.write(DATA, tmp); @@ -224,57 +224,52 @@ public void test_push_uniquePath_succeeds() throws Exception replayAll(); - DataSegment segment = pusher.push(tempFolder.getRoot(), SEGMENT_TO_PUSH, useUniquePath); + DataSegment segment = pusher.push(tempPath.toFile(), SEGMENT_TO_PUSH, useUniquePath); - Assert.assertTrue( - segment.getLoadSpec().get("blobPath").toString(), - Pattern.compile(UNIQUE_MATCHER_PREFIX).matcher(segment.getLoadSpec().get("blobPath").toString()).matches() + assertTrue( + Pattern.compile(UNIQUE_MATCHER_PREFIX) + .matcher(segment.getLoadSpec().get("blobPath").toString()) + .matches(), + segment.getLoadSpec().get("blobPath").toString() ); - Assert.assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); + assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); verifyAll(); } - @Test(expected = RuntimeException.class) - public void test_push_exception_throwsException() throws Exception + @Test + public void test_push_exception_throwsException(@TempDir Path tempPath) throws Exception { boolean useUniquePath = true; AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); + File tmp = tempPath.resolve("version.bin").toFile(); Files.write(DATA, tmp); - final long size = DATA.length; - String azurePath = pusher.getAzurePath(SEGMENT_TO_PUSH, useUniquePath); azureStorage.uploadBlockBlob(EasyMock.anyObject(File.class), EasyMock.eq(CONTAINER_NAME), EasyMock.anyString(), EasyMock.eq(MAX_TRIES)); EasyMock.expectLastCall().andThrow(new BlobStorageException("", null, null)); replayAll(); - DataSegment segment = pusher.push(tempFolder.getRoot(), SEGMENT_TO_PUSH, useUniquePath); - - Assert.assertTrue( - segment.getLoadSpec().get("blobPath").toString(), - Pattern.compile(UNIQUE_MATCHER_NO_PREFIX).matcher(segment.getLoadSpec().get("blobPath").toString()).matches() + assertThrows( + RuntimeException.class, + () -> pusher.push(tempPath.toFile(), SEGMENT_TO_PUSH, useUniquePath) ); - Assert.assertEquals(SEGMENT_TO_PUSH.getSize(), segment.getSize()); - verifyAll(); } @Test public void getAzurePathsTest() { - AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); final String storageDir = pusher.getStorageDir(DATA_SEGMENT, false); final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false); - Assert.assertEquals( + assertEquals( StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), azurePath ); @@ -301,11 +296,11 @@ public void uploadDataSegmentTest() throws BlobStorageException, IOException azurePath ); - Assert.assertEquals(compressedSegmentData.length(), pushedDataSegment.getSize()); - 
Assert.assertEquals(binaryVersion, (int) pushedDataSegment.getBinaryVersion()); + assertEquals(compressedSegmentData.length(), pushedDataSegment.getSize()); + assertEquals(binaryVersion, (int) pushedDataSegment.getBinaryVersion()); Map loadSpec = pushedDataSegment.getLoadSpec(); - Assert.assertEquals(AzureStorageDruidModule.SCHEME, MapUtils.getString(loadSpec, "type")); - Assert.assertEquals(azurePath, MapUtils.getString(loadSpec, "blobPath")); + assertEquals(AzureStorageDruidModule.SCHEME, MapUtils.getString(loadSpec, "type")); + assertEquals(azurePath, MapUtils.getString(loadSpec, "blobPath")); verifyAll(); } @@ -315,7 +310,7 @@ public void getPathForHadoopWithPrefixTest() { AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); String hadoopPath = pusher.getPathForHadoop(); - Assert.assertEquals("wasbs://container@account.blob.core.windows.net/prefix/", hadoopPath); + assertEquals("wasbs://container@account.blob.core.windows.net/prefix/", hadoopPath); } @Test @@ -323,23 +318,7 @@ public void getPathForHadoopWithoutPrefixTest() { AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithoutPrefix); String hadoopPath = pusher.getPathForHadoop(); - Assert.assertEquals("wasbs://container@account.blob.core.windows.net/", hadoopPath); - } - - @Test - public void test_getPathForHadoop_noArgsWithoutPrefix_succeeds() - { - AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithoutPrefix); - String hadoopPath = pusher.getPathForHadoop(""); - Assert.assertEquals("wasbs://container@account.blob.core.windows.net/", hadoopPath); - } - - @Test - public void test_getPathForHadoop_noArgsWithPrefix_succeeds() - { - AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); - String hadoopPath = pusher.getPathForHadoop(""); - Assert.assertEquals("wasbs://container@account.blob.core.windows.net/prefix/", hadoopPath); + assertEquals("wasbs://container@account.blob.core.windows.net/", hadoopPath); } @Test @@ -347,7 +326,7 @@ public void test_getAllowedPropertyPrefixesForHadoop_returnsExpcetedPropertyPref { AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); List actualPropertyPrefixes = pusher.getAllowedPropertyPrefixesForHadoop(); - Assert.assertEquals(AzureDataSegmentPusher.ALLOWED_PROPERTY_PREFIXES_FOR_HADOOP, actualPropertyPrefixes); + assertEquals(AzureDataSegmentPusher.ALLOWED_PROPERTY_PREFIXES_FOR_HADOOP, actualPropertyPrefixes); } @Test @@ -356,6 +335,6 @@ public void storageDirContainsNoColonsTest() AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, segmentConfigWithPrefix); DataSegment withColons = DATA_SEGMENT.withVersion("2018-01-05T14:54:09.295Z"); String segmentPath = pusher.getStorageDir(withColons, false); - Assert.assertFalse("Path should not contain any columns", segmentPath.contains(":")); + assertFalse(segmentPath.contains(":"), "Path should not contain any columns"); } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureIngestClientFactoryTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureIngestClientFactoryTest.java index d982cf2253e1..1d82b845f28e 100644 --- 
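/*
 * Illustrative sketch of the two mechanical changes applied to AzureDataSegmentPusherTest above;
 * not taken from the patch itself. The JUnit 4 TemporaryFolder rule becomes a JUnit 5 @TempDir
 * parameter, and the optional failure message moves from the first argument of Assert.assertTrue
 * to the last argument of Assertions.assertTrue. The class and file names below are hypothetical.
 */
import java.io.File;
import java.nio.file.Path;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import static org.junit.jupiter.api.Assertions.assertTrue;

class TempDirAndAssertionOrderSketch
{
  @Test
  void writesFileIntoManagedTempDir(@TempDir Path tempPath) throws Exception
  {
    // JUnit 4: File tmp = tempFolder.newFile("version.bin");
    File tmp = tempPath.resolve("version.bin").toFile();
    // JUnit 4: Assert.assertTrue("should create version.bin", tmp.createNewFile());
    assertTrue(tmp.createNewFile(), "should create version.bin"); // message is now the last argument
  }
}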
a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureIngestClientFactoryTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureIngestClientFactoryTest.java @@ -26,21 +26,22 @@ import com.google.common.collect.ImmutableMap; import org.apache.druid.data.input.azure.AzureStorageAccountInputSourceConfig; import org.easymock.EasyMock; -import org.easymock.EasyMockRunner; +import org.easymock.EasyMockExtension; import org.easymock.EasyMockSupport; import org.easymock.Mock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import java.net.MalformedURLException; import java.net.URL; -@RunWith(EasyMockRunner.class) +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +@ExtendWith(EasyMockExtension.class) public class AzureIngestClientFactoryTest extends EasyMockSupport { - private AzureIngestClientFactory azureIngestClientFactory; private static final String ACCOUNT = "account"; private static final String KEY = "key"; private static final String TOKEN = "token"; @@ -51,7 +52,7 @@ public class AzureIngestClientFactoryTest extends EasyMockSupport @Mock private static AzureStorageAccountInputSourceConfig azureStorageAccountInputSourceConfig; - @Before + @BeforeEach public void setup() { EasyMock.expect(accountConfig.getBlobStorageEndpoint()).andReturn("blob.core.windows.net").anyTimes(); @@ -67,12 +68,13 @@ public void test_blobServiceClient_accountName() null, null ); - azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); + + final AzureIngestClientFactory azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); replayAll(); BlobServiceClient blobServiceClient = azureIngestClientFactory.getBlobServiceClient(3, ACCOUNT); verifyAll(); - Assert.assertEquals(ACCOUNT, blobServiceClient.getAccountName()); + assertEquals(ACCOUNT, blobServiceClient.getAccountName()); } @Test @@ -85,7 +87,8 @@ public void test_blobServiceClientBuilder_key() throws MalformedURLException null, null ); - azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); + + final AzureIngestClientFactory azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); replayAll(); BlobServiceClient blobServiceClient = azureIngestClientFactory.getBlobServiceClient(3, ACCOUNT); @@ -93,12 +96,14 @@ public void test_blobServiceClientBuilder_key() throws MalformedURLException StorageSharedKeyCredential storageSharedKeyCredential = StorageSharedKeyCredential.getSharedKeyCredentialFromPipeline( blobServiceClient.getHttpPipeline() ); - Assert.assertNotNull(storageSharedKeyCredential); + assertNotNull(storageSharedKeyCredential); // Azure doesn't let us look at the key in the StorageSharedKeyCredential so make sure the authorization header generated is what we expect. 
- Assert.assertEquals( - new StorageSharedKeyCredential(ACCOUNT, KEY).generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()), - storageSharedKeyCredential.generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()) + assertEquals( + new StorageSharedKeyCredential(ACCOUNT, KEY) + .generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()), + storageSharedKeyCredential + .generateAuthorizationHeader(new URL("http://druid.com"), "POST", ImmutableMap.of()) ); } @@ -112,7 +117,8 @@ public void test_blobServiceClientBuilder_sasToken() null, null ); - azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); + + final AzureIngestClientFactory azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); replayAll(); BlobServiceClient blobServiceClient = azureIngestClientFactory.getBlobServiceClient(3, ACCOUNT); verifyAll(); @@ -124,7 +130,7 @@ public void test_blobServiceClientBuilder_sasToken() } } - Assert.assertNotNull(azureSasCredentialPolicy); + assertNotNull(azureSasCredentialPolicy); } @Test @@ -137,7 +143,8 @@ public void test_blobServiceClientBuilder_useAppRegistration() "clientSecret", "tenantId" ); - azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); + + final AzureIngestClientFactory azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); replayAll(); BlobServiceClient blobServiceClient = azureIngestClientFactory.getBlobServiceClient(3, ACCOUNT); verifyAll(); @@ -148,16 +155,16 @@ public void test_blobServiceClientBuilder_useAppRegistration() } } - Assert.assertNotNull(bearerTokenAuthenticationPolicy); + assertNotNull(bearerTokenAuthenticationPolicy); } - @Test public void test_blobServiceClientBuilder_useAzureAccountConfig_asDefaultMaxTries() { // We should only call getKey twice (both times in the first call to getBlobServiceClient) EasyMock.expect(azureStorageAccountInputSourceConfig.getKey()).andReturn(KEY).times(2); - azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); + + final AzureIngestClientFactory azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); EasyMock.expect(accountConfig.getMaxTries()).andReturn(5); replayAll(); azureIngestClientFactory.getBlobServiceClient(null, ACCOUNT); @@ -181,7 +188,8 @@ public void test_blobServiceClientBuilder_fallbackToAzureAccountConfig() null, null ); - azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); + + final AzureIngestClientFactory azureIngestClientFactory = new AzureIngestClientFactory(accountConfig, azureStorageAccountInputSourceConfig); EasyMock.expect(accountConfig.getKey()).andReturn(KEY).times(2); replayAll(); azureIngestClientFactory.getBlobServiceClient(5, ACCOUNT); diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageDruidModuleTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageDruidModuleTest.java index 5b18b6c5b61b..ab1ab379a8a5 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageDruidModuleTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageDruidModuleTest.java @@ 
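/*
 * Sketch of the EasyMock wiring change made in AzureIngestClientFactoryTest above; not taken from
 * the patch itself. @RunWith(EasyMockRunner.class) becomes @ExtendWith(EasyMockExtension.class) and
 * @Before becomes @BeforeEach, while @Mock fields keep working unchanged. The mocked Runnable below
 * is a hypothetical stand-in for the real config mocks.
 */
import org.easymock.EasyMock;
import org.easymock.EasyMockExtension;
import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@ExtendWith(EasyMockExtension.class)
class EasyMockExtensionSketch extends EasyMockSupport
{
  @Mock
  private Runnable collaborator; // injected by the extension, like accountConfig in the test above

  @BeforeEach
  void setUp()
  {
    // shared expectations would be recorded here, as in the test's setup() method
  }

  @Test
  void runsCollaboratorOnce()
  {
    collaborator.run();
    EasyMock.expectLastCall().once();
    replayAll();
    collaborator.run();
    verifyAll();
  }
}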
-19,14 +19,10 @@ package org.apache.druid.storage.azure; -import com.azure.storage.blob.BlobServiceClient; -import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; -import com.google.inject.Module; import com.google.inject.ProvisionException; import com.google.inject.TypeLiteral; import org.apache.druid.data.input.azure.AzureEntityFactory; @@ -40,11 +36,11 @@ import org.apache.druid.segment.loading.OmniDataSegmentKiller; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Named; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import javax.validation.Validation; import javax.validation.Validator; @@ -52,12 +48,18 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Properties; +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public class AzureStorageDruidModuleTest extends EasyMockSupport { - @Rule - public ExpectedException expectedException = ExpectedException.none(); - private static final String AZURE_ACCOUNT_NAME; private static final String AZURE_ACCOUNT_KEY; private static final String AZURE_SHARED_ACCESS_TOKEN; @@ -72,7 +74,6 @@ public class AzureStorageDruidModuleTest extends EasyMockSupport private CloudObjectLocation cloudObjectLocation1; private CloudObjectLocation cloudObjectLocation2; private AzureStorage azureStorage; - private Injector injector; static { try { @@ -91,7 +92,7 @@ public class AzureStorageDruidModuleTest extends EasyMockSupport } } - @Before + @BeforeEach public void setup() { cloudObjectLocation1 = createMock(CloudObjectLocation.class); @@ -102,11 +103,11 @@ public void setup() @Test public void testGetAzureAccountConfigExpectedConfig() { - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureAccountConfig azureAccountConfig = injector.getInstance(AzureAccountConfig.class); - Assert.assertEquals(AZURE_ACCOUNT_NAME, azureAccountConfig.getAccount()); - Assert.assertEquals(AZURE_ACCOUNT_KEY, azureAccountConfig.getKey()); + assertEquals(AZURE_ACCOUNT_NAME, azureAccountConfig.getAccount()); + assertEquals(AZURE_ACCOUNT_KEY, azureAccountConfig.getKey()); } @Test @@ -116,42 +117,42 @@ public void testGetAzureAccountConfigExpectedConfigWithSAS() properties.setProperty("druid.azure.sharedAccessStorageToken", AZURE_SHARED_ACCESS_TOKEN); properties.remove("druid.azure.key"); - injector = makeInjectorWithProperties(properties); + final Injector injector = makeInjectorWithProperties(properties); AzureAccountConfig azureAccountConfig = injector.getInstance(AzureAccountConfig.class); - Assert.assertEquals(AZURE_ACCOUNT_NAME, azureAccountConfig.getAccount()); - 
Assert.assertEquals(AZURE_SHARED_ACCESS_TOKEN, azureAccountConfig.getSharedAccessStorageToken()); + assertEquals(AZURE_ACCOUNT_NAME, azureAccountConfig.getAccount()); + assertEquals(AZURE_SHARED_ACCESS_TOKEN, azureAccountConfig.getSharedAccessStorageToken()); } @Test public void testGetAzureDataSegmentConfigExpectedConfig() { - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureDataSegmentConfig segmentConfig = injector.getInstance(AzureDataSegmentConfig.class); - Assert.assertEquals(AZURE_CONTAINER, segmentConfig.getContainer()); - Assert.assertEquals(AZURE_PREFIX, segmentConfig.getPrefix()); + assertEquals(AZURE_CONTAINER, segmentConfig.getContainer()); + assertEquals(AZURE_PREFIX, segmentConfig.getPrefix()); } @Test public void testGetAzureInputDataConfigExpectedConfig() { - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureInputDataConfig inputDataConfig = injector.getInstance(AzureInputDataConfig.class); - Assert.assertEquals(AZURE_MAX_LISTING_LENGTH, inputDataConfig.getMaxListingLength()); + assertEquals(AZURE_MAX_LISTING_LENGTH, inputDataConfig.getMaxListingLength()); } @Test public void testGetAzureByteSourceFactoryCanCreateAzureByteSource() { - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureByteSourceFactory factory = injector.getInstance(AzureByteSourceFactory.class); Object object1 = factory.create("container1", "blob1", azureStorage); Object object2 = factory.create("container2", "blob2", azureStorage); - Assert.assertNotNull(object1); - Assert.assertNotNull(object2); - Assert.assertNotSame(object1, object2); + assertNotNull(object1); + assertNotNull(object2); + assertNotSame(object1, object2); } @Test @@ -163,40 +164,40 @@ public void testGetAzureEntityFactoryCanCreateAzureEntity() EasyMock.expect(cloudObjectLocation2.getPath()).andReturn(PATH); replayAll(); - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureEntityFactory factory = injector.getInstance(AzureEntityFactory.class); Object object1 = factory.create(cloudObjectLocation1, azureStorage, AzureInputSource.SCHEME); Object object2 = factory.create(cloudObjectLocation2, azureStorage, AzureInputSource.SCHEME); Object object3 = factory.create(cloudObjectLocation1, azureStorage, AzureStorageAccountInputSource.SCHEME); - Assert.assertNotNull(object1); - Assert.assertNotNull(object2); - Assert.assertNotNull(object3); - Assert.assertNotSame(object1, object2); - Assert.assertNotSame(object1, object3); + assertNotNull(object1); + assertNotNull(object2); + assertNotNull(object3); + assertNotSame(object1, object2); + assertNotSame(object1, object3); } @Test public void testGetAzureCloudBlobIteratorFactoryCanCreateAzureCloudBlobIterator() { - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureCloudBlobIteratorFactory factory = injector.getInstance(AzureCloudBlobIteratorFactory.class); Object object1 = factory.create(EMPTY_PREFIXES_ITERABLE, 10, azureStorage); Object object2 = factory.create(EMPTY_PREFIXES_ITERABLE, 10, azureStorage); - Assert.assertNotNull(object1); - Assert.assertNotNull(object2); - Assert.assertNotSame(object1, object2); + assertNotNull(object1); + assertNotNull(object2); + assertNotSame(object1, object2); } @Test public void 
testGetAzureCloudBlobIterableFactoryCanCreateAzureCloudBlobIterable() { - injector = makeInjectorWithProperties(PROPERTIES); + final Injector injector = makeInjectorWithProperties(PROPERTIES); AzureCloudBlobIterableFactory factory = injector.getInstance(AzureCloudBlobIterableFactory.class); AzureCloudBlobIterable object1 = factory.create(EMPTY_PREFIXES_ITERABLE, 10, azureStorage); AzureCloudBlobIterable object2 = factory.create(EMPTY_PREFIXES_ITERABLE, 10, azureStorage); - Assert.assertNotNull(object1); - Assert.assertNotNull(object2); - Assert.assertNotSame(object1, object2); + assertNotNull(object1); + assertNotNull(object2); + assertNotSame(object1, object2); } @Test @@ -204,79 +205,75 @@ public void testSegmentKillerBoundSingleton() { Injector injector = makeInjectorWithProperties(PROPERTIES); OmniDataSegmentKiller killer = injector.getInstance(OmniDataSegmentKiller.class); - Assert.assertTrue(killer.getKillers().containsKey(AzureStorageDruidModule.SCHEME)); - Assert.assertSame( + assertTrue(killer.getKillers().containsKey(AzureStorageDruidModule.SCHEME)); + assertSame( AzureDataSegmentKiller.class, killer.getKillers().get(AzureStorageDruidModule.SCHEME).get().getClass() ); - Assert.assertSame( + assertSame( killer.getKillers().get(AzureStorageDruidModule.SCHEME).get(), killer.getKillers().get(AzureStorageDruidModule.SCHEME).get() ); } - @Test - public void testMultipleCredentialsSet() + @ParameterizedTest + @MethodSource("propertiesWithMultipleCredentials") + public void testMultipleCredentialsSet(final Properties properties) { - String message = "Set only one of 'key' or 'sharedAccessStorageToken' or 'useAzureCredentialsChain' in the azure config."; - Properties properties = initializePropertes(); - properties.setProperty("druid.azure.sharedAccessStorageToken", AZURE_SHARED_ACCESS_TOKEN); - expectedException.expect(ProvisionException.class); - expectedException.expectMessage(message); - makeInjectorWithProperties(properties).getInstance( - Key.get(new TypeLiteral() - { - }) - ); - - properties = initializePropertes(); - properties.setProperty("druid.azure.managedIdentityClientId", AZURE_MANAGED_CREDENTIAL_CLIENT_ID); - expectedException.expect(ProvisionException.class); - expectedException.expectMessage(message); - makeInjectorWithProperties(properties).getInstance( - Key.get(new TypeLiteral>() - { - }) + final ProvisionException exception = assertThrows( + ProvisionException.class, + () -> makeInjectorWithProperties(properties).getInstance( + Key.get(new TypeLiteral() + { + }) + ) ); - properties = initializePropertes(); - properties.remove("druid.azure.key"); - properties.setProperty("druid.azure.managedIdentityClientId", AZURE_MANAGED_CREDENTIAL_CLIENT_ID); - properties.setProperty("druid.azure.sharedAccessStorageToken", AZURE_SHARED_ACCESS_TOKEN); - expectedException.expect(ProvisionException.class); - expectedException.expectMessage(message); - makeInjectorWithProperties(properties).getInstance( - Key.get(new TypeLiteral() - { - }) + assertEquals( + "Set only one of 'key' or 'sharedAccessStorageToken' or 'useAzureCredentialsChain' in the azure config. 
Please refer to azure documentation.", + exception.getCause().getMessage() ); } @Test public void testAllCredentialsUnset() { - Properties properties = initializePropertes(); + final Properties properties = initializePropertes(); properties.remove("druid.azure.key"); - expectedException.expect(ProvisionException.class); - expectedException.expectMessage("Either set 'key' or 'sharedAccessStorageToken' or 'useAzureCredentialsChain' in the azure config."); - makeInjectorWithProperties(properties).getInstance( - Key.get(new TypeLiteral() - { - }) + + final ProvisionException exception = assertThrows( + ProvisionException.class, + () -> makeInjectorWithProperties(properties).getInstance( + Key.get(new TypeLiteral() + { + }) + ) + ); + + assertEquals( + "Either set 'key' or 'sharedAccessStorageToken' or 'useAzureCredentialsChain' in the azure config. Please refer to azure documentation.", + exception.getCause().getMessage() ); } @Test public void testAccountUnset() { - Properties properties = initializePropertes(); + final Properties properties = initializePropertes(); properties.remove("druid.azure.account"); - expectedException.expect(ProvisionException.class); - expectedException.expectMessage("Set 'account' to the storage account that needs to be configured in the azure config. Please refer to azure documentation."); - makeInjectorWithProperties(properties).getInstance( - Key.get(new TypeLiteral() - { - }) + + final ProvisionException exception = assertThrows( + ProvisionException.class, + () -> makeInjectorWithProperties(properties).getInstance( + Key.get(new TypeLiteral() + { + }) + ) + ); + + assertEquals( + "Set 'account' to the storage account that needs to be configured in the azure config. Please refer to azure documentation.", + exception.getCause().getMessage() ); } @@ -285,9 +282,9 @@ public void testGetBlobStorageEndpointWithDefaultProperties() { Properties properties = initializePropertes(); AzureAccountConfig config = makeInjectorWithProperties(properties).getInstance(AzureAccountConfig.class); - Assert.assertNull(config.getEndpointSuffix()); - Assert.assertEquals(config.getStorageAccountEndpointSuffix(), AzureUtils.AZURE_STORAGE_HOST_ADDRESS); - Assert.assertEquals(config.getBlobStorageEndpoint(), AzureUtils.AZURE_STORAGE_HOST_ADDRESS); + assertNull(config.getEndpointSuffix()); + assertEquals(config.getStorageAccountEndpointSuffix(), AzureUtils.AZURE_STORAGE_HOST_ADDRESS); + assertEquals(config.getBlobStorageEndpoint(), AzureUtils.AZURE_STORAGE_HOST_ADDRESS); } @Test @@ -297,8 +294,8 @@ public void testGetBlobStorageEndpointWithCustomBlobPath() final String customSuffix = "core.usgovcloudapi.net"; properties.setProperty("druid.azure.endpointSuffix", customSuffix); AzureAccountConfig config = makeInjectorWithProperties(properties).getInstance(AzureAccountConfig.class); - Assert.assertEquals(config.getEndpointSuffix(), customSuffix); - Assert.assertEquals(config.getBlobStorageEndpoint(), "blob." + customSuffix); + assertEquals(config.getEndpointSuffix(), customSuffix); + assertEquals(config.getBlobStorageEndpoint(), "blob." 
+ customSuffix); } private Injector makeInjectorWithProperties(final Properties props) @@ -307,15 +304,10 @@ private Injector makeInjectorWithProperties(final Properties props) ImmutableList.of( new DruidGuiceExtensions(), new JacksonModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator()); - binder.bind(JsonConfigurator.class).in(LazySingleton.class); - binder.bind(Properties.class).toInstance(props); - } + binder -> { + binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator()); + binder.bind(JsonConfigurator.class).in(LazySingleton.class); + binder.bind(Properties.class).toInstance(props); }, new AzureStorageDruidModule() )); @@ -324,11 +316,31 @@ public void configure(Binder binder) private static Properties initializePropertes() { final Properties props = new Properties(); - props.put("druid.azure.account", AZURE_ACCOUNT_NAME); - props.put("druid.azure.key", AZURE_ACCOUNT_KEY); - props.put("druid.azure.container", AZURE_CONTAINER); - props.put("druid.azure.prefix", AZURE_PREFIX); - props.put("druid.azure.maxListingLength", String.valueOf(AZURE_MAX_LISTING_LENGTH)); + props.setProperty("druid.azure.account", AZURE_ACCOUNT_NAME); + props.setProperty("druid.azure.key", AZURE_ACCOUNT_KEY); + props.setProperty("druid.azure.container", AZURE_CONTAINER); + props.setProperty("druid.azure.prefix", AZURE_PREFIX); + props.setProperty("druid.azure.maxListingLength", String.valueOf(AZURE_MAX_LISTING_LENGTH)); return props; } + + private static Stream> propertiesWithMultipleCredentials() + { + final Properties propertiesWithKeyAndToken = initializePropertes(); + propertiesWithKeyAndToken.setProperty("druid.azure.sharedAccessStorageToken", AZURE_SHARED_ACCESS_TOKEN); + + final Properties propertiesWithKeyAndCredentialChain = initializePropertes(); + propertiesWithKeyAndCredentialChain.setProperty("druid.azure.useAzureCredentialsChain", Boolean.TRUE.toString()); + + final Properties propertiesWithTokenAndCredentialChain = initializePropertes(); + propertiesWithTokenAndCredentialChain.remove("druid.azure.key"); + propertiesWithTokenAndCredentialChain.setProperty("druid.azure.useAzureCredentialsChain", Boolean.TRUE.toString()); + propertiesWithTokenAndCredentialChain.setProperty("druid.azure.sharedAccessStorageToken", AZURE_SHARED_ACCESS_TOKEN); + + return Stream.of( + Named.of("Key and storage token", propertiesWithKeyAndToken), + Named.of("Key and credential chain", propertiesWithKeyAndCredentialChain), + Named.of("Storage token and credential chain", propertiesWithTokenAndCredentialChain) + ); + } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageTest.java index 6f6606389718..b61e72340148 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureStorageTest.java @@ -31,11 +31,9 @@ import com.azure.storage.blob.models.DeleteSnapshotsOptionType; import com.google.common.collect.ImmutableList; import org.apache.druid.common.guava.SettableSupplier; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeEach; +import 
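/*
 * Sketch of the pattern used above in AzureStorageDruidModuleTest to replace the ExpectedException
 * rule; not taken from the patch itself. Each failure case becomes an assertThrows call whose result
 * is inspected directly, and the duplicated "multiple credentials" scenarios collapse into a single
 * @ParameterizedTest fed by a @MethodSource of Named properties. The validate() helper, the shortened
 * message, and IllegalStateException below are hypothetical stand-ins for building the Guice injector
 * and the resulting ProvisionException.
 */
import java.util.Properties;
import java.util.stream.Stream;
import org.junit.jupiter.api.Named;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

class MultipleCredentialsSketch
{
  @ParameterizedTest
  @MethodSource("propertiesWithMultipleCredentials")
  void rejectsConflictingCredentials(Properties properties)
  {
    final IllegalStateException exception = assertThrows(
        IllegalStateException.class,
        () -> validate(properties)
    );
    assertEquals("Set only one of 'key' or 'sharedAccessStorageToken'.", exception.getMessage());
  }

  private static Stream<Named<Properties>> propertiesWithMultipleCredentials()
  {
    final Properties keyAndToken = new Properties();
    keyAndToken.setProperty("druid.azure.key", "key");
    keyAndToken.setProperty("druid.azure.sharedAccessStorageToken", "token");
    // Named.of gives the scenario a readable display name in the test report
    return Stream.of(Named.of("Key and storage token", keyAndToken));
  }

  private static void validate(Properties properties)
  {
    if (properties.containsKey("druid.azure.key") && properties.containsKey("druid.azure.sharedAccessStorageToken")) {
      throw new IllegalStateException("Set only one of 'key' or 'sharedAccessStorageToken'.");
    }
  }
}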
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.ArgumentCaptor; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; @@ -43,14 +41,19 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.List; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + // Using Mockito for the whole test class since azure classes (e.g. BlobContainerClient) are final and can't be mocked with EasyMock public class AzureStorageTest { - AzureStorage azureStorage; BlobClient blobClient = Mockito.mock(BlobClient.class); BlobServiceClient blobServiceClient = Mockito.mock(BlobServiceClient.class); @@ -60,12 +63,8 @@ public class AzureStorageTest private final String STORAGE_ACCOUNT = "storageAccount"; private final String CONTAINER = "container"; private final String BLOB_NAME = "blobName"; - private final Integer MAX_ATTEMPTS = 3; - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - @Before + @BeforeEach public void setup() throws BlobStorageException { azureStorage = new AzureStorage(azureClientFactory, STORAGE_ACCOUNT); @@ -83,9 +82,11 @@ public void testListDir_retriable() throws BlobStorageException ArgumentMatchers.any() ); Mockito.doReturn(blobContainerClient).when(blobServiceClient).createBlobContainerIfNotExists(CONTAINER); - Mockito.doReturn(blobServiceClient).when(azureClientFactory).getBlobServiceClient(MAX_ATTEMPTS, STORAGE_ACCOUNT); - Assert.assertEquals(ImmutableList.of(BLOB_NAME), azureStorage.listDir(CONTAINER, "", MAX_ATTEMPTS)); + final Integer maxAttempts = 3; + Mockito.doReturn(blobServiceClient).when(azureClientFactory).getBlobServiceClient(maxAttempts, STORAGE_ACCOUNT); + + assertEquals(ImmutableList.of(BLOB_NAME), azureStorage.listDir(CONTAINER, "", maxAttempts)); } @Test @@ -102,7 +103,7 @@ public void testListDir_nullMaxAttempts() throws BlobStorageException Mockito.doReturn(blobContainerClient).when(blobServiceClient).createBlobContainerIfNotExists(CONTAINER); Mockito.doReturn(blobServiceClient).when(azureClientFactory).getBlobServiceClient(null, STORAGE_ACCOUNT); - Assert.assertEquals(ImmutableList.of(BLOB_NAME), azureStorage.listDir(CONTAINER, "", null)); + assertEquals(ImmutableList.of(BLOB_NAME), azureStorage.listDir(CONTAINER, "", null)); } @Test @@ -150,8 +151,8 @@ public void testBatchDeleteFiles_emptyResponse() throws BlobStorageException ); boolean deleteSuccessful = azureStorage.batchDeleteFiles(CONTAINER, ImmutableList.of(BLOB_NAME), null); - Assert.assertEquals(captor.getValue().get(0), containerUrl + "/" + BLOB_NAME); - Assert.assertTrue(deleteSuccessful); + assertEquals(captor.getValue().get(0), containerUrl + "/" + BLOB_NAME); + assertTrue(deleteSuccessful); } @Test @@ -174,8 +175,8 @@ public void testBatchDeleteFiles_error() throws BlobStorageException ); boolean deleteSuccessful = azureStorage.batchDeleteFiles(CONTAINER, ImmutableList.of(BLOB_NAME), null); - Assert.assertEquals(captor.getValue().get(0), containerUrl + "/" + BLOB_NAME); - Assert.assertFalse(deleteSuccessful); + assertEquals(captor.getValue().get(0), containerUrl + "/" + BLOB_NAME); + assertFalse(deleteSuccessful); } @Test @@ -207,15 +208,15 @@ public void testBatchDeleteFiles_emptyResponse_multipleResponses() throws BlobSt boolean deleteSuccessful = 
azureStorage.batchDeleteFiles(CONTAINER, blobNameList, null); List> deletedValues = captor.getAllValues(); - Assert.assertEquals(deletedValues.get(0).size(), 256); - Assert.assertEquals(deletedValues.get(1).size(), 2); - Assert.assertTrue(deleteSuccessful); + assertEquals(deletedValues.get(0).size(), 256); + assertEquals(deletedValues.get(1).size(), 2); + assertTrue(deleteSuccessful); } @Test - public void testUploadBlob_usesOverwrite() throws BlobStorageException, IOException + public void testUploadBlob_usesOverwrite(@TempDir Path tempPath) throws BlobStorageException, IOException { - File tempFile = tempFolder.newFile("tempFile.txt"); + final File tempFile = Files.createFile(tempPath.resolve("tempFile.txt")).toFile(); String blobPath = "blob"; ArgumentCaptor captor = ArgumentCaptor.forClass(InputStream.class); @@ -229,7 +230,7 @@ public void testUploadBlob_usesOverwrite() throws BlobStorageException, IOExcept azureStorage.uploadBlockBlob(tempFile, CONTAINER, blobPath, null); Mockito.verify(blobClient).upload(captor.capture(), captor2.capture(), overrideArgument.capture()); - Assert.assertTrue(overrideArgument.getValue()); + assertTrue(overrideArgument.getValue()); } } diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java index 92010952802a..ca661fab438a 100644 --- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java +++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java @@ -25,15 +25,14 @@ import com.google.common.collect.ImmutableMap; import org.apache.commons.io.IOUtils; import org.apache.druid.common.utils.CurrentTimeMillisSupplier; -import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.storage.azure.blob.CloudBlobHolder; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.ByteArrayInputStream; import java.io.File; @@ -42,10 +41,16 @@ import java.io.StringWriter; import java.net.URI; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public class AzureTaskLogsTest extends EasyMockSupport { - private static final String CONTAINER = "test"; private static final String PREFIX = "test/log"; private static final String TASK_ID = "taskid"; @@ -70,7 +75,7 @@ public class AzureTaskLogsTest extends EasyMockSupport private CurrentTimeMillisSupplier timeSupplier; private AzureTaskLogs azureTaskLogs; - @Before + @BeforeEach public void before() { inputDataConfig = createMock(AzureInputDataConfig.class); @@ -84,174 +89,138 @@ public void before() accountConfig, azureStorage, azureCloudBlobIterableFactory, - timeSupplier); + timeSupplier + ); } - @Test - public void test_PushTaskLog_uploadsBlob() throws Exception + public void test_PushTaskLog_uploadsBlob(@TempDir Path tempPath) throws 
IOException { - final File tmpDir = FileUtils.createTempDir(); + final File logFile = Files.createFile(tempPath.resolve("log")).toFile(); - try { - final File logFile = new File(tmpDir, "log"); + azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log", MAX_TRIES); + EasyMock.expectLastCall(); - azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log", MAX_TRIES); - EasyMock.expectLastCall(); - - EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); + EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - replayAll(); + replayAll(); - azureTaskLogs.pushTaskLog(TASK_ID, logFile); + azureTaskLogs.pushTaskLog(TASK_ID, logFile); - verifyAll(); - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + verifyAll(); } - @Test(expected = RuntimeException.class) - public void test_PushTaskLog_exception_rethrowsException() throws Exception + @Test + public void test_PushTaskLog_exception_rethrowsException(@TempDir Path tempPath) throws IOException { - final File tmpDir = FileUtils.createTempDir(); + final File logFile = Files.createFile(tempPath.resolve("log")).toFile(); - try { - final File logFile = new File(tmpDir, "log"); - - EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log", MAX_TRIES); - EasyMock.expectLastCall().andThrow(new IOException()); + EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); + azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log", MAX_TRIES); + EasyMock.expectLastCall().andThrow(new IOException()); - replayAll(); + replayAll(); - azureTaskLogs.pushTaskLog(TASK_ID, logFile); + assertThrows( + RuntimeException.class, + () -> azureTaskLogs.pushTaskLog(TASK_ID, logFile) + ); - verifyAll(); - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + verifyAll(); } @Test - public void test_PushTaskReports_uploadsBlob() throws Exception + public void test_PushTaskReports_uploadsBlob(@TempDir Path tempPath) throws IOException { - final File tmpDir = FileUtils.createTempDir(); + final File logFile = Files.createFile(tempPath.resolve("log")).toFile(); - try { - final File logFile = new File(tmpDir, "log"); - - EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/report.json", MAX_TRIES); - EasyMock.expectLastCall(); + EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); + azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/report.json", MAX_TRIES); + EasyMock.expectLastCall(); - replayAll(); + replayAll(); - azureTaskLogs.pushTaskReports(TASK_ID, logFile); + azureTaskLogs.pushTaskReports(TASK_ID, logFile); - verifyAll(); - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + verifyAll(); } @Test - public void test_PushTaskStatus_uploadsBlob() throws Exception + public void test_PushTaskStatus_uploadsBlob(@TempDir Path tempPath) throws IOException { - final File tmpDir = FileUtils.createTempDir(); + final File logFile = Files.createFile(tempPath.resolve("status.json")).toFile(); - try { - final File logFile = new File(tmpDir, "status.json"); - - EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/status.json", MAX_TRIES); - EasyMock.expectLastCall(); + 
EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); + azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/status.json", MAX_TRIES); + EasyMock.expectLastCall(); - replayAll(); + replayAll(); - azureTaskLogs.pushTaskStatus(TASK_ID, logFile); + azureTaskLogs.pushTaskStatus(TASK_ID, logFile); - verifyAll(); - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + verifyAll(); } @Test - public void test_PushTaskPayload_uploadsBlob() throws Exception + public void test_PushTaskPayload_uploadsBlob(@TempDir Path tempPath) throws IOException { - final File tmpDir = FileUtils.createTempDir(); + final File taskFile = Files.createFile(tempPath.resolve("task.json")).toFile(); - try { - final File taskFile = new File(tmpDir, "task.json"); - - EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - azureStorage.uploadBlockBlob(taskFile, CONTAINER, PREFIX + "/" + TASK_ID + "/task.json", MAX_TRIES); - EasyMock.expectLastCall(); + EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); + azureStorage.uploadBlockBlob(taskFile, CONTAINER, PREFIX + "/" + TASK_ID + "/task.json", MAX_TRIES); + EasyMock.expectLastCall(); - replayAll(); + replayAll(); - azureTaskLogs.pushTaskPayload(TASK_ID, taskFile); + azureTaskLogs.pushTaskPayload(TASK_ID, taskFile); - verifyAll(); - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + verifyAll(); } - @Test(expected = RuntimeException.class) - public void test_PushTaskReports_exception_rethrowsException() throws Exception + @Test + public void test_PushTaskReports_exception_rethrowsException(@TempDir Path tempPath) throws IOException { - final File tmpDir = FileUtils.createTempDir(); + final File logFile = Files.createFile(tempPath.resolve("log")).toFile(); - try { - final File logFile = new File(tmpDir, "log"); - - EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); - azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/report.json", MAX_TRIES); - EasyMock.expectLastCall().andThrow(new IOException()); + EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes(); + azureStorage.uploadBlockBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/report.json", MAX_TRIES); + EasyMock.expectLastCall().andThrow(new IOException()); - replayAll(); + replayAll(); - azureTaskLogs.pushTaskReports(TASK_ID, logFile); + assertThrows( + RuntimeException.class, + () -> azureTaskLogs.pushTaskReports(TASK_ID, logFile) + ); - verifyAll(); - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + verifyAll(); } @Test - public void testStreamTaskLogWithoutOffset() throws Exception + public void testStreamTaskLogWithoutOffset() throws IOException { final String testLog = "hello this is a log"; final String blobPath = PREFIX + "/" + TASK_ID + "/log"; EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(true); EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length()); - EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andReturn( - new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); - + EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)) + .andReturn(new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); replayAll(); final Optional stream = azureTaskLogs.streamTaskLog(TASK_ID, 0); + assertTrue(stream.isPresent()); final StringWriter writer = new StringWriter(); 
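/*
 * Sketch of the cleanup simplification applied to the pushTask* tests above; not taken from the
 * patch itself. The manual FileUtils.createTempDir() / try-finally / deleteDirectory() dance is
 * replaced by a @TempDir parameter that JUnit 5 creates and deletes around each test. The file name
 * follows the tests above; the assertion is only illustrative.
 */
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import static org.junit.jupiter.api.Assertions.assertTrue;

class TempDirCleanupSketch
{
  @Test
  void createsTaskFileWithoutManualCleanup(@TempDir Path tempPath) throws Exception
  {
    // JUnit 4: final File tmpDir = FileUtils.createTempDir(); try { ... } finally { FileUtils.deleteDirectory(tmpDir); }
    final File taskFile = Files.createFile(tempPath.resolve("task.json")).toFile();
    assertTrue(taskFile.exists()); // tempPath is removed automatically after the test
  }
}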
IOUtils.copy(stream.get(), writer, "UTF-8"); - Assert.assertEquals(writer.toString(), testLog); + assertEquals(writer.toString(), testLog); verifyAll(); } @Test - public void testStreamTaskLogWithPositiveOffset() throws Exception + public void testStreamTaskLogWithPositiveOffset() throws IOException { final String testLog = "hello this is a log"; @@ -261,43 +230,43 @@ public void testStreamTaskLogWithPositiveOffset() throws Exception EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andReturn( new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); - replayAll(); final Optional stream = azureTaskLogs.streamTaskLog(TASK_ID, 5); + assertTrue(stream.isPresent()); final StringWriter writer = new StringWriter(); IOUtils.copy(stream.get(), writer, "UTF-8"); - Assert.assertEquals(writer.toString(), testLog.substring(5)); + assertEquals(writer.toString(), testLog.substring(5)); verifyAll(); } @Test - public void testStreamTaskLogWithNegative() throws Exception + public void testStreamTaskLogWithNegative() throws IOException { final String testLog = "hello this is a log"; final String blobPath = PREFIX + "/" + TASK_ID + "/log"; EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(true); EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length()); - EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andReturn( - new ByteArrayInputStream(StringUtils.toUtf8(testLog))); - + EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)) + .andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); replayAll(); final Optional stream = azureTaskLogs.streamTaskLog(TASK_ID, -3); + assertTrue(stream.isPresent()); final StringWriter writer = new StringWriter(); IOUtils.copy(stream.get(), writer, "UTF-8"); - Assert.assertEquals(writer.toString(), testLog.substring(testLog.length() - 3)); + assertEquals(writer.toString(), testLog.substring(testLog.length() - 3)); verifyAll(); } @Test - public void test_streamTaskReports_blobExists_succeeds() throws Exception + public void test_streamTaskReports_blobExists_succeeds() throws IOException { final String testLog = "hello this is a log"; @@ -311,19 +280,18 @@ public void test_streamTaskReports_blobExists_succeeds() throws Exception replayAll(); final Optional stream = azureTaskLogs.streamTaskReports(TASK_ID); + assertTrue(stream.isPresent()); final StringWriter writer = new StringWriter(); IOUtils.copy(stream.get(), writer, "UTF-8"); - Assert.assertEquals(writer.toString(), testLog); + assertEquals(writer.toString(), testLog); verifyAll(); } @Test - public void test_streamTaskReports_blobDoesNotExist_returnsAbsent() throws Exception + public void test_streamTaskReports_blobDoesNotExist_returnsAbsent() throws IOException { - final String testLog = "hello this is a log"; - final String blobPath = PREFIX + "/" + TASK_ID_NOT_FOUND + "/report.json"; EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(false); @@ -331,72 +299,76 @@ public void test_streamTaskReports_blobDoesNotExist_returnsAbsent() throws Excep final Optional stream = azureTaskLogs.streamTaskReports(TASK_ID_NOT_FOUND); - - Assert.assertFalse(stream.isPresent()); + assertFalse(stream.isPresent()); verifyAll(); } - @Test(expected = IOException.class) - public void test_streamTaskReports_exceptionWhenGettingStream_throwsException() throws Exception + @Test + public void 
test_streamTaskReports_exceptionWhenGettingStream_throwsException() { final String testLog = "hello this is a log"; final String blobPath = PREFIX + "/" + TASK_ID + "/report.json"; - EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(true); - EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length()); - EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andThrow( - new BlobStorageException("", null, null)); - + EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)) + .andReturn(true); + EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)) + .andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)) + .andThrow(new BlobStorageException("", null, null)); replayAll(); - final Optional stream = azureTaskLogs.streamTaskReports(TASK_ID); + assertThrows( + IOException.class, + () -> azureTaskLogs.streamTaskReports(TASK_ID) + ); - final StringWriter writer = new StringWriter(); - IOUtils.copy(stream.get(), writer, "UTF-8"); verifyAll(); } - @Test(expected = IOException.class) - public void test_streamTaskReports_exceptionWhenCheckingBlobExistence_throwsException() throws Exception + @Test + public void test_streamTaskReports_exceptionWhenCheckingBlobExistence_throwsException() { - final String blobPath = PREFIX + "/" + TASK_ID + "/report.json"; - EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andThrow(new BlobStorageException("", null, null)); + EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)) + .andThrow(new BlobStorageException("", null, null)); replayAll(); - azureTaskLogs.streamTaskReports(TASK_ID); + assertThrows( + IOException.class, + () -> azureTaskLogs.streamTaskReports(TASK_ID) + ); verifyAll(); } @Test - public void test_streamTaskStatus_blobExists_succeeds() throws Exception + public void test_streamTaskStatus_blobExists_succeeds() throws IOException { final String taskStatus = "{}"; final String blobPath = PREFIX + "/" + TASK_ID + "/status.json"; EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(true); EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)).andReturn((long) taskStatus.length()); - EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andReturn( - new ByteArrayInputStream(taskStatus.getBytes(StandardCharsets.UTF_8))); - + EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)) + .andReturn(new ByteArrayInputStream(taskStatus.getBytes(StandardCharsets.UTF_8))); replayAll(); final Optional stream = azureTaskLogs.streamTaskStatus(TASK_ID); + assertTrue(stream.isPresent()); final StringWriter writer = new StringWriter(); IOUtils.copy(stream.get(), writer, "UTF-8"); - Assert.assertEquals(writer.toString(), taskStatus); + assertEquals(writer.toString(), taskStatus); verifyAll(); } @Test - public void test_streamTaskStatus_blobDoesNotExist_returnsAbsent() throws Exception + public void test_streamTaskStatus_blobDoesNotExist_returnsAbsent() throws IOException { final String blobPath = PREFIX + "/" + TASK_ID_NOT_FOUND + "/status.json"; EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(false); @@ -406,47 +378,53 @@ public void test_streamTaskStatus_blobDoesNotExist_returnsAbsent() throws Except final Optional stream = azureTaskLogs.streamTaskStatus(TASK_ID_NOT_FOUND); - Assert.assertFalse(stream.isPresent()); + 
assertFalse(stream.isPresent()); verifyAll(); } - @Test(expected = IOException.class) - public void test_streamTaskStatus_exceptionWhenGettingStream_throwsException() throws Exception + @Test + public void test_streamTaskStatus_exceptionWhenGettingStream_throwsException() { final String taskStatus = "{}"; final String blobPath = PREFIX + "/" + TASK_ID + "/status.json"; - EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(true); - EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)).andReturn((long) taskStatus.length()); - EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andThrow( - new BlobStorageException("", null, null)); - + EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)) + .andReturn(true); + EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)) + .andReturn((long) taskStatus.length()); + EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)) + .andThrow(new BlobStorageException("", null, null)); replayAll(); - final Optional stream = azureTaskLogs.streamTaskStatus(TASK_ID); + assertThrows( + IOException.class, + () -> azureTaskLogs.streamTaskStatus(TASK_ID) + ); - final StringWriter writer = new StringWriter(); - IOUtils.copy(stream.get(), writer, "UTF-8"); verifyAll(); } - @Test(expected = IOException.class) - public void test_streamTaskStatus_exceptionWhenCheckingBlobExistence_throwsException() throws Exception + @Test + public void test_streamTaskStatus_exceptionWhenCheckingBlobExistence_throwsException() { final String blobPath = PREFIX + "/" + TASK_ID + "/status.json"; - EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andThrow(new BlobStorageException("", null, null)); + EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)) + .andThrow(new BlobStorageException("", null, null)); replayAll(); - azureTaskLogs.streamTaskStatus(TASK_ID); + assertThrows( + IOException.class, + () -> azureTaskLogs.streamTaskStatus(TASK_ID) + ); verifyAll(); } @Test - public void test_streamTaskPayload_blobExists_succeeds() throws Exception + public void test_streamTaskPayload_blobExists_succeeds() throws IOException { final String taskPayload = "{}"; @@ -456,20 +434,20 @@ public void test_streamTaskPayload_blobExists_succeeds() throws Exception EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andReturn( new ByteArrayInputStream(taskPayload.getBytes(StandardCharsets.UTF_8))); - replayAll(); final Optional stream = azureTaskLogs.streamTaskPayload(TASK_ID); + assertTrue(stream.isPresent()); final StringWriter writer = new StringWriter(); IOUtils.copy(stream.get(), writer, "UTF-8"); - Assert.assertEquals(writer.toString(), taskPayload); + assertEquals(writer.toString(), taskPayload); verifyAll(); } @Test - public void test_streamTaskPayload_blobDoesNotExist_returnsAbsent() throws Exception + public void test_streamTaskPayload_blobDoesNotExist_returnsAbsent() throws IOException { final String blobPath = PREFIX + "/" + TASK_ID_NOT_FOUND + "/task.json"; EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(false); @@ -479,48 +457,53 @@ public void test_streamTaskPayload_blobDoesNotExist_returnsAbsent() throws Excep final Optional stream = azureTaskLogs.streamTaskPayload(TASK_ID_NOT_FOUND); - Assert.assertFalse(stream.isPresent()); + assertFalse(stream.isPresent()); verifyAll(); } - @Test(expected = IOException.class) - public void 
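/*
 * Sketch of the conversion used above for the streamTask* failure cases; not taken from the patch
 * itself. @Test(expected = IOException.class) becomes an explicit assertThrows around the call that
 * is expected to fail, which lets the test keep running and call verifyAll() afterwards instead of
 * aborting at the throw. The failing reader below is a hypothetical stand-in for the mocked
 * AzureStorage call that throws BlobStorageException.
 */
import java.io.IOException;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

class AssertThrowsSketch
{
  @Test
  void reportsMissingBlobAsIOException()
  {
    final IOException exception = assertThrows(IOException.class, () -> readBlob("report.json"));
    assertEquals("could not read blob: report.json", exception.getMessage());
    // with EasyMock in play, verifyAll() would run here, after the assertion
  }

  private static String readBlob(String blobPath) throws IOException
  {
    throw new IOException("could not read blob: " + blobPath);
  }
}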
test_streamTaskPayload_exceptionWhenGettingStream_throwsException() throws Exception
+  @Test
+  public void test_streamTaskPayload_exceptionWhenGettingStream_throwsException()
   {
     final String taskPayload = "{}";
     final String blobPath = PREFIX + "/" + TASK_ID + "/task.json";
-    EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andReturn(true);
-    EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath)).andReturn((long) taskPayload.length());
-    EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath)).andThrow(
-        new BlobStorageException("", null, null));
-
+    EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath))
+            .andReturn(true);
+    EasyMock.expect(azureStorage.getBlockBlobLength(CONTAINER, blobPath))
+            .andReturn((long) taskPayload.length());
+    EasyMock.expect(azureStorage.getBlockBlobInputStream(CONTAINER, blobPath))
+            .andThrow(new BlobStorageException("", null, null));
     replayAll();
 
-    final Optional<InputStream> stream = azureTaskLogs.streamTaskPayload(TASK_ID);
+    assertThrows(
+        IOException.class,
+        () -> azureTaskLogs.streamTaskPayload(TASK_ID)
+    );
 
-    final StringWriter writer = new StringWriter();
-    IOUtils.copy(stream.get(), writer, "UTF-8");
     verifyAll();
   }
 
-  @Test(expected = IOException.class)
-  public void test_streamTaskPayload_exceptionWhenCheckingBlobExistence_throwsException() throws Exception
+  @Test
+  public void test_streamTaskPayload_exceptionWhenCheckingBlobExistence_throwsException()
   {
     final String blobPath = PREFIX + "/" + TASK_ID + "/task.json";
-    EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath)).andThrow(new BlobStorageException("", null, null));
+    EasyMock.expect(azureStorage.getBlockBlobExists(CONTAINER, blobPath))
+            .andThrow(new BlobStorageException("", null, null));
 
     replayAll();
 
-    azureTaskLogs.streamTaskPayload(TASK_ID);
+    assertThrows(
+        IOException.class,
+        () -> azureTaskLogs.streamTaskPayload(TASK_ID)
+    );
 
     verifyAll();
   }
 
-
   @Test
-  public void test_killAll_noException_deletesAllTaskLogs() throws Exception
+  public void test_killAll_noException_deletesAllTaskLogs() throws IOException
   {
     EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS);
     EasyMock.expect(timeSupplier.getAsLong()).andReturn(TIME_NOW);
@@ -544,9 +527,25 @@ public void test_killAll_noException_deletesAllTaskLogs() throws Exception
         ImmutableMap.of(),
         MAX_TRIES
     );
-    EasyMock.replay(inputDataConfig, accountConfig, timeSupplier, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage);
+    EasyMock.replay(
+        inputDataConfig,
+        accountConfig,
+        timeSupplier,
+        azureCloudBlobIterable,
+        azureCloudBlobIterableFactory,
+        azureStorage
+    );
 
     azureTaskLogs.killAll();
-    EasyMock.verify(inputDataConfig, accountConfig, timeSupplier, object1, object2, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage);
+    EasyMock.verify(
+        inputDataConfig,
+        accountConfig,
+        timeSupplier,
+        object1,
+        object2,
+        azureCloudBlobIterable,
+        azureCloudBlobIterableFactory,
+        azureStorage
+    );
   }
 
   @Test
@@ -590,7 +589,7 @@ public void test_killAll_nonrecoverableExceptionWhenListingObjects_doesntDeleteA
     catch (IOException e) {
       ioExceptionThrown = true;
     }
-    Assert.assertTrue(ioExceptionThrown);
+    assertTrue(ioExceptionThrown);
     EasyMock.verify(
         inputDataConfig,
         accountConfig,
@@ -603,7 +602,7 @@ public void test_killAll_nonrecoverableExceptionWhenListingObjects_doesntDeleteA
   }
 
   @Test
-  public void test_killOlderThan_noException_deletesOnlyTaskLogsOlderThan() throws Exception
+  public void test_killOlderThan_noException_deletesOnlyTaskLogsOlderThan() throws IOException
   {
     EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS);
     EasyMock.expect(accountConfig.getMaxTries()).andReturn(MAX_TRIES).anyTimes();
@@ -626,9 +625,25 @@ public void test_killOlderThan_noException_deletesOnlyTaskLogsOlderThan() throws
         ImmutableMap.of(),
         MAX_TRIES
     );
-    EasyMock.replay(inputDataConfig, accountConfig, timeSupplier, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage);
+    EasyMock.replay(
+        inputDataConfig,
+        accountConfig,
+        timeSupplier,
+        azureCloudBlobIterable,
+        azureCloudBlobIterableFactory,
+        azureStorage
+    );
 
     azureTaskLogs.killOlderThan(TIME_NOW);
-    EasyMock.verify(inputDataConfig, accountConfig, timeSupplier, object1, object2, azureCloudBlobIterable, azureCloudBlobIterableFactory, azureStorage);
+    EasyMock.verify(
+        inputDataConfig,
+        accountConfig,
+        timeSupplier,
+        object1,
+        object2,
+        azureCloudBlobIterable,
+        azureCloudBlobIterableFactory,
+        azureStorage
+    );
   }
 
   @Test
@@ -671,7 +686,7 @@ public void test_killOlderThan_nonrecoverableExceptionWhenListingObjects_doesntD
     catch (IOException e) {
      ioExceptionThrown = true;
     }
-    Assert.assertTrue(ioExceptionThrown);
+    assertTrue(ioExceptionThrown);
     EasyMock.verify(
         inputDataConfig,
         accountConfig,
@@ -683,15 +698,7 @@ public void test_killOlderThan_nonrecoverableExceptionWhenListingObjects_doesntD
     );
   }
 
-  /*
-  @Test (expected = UnsupportedOperationException.class)
-  public void test_killOlderThan_throwsUnsupportedOperationException() throws IOException
-  {
-    azureTaskLogs.killOlderThan(0);
-  }
-  */
-
-  @After
+  @AfterEach
   public void cleanup()
   {
     resetAll();
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java
index e6c048dbdd22..ddbb70b1309d 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java
@@ -25,34 +25,14 @@
 import org.easymock.EasyMockSupport;
 import org.easymock.IExpectationSetters;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
 import java.net.URI;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
 
 public class AzureTestUtils extends EasyMockSupport
 {
-  public static File createZipTempFile(final String segmentFileName, final String content) throws IOException
-  {
-    final File zipFile = Files.createTempFile("index", ".zip").toFile();
-    final byte[] value = content.getBytes(StandardCharsets.UTF_8);
-
-    try (ZipOutputStream zipStream = new ZipOutputStream(new FileOutputStream(zipFile))) {
-      zipStream.putNextEntry(new ZipEntry(segmentFileName));
-      zipStream.write(value);
-    }
-
-    return zipFile;
-  }
-
   public static AzureCloudBlobIterable expectListObjects(
       AzureCloudBlobIterableFactory azureCloudBlobIterableFactory,
       int maxListingLength,
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureUtilsTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureUtilsTest.java
index 4a28c4de4ccc..6491c61521c1 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureUtilsTest.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureUtilsTest.java
@@ -23,19 +23,22 @@
 import com.azure.storage.blob.models.BlobStorageException;
 import org.apache.druid.data.input.azure.AzureInputSource;
 import org.easymock.EasyMock;
-import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockExtension;
 import org.easymock.EasyMockSupport;
 import org.easymock.Mock;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.concurrent.TimeoutException;
 
-@RunWith(EasyMockRunner.class)
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+@ExtendWith(EasyMockExtension.class)
 public class AzureUtilsTest extends EasyMockSupport
 {
   private static final String CONTAINER_NAME = "container1";
@@ -79,28 +82,28 @@ public class AzureUtilsTest extends EasyMockSupport
   public void test_extractAzureKey_pathHasLeadingSlash_returnsPathWithLeadingSlashRemoved()
   {
     String extractedKey = AzureUtils.extractAzureKey(URI_WITH_PATH_WITH_LEADING_SLASH);
-    Assert.assertEquals(BLOB_NAME, extractedKey);
+    assertEquals(BLOB_NAME, extractedKey);
   }
 
   @Test
   public void test_maybeRemoveAzurePathPrefix_pathHasLeadingAzurePathPrefix_returnsPathWithLeadingAzurePathRemoved()
   {
     String path = AzureUtils.maybeRemoveAzurePathPrefix(BLOB_PATH_WITH_LEADING_AZURE_PREFIX, AzureUtils.AZURE_STORAGE_HOST_ADDRESS);
-    Assert.assertEquals(BLOB_NAME, path);
+    assertEquals(BLOB_NAME, path);
   }
 
   @Test
   public void test_maybeRemoveAzurePathPrefix_pathDoesNotHaveAzurePathPrefix__returnsPathWithLeadingAzurePathRemoved()
   {
     String path = AzureUtils.maybeRemoveAzurePathPrefix(BLOB_NAME, AzureUtils.AZURE_STORAGE_HOST_ADDRESS);
-    Assert.assertEquals(BLOB_NAME, path);
+    assertEquals(BLOB_NAME, path);
   }
 
   @Test
   public void test_azureRetry_URISyntaxException_returnsFalse()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(URI_SYNTAX_EXCEPTION);
-    Assert.assertFalse(retry);
+    assertFalse(retry);
   }
 
   @Test
@@ -112,7 +115,7 @@ public void test_azureRetry_StorageException_500ErrorCode_returnsTrue()
     BlobStorageException blobStorageException = new BlobStorageException("storage exception", httpResponse, null);
     boolean retry = AzureUtils.AZURE_RETRY.apply(blobStorageException);
     verifyAll();
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
@@ -124,7 +127,7 @@ public void test_azureRetry_StorageException_429ErrorCode_returnsTrue()
     BlobStorageException blobStorageException = new BlobStorageException("storage exception", httpResponse, null);
     boolean retry = AzureUtils.AZURE_RETRY.apply(blobStorageException);
     verifyAll();
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
@@ -136,7 +139,7 @@ public void test_azureRetry_StorageException_503ErrorCode_returnsTrue()
     BlobStorageException blobStorageException = new BlobStorageException("storage exception", httpResponse, null);
     boolean retry = AzureUtils.AZURE_RETRY.apply(blobStorageException);
     verifyAll();
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
@@ -148,76 +151,76 @@ public void test_azureRetry_StorageException_400ErrorCode_returnsFalse()
     BlobStorageException blobStorageException = new BlobStorageException("storage exception", httpResponse, null);
     boolean retry = AzureUtils.AZURE_RETRY.apply(blobStorageException);
     verifyAll();
-    Assert.assertFalse(retry);
+    assertFalse(retry);
   }
 
   @Test
   public void test_azureRetry_nestedIOException_returnsTrue()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(new RuntimeException("runtime", new IOException("ioexception")));
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
   public void test_azureRetry_nestedTimeoutException_returnsTrue()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(new RuntimeException("runtime", new TimeoutException("timeout exception")));
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
   public void test_azureRetry_IOException_returnsTrue()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(IO_EXCEPTION);
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
   public void test_azureRetry_nullException_returnsFalse()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(null);
-    Assert.assertFalse(retry);
+    assertFalse(retry);
   }
 
   @Test
   public void test_azureRetry_RunTimeException_returnsFalse()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(RUNTIME_EXCEPTION);
-    Assert.assertFalse(retry);
+    assertFalse(retry);
   }
 
   @Test
   public void test_azureRetry_nullExceptionWrappedInRunTimeException_returnsFalse()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(NULL_EXCEPTION_WRAPPED_IN_RUNTIME_EXCEPTION);
-    Assert.assertFalse(retry);
+    assertFalse(retry);
   }
 
   @Test
   public void test_azureRetry_IOExceptionWrappedInRunTimeException_returnsTrue()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(IO_EXCEPTION_WRAPPED_IN_RUNTIME_EXCEPTION);
-    Assert.assertTrue(retry);
+    assertTrue(retry);
   }
 
   @Test
   public void test_azureRetry_RunTimeExceptionWrappedInRunTimeException_returnsFalse()
   {
     boolean retry = AzureUtils.AZURE_RETRY.apply(RUNTIME_EXCEPTION_WRAPPED_IN_RUNTIME_EXCEPTON);
-    Assert.assertFalse(retry);
+    assertFalse(retry);
   }
 
   @Test
   public void testRemoveAzurePathPrefixDefaultEndpoint()
   {
     String outputBlob = AzureUtils.maybeRemoveAzurePathPrefix("blob.core.windows.net/container/blob", "blob.core.windows.net");
-    Assert.assertEquals("container/blob", outputBlob);
+    assertEquals("container/blob", outputBlob);
   }
 
   @Test
   public void testRemoveAzurePathPrefixCustomEndpoint()
   {
     String outputBlob = AzureUtils.maybeRemoveAzurePathPrefix("blob.core.usgovcloudapi.net/container/blob", "blob.core.usgovcloudapi.net");
-    Assert.assertEquals("container/blob", outputBlob);
+    assertEquals("container/blob", outputBlob);
   }
 }
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureInputRangeTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureInputRangeTest.java
index 4753132d1c48..722d28f58349 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureInputRangeTest.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureInputRangeTest.java
@@ -20,7 +20,7 @@
 package org.apache.druid.storage.azure.output;
 
 import nl.jqno.equalsverifier.EqualsVerifier;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class AzureInputRangeTest
 {
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputConfigTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputConfigTest.java
index ab3104adf4ea..058887316ec9 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputConfigTest.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputConfigTest.java
@@ -24,62 +24,60 @@
 import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.HumanReadableBytes;
 import org.apache.druid.java.util.common.ISE;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
 import java.io.File;
 import java.io.IOException;
 
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 public class AzureOutputConfigTest
 {
-
-  @Rule
-  public final TemporaryFolder temporaryFolder = new TemporaryFolder();
-
   private static final String CONTAINER = "container";
   private static final String PREFIX = "prefix";
   private static final int MAX_RETRY_COUNT = 0;
 
   @Test
-  public void testTooLargeChunkSize()
+  public void testTooLargeChunkSize(@TempDir File tempDir)
   {
     HumanReadableBytes chunkSize = new HumanReadableBytes("4001MiB");
-    Assert.assertThrows(
+
+    //noinspection ResultOfObjectAllocationIgnored
+    assertThrows(
         DruidException.class,
-        () -> new AzureOutputConfig(CONTAINER, PREFIX, temporaryFolder.newFolder(), chunkSize, MAX_RETRY_COUNT)
+        () -> new AzureOutputConfig(CONTAINER, PREFIX, tempDir, chunkSize, MAX_RETRY_COUNT)
     );
   }
 
   @Test
-  public void testTempDirectoryNotWritable() throws IOException
+  public void testTempDirectoryNotWritable(@TempDir File tempDir)
   {
-    File tempDir = temporaryFolder.newFolder();
     if (!tempDir.setWritable(false)) {
       throw new ISE("Unable to change the permission of temp folder for %s", this.getClass().getName());
     }
+
     //noinspection ResultOfObjectAllocationIgnored
-    Assert.assertThrows(
+    assertThrows(
         DruidException.class,
         () -> new AzureOutputConfig(CONTAINER, PREFIX, tempDir, null, MAX_RETRY_COUNT)
     );
   }
 
   @Test
-  public void testTempDirectoryNotPresentButWritable() throws IOException
+  public void testTempDirectoryNotPresentButWritable(@TempDir File tempDir)
   {
-    File tempDir = new File(temporaryFolder.newFolder() + "/notPresent1/notPresent2/notPresent3");
+    File temporaryFolder = new File(tempDir + "/notPresent1/notPresent2/notPresent3");
     //noinspection ResultOfObjectAllocationIgnored
-    new AzureOutputConfig(CONTAINER, PREFIX, tempDir, null, MAX_RETRY_COUNT);
+    new AzureOutputConfig(CONTAINER, PREFIX, temporaryFolder, null, MAX_RETRY_COUNT);
  }
 
   @Test
-  public void testTempDirectoryPresent() throws IOException
+  public void testTempDirectoryPresent(@TempDir File tempDir) throws IOException
   {
-    File tempDir = new File(temporaryFolder.newFolder() + "/notPresent1/notPresent2/notPresent3");
-    FileUtils.mkdirp(tempDir);
+    File temporaryFolder = new File(tempDir + "/notPresent1/notPresent2/notPresent3");
+    FileUtils.mkdirp(temporaryFolder);
     //noinspection ResultOfObjectAllocationIgnored
-    new AzureOutputConfig(CONTAINER, PREFIX, tempDir, null, MAX_RETRY_COUNT);
+    new AzureOutputConfig(CONTAINER, PREFIX, temporaryFolder, null, MAX_RETRY_COUNT);
   }
 }
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputSerdeTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputSerdeTest.java
index ecf99666ce70..aea5232217a7 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputSerdeTest.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureOutputSerdeTest.java
@@ -25,12 +25,14 @@
 import com.fasterxml.jackson.databind.exc.ValueInstantiationException;
 import org.apache.druid.java.util.common.HumanReadableBytes;
 import org.apache.druid.java.util.common.StringUtils;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.File;
 import java.io.IOException;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 public class AzureOutputSerdeTest
 {
 
@@ -55,12 +57,9 @@ public void sanity() throws IOException
         2
     );
 
-    Assert.assertEquals(
-        json,
-        MAPPER.writeValueAsString(azureOutputConfig)
-    );
+    assertEquals(json, MAPPER.writeValueAsString(azureOutputConfig));
 
-    Assert.assertEquals(azureOutputConfig, MAPPER.readValue(json, AzureOutputConfig.class));
+    assertEquals(azureOutputConfig, MAPPER.readValue(json, AzureOutputConfig.class));
   }
 
   @Test
@@ -72,7 +71,7 @@ public void noPrefix()
         + "  \"chunkSize\":104857600,\n"
         + "  \"maxRetry\": 2\n"
         + "}\n");
-    Assert.assertThrows(MismatchedInputException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
+    assertThrows(MismatchedInputException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
   }
 
   @Test
@@ -84,7 +83,7 @@ public void noContainer()
         + "  \"chunkSize\":104857600,\n"
         + "  \"maxRetry\": 2\n"
         + "}\n");
-    Assert.assertThrows(MismatchedInputException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
+    assertThrows(MismatchedInputException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
   }
 
   @Test
@@ -96,7 +95,7 @@ public void noTempDir()
         + "  \"chunkSize\":104857600,\n"
         + "  \"maxRetry\": 2\n"
         + "}\n");
-    Assert.assertThrows(MismatchedInputException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
+    assertThrows(MismatchedInputException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
   }
 
   @Test
@@ -115,7 +114,7 @@ public void leastArguments() throws JsonProcessingException
         null,
         null
     );
-    Assert.assertEquals(azureOutputConfig, MAPPER.readValue(json, AzureOutputConfig.class));
+    assertEquals(azureOutputConfig, MAPPER.readValue(json, AzureOutputConfig.class));
   }
 
 
@@ -130,7 +129,7 @@ public void testChunkValidation()
         + "  \"chunkSize\":104,\n"
         + "  \"maxRetry\": 2\n"
         + "}\n");
-    Assert.assertThrows(ValueInstantiationException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
+    assertThrows(ValueInstantiationException.class, () -> MAPPER.readValue(json, AzureOutputConfig.class));
   }
 
   private static String jsonStringReadyForAssert(String input)
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorProviderTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorProviderTest.java
index 50a856c71255..f03287e7310e 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorProviderTest.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorProviderTest.java
@@ -34,12 +34,15 @@
 import org.apache.druid.storage.azure.AzureStorage;
 import org.apache.druid.storage.azure.AzureStorageDruidModule;
 import org.easymock.EasyMock;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.File;
 import java.util.Properties;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
 public class AzureStorageConnectorProviderTest
 {
   private static final String CUSTOM_NAMESPACE = "custom";
@@ -55,11 +58,12 @@ public void createAzureStorageFactoryWithRequiredProperties()
     properties.setProperty(CUSTOM_NAMESPACE + ".tempDir", "/tmp");
 
     StorageConnectorProvider s3StorageConnectorProvider = getStorageConnectorProvider(properties);
-    Assert.assertTrue(s3StorageConnectorProvider instanceof AzureStorageConnectorProvider);
-    Assert.assertTrue(s3StorageConnectorProvider.get() instanceof AzureStorageConnector);
-    Assert.assertEquals("container", ((AzureStorageConnectorProvider) s3StorageConnectorProvider).getContainer());
-    Assert.assertEquals("prefix", ((AzureStorageConnectorProvider) s3StorageConnectorProvider).getPrefix());
-    Assert.assertEquals(new File("/tmp"), ((AzureStorageConnectorProvider) s3StorageConnectorProvider).getTempDir());
+    assertInstanceOf(AzureStorageConnectorProvider.class, s3StorageConnectorProvider);
+    assertInstanceOf(AzureStorageConnector.class, s3StorageConnectorProvider.get());
+    assertEquals("container", ((AzureStorageConnectorProvider) s3StorageConnectorProvider).getContainer());
+    assertEquals("prefix", ((AzureStorageConnectorProvider) s3StorageConnectorProvider).getPrefix());
+    assertEquals(new File("/tmp"),
+                 ((AzureStorageConnectorProvider) s3StorageConnectorProvider).getTempDir());
   }
 
 
@@ -71,10 +75,10 @@ public void createAzureStorageFactoryWithMissingPrefix()
     properties.setProperty(CUSTOM_NAMESPACE + ".type", "s3");
     properties.setProperty(CUSTOM_NAMESPACE + ".container", "container");
     properties.setProperty(CUSTOM_NAMESPACE + ".tempDir", "/tmp");
-    Assert.assertThrows(
-        "Missing required creator property 'prefix'",
+    assertThrows(
         ProvisionException.class,
-        () -> getStorageConnectorProvider(properties)
+        () -> getStorageConnectorProvider(properties),
+        "Missing required creator property 'prefix'"
     );
   }
 
@@ -87,10 +91,10 @@ public void createAzureStorageFactoryWithMissingContainer()
     properties.setProperty(CUSTOM_NAMESPACE + ".type", "azure");
     properties.setProperty(CUSTOM_NAMESPACE + ".prefix", "prefix");
     properties.setProperty(CUSTOM_NAMESPACE + ".tempDir", "/tmp");
-    Assert.assertThrows(
-        "Missing required creator property 'container'",
+    assertThrows(
        ProvisionException.class,
-        () -> getStorageConnectorProvider(properties)
+        () -> getStorageConnectorProvider(properties),
+        "Missing required creator property 'container'"
     );
   }
 
@@ -103,10 +107,10 @@ public void createAzureStorageFactoryWithMissingTempDir()
     properties.setProperty(CUSTOM_NAMESPACE + ".container", "container");
     properties.setProperty(CUSTOM_NAMESPACE + ".prefix", "prefix");
 
-    Assert.assertThrows(
-        "Missing required creator property 'tempDir'",
+    assertThrows(
         ProvisionException.class,
-        () -> getStorageConnectorProvider(properties)
+        () -> getStorageConnectorProvider(properties),
+        "Missing required creator property 'tempDir'"
     );
   }
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorTest.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorTest.java
index 17cee1855383..5219f2b5962d 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorTest.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/output/AzureStorageConnectorTest.java
@@ -28,41 +28,40 @@
 import org.apache.druid.storage.azure.AzureStorage;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
 public class AzureStorageConnectorTest
 {
-
   private static final String CONTAINER = "CONTAINER";
   private static final String PREFIX = "P/R/E/F/I/X";
   public static final String TEST_FILE = "test.csv";
 
-  @Rule
-  public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
   private StorageConnector storageConnector;
   private final AzureStorage azureStorage = EasyMock.createMock(AzureStorage.class);
 
-  @Before
-  public void setup() throws IOException
+  @BeforeEach
+  public void setup(@TempDir File tempDir)
  {
     storageConnector = new AzureStorageConnector(
-        new AzureOutputConfig(CONTAINER, PREFIX, temporaryFolder.newFolder(), null, null),
+        new AzureOutputConfig(CONTAINER, PREFIX, tempDir, null, null),
         azureStorage
     );
   }
 
-
   @Test
   public void testPathExistsSuccess() throws BlobStorageException, IOException
   {
@@ -72,9 +71,9 @@ public void testPathExistsSuccess() throws BlobStorageException, IOException
     EasyMock.expect(azureStorage.getBlockBlobExists(EasyMock.capture(bucket), EasyMock.capture(path), EasyMock.anyInt()))
             .andReturn(true);
     EasyMock.replay(azureStorage);
-    Assert.assertTrue(storageConnector.pathExists(TEST_FILE));
-    Assert.assertEquals(CONTAINER, bucket.getValue());
-    Assert.assertEquals(PREFIX + "/" + TEST_FILE, path.getValue());
+    assertTrue(storageConnector.pathExists(TEST_FILE));
+    assertEquals(CONTAINER, bucket.getValue());
+    assertEquals(PREFIX + "/" + TEST_FILE, path.getValue());
     EasyMock.verify(azureStorage);
   }
 
@@ -87,9 +86,9 @@ public void testPathExistsNotFound() throws BlobStorageException, IOException
     EasyMock.expect(azureStorage.getBlockBlobExists(EasyMock.capture(bucket), EasyMock.capture(path), EasyMock.anyInt()))
             .andReturn(false);
     EasyMock.replay(azureStorage);
-    Assert.assertFalse(storageConnector.pathExists(TEST_FILE));
-    Assert.assertEquals(CONTAINER, bucket.getValue());
-    Assert.assertEquals(PREFIX + "/" + TEST_FILE, path.getValue());
+    assertFalse(storageConnector.pathExists(TEST_FILE));
+    assertEquals(CONTAINER, bucket.getValue());
+    assertEquals(PREFIX + "/" + TEST_FILE, path.getValue());
     EasyMock.verify(azureStorage);
   }
 
@@ -114,9 +113,9 @@ public void testRead() throws BlobStorageException, IOException
     EasyMock.replay(azureStorage);
     InputStream is = storageConnector.read(TEST_FILE);
     byte[] dataBytes = new byte[data.length()];
-    Assert.assertEquals(data.length(), is.read(dataBytes));
-    Assert.assertEquals(-1, is.read());
-    Assert.assertEquals(data, new String(dataBytes, StandardCharsets.UTF_8));
+    assertEquals(data.length(), is.read(dataBytes));
+    assertEquals(-1, is.read());
+    assertEquals(data, new String(dataBytes, StandardCharsets.UTF_8));
     EasyMock.reset(azureStorage);
   }
 
@@ -142,9 +141,9 @@ public void testReadRange() throws BlobStorageException, IOException
       InputStream is = storageConnector.readRange(TEST_FILE, start, length);
       byte[] dataBytes = new byte[((Long) length).intValue()];
-      Assert.assertEquals(length, is.read(dataBytes));
-      Assert.assertEquals(-1, is.read());
-      Assert.assertEquals(dataQueried, new String(dataBytes, StandardCharsets.UTF_8));
+      assertEquals(length, is.read(dataBytes));
+      assertEquals(-1, is.read());
+      assertEquals(dataQueried, new String(dataBytes, StandardCharsets.UTF_8));
       EasyMock.reset(azureStorage);
     }
   }
@@ -163,8 +162,8 @@ public void testDeleteSinglePath() throws BlobStorageException, IOException
     )).andReturn(true);
     EasyMock.replay(azureStorage);
     storageConnector.deleteFile(TEST_FILE);
-    Assert.assertEquals(CONTAINER, containerCapture.getValue());
-    Assert.assertEquals(Collections.singletonList(PREFIX + "/" + TEST_FILE), pathsCapture.getValue());
+    assertEquals(CONTAINER, containerCapture.getValue());
+    assertEquals(Collections.singletonList(PREFIX + "/" + TEST_FILE), pathsCapture.getValue());
     EasyMock.reset(azureStorage);
   }
 
@@ -181,8 +180,8 @@ public void testDeleteMultiplePaths() throws BlobStorageException, IOException
     )).andReturn(true);
     EasyMock.replay(azureStorage);
     storageConnector.deleteFiles(ImmutableList.of(TEST_FILE + "_1.part", TEST_FILE + "_2.part"));
-    Assert.assertEquals(CONTAINER, containerCapture.getValue());
-    Assert.assertEquals(
+    assertEquals(CONTAINER, containerCapture.getValue());
+    assertEquals(
         ImmutableList.of(
             PREFIX + "/" + TEST_FILE + "_1.part",
             PREFIX + "/" + TEST_FILE + "_2.part"
@@ -200,7 +199,7 @@ public void testListDir() throws BlobStorageException, IOException
             .andReturn(ImmutableList.of(PREFIX + "/x/y/z/" + TEST_FILE, PREFIX + "/p/q/r/" + TEST_FILE));
     EasyMock.replay(azureStorage);
     List<String> ret = Lists.newArrayList(storageConnector.listDir(""));
-    Assert.assertEquals(ImmutableList.of("x/y/z/" + TEST_FILE, "p/q/r/" + TEST_FILE), ret);
+    assertEquals(ImmutableList.of("x/y/z/" + TEST_FILE, "p/q/r/" + TEST_FILE), ret);
     EasyMock.reset(azureStorage);
   }
 
@@ -212,7 +211,7 @@ public void test_deleteFile_blobStorageException()
     azureStorage.batchDeleteFiles(EasyMock.anyString(), EasyMock.anyObject(), EasyMock.anyInt());
     EasyMock.expectLastCall().andThrow(new BlobStorageException("error", mockHttpResponse, null));
     EasyMock.replay(azureStorage);
-    Assert.assertThrows(IOException.class, () -> storageConnector.deleteFile("file"));
+    assertThrows(IOException.class, () -> storageConnector.deleteFile("file"));
     EasyMock.verify(azureStorage);
     EasyMock.reset(azureStorage);
   }
 
@@ -225,7 +224,7 @@ public void test_deleteFiles_blobStorageException()
     azureStorage.batchDeleteFiles(EasyMock.anyString(), EasyMock.anyObject(), EasyMock.anyInt());
     EasyMock.expectLastCall().andThrow(new BlobStorageException("error", mockHttpResponse, null));
     EasyMock.replay(azureStorage);
-    Assert.assertThrows(IOException.class, () -> storageConnector.deleteFiles(ImmutableList.of()));
+    assertThrows(IOException.class, () -> storageConnector.deleteFiles(ImmutableList.of()));
     EasyMock.verify(azureStorage);
     EasyMock.reset(azureStorage);
   }
diff --git a/website/redirects.js b/website/redirects.js
index bcbe53cb51e3..e2bb4da7b68c 100644
--- a/website/redirects.js
+++ b/website/redirects.js
@@ -148,8 +148,11 @@ const Redirects=[
     "to": "/docs/latest/querying/scan-query"
   },
   {
-    "from": "/docs/latest/development/extensions-core/namespaced-lookup.html",
-    "to": "/docs/latest/development/extensions-core/lookups-cached-global"
+    "from": [
+      "/docs/latest/development/extensions-core/namespaced-lookup.html",
+      "/docs/latest/development/extensions-core/lookups-cached-global"
+    ],
+    "to": "/docs/latest/querying/lookups-cached-global"
   },
   {
     "from": "/docs/latest/development/indexer.html",
diff --git a/website/sidebars.json b/website/sidebars.json
index 9fa1e14fff79..1f2ea873fcbb 100644
--- a/website/sidebars.json
+++ b/website/sidebars.json
@@ -185,7 +185,16 @@
       "items": [
         "querying/datasource",
         "querying/joins",
-        "querying/lookups",
+        {"type": "category",
+          "label": "Lookups",
+          "link": {
+            "type": "doc",
+            "id": "querying/lookups"
+          },
+          "items":[
+            "querying/lookups-cached-global",
+            "querying/kafka-extraction-namespace"
+          ]},
         "querying/multi-value-dimensions",
         "querying/arrays",
         "querying/nested-columns",