Rename Hive connector "hive-hadoop2" to "hive"
electrum committed Jun 1, 2021
1 parent 67f06d1 commit 56bbeeb
Showing 34 changed files with 133 additions and 92 deletions.
@@ -22,7 +22,7 @@
import io.airlift.log.Logging;
import io.trino.client.ClientSelectedRole;
import io.trino.plugin.blackhole.BlackHolePlugin;
import io.trino.plugin.hive.HiveHadoop2Plugin;
import io.trino.plugin.hive.HivePlugin;
import io.trino.server.testing.TestingTrinoServer;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorTableMetadata;
@@ -86,8 +86,8 @@ public void setupServer()
server = TestingTrinoServer.builder()
.setAdditionalModule(systemTables)
.build();
server.installPlugin(new HiveHadoop2Plugin());
server.createCatalog("hive", "hive-hadoop2", ImmutableMap.<String, String>builder()
server.installPlugin(new HivePlugin());
server.createCatalog("hive", "hive", ImmutableMap.<String, String>builder()
.put("hive.metastore", "file")
.put("hive.metastore.catalog.dir", server.getBaseDataDir().resolve("hive").toAbsolutePath().toString())
.put("hive.security", "sql-standard")
@@ -18,7 +18,7 @@
import com.google.common.collect.Multiset;
import io.airlift.log.Logging;
import io.trino.plugin.blackhole.BlackHolePlugin;
import io.trino.plugin.hive.HiveHadoop2Plugin;
import io.trino.plugin.hive.HivePlugin;
import io.trino.plugin.tpch.TpchMetadata;
import io.trino.plugin.tpch.TpchPlugin;
import io.trino.server.BasicQueryInfo;
@@ -114,8 +114,8 @@ public void setupServer()
server.installPlugin(new BlackHolePlugin());
server.createCatalog("blackhole", "blackhole");

server.installPlugin(new HiveHadoop2Plugin());
server.createCatalog("hive", "hive-hadoop2", ImmutableMap.<String, String>builder()
server.installPlugin(new HivePlugin());
server.createCatalog("hive", "hive", ImmutableMap.<String, String>builder()
.put("hive.metastore", "file")
.put("hive.metastore.catalog.dir", server.getBaseDataDir().resolve("hive").toAbsolutePath().toString())
.put("hive.security", "sql-standard")
@@ -106,7 +106,7 @@ private static void testServer(String rpmHostPath, String expectedJavaVersion)
"echo CONFIG_ENV[NODE_ID]=test-node-id-injected-via-env >> /etc/trino/env.sh\n" +
"sed -i \"s/^node.id=.*/node.id=\\${ENV:NODE_ID}/g\" /etc/trino/node.properties\n" +
"cat > /etc/trino/catalog/hive.properties <<\"EOT\"\n" +
"connector.name=hive-hadoop2\n" +
"connector.name=hive\n" +
"hive.metastore.uri=thrift://localhost:${ENV:HMS_PORT}\n" +
"EOT\n" +
// create JMX catalog file
2 changes: 1 addition & 1 deletion core/trino-server/src/main/provisio/presto.xml
@@ -68,7 +68,7 @@
</artifact>
</artifactSet>

<artifactSet to="plugin/hive-hadoop2">
<artifactSet to="plugin/hive">
<artifact id="${project.groupId}:trino-hive-hadoop2:zip:${project.version}">
<unpack />
</artifact>
2 changes: 1 addition & 1 deletion docs/src/main/sphinx/connector/hive-alluxio.rst
@@ -79,7 +79,7 @@ the following:

.. code-block:: text
connector.name=hive-hadoop2
connector.name=hive
hive.metastore=alluxio
hive.metastore.alluxio.master.address=HOSTNAME:PORT
2 changes: 1 addition & 1 deletion docs/src/main/sphinx/connector/hive-caching.rst
@@ -78,7 +78,7 @@ can be activated in the catalog properties file:

.. code-block:: text
connector.name=hive-hadoop2
connector.name=hive
hive.cache.enabled=true
hive.cache.location=/opt/hive-cache
4 changes: 2 additions & 2 deletions docs/src/main/sphinx/connector/hive.rst
@@ -174,13 +174,13 @@ Configuration
-------------

Create ``etc/catalog/hive.properties`` with the following contents
to mount the ``hive-hadoop2`` connector as the ``hive`` catalog,
to mount the ``hive`` connector as the ``hive`` catalog,
replacing ``example.net:9083`` with the correct host and port
for your Hive metastore Thrift service:

.. code-block:: text
connector.name=hive-hadoop2
connector.name=hive
hive.metastore.uri=thrift://example.net:9083
Multiple Hive clusters
5 changes: 5 additions & 0 deletions plugin/trino-hive-hadoop2/pom.xml
@@ -23,6 +23,11 @@
<artifactId>trino-hive</artifactId>
</dependency>

<dependency>
<groupId>io.airlift</groupId>
<artifactId>log</artifactId>
</dependency>

<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

This file was deleted.

@@ -0,0 +1,51 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.hive;

import com.google.common.collect.ImmutableList;
import io.airlift.log.Logger;
import io.trino.spi.Plugin;
import io.trino.spi.connector.Connector;
import io.trino.spi.connector.ConnectorContext;
import io.trino.spi.connector.ConnectorFactory;

import java.util.Map;

public class HivePlugin
implements Plugin
{
private static final Logger log = Logger.get(HivePlugin.class);

@Override
public Iterable<ConnectorFactory> getConnectorFactories()
{
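        // Expose the connector under its new "hive" name and keep "hive-hadoop2" as a deprecated alias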
return ImmutableList.of(new HiveConnectorFactory("hive"), new LegacyHiveConnectorFactory());
}

private static class LegacyHiveConnectorFactory
extends HiveConnectorFactory
{
public LegacyHiveConnectorFactory()
{
super("hive-hadoop2");
}

@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
log.warn("Connector name 'hive-hadoop2' is deprecated. Use 'hive' instead.");
return super.create(catalogName, config, context);
}
}
}
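With the legacy factory above, a catalog that still references the deprecated name continues to load; the only change is the warning logged when the connector is created. A minimal sketch of such a catalog file (hypothetical file name and metastore address, not part of this commit):

connector.name=hive-hadoop2
hive.metastore.uri=thrift://example.net:9083

Switching the catalog to the new name only requires changing the first line to connector.name=hive.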
@@ -14,7 +14,6 @@
package io.trino.plugin.hive;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.qubole.rubix.core.CachingFileSystem;
import io.trino.spi.Plugin;
import io.trino.spi.connector.Connector;
@@ -30,7 +29,8 @@
import java.nio.file.Files;
import java.nio.file.Path;

import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.MoreCollectors.toOptional;
import static com.google.common.collect.Streams.stream;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.plugin.hive.HiveSessionProperties.InsertExistingPartitionsBehavior.APPEND;
@@ -40,7 +40,7 @@
import static org.assertj.core.api.Assertions.assertThatThrownBy;

@Test(singleThreaded = true) // see @BeforeMethod
public class TestHiveHadoop2Plugin
public class TestHivePlugin
{
private Path tempDirectory;

@@ -69,17 +69,30 @@ public void deinitializeRubix()
@Test
public void testCreateConnector()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory factory = getHiveConnectorFactory();

// simplest possible configuration
factory.create("test", ImmutableMap.of("hive.metastore.uri", "thrift://foo:1234"), new TestingConnectorContext()).shutdown();
}

@Test
public void testCreateConnectorLegacyName()
{
Plugin plugin = new HivePlugin();
ConnectorFactory factory = stream(plugin.getConnectorFactories())
.filter(x -> x.getName().equals("hive-hadoop2"))
.collect(toOptional())
.orElseThrow();

// simplest possible configuration
factory.create("test", ImmutableMap.of("hive.metastore.uri", "thrift://foo:1234"), new TestingConnectorContext()).shutdown();
}

@Test
public void testThriftMetastore()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory factory = getHiveConnectorFactory();

factory.create(
"test",
ImmutableMap.of(
@@ -92,8 +105,8 @@ public void testThriftMetastore()
@Test
public void testGlueMetastore()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory factory = getHiveConnectorFactory();

factory.create(
"test",
ImmutableMap.of(
@@ -113,8 +126,7 @@ public void testGlueMetastore()
@Test
public void testRecordingMetastore()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory factory = getHiveConnectorFactory();

factory.create(
"test",
@@ -140,8 +152,7 @@ public void testS3SecurityMappingAndHiveCachingMutuallyExclusive()
throws IOException
{
Path mappingConfig = Files.createTempFile(null, null);
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

assertThatThrownBy(() -> connectorFactory.create(
"test",
@@ -158,8 +169,7 @@ public void testS3SecurityMappingAndHiveCachingMutuallyExclusive()
@Test
public void testGcsAccessTokenAndHiveCachingMutuallyExclusive()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

assertThatThrownBy(() -> connectorFactory.create(
"test",
@@ -176,8 +186,7 @@ public void testGcsAccessTokenAndHiveCachingMutuallyExclusive()
@Test
public void testImmutablePartitionsAndInsertOverwriteMutuallyExclusive()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

assertThatThrownBy(() -> connectorFactory.create(
"test",
@@ -193,8 +202,7 @@ public void testImmutablePartitionsAndInsertOverwriteMutuallyExclusive()
@Test
public void testInsertOverwriteIsSetToErrorWhenImmutablePartitionsIsTrue()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

Connector connector = connectorFactory.create(
"test",
@@ -210,8 +218,7 @@ public void testInsertOverwriteIsSetToErrorWhenImmutablePartitionsIsTrue()
@Test
public void testInsertOverwriteIsSetToAppendWhenImmutablePartitionsIsFalseByDefault()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

Connector connector = connectorFactory.create(
"test",
@@ -235,8 +242,7 @@ private Object getDefaultValueInsertExistingPartitionsBehavior(Connector connect
@Test
public void testHdfsImpersonationAndHiveCachingMutuallyExclusive()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

assertThatThrownBy(() -> connectorFactory.create(
"test",
@@ -253,8 +259,7 @@ public void testHdfsImpersonationAndHiveCachingMutuallyExclusive()
@Test
public void testRubixCache()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

connectorFactory.create(
"test",
@@ -270,8 +275,7 @@ public void testRubixCache()
@Test
public void testRubixCacheWithNonExistingCacheDirectory()
{
Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories());
ConnectorFactory connectorFactory = getHiveConnectorFactory();

assertThatThrownBy(() -> connectorFactory.create(
"test",
@@ -304,4 +308,13 @@ public void testRubixCacheWithNonExistingCacheDirectory()
new TestingConnectorContext())
.shutdown();
}

private static ConnectorFactory getHiveConnectorFactory()
{
Plugin plugin = new HivePlugin();
return stream(plugin.getConnectorFactories())
.filter(factory -> factory.getName().equals("hive"))
.collect(toOptional())
.orElseThrow();
}
}
@@ -16,7 +16,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.trino.Session;
import io.trino.plugin.hive.HiveHadoop2Plugin;
import io.trino.plugin.hive.HivePlugin;
import io.trino.spi.security.Identity;
import io.trino.spi.security.SelectedRole;
import io.trino.testing.DistributedQueryRunner;
@@ -61,9 +61,9 @@ public void createQueryRunner()
.builder(ADMIN)
.setNodeCount(1)
.build();
queryRunner.installPlugin(new HiveHadoop2Plugin());
queryRunner.installPlugin(new HivePlugin());
temporaryDirectory = createTempDir();
queryRunner.createCatalog(CATALOG, "hive-hadoop2", ImmutableMap.of(
queryRunner.createCatalog(CATALOG, "hive", ImmutableMap.of(
"hive.metastore", "file",
"hive.metastore.catalog.dir", temporaryDirectory.toURI().toString(),
"hive.security", "sql-standard",
@@ -1,4 +1,4 @@
connector.name=hive-hadoop2
connector.name=hive
hive.metastore.uri=thrift://hadoop-master:9083
hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml
hive.allow-add-column=true
@@ -1,4 +1,4 @@
connector.name=hive-hadoop2
connector.name=hive
hive.metastore.uri=thrift://hadoop-master:9083
hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml
hive.allow-drop-table=true
