diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java index 42cc87c1bb60..9764872a77a3 100644 --- a/beeline/src/java/org/apache/hive/beeline/Commands.java +++ b/beeline/src/java/org/apache/hive/beeline/Commands.java @@ -773,7 +773,7 @@ public HiveConf getHiveConf(boolean call) { } public HiveConf getHiveConfHelper(boolean call) { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); BufferedRows rows = getConfInternal(call); while (rows != null && rows.hasNext()) { addConf((Rows.Row) rows.next(), conf); diff --git a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java index 5ea4d11b7abd..7ad0cd49d5e6 100644 --- a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java +++ b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java @@ -279,7 +279,7 @@ private void verifyCMD(String CMD, String keywords, OutputStream os, String[] op @BeforeClass public static void init(){ // something changed scratch dir permissions, so test can't execute - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname); File file = new File(scratchDir); if (file.exists()) { diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index d0d37b506ab2..76d525c29648 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -740,7 +740,7 @@ public int run(String[] args) throws Exception { logInitDetailMessage = e.getMessage(); } - CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class)); + CliSessionState ss = new CliSessionState(HiveConf.create(SessionState.class)); ss.in = System.in; try { ss.out = diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java 
b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java index 37448fee2138..127f0428147d 100644 --- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java +++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java @@ -130,7 +130,7 @@ public void testThatCliDriverDoesNotStripComments() throws Exception { SessionStream err = new SessionStream(dataErr); System.setErr(err); - CliSessionState ss = new CliSessionState(new HiveConf()); + CliSessionState ss = new CliSessionState(HiveConf.create()); ss.out = out; ss.err = err; @@ -226,7 +226,7 @@ public void testRun() throws Exception { File historyFile = new File(historyDirectory + File.separator + ".hivehistory"); historyFile.delete(); } - HiveConf configuration = new HiveConf(); + HiveConf configuration = HiveConf.create(); configuration.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true); PrintStream oldOut = System.out; ByteArrayOutputStream dataOut = new ByteArrayOutputStream(); @@ -260,7 +260,7 @@ public void testRun() throws Exception { @Test public void testQuit() throws Exception { - CliSessionState ss = new CliSessionState(new HiveConf()); + CliSessionState ss = new CliSessionState(HiveConf.create()); ss.err = new SessionStream(System.err); ss.out = new SessionStream(System.out); @@ -290,7 +290,7 @@ public void testQuit() throws Exception { @Test public void testProcessSelectDatabase() throws Exception { - CliSessionState sessinState = new CliSessionState(new HiveConf()); + CliSessionState sessinState = new CliSessionState(HiveConf.create()); CliSessionState.start(sessinState); ByteArrayOutputStream data = new ByteArrayOutputStream(); sessinState.err = new SessionStream(data); @@ -325,7 +325,7 @@ public void testprocessInitFiles() throws Exception { FileUtils.write(homeFile, "-- init hive file for test "); setEnv("HIVE_HOME", homeFile.getParentFile().getParentFile().getAbsolutePath()); setEnv("HIVE_CONF_DIR", homeFile.getParentFile().getAbsolutePath()); - CliSessionState 
sessionState = new CliSessionState(new HiveConf()); + CliSessionState sessionState = new CliSessionState(HiveConf.create()); ByteArrayOutputStream data = new ByteArrayOutputStream(); diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java index c159142954f5..5d834cd27b62 100644 --- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java +++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java @@ -34,7 +34,7 @@ public class TestCliSessionState { */ @Test public void testgetDbName() throws Exception { - SessionState.start(new HiveConf()); + SessionState.start(HiveConf.create()); assertEquals(Warehouse.DEFAULT_DATABASE_NAME, SessionState.get().getCurrentDatabase()); } diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java b/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java index ecc7fdc94525..89306e78d3a4 100644 --- a/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java +++ b/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java @@ -49,7 +49,7 @@ public void testOptionsProcessor() { assertEquals("D", processor.getHiveVariables().get("C")); assertEquals("Y", processor.getHiveVariables().get("X")); - CliSessionState sessionState = new CliSessionState(new HiveConf()); + CliSessionState sessionState = new CliSessionState(HiveConf.create()); // stage 2 processor.process_stage2(sessionState); assertEquals("testDb", sessionState.database); @@ -69,7 +69,7 @@ public void testFiles() { String[] args = {"-i", "f1", "-i", "f2","-f", "fileName",}; assertTrue(processor.process_stage1(args)); - CliSessionState sessionState = new CliSessionState(new HiveConf()); + CliSessionState sessionState = new CliSessionState(HiveConf.create()); processor.process_stage2(sessionState); assertEquals("fileName", sessionState.fileName); assertEquals(2, sessionState.initFiles.size()); diff --git 
a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index afe6607298a9..572496f42054 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -93,7 +93,7 @@ public static String initHiveExecLog4j() private static String initHiveLog4jCommon(ConfVars confVarName) throws LogInitializationException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); return initHiveLog4jCommon(conf, confVarName); } diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index ea7c56d10f98..6ffb19c8a76d 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -477,6 +477,8 @@ public static final Set getLlapDaemonConfVars() { * in the underlying Hadoop configuration. */ public static enum ConfVars { + HIVE_CONF_PROPERTY_TRACKING("hive.conf.property.tracking", false, + "Whether to enable property tracking with TrackedHiveConf objects"), MSC_CACHE_ENABLED("hive.metastore.client.cache.v2.enabled", true, "This property enables a Caffeine Cache for Metastore client"), MSC_CACHE_MAX_SIZE("hive.metastore.client.cache.v2.maxSize", "1Gb", new SizeValidator(), @@ -6331,29 +6333,76 @@ public ZooKeeperHiveHelper getZKConfig() { .trustStorePassword(trustStorePassword).build(); } - public HiveConf() { + public static HiveConf create() { + HiveConf conf = new HiveConf(); + if (conf.isPropertyTrackingEnabled()) { + return new TrackedHiveConf(); + } + return conf; + } + + public static HiveConf create(Class cls) { + HiveConf conf = new HiveConf(cls); + if (conf.isPropertyTrackingEnabled()) { + return new TrackedHiveConf(cls); + } + return conf; + } + + public static HiveConf create(Configuration other, Class cls) { + if 
(other.getBoolean(ConfVars.HIVE_CONF_PROPERTY_TRACKING.varname, false)) { + return new TrackedHiveConf(other, cls); + } + return new HiveConf(other, cls); + } + + public static HiveConf create(HiveConf other) { + if (other.isPropertyTrackingEnabled()) { + return new TrackedHiveConf(other); + } + return new HiveConf(other); + } + + private boolean isPropertyTrackingEnabled() { + return getBoolVar(ConfVars.HIVE_CONF_PROPERTY_TRACKING); + } + + /** + * @deprecated This constructor will become private eventually; Use {@link #create()} instead. + */ + @Deprecated + protected HiveConf() { super(); initialize(this.getClass()); } - public HiveConf(Class cls) { + /** + * @deprecated This constructor will become private eventually; Use {@link #create(Class)} instead. + */ + @Deprecated + protected HiveConf(Class cls) { super(); initialize(cls); } - public HiveConf(Configuration other, Class cls) { + /** + * @deprecated This constructor will become private eventually; Use {@link #create(Configuration, Class)} instead. + */ + @Deprecated + protected HiveConf(Configuration other, Class cls) { super(other); initialize(cls); } /** - * Copy constructor + * @deprecated This constructor will become private eventually; Use {@link #create(HiveConf)} instead. 
*/ - public HiveConf(HiveConf other) { + @Deprecated + protected HiveConf(HiveConf other) { super(other); hiveJar = other.hiveJar; auxJars = other.auxJars; - origProp = (Properties)other.origProp.clone(); + origProp = (Properties) other.origProp.clone(); restrictList.addAll(other.restrictList); hiddenSet.addAll(other.hiddenSet); modWhiteListPattern = other.modWhiteListPattern; diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java index 179ee83b1093..a1f16fd9f631 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java @@ -77,7 +77,7 @@ public static StringBuilder dumpConfig(HiveConf conf) { sb.append("hiveServer2SiteUrl=").append(HiveConf.getHiveServer2SiteLocation()).append('\n'); sb.append("hivemetastoreSiteUrl=").append(HiveConf.getMetastoreSiteLocation()).append('\n'); dumpConfig(conf, sb); - return sb.append("END========\"new HiveConf()\"========\n"); + return sb.append("END========\"HiveConf.create()\"========\n"); } /** diff --git a/common/src/java/org/apache/hadoop/hive/conf/TrackedHiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/TrackedHiveConf.java new file mode 100644 index 000000000000..7d5eb9141c3c --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/conf/TrackedHiveConf.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.conf; + +import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * TrackedHiveConf is a HiveConf object where property changes are logged. + */ +public class TrackedHiveConf extends HiveConf { + private static final Logger LOG = LoggerFactory.getLogger(TrackedHiveConf.class); + + protected TrackedHiveConf() { + super(); + } + + @Deprecated + protected TrackedHiveConf(Class cls) { + super(cls); + } + + @Deprecated + protected TrackedHiveConf(Configuration other, Class cls) { + super(other, cls); + } + + @Deprecated + protected TrackedHiveConf(HiveConf other) { + super(other); + } + + @Override + public void set(String name, String value, String source) { + LOG.info("'{}' changed: '{}' -> '{}' (thread: {})", name, get(name), value, Thread.currentThread().getId()); + LOG.info("Change stack", new RuntimeException("Fake exception, only for easy stack logging (set)")); + super.set(name, value, source); + } + + @Override + public void unset(String name) { + LOG.info("'{}' unset, current value: '{}' (thread: {})", name, get(name), Thread.currentThread().getId()); + LOG.info("Unset stack", new RuntimeException("Fake exception, only for easy stack logging (unset)")); + super.unset(name); + } + + @Override + public void clear() { + LOG.info("Configuration is cleared (thread: {})", Thread.currentThread().getId()); + LOG.info("Clear stack", new RuntimeException("Fake exception, only for easy stack logging (clear)")); + super.clear(); + } +} diff --git 
a/common/src/java/org/apache/hive/http/HttpServer.java b/common/src/java/org/apache/hive/http/HttpServer.java index 2d73a920d47f..8ab72f25f37e 100644 --- a/common/src/java/org/apache/hive/http/HttpServer.java +++ b/common/src/java/org/apache/hive/http/HttpServer.java @@ -186,7 +186,7 @@ public HttpServer build() throws IOException { } public Builder setConf(HiveConf origConf) { - this.conf = new HiveConf(origConf); + this.conf = HiveConf.create(origConf); origConf.stripHiddenConfigurations(conf); setContextAttribute(CONF_CONTEXT_ATTRIBUTE, conf); return this; diff --git a/common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java b/common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java index 287b8a2b9454..1666e6a0e71f 100644 --- a/common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java +++ b/common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java @@ -127,7 +127,7 @@ public void getParentRegardlessOfScheme_root() { @Test public void testGetJarFilesByPath() { - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); File tmpDir = Files.createTempDir(); String jarFileName1 = tmpDir.getAbsolutePath() + File.separator + "a.jar"; String jarFileName2 = tmpDir.getAbsolutePath() + File.separator + "b.jar"; @@ -220,7 +220,7 @@ private void verifyIfParentsContainPath(Path key, Set parents, boolean exp public void testCopyWithDistcp() throws IOException { Path copySrc = new Path("copySrc"); Path copyDst = new Path("copyDst"); - HiveConf conf = new HiveConf(TestFileUtils.class); + HiveConf conf = HiveConf.create(TestFileUtils.class); FileSystem mockFs = mock(FileSystem.class); when(mockFs.getUri()).thenReturn(URI.create("hdfs:///")); @@ -241,7 +241,7 @@ public void testCopyWithDistcp() throws IOException { public void testCopyWithDistCpAs() throws IOException { Path copySrc = new Path("copySrc"); Path copyDst = new Path("copyDst"); - HiveConf conf = new HiveConf(TestFileUtils.class); + HiveConf conf = 
HiveConf.create(TestFileUtils.class); FileSystem fs = copySrc.getFileSystem(conf); @@ -290,7 +290,7 @@ public void testMakeRelative() { @Test public void testListStatusIterator() throws Exception { - MockFileSystem fs = new MockFileSystem(new HiveConf(), + MockFileSystem fs = new MockFileSystem(HiveConf.create(), new MockFile("mock:/tmp/.staging", 500, new byte[0]), new MockFile("mock:/tmp/_dummy", 500, new byte[0]), new MockFile("mock:/tmp/dummy", 500, new byte[0])); diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java index b2f1bb3abc8a..63278516fe1d 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java @@ -36,7 +36,7 @@ public class TestHiveAsyncLogging { // this test requires disruptor jar in classpath @Test public void testAsyncLoggingInitialization() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_ASYNC_LOG_ENABLED, false); LogUtils.initHiveLog4jCommon(conf, ConfVars.HIVE_LOG4J_FILE); Log4jContextFactory log4jContextFactory = (Log4jContextFactory) LogManager.getFactory(); diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java index decba6dbea0a..9f53c9b50c67 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java @@ -57,7 +57,7 @@ private void checkConfVar(ConfVars var, String expectedConfVarVal) throws Except } private void checkHiveConf(String name, String expectedHiveVal) throws Exception { - Assert.assertEquals(expectedHiveVal, new HiveConf().get(name)); + Assert.assertEquals(expectedHiveVal, HiveConf.create().get(name)); } @Test @@ -137,7 +137,7 @@ public void testToSizeBytes() throws Exception { @Test public 
void testHiddenConfig() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // check that a change to the hidden list should fail try { @@ -178,7 +178,7 @@ public void testHiddenConfig() throws Exception { @Test public void testEncodingDecoding() throws UnsupportedEncodingException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String query = "select blah, '\u0001' from random_table"; conf.setQueryString(query); Assert.assertEquals(URLEncoder.encode(query, "UTF-8"), conf.get(ConfVars.HIVEQUERYSTRING.varname)); @@ -229,7 +229,7 @@ public void testAdditionalConfigFiles() throws Exception{ File f2 = new File(newFileName); FileUtils.writeStringToFile(f2, testLdapString); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String val = conf.getVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN); Assert.assertEquals("b.com", val); //restore and clean up diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java index 1d0beaf58678..011526c3b98c 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java @@ -37,7 +37,7 @@ public void setUp() throws Exception { System.setProperty(ConfVars.HIVE_CONF_RESTRICTED_LIST.varname, ConfVars.HIVETESTMODEPREFIX.varname); - conf = new HiveConf(); + conf = HiveConf.create(); } /** diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java index 76bee5fa496b..dbc813306b4a 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java @@ -28,7 +28,7 @@ */ public class TestHiveConfUtil { - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); @Before 
public void init() { diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java index ab565051ae1b..afdab263b948 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java @@ -49,7 +49,7 @@ private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) LogUtils.initHiveLog4j(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); assertEquals(expectedLog4jTestPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE)); assertEquals(expectedLog4jExecPath, conf.getVar(ConfVars.HIVE_EXEC_LOG4J_FILE)); } diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java b/common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java index 364102040093..6a6916e76bf3 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java @@ -89,7 +89,7 @@ public void test_SubstituteLongSelfReference() { SystemVariables uut = new SystemVariables(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set(HiveConf.ConfVars.HIVE_QUERY_MAX_LENGTH.varname, "100Kb"); conf.set("myTestVariable", longStringWithReferences.toString()); diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java b/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java index e13e5d4efdb3..81b0532467c9 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java @@ -54,10 +54,10 @@ private static LocalMySource getMySource() { } }); - String v = variableSubstitution.substitute(new HiveConf(), "${a}"); + String v = variableSubstitution.substitute(HiveConf.create(), "${a}"); Assert.assertEquals("${a}", v); 
TestVariableSubstitution.getMySource().put("a", "b"); - v = variableSubstitution.substitute(new HiveConf(), "${a}"); + v = variableSubstitution.substitute(HiveConf.create(), "${a}"); Assert.assertEquals("b", v); } } diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseQueries.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseQueries.java index da69f0887f77..91303ab13eed 100644 --- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseQueries.java +++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseQueries.java @@ -52,7 +52,7 @@ public class TestHBaseQueries { * databases, etc.), otherwise those will be visible for subsequent test methods too. */ public TestHBaseQueries() throws Exception { - baseConf = new HiveConf(HBaseConfiguration.create(), TestHBaseQueries.class); + baseConf = HiveConf.create(HBaseConfiguration.create(), TestHBaseQueries.class); baseConf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, SQLStdHiveAuthorizerFactory.class.getName()); // set up Zookeeper @@ -99,7 +99,7 @@ public void after() throws Exception { @Test public void testRollbackDoesNotDeleteOriginTableWhenCTLTFails() throws CommandProcessorException { - HiveConf conf = new HiveConf(baseConf); + HiveConf conf = HiveConf.create(baseConf); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); conf.setBoolVar(HiveConf.ConfVars.HIVE_STRICT_MANAGED_TABLES, true); diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java index 25ddd3219f9a..79d826b113cd 100644 --- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java +++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java @@ -38,11 +38,11 @@ public class TestHBaseStorageHandler { @Test 
public void testHbaseConfigIsAddedToJobConf() { HBaseStorageHandler hbaseStorageHandler = new HBaseStorageHandler(); - hbaseStorageHandler.setConf(new JobConf(new HiveConf())); + hbaseStorageHandler.setConf(new JobConf(HiveConf.create())); TableDesc tableDesc = getHBaseTableDesc(); - JobConf jobConfToConfigure = new JobConf(new HiveConf()); + JobConf jobConfToConfigure = new JobConf(HiveConf.create()); Assert.assertTrue("hbase-site.xml is supposed to be present", jobConfToConfigure.get("hbase.some.fake.option.from.xml.file") == null); @@ -108,7 +108,7 @@ public void testGetUriForAuthWithTableAndEncodedColumns() throws URISyntaxExcept // full URL encoding turned on serdeParams.put("hbase.columns.mapping", "myco#lumn\ns"); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SECURITY_HBASE_URLENCODE_AUTHORIZATION_URI, true); uri = checkURIForAuth(createMockTable(serdeParams), new JobConf(hiveConf)); Assert.assertEquals("hbase://testhost:8765/my%23tbl/myco%23lumn%0As", uri.toString()); @@ -126,7 +126,7 @@ private TableDesc getHBaseTableDesc() { } private static URI checkURIForAuth(Table table) throws URISyntaxException { - return checkURIForAuth(table, new JobConf(new HiveConf())); + return checkURIForAuth(table, new JobConf(HiveConf.create())); } private static URI checkURIForAuth(Table table, JobConf jobConf) throws URISyntaxException { diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java index 70f466adb3ad..2a89dab80e30 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java @@ -68,7 +68,7 @@ public static void main(String[] args) { } LOG = LoggerFactory.getLogger(HCatCli.class); - CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class)); + CliSessionState ss = new 
CliSessionState(HiveConf.create(SessionState.class)); ss.in = System.in; try { ss.out = new SessionStream(System.out, true, "UTF-8"); diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java index b0b3276893e9..96c0b38bbe18 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java @@ -648,7 +648,7 @@ private static Properties getHiveSiteOverrides(Configuration hiveSite, Configura public static HiveConf getHiveConf(Configuration conf) throws IOException { - HiveConf hiveConf = new HiveConf(conf, HCatUtil.class); + HiveConf hiveConf = HiveConf.create(conf, HCatUtil.class); //copy the hive conf into the job conf and restore it //in the backend context diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java index d786e3c4822e..b4dd57c37af3 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java @@ -240,7 +240,7 @@ void closeAllClientsQuietly() { } public void cleanup() { - // TODO: periodically reload a new HiveConf to check if stats reporting is enabled. + // TODO: periodically reload a new HiveConf (via HiveConf.create()) to check if stats reporting is enabled. 
hiveCache.cleanUp(); if (enableStats) { diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java index 6280f7b992ef..2e5811615d94 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java @@ -98,7 +98,7 @@ private static void populateInputJobInfo( if (conf != null) { hiveConf = HCatUtil.getHiveConf(conf); } else { - hiveConf = new HiveConf(HCatInputFormat.class); + hiveConf = HiveConf.create(HCatInputFormat.class); } client = HCatUtil.getHiveMetastoreClient(hiveConf); Table table = HCatUtil.getTable(client, inputJobInfo.getDatabaseName(), diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/oozie/JavaAction.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/oozie/JavaAction.java index 97b7c75f26e6..08732f3e90cc 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/oozie/JavaAction.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/oozie/JavaAction.java @@ -30,7 +30,7 @@ public class JavaAction { public static void main(String[] args) throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml"))); conf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName()); conf.setBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL, true); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/TestHCatAuthUtil.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/TestHCatAuthUtil.java index 8282c68ff5f5..cb7516e2bc5c 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/TestHCatAuthUtil.java +++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/TestHCatAuthUtil.java @@ -54,7 +54,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC */ @Test public void authEnabledV1Auth() throws Exception { - HiveConf hcatConf = new HiveConf(this.getClass()); + HiveConf hcatConf = HiveConf.create(this.getClass()); hcatConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); hcatConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, StorageBasedAuthorizationProvider.class.getName()); SessionState.start(hcatConf); @@ -66,7 +66,7 @@ public void authEnabledV1Auth() throws Exception { */ @Test public void authEnabledV2Auth() throws Exception { - HiveConf hcatConf = new HiveConf(this.getClass()); + HiveConf hcatConf = HiveConf.create(this.getClass()); hcatConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); hcatConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, DummyV2AuthorizerFactory.class.getName()); SessionState.start(hcatConf); @@ -78,7 +78,7 @@ public void authEnabledV2Auth() throws Exception { */ @Test public void authDisabled() throws Exception { - HiveConf hcatConf = new HiveConf(this.getClass()); + HiveConf hcatConf = HiveConf.create(this.getClass()); hcatConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, false); SessionState.start(hcatConf); assertFalse("hcat auth should be disabled", HCatAuthUtil.isAuthorizationEnabled(hcatConf)); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java index 2b57d8d8ae69..4e61e49918e5 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java @@ -91,7 +91,7 @@ public void setUp() throws Exception { return; } - hcatConf = new HiveConf(this.getClass()); + hcatConf = HiveConf.create(this.getClass()); MetaStoreTestUtils.startMetaStoreWithRetry(hcatConf); 
isServerRunning = true; diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java index b5f29f5e40fd..fbee7607dfbd 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java @@ -61,7 +61,7 @@ public class TestSemanticAnalysis extends HCatBaseTest { @Before public void setUpHCatDriver() throws IOException { if (hcatDriver == null) { - HiveConf hcatConf = new HiveConf(hiveConf); + HiveConf hcatConf = HiveConf.create(hiveConf); hcatConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java index 63715432b721..91228d6ba9c4 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java @@ -41,7 +41,7 @@ public class TestUseDatabase { @Before public void setUp() throws Exception { - HiveConf hcatConf = new HiveConf(this.getClass()); + HiveConf hcatConf = HiveConf.create(this.getClass()); hcatConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); hcatConf.set(ConfVars.PREEXECHOOKS.varname, ""); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java index fe1d8afdc8bd..01b38965931e 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java +++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java @@ -65,7 +65,7 @@ public class TestHiveClientCache { private static final Logger LOG = LoggerFactory.getLogger(TestHiveClientCache.class); - final HiveConf hiveConf = new HiveConf(); + final HiveConf hiveConf = HiveConf.create(); @BeforeClass public static void setUp() throws Exception { @@ -226,7 +226,7 @@ private static class LocalMetaServer implements Runnable { public LocalMetaServer() { securityManager = System.getSecurityManager(); System.setSecurityManager(new NoExitSecurityManager()); - hiveConf = new HiveConf(TestHiveClientCache.class); + hiveConf = HiveConf.create(TestHiveClientCache.class); hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + MS_PORT); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java index cf5ef56b5201..814e70de7fcf 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java @@ -42,7 +42,7 @@ public class HCatDataCheckUtil { private static final Logger LOG = LoggerFactory.getLogger(HCatDataCheckUtil.class); public static IDriver instantiateDriver(MiniCluster cluster) { - HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class); + HiveConf hiveConf = HiveConf.create(HCatDataCheckUtil.class); for (Entry e : cluster.getProperties().entrySet()) { hiveConf.set(e.getKey().toString(), e.getValue().toString()); } diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java index a304e49ae879..9a5b3364d41e 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java +++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java @@ -75,10 +75,10 @@ public void setUp() throws Exception { } /** - * Create a new HiveConf and set properties necessary for unit tests. + * Create a HiveConf and set properties necessary for unit tests. */ protected void setUpHiveConf() { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test" + File.separator + "tmp")); hiveConf.set("mapred.local.dir", workDir + File.separator + this.getClass().getSimpleName() diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java index 975cf3cc4bed..588b64aa4bb7 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java @@ -144,7 +144,7 @@ public static void setUpOneTime() throws Exception { fs = new LocalFileSystem(); fs.initialize(fs.getWorkingDirectory().toUri(), new Configuration()); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setInt(HCatConstants.HCAT_HIVE_CLIENT_EXPIRY_TIME, 0); // Hack to initialize cache with 0 expiry time causing it to return a new hive client every time // Otherwise the cache doesn't play well with the second test method with the client gets closed() in the @@ -164,7 +164,7 @@ public void deleteTable() throws Exception { // in case of external table, drop the table contents as well if (isTableExternal() && (externalTableLocation != null)) { Path extPath = new Path(externalTableLocation); - FileSystem fileSystem = extPath.getFileSystem(new HiveConf()); + FileSystem fileSystem = extPath.getFileSystem(HiveConf.create()); if (fileSystem.exists(extPath)) { fileSystem.delete(extPath, true); }
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatDynamicPartitioned.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatDynamicPartitioned.java index 424e428be545..f7d88f84e860 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatDynamicPartitioned.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatDynamicPartitioned.java @@ -194,7 +194,7 @@ protected void runHCatDynamicPartitionedTable(boolean asSingleMapTask, //TODO 1.0 miniCluster is slow this test times out, make it work // renaming test to make test framework skip it public void _testHCatDynamicPartitionMaxPartitions() throws Exception { - HiveConf hc = new HiveConf(this.getClass()); + HiveConf hc = HiveConf.create(this.getClass()); int maxParts = hiveConf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS); LOG.info("Max partitions allowed = {}", maxParts); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java index e601992fc40b..6a22eee01119 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java @@ -155,7 +155,7 @@ public static void setup() throws Exception { warehousedir = new Path(System.getProperty("test.warehouse.dir")); - HiveConf metastoreConf = new HiveConf(); + HiveConf metastoreConf = HiveConf.create(); metastoreConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, warehousedir.toString()); // Run hive metastore server @@ -182,7 +182,7 @@ public static void setup() throws Exception { private static void initializeSetup(HiveConf metastoreConf) throws Exception { - hiveConf = new HiveConf(metastoreConf, TestHCatMultiOutputFormat.class); + hiveConf = HiveConf.create(metastoreConf, 
TestHCatMultiOutputFormat.class); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3); hiveConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java index 416a01e2770e..b59cb915b7a8 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java @@ -71,7 +71,7 @@ public class TestHCatOutputFormat { @Before public void setUp() throws Exception { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); try { client = new HiveMetaStoreClient(hiveConf); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java index 22a0d3f7d134..ff75cf5a1993 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java @@ -112,7 +112,7 @@ public static void setup() throws Exception { return; } - hcatConf = new HiveConf(TestHCatPartitionPublish.class); + hcatConf = HiveConf.create(TestHCatPartitionPublish.class); MetaStoreTestUtils.startMetaStoreWithRetry(hcatConf); isServerRunning = true; diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java index c955aa502126..c38795cbf73d 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java +++ 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java @@ -57,7 +57,7 @@ public class TestPassProperties { private static HiveConf hiveConf; public void Initialize() throws Exception { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java index afe6e92163b8..0cc98fe4f9c9 100644 --- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java +++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java @@ -150,7 +150,7 @@ private static IMetaStoreClient getHiveMetaClient(String serverUri, // metastore configuration arguments like the metastore jdbc connection string // and password, in the case of an embedded metastore, which you get when // hive.metastore.uris = "". 
- HiveConf hiveConf = new HiveConf(job.getConfiguration(), clazz); + HiveConf hiveConf = HiveConf.create(job.getConfiguration(), clazz); if (serverUri != null) { hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, serverUri.trim()); diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java index ae292eb78c16..5390a53dc493 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java @@ -83,7 +83,7 @@ public void setUp() throws Exception { throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR); } - HiveConf hiveConf = new HiveConf(this.getClass()); + HiveConf hiveConf = HiveConf.create(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java index b96479b826a0..3dacce1c5d2a 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java @@ -105,7 +105,7 @@ private void createTable(String tablename, String schema) throws Exception { @BeforeClass public static void setUpBeforeClass() throws Exception { - HiveConf hiveConf = new HiveConf(TestHCatLoaderComplexSchema.class); + HiveConf hiveConf = HiveConf.create(TestHCatLoaderComplexSchema.class); Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + 
"test" + File.separator + "tmp")); hiveConf.set("mapred.local.dir", workDir + File.separator + "TestHCatLoaderComplexSchema" diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java index beb4fe9f4b92..ae48cfbb6128 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java @@ -158,7 +158,7 @@ public void setup() throws Exception { throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR); } - HiveConf hiveConf = new HiveConf(this.getClass()); + HiveConf hiveConf = HiveConf.create(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java index a0c5ce93ff27..2aae1325fb97 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java @@ -94,7 +94,7 @@ public void setUp() throws Exception { assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS)); if (driver == null) { - HiveConf hiveConf = new HiveConf(this.getClass()); + HiveConf hiveConf = HiveConf.create(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git 
a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java index 782fffb516b9..0590f6ef3b53 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java @@ -41,7 +41,7 @@ public abstract class MessageFactory { private static MessageFactory instance = null; - protected static final HiveConf hiveConf = new HiveConf(); + protected static final HiveConf hiveConf = HiveConf.create(); static { hiveConf.addResource("hive-site.xml"); } diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java index ee60556a80ab..02e92b9e2b91 100644 --- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java +++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java @@ -207,14 +207,14 @@ void setConf(Configuration conf) { this.conf = (HiveConf)conf; } else { - this.conf = new HiveConf(conf, getClass()); + this.conf = HiveConf.create(conf, getClass()); } } HiveConf getConf() { if (conf == null) { LOG.warn("Conf hasn't been set yet. 
Using defaults."); - conf = new HiveConf(); + conf = HiveConf.create(); } return conf; } diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index 0420c506136d..40a9a73e1f6d 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -108,7 +108,7 @@ public static void tearDown() throws Exception { @BeforeClass public static void startMetaStoreServer() throws Exception { - hcatConf = new HiveConf(TestHCatClient.class); + hcatConf = HiveConf.create(TestHCatClient.class); String metastoreUri = System.getProperty("test."+HiveConf.ConfVars.METASTOREURIS.varname); if (metastoreUri != null) { hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri); @@ -814,11 +814,11 @@ public void testDropPartitionsWithPartialSpec() throws Exception { private void startReplicationTargetMetaStoreIfRequired() throws Exception { if (!isReplicationTargetHCatRunning) { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set("javax.jdo.option.ConnectionURL", hcatConf.get("javax.jdo.option.ConnectionURL") .replace("metastore", "target_metastore")); replicationTargetHCatPort = MetaStoreTestUtils.startMetaStoreWithRetry(conf); - replicationTargetHCatConf = new HiveConf(hcatConf); + replicationTargetHCatConf = HiveConf.create(hcatConf); replicationTargetHCatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + replicationTargetHCatPort); isReplicationTargetHCatRunning = true; diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java index 09dc5d8ca79e..16109444e7ad 100644 --- 
a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java @@ -82,12 +82,12 @@ public void testCreate() throws HCatException { event.setTableName(t.getTableName()); ReplicationTask.resetFactory(null); - ReplicationTask rtask = ReplicationTask.create(HCatClient.create(new HiveConf()),new HCatNotificationEvent(event)); + ReplicationTask rtask = ReplicationTask.create(HCatClient.create(HiveConf.create()),new HCatNotificationEvent(event)); assertTrue("Provided factory instantiation should yield CreateTableReplicationTask", rtask instanceof CreateTableReplicationTask); ReplicationTask.resetFactory(NoopFactory.class); - rtask = ReplicationTask.create(HCatClient.create(new HiveConf()),new HCatNotificationEvent(event)); + rtask = ReplicationTask.create(HCatClient.create(HiveConf.create()),new HCatNotificationEvent(event)); assertTrue("Provided factory instantiation should yield NoopReplicationTask", rtask instanceof NoopReplicationTask); ReplicationTask.resetFactory(null); diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/exim/TestEximReplicationTasks.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/exim/TestEximReplicationTasks.java index 846ebc75e73f..c91b890383da 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/exim/TestEximReplicationTasks.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/exim/TestEximReplicationTasks.java @@ -72,7 +72,7 @@ public class TestEximReplicationTasks{ @BeforeClass public static void setUpBeforeClass() throws HCatException { - client = HCatClient.create(new HiveConf()); + client = HCatClient.create(HiveConf.create()); ReplicationTask.resetFactory(EximReplicationTaskFactory.class); } diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java index b94c7d715530..e55fc9c6e88e 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java @@ -250,7 +250,7 @@ private void init() { * those values should take precedence. */ private void handleHiveProperties() { - HiveConf hiveConf = new HiveConf();//load hive-site.xml from classpath + HiveConf hiveConf = HiveConf.create();//load hive-site.xml from classpath List interestingPropNames = Arrays.asList( HiveConf.ConfVars.METASTOREURIS.varname, HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, @@ -305,7 +305,7 @@ private String dumpEnvironent() { sb.append(TempletonUtils.dumpPropMap("========WebHCat System.getProperties()========", System.getProperties())); - sb.append(HiveConfUtil.dumpConfig(new HiveConf())); + sb.append(HiveConfUtil.dumpConfig(HiveConf.create())); return sb.toString(); } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java index ed79d1a903fb..0f08909a5064 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java @@ -100,7 +100,7 @@ public CompleteBean run(String id, String jobStatus) String metastoreTokenStrForm = DelegationTokenCache.getStringFormTokenCache().getDelegationToken(id); if(metastoreTokenStrForm != null) { - client = HCatUtil.getHiveMetastoreClient(new HiveConf()); + client = HCatUtil.getHiveMetastoreClient(HiveConf.create()); client.cancelDelegationToken(metastoreTokenStrForm); LOG.debug("Cancelled token for jobId=" + id + " status from JT=" + 
jobStatus); DelegationTokenCache.getStringFormTokenCache().removeDelegationToken(id); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java index d8f27ca953e1..b14a2b50d717 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java @@ -448,7 +448,7 @@ public static void addDef(List args, String name, String val) { */ void addHiveMetaStoreTokenArg() { //in order for this to work hive-site.xml must be on the classpath - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if(!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL)) { return; } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java index bbe5947937aa..71729c751e4b 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java @@ -212,7 +212,7 @@ public Object run() throws IOException { private String buildHcatDelegationToken(String user) throws IOException, InterruptedException, TException { - final HiveConf c = new HiveConf(); + final HiveConf c = HiveConf.create(); final IMetaStoreClient client = HCatUtil.getHiveMetastoreClient(c); LOG.info("user: " + user + " loginUser: " + UserGroupInformation.getLoginUser().getUserName()); final UserGroupInformation ugi = UgiFactory.getUgi(user); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java index 834b54b6c83f..09a4d4dd3c70 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java @@ -186,7 +186,7 @@ private String addToken(Job job, String user, String type) throws IOException, I } private String buildHcatDelegationToken(String user) throws IOException, InterruptedException, TException { - final HiveConf c = new HiveConf(); + final HiveConf c = HiveConf.create(); LOG.debug("Creating hive metastore delegation token for user " + user); final UserGroupInformation ugi = UgiFactory.getUgi(user); UserGroupInformation real = ugi.getRealUser(); @@ -207,7 +207,7 @@ public String run() throws IOException, TException, InterruptedException { private String buildHS2DelegationToken(String user) throws IOException, InterruptedException, TException { - final HiveConf c = new HiveConf(); + final HiveConf c = HiveConf.create(); LOG.debug("Creating hiveserver2 delegation token for user " + user); final UserGroupInformation ugi = UgiFactory.getUgi(user); UserGroupInformation real = ugi.getRealUser(); diff --git a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveClientPool.java b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveClientPool.java index e9a3c53519be..298d00a05b72 100644 --- a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveClientPool.java +++ b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveClientPool.java @@ -46,7 +46,7 @@ public class HiveClientPool extends ClientPoolImpl public HiveClientPool(int poolSize, Configuration conf) { // Do not allow retry by default as we rely on RetryingHiveClient super(poolSize, TTransportException.class, false); - this.hiveConf = new HiveConf(conf, HiveClientPool.class); + this.hiveConf = 
HiveConf.create(conf, HiveClientPool.class); this.hiveConf.addResource(conf); } diff --git a/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java b/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java index 36996e33e3c6..2acd04d7cb16 100644 --- a/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java +++ b/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java @@ -85,7 +85,7 @@ public void testConf() { } private HiveConf createHiveConf() { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); try (InputStream inputStream = new ByteArrayInputStream(HIVE_SITE_CONTENT.getBytes(StandardCharsets.UTF_8))) { hiveConf.addResource(inputStream, "for_test"); } catch (IOException e) { diff --git a/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveMetastore.java b/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveMetastore.java index 76eb87b21b50..f4cd059791d6 100644 --- a/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveMetastore.java +++ b/iceberg/iceberg-catalog/src/test/java/org/apache/iceberg/hive/TestHiveMetastore.java @@ -123,7 +123,7 @@ public class TestHiveMetastore { * Starts a TestHiveMetastore with the default connection pool size (5) and the default HiveConf. 
*/ public void start() { - start(new HiveConf(new Configuration(), TestHiveMetastore.class), DEFAULT_POOL_SIZE); + start(HiveConf.create(new Configuration(), TestHiveMetastore.class), DEFAULT_POOL_SIZE); } /** @@ -150,7 +150,7 @@ public void start(HiveConf conf, int poolSize) { this.executorService = Executors.newSingleThreadExecutor(); this.executorService.submit(() -> server.serve()); - // in Hive3, setting this as a system prop ensures that it will be picked up whenever a new HiveConf is created + // in Hive3, setting this as a system prop ensures that it will be picked up whenever a HiveConf is created System.setProperty(HiveConf.ConfVars.METASTOREURIS.varname, hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)); this.clientPool = new HiveClientPool(1, hiveConf); @@ -228,7 +228,7 @@ public R run(ClientPool.Action action) thro } private TServer newThriftServer(TServerSocket socket, int poolSize, HiveConf conf) throws Exception { - HiveConf serverConf = new HiveConf(conf); + HiveConf serverConf = HiveConf.create(conf); serverConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:" + DERBY_PATH + ";create=true"); baseHandler = HMS_HANDLER_CTOR.newInstance("new db based metaserver", serverConf); IHMSHandler handler = GET_BASE_HMS_HANDLER.invoke(serverConf, baseHandler, false); @@ -257,7 +257,7 @@ private void initConf(HiveConf conf, int port) { } private static void setupMetastoreDB(String dbURL) throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY, "jdbc:derby:" + DERBY_PATH + ";create=true"); TestTxnDbUtil.prepDb(conf); diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java index 9d17d85ca772..d4f8fe1bed4e 100644 --- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java +++ 
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java @@ -83,7 +83,7 @@ public void setHiveSessionValue(String key, boolean value) { public void start() { // Create a copy of the HiveConf for the metastore - metastore.start(new HiveConf(hs2Conf), 20); + metastore.start(HiveConf.create(hs2Conf), 20); hs2Conf.setVar(HiveConf.ConfVars.METASTOREURIS, metastore.hiveConf().getVar(HiveConf.ConfVars.METASTOREURIS)); hs2Conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, metastore.hiveConf().getVar(HiveConf.ConfVars.METASTOREWAREHOUSE)); @@ -181,7 +181,7 @@ public HiveSession getSession() { } private HiveConf initializeConf() { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); // Use ephemeral port to enable running tests in parallel hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, 0); diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/writer/HiveIcebergWriterTestBase.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/writer/HiveIcebergWriterTestBase.java index bd3d3aefce23..2b61f1aa28d4 100644 --- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/writer/HiveIcebergWriterTestBase.java +++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/writer/HiveIcebergWriterTestBase.java @@ -70,7 +70,7 @@ public class HiveIcebergWriterTestBase { .add(122, "g") .build(); - private final HadoopTables tables = new HadoopTables(new HiveConf()); + private final HadoopTables tables = new HadoopTables(HiveConf.create()); private TestHelper helper; protected Table table; protected WriterBuilder writerBuilder; @@ -114,7 +114,7 @@ public void init() throws IOException { PartitionSpec.builderFor(SCHEMA) .bucket("data", 3) .build(); - this.helper = new TestHelper(new HiveConf(), tables, location.toString(), SCHEMA, spec, fileFormat, + this.helper = new TestHelper(HiveConf.create(), tables, location.toString(), SCHEMA, spec, fileFormat, 
Collections.singletonMap(WriterBuilder.ICEBERG_DELETE_SKIPROWDATA, String.valueOf(skipRowData)), temp); this.table = helper.createTable(); helper.appendToTable(RECORDS); diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java index ae7e57271dcc..e84159df09a7 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java @@ -67,7 +67,7 @@ public class TestHCatClientNotification { @BeforeClass public static void setupClient() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, DbNotificationListener.class.getName()); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_MESSAGE_FACTORY, JSONMessageEncoder.class.getName()); hCatClient = HCatClient.create(conf); diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java index 155735c6072e..01ec6af5a478 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java @@ -181,7 +181,7 @@ public Configuration getJobConf() { * @return Configuration of Hive Metastore, this is a standalone not a daemon */ public HiveConf getHiveConf() { - return new HiveConf(hiveConf); + return HiveConf.create(hiveConf); } /** @@ -295,7 +295,7 @@ private void setupHBaseCluster() { private void setUpMetastore() throws Exception { if (hiveConf == null) - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); //The default org.apache.hadoop.hive.ql.hooks.PreExecutePrinter hook 
//is present only in the ql/test directory diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java index fe33f47793fb..a73018918bd5 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java @@ -68,7 +68,7 @@ public class TestPigHBaseStorageHandler extends SkeletonHBaseTest { public void Initialize() throws Exception { - hcatConf = new HiveConf(this.getClass()); + hcatConf = HiveConf.create(this.getClass()); //hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, // HCatSemanticAnalyzer.class.getName()); URI fsuri = getFileSystem().getUri(); diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 100ee24e1fa5..441b2cbe9e61 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -279,7 +279,7 @@ public void onBatchAcidWrite(BatchAcidWriteEvent batchAcidWriteEvent) throws Met @SuppressWarnings("rawtypes") @BeforeClass public static void connectToMetastore() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS, DbNotificationListener.class.getName()); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, MockMetaStoreEventListener.class.getName()); diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java index c093055ecff1..1d252a133c7c 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java @@ -69,10 +69,10 @@ public class TestSequenceFileReadWrite { public void setup() throws Exception { dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator + TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis()); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); warehouseDir = HCatUtil.makePathASafeFileName(dataDir + File.separator + "warehouse"); inputFileName = HCatUtil.makePathASafeFileName(dataDir + File.separator + "input.data"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/calcite/FieldTrimmerBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/calcite/FieldTrimmerBench.java index 435732c1ab54..dd6ee2c03a9a 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/calcite/FieldTrimmerBench.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/calcite/FieldTrimmerBench.java @@ -80,7 +80,7 @@ public class FieldTrimmerBench { @Setup(Level.Trial) public void initTrial() { // Init cluster and builder - final RelOptPlanner planner = CalcitePlanner.createPlanner(new HiveConf()); + final RelOptPlanner planner = CalcitePlanner.createPlanner(HiveConf.create()); final RexBuilder rexBuilder = new RexBuilder( new JavaTypeFactoryImpl(new HiveTypeSystemImpl())); relOptCluster = 
RelOptCluster.create(planner, rexBuilder); diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/ql/exec/TableAndPartitionExportBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/ql/exec/TableAndPartitionExportBench.java index a2367271dc56..8c4e0fb684a4 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/ql/exec/TableAndPartitionExportBench.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/ql/exec/TableAndPartitionExportBench.java @@ -47,7 +47,7 @@ public class TableAndPartitionExportBench { public static class BaseBench { protected static final Logger LOG = LoggerFactory.getLogger(BaseBench.class); - final HiveConf conf = new HiveConf(); + final HiveConf conf = HiveConf.create(); final int nTables = 500; @Benchmark diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java index aa882973ecfe..17a46df576ae 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java @@ -31,7 +31,7 @@ public abstract class MapJoinMultiKeyBenchBase extends AbstractMapJoin { public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, MapJoinTestImplementation mapJoinImplementation) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); long seed = 2543; diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java index 60b28907a54e..512ce18d7419 100644 --- 
a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java @@ -31,7 +31,7 @@ public abstract class MapJoinOneLongKeyBenchBase extends AbstractMapJoin { public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, MapJoinTestImplementation mapJoinImplementation) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); long seed = 2543; diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java index 937ede188294..99f32604ce40 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java @@ -31,7 +31,7 @@ public abstract class MapJoinOneStringKeyBenchBase extends AbstractMapJoin { public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, MapJoinTestImplementation mapJoinImplementation) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); long seed = 2543; diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/BytesKeyBase.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/BytesKeyBase.java index a4ddd9dfea6d..18bb088bcc17 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/BytesKeyBase.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/BytesKeyBase.java @@ -41,7 +41,7 @@ public void doSetup(VectorMapJoinDesc.VectorMapJoinVariation vectorMapJoinVariat 
MapJoinTestConfig.MapJoinTestImplementation mapJoinImplementation, int rows) throws Exception { long seed = 2543; int rowCount = rows; - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); int[] bigTableKeyColumnNums = new int[] { 0 }; String[] bigTableColumnNames = new String[] { "b1" }; TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo }; diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/LongKeyBase.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/LongKeyBase.java index 7bbaa9c5549c..565634f40c22 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/LongKeyBase.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/LongKeyBase.java @@ -33,7 +33,7 @@ public void doSetup(VectorMapJoinDesc.VectorMapJoinVariation vectorMapJoinVariat MapJoinTestConfig.MapJoinTestImplementation mapJoinImplementation, int rows) throws Exception { long seed = 2543; int rowCount = rows; - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); int[] bigTableKeyColumnNums = new int[] { 0 }; String[] bigTableColumnNames = new String[] { "number1" }; TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.longTypeInfo }; diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/MultiKeyBase.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/MultiKeyBase.java index be6eae7f36e8..b178db17ee5c 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/MultiKeyBase.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/load/MultiKeyBase.java @@ -37,7 +37,7 @@ public void doSetup(VectorMapJoinDesc.VectorMapJoinVariation vectorMapJoinVariat MapJoinTestConfig.MapJoinTestImplementation mapJoinImplementation, 
int rows) throws Exception { long seed = 2543; int rowCount = rows; - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); int[] bigTableKeyColumnNums = new int[] { 0, 1, 2}; String[] bigTableColumnNames = new String[] { "b1", "b2", "b3" }; TypeInfo[] bigTableTypeInfos = new TypeInfo[] { diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/operators/VectorSelectOperatorBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/operators/VectorSelectOperatorBench.java index 54b200b189bc..2fa3ce1c9e5b 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/operators/VectorSelectOperatorBench.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/operators/VectorSelectOperatorBench.java @@ -62,7 +62,7 @@ public class VectorSelectOperatorBench extends AbstractOperatorBench { @Setup public void setup(Blackhole bh) throws HiveException { - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); List columns = new ArrayList(); columns.add("a"); columns.add("b"); diff --git a/itests/hive-minikdc/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithSecureDFS.java b/itests/hive-minikdc/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithSecureDFS.java index ecf8472ea994..af5873b5b4b7 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithSecureDFS.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithSecureDFS.java @@ -108,13 +108,13 @@ public static void stopCluster() { @Override public void setup() throws Exception { - HiveConf conf = new HiveConf(secureConf); + HiveConf conf = HiveConf.create(secureConf); conf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString()); - setup(new HiveConf(secureConf)); + setup(HiveConf.create(secureConf)); } private static HiveConf createSecureDFSConfig(MiniKdc 
kdc) throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf); String suPrincipal = SUPER_USER_NAME + "/localhost@" + kdc.getRealm(); String suKeyTab = SUPER_USER_KEYTAB.toAbsolutePath().toString(); @@ -157,7 +157,7 @@ public void testLeakAfterHistoryException() throws Exception { // compaction requests to fail and its not practical to have in a unit test. // The size of the configuration, measured by taking heapdump and inspecting the objects, is // roughly 190MB. - HiveConf cleanerConf = new HiveConf(conf); + HiveConf cleanerConf = HiveConf.create(conf); for (int i = 0; i < 1_000_000; i++) { cleanerConf.set("hive.random.property.with.id." + i, Integer.toString(i)); } diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java index fccf3e0209f8..35fad48e334f 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java @@ -47,7 +47,7 @@ public abstract class JdbcWithMiniKdcSQLAuthTest { public static void beforeTestBase(String transportMode) throws Exception { miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode); System.err.println("Testing using HS2 mode:" + transportMode); diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java index 0940c79c7215..afeff4dbadfc 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java +++ 
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java @@ -34,7 +34,7 @@ public class TestHiveAuthFactory { @BeforeClass public static void setUp() throws Exception { miniHiveKdc = new MiniHiveKdc(); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); } @AfterClass diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java index 890e4092ea4f..51665c3fe26d 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java @@ -58,7 +58,7 @@ public static void setUpBeforeClass() throws Exception { confOverlay.put(ConfVars.HIVEFETCHTASKCACHING.varname, "" + false); miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); miniHS2.start(confOverlay); } diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcNonKrbSASLWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcNonKrbSASLWithMiniKdc.java index 948c7daded1d..83e5d5d9a9b1 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcNonKrbSASLWithMiniKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcNonKrbSASLWithMiniKdc.java @@ -59,7 +59,7 @@ public static void beforeTest() throws Exception { CustomAuthenticator.class.getName()); confOverlay.put(ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_ENABLED.varname, "false"); miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); miniHS2 = MiniHiveKdc.getMiniHS2WithKerbWithRemoteHMS(miniHiveKdc, hiveConf, "CUSTOM"); miniHS2.start(confOverlay); } diff --git 
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java index 58bcac694215..b4951ea929a6 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStore.java @@ -39,7 +39,7 @@ public static void beforeTest() throws Exception { confOverlay.put(ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_ENABLED.varname, "false"); miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); //using old config value tests backwards compatibility hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.thrift.DBTokenStore"); miniHS2 = MiniHiveKdc.getMiniHS2WithKerbWithRemoteHMSWithKerb(miniHiveKdc, hiveConf); diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStoreNoDoAs.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStoreNoDoAs.java index 74d8e777597e..1b0a086b059a 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStoreNoDoAs.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithDBTokenStoreNoDoAs.java @@ -39,7 +39,7 @@ public static void beforeTest() throws Exception { confOverlay.put(ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_ENABLED.varname, "false"); miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.thrift.DBTokenStore"); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = MiniHiveKdc.getMiniHS2WithKerbWithRemoteHMSWithKerb(miniHiveKdc, hiveConf); diff --git 
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java index 12c3c4b8381a..25b3216ec387 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java @@ -71,7 +71,7 @@ public static void beforeTest() throws Exception { confOverlay.put(ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_ENABLED.varname, "false"); miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); miniHS2.start(confOverlay); } diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java index 883d333dd48d..2c6262615017 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java @@ -71,7 +71,7 @@ public static void beforeTest() throws Exception { public void setUp() throws Exception { miniHiveKdc = new MiniHiveKdc(); DriverManager.setLoginTimeout(0); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode); System.err.println("Testing using HS2 mode : " + hiveConf.getVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE)); diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java index e58786b34da4..e99060b858af 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java @@ -48,7 +48,7 @@ public static 
void beforeTest() throws Exception { @Before public void setUp() throws Exception { miniHiveKdc = new MiniHiveKdc(); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); } @After diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java index 7ca74efb648b..84ffe3992927 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java @@ -54,7 +54,7 @@ public static void beforeTest() throws Exception { miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); SSLTestUtils.setMetastoreSslConf(hiveConf); hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); diff --git a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/common/TestFileUtils.java b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/common/TestFileUtils.java index fa91c2cc15fe..fcd9eaf55439 100644 --- a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/common/TestFileUtils.java +++ b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/common/TestFileUtils.java @@ -50,7 +50,7 @@ public class TestFileUtils { @BeforeClass public static void setup() throws Exception { - conf = new HiveConf(TestFileUtils.class); + conf = HiveConf.create(TestFileUtils.class); dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null); fs = dfs.getFileSystem(); } diff --git a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java index e25533302300..62d4d520d870 100644 --- a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java +++ 
b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java @@ -141,7 +141,7 @@ public void setup() throws Exception { "true"); System.setProperty(HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS.varname, MyTokenStore.class.getName()); - conf = new HiveConf(TestHadoopAuthBridge23.class); + conf = HiveConf.create(TestHadoopAuthBridge23.class); MetaStoreTestUtils.startMetaStoreWithRetry(new MyHadoopThriftAuthBridge23(), conf); } diff --git a/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java b/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java index 3917a3b457e4..36bd45aa78a1 100644 --- a/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java +++ b/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java @@ -39,7 +39,7 @@ public class SSLTestUtils { private static final String HS2_HTTP_ENDPOINT = "cliservice"; private static final String HS2_BINARY_AUTH_MODE = "NONE"; - private static final HiveConf conf = new HiveConf(); + private static final HiveConf conf = HiveConf.create(); private static final String dataFileDir = !System.getProperty("test.data.files", "").isEmpty() ? 
System.getProperty( "test.data.files") : conf.get("test.data.files").replace('\\', '/').replace("c:", ""); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java index b3dfa961a6c3..b130ecb6127b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java @@ -62,7 +62,7 @@ public static void expandHiveConfParams(HiveConf hiveConf) { public static HiveConf getHiveOnTezConfFromDir(String confDir) throws Exception { HiveConf.setHiveSiteLocation( new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf .addResource(new URL("file://" + new File(confDir).toURI().getPath() + "/tez-site.xml")); return hiveConf; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java index 7d441b6acc03..1c4a3dbf8f62 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java @@ -139,7 +139,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, */ @BeforeClass public static void setUpBeforeClass() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setVar(ConfVars.PREEXECHOOKS, PreExecHook.class.getName()); hiveConf.setVar(ConfVars.POSTEXECHOOKS, diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java index 55877bea15c8..80464bab5dfb 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/io/TestHadoopFileStatus.java @@ -47,7 +47,7 @@ public class TestHadoopFileStatus { @BeforeClass public static void setUp() throws IOException { - hiveConf = new HiveConf(TestHadoopFileStatus.class); + hiveConf = HiveConf.create(TestHadoopFileStatus.class); hiveConf.set("dfs.namenode.acls.enabled", "true"); } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java index c10060f8171d..28ba3c45cb8a 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java @@ -55,7 +55,7 @@ protected static void setup() throws Exception { System.setProperty("hive.security.metastore.authorization.manager", MetaStoreAuthzAPIAuthorizerEmbedOnly.class.getName()); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); if (isRemoteMetastoreMode) { MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf); } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestDisallowColChangesExceptionList.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestDisallowColChangesExceptionList.java index 061dba3bd645..6033ef946127 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestDisallowColChangesExceptionList.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestDisallowColChangesExceptionList.java @@ -43,7 +43,7 @@ public class TestDisallowColChangesExceptionList { @BeforeClass public static void startServices() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 2); 
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 2); hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java index 4b95fe7be6b9..5189d28a4045 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java @@ -50,7 +50,7 @@ public class TestHiveMetaStoreAlterColumnPar { @BeforeClass public static void startServices() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 2); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 2); hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveProtoEventsCleanerTask.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveProtoEventsCleanerTask.java index e187fadb990d..76dc7c6b3ba5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveProtoEventsCleanerTask.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveProtoEventsCleanerTask.java @@ -65,7 +65,7 @@ public class TestHiveProtoEventsCleanerTask { @Before public void setup() throws Exception { - hiveConf = new HiveConf(TestHiveProtoEventsCleanerTask.class); + hiveConf = HiveConf.create(TestHiveProtoEventsCleanerTask.class); String tmpFolder = folder.newFolder().getAbsolutePath(); hiveConf.setVar(HiveConf.ConfVars.HIVE_PROTO_EVENTS_BASE_PATH, tmpFolder + "/" + eventsSubDirs[0]); HiveConf.setTimeVar(hiveConf, ConfVars.HIVE_PROTO_EVENTS_TTL, 2, TimeUnit.DAYS); diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java index 9ddad9922652..a4f72303d71d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java @@ -38,7 +38,7 @@ * TestMetaStoreAuthorization. */ public class TestMetaStoreAuthorization { - protected HiveConf conf = new HiveConf(); + protected HiveConf conf = HiveConf.create(); private int port; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java index 2f7a2601627c..e8f0a9a485eb 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java @@ -41,7 +41,7 @@ public class TestMetaStoreMetrics { @BeforeClass public static void before() throws Exception { - hiveConf = new HiveConf(TestMetaStoreMetrics.class); + hiveConf = HiveConf.create(TestMetaStoreMetrics.class); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java index 4dcfb2266a0c..8112fd5ca3e3 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java 
@@ -83,7 +83,7 @@ public static void setUp() throws Exception { miniDFSCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).format(true).build(); DFSTestUtil.createKey("test_key_cm", miniDFSCluster, conf); DFSTestUtil.createKey("test_key_db", miniDFSCluster, conf); - hiveConf = new HiveConf(TestReplChangeManager.class); + hiveConf = HiveConf.create(TestReplChangeManager.class); hiveConf.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); hiveConf.setInt(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY, 60); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, @@ -1252,7 +1252,7 @@ public void recycleFailureWithDifferentEncryptionZonesForCm() throws Throwable { @Test public void testClearerEncrypted() throws Exception { - HiveConf hiveConfCmClearer = new HiveConf(TestReplChangeManager.class); + HiveConf hiveConfCmClearer = HiveConf.create(TestReplChangeManager.class); hiveConfCmClearer.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); hiveConfCmClearer.setInt(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY, 60); hiveConfCmClearer.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, @@ -1358,7 +1358,7 @@ public void testClearerEncrypted() throws Exception { @Test public void testCmRootAclPermissions() throws Exception { - HiveConf hiveConfAclPermissions = new HiveConf(TestReplChangeManager.class); + HiveConf hiveConfAclPermissions = HiveConf.create(TestReplChangeManager.class); hiveConfAclPermissions.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); hiveConfAclPermissions.setInt(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY, 60); hiveConfAclPermissions.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, @@ -1500,7 +1500,7 @@ public void testCmRootAclPermissions() throws Exception { @Test public void testCmrootEncrypted() throws Exception { - HiveConf encryptedHiveConf = new HiveConf(TestReplChangeManager.class); + HiveConf encryptedHiveConf = HiveConf.create(TestReplChangeManager.class); 
encryptedHiveConf.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); encryptedHiveConf.setInt(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY, 60); encryptedHiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, @@ -1561,7 +1561,7 @@ public void testCmrootEncrypted() throws Exception { @Test public void testCmrootFallbackEncrypted() throws Exception { - HiveConf encryptedHiveConf = new HiveConf(TestReplChangeManager.class); + HiveConf encryptedHiveConf = HiveConf.create(TestReplChangeManager.class); encryptedHiveConf.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); encryptedHiveConf.setInt(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY, 60); encryptedHiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, @@ -1599,7 +1599,7 @@ public void testCmrootFallbackEncrypted() throws Exception { @Test public void testCmrootFallbackRelative() throws Exception { - HiveConf encryptedHiveConf = new HiveConf(TestReplChangeManager.class); + HiveConf encryptedHiveConf = HiveConf.create(TestReplChangeManager.class); encryptedHiveConf.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); encryptedHiveConf.setInt(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY, 60); encryptedHiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java index 981f5fb4c211..0ea26f9c6f01 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java @@ -62,7 +62,7 @@ public void setUp() throws Exception { ObjectStore.setSchemaVerified(false); System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); System.setProperty(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.toString(), "true"); - hiveConf = new 
HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); System.setProperty("hive.support.concurrency", "false"); System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName()); @@ -90,7 +90,7 @@ public void tearDown() throws Exception { @Test public void testDefaults() { System.clearProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString()); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); assertFalse(hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION)); assertTrue(hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL)); } @@ -102,7 +102,7 @@ public void testDefaults() { @Test public void testVersionRestriction () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); assertTrue(hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION)); assertFalse(hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL)); @@ -129,7 +129,7 @@ public void testMetastoreVersion() throws Exception { // let the schema and version be auto created System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION_RECORD_VERSION.toString(), "true"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); SessionState.start(new CliSessionState(hiveConf)); driver = DriverFactory.newDriver(hiveConf); try { @@ -152,7 +152,7 @@ public void testMetastoreVersion() throws Exception { @Test public void testVersionMatching () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); SessionState.start(new CliSessionState(hiveConf)); driver = 
DriverFactory.newDriver(hiveConf); try { @@ -164,7 +164,7 @@ public void testVersionMatching () throws Exception { ObjectStore.setSchemaVerified(false); hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, true); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); setVersion(hiveConf, metastoreSchemaInfo.getHiveSchemaVersion()); driver = DriverFactory.newDriver(hiveConf); driver.run("show tables"); @@ -177,14 +177,14 @@ public void testVersionMatching () throws Exception { @Test public void testVersionMisMatch () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); SessionState.start(new CliSessionState(hiveConf)); driver = DriverFactory.newDriver(hiveConf); driver.run("show tables"); ObjectStore.setSchemaVerified(false); System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); setVersion(hiveConf, "fooVersion"); SessionState.start(new CliSessionState(hiveConf)); driver = DriverFactory.newDriver(hiveConf); @@ -204,13 +204,13 @@ public void testVersionMisMatch () throws Exception { @Test public void testVersionCompatibility () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); SessionState.start(new CliSessionState(hiveConf)); driver = DriverFactory.newDriver(hiveConf); driver.run("show tables"); System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true"); - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); setVersion(hiveConf, "3.9000.0"); SessionState.start(new CliSessionState(hiveConf)); driver = DriverFactory.newDriver(hiveConf); diff 
--git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java index ebac38d10944..f78e91b57b0d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java @@ -96,7 +96,7 @@ private static void internalSetUpProvidePerm() throws Exception { configuration.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, noPermBaseDir); configuration.set("dfs.client.use.datanode.hostname", "true"); permDdfs = new MiniDFSCluster.Builder(configuration).numDataNodes(2).format(true).build(); - permhiveConf = new HiveConf(TestReplChangeManager.class); + permhiveConf = HiveConf.create(TestReplChangeManager.class); permhiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "hdfs://" + permDdfs.getNameNode().getHostAndPort() + HiveConf.ConfVars.METASTOREWAREHOUSE.defaultStrVal); permhiveConf.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); @@ -108,7 +108,7 @@ private static void internalSetUpProvidePerm() throws Exception { private static void internalSetUp() throws Exception { m_dfs = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(2).format(true).build(); - hiveConf = new HiveConf(TestReplChangeManager.class); + hiveConf = HiveConf.create(TestReplChangeManager.class); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "hdfs://" + m_dfs.getNameNode().getHostAndPort() + HiveConf.ConfVars.METASTOREWAREHOUSE.defaultStrVal); hiveConf.setBoolean(HiveConf.ConfVars.REPLCMENABLED.varname, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestServerSpecificConfig.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestServerSpecificConfig.java index 17542f177c1e..0a4afdb5e604 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestServerSpecificConfig.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestServerSpecificConfig.java @@ -52,7 +52,7 @@ public void testServerConfigsEmbeddedMetastore() throws IOException, Throwable { // set hive-site.xml to default hive-site.xml that has embedded metastore HiveConf.setHiveSiteLocation(oldDefaultHiveSite); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // check config properties expected with embedded metastore client assertTrue(HiveConf.isLoadMetastoreConfig()); @@ -71,7 +71,7 @@ public void testServerConfigsEmbeddedMetastore() throws IOException, Throwable { // check if hiveserver2 config gets loaded when HS2 is started new HiveServer2(); - conf = new HiveConf(); + conf = HiveConf.create(); verifyHS2ConfParams(conf); assertEquals("from.hivemetastore-site.xml", @@ -103,13 +103,13 @@ public void testSystemPropertyPrecedence() { try { HiveConf.setHiveSiteLocation(oldDefaultHiveSite); System.setProperty(OVERRIDE_KEY, "from.sysprop"); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // ensure metatore site.xml does not get to override this assertEquals("from.sysprop", conf.get(OVERRIDE_KEY)); // get HS2 site.xml loaded new HiveServer2(); - conf = new HiveConf(); + conf = HiveConf.create(); assertTrue(HiveConf.isLoadHiveServer2Config()); // ensure hiveserver2 site.xml does not get to override this assertEquals("from.sysprop", conf.get(OVERRIDE_KEY)); @@ -146,7 +146,7 @@ public void testHiveMetastoreRemoteConfig() throws IOException, Throwable { resetDefaults(); // create hiveconf again to run initialization code, to see if value changes - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // check the properties expected in hive client without metastore verifyMetastoreConfNotLoaded(conf); @@ -155,7 +155,7 @@ public void testHiveMetastoreRemoteConfig() throws IOException, Throwable { // get HS2 
site.xml loaded new HiveServer2(); - conf = new HiveConf(); + conf = HiveConf.create(); verifyHS2ConfParams(conf); verifyMetastoreConfNotLoaded(conf); } @@ -180,7 +180,7 @@ private void setHiveSiteWithRemoteMetastore() throws IOException { File hiveSite = new File(newConfFile); FileOutputStream out = new FileOutputStream(hiveSite); HiveConf.setHiveSiteLocation(oldDefaultHiveSite); - HiveConf defaultHiveConf = new HiveConf(); + HiveConf defaultHiveConf = HiveConf.create(); defaultHiveConf.setVar(ConfVars.METASTOREURIS, "dummyvalue"); // reset to the hive-site.xml values for following param defaultHiveConf.set("hive.dummyparam.test.server.specific.config.override", diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStoreTestBase.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStoreTestBase.java index 35053e70b0af..d8eb3e951db4 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStoreTestBase.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStoreTestBase.java @@ -91,7 +91,7 @@ private Configuration createConf(String zkPath) { if(zkSslEnabled) { String dataFileDir = !System.getProperty("test.data.files", "").isEmpty() ? 
System.getProperty("test.data.files") : - (new HiveConf()).get("test.data.files").replace('\\', '/').replace("c:", ""); + (HiveConf.create()).get("test.data.files").replace('\\', '/').replace("c:", ""); conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_KEYSTORE_LOCATION, dataFileDir + File.separator + LOCALHOST_KEY_STORE_NAME); conf.set(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_KEYSTORE_PASSWORD, diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java index 22e3fe0e4762..ee4f69295664 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java @@ -93,7 +93,7 @@ public void setUp() throws Exception { os = new ByteArrayOutputStream(); System.setOut(new PrintStream(os)); - hiveConf = new HiveConf(HiveMetaTool.class); + hiveConf = HiveConf.create(HiveMetaTool.class); client = new HiveMetaStoreClient(hiveConf); createDatabase(); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java index 9669cd4264fd..73f826e6a2f8 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolCatalogOps.java @@ -63,7 +63,7 @@ public class TestSchemaToolCatalogOps { @BeforeClass public static void initDb() throws HiveMetaException, IOException { - conf = new HiveConf(); + conf = HiveConf.create(); MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.AUTO_CREATE_ALL, false); 
MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.HMS_HANDLER_ATTEMPTS, 1); MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.THRIFT_CONNECTION_RETRIES, 1); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java index 36ba35f2aea0..c998372fb3c7 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java @@ -112,7 +112,7 @@ public String toString() { @Before public void setUp() throws Exception { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); @@ -209,7 +209,7 @@ public void testMapJoinOnTez() throws Exception { @Ignore("HIVE-19509: Disable tests that are failing continuously") @Test public void testNonStandardConversion01() throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf setupTez(confForTez); //CTAS with non-ACID target table runStatementOnDriver("create table " + Table.NONACIDNONBUCKET + " stored as ORC TBLPROPERTIES('transactional'='false') as " + @@ -350,7 +350,7 @@ public void testNonStandardConversion01() throws Exception { @Ignore("HIVE-17214")//this consistently works locally but never in ptest.... 
@Test public void testNonStandardConversion02() throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf confForTez.setBoolean("mapred.input.dir.recursive", true); setupTez(confForTez); runStatementOnDriver("create table " + Table.NONACIDNONBUCKET + " stored as ORC " + @@ -455,7 +455,7 @@ public void testNonStandardConversion02() throws Exception { */ @Test public void testCtasTezUnion() throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf confForTez.setBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER, false); setupTez(confForTez); //CTAS with ACID target table @@ -584,7 +584,7 @@ public void testCtasTezUnion() throws Exception { @Test public void testInsertWithRemoveUnion() throws Exception { int[][] values = {{1,2},{3,4},{5,6},{7,8},{9,10}}; - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf setupTez(confForTez); runStatementOnDriver("drop table if exists T", confForTez); runStatementOnDriver("create table T (a int, b int) stored as ORC TBLPROPERTIES ('transactional'='false')", confForTez); @@ -659,7 +659,7 @@ public void testInsertWithRemoveUnion() throws Exception { */ @Test public void testAcidInsertWithRemoveUnion() throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf setupTez(confForTez); runStatementOnDriver("drop table if exists T", confForTez); runStatementOnDriver("create table T (a int, b int) stored as ORC TBLPROPERTIES ('transactional'='true')", confForTez); @@ -706,7 +706,7 @@ public void testAcidInsertWithRemoveUnion() 
throws Exception { } @Test public void testBucketedAcidInsertWithRemoveUnion() throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf setupTez(confForTez); int[][] values = {{1,2},{2,4},{5,6},{6,8},{9,10}}; runStatementOnDriver("delete from " + Table.ACIDTBL, confForTez); @@ -754,7 +754,7 @@ public void testBucketedAcidInsertWithRemoveUnion() throws Exception { @Test public void testGetSplitsLocks() throws Exception { // Need to test this with LLAP settings, which requires some additional configurations set. - HiveConf modConf = new HiveConf(hiveConf); + HiveConf modConf = HiveConf.create(hiveConf); setupTez(modConf); modConf.setVar(ConfVars.HIVE_EXECUTION_ENGINE, "tez"); modConf.setVar(ConfVars.HIVEFETCHTASKCONVERSION, "more"); @@ -815,7 +815,7 @@ public void testGetSplitsLocks() throws Exception { @Test public void testGetSplitsLocksWithMaterializedView() throws Exception { // Need to test this with LLAP settings, which requires some additional configurations set. 
- HiveConf modConf = new HiveConf(hiveConf); + HiveConf modConf = HiveConf.create(hiveConf); setupTez(modConf); modConf.setVar(ConfVars.HIVE_EXECUTION_ENGINE, "tez"); modConf.setVar(ConfVars.HIVEFETCHTASKCONVERSION, "more"); @@ -866,7 +866,7 @@ public void testGetSplitsLocksWithMaterializedView() throws Exception { public void testCrudMajorCompactionSplitGrouper() throws Exception { String tblName = "test_split_grouper"; // make a clone of existing hive conf - HiveConf confForTez = new HiveConf(hiveConf); + HiveConf confForTez = HiveConf.create(hiveConf); setupTez(confForTez); // one-time setup to make query able to run with Tez HiveConf.setVar(confForTez, HiveConf.ConfVars.HIVEFETCHTASKCONVERSION, "none"); runStatementOnDriver("create transactional table " + tblName + " (a int, b int) clustered by (a) into 2 buckets " @@ -945,8 +945,8 @@ private void restartSessionAndDriver(HiveConf conf) throws Exception { // Ideally test like this should be a qfile test. However, the explain output from qfile is always // slightly different depending on where the test is run, specifically due to file size estimation private void testJoin(String engine, String joinType) throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); // make a clone of existing hive conf - HiveConf confForMR = new HiveConf(hiveConf); // make a clone of existing hive conf + HiveConf confForTez = HiveConf.create(hiveConf); // make a clone of existing hive conf + HiveConf confForMR = HiveConf.create(hiveConf); // make a clone of existing hive conf if (engine.equals("tez")) { setupTez(confForTez); // one-time setup to make query able to run with Tez diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java index 055688a3cc65..166e603abf19 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java @@ -112,7 +112,7 @@ private static void createTestTable(Statement stmt, String isAutopurge, boolean @BeforeClass public static void setUpBeforeClass() throws Exception { - conf = new HiveConf(TestAutoPurgeTables.class); + conf = HiveConf.create(TestAutoPurgeTables.class); // enable trash so it can be tested conf.setFloat("fs.trash.checkpoint.interval", 30); conf.setFloat("fs.trash.interval", 30); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestConstraintsMerge.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestConstraintsMerge.java index 12b626d09d3d..528327b0b779 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestConstraintsMerge.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestConstraintsMerge.java @@ -72,7 +72,7 @@ public String toString() { @Before public void setUp() throws Exception { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); hiveConf.set(ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); @@ -136,7 +136,7 @@ public void tearDown() throws Exception { @Test public void testUpdateInMergeViolatesCheckConstraint() throws Exception { - HiveConf confForTez = new HiveConf(hiveConf); + HiveConf confForTez = HiveConf.create(hiveConf); confForTez.setBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER, false); setupTez(confForTez); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java index 325831e2eb96..fed2d3de73fc 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java @@ -35,7 +35,7 @@ public class 
TestCreateUdfEntities { @Before public void setUp() throws Exception { - HiveConf conf = new HiveConf(IDriver.class); + HiveConf conf = HiveConf.create(IDriver.class); SessionState.start(conf); driver = DriverFactory.newDriver(conf); } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java index 7e54dde6f926..f0b136dd8da1 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java @@ -78,7 +78,7 @@ public void setUp() throws Exception { } tests = new JUnit4TestAdapter(this.getClass()).countTestCases(); try { - conf = new HiveConf(ExecDriver.class); + conf = HiveConf.create(ExecDriver.class); SessionState.start(conf); // Test with remote metastore service @@ -90,7 +90,7 @@ public void setUp() throws Exception { // Initialize second mocked filesystem (implement only necessary stuff) // Physical files are resides in local file system in the similar location - jobConf = new HiveConf(conf); + jobConf = HiveConf.create(conf); miniDfs = new MiniDFSCluster(new Configuration(), 1, true, null); fs2 = miniDfs.getFileSystem(); try { diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDatabaseTableDefault.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDatabaseTableDefault.java index 041be2d063f7..8b2e722d0e89 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDatabaseTableDefault.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDatabaseTableDefault.java @@ -89,7 +89,7 @@ public String toString() { @Before public void setUp() throws Exception { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); HiveConf.setBoolVar(hiveConf, 
HiveConf.ConfVars.CREATE_TABLES_AS_ACID, true); HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_CREATE_TABLES_AS_INSERT_ONLY, true); HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java index 05a104f53ea5..96d778b752b0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java @@ -65,7 +65,7 @@ public class TestMetaStoreLimitPartitionRequest { @BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - conf = new HiveConf(); + conf = HiveConf.create(); DriverManager.setLoginTimeout(0); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseExternalDir.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseExternalDir.java index ba090ea096a8..46466085efba 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseExternalDir.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseExternalDir.java @@ -99,7 +99,7 @@ public void tearDown() throws Exception { } public TestWarehouseExternalDir() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // Specify the external warehouse root conf.setVar(ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL, whRootExternal); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java index 127de2301b16..030576ae473f 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java @@ -71,7 +71,7 @@ public class TestHiveHistory { @Before public void setUp() { try { - conf = new HiveConf(HiveHistory.class); + conf = HiveConf.create(HiveHistory.class); SessionState.start(conf); fs = FileSystem.get(conf); @@ -136,7 +136,7 @@ public void testSimpleQuery() { LogUtils.initHiveLog4j(); } catch (LogInitializationException e) { } - HiveConf hconf = new HiveConf(SessionState.class); + HiveConf hconf = HiveConf.create(SessionState.class); hconf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true); CliSessionState ss = new CliSessionState(hconf); ss.in = System.in; @@ -187,7 +187,7 @@ public void testQueryloglocParentDirNotExist() throws Exception { } try { String actualDir = parentTmpDir + "/test"; - HiveConf conf = new HiveConf(SessionState.class); + HiveConf conf = HiveConf.create(SessionState.class); conf.set(HiveConf.ConfVars.HIVEHISTORYFILELOC.toString(), actualDir); SessionState ss = new CliSessionState(conf); HiveHistory hiveHistory = new HiveHistoryImpl(ss); @@ -209,7 +209,7 @@ public void testQueryloglocParentDirNotExist() throws Exception { */ @Test public void testHiveHistoryConfigEnabled() throws Exception { - HiveConf conf = new HiveConf(SessionState.class); + HiveConf conf = HiveConf.create(SessionState.class); conf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true); SessionState ss = new CliSessionState(conf); SessionState.start(ss); @@ -223,7 +223,7 @@ public void testHiveHistoryConfigEnabled() throws Exception { */ @Test public void testHiveHistoryConfigDisabled() throws Exception { - HiveConf conf = new HiveConf(SessionState.class); + HiveConf conf = HiveConf.create(SessionState.class); conf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, false); SessionState ss = new CliSessionState(conf); SessionState.start(ss); diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestAlterTableMetadata.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestAlterTableMetadata.java index 96aeb0f12c10..ebd5478425d5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestAlterTableMetadata.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestAlterTableMetadata.java @@ -35,7 +35,7 @@ public void testAlterTableOwner() throws HiveException, CommandProcessorExceptio * owner metadata of the table in HMS. */ - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); IDriver driver = DriverFactory.newDriver(conf); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java index 594c8dcd52fe..b4e40300cbf6 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java @@ -38,7 +38,7 @@ public class TestSemanticAnalyzerHookLoading { @Test public void testHookLoading() throws Exception { - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName()); conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationAcrossInstances.java index f486fc35a059..245e5e2cca85 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationAcrossInstances.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationAcrossInstances.java @@ -54,7 +54,7 @@ public class BaseReplicationAcrossInstances { static void internalBeforeClassSetup(Map overrides, Class clazz) throws Exception { - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("hive.repl.cmrootdir", "/tmp/"); conf.set("dfs.namenode.acls.enabled", "true"); @@ -86,7 +86,7 @@ static void internalBeforeClassSetupExclusiveReplica(Map primary throws Exception { // Setup replica HDFS. String replicaBaseDir = Files.createTempDirectory("replica").toFile().getAbsolutePath(); - replicaConf = new HiveConf(clazz); + replicaConf = HiveConf.create(clazz); replicaConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, replicaBaseDir); replicaConf.set("dfs.client.use.datanode.hostname", "true"); MiniDFSCluster miniReplicaDFSCluster = @@ -94,7 +94,7 @@ static void internalBeforeClassSetupExclusiveReplica(Map primary // Setup primary HDFS. 
String primaryBaseDir = Files.createTempDirectory("base").toFile().getAbsolutePath(); - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, primaryBaseDir); conf.set("dfs.client.use.datanode.hostname", "true"); MiniDFSCluster miniPrimaryDFSCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).format(true).build(); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationScenariosAcidTables.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationScenariosAcidTables.java index 90aa944fe4da..8e142212ee4c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationScenariosAcidTables.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/BaseReplicationScenariosAcidTables.java @@ -81,7 +81,7 @@ public class BaseReplicationScenariosAcidTables { static void internalBeforeClassSetup(Map overrides, Class clazz) throws Exception { - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("hadoop.proxyuser." 
+ Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestMetaStoreEventListenerInRepl.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestMetaStoreEventListenerInRepl.java index 2704e5a58a7e..e0ae7de6f236 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestMetaStoreEventListenerInRepl.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestMetaStoreEventListenerInRepl.java @@ -62,7 +62,7 @@ public class TestMetaStoreEventListenerInRepl { @BeforeClass public static void internalBeforeClassSetup() throws Exception { - TestMetaStoreEventListenerInRepl.conf = new HiveConf(TestMetaStoreEventListenerInRepl.class); + TestMetaStoreEventListenerInRepl.conf = HiveConf.create(TestMetaStoreEventListenerInRepl.class); TestMetaStoreEventListenerInRepl.conf.set("dfs.client.use.datanode.hostname", "true"); TestMetaStoreEventListenerInRepl.conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithReadOnlyHook.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithReadOnlyHook.java index 379c53bb2f7d..939c4b2b7ee2 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithReadOnlyHook.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithReadOnlyHook.java @@ -43,7 +43,7 @@ public static void classLevelSetup() throws Exception { overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), GzipJSONMessageEncoder.class.getCanonicalName()); - conf = new HiveConf(TestReplWithReadOnlyHook.class); + conf = HiveConf.create(TestReplWithReadOnlyHook.class); conf.set("hadoop.proxyuser." 
+ Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationFilterTransactions.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationFilterTransactions.java index bb64f17c0b83..607ef944aa91 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationFilterTransactions.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationFilterTransactions.java @@ -253,7 +253,7 @@ private Map setupConf(String dfsUri, String listenerClassName) { @Before public void setup() throws Throwable { - TestReplicationFilterTransactions.dfsConf = new HiveConf(TestReplicationFilterTransactions.class); + TestReplicationFilterTransactions.dfsConf = HiveConf.create(TestReplicationFilterTransactions.class); TestReplicationFilterTransactions.dfsConf.set("dfs.client.use.datanode.hostname", "true"); TestReplicationFilterTransactions.dfsConf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*"); TestReplicationFilterTransactions.dfsConf.set("dfs.namenode.acls.enabled", "true"); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOfHiveStreaming.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOfHiveStreaming.java index ef83ec6c7928..72e979b74f5f 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOfHiveStreaming.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOfHiveStreaming.java @@ -69,7 +69,7 @@ public static void classLevelSetup() throws Exception { static void internalBeforeClassSetup(Map overrides, Class clazz) throws Exception { - HiveConf conf = new HiveConf(clazz); + HiveConf conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("hadoop.proxyuser." 
+ Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java index 910dda676582..41b0b1d68637 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java @@ -193,7 +193,7 @@ public static void setUpBeforeClass() throws Exception { static void internalBeforeClassSetup(Map additionalProperties) throws Exception { - hconf = new HiveConf(TestReplicationScenarios.class); + hconf = HiveConf.create(TestReplicationScenarios.class); String metastoreUri = System.getProperty("test."+MetastoreConf.ConfVars.THRIFT_URIS.getHiveName()); if (metastoreUri != null) { hconf.set(MetastoreConf.ConfVars.THRIFT_URIS.getHiveName(), metastoreUri); @@ -245,10 +245,10 @@ static void internalBeforeClassSetup(Map additionalProperties) metaStoreClient = new HiveMetaStoreClient(hconf); FileUtils.deleteDirectory(new File("metastore_db2")); - HiveConf hconfMirrorServer = new HiveConf(); + HiveConf hconfMirrorServer = HiveConf.create(); hconfMirrorServer.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:;databaseName=metastore_db2;create=true"); MetaStoreTestUtils.startMetaStoreWithRetry(hconfMirrorServer, true); - hconfMirror = new HiveConf(hconf); + hconfMirror = HiveConf.create(hconf); MetastoreConf.setBoolVar(hconfMirror, MetastoreConf.ConfVars.EVENT_DB_NOTIFICATION_API_AUTH, false); hconfMirrorServer.set(proxySettingName, "*"); String thriftUri = MetastoreConf.getVar(hconfMirrorServer, MetastoreConf.ConfVars.THRIFT_URIS); @@ -2533,7 +2533,7 @@ public void testReplConfiguredCleanupOfNotificationEvents() throws Exception { @Test public void testCleanerThreadStartupWait() throws Exception { int eventsTtl = 20; - 
HiveConf newConf = new HiveConf(hconf); + HiveConf newConf = HiveConf.create(hconf); // Set TTL short enough for testing. MetastoreConf.setTimeVar(newConf, REPL_EVENT_DB_LISTENER_TTL, eventsTtl, TimeUnit.SECONDS); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTables.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTables.java index e23e9d4bb00f..b6e315d14d06 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTables.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTables.java @@ -109,7 +109,7 @@ public static void classLevelSetup() throws Exception { static void internalBeforeClassSetup(Map overrides, Class clazz) throws Exception { - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("metastore.warehouse.tenant.colocation", "true"); conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*"); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosIncrementalLoadAcidTables.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosIncrementalLoadAcidTables.java index 56db93c6d7a0..adf8b7aded35 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosIncrementalLoadAcidTables.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosIncrementalLoadAcidTables.java @@ -71,7 +71,7 @@ public static void classLevelSetup() throws Exception { static void internalBeforeClassSetup(Map overrides, Class clazz) throws Exception { - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("hadoop.proxyuser." 
+ Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java index 1887587ddbbb..03740bf1eb6d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestScheduledReplicationScenarios.java @@ -85,7 +85,7 @@ public static void classLevelSetup() throws Exception { static void internalBeforeClassSetup(Map overrides, Class clazz) throws Exception { - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestStatsReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestStatsReplicationScenarios.java index e23c542d670b..34c8abaa193d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestStatsReplicationScenarios.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestStatsReplicationScenarios.java @@ -94,7 +94,7 @@ static void internalBeforeClassSetup(Map primaryOverrides, Map replicaOverrides, Class clazz, boolean autogather, AcidTableKind acidTableKind) throws Exception { - conf = new HiveConf(clazz); + conf = HiveConf.create(clazz); conf.set("dfs.client.use.datanode.hostname", "true"); conf.set("hadoop.proxyuser." 
+ Utils.getUGI().getShortUserName() + ".hosts", "*"); MiniDFSCluster miniDFSCluster = diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java index 3695a7e03b3a..6e7c124da9b9 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java @@ -126,7 +126,7 @@ public class WarehouseInstance implements Closeable { private void initialize(String cmRoot, String externalTableWarehouseRoot, String warehouseRoot, Map overridesForHiveConf) throws Exception { - hiveConf = new HiveConf(miniDFSCluster.getConfiguration(0), TestReplicationScenarios.class); + hiveConf = HiveConf.create(miniDFSCluster.getConfiguration(0), TestReplicationScenarios.class); String metaStoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname); if (metaStoreUri != null) { diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/reexec/TestReExecuteKilledTezAMQueryPlugin.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/reexec/TestReExecuteKilledTezAMQueryPlugin.java index 7b71432d0786..3d8155378647 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/reexec/TestReExecuteKilledTezAMQueryPlugin.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/reexec/TestReExecuteKilledTezAMQueryPlugin.java @@ -59,7 +59,7 @@ static HiveConf defaultConf() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } - HiveConf defaultConf = new HiveConf(); + HiveConf defaultConf = HiveConf.create(); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); 
defaultConf.addResource(new URL("file://" + new File(confDir).toURI().getPath() + "/tez-site.xml")); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java index b3383d923bec..930a20e1f05a 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java @@ -57,7 +57,7 @@ protected String getAuthorizationProvider(){ } protected HiveConf createHiveConf() throws Exception { - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); return conf; } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java index b78c348c5203..32ccd953c277 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java @@ -66,7 +66,7 @@ public void setUp() throws Exception { int port = MetaStoreTestUtils.startMetaStoreWithRetry(); - clientHiveConf = new HiveConf(this.getClass()); + clientHiveConf = HiveConf.create(this.getClass()); clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java index 948ab4d10370..7a3f99016b9c 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java @@ -71,7 +71,7 @@ public void setUp() throws Exception { int port = MetaStoreTestUtils.startMetaStoreWithRetry(); - clientHiveConf = new HiveConf(this.getClass()); + clientHiveConf = HiveConf.create(this.getClass()); // Turn on client-side authorization clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,true); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java index 3fa1d0d5b50d..4596b44b4af6 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java @@ -82,7 +82,7 @@ protected String getAuthorizationProvider(){ } protected HiveConf createHiveConf() throws Exception { - return new HiveConf(this.getClass()); + return HiveConf.create(this.getClass()); } protected String getProxyUserName() { diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java index dbd71cb0ceb4..3bf3171e4396 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java @@ -54,7 +54,7 @@ public void setUp() throws Exception { int port = MetaStoreTestUtils.startMetaStoreWithRetry(); - clientHiveConf = new HiveConf(this.getClass()); + clientHiveConf 
= HiveConf.create(this.getClass()); // Turn on client-side authorization clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,true); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java index d1e80698efa9..1b5d7d7ed250 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java @@ -61,7 +61,7 @@ public static void setUp() throws Exception { int port = MetaStoreTestUtils.startMetaStoreWithRetry(); - clientHiveConf = new HiveConf(); + clientHiveConf = HiveConf.create(); clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java index e306a9970bcb..122d8c5f5ec2 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java @@ -97,7 +97,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); // Turn on mocked authorization conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java index 36ac85b73095..8a61d7c63b80 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java @@ -113,7 +113,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); // Turn on mocked authorization conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java index 82d3db5910b9..30040ecfbf29 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java @@ -47,7 +47,7 @@ public class TestClearDanglingScratchDir { @BeforeClass static public void oneTimeSetup() throws Exception { m_dfs = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(1).format(true).build(); - conf = new HiveConf(); + conf = HiveConf.create(); conf.set(HiveConf.ConfVars.HIVE_SCRATCH_DIR_LOCK.toString(), "true"); conf.set(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.toString(), "true"); LoggerFactory.getLogger("SessionState"); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestUdfClassLoaderAcrossSessions.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestUdfClassLoaderAcrossSessions.java index f619e6f7d2a2..dc6fd2edc7d0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestUdfClassLoaderAcrossSessions.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestUdfClassLoaderAcrossSessions.java @@ -32,7 +32,7 @@ public class TestUdfClassLoaderAcrossSessions { @Test public void testDropDatabaseCascadeDoesNotThrow() throws CommandProcessorException, IOException { - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); IDriver driver = DriverFactory.newDriver(conf); @@ -48,7 +48,7 @@ public void testDropDatabaseCascadeDoesNotThrow() throws CommandProcessorExcepti @Test public void testDropFunctionDoesNotThrow() throws CommandProcessorException, IOException { - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); IDriver driver = DriverFactory.newDriver(conf); @@ -65,7 +65,7 @@ public void testDropFunctionDoesNotThrow() throws CommandProcessorException, IOE @Test public void testUseBeforeDropDatabaseCascadeDoesNotThrow() throws CommandProcessorException, IOException { - HiveConf conf = new HiveConf(this.getClass()); + HiveConf conf = HiveConf.create(this.getClass()); conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); IDriver driver = DriverFactory.newDriver(conf); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorOnTezTest.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorOnTezTest.java index 37f06371ba42..698968779c71 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorOnTezTest.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorOnTezTest.java @@ -82,7 +82,7 @@ public abstract class CompactorOnTezTest { @Before // Note: we create a new conf and driver object before every test public void setup() throws Exception { - HiveConf hiveConf = new HiveConf(this.getClass()); + HiveConf hiveConf = HiveConf.create(this.getClass()); setupWithConf(hiveConf); } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorTestUtil.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorTestUtil.java index 8dd6087a66a7..a5ecaf904d0a 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorTestUtil.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorTestUtil.java @@ -151,7 +151,7 @@ static List getBucketFileNamesWithoutAttemptId(FileSystem fs, Table tabl */ static void runCompaction(HiveConf conf, String dbName, String tblName, CompactionType compactionType, boolean isQueryBased, Map properties, String... 
partNames) throws Exception { - HiveConf hiveConf = new HiveConf(conf); + HiveConf hiveConf = HiveConf.create(conf); hiveConf.setBoolVar(HiveConf.ConfVars.COMPACTOR_CRUD_QUERY_BASED, isQueryBased); TxnStore txnHandler = TxnUtils.getTxnStore(hiveConf); Worker t = new Worker(); @@ -197,7 +197,7 @@ static void runCleaner(HiveConf hConf) throws Exception { // Wait for the cooldown period so the Cleaner can see last committed txn as the highest committed watermark Thread.sleep(MetastoreConf.getTimeVar(hConf, MetastoreConf.ConfVars.TXN_OPENTXN_TIMEOUT, TimeUnit.MILLISECONDS)); - HiveConf hiveConf = new HiveConf(hConf); + HiveConf hiveConf = HiveConf.create(hConf); Cleaner t = new Cleaner(); t.setConf(hiveConf); t.init(new AtomicBoolean(true)); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java index 4a7bb34bad1a..fce68f368746 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java @@ -50,7 +50,7 @@ public class TestCleanerWithReplication extends CompactorTest { @Before public void setup() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set("fs.defaultFS", miniDFSCluster.getFileSystem().getUri().toString()); conf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true); setup(conf); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactorBase.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactorBase.java index 8b6e57c8d0fa..2c7e5cffc5dd 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactorBase.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactorBase.java @@ -80,7 +80,7 @@ public void setup() throws Exception { throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR); } - HiveConf hiveConf = new HiveConf(this.getClass()); + HiveConf hiveConf = HiveConf.create(this.getClass()); hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, ""); hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, ""); hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCrudCompactorOnTez.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCrudCompactorOnTez.java index c4d5fde2e120..cb46684c8310 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCrudCompactorOnTez.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCrudCompactorOnTez.java @@ -2219,7 +2219,7 @@ public void testCompactionWithSchemaEvolutionAndBuckets() throws Exception { @Test public void testCompactionWithSchemaEvolutionNoBucketsMultipleReducers() throws Exception { - HiveConf hiveConf = new HiveConf(conf); + HiveConf hiveConf = HiveConf.create(conf); hiveConf.setIntVar(HiveConf.ConfVars.MAXREDUCERS, 2); hiveConf.setIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS, 2); driver = DriverFactory.newDriver(hiveConf); @@ -2721,7 +2721,7 @@ public boolean run(CompactorContext context) throws IOException { CompactionInfo ciMock = mock(CompactionInfo.class); ciMock.runAs = "hive"; List emptyQueries = new ArrayList<>(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.set(ValidTxnList.VALID_TXNS_KEY, "8:9223372036854775807::"); // Check for default case. 
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMmCompactorOnTez.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMmCompactorOnTez.java index 0a57c4588337..a87ac322a692 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMmCompactorOnTez.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMmCompactorOnTez.java @@ -479,7 +479,7 @@ private void testMmMinorCompactionPartitionedWithBuckets(String fileFormat) thro @Test public void testMmMinorCompactionWithSchemaEvolutionNoBucketsMultipleReducers() throws Exception { - HiveConf hiveConf = new HiveConf(conf); + HiveConf hiveConf = HiveConf.create(conf); hiveConf.setIntVar(HiveConf.ConfVars.MAXREDUCERS, 2); hiveConf.setIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS, 2); driver = DriverFactory.newDriver(hiveConf); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java index ae971225d1ff..9d8fd1212ef5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java @@ -68,7 +68,7 @@ public void testFieldComments() throws MetaException, SerDeException { Deserializer mockDe = mock(Deserializer.class); when(mockDe.getObjectInspector()).thenReturn(mockSOI); List result = - HiveMetaStoreUtils.getFieldsFromDeserializer("testTable", mockDe, new HiveConf()); + HiveMetaStoreUtils.getFieldsFromDeserializer("testTable", mockDe, HiveConf.create()); assertEquals(2, result.size()); assertEquals("first", result.get(0).getName()); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java index 
e8ebf251297d..e98b709a427b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java @@ -119,7 +119,7 @@ private static void createTable() throws ClassNotFoundException, SQLException { stmt.execute("set hive.support.concurrency = false"); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); Path dataFilePath = new Path(dataFileDir, "kv1.txt"); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java index b6d01ce57893..d873b979d17b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java @@ -52,7 +52,7 @@ public class TestBeelinePasswordOption { */ @BeforeClass public static void preTests() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); // Set to non-zk lock manager to prevent HS2 from trying to connect hiveConf.setVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java index 13679867c1b2..54d8f9de191b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java @@ -56,7 +56,7 @@ @RunWith(Parameterized.class) public abstract class BeelineWithHS2ConnectionFileTestBase { protected 
MiniHS2 miniHS2; - protected HiveConf hiveConf = new HiveConf(); + protected HiveConf hiveConf = HiveConf.create(); protected final String tableName = "testBeelineTable"; protected String dataFileDir = hiveConf.get("test.data.files"); protected static final String LOCALHOST_KEY_STORE_NAME = "keystore.jks"; @@ -169,7 +169,7 @@ public void before() throws Exception { dataFileDir = System.getProperty("test.data.files"); } dataFileDir = dataFileDir.replace('\\', '/').replace("c:", ""); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); miniHS2 = getNewMiniHS2(); confOverlay = new HashMap(); confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java index 44c67ec4dae1..ce1a7d5f725a 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java @@ -69,7 +69,7 @@ public static void beforeTest() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java index 799d8ffc1dac..3ca7712aa62f 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java @@ -64,7 +64,7 @@ public static void beforeTest() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java index 2ec3d48e9017..1b27a7e4e0d9 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java @@ -101,7 +101,7 @@ static HiveConf defaultConf() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } - HiveConf defaultConf = new HiveConf(); + HiveConf defaultConf = HiveConf.create(); defaultConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); defaultConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); defaultConf.addResource(new URL("file://" + new File(confDir).toURI().getPath() + "/tez-site.xml")); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java index 22c67534ee7b..f45a2f841f9b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java @@ -103,12 +103,12 @@ public static void afterTest() throws 
Exception { @Before public void setUp() throws Exception { - hiveConf1 = new HiveConf(); + hiveConf1 = HiveConf.create(); hiveConf1.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Set up zookeeper dynamic service discovery configs setHAConfigs(hiveConf1); miniHS2_1 = new MiniHS2.Builder().withConf(hiveConf1).cleanupLocalDirOnStartup(false).build(); - hiveConf2 = new HiveConf(); + hiveConf2 = HiveConf.create(); hiveConf2.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Set up zookeeper dynamic service discovery configs setHAConfigs(hiveConf2); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 9256d288f221..2c55dfbaf622 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -191,8 +191,8 @@ private static void createTestTables(Statement stmt, String testDbName) throws S @SuppressWarnings("deprecation") @BeforeClass public static void setUpBeforeClass() throws Exception { - conf = new HiveConf(TestJdbcDriver2.class); - HiveConf initConf = new HiveConf(conf); + conf = HiveConf.create(TestJdbcDriver2.class); + HiveConf initConf = HiveConf.create(conf); TestTxnDbUtil.setConfValues(initConf); TestTxnDbUtil.prepDb(initConf); dataFileDir = conf.get("test.data.files").replace('\\', '/') diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java index 26ea18607557..98659ffdae9c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java @@ -67,7 +67,7 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti @BeforeClass public static void beforeTest() throws Exception { 
Class.forName(MiniHS2.getJdbcDriverName()); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index 2436f5f9d8f1..1e7869b29aa2 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -106,7 +106,7 @@ public class TestJdbcWithMiniHS2 { @BeforeClass public static void setupBeforeClass() throws Exception { MiniHS2.cleanupLocalDir(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", ""); kvDataFilePath = new Path(dataFileDir, "kv1.txt"); @@ -204,7 +204,7 @@ private static void restoreMiniHS2AndConnections() throws Exception { } } stopMiniHS2(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); startMiniHS2(conf); openDefaultConnections(); openTestConnections(); @@ -790,7 +790,7 @@ private void unsetSerializeInTasksInConf(Statement stmt) throws SQLException { public void testSessionScratchDirs() throws Exception { // Stop HiveServer2 stopMiniHS2(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String userName; Path scratchDirPath; // Set a custom prefix for hdfs scratch dir path @@ -858,7 +858,7 @@ public void testSessionScratchDirs() throws Exception { */ @Test public void testUdfWhiteBlackList() throws Exception { - HiveConf testConf = new HiveConf(); + HiveConf testConf = HiveConf.create(); assertTrue(testConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST).isEmpty()); // verify that udf in default whitelist can be executed Statement stmt = conDefault.createStatement(); @@ -902,7 +902,7 @@ 
public void testUdfWhiteBlackList() throws Exception { */ @Test public void testUdfBlackList() throws Exception { - HiveConf testConf = new HiveConf(); + HiveConf testConf = HiveConf.create(); assertTrue(testConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST).isEmpty()); Statement stmt = conDefault.createStatement(); // verify that udf in default whitelist can be executed @@ -933,7 +933,7 @@ public void testUdfBlackList() throws Exception { public void testUdfBlackListOverride() throws Exception { stopMiniHS2(); // setup whitelist - HiveConf testConf = new HiveConf(); + HiveConf testConf = HiveConf.create(); Set funcNames = FunctionRegistry.getFunctionNames(); String funcNameStr = ""; @@ -969,7 +969,7 @@ public void testUdfBlackListOverride() throws Exception { public void testRootScratchDir() throws Exception { // Stop HiveServer2 stopMiniHS2(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String userName; Path scratchDirPath; conf.set("hive.exec.scratchdir", tmpDir + "/hs2"); @@ -1019,7 +1019,7 @@ private void verifyScratchDir(HiveConf conf, FileSystem fs, Path scratchDirPath, public void testHttpHeaderSize() throws Exception { // Stop HiveServer2 stopMiniHS2(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_REQUEST_HEADER_SIZE, 1024); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_RESPONSE_HEADER_SIZE, 1024); startMiniHS2(conf, true); @@ -1089,7 +1089,7 @@ public void testHttpHeaderSize() throws Exception { public void testHttpRetryOnServerIdleTimeout() throws Exception { // Stop HiveServer2 stopMiniHS2(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // Set server's idle timeout to a very low value conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME, "5000"); startMiniHS2(conf, true); @@ -1457,7 +1457,7 @@ public void testReplDumpResultSet() throws Exception { String testPathName = 
System.getProperty("test.warehouse.dir", "/tmp") + Path.SEPARATOR + tid; Path testPath = new Path(testPathName + Path.SEPARATOR + Base64.getEncoder().encodeToString(testDbName.toLowerCase().getBytes(StandardCharsets.UTF_8))); - FileSystem fs = testPath.getFileSystem(new HiveConf()); + FileSystem fs = testPath.getFileSystem(HiveConf.create()); Statement stmt = conDefault.createStatement(); try { stmt.execute("set hive.repl.rootdir = " + testPathName); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2ErasureCoding.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2ErasureCoding.java index 1923e262fa48..c39f2e0ac902 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2ErasureCoding.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2ErasureCoding.java @@ -67,7 +67,7 @@ public class TestJdbcWithMiniHS2ErasureCoding { @BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); DriverManager.setLoginTimeout(0); miniHS2 = new MiniHS2.Builder() diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java index 9f2fb3235891..7705d646bc16 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithServiceDiscovery.java @@ -72,7 +72,7 @@ public static void setup() throws Exception { // Create one MiniHS2 with miniMRCluster and one with Local FS only HiveConf hiveConf1 = loadConf(); - HiveConf hiveConf2 = new HiveConf(); + HiveConf hiveConf2 = HiveConf.create(); setSDConfigs(hiveConf1); setSDConfigs(hiveConf2); @@ -154,7 +154,7 @@ private static HiveConf 
loadConf() throws Exception { String confDir = "../../data/conf/"; HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); - HiveConf defaultConf = new HiveConf(); + HiveConf defaultConf = HiveConf.create(); return defaultConf; } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestKillQueryWithAuthorizationDisabled.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestKillQueryWithAuthorizationDisabled.java index 59b775c16e00..96c5a09f3524 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestKillQueryWithAuthorizationDisabled.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestKillQueryWithAuthorizationDisabled.java @@ -93,7 +93,7 @@ static HiveConf defaultConf() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } - HiveConf defaultConf = new HiveConf(); + HiveConf defaultConf = HiveConf.create(); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_ENABLED, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java index 4ec86c0c1338..72c3b822640e 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java @@ -58,7 +58,7 @@ public void run(HiveSessionHookContext sessionHookContext) @BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); 
conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setVar(ConfVars.HIVE_SERVER2_SESSION_HOOK, diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java index 1b37a33e099e..254268fee5de 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java @@ -58,7 +58,7 @@ public static void startServices() throws Exception { new URL("file://" + new File(confDir).toURI().getPath() + "/hivemetastore-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java index ec6c65f75a25..16a7431fdd36 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java @@ -67,7 +67,7 @@ public class TestSSL { private static final String KEY_MANAGER_FACTORY_ALGORITHM = "SunX509"; private MiniHS2 miniHS2 = null; - private static HiveConf conf = new HiveConf(); + private static HiveConf conf = HiveConf.create(); private Connection hs2Conn = null; private String dataFileDir = SSLTestUtils.getDataFileDir(); private Map confOverlay; diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java index 8512ff4ad6c9..4e107b199ce6 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java @@ -55,7 +55,7 @@ public static void beforeTest() throws Exception { MiniHS2.cleanupLocalDir(); zkServer = new TestingServer(); Class.forName(MiniHS2.getJdbcDriverName()); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Set up zookeeper dynamic service discovery configs enableZKServiceDiscoveryConfigs(hiveConf); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java index 097a7db9759a..bb97b267b157 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java @@ -70,7 +70,7 @@ public static void beforeTest() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java index 06dd6d04a7c2..c9cbb131e687 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java @@ -58,7 +58,7 @@ public static void beforeTest() throws Exception { String confDir = 
"../../data/conf/llap/"; HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator"); java.nio.file.Path confPath = File.createTempFile("hive", "test").toPath(); conf.writeXml(new FileWriter(confPath.toFile())); @@ -66,7 +66,7 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL, 100, TimeUnit.MILLISECONDS); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestWMMetricsWithTrigger.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestWMMetricsWithTrigger.java index d9e368dae9c6..c141e5e82cf1 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestWMMetricsWithTrigger.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestWMMetricsWithTrigger.java @@ -82,7 +82,7 @@ static HiveConf defaultConf() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } - HiveConf defaultConf = new HiveConf(); + HiveConf defaultConf = HiveConf.create(); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); defaultConf.setBoolVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_ENABLED, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java index b5a3568ab2cb..454ee3f00d95 100644 
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java @@ -83,7 +83,7 @@ public static void afterClass() throws IOException { // If there is a better way to do this, we should do it. private void initHS2(boolean enableXSRFFilter) throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build(); dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", ""); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java index fa359f9f8f3b..20e3c5f1c9e5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java @@ -60,7 +60,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java index 977fe4332057..abe114360257 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java @@ -70,7 +70,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java index 60d939e83ba3..61cde57eae9d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java @@ -60,7 +60,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java index aae43c9ea5a6..565dea073144 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java @@ -102,7 +102,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, TestAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java index 8130c51413bf..ff1ecf3aee65 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java @@ -64,7 +64,7 @@ public void shutDownHS2() throws Exception { @Test public void testBlackListedUdfUsage() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST, "sqrt"); startHS2(conf); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java index 1f05be10a73a..e4e047dce24d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java @@ -46,7 +46,7 @@ public class TestJdbcWithSQLAuthorization { @BeforeClass public static void beforeTest() throws Exception { 
Class.forName(MiniHS2.getJdbcDriverName()); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java index b43a1b7586de..c0189e6900bf 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java @@ -98,7 +98,7 @@ public class cbo_rp_TestJdbcDriver2 { private static final float floatCompareDelta = 0.0001f; public cbo_rp_TestJdbcDriver2() { - conf = new HiveConf(cbo_rp_TestJdbcDriver2.class); + conf = HiveConf.create(cbo_rp_TestJdbcDriver2.class); dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/Hs2ConnectionMetrics.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/Hs2ConnectionMetrics.java index 06c279ebf2d7..852cefa3bc1c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/Hs2ConnectionMetrics.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/Hs2ConnectionMetrics.java @@ -39,7 +39,7 @@ public abstract class Hs2ConnectionMetrics { protected static final String PASSWORD = "foo"; public static void setup() throws Exception { - miniHS2 = new MiniHS2(new HiveConf()); + miniHS2 = new MiniHS2(HiveConf.create()); confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java index 048400c12d2c..fd40de4cdd19 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java @@ -61,7 +61,7 @@ public void testRunCluster() throws Exception { HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confFile).toURI().getPath())); break; } - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java index be9be1a65029..7f71125ff989 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java @@ -43,7 +43,7 @@ public class TestHiveServer2 { @BeforeClass public static void beforeTest() throws Exception { - miniHS2 = new MiniHS2(new HiveConf()); + miniHS2 = new MiniHS2(HiveConf.create()); confOverlay = new HashMap(); confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java index f61ebca62d9b..9e7e9b1e0798 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java @@ -43,7 +43,7 @@ public class TestHiveServer2SessionTimeout { @BeforeClass public 
static void beforeTest() throws Exception { - miniHS2 = new MiniHS2(new HiveConf()); + miniHS2 = new MiniHS2(HiveConf.create()); } @Before diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java index d6631729d1b5..343f76bafd44 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java @@ -73,7 +73,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, @BeforeClass public static void setup() throws Exception { - miniHS2 = new MiniHS2(new HiveConf()); + miniHS2 = new MiniHS2(HiveConf.create()); confOverlay = new HashMap(); confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, MetricCheckingHook.class.getName()); @@ -85,7 +85,7 @@ public static void setup() throws Exception { @Before public void before() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); MetricsFactory.close(); MetricsFactory.init(conf); } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java index abc6977fea92..886275fcf922 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java @@ -56,7 +56,7 @@ public void tearDown() throws Exception { */ @Test public void testConfInSession() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); final String DUMMY_CONF_KEY = "hive.test.minihs2.dummy.config"; final String DUMMY_CONF_VAL = "dummy.val"; hiveConf.set(DUMMY_CONF_KEY, DUMMY_CONF_VAL); diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java b/itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java index f088bc651c9f..61d8b981857f 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java @@ -67,7 +67,7 @@ public class TestDFSErrorHandling @BeforeClass public static void startServices() throws Exception { - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java b/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java index 0d396b6bfc0e..d2bc34bddce0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java @@ -46,7 +46,7 @@ public class TestHS2ImpersonationWithRemoteMS { @BeforeClass public static void startServices() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java index 9705c65af893..5877e420dd5f 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java @@ -42,7 +42,7 @@ public class TestCustomAuthentication { @BeforeClass public static void setUp() throws Exception { - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); hiveConf.writeXml(baos); baos.close(); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TrustDomainAuthenticationTest.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TrustDomainAuthenticationTest.java index e83ac0e97f56..39c47f0503ea 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TrustDomainAuthenticationTest.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TrustDomainAuthenticationTest.java @@ -60,7 +60,7 @@ static void initialize(String transportMode, boolean useProperTrustedDomain) thr hs2TransportMode = transportMode; properTrustedDomain = useProperTrustedDomain; - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); hiveConf.writeXml(baos); baos.close(); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/jwt/TestHttpJwtAuthentication.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/jwt/TestHttpJwtAuthentication.java index 202ff0d481cf..8cc25ab5cfc1 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/jwt/TestHttpJwtAuthentication.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/jwt/TestHttpJwtAuthentication.java @@ -113,7 +113,7 @@ public static void setupHS2() throws Exception { .willReturn(ok() .withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath())))); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false); 
conf.setBoolVar(ConfVars.HIVESTATSCOLAUTOGATHER, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java index d71fdedf7ea9..8186f2236027 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/saml/TestHttpSamlAuthentication.java @@ -105,7 +105,7 @@ public class TestHttpSamlAuthentication { @BeforeClass public static void setupHS2() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false); conf.setBoolVar(ConfVars.HIVESTATSCOLAUTOGATHER, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java index 819838d09120..c69805b7844f 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java @@ -37,7 +37,7 @@ public class TestEmbeddedThriftBinaryCLIService extends CLIServiceTest { @BeforeClass public static void setUpBeforeClass() throws Exception { service = new EmbeddedThriftBinaryCLIService(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolean("datanucleus.schema.autoCreateTables", true); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); UtilsForTest.expandHiveConfParams(conf); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java 
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java index c7dade3874a9..5283ea386b6c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java @@ -61,7 +61,7 @@ public static void setUpBeforeClass() throws Exception { "", "" }; - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.set(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose"); miniHS2 = new MiniHS2(hiveConf); confOverlay = new HashMap(); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java index e2a7d3385fa5..858c30cbbd1c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java @@ -69,7 +69,7 @@ public class TestOperationLoggingLayout { @BeforeClass public static void setUpBeforeClass() throws Exception { tableName = "TestOperationLoggingLayout_table"; - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution"); miniHS2 = new MiniHS2(hiveConf); confOverlay = new HashMap(); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java index a5784d41882a..82e88840fa04 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestHiveSessionImpl.java @@ -49,7 +49,7 @@ public void 
testLeakOperationHandle() throws HiveSQLException { TProtocolVersion protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2; String username = ""; String password = ""; - HiveConf serverhiveConf = new HiveConf(); + HiveConf serverhiveConf = HiveConf.create(); String ipAddress = null; HiveSessionImpl session = new HiveSessionImpl(null, protocol, username, password, serverhiveConf, ipAddress, null) { diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java index 86425ad7754a..4da8a46db987 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java @@ -49,7 +49,7 @@ public class TestQueryDisplay { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set("hive.support.concurrency", "false"); sessionManager = new SessionManager(null, true); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java index 0df3058359c7..22a2acd0b4b7 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestMiniHS2StateWithNoZookeeper.java @@ -52,7 +52,7 @@ public class TestMiniHS2StateWithNoZookeeper { @BeforeClass public static void beforeTest() throws Exception { MiniHS2.cleanupLocalDir(); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY, true); hiveConf.setIntVar(ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES, 0); hiveConf.setTimeVar(ConfVars.HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME, 0, 
TimeUnit.MILLISECONDS); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java index d59e4f1655f8..fce4a932b01c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java @@ -52,7 +52,7 @@ public static void setUpBeforeClass() throws Exception { // Find a free port port = MetaStoreTestUtils.findFreePort(); hiveServer2 = new HiveServer2(); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); } /** diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java index a27bf21a44ac..6197004ff543 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java @@ -67,7 +67,7 @@ public void setUp() throws Exception { while (cliPort == webuiPort) { webuiPort = MetaStoreTestUtils.findFreePort(); } - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_USE_SSL, false); hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java index 9573e5050c61..873cacd57220 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java @@ -192,7 +192,7 @@ public static void setupInternal(boolean zookeeperSSLEnabled) throws Exception { zkCluster = new MiniZooKeeperCluster(zookeeperSSLEnabled); int zkPort = zkCluster.startup(zkDataDir); - miniHS2 = new MiniHS2(new HiveConf()); + miniHS2 = new MiniHS2(HiveConf.create()); confOverlay = new HashMap(); Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test" + File.separator + "tmp")); @@ -218,7 +218,7 @@ public static void setupInternal(boolean zookeeperSSLEnabled) throws Exception { if(zookeeperSSLEnabled) { String dataFileDir = !System.getProperty("test.data.files", "").isEmpty() ? System.getProperty("test.data.files") : - (new HiveConf()).get("test.data.files").replace('\\', '/').replace("c:", ""); + (HiveConf.create()).get("test.data.files").replace('\\', '/').replace("c:", ""); confOverlay.put(ConfVars.HIVE_ZOOKEEPER_SSL_KEYSTORE_LOCATION.varname, dataFileDir + File.separator + LOCALHOST_KEY_STORE_NAME); confOverlay.put(ConfVars.HIVE_ZOOKEEPER_SSL_KEYSTORE_PASSWORD.varname, diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestGracefulStopHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestGracefulStopHS2.java index 164a63315e41..b1e8eb7c9185 100755 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestGracefulStopHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestGracefulStopHS2.java @@ -48,7 +48,7 @@ public class TestGracefulStopHS2 { public static void setupBeforeClass() throws Exception { MiniHS2.cleanupLocalDir(); try { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); 
conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false); conf.setBoolVar(HiveConf.ConfVars.HIVESTATSCOLAUTOGATHER, false); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestHS2ClearDanglingScratchDir.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestHS2ClearDanglingScratchDir.java index 1adfdebdf2be..046843f9b70d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestHS2ClearDanglingScratchDir.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestHS2ClearDanglingScratchDir.java @@ -35,7 +35,7 @@ public class TestHS2ClearDanglingScratchDir { @Test public void testScratchDirCleared() throws Exception { MiniDFSCluster m_dfs = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(1).format(true).build(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.addResource(m_dfs.getConfiguration(0)); conf.set(HiveConf.ConfVars.HIVE_SCRATCH_DIR_LOCK.toString(), "true"); conf.set(HiveConf.ConfVars.HIVE_SERVER2_CLEAR_DANGLING_SCRATCH_DIR.toString(), "true"); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java index 0d343545e4d9..9f2376172b30 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java @@ -158,7 +158,7 @@ private void setupUniqueTestPath() { public Map getHiveVariable() { return null; } - }).substitute(new HiveConf(), qt.getConf().get(HCONF_TEST_BLOBSTORE_PATH)); + }).substitute(HiveConf.create(), qt.getConf().get(HCONF_TEST_BLOBSTORE_PATH)); testBlobstorePath = HiveTestEnvSetup.ensurePathEndsInSlash(testBlobstorePath); testBlobstorePath += 
HiveTestEnvSetup.ensurePathEndsInSlash(this.getClass().getSimpleName()); // name of child class diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java index 292e7abf168f..d71c88c2ba89 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java @@ -100,7 +100,7 @@ public CoreBeeLineDriver(AbstractCliConfig testCliConfig) { } private static MiniHS2 createMiniServer() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); // We do not need Zookeeper at the moment hiveConf.set(HiveConf.ConfVars.HIVE_LOCK_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index e9c86372bc98..1bebdd348daa 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -214,7 +214,7 @@ public QTestUtil(QTestArguments testArgs) throws Exception { // For testing configurations set by System.setProperties System.setProperty("hive.query.max.length", "100Mb"); - conf = new HiveConf(IDriver.class); + conf = HiveConf.create(IDriver.class); setCustomConfs(conf, testArgs.getCustomConfs()); setMetaStoreProperties(); @@ -241,7 +241,7 @@ public QTestUtil(QTestArguments testArgs) throws Exception { this.initScript = scriptsDir + File.separator + testArgs.getInitScript(); this.cleanupScript = scriptsDir + File.separator + testArgs.getCleanupScript(); - savedConf = new HiveConf(conf); + savedConf = HiveConf.create(conf); } @@ -429,7 +429,7 @@ public void newSession() throws Exception { public void newSession(boolean canReuseSession) throws Exception { 
// allocate and initialize a new conf since a test can // modify conf by using 'set' commands - conf = new HiveConf(savedConf); + conf = HiveConf.create(savedConf); initConf(); initConfFromSetup(); diff --git a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java index 9e95d3b2db92..a10a4f45c8ea 100644 --- a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java +++ b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java @@ -97,7 +97,7 @@ public enum MiniClusterType { } public static class Builder { - private HiveConf hiveConf = new HiveConf(); + private HiveConf hiveConf = HiveConf.create(); private MiniClusterType miniClusterType = MiniClusterType.LOCALFS_ONLY; private boolean useMiniKdc = false; private String serverPrincipal; diff --git a/jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java b/jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java index a389285f3cd1..4898f88ccd6e 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java +++ b/jdbc/src/java/org/apache/hive/jdbc/EmbeddedCLIServicePortal.java @@ -43,7 +43,7 @@ public static Iface get(Map hiveConfs) { } private static HiveConf buildOverlayedConf(Map confOverlay) { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); if (confOverlay != null && !confOverlay.isEmpty()) { // apply overlay query specific settings, if any for (Map.Entry confEntry : confOverlay.entrySet()) { diff --git a/llap-client/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapZookeeperRegistryImpl.java b/llap-client/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapZookeeperRegistryImpl.java index 7e3c20884b60..124041933f91 100644 --- a/llap-client/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapZookeeperRegistryImpl.java +++ b/llap-client/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapZookeeperRegistryImpl.java @@ -41,7 
+41,7 @@ public class TestLlapZookeeperRegistryImpl { - private HiveConf hiveConf = new HiveConf(); + private HiveConf hiveConf = HiveConf.create(); private LlapZookeeperRegistryImpl registry; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/AsyncTaskCreateUdfFile.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/AsyncTaskCreateUdfFile.java index 01fd8b6e0091..82333e499eb1 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/AsyncTaskCreateUdfFile.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/AsyncTaskCreateUdfFile.java @@ -90,10 +90,10 @@ public Void call() throws Exception { private Set downloadPermanentFunctions() throws HiveException, URISyntaxException, IOException { Map udfs = new HashMap(); - HiveConf hiveConf = new HiveConf(conf); + HiveConf hiveConf = HiveConf.create(conf); // disable expensive operations on the metastore hiveConf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), false); - // performance problem: ObjectStore does its own new HiveConf() + // performance problem: ObjectStore does its own HiveConf.create() Hive hive = Hive.getWithFastCheck(hiveConf, false); ResourceDownloader resourceDownloader = new ResourceDownloader(conf, udfDir.toUri().normalize().getPath()); List fns = hive.getAllFunctions(); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/LlapServiceDriver.java index e0a02a50cb90..95ef5da48815 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/LlapServiceDriver.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/LlapServiceDriver.java @@ -73,7 +73,7 @@ public LlapServiceDriver(LlapServiceCommandLine cl) throws Exception { this.cl = cl; SessionState ss = SessionState.get(); - this.conf = (ss != null) ? ss.getConf() : new HiveConf(SessionState.class); + this.conf = (ss != null) ? 
ss.getConf() : HiveConf.create(SessionState.class); HiveConfUtil.copyFromProperties(cl.getConfig(), this.conf); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java index b0e27c01e79d..96bbe7d8fb55 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java @@ -129,7 +129,7 @@ private static LlapStatusServiceDriver createServiceDriver() { public LlapStatusServiceDriver() { SessionState ss = SessionState.get(); - conf = (ss != null) ? ss.getConf() : new HiveConf(SessionState.class); + conf = (ss != null) ? ss.getConf() : HiveConf.create(SessionState.class); setupConf(); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java index 0bb16366b52b..ba5c1e6c63f2 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java @@ -83,7 +83,7 @@ public void serviceInit(Configuration conf) { String bindAddress = "0.0.0.0"; HttpServer.Builder builder = new HttpServer.Builder("llap").setPort(this.port).setHost(bindAddress); - builder.setConf(new HiveConf(conf, HiveConf.class)); + builder.setConf(HiveConf.create(conf, HiveConf.class)); builder.setDisableDirListing(true); if (conf.getBoolean(ConfVars.LLAP_DAEMON_WEB_XFRAME_ENABLED.varname, ConfVars.LLAP_DAEMON_WEB_XFRAME_ENABLED.defaultBoolVal)) { diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java index 8c38ac10cdf9..4e80120fa50e 100644 --- 
a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java @@ -460,7 +460,7 @@ public void testHotBuffersCutoff() { private void testProactiveEviction(float lambda, boolean isInstantDealloc) throws Exception { closeSweeperExecutorForTest(); int lrfuMaxSize = 10; - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // This is to make sure no sweep happens automatically in the background, the test here will call evictProactively() // on the policy conf.setTimeVar(HiveConf.ConfVars.LLAP_IO_PROACTIVE_EVICTION_SWEEP_INTERVAL, 1, TimeUnit.HOURS); diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestProactiveEviction.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestProactiveEviction.java index 89c8f6055038..f7dbf36bcb68 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestProactiveEviction.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestProactiveEviction.java @@ -111,13 +111,13 @@ public void testProactiveSweep() throws Exception { closeSweeperExecutorForTest(); // Test that proactive sweeper thread does not get created if we turn the feature off - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.LLAP_IO_PROACTIVE_EVICTION_ENABLED, false); new DummyPolicy(conf); assertFalse(isProactiveEvictionSweeperThreadStarted()); // Below here - testing with the feature turned on - conf = new HiveConf(); + conf = HiveConf.create(); // NOTE: Choosing a too small value (<10ms) here can make this test case flaky long sweepIntervalInMs = 200; conf.setTimeVar(HiveConf.ConfVars.LLAP_IO_PROACTIVE_EVICTION_SWEEP_INTERVAL, sweepIntervalInMs, diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/LlapDaemonExtension.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/LlapDaemonExtension.java index 
c180a3860f6d..c1183b9d43fb 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/LlapDaemonExtension.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/LlapDaemonExtension.java @@ -59,7 +59,7 @@ public void beforeEach(ExtensionContext context) throws Exception { throw new IllegalStateException("Lock acquisition failed cause another test is using the LlapDaemon."); } final String appName = "testLlapDaemon" + context.getUniqueId(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, HiveConf.ConfVars.LLAP_DAEMON_SERVICE_HOSTS, "llap"); LlapDaemonInfo.initialize(appName, conf); daemon = diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemon.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemon.java index f1e4265a5a34..5bd955702b52 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemon.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemon.java @@ -55,7 +55,7 @@ public class TestLlapDaemon { MetricsUtils.METRICS_PROCESS_NAME }; - private Configuration hiveConf = new HiveConf(); + private Configuration hiveConf = HiveConf.create(); @Mock private LlapRegistryService mockRegistry; @@ -88,7 +88,7 @@ public void tearDown() { @Test(expected = IllegalArgumentException.class) public void testEnforceProperNumberOfIOThreads() throws IOException { - Configuration thisHiveConf = new HiveConf(); + Configuration thisHiveConf = HiveConf.create(); HiveConf.setVar(thisHiveConf, HiveConf.ConfVars.LLAP_DAEMON_SERVICE_HOSTS, "@llap"); HiveConf.setIntVar(thisHiveConf, HiveConf.ConfVars.LLAP_DAEMON_NUM_EXECUTORS, 4); HiveConf.setIntVar(thisHiveConf, HiveConf.ConfVars.LLAP_IO_THREADPOOL_SIZE, 3); diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapLoadGeneratorService.java 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapLoadGeneratorService.java index e4e76f5e890e..50bee3b293d3 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapLoadGeneratorService.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapLoadGeneratorService.java @@ -36,7 +36,7 @@ public class TestLlapLoadGeneratorService { public void testLoadGeneratorStops() throws InterruptedException, UnknownHostException { LlapLoadGeneratorService service = new LlapLoadGeneratorService(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_TEST_LOAD_HOSTNAMES, InetAddress.getLocalHost().getHostName() + ",???"); HiveConf.setFloatVar(conf, HiveConf.ConfVars.HIVE_TEST_LOAD_UTILIZATION, 0.5f); @@ -61,7 +61,7 @@ public void testLoadGeneratorStops() throws InterruptedException, UnknownHostExc public void testLoadGeneratorFails() throws InterruptedException, UnknownHostException { LlapLoadGeneratorService service = new LlapLoadGeneratorService(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_TEST_LOAD_HOSTNAMES, InetAddress.getLocalHost().getHostName() + ",???"); HiveConf.setFloatVar(conf, HiveConf.ConfVars.HIVE_TEST_LOAD_UTILIZATION, 1.2f); diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java index f000dadb3dfd..a052d5a1d5ef 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java @@ -46,7 +46,7 @@ public static void beforeTests() throws Exception { llapWSPort = MetaStoreTestUtils.findFreePortExcepting( Integer.valueOf(HiveConf.ConfVars.LLAP_DAEMON_WEB_PORT.getDefaultValue())); 
llapWS = new LlapWebServices(llapWSPort, null, null); - llapWS.init(new HiveConf()); + llapWS.init(HiveConf.create()); llapWS.start(); Thread.sleep(5000); ensureUniqueInClasspath("javax/servlet/http/HttpServletRequest.class"); diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapRegistryService.java b/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapRegistryService.java index 065160bbf356..fd56562f25a9 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapRegistryService.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/TestLlapRegistryService.java @@ -35,7 +35,7 @@ */ public class TestLlapRegistryService { private static MiniLlapCluster cluster = null; - private static HiveConf conf = new HiveConf(); + private static HiveConf conf = HiveConf.create(); @BeforeClass public static void setUp() throws Exception { diff --git a/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/metrics/TestBlacklistingLlapMetricsListener.java b/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/metrics/TestBlacklistingLlapMetricsListener.java index 9216c6ea4333..9e0a7138c400 100644 --- a/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/metrics/TestBlacklistingLlapMetricsListener.java +++ b/llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/metrics/TestBlacklistingLlapMetricsListener.java @@ -80,7 +80,7 @@ public class TestBlacklistingLlapMetricsListener { public void setUp() throws Exception { initMocks(this); - conf = new HiveConf(); + conf = HiveConf.create(); when(mockRegistry.getInstances()).thenReturn(mockInstanceSet); when(mockRegistry.lockForConfigChange(anyLong(), anyLong())).thenReturn( new ConfigChangeLockResult(true, Long.MIN_VALUE)); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java index b68511d4fa84..563a628aae43 100644 --- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java @@ -235,7 +235,7 @@ void closeAllClientsQuietly() { } public void cleanup() { - // TODO: periodically reload a new HiveConf to check if stats reporting is enabled. + // TODO: periodically reload a fresh HiveConf (via HiveConf.create()) to check if stats reporting is enabled. hiveCache.cleanUp(); if (enableStats) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java index b39037cd65e1..8a4f114a26f0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java @@ -304,9 +304,9 @@ public QueryState build() { if (isolated) { // isolate query conf if (hiveConf == null) { - queryConf = new HiveConf(); + queryConf = HiveConf.create(); } else { - queryConf = new HiveConf(hiveConf); + queryConf = HiveConf.create(hiveConf); } } else { queryConf = hiveConf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java index 0d52c852895f..6d48ff361781 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java @@ -286,7 +286,7 @@ private static String convertHistogram(byte[] buffer, ColumnStatisticsData._Fiel } private static ZoneId getZoneIdFromConf() { - return SessionState.get() == null ? new HiveConf().getLocalTimeZone() + return SessionState.get() == null ?
HiveConf.create().getLocalTimeZone() : SessionState.get().getConf().getLocalTimeZone(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java index 6692c2413d50..5eeadeb421b5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java @@ -73,7 +73,7 @@ protected void analyzeCommand(TableName tableName, Map partition ASTNode child = (ASTNode) command.getChild(1); if (child.getType() == HiveParser.StringLiteral) { - ZoneId timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : SessionState.get().getConf() + ZoneId timeZone = SessionState.get() == null ? HiveConf.create().getLocalTimeZone() : SessionState.get().getConf() .getLocalTimeZone(); TimestampTZ time = TimestampTZUtil.parse(PlanUtils.stripQuotes(child.getText()), timeZone); spec = new AlterTableExecuteSpec(ROLLBACK, new RollbackSpec(TIME, time.toEpochMilli())); @@ -87,7 +87,7 @@ protected void analyzeCommand(TableName tableName, Map partition // the second child must be the rollback parameter ASTNode child = (ASTNode) command.getChild(1); - ZoneId timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : SessionState.get().getConf() + ZoneId timeZone = SessionState.get() == null ? 
HiveConf.create().getLocalTimeZone() : SessionState.get().getConf() .getLocalTimeZone(); TimestampTZ time = TimestampTZUtil.parse(PlanUtils.stripQuotes(child.getText()), timeZone); spec = new AlterTableExecuteSpec(EXPIRE_SNAPSHOT, new ExpireSnapshotsSpec(time.toEpochMilli())); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java index ab761de3651f..1fcbee3f95e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java @@ -140,9 +140,9 @@ private Configuration getBestAvailableConf() { return ss.getConf(); } - // Last resort is to create a new HiveConf object. It does not have any "runtime" + // Last resort is to create a HiveConf via HiveConf.create(). It does not have any "runtime" // changes to the configuration but that is the best we can do if we get this far. - return new HiveConf(); + return HiveConf.create(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 35b6996637dc..2c2efff42924 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -560,7 +560,7 @@ protected void receiveFeed(FeedType feedType, Object feedValue) { protected void cloneConf() { if (!clonedConf) { clonedConf = true; - conf = new HiveConf(conf); + conf = HiveConf.create(conf); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index ce12cea66eda..7c3fe25d4acc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -784,7 +784,7 @@ public static void main(String[] args) throws IOException, HiveException { */ public static String
generateCmdLine(HiveConf hconf, Context ctx) throws IOException { - HiveConf tempConf = new HiveConf(); + HiveConf tempConf = HiveConf.create(); Path hConfFilePath = new Path(ctx.getLocalTmpPath(), JOBCONF_FILENAME); OutputStream out = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/DirCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/DirCopyTask.java index 9dd34ee84fb1..b0c7cd3286d3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/DirCopyTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/DirCopyTask.java @@ -226,7 +226,7 @@ public int execute() { private HiveConf getConf(HiveConf conf) { // if it is a db level path check for custom configurations. - HiveConf clonedConf = new HiveConf(conf); + HiveConf clonedConf = HiveConf.create(conf); if (work.getTableName().startsWith("dbPath:")) { for (Map.Entry entry : conf.getPropsWithPrefix(CUSTOM_PATH_CONFIG_PREFIX).entrySet()) { clonedConf.set(entry.getKey().replaceFirst(CUSTOM_PATH_CONFIG_PREFIX, ""), entry.getValue()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java index cb279987a06e..32781aec5835 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java @@ -44,7 +44,7 @@ public void applyAction(final Map queriesViolated) { String queryId = sessionState.getWmContext().getQueryId(); try { UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - SessionState ss = new SessionState(new HiveConf(), ugi.getShortUserName()); + SessionState ss = new SessionState(HiveConf.create(), ugi.getShortUserName()); ss.setIsHiveServerQuery(true); SessionState.start(ss); KillQuery killQuery = sessionState.getKillQuery(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java index 4f12b5d6ff7b..d0bdb7ed0101 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java @@ -163,7 +163,7 @@ public TezSessionPoolSession create(TezSessionPoolSession oldSession) { queueIx = 0; } } - HiveConf sessionConf = new HiveConf(initConf); + HiveConf sessionConf = HiveConf.create(initConf); return createAndInitSession(defaultQueueList[localQueueIx], true, sessionConf); } }); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java index 33f9a8a34d26..805e86fc9e18 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java @@ -487,7 +487,7 @@ private void scheduleWork(WmThreadSyncWork context) { LOG.info("Invoking KillQuery for " + queryId + ": " + reason); try { UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - SessionState ss = new SessionState(new HiveConf(), ugi.getShortUserName()); + SessionState ss = new SessionState(HiveConf.create(), ugi.getShortUserName()); ss.setIsHiveServerQuery(true); SessionState.start(ss); kq.killQuery(queryId, reason, toKill.getConf()); @@ -1735,7 +1735,7 @@ public TezSessionState reopen(TezSessionState session) throws Exception { if (sessionConf == null) { // TODO: can this ever happen? LOG.warn("Session configuration is null for " + wmTezSession); - sessionConf = new HiveConf(conf, WorkloadManager.class); + sessionConf = HiveConf.create(conf, WorkloadManager.class); } SettableFuture future = SettableFuture.create(); @@ -1814,7 +1814,7 @@ private WmTezSession createSession(HiveConf conf) { @VisibleForTesting protected WmTezSession createSessionObject(String sessionId, HiveConf conf) { - conf = (conf == null) ? 
new HiveConf(this.conf) : conf; + conf = (conf == null) ? HiveConf.create(this.conf) : conf; conf.set(LlapTaskSchedulerService.LLAP_PLUGIN_ENDPOINT_ENABLED, "true"); return new WmTezSession(sessionId, this, expirationTracker, conf); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java index 904dd4bebded..a5ab4857800b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java @@ -357,7 +357,7 @@ private HiveHookEventProtoPartialBuilder getPreHookEvent(HookContext hookContext LOG.info("Received pre-hook notification for: " + plan.getQueryId()); // Make a copy so that we do not modify hookContext conf. - HiveConf conf = new HiveConf(hookContext.getConf()); + HiveConf conf = HiveConf.create(hookContext.getConf()); List mrTasks = Utilities.getMRTasks(plan.getRootTasks()); List tezTasks = Utilities.getTezTasks(plan.getRootTasks()); ExecutionMode executionMode = getExecutionMode(plan, mrTasks, tezTasks); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java index 951e20687a09..19c0eb8e401d 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java @@ -984,7 +984,7 @@ public static void pushFiltersAndAsOf(JobConf jobConf, TableScanOperator tableSc protected static void pushAsOf(Configuration jobConf, TableScanOperator ts) { TableScanDesc scanDesc = ts.getConf(); if (scanDesc.getAsOfTimestamp() != null) { - ZoneId timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : + ZoneId timeZone = SessionState.get() == null ? 
HiveConf.create().getLocalTimeZone() : SessionState.get().getConf().getLocalTimeZone(); TimestampTZ time = TimestampTZUtil.parse(scanDesc.getAsOfTimestamp(), timeZone); jobConf.set(TableScanDesc.AS_OF_TIMESTAMP, Long.toString(time.toEpochMilli())); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java index 4b70ff5c5b76..f976ce97280d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java @@ -1966,7 +1966,7 @@ public InputSplit[] getSplits(JobConf job, Configuration conf = job; if (HiveConf.getBoolVar(job, HiveConf.ConfVars.HIVE_ORC_MS_FOOTER_CACHE_ENABLED)) { // Create HiveConf once, since this is expensive. - conf = new HiveConf(conf, OrcInputFormat.class); + conf = HiveConf.create(conf, OrcInputFormat.class); } List result = generateSplitsInfo(conf, new Context(conf, numSplits, createExternalCaches())); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java index acf600266d3a..231920d006e0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java @@ -139,7 +139,7 @@ private Context createContext(Configuration conf, int numSplits) throws IOExcept // Use threads to resolve directories into splits. if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ORC_MS_FOOTER_CACHE_ENABLED)) { // Create HiveConf once, since this is expensive. 
- conf = new HiveConf(conf, OrcInputFormat.class); + conf = HiveConf.create(conf, OrcInputFormat.class); } return new Context(conf, numSplits, null); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java index e8eaac0fd9b7..47854ec81edc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java @@ -43,7 +43,7 @@ public static synchronized CuratorFramework getInstance(HiveConf hiveConf) { if (sharedClient == null) { // Create a client instance if (hiveConf == null) { - conf = new HiveConf(); + conf = HiveConf.create(); } else { conf = hiveConf; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index dbdece0d4f4e..b63251ef9743 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -370,7 +370,7 @@ public void reloadFunctions() throws HiveException { } public static Hive get(Configuration c, Class clazz) throws HiveException { - return get(c instanceof HiveConf ? (HiveConf)c : new HiveConf(c, clazz)); + return get(c instanceof HiveConf ? (HiveConf)c : HiveConf.create(c, clazz)); } /** @@ -461,7 +461,7 @@ private static Hive create(HiveConf c, boolean doRegisterAllFns) throws HiveExce private static HiveConf createHiveConf() { SessionState session = SessionState.get(); - return (session == null) ? new HiveConf(Hive.class) : session.getConf(); + return (session == null) ? 
HiveConf.create(Hive.class) : session.getConf(); } public void setHMSClientCapabilities(String[] capabilities) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java index 90f58274c9b1..5a9af500295e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java @@ -131,7 +131,7 @@ public void init() { try { // Create a new conf object to bypass metastore authorization, as we need to // retrieve all materialized views from all databases - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set(MetastoreConf.ConfVars.FILTER_HOOK.getVarname(), DefaultMetaStoreFilterHookImpl.class.getName()); init(Hive.get(conf)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveTezModelRelMetadataProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveTezModelRelMetadataProvider.java index 525550679026..c03dcff689ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveTezModelRelMetadataProvider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveTezModelRelMetadataProvider.java @@ -45,7 +45,7 @@ public class HiveTezModelRelMetadataProvider { ImmutableList.of( HiveRelMdDistinctRowCount.SOURCE, HiveRelMdCumulativeCost.SOURCE, - new HiveRelMdCost(HiveOnTezCostModel.getCostModel(new HiveConf())).getMetadataProvider(), + new HiveRelMdCost(HiveOnTezCostModel.getCostModel(HiveConf.create())).getMetadataProvider(), HiveRelMdSelectivity.SOURCE, HiveRelMdRowCount.SOURCE, HiveRelMdUniqueKeys.SOURCE, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java index 9c9dac07a671..5058df32153a 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java @@ -319,7 +319,7 @@ public static void processSkewJoin(JoinOperator joinOp, } mapJoinOp.setChildOperators(childOps); - HiveConf jc = new HiveConf(parseCtx.getConf(), + HiveConf jc = HiveConf.create(parseCtx.getConf(), GenMRSkewJoinProcessor.class); newPlan.setNumMapTasks(HiveConf diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java index b6d8a2849528..458a77960a50 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java @@ -88,7 +88,7 @@ private void export_meta_data(PreDropTableEvent tableEvent) throws MetaException try { SessionState.getConsole().printInfo("Beginning metadata export"); EximUtil.createExportDump(fs, outFile, mTbl, null, null, - new HiveConf(conf, MetaDataExportListener.class)); + HiveConf.create(conf, MetaDataExportListener.class)); if (moveMetadataToTrash == true) { wh.deleteDir(metaPath, true, false, false); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java index f32c38be282a..863486f21f19 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java @@ -83,7 +83,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer { ReplicationSemanticAnalyzer(QueryState queryState) throws SemanticException { super(queryState); this.db = super.db; - this.conf = new HiveConf(super.conf); + this.conf = HiveConf.create(super.conf); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java index 977ab5372dc3..ed9df8358674 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java @@ -57,7 +57,7 @@ public static CommandProcessor getForHiveCommandInternal(String[] cmd, HiveConf return null; } if (conf == null) { - conf = new HiveConf(); + conf = HiveConf.create(); } Set availableCommands = new HashSet(); for (String availableCommand : conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java index 150362417cbe..baf45220e0ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java @@ -93,7 +93,7 @@ private static void resetOverridesOnly(SessionState ss) { if (ss.getOverriddenConfigurations().isEmpty()) { return; } - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); for (String key : ss.getOverriddenConfigurations().keySet()) { setSessionVariableFromConf(ss, key, conf); } @@ -104,7 +104,7 @@ private static void resetOverrideOnly(SessionState ss, String varname) { if (!ss.getOverriddenConfigurations().containsKey(varname)) { return; } - setSessionVariableFromConf(ss, varname, new HiveConf()); + setSessionVariableFromConf(ss, varname, HiveConf.create()); ss.getOverriddenConfigurations().remove(varname); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java b/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java index 3cbaa60bdf1e..e4c23f56add1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java @@ -58,7 +58,7 @@ public class ScheduledQueryExecutionService implements Closeable { private Queue runningExecutors = new ConcurrentLinkedQueue<>(); public static ScheduledQueryExecutionService startScheduledQueryExecutorService(HiveConf inputConf) { - HiveConf conf = new HiveConf(inputConf); + HiveConf conf = HiveConf.create(inputConf); MetastoreBasedScheduledQueryService qService = new MetastoreBasedScheduledQueryService(conf); ExecutorService executor = buildExecutor(conf); ScheduledQueryExecutionContext ctx = new ScheduledQueryExecutionContext(executor, conf, qService); @@ -220,7 +220,7 @@ private void processQuery(ScheduledQueryPollResponse q) { info.setExecutorQueryId(buildExecutorQueryId("")); SessionState state = null; try { - HiveConf conf = new HiveConf(context.conf); + HiveConf conf = HiveConf.create(context.conf); conf.set(Constants.HIVE_QUERY_EXCLUSIVE_LOCK, lockNameFor(q.getScheduleKey())); conf.setVar(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.set(Constants.SCHEDULED_QUERY_NAMESPACE, q.getScheduleKey().getClusterNamespace()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java index 2cc057ee6e89..f3899de450c0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java @@ -72,7 +72,7 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener { private static final ThreadLocal tConfig = new ThreadLocal() { @Override protected Configuration initialValue() { - return new HiveConf(AuthorizationPreEventListener.class); + return HiveConf.create(AuthorizationPreEventListener.class); } }; diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java index b18f77901493..941b12c7a84c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java @@ -63,7 +63,7 @@ public class PrivilegeSynchronizer implements Runnable { public PrivilegeSynchronizer(LeaderLatch privilegeSynchronizerLatch, PolicyProviderContainer policyProviderContainer, HiveConf hiveConf) { - this.hiveConf = new HiveConf(hiveConf); + this.hiveConf = HiveConf.create(hiveConf); this.hiveConf.set(MetastoreConf.ConfVars.FILTER_HOOK.getVarname(), DefaultMetaStoreFilterHookImpl.class.getName()); try { hiveClient = Hive.get(this.hiveConf).getMSC(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java index 2ec1d3bf3155..21c5aed9913d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java @@ -525,7 +525,7 @@ HiveAuthorizer createHiveMetaStoreAuthorizer() throws Exception { HiveAuthorizer ret = null; HiveConf hiveConf = (HiveConf)tConfig.get(); if(hiveConf == null){ - HiveConf hiveConf1 = new HiveConf(super.getConf(), HiveConf.class); + HiveConf hiveConf1 = HiveConf.create(super.getConf(), HiveConf.class); tConfig.set(hiveConf1); hiveConf = hiveConf1; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java b/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java index 8d83a00e476f..91474bfe1465 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java @@ -89,7 +89,7 @@ public static void main(String[] args) throws Exception { verbose = true; } - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String rootHDFSDir; if (cli.hasOption("s")) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 32c0891d3f94..4100761071a5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -1079,7 +1079,7 @@ public static SessionState get() { public static HiveConf getSessionConf() { SessionStates state = tss.get(); if (state.conf == null) { - state.attach(new HiveConf()); + state.attach(HiveConf.create()); } return state.conf; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java index 43c41ebbaefa..6283de4ff35c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java @@ -651,7 +651,7 @@ public class WorkerRunnable implements Runnable { private final String user; public WorkerRunnable(Configuration conf, String user) { - this.conf = new HiveConf(conf, HiveConf.class); + this.conf = HiveConf.create(conf, HiveConf.class); this.user = user; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java index 9a44ffb45059..3286d03f655b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java @@ -145,7 +145,7 @@ public static void main(String[] args) throws Exception { LineageInfo lep = new LineageInfo(); - Context ctx=new 
Context(new HiveConf()); + Context ctx=new Context(HiveConf.create()); lep.getLineageInfo(query, ctx); for (String tab : lep.getInputTableList()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java index f26e832e0886..f49761913892 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java @@ -74,7 +74,7 @@ public void setConf(Configuration configuration) { // HiveConf is moved to the standalone metastore. //clone the conf - compactor needs to set properties in it which we don't // want to bleed into the caller - conf = new HiveConf(configuration, HiveConf.class); + conf = HiveConf.create(configuration, HiveConf.class); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MajorQueryCompactor.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MajorQueryCompactor.java index 75fb798ac33b..ab37d8093f6c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MajorQueryCompactor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MajorQueryCompactor.java @@ -44,7 +44,7 @@ public boolean run(CompactorContext context) throws IOException { StorageDescriptor storageDescriptor = context.getSd(); ValidWriteIdList writeIds = context.getValidWriteIdList(); - HiveConf conf = new HiveConf(hiveConf); + HiveConf conf = HiveConf.create(hiveConf); /* * For now, we will group splits on tez so that we end up with all bucket files, * with same bucket number in one map task. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MinorQueryCompactor.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MinorQueryCompactor.java index cc1c3e921675..b992e8b056f9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MinorQueryCompactor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MinorQueryCompactor.java @@ -52,7 +52,7 @@ public boolean run(CompactorContext context) throws IOException { AcidDirectory dir = context.getAcidDirectory(); ValidWriteIdList writeIds = context.getValidWriteIdList(); // Set up the session for driver. - HiveConf conf = new HiveConf(hiveConf); + HiveConf conf = HiveConf.create(hiveConf); conf.set(HiveConf.ConfVars.SPLIT_GROUPING_MODE.varname, CompactorUtil.COMPACTOR); conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_FETCH_COLUMN_STATS, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_ESTIMATE_STATS, false); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMajorQueryCompactor.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMajorQueryCompactor.java index ecdae6439caa..30bc1786e130 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMajorQueryCompactor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMajorQueryCompactor.java @@ -50,7 +50,7 @@ public boolean run(CompactorContext context) throws IOException { ValidWriteIdList writeIds = context.getValidWriteIdList(); // Set up the session for driver. - HiveConf driverConf = new HiveConf(hiveConf); + HiveConf driverConf = HiveConf.create(hiveConf); // Note: we could skip creating the table and just add table type stuff directly to the // "insert overwrite directory" command if there were no bucketing or list bucketing. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMinorQueryCompactor.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMinorQueryCompactor.java index 81e7b4cc19f5..119e8c75a2cc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMinorQueryCompactor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/MmMinorQueryCompactor.java @@ -186,7 +186,7 @@ private String getDropQuery(String tableToDrop) { } private HiveConf setUpDriverSession(HiveConf hiveConf) { - HiveConf driverConf = new HiveConf(hiveConf); + HiveConf driverConf = HiveConf.create(hiveConf); driverConf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_FETCH_COLUMN_STATS, false); driverConf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_ESTIMATE_STATS, false); return driverConf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/RebalanceQueryCompactor.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/RebalanceQueryCompactor.java index 591a4b4dfb35..36749c28215c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/RebalanceQueryCompactor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/RebalanceQueryCompactor.java @@ -48,7 +48,7 @@ public boolean run(CompactorContext context) ValidWriteIdList writeIds = context.getValidWriteIdList(); // Set up the session for driver. 
- HiveConf conf = new HiveConf(hiveConf); + HiveConf conf = HiveConf.create(hiveConf); String tmpTableName = getTempTableName(table); Path tmpTablePath = QueryCompactor.Util.getCompactionResultDir(storageDescriptor, writeIds, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/StatsUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/StatsUpdater.java index 698346b9b72a..342de381a87f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/StatsUpdater.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/StatsUpdater.java @@ -60,7 +60,7 @@ public void gatherStats(CompactionInfo ci, HiveConf conf, throw new IllegalArgumentException("Metastore client is missing"); } - HiveConf statusUpdaterConf = new HiveConf(conf); + HiveConf statusUpdaterConf = HiveConf.create(conf); statusUpdaterConf.unset(ValidTxnList.VALID_TXNS_KEY); //e.g. analyze table page_view partition(dt='10/15/2014',country=’US’) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUtils.java index 89e571aaacbb..a5a186277281 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUtils.java @@ -30,7 +30,7 @@ public class UDFUtils { public static TimestampTZ getTimestampTZFromTimestamp(Timestamp timestamp) { ZoneId zone = ((SessionState.get() == null) ? 
- new HiveConf().getLocalTimeZone() : SessionState.get().getConf().getLocalTimeZone()); + HiveConf.create().getLocalTimeZone() : SessionState.get().getConf().getLocalTimeZone()); return TimestampTZUtil.convert(timestamp, zone); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java index ead43d225efc..b0c25c725c4e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java @@ -80,7 +80,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen if (fmtStr != null) { try { if (timeZone == null) { - timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : SessionState.get().getConf() + timeZone = SessionState.get() == null ? HiveConf.create().getLocalTimeZone() : SessionState.get().getConf() .getLocalTimeZone(); } formatter = DateTimeFormatter.ofPattern(fmtStr); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java index 21081cf7c117..8c0c1c72749b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java @@ -86,7 +86,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen } if (timeZone == null) { - timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : SessionState.get().getConf() + timeZone = SessionState.get() == null ? 
HiveConf.create().getLocalTimeZone() : SessionState.get().getConf() .getLocalTimeZone(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java index 8cbcbf55b082..f63d150368af 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java @@ -80,10 +80,10 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen checkArgGroups(arguments, 0, tsInputTypes, STRING_GROUP, DATE_GROUP, NUMERIC_GROUP, VOID_GROUP, BOOLEAN_GROUP); strict = SessionState.get() != null ? SessionState.get().getConf() - .getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION) : new HiveConf() + .getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION) : HiveConf.create() .getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION); intToTimestampInSeconds = SessionState.get() != null ? SessionState.get().getConf() - .getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS) : new HiveConf() + .getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS) : HiveConf.create() .getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS); if (strict) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java index 5075ee1525e0..685ce6ba6b74 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java @@ -125,7 +125,7 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx .getPrimitiveCategory().name()); } - timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : SessionState.get().getConf() + timeZone = SessionState.get() == null ? 
HiveConf.create().getLocalTimeZone() : SessionState.get().getConf() .getLocalTimeZone(); formatter = getFormatter(lasPattern); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSQLSchema.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSQLSchema.java index ff71c9c2dcb7..6b0bfca7a261 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSQLSchema.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSQLSchema.java @@ -64,7 +64,7 @@ public void process(Object[] arguments) throws HiveException { String query = stringOI.getPrimitiveJavaObject(arguments[0]); LOG.debug("Getting schema for Query: {}", query); - HiveConf conf = new HiveConf(SessionState.get().getConf()); + HiveConf conf = HiveConf.create(SessionState.get().getConf()); List fieldSchemas = null; try { fieldSchemas = ParseUtils.parseQueryAndGetSchema(conf, query); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java index 97d04676e76b..524e208d77df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java @@ -270,7 +270,7 @@ private void validateSplitResult(SplitResult splitResult, boolean generateLightW private PlanFragment createPlanFragment(String query, ApplicationId splitsAppId) throws HiveException { - HiveConf conf = new HiveConf(SessionState.get().getConf()); + HiveConf conf = HiveConf.create(SessionState.get().getConf()); HiveConf.setVar(conf, ConfVars.HIVEFETCHTASKCONVERSION, "none"); HiveConf.setVar(conf, ConfVars.HIVEQUERYRESULTFILEFORMAT, PlanUtils.LLAP_OUTPUT_FORMAT_KEY); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/HiveStrictManagedMigration.java b/ql/src/java/org/apache/hadoop/hive/ql/util/HiveStrictManagedMigration.java index 1dd9d8bf9db9..ef50e4b625c0 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/util/HiveStrictManagedMigration.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/util/HiveStrictManagedMigration.java @@ -232,7 +232,7 @@ public static void main(String[] args) throws Exception { HiveStrictManagedMigration migration = null; try { - HiveConf conf = hiveConf == null ? new HiveConf() : hiveConf; + HiveConf conf = hiveConf == null ? HiveConf.create() : hiveConf; WarehouseRootCheckResult warehouseRootCheckResult = checkOldWarehouseRoot(runOptions, conf); runOptions.setShouldModifyManagedTableLocation( warehouseRootCheckResult.shouldModifyManagedTableLocation); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/UpgradeTool.java b/ql/src/java/org/apache/hadoop/hive/ql/util/UpgradeTool.java index 67df6a7bcec4..a9c74d8dc48a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/util/UpgradeTool.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/util/UpgradeTool.java @@ -184,7 +184,7 @@ private static IMetaStoreClient getHMS(HiveConf conf) { */ private void performUpgradeInternal(String scriptLocation, boolean execute) throws HiveException, TException, IOException { - HiveConf conf = hiveConf != null ? hiveConf : new HiveConf(); + HiveConf conf = hiveConf != null ? 
hiveConf : HiveConf.create(); boolean isAcidEnabled = isAcidEnabled(conf); IMetaStoreClient hms = getHMS(conf); LOG.debug("Looking for databases"); diff --git a/ql/src/test/org/apache/hadoop/hive/llap/TestLlapOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/llap/TestLlapOutputFormat.java index f27cdf49696e..3380a8e82d60 100644 --- a/ql/src/test/org/apache/hadoop/hive/llap/TestLlapOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/llap/TestLlapOutputFormat.java @@ -77,7 +77,7 @@ public void testValues() throws Exception { job.set(LlapOutputFormat.LLAP_OF_ID_KEY, id); LlapOutputFormat format = new LlapOutputFormat(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Socket socket = new Socket("localhost", service.getPort()); LOG.debug("Socket connected"); diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java b/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java index 222c27170a02..9861ea535772 100644 --- a/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java +++ b/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java @@ -88,7 +88,7 @@ public void tearDown() throws Exception { public void setUp() throws Exception { try { - client = new HiveMetaStoreClient(new HiveConf(this.getClass())); + client = new HiveMetaStoreClient(HiveConf.create(this.getClass())); } catch (Throwable e) { System.err.println("Unable to open the metastore"); System.err.println(StringUtils.stringifyException(e)); diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java index 5b334838ac8c..aa0d6e59da30 100644 --- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java +++ b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java @@ -76,7 +76,7 @@ public class TestCompactionTxnHandler { public static final String WORKER_VERSION = 
HiveVersionInfo.getShortVersion(); - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); private TxnStore txnHandler; public TestCompactionTxnHandler() throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java index 43fa5410c399..94bf084f585c 100644 --- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java +++ b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java @@ -116,7 +116,7 @@ public class TestTxnHandler { static final private String CLASS_NAME = TxnHandler.class.getName(); private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); private TxnStore txnHandler; public TestTxnHandler() throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNoConnectionPool.java b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNoConnectionPool.java index b47f4f1e65f8..610b7fa1e335 100644 --- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNoConnectionPool.java +++ b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNoConnectionPool.java @@ -42,7 +42,7 @@ public class TestTxnHandlerNoConnectionPool { private static final Logger LOG = LoggerFactory.getLogger(TestTxnHandlerNoConnectionPool.class.getName()); - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); private TxnStore txnHandler; @Before diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerWithOneConnection.java b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerWithOneConnection.java index c2708b2bb6a1..1016d3f39456 100644 --- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerWithOneConnection.java +++ 
b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerWithOneConnection.java @@ -40,7 +40,7 @@ public class TestTxnHandlerWithOneConnection { static final private String CLASS_NAME = TxnHandler.class.getName(); private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); private TxnStore txnHandler; public TestTxnHandlerWithOneConnection() throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java b/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java index 7566f8ea593d..4ad764b237e2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java @@ -73,7 +73,7 @@ public class TestCompileLock { @Before public void init() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolVar(HIVE_SERVER2_METRICS_ENABLED, true); conf.setVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR, System.getProperty("java.io.tmpdir")); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java index 2240b99b114c..70171d9dfd48 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java @@ -304,7 +304,7 @@ private static final class QueryRunnable implements Runnable { this.query = query; this.cdlIn = cdlIn; this.cdlOut = cdlOut; - this.hiveConf = new HiveConf(hiveConf); + this.hiveConf = HiveConf.create(hiveConf); } @Override @@ -1233,7 +1233,7 @@ public void testMergeOnTezEdges() throws Exception { "WHEN MATCHED THEN UPDATE SET b = 7 " + "WHEN NOT MATCHED THEN INSERT VALUES(s.a, s.b) "; d.destroy(); - HiveConf hc = new HiveConf(hiveConf); + HiveConf hc = HiveConf.create(hiveConf); hc.setVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "tez"); hc.setBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER, false); d = 
new Driver(hc); @@ -1292,7 +1292,7 @@ public void testMergeUpdateDelete() throws Exception { @Test public void testMergeUpdateDeleteNoCardCheck() throws Exception { d.destroy(); - HiveConf hc = new HiveConf(hiveConf); + HiveConf hc = HiveConf.create(hiveConf); hc.setBoolVar(HiveConf.ConfVars.MERGE_CARDINALITY_VIOLATION_CHECK, false); d = new Driver(hc); d.setMaxRows(10000); @@ -1461,7 +1461,7 @@ public void testMoreBucketsThanReducers() throws Exception { //see bucket_num_reducers.q bucket_num_reducers2.q // todo: try using set VerifyNumReducersHook.num.reducers=10; d.destroy(); - HiveConf hc = new HiveConf(hiveConf); + HiveConf hc = HiveConf.create(hiveConf); hc.setIntVar(HiveConf.ConfVars.MAXREDUCERS, 1); //this is used in multiple places, SemanticAnalyzer.getBucketingSortingDest() among others hc.setIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS, 1); @@ -1481,7 +1481,7 @@ public void testMoreBucketsThanReducers2() throws Exception { //todo: try using set VerifyNumReducersHook.num.reducers=10; //see bucket_num_reducers.q bucket_num_reducers2.q d.destroy(); - HiveConf hc = new HiveConf(hiveConf); + HiveConf hc = HiveConf.create(hiveConf); hc.setIntVar(HiveConf.ConfVars.MAXREDUCERS, 2); //this is used in multiple places, SemanticAnalyzer.getBucketingSortingDest() among others hc.setIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS, 2); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java b/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java index 03c4d4f23da3..62c0b639c42a 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java @@ -105,7 +105,7 @@ public void setUp() throws Exception { } } void initHiveConf() { - hiveConf = new HiveConf(this.getClass()); + hiveConf = HiveConf.create(this.getClass()); } void setUpInternal() throws Exception { initHiveConf(); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/ddl/table/partition/show/TestShowPartitionAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/ddl/table/partition/show/TestShowPartitionAnalyzer.java index e3c64cb12f12..3a792fe807b0 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/ddl/table/partition/show/TestShowPartitionAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/ddl/table/partition/show/TestShowPartitionAnalyzer.java @@ -56,7 +56,7 @@ public class TestShowPartitionAnalyzer { @Before public void before() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); SessionState.start(conf); } @@ -96,7 +96,7 @@ public void testGetShowPartitionsFilter() throws Exception { "foo", 1, 1, -1, null, partColumns, null, null, null, TableType.MANAGED_TABLE.name())); ShowPartitionAnalyzer analyzer = new ShowPartitionAnalyzer(QueryState.getNewQueryState( - new HiveConf(), null)); + HiveConf.create(), null)); funcDesc = (ExprNodeGenericFuncDesc)analyzer.getShowPartitionsFilter(table, command); Assert.assertTrue(funcDesc.getChildren().size() == 2); // ds > '2010-03-03' diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestConcurrentDppInserts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestConcurrentDppInserts.java index 1288c10d32ec..21c4ce65d7e9 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestConcurrentDppInserts.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestConcurrentDppInserts.java @@ -156,7 +156,7 @@ private String getExceptionMessages() { } private static IDriver createDriver(boolean custom) { - HiveConf conf = new HiveConf(env_setup.getTestCtx().hiveConf); + HiveConf conf = HiveConf.create(env_setup.getTestCtx().hiveConf); if (custom) { conf.setVar(ConfVars.HIVE_LOCK_FILE_MOVE_MODE, "all"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestContext.java index de59ba5644e4..bdeb4e78cab9 100644 --- 
a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestContext.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestContext.java @@ -31,7 +31,7 @@ import static org.mockito.Mockito.*; public class TestContext { - private static HiveConf conf = new HiveConf(); + private static HiveConf conf = HiveConf.create(); private Context context; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index 801133d85c61..839bf1842b68 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -87,7 +87,7 @@ public class TestExecDriver { static { try { - queryState = new QueryState.Builder().withHiveConf(new HiveConf(ExecDriver.class)).build(); + queryState = new QueryState.Builder().withHiveConf(HiveConf.create(ExecDriver.class)).build(); conf = queryState.getConf(); conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true); conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java index 2ea15cf4924f..00acfb81250d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java @@ -59,7 +59,7 @@ public class TestExplainTask { @Before public void setUp() { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); uut = new ExplainTask(); uut.conf = hiveConf; out = mock(PrintStream.class); @@ -322,7 +322,7 @@ public void testCollectAuthRelatedEntitiesJsonShouldMatch() throws Exception { when(qs.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN); uut.queryState = qs; - SessionState.start(new HiveConf(ExplainTask.class)); + SessionState.start(HiveConf.create(ExplainTask.class)); // SessionState.get().setCommandType(HiveOperation.EXPLAIN); 
HiveAuthenticationProvider authenticationProviderMock = mock(HiveAuthenticationProvider.class); when(authenticationProviderMock.getUserName()).thenReturn("test-user"); @@ -340,7 +340,7 @@ public void testCollectAuthRelatedEntitiesJsonShouldMatch() throws Exception { public void testOutputPlanVectorizationJsonShouldMatch() throws Exception { QueryState qs = mock(QueryState.class); when(qs.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true); when(qs.getConf()).thenReturn(hiveConf); uut.queryState = qs; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java index b33ebd2c032d..d815f3d82ed1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java @@ -64,7 +64,7 @@ public class TestExpressionEvaluator { public TestExpressionEvaluator() { // Arithmetic operations rely on getting conf from SessionState, need to initialize here. 
- SessionState ss = new SessionState(new HiveConf()); + SessionState ss = new SessionState(HiveConf.create()); SessionState.setCurrentSessionState(ss); col1 = new ArrayList(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java index dfbb7f36902a..ec1610ee91da 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java @@ -96,7 +96,7 @@ public void setUp() { varchar5 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(5)"); char10 = TypeInfoFactory.getPrimitiveTypeInfo("char(10)"); char5 = TypeInfoFactory.getPrimitiveTypeInfo("char(5)"); - SessionState.start(new HiveConf()); + SessionState.start(HiveConf.create()); } private void implicit(TypeInfo a, TypeInfo b, boolean convertible) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetInputSummary.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetInputSummary.java index 160892c7dcb5..c2905af63044 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetInputSummary.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetInputSummary.java @@ -79,7 +79,7 @@ public class TestGetInputSummary { @Before public void setup() throws Exception { // creates scratch directories needed by the Context object - SessionState.start(new HiveConf()); + SessionState.start(HiveConf.create()); this.jobConf = new JobConf(); this.properties = new Properties(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestKeyWrapperFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestKeyWrapperFactory.java index 03c4ed63c82a..5ff2453a072d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestKeyWrapperFactory.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestKeyWrapperFactory.java @@ -44,7 +44,7 @@ public class TestKeyWrapperFactory { @Before public void setup() throws Exception { - SessionState ss 
= new SessionState(new HiveConf()); + SessionState ss = new SessionState(HiveConf.create()); SessionState.setCurrentSessionState(ss); ArrayList col1 = new ArrayList(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestLimitOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestLimitOperator.java index 681435c65c8b..632c0bf6cbd7 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestLimitOperator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestLimitOperator.java @@ -63,7 +63,7 @@ private void testGlobalLimitReachedInDaemonOrContainer(boolean isDaemon, int off ObjectCache.setupObjectRegistry(new ObjectRegistryImpl()); } - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, "query-" + random.nextInt(10000)); HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "tez"); conf.set(TezProcessor.HIVE_TEZ_VERTEX_NAME, "Map 1"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java index 30c893223825..0445792a8a11 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java @@ -72,7 +72,7 @@ public class TestMsckCreatePartitionsInBatches { @BeforeClass public static void setupClass() throws HiveException, MetaException { - hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class); + hiveConf = HiveConf.create(TestMsckCreatePartitionsInBatches.class); hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java 
b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java index e7318bf6d345..6a10ccf01564 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java @@ -73,7 +73,7 @@ public class TestMsckDropPartitionsInBatches { @BeforeClass public static void setupClass() throws Exception { - hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class); + hiveConf = HiveConf.create(TestMsckCreatePartitionsInBatches.class); hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java index c82fdf3a1d9a..0db264a03ad2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java @@ -421,7 +421,7 @@ public InputSplit[] getSplits(JobConf job, int splits) throws IOException { @Test public void testFetchOperatorContext() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set("hive.support.concurrency", "false"); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, @@ -458,7 +458,7 @@ public void testFetchOperatorContext() throws Exception { public void testNoConditionalTaskSizeForLlap() { ConvertJoinMapJoin convertJoinMapJoin = new ConvertJoinMapJoin(); long defaultNoConditionalTaskSize = 1024L * 1024L * 1024L; - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setLongVar(HiveConf.ConfVars.HIVECONVERTJOINNOCONDITIONALTASKTHRESHOLD, defaultNoConditionalTaskSize); LlapClusterStateForCompile llapInfo = null; @@ -510,7 +510,7 @@ 
public void testNoConditionalTaskSizeForLlap() { @Test public void testLlapMemoryOversubscriptionMaxExecutorsPerQueryCalculation() { ConvertJoinMapJoin convertJoinMapJoin = new ConvertJoinMapJoin(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); LlapClusterStateForCompile llapInfo = Mockito.mock(LlapClusterStateForCompile.class); @@ -576,7 +576,7 @@ public void testLlapMemoryOversubscriptionMaxExecutorsPerQueryCalculation() { desc.setMinReductionHashAggr(0.5f); // 5. Configure hive conf and Build group by operator - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); HiveConf.setIntVar(hconf, HiveConf.ConfVars.HIVEGROUPBYMAPINTERVAL, 1); // 6. test hash aggr without grouping sets diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index 15106909734e..d957a1e84a3e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -133,7 +133,7 @@ public void testSerializeTimestamp() { public void testgetDbTableName() throws HiveException{ String tablename; String [] dbtab; - SessionState.start(new HiveConf(this.getClass())); + SessionState.start(HiveConf.create(this.getClass())); String curDefaultdb = SessionState.get().getCurrentDatabase(); //test table without db portion @@ -247,7 +247,7 @@ public void testRenameFileExistsHivePath() throws Exception { private List runRemoveTempOrDuplicateFilesTestCase(String executionEngine, boolean dPEnabled) throws Exception { - Configuration hconf = new HiveConf(this.getClass()); + Configuration hconf = HiveConf.create(this.getClass()); // do this to verify that Utilities.removeTempOrDuplicateFiles does not revert to default scheme information hconf.set("fs.defaultFS", "hdfs://should-not-be-used/"); hconf.set(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, executionEngine); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/dump/TestUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/dump/TestUtils.java index 3303f95252df..cb8123a0dbb1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/dump/TestUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/dump/TestUtils.java @@ -50,7 +50,7 @@ public class TestUtils { @Test public void testCreate() throws SemanticException, IOException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Mockito.when(outputFile.getFileSystem(conf)).thenReturn(fileSystem); Mockito.when(fileSystem.create(outputFile)).thenReturn(outputStream); Utils.create(outputFile, conf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java index e7d010f988ac..507fbff806ca 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java @@ -47,7 +47,7 @@ @PrepareForTest({LoggerFactory.class}) public class TestFileList { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); private FSDataOutputStream outStream; private FSDataOutputStream testFileStream; final String TEST_DATA_DIR = new File(System.getProperty("java.io.tmpdir") + @@ -145,7 +145,7 @@ public void testWriteNoRetry() throws Exception { @Test public void testReadWithDuplicateEntries() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); String testEntry = "someEntry"; int numUniqueEntries = 100; Path testFilePath = new Path(new Path(TEST_DATA_DIR), "testFile"); @@ -170,7 +170,7 @@ public void testReadWithDuplicateEntries() throws Exception { @Test public void testReadWithAllDistinctEntries() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); String testEntry = "someEntry"; int numUniqueEntries = 100; Path testFilePath = new Path(new Path(TEST_DATA_DIR), "testFile"); 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestDagUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestDagUtils.java index 960caafcced2..c3ece071c1a8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestDagUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestDagUtils.java @@ -76,7 +76,7 @@ public Void run() throws Exception { @Test public void outputCommitterSetToDefaultIfNotPresent() throws IOException { DagUtils dagUtils = DagUtils.getInstance(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); JobConf configuration = dagUtils.createConfiguration(conf); @@ -87,7 +87,7 @@ public void outputCommitterSetToDefaultIfNotPresent() throws IOException { @Test public void outputCommitterNotOverriddenIfPresent() throws IOException { DagUtils dagUtils = DagUtils.getInstance(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set("mapred.output.committer.class", TestTezOutputCommitter.CountingOutputCommitter.class.getName()); JobConf configuration = dagUtils.createConfiguration(conf); @@ -101,7 +101,7 @@ public void testMapTezTaskEnvIsCopiedFromMrProperties() { final DagUtils dagUtils = DagUtils.getInstance(); Vertex map = Vertex.create("mapWorkName", null); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Assert.assertNull(map.getTaskEnvironment().get("key")); conf.set(JobConf.MAPRED_MAP_TASK_ENV, "key=value"); @@ -115,7 +115,7 @@ public void testReduceTezTaskEnvIsCopiedFromMrProperties() { final DagUtils dagUtils = DagUtils.getInstance(); Vertex reduce = Vertex.create("reduceWorkName", null); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Assert.assertNull(reduce.getTaskEnvironment().get("key")); conf.set(JobConf.MAPRED_REDUCE_TASK_ENV, "key=value"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java index 
ee32d166101f..b95580f980f5 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java @@ -60,7 +60,7 @@ public TezSessionPoolSession createSession(String sessionId, HiveConf conf) { @Before public void setUp() { - conf = new HiveConf(); + conf = HiveConf.create(); } @Test @@ -271,7 +271,7 @@ public SessionThread(boolean llap) { @Override public void run() { try { - HiveConf tmpConf = new HiveConf(conf); + HiveConf tmpConf = HiveConf.create(conf); if (random.nextDouble() > 0.5) { tmpConf.set("tez.queue.name", "default"); } else { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java index 5bac3bb62c76..064578174f12 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java @@ -170,7 +170,7 @@ public Edge answer(InvocationOnMock invocation) throws Throwable { conf = new JobConf(); appLr = createResource("foo.jar"); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestUtils.java index 71ce812b2071..a8afa6fe24f6 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestUtils.java @@ -88,7 +88,7 @@ public void testGetSplitLocationProvider() throws IOException, URISyntaxExceptio LlapServiceInstance inactive = new InactiveServiceInstance(INACTIVE); instances.add(inactive); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM.varname, "localhost"); 
LlapZookeeperRegistryImpl dynRegistry = new LlapZookeeperRegistryImpl("dyn", conf); Endpoint rpcEndpoint = RegistryTypeUtils.ipcEndpoint("llap", new InetSocketAddress(ACTIVE, 4000)); @@ -126,7 +126,7 @@ public void testGetSplitLocationProvider() throws IOException, URISyntaxExceptio assertLocations((HostAffinitySplitLocationProvider)provider, new String[] {ACTIVE}); // Check if fixed stuff is working as well - LlapFixedRegistryImpl fixRegistry = new LlapFixedRegistryImpl("llap", new HiveConf()); + LlapFixedRegistryImpl fixRegistry = new LlapFixedRegistryImpl("llap", HiveConf.create()); // Instance for testing fixed registry instances LlapServiceInstance fixed = fixRegistry.new FixedServiceInstance(FIXED); @@ -194,7 +194,7 @@ public String[] getLocations(final InputSplit inputSplit) throws IOException { @Test public void testCustomSplitLocationProvider() throws IOException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_EXECUTION_MODE, "llap"); conf.setVar(HiveConf.ConfVars.LLAP_SPLIT_LOCATION_PROVIDER_CLASS, NoConstructorSplitLocationProvider.class.getName()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestVectorMapJoinFastHashTable.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestVectorMapJoinFastHashTable.java index d8a076a7e313..7ae06eaf55c2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestVectorMapJoinFastHashTable.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestVectorMapJoinFastHashTable.java @@ -72,7 +72,7 @@ private void runEstimationCheck(HashTableKeyType l) throws SerDeException, IOExc TableDesc keyTblDesc = new TableDesc(); keyTblDesc.setProperties(new Properties()); desc.setKeyTblDesc(keyTblDesc); - Configuration hconf = new HiveConf(); + Configuration hconf = HiveConf.create(); VectorMapJoinFastTableContainer container = new VectorMapJoinFastTableContainer(desc, hconf, keyCount, 1); container.setSerde(null, null); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java index 8ce58bb45cc2..1e400fae06d9 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java @@ -216,7 +216,7 @@ private static WMFullResourcePlan createDummyPlan(int numSessions) { @Override protected WmTezSession createSessionObject(String sessionId, HiveConf conf) { - conf = conf == null ? new HiveConf(getConf()) : conf; + conf = conf == null ? HiveConf.create(getConf()) : conf; SampleTezSessionState sess = new SampleTezSessionState(sessionId, this, conf); if (failedWait != null) { sess.setWaitForAmRegistryFuture(failedWait); @@ -1398,7 +1398,7 @@ private void checkError(final AtomicReference error) throws Exception } private HiveConf createConf() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set(ConfVars.HIVE_SERVER2_TEZ_SESSION_LIFETIME.varname, "-1"); conf.set(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false"); conf.set(ConfVars.LLAP_TASK_SCHEDULER_AM_REGISTRY_NAME.varname, ""); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java index 9d0e57948b13..35d8aff336c8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java @@ -299,7 +299,7 @@ public Void call() throws Exception { @Test public void testRetryFailureWithHiveConf() throws Throwable { - HiveConf conf = new HiveConf(TestRetryable.class); + HiveConf conf = HiveConf.create(TestRetryable.class); conf.set(HiveConf.ConfVars.REPL_RETRY_INTIAL_DELAY.varname, "1s"); conf.setFloat(HiveConf.ConfVars.REPL_RETRY_BACKOFF_COEFFICIENT.varname, 1.0f); conf.set(HiveConf.ConfVars.REPL_RETRY_TOTAL_DURATION.varname, "60s"); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java index 2e601d6fdaca..4ee2135d84f1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java @@ -45,7 +45,7 @@ */ public class TestVectorFilterOperator { - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); /** * Fundamental logic and performance tests for vector filters belong here. diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java index f189894169f9..6b4949da031e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java @@ -101,7 +101,7 @@ */ public class TestVectorGroupByOperator { - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); private static ExprNodeDesc buildColumnDesc( VectorizationContext ctx, diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java index 74a4ad0b61b9..fecc67ee09e4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java @@ -86,7 +86,7 @@ private void testGlobalLimitReachedInDaemonOrContainer(boolean isDaemon, int off ObjectCache.setupObjectRegistry(new ObjectRegistryImpl()); } - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, "query-" + random.nextInt(10000)); HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "tez"); 
conf.set(TezProcessor.HIVE_TEZ_VERTEX_NAME, "Map 1"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/aggregation/AggregationBase.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/aggregation/AggregationBase.java index e0e86a47d816..42602a648275 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/aggregation/AggregationBase.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/aggregation/AggregationBase.java @@ -198,7 +198,7 @@ protected static boolean doVectorTest(String aggregationName, TypeInfo typeInfo, Object[] results) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); VectorizationContext vectorizationContext = new VectorizationContext( diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmetic.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmetic.java index cbedb53aa637..3a871eaff615 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmetic.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmetic.java @@ -72,7 +72,7 @@ public class TestVectorArithmetic { public TestVectorArithmetic() { // Arithmetic operations rely on getting conf from SessionState, need to initialize here. 
- SessionState ss = new SessionState(new HiveConf()); + SessionState ss = new SessionState(HiveConf.create()); ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "default"); SessionState.setCurrentSessionState(ss); } @@ -584,7 +584,7 @@ private void doRowArithmeticTest(TypeInfo typeInfo1, " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf); evaluator.initialize(rowInspector); @@ -640,7 +640,7 @@ private void doVectorArithmeticTest(TypeInfo typeInfo1, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (arithmeticTestMode == ArithmeticTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java index f76378487ccb..e9acc48fa54c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java @@ -832,7 +832,7 @@ private boolean doRowCastTest(TypeInfo typeInfo, " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf); @@ -881,7 +881,7 @@ private boolean doVectorBetweenInTest(TypeInfo typeInfo, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (betweenInTestMode == BetweenInTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); } diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java index ad4e6beaab09..b68279fb15bd 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java @@ -411,7 +411,7 @@ private boolean doRowCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo, " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf); try { @@ -474,7 +474,7 @@ private boolean doVectorCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo, ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(targetTypeInfo, udf, children); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (castStmtTestMode == CastStmtTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCoalesceElt.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCoalesceElt.java index 56e904adc13d..0c50ce8ce561 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCoalesceElt.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCoalesceElt.java @@ -384,7 +384,7 @@ private boolean doRowCastTest(TypeInfo typeInfo, " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf); try { @@ -435,7 +435,7 @@ private boolean doVectorCastTest(TypeInfo typeInfo, int iteration, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception { - HiveConf hiveConf = 
new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (coalesceEltTestMode == CoalesceEltTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateAddSub.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateAddSub.java index eb637f13052a..fe6708bdfcbe 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateAddSub.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateAddSub.java @@ -375,7 +375,7 @@ private void doRowDateAddSubTest(TypeInfo dateTimeStringTypeInfo, TypeInfo integ " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf); evaluator.initialize(rowInspector); @@ -425,7 +425,7 @@ private void doVectorDateAddSubTest(TypeInfo dateTimeStringTypeInfo, TypeInfo in Object[] resultObjects) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (dateAddSubTestMode == DateAddSubTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateDiff.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateDiff.java index f5b1a3467d0b..04192e476d47 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateDiff.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateDiff.java @@ -367,7 +367,7 @@ private void doRowDateAddSubTest(TypeInfo dateTimeStringTypeInfo1, " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, 
hiveConf); evaluator.initialize(rowInspector); @@ -412,7 +412,7 @@ private void doVectorDateAddSubTest(TypeInfo dateTimeStringTypeInfo1, Object[] resultObjects) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (dateDiffTestMode == DateDiffTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java index 1f811971287a..2010cc5e01d1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java @@ -334,7 +334,7 @@ private void compareToUDFUnixTimeStampDate(long t, long y) { private void verifyUDFUnixTimeStamp(VectorizedRowBatch batch) throws HiveException { VectorExpression udf; udf = new VectorUDFUnixTimeStampDate(0, 1); - udf.transientInit(new HiveConf()); + udf.transientInit(HiveConf.create()); udf.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.dateTypeInfo}); udf.evaluate(batch); final int in = 0; @@ -398,7 +398,7 @@ private void verifyUDFWeekOfYear(VectorizedRowBatch batch) throws HiveException VectorExpression udf; udf = new VectorUDFWeekOfYearDate(0, 1); udf.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.dateTypeInfo}); - udf.transientInit(new HiveConf()); + udf.transientInit(HiveConf.create()); udf.evaluate(batch); final int in = 0; final int out = 1; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java index ac3c0abae89e..d0db793f31fd 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java @@ -82,7 +82,7 @@ public class TestVectorFilterCompare { public TestVectorFilterCompare() { // Arithmetic operations rely on getting conf from SessionState, need to initialize here. - SessionState ss = new SessionState(new HiveConf()); + SessionState ss = new SessionState(HiveConf.create()); ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest"); SessionState.setCurrentSessionState(ss); } @@ -583,7 +583,7 @@ private void doRowFilterCompareTest(TypeInfo typeInfo1, " exprDesc " + exprDesc.toString()); */ - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf); evaluator.initialize(rowInspector); @@ -639,7 +639,7 @@ private void doVectorFilterCompareTest(TypeInfo typeInfo1, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); if (filterCompareTestMode == FilterCompareTestMode.ADAPTOR) { hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java index d9d56a0558f8..a153ba1df735 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java @@ -655,7 +655,7 @@ public void testFilterLongIn() throws HiveException { FilterLongColumnInList f = new FilterLongColumnInList(0); f.setInListValues(inList); f.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.longTypeInfo}); - f.transientInit(new HiveConf()); + f.transientInit(HiveConf.create()); VectorExpression expr1 = f; // Basic case @@ -758,7 +758,7 @@ public void 
testFilterDoubleIn() throws HiveException { FilterDoubleColumnInList f = new FilterDoubleColumnInList(0); f.setInListValues(inList); f.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.doubleTypeInfo}); - f.transientInit(new HiveConf()); + f.transientInit(HiveConf.create()); VectorExpression expr1 = f; // Basic sanity check. Other cases are not skipped because it is similar to the case for Long. diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java index 62f0c51cebfb..b9d4f216d5b7 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java @@ -48,7 +48,7 @@ public class TestVectorGenericDateExpressions { private Charset utf8 = StandardCharsets.UTF_8; - private HiveConf hiveConf = new HiveConf(); + private HiveConf hiveConf = HiveConf.create(); private int size = 200; private Random random = new Random(); private SimpleDateFormat formatter = getFormatter(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java index cbee1eefda13..0d5381d46ff6 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java @@ -420,7 +420,7 @@ private void doRowIfTest(TypeInfo typeInfo, List columns, List void verify(HookContext.HookType type, HiveHooks loader, Class ex @Test public void testAddHooks() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); HiveHooks loader = new HiveHooks(hiveConf); verify(HookContext.HookType.PRE_EXEC_HOOK, loader, 
ExecuteWithHookContext.class, PreExecHook.class); verify(HookContext.HookType.POST_EXEC_HOOK, loader, ExecuteWithHookContext.class, PostExecHook.class); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveProtoLoggingHook.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveProtoLoggingHook.java index 6b65f0c7873d..2eb9e7d6ff20 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveProtoLoggingHook.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHiveProtoLoggingHook.java @@ -78,7 +78,7 @@ public class TestHiveProtoLoggingHook { @Before public void setup() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set(HiveConf.ConfVars.LLAP_DAEMON_QUEUE_NAME.varname, "llap_queue"); conf.set(HiveConf.ConfVars.HIVE_PROTO_EVENTS_QUEUE_CAPACITY.varname, "3"); conf.set(MRJobConfig.QUEUE_NAME, "mr_queue"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java index f1a9a44e1fe5..01f83cfeb105 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java @@ -31,7 +31,7 @@ public class TestHooks { @BeforeClass public static void onetimeSetup() throws Exception { - HiveConf conf = new HiveConf(TestHooks.class); + HiveConf conf = HiveConf.create(TestHooks.class); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); @@ -41,7 +41,7 @@ public static void onetimeSetup() throws Exception { @AfterClass public static void onetimeTeardown() throws Exception { - HiveConf conf = new HiveConf(TestHooks.class); + HiveConf conf = HiveConf.create(TestHooks.class); Driver driver = createDriver(conf); driver.run("drop table t1"); } @@ -52,7 +52,7 @@ public void setup() { @Test public void testRedactLogString() throws Exception { - HiveConf conf = new HiveConf(TestHooks.class); + 
HiveConf conf = HiveConf.create(TestHooks.class); String str; HiveConf.setVar(conf, HiveConf.ConfVars.QUERYREDACTORHOOKS, SimpleQueryRedactor.class.getName()); @@ -69,7 +69,7 @@ public void testRedactLogString() throws Exception { @Test public void testQueryRedactor() throws Exception { - HiveConf conf = new HiveConf(TestHooks.class); + HiveConf conf = HiveConf.create(TestHooks.class); HiveConf.setVar(conf, HiveConf.ConfVars.QUERYREDACTORHOOKS, SimpleQueryRedactor.class.getName()); conf diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java index beabd9430612..6e0ebc15b841 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java @@ -41,7 +41,7 @@ public class TestMetricsQueryLifeTimeHook { @Before public void before() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java index 263c68acb839..4a350c688451 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java @@ -41,7 +41,7 @@ public class TestQueryHooks { @BeforeClass public static void setUpBeforeClass() { - conf = new HiveConf(TestQueryHooks.class); + conf = HiveConf.create(TestQueryHooks.class); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java index 7475294d57dd..941a053b81a2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java @@ -116,7 +116,7 @@ public void testDeltaMetaWithFile() throws Exception { assertEquals(2001L, copy.getMaxWriteId()); assertEquals(0, copy.getStmtIds().size()); AcidInputFormat.DeltaFileMetaData fileMetaData = copy.getDeltaFiles().get(0); - Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, new HiveConf()); + Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, HiveConf.create()); Assert.assertTrue(fileId instanceof SyntheticFileId); assertEquals(100, ((SyntheticFileId)fileId).getModTime()); @@ -148,7 +148,7 @@ public void testDeltaMetaWithHdfsFileId() throws Exception { assertEquals(0, copy.getStmtIds().size()); AcidInputFormat.DeltaFileMetaData fileMetaData = copy.getDeltaFiles().get(0); - Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, new HiveConf()); + Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, HiveConf.create()); Assert.assertTrue(fileId instanceof Long); long fId = (Long)fileId; assertEquals(123L, fId); @@ -177,7 +177,7 @@ public void testDeltaMetaWithAttemptId() throws Exception { assertEquals(2001L, copy.getMaxWriteId()); assertEquals(0, copy.getStmtIds().size()); AcidInputFormat.DeltaFileMetaData fileMetaData = copy.getDeltaFiles().get(0); - Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, new HiveConf()); + Object fileId = fileMetaData.getFileId(new Path("deleteDelta"), 1, HiveConf.create()); Assert.assertTrue(fileId instanceof SyntheticFileId); assertEquals(100, ((SyntheticFileId)fileId).getModTime()); @@ -208,7 +208,7 @@ public void testDeltaMetaWithFileMultiStatement() throws Exception { assertEquals(2000L, copy.getMinWriteId()); 
assertEquals(2001L, copy.getMaxWriteId()); assertEquals(3, copy.getStmtIds().size()); - Object fileId = copy.getDeltaFiles().get(0).getFileId(new Path("deleteDelta"), 1, new HiveConf()); + Object fileId = copy.getDeltaFiles().get(0).getFileId(new Path("deleteDelta"), 1, HiveConf.create()); Assert.assertTrue(fileId instanceof SyntheticFileId); assertEquals(100, ((SyntheticFileId)fileId).getModTime()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java index 070ad4b88ab3..4e720291d3d4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidUtils.java @@ -698,7 +698,7 @@ public void testTableIsSoftDeleteCompliant(){ Table table = new Table("dummy", "test_acid"); table.setTableType(TableType.MANAGED_TABLE); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_ACID_LOCKLESS_READS_ENABLED, true); Map parameters = new HashMap<>(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index afdc6844109a..2d0eeaf86f81 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -131,7 +131,7 @@ public void testCombine() throws Exception { new Path(dataDir2, "combinefile2_1")); - HiveConf hiveConf = new HiveConf(TestSymlinkTextInputFormat.class); + HiveConf hiveConf = HiveConf.create(TestSymlinkTextInputFormat.class); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestSerializer.java b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestSerializer.java index 
f8daada3d0d3..84236c20d33b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestSerializer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestSerializer.java @@ -31,7 +31,7 @@ public class TestSerializer { public void testEmptyList() { List typeInfos = TypeInfoUtils.getTypeInfosFromTypeString("array"); List fieldNames = Arrays.asList(new String[]{"a"}); - Serializer converter = new Serializer(new HiveConf(), "attemptId", typeInfos, fieldNames); + Serializer converter = new Serializer(HiveConf.create(), "attemptId", typeInfos, fieldNames); ArrowWrapperWritable writable = converter.emptyBatch(); Assert.assertEquals("Schema>", writable.getVectorSchemaRoot().getSchema().toString()); @@ -41,7 +41,7 @@ public void testEmptyList() { public void testEmptyStruct() { List typeInfos = TypeInfoUtils.getTypeInfosFromTypeString("struct"); List fieldNames = Arrays.asList(new String[] { "a" }); - Serializer converter = new Serializer(new HiveConf(), "attemptId", typeInfos, fieldNames); + Serializer converter = new Serializer(HiveConf.create(), "attemptId", typeInfos, fieldNames); ArrowWrapperWritable writable = converter.emptyBatch(); Assert.assertEquals("Schema>", writable.getVectorSchemaRoot().getSchema().toString()); @@ -51,7 +51,7 @@ public void testEmptyStruct() { public void testEmptyMap() { List typeInfos = TypeInfoUtils.getTypeInfosFromTypeString("map"); List fieldNames = Arrays.asList(new String[] { "a" }); - Serializer converter = new Serializer(new HiveConf(), "attemptId", typeInfos, fieldNames); + Serializer converter = new Serializer(HiveConf.create(), "attemptId", typeInfos, fieldNames); ArrowWrapperWritable writable = converter.emptyBatch(); Assert.assertEquals("Schema not null>>", writable.getVectorSchemaRoot().getSchema().toString()); @@ -62,7 +62,7 @@ public void testEmptyComplexStruct() { List typeInfos = TypeInfoUtils.getTypeInfosFromTypeString( "struct,c:map,d:struct,f:map>>"); List fieldNames = Arrays.asList(new String[] { "a" }); - 
Serializer converter = new Serializer(new HiveConf(), "attemptId", typeInfos, fieldNames); + Serializer converter = new Serializer(HiveConf.create(), "attemptId", typeInfos, fieldNames); ArrowWrapperWritable writable = converter.emptyBatch(); Assert.assertEquals( "Schema, c: Map(false) typeInfos = TypeInfoUtils.getTypeInfosFromTypeString( "map,struct,c:map>>"); List fieldNames = Arrays.asList(new String[] { "a" }); - Serializer converter = new Serializer(new HiveConf(), "attemptId", typeInfos, fieldNames); + Serializer converter = new Serializer(HiveConf.create(), "attemptId", typeInfos, fieldNames); ArrowWrapperWritable writable = converter.emptyBatch(); Assert.assertEquals( "Schema not null, value: Struct typeInfos = TypeInfoUtils.getTypeInfosFromTypeString("struct>," + "c:array>,d:array,f:map>>>"); List fieldNames = Arrays.asList(new String[] { "a" }); - Serializer converter = new Serializer(new HiveConf(), "attemptId", typeInfos, fieldNames); + Serializer converter = new Serializer(HiveConf.create(), "attemptId", typeInfos, fieldNames); ArrowWrapperWritable writable = converter.emptyBatch(); Assert.assertEquals( "Schema>, c: List<$data$: Map(false) fileList = new ArrayList(); assertEquals(false, - ((InputFormatChecker) in).validateInput(fs, new HiveConf(), fileList)); + ((InputFormatChecker) in).validateInput(fs, HiveConf.create(), fileList)); fileList.add(fs.getFileStatus(testFilePath)); assertEquals(true, - ((InputFormatChecker) in).validateInput(fs, new HiveConf(), fileList)); + ((InputFormatChecker) in).validateInput(fs, HiveConf.create(), fileList)); fileList.add(fs.getFileStatus(workDir)); assertEquals(false, - ((InputFormatChecker) in).validateInput(fs, new HiveConf(), fileList)); + ((InputFormatChecker) in).validateInput(fs, HiveConf.create(), fileList)); // read the whole file diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index 
681943285fe7..ee210f4f80f4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -279,7 +279,7 @@ public void testReaderPair() throws Exception { RecordIdentifier minKey = new RecordIdentifier(10, 20, 30); RecordIdentifier maxKey = new RecordIdentifier(40, 50, 60); ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, minKey, maxKey, - new Reader.Options(), new HiveConf()); + new Reader.Options(), HiveConf.create()); RecordReader recordReader = pair.getRecordReader(); checkReaderRecord(10, 20, 40, 120, "third", key, pair); @@ -298,7 +298,7 @@ public void testReaderPairNoMinMaxKeys() throws Exception { // null min and max keys forces a full scan of all records ReaderPair pair = new OrcRawRecordMerger.ReaderPairAcid(key, reader, null, null, - new Reader.Options(), new HiveConf()); + new Reader.Options(), HiveConf.create()); RecordReader recordReader = pair.getRecordReader(); checkReaderRecord(10, 20, 20, 100, "first", key, pair); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/DbTxnManagerEndToEndTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/DbTxnManagerEndToEndTestBase.java index 2b14ccda1d55..d5604c658bb6 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/DbTxnManagerEndToEndTestBase.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/DbTxnManagerEndToEndTestBase.java @@ -46,7 +46,7 @@ public abstract class DbTxnManagerEndToEndTestBase { DbTxnManagerEndToEndTestBase.class.getCanonicalName() + "-" + System.currentTimeMillis()) .getPath().replaceAll("\\\\", "/"); - protected static HiveConf conf = new HiveConf(Driver.class); + protected static HiveConf conf = HiveConf.create(Driver.class); protected HiveTxnManager txnMgr; protected Context ctx; protected Driver driver, driver2; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java 
b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index c2d93e0f95ed..1bf3a3da1a39 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -67,7 +67,7 @@ */ public class TestDbTxnManager { private static final int TEST_TIMED_OUT_TXN_ABORT_BATCH_SIZE = 1000; - private final HiveConf conf = new HiveConf(); + private final HiveConf conf = HiveConf.create(); private HiveTxnManager txnMgr; private AcidHouseKeeperService houseKeeperService = null; private final Context ctx; @@ -395,7 +395,7 @@ public void testDDLNoLock() throws Exception { @Test public void concurrencyFalse() throws Exception { - HiveConf badConf = new HiveConf(); + HiveConf badConf = HiveConf.create(); if(badConf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY)) { //TxnManagerFactory is a singleton, so if the default is true, it has already been //created and won't throw diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java index 82eef64125bf..c2367a54afc4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java @@ -2753,7 +2753,7 @@ private void testInsertOverwriteMergeInsertDynamicPartitioningConflict(boolean s driver.run("create table source (a int, b int) partitioned by (c int) stored as orc TBLPROPERTIES ('transactional'='true')"); driver.run("insert into source values (3,3,2), (4,4,2)"); - DbTxnManager txnMgr2 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(new HiveConf(conf)); + DbTxnManager txnMgr2 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(HiveConf.create(conf)); if (!slowCompile) { // txn 1 insert data to an old and a new partition @@ -2763,7 +2763,7 @@ private void 
testInsertOverwriteMergeInsertDynamicPartitioningConflict(boolean s driver2.compileAndRespond("insert overwrite table target partition (c=2) select 3, 3"); } - DbTxnManager txnMgr3 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(new HiveConf(conf)); + DbTxnManager txnMgr3 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(HiveConf.create(conf)); swapTxnManager(txnMgr3); // Compile txn 3 with only 1 known partition diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 62e9bc4f441b..9e2ae268b458 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -50,7 +50,7 @@ @RunWith(MockitoJUnitRunner.class) public class TestDummyTxnManager { - private final HiveConf conf = new HiveConf(); + private final HiveConf conf = HiveConf.create(); private HiveTxnManager txnMgr; private Context ctx; private int nextInput = 1; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java index 4a885518711c..54509b9a136e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java @@ -32,11 +32,11 @@ public class TestEmbeddedLockManager { private int counter; - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); @Test public void testLocking() throws LockException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set("hive.lock.numretries", "0"); conf.set("hive.unlock.numretries", "0"); EmbeddedLockManager manager = new EmbeddedLockManager(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestHiveLockObject.java 
b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestHiveLockObject.java index 52f86bb0956a..12482e9beff4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestHiveLockObject.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestHiveLockObject.java @@ -26,7 +26,7 @@ public class TestHiveLockObject { - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); @Test public void testEqualsAndHashCode() { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java index 3ab74c584e8e..8e37d31d1c8d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java @@ -62,7 +62,7 @@ public class TestZookeeperLockManager { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setVar(ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES, "100ms"); lockObjData = new HiveLockObjectData("1", "10", "SHARED", "show tables", conf); hiveLock = new HiveLockObject(TABLE, lockObjData); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 142f5cec22dc..ce9d3f500956 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -96,7 +96,7 @@ public class TestHive { @BeforeClass public static void setUp() throws Exception { - hiveConf = new HiveConf(TestHive.class); + hiveConf = HiveConf.create(TestHive.class); hm = setUpImpl(hiveConf); } @@ -458,14 +458,14 @@ public void testGetAndDropTables() throws Throwable { @Test public void testWmNamespaceHandling() throws Throwable { - HiveConf hiveConf = new HiveConf(this.getClass()); + HiveConf hiveConf = HiveConf.create(this.getClass()); Hive hm = 
setUpImpl(hiveConf); // TODO: threadlocals... Why is all this Hive client stuff like that?!! final AtomicReference hm2r = new AtomicReference<>(); Thread pointlessThread = new Thread(new Runnable() { @Override public void run() { - HiveConf hiveConf2 = new HiveConf(this.getClass()); + HiveConf hiveConf2 = HiveConf.create(this.getClass()); hiveConf2.setVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE, "hm2"); try { hm2r.set(setUpImpl(hiveConf2)); @@ -864,7 +864,7 @@ public void testHiveRefreshOnConfChange() throws Throwable{ Hive newHiveObj; //if HiveConf has not changed, same object should be returned - HiveConf newHconf = new HiveConf(hiveConf); + HiveConf newHconf = HiveConf.create(hiveConf); newHiveObj = Hive.get(newHconf); assertTrue(prevHiveObj == newHiveObj); @@ -876,7 +876,7 @@ public void testHiveRefreshOnConfChange() throws Throwable{ prevHiveObj = Hive.get(); prevHiveObj.getDatabaseCurrent(); //change value of a metavar config param in new hive conf - newHconf = new HiveConf(hiveConf); + newHconf = HiveConf.create(hiveConf); newHconf.setIntVar(ConfVars.METASTORETHRIFTCONNECTIONRETRIES, newHconf.getIntVar(ConfVars.METASTORETHRIFTCONNECTIONRETRIES) + 1); newHiveObj = Hive.get(newHconf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java index f9c10f5ffb5d..4a3d8c2214a4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java @@ -62,7 +62,7 @@ public static List getParameters() throws Exception { @BeforeClass public static void setUp() { - hiveConf = new HiveConf(TestHiveCopyFiles.class); + hiveConf = HiveConf.create(TestHiveCopyFiles.class); SessionState.start(hiveConf); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreClientApiArgumentsChecker.java 
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreClientApiArgumentsChecker.java index 175b47c47d83..7f2efda51abe 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreClientApiArgumentsChecker.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreClientApiArgumentsChecker.java @@ -71,7 +71,7 @@ public class TestHiveMetaStoreClientApiArgumentsChecker { @Before public void setUp() throws Exception { - client = new TestHiveMetaStoreClient(new HiveConf(Hive.class)); + client = new TestHiveMetaStoreClient(HiveConf.create(Hive.class)); hive = Hive.get(client); hive.getConf().set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "15"); hive.getConf().set(MetastoreConf.ConfVars.MSCK_PATH_VALIDATION.getVarname(), "throw"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java index 5a695e941993..81898db9feca 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java @@ -42,7 +42,7 @@ public class TestHiveRemote extends TestHive { */ @BeforeClass public static void setUp() throws Exception { - hiveConf = new HiveConf(TestHiveRemote.class); + hiveConf = HiveConf.create(TestHiveRemote.class); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveUtils.java index b528b208bbf8..dec2e3d21110 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveUtils.java @@ -90,7 +90,7 @@ public void testUnparseIdentifierWithDoubleQuotesWhenQuotationIsStandard() { } 
private HiveConf createConf(Quotation quotation) { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT, quotation.stringValue()); return conf; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java index a96d93e04291..c10003f6e601 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java @@ -63,7 +63,7 @@ public class TestGenMapRedUtilsCreateConditionalTask { @BeforeClass public static void initializeSessionState() { - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); } @Before diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/TestCBORuleFiredOnlyOnce.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/TestCBORuleFiredOnlyOnce.java index 25e20b6167ff..6bf1e98f8bb3 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/TestCBORuleFiredOnlyOnce.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/TestCBORuleFiredOnlyOnce.java @@ -51,7 +51,7 @@ public class TestCBORuleFiredOnlyOnce { @Test public void testRuleFiredOnlyOnce() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); // Create HepPlanner HepProgramBuilder programBuilder = new HepProgramBuilder(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/stats/TestFilterSelectivityEstimator.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/stats/TestFilterSelectivityEstimator.java index ade4f89d0e61..01a56186d0bf 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/stats/TestFilterSelectivityEstimator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/stats/TestFilterSelectivityEstimator.java @@ -112,7 +112,7 
@@ public static void beforeClass() { int11 = REX_BUILDER.makeLiteral(11, integerType, true); tableType = TYPE_FACTORY.createStructType(ImmutableList.of(integerType), ImmutableList.of("f1")); - RelOptPlanner planner = CalcitePlanner.createPlanner(new HiveConf()); + RelOptPlanner planner = CalcitePlanner.createPlanner(HiveConf.create()); relOptCluster = RelOptCluster.create(planner, REX_BUILDER); stats = new ColStatistics(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestRexNodeConverter.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestRexNodeConverter.java index 341097b8dd70..b3b003e4cbae 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestRexNodeConverter.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestRexNodeConverter.java @@ -95,7 +95,7 @@ public static void beforeClass() { ImmutableList.of("f1", "f2") ); - RelOptPlanner planner = CalcitePlanner.createPlanner(new HiveConf()); + RelOptPlanner planner = CalcitePlanner.createPlanner(HiveConf.create()); relOptCluster = RelOptCluster.create(planner, REX_BUILDER); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestNullScanTaskDispatcher.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestNullScanTaskDispatcher.java index c9fc2a54edd6..5970eefff1f4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestNullScanTaskDispatcher.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestNullScanTaskDispatcher.java @@ -83,7 +83,7 @@ public class TestNullScanTaskDispatcher { @Before public void setup() { - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.set("fs.mock.impl", MockFileSystem.class.getName()); hiveConf.setBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES, true); sessionState = SessionState.start(hiveConf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java index 305c17065616..d693fbe0eaf4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java @@ -196,7 +196,7 @@ private Map> getColsFromReadEntity(Set inputs) } private static Driver createDriver() { - HiveConf conf = new HiveConf(Driver.class); + HiveConf conf = HiveConf.create(Driver.class); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java index 888e4efcbcf1..8c230992d21d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java @@ -69,7 +69,7 @@ public class TestGenTezWork { @Before public void setUp() throws Exception { // Init conf - final HiveConf conf = new HiveConf(SemanticAnalyzer.class); + final HiveConf conf = HiveConf.create(SemanticAnalyzer.class); SessionState.start(conf); // Init parse context diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java index 7d96e630365f..00d88c885d41 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java @@ -166,7 +166,7 @@ public void testDecimalType9() throws ParseException { } private Driver createDriver() { - HiveConf conf = new HiveConf(Driver.class); + HiveConf conf = HiveConf.create(Driver.class); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseUtils.java 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseUtils.java index fb433c09045c..a664ce1934a0 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseUtils.java @@ -49,7 +49,7 @@ public class TestParseUtils { public TestParseUtils(String query, TxnType txnType) { this.query = query; this.txnType = txnType; - this.conf = new HiveConf(); + this.conf = HiveConf.create(); } @Before diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java index 322fbbd7f324..0805dbb25e83 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java @@ -39,7 +39,7 @@ public class TestQBJoinTreeApplyPredicate { @BeforeClass public static void initialize() { queryState = - new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build(); + new QueryState.Builder().withHiveConf(HiveConf.create(SemanticAnalyzer.class)).build(); conf = queryState.getConf(); SessionState.start(conf); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java index 5749fb29908f..c6a3758ee03f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java @@ -49,7 +49,7 @@ public class TestQBSubQuery { @BeforeClass public static void initialize() { queryState = - new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build(); + new QueryState.Builder().withHiveConf(HiveConf.create(SemanticAnalyzer.class)).build(); conf = queryState.getConf(); SessionState.start(conf); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java index b1c8c0fbde08..c9c898d05c65 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java @@ -32,7 +32,7 @@ public class TestReplicationSemanticAnalyzer { private static HiveConf hiveConf = buildHiveConf(); public static HiveConf buildHiveConf() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HIVE_QUOTEDID_SUPPORT, Quotation.NONE.stringValue()); return conf; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java index 0ab1c70e37cf..eea9e79047fa 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java @@ -110,7 +110,7 @@ public void testUnescapeSQLString() { @Test public void testSkipAuthorization() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true); hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SERVICE_USERS, "u1,u2"); SessionState ss = new SessionState(hiveConf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java index 77d2454f492a..ce153e3c865e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java @@ -37,7 +37,7 @@ public class PrivilegesTestBase { public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db) throws Exception { - Context ctx=new Context(new HiveConf()); + Context ctx=new 
Context(HiveConf.create()); DDLWork work = AuthorizationTestUtil.analyze( "GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db, ctx); GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestSessionUserName.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestSessionUserName.java index 61d755d2fd40..cf081c4cba31 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestSessionUserName.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestSessionUserName.java @@ -132,7 +132,7 @@ private void setupDataNucleusFreeHive(HiveConf hiveConf) throws MetaException { * that captures the given user name */ private HiveConf getAuthV2HiveConf() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, HiveAuthorizerStoringUserNameFactory.class.getName()); conf.setVar(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java index 4740802d7ad1..418e6f443f73 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java @@ -71,7 +71,7 @@ public void distcpShouldNotBeCalledOnlyForOneFile() throws Exception { mockStatic(UserGroupInformation.class); when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class)); - HiveConf conf = Mockito.spy(new HiveConf()); + HiveConf conf = Mockito.spy(HiveConf.create()); doReturn(1L).when(conf).getLong(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE.varname, 32L * 1024 * 1024); CopyUtils copyUtils = new CopyUtils("", conf, null); long MB_128 = 128 * 1024 * 1024; @@ -83,7 +83,7 @@ public void distcpShouldNotBeCalledForSmallerFileSize() throws Exception { 
mockStatic(UserGroupInformation.class); when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class)); - HiveConf conf = Mockito.spy(new HiveConf()); + HiveConf conf = Mockito.spy(HiveConf.create()); CopyUtils copyUtils = new CopyUtils("", conf, null); long MB_16 = 16 * 1024 * 1024; assertFalse(copyUtils.limitReachedForLocalCopy(MB_16, 100L)); @@ -210,7 +210,7 @@ public void testRetryableFSCalls() throws Exception { public void testParallelCopySuccess() throws Exception { mockStatic(UserGroupInformation.class); when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class)); - HiveConf conf = Mockito.spy(new HiveConf()); + HiveConf conf = Mockito.spy(HiveConf.create()); when(conf.getIntVar(HiveConf.ConfVars.REPL_PARALLEL_COPY_TASKS)).thenReturn(2); when(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)).thenReturn(true); FileSystem destFs = mock(FileSystem.class); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java index 8ac2896ccf6f..469cc8dbe4de 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java @@ -80,7 +80,7 @@ public class TestReplicationMetricCollector { @Before public void setup() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set(Constants.SCHEDULED_QUERY_SCHEDULENAME, "repl"); conf.set(Constants.SCHEDULED_QUERY_EXECUTIONID, "1"); MetricCollector.getInstance().init(conf); @@ -102,7 +102,7 @@ public void finalize() { @Test public void testFailureCacheHardLimit() throws Exception { MetricCollector.getInstance().deinit(); - conf = new HiveConf(); + conf = HiveConf.create(); MetricCollector collector = MetricCollector.getInstance(); MetricCollector metricCollectorSpy = 
Mockito.spy(collector); Mockito.doReturn(1L).when(metricCollectorSpy).getMaxSize(Mockito.any()); @@ -121,7 +121,7 @@ public void testFailureCacheHardLimit() throws Exception { @Test public void testFailureNoScheduledId() throws Exception { MetricCollector.getInstance().deinit(); - conf = new HiveConf(); + conf = HiveConf.create(); MetricCollector.getInstance().init(conf); ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "dummyDir", conf, 0L); @@ -136,7 +136,7 @@ public void testFailureNoScheduledId() throws Exception { @Test public void testFailureNoPolicyId() throws Exception { MetricCollector.getInstance().deinit(); - conf = new HiveConf(); + conf = HiveConf.create(); MetricCollector.getInstance().init(conf); ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "dummyDir", conf, 0L); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricSink.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricSink.java index 5059f9af4724..8dac030793e2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricSink.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricSink.java @@ -75,7 +75,7 @@ public class TestReplicationMetricSink { @Before public void setup() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set(Constants.SCHEDULED_QUERY_SCHEDULENAME, "repl"); conf.set(Constants.SCHEDULED_QUERY_EXECUTIONID, "1"); MetricSink metricSinkSpy = Mockito.spy(MetricSink.getInstance()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricUpdateOnFailure.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricUpdateOnFailure.java index d8859e3b8749..492b3ac6acd1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricUpdateOnFailure.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricUpdateOnFailure.java @@ -69,7 +69,7 @@ public class TestReplicationMetricUpdateOnFailure { @Before public void setup() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set(HiveConf.ConfVars.HIVE_IN_TEST.varname, "false"); conf.set(Constants.SCHEDULED_QUERY_SCHEDULENAME, "repl"); conf.set(Constants.SCHEDULED_QUERY_EXECUTIONID, "1"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestBigIntCompareValidation.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestBigIntCompareValidation.java index bb4f696afab2..2360053c2241 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestBigIntCompareValidation.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestBigIntCompareValidation.java @@ -44,7 +44,7 @@ public class TestBigIntCompareValidation { public void setUp() throws Exception { this.constant = new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 0L); this.processor = ExprNodeTypeCheck.getExprNodeDefaultExprProcessor(); - this.errorMsg = HiveConf.StrictChecks.checkTypeSafety(new HiveConf()); + this.errorMsg = HiveConf.StrictChecks.checkTypeSafety(HiveConf.create()); this.functionInfo = FunctionRegistry.getFunctionInfo("="); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestDecimalStringValidation.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestDecimalStringValidation.java index 0d295ad64be1..d2c4c2451781 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestDecimalStringValidation.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestDecimalStringValidation.java @@ -100,7 +100,7 @@ public static Collection params() throws Exception { @Test public void testValidationDecimalWithCharacterFailsWhenStrictChecksEnabled() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_STRICT_CHECKS_TYPE_SAFETY, true); try { 
validateCall(conf); @@ -112,7 +112,7 @@ public void testValidationDecimalWithCharacterFailsWhenStrictChecksEnabled() { @Test public void testValidationDecimalWithCharacterSucceedsWhenStrictChecksDisabled() throws SemanticException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_STRICT_CHECKS_TYPE_SAFETY, false); validateCall(conf); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprProcessorGetFuncExpr.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprProcessorGetFuncExpr.java index f944f91f17ff..d01381e1bd5c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprProcessorGetFuncExpr.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprProcessorGetFuncExpr.java @@ -47,7 +47,7 @@ public class TestExprProcessorGetFuncExpr { @Before public void setUp() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_ALLOW_UDF_LOAD_ON_DEMAND, true); SessionState sessionState = new SessionState(hiveConf, System.getProperty("user.name")); SessionState.setCurrentSessionState(sessionState); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java index 780fb2a58e22..219e47b0ac73 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java @@ -69,7 +69,7 @@ public void testResolvingDriverAlias() throws Exception { ctx.setTaskToAliases(taskToAliases); ctx.setAliasToKnownSize(aliasToKnownSize); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE, 4096); // alias3 only can be selected diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java index 57508b318835..9d249e2e577d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java @@ -174,7 +174,7 @@ public void testSelectEntityInDirectJoinAlias() throws ParseException { * Create driver with the test hook set in config */ private static Driver createDriver() { - HiveConf conf = new HiveConf(Driver.class); + HiveConf conf = HiveConf.create(Driver.class); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java index d3a3cd574068..9ff2b8ae2ff2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java @@ -57,7 +57,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, @BeforeClass public static void onetimeSetup() throws Exception { - HiveConf conf = new HiveConf(Driver.class); + HiveConf conf = HiveConf.create(Driver.class); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java index 0c960f2b8e13..60ed8015616f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java @@ -34,7 +34,7 @@ public class TestCommandProcessorFactory { @Before public void setUp() throws Exception { - conf = new 
HiveConf(); + conf = HiveConf.create(); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java index ddbbef0b0134..52d18d84d146 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java @@ -49,7 +49,7 @@ public static void before() throws Exception { env.put(TEST_ENV_VAR_PASSWORD, TEST_ENV_VAR_PASSWORD_VALUE); setEnv(env); System.setProperty(TEST_SYSTEM_PROPERTY, TEST_SYSTEM_PROPERTY_VALUE); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); SessionState.start(conf); state = SessionState.get(); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java index a04e5f0227be..f97d290520db 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java @@ -67,7 +67,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC @BeforeClass public static void beforeTest() throws Exception { UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("hive")); - conf = new HiveConf(); + conf = HiveConf.create(); // Turn on mocked authorization conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java index 3a59ad54c4a0..0aa06aee7a96 100644 --- 
a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java @@ -45,7 +45,7 @@ public class TestSQLStdHiveAccessControllerCLI { */ @Test public void testConfigProcessing() throws HiveAuthzPluginException { - HiveConf processedConf = new HiveConf(); + HiveConf processedConf = HiveConf.create(); SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(null, processedConf, new HadoopDefaultAuthenticator(), getCLISessionCtx() ); @@ -73,7 +73,7 @@ private HiveAuthzSessionContext getCLISessionCtx() { */ @Test public void testAuthEnable() throws Exception { - HiveConf processedConf = new HiveConf(); + HiveConf processedConf = HiveConf.create(); processedConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); HiveAuthorizerFactory authorizerFactory = new SQLStdHiveAuthorizerFactory(); HiveAuthorizer authorizer = authorizerFactory.createHiveAuthorizer(null, processedConf, diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java index b087d3beab26..173a1bfd969b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java @@ -70,7 +70,7 @@ public void testConfigProcessing() throws HiveAuthzPluginException, SecurityExce } private HiveConf newAuthEnabledConf() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); return conf; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java 
b/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java index cc251789c11d..e27cd2a41c67 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java @@ -49,7 +49,7 @@ public class TestAddResource { @Before public void setup() throws IOException { - conf = new HiveConf(); + conf = HiveConf.create(); t = ResourceType.JAR; //Generate test jar files @@ -166,7 +166,7 @@ public void testDuplicateAdds() throws URISyntaxException, IOException { @Test public void testUnion() throws URISyntaxException, IOException { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); SessionState ss = Mockito.spy(SessionState.start(conf).get()); ResourceType t = ResourceType.JAR; String query1 = "testQuery1"; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java index 4c374e8d418a..2dc48c77e6c0 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java @@ -86,7 +86,7 @@ public static Collection data() { @Before public void setUp() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String tmp = System.getProperty("java.io.tmpdir"); File tmpDir = new File(tmp); if (!tmpDir.exists()) { @@ -134,7 +134,7 @@ public void testgetDbName() throws Exception { SessionState.get().getCurrentDatabase()); //verify that a new sessionstate has default db - SessionState.start(new HiveConf()); + SessionState.start(HiveConf.create()); assertEquals(Warehouse.DEFAULT_DATABASE_NAME, SessionState.get().getCurrentDatabase()); @@ -167,7 +167,7 @@ public void run() { @Test public void testClassLoaderEquality() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); final SessionState ss1 = new SessionState(conf); RegisterJarRunnable otherThread = new 
RegisterJarRunnable("./build/contrib/test/test-udfs.jar", ss1); Thread th1 = new Thread(otherThread); @@ -204,7 +204,7 @@ private void generateRefreshJarFiles(String version) throws IOException, Interru @Test public void testReloadAuxJars2() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, ConfVars.HIVERELOADABLEJARS, hiveReloadPath); SessionState ss = new SessionState(conf); SessionState.start(ss); @@ -274,7 +274,7 @@ public void testReflectionCleanup() throws Exception { @Test public void testReloadExistingAuxJars2() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); HiveConf.setVar(conf, ConfVars.HIVERELOADABLEJARS, hiveReloadPath); SessionState ss = new SessionState(conf); @@ -320,7 +320,7 @@ public void testReloadExistingAuxJars2() { */ @Test public void testCreatePath() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); LocalFileSystem localFileSystem = FileSystem.getLocal(conf); Path repeatedCreate = new Path("repeatedCreate"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestBasicStats.java b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestBasicStats.java index eb362f70e235..76baa0ac9e5e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestBasicStats.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestBasicStats.java @@ -69,7 +69,7 @@ public Partish buildPartition() { public void testDataSizeEstimator() { Partish p1 = new LocalPartishBuilder().totalSize(10).buildPartition(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setFloatVar(ConfVars.HIVE_STATS_DESERIALIZATION_FACTOR, 13.0f); BasicStats.Factory factory = new BasicStats.Factory(new BasicStats.DataSizeEstimator(conf)); @@ -81,7 +81,7 @@ public void testDataSizeEstimator() { @Test public void mergeWithEmpty() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); int avgRowSize = 100; int r0 = 13; int r1 = 15; @@ -107,7 
+107,7 @@ public void mergedKeepsPartialStateEvenIfValuesAreSuccessfullyEstimated() { Partish p0 = new LocalPartishBuilder().numRows(10).rawDataSize(100).buildPartition(); Partish p1 = new LocalPartishBuilder().totalSize(10).buildPartition(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); BasicStats.Factory factory = new BasicStats.Factory(new BasicStats.DataSizeEstimator(conf), new BasicStats.RowNumEstimator(10)); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java index add1b0b82bca..decd807467eb 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java @@ -73,7 +73,7 @@ String getTestDataDir() { @SuppressWarnings("deprecation") @Before public void setUp() throws Exception { - this.hiveConf = new HiveConf(TestStatsUpdaterThread.class); + this.hiveConf = HiveConf.create(TestStatsUpdaterThread.class); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, getTestDataDir()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUtils.java index 07699e0fc601..1bce0e94cc7b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUtils.java @@ -78,7 +78,7 @@ private boolean rangeContains(Range range, Number f) { @Test public void testPrimitiveSizeEstimations() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Set exclusions = Sets.newHashSet(); exclusions.add(serdeConstants.VOID_TYPE_NAME); exclusions.add(serdeConstants.LIST_TYPE_NAME); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java 
b/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java index 4c522c85a143..af1d4dac4cee 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java @@ -40,7 +40,7 @@ public class TestLineageInfo { @Before public void before() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); SessionState.start(conf); ctx = new Context(conf); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java index 4abe5d066ed1..fc83d6904778 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java @@ -124,7 +124,7 @@ public abstract class CompactorTest { @Before public void setup() throws Exception { - setup(new HiveConf()); + setup(HiveConf.create()); } protected final void setup(HiveConf conf) throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetricFlags.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetricFlags.java index 1e6b77f25dd2..e54b177e4265 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetricFlags.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetricFlags.java @@ -42,7 +42,7 @@ boolean useHive130DeltaDirName() { @Test(expected = javax.management.InstanceNotFoundException.class) public void testDeltaFilesMetricFromInitiatorWithMetricsDisabled() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), false); setup(conf); startInitiator(); @@ -53,7 +53,7 @@ public void testDeltaFilesMetricFromInitiatorWithMetricsDisabled() throws Except @Test(expected = javax.management.InstanceNotFoundException.class) public void 
testDeltaFilesMetricFromWorkerWithMetricsDisabled() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), false); setup(conf); startWorker(); @@ -64,7 +64,7 @@ public void testDeltaFilesMetricFromWorkerWithMetricsDisabled() throws Exception @Test(expected = javax.management.InstanceNotFoundException.class) public void testDeltaFilesMetricFromCleanerWithMetricsDisabled() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), false); setup(conf); startCleaner(); @@ -75,7 +75,7 @@ public void testDeltaFilesMetricFromCleanerWithMetricsDisabled() throws Exceptio @Test(expected = javax.management.InstanceNotFoundException.class) public void testDeltaFilesMetricFromInitiatorWithAcidMetricsThreadDisabled() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolean(MetastoreConf.ConfVars.METASTORE_ACIDMETRICS_THREAD_ON.getVarname(), false); setup(conf); startInitiator(); @@ -86,7 +86,7 @@ public void testDeltaFilesMetricFromInitiatorWithAcidMetricsThreadDisabled() thr @Test(expected = javax.management.InstanceNotFoundException.class) public void testDeltaFilesMetricFromWorkerWithAcidMetricsThreadDisabled() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolean(MetastoreConf.ConfVars.METASTORE_ACIDMETRICS_THREAD_ON.getVarname(), false); setup(conf); startWorker(); @@ -97,7 +97,7 @@ public void testDeltaFilesMetricFromWorkerWithAcidMetricsThreadDisabled() throws @Test(expected = javax.management.InstanceNotFoundException.class) public void testDeltaFilesMetricFromCleanerWithAcidMetricsThreadDisabled() throws Exception { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setBoolean(MetastoreConf.ConfVars.METASTORE_ACIDMETRICS_THREAD_ON.getVarname(), false); setup(conf); startCleaner(); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetrics.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetrics.java index 0e2886f632e1..9b29b84b3038 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetrics.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestDeltaFilesMetrics.java @@ -65,7 +65,7 @@ private void setUpHiveConf() { @Override @Before public void setup() throws Exception { - this.conf = new HiveConf(); + this.conf = HiveConf.create(); setUpHiveConf(); setup(conf); MetricsFactory.init(conf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestMRCompactorJobQueueConfiguration.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestMRCompactorJobQueueConfiguration.java index 8a709b5a168a..e62f63ad6b14 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestMRCompactorJobQueueConfiguration.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestMRCompactorJobQueueConfiguration.java @@ -55,7 +55,7 @@ void testCreateBaseJobConfHasCorrectJobQueue(ConfSetup input) { MRCompactor compactor = new MRCompactor(null); CompactionInfo ci = new CompactionInfo(tbl.getDbName(), tbl.getTableName(), null, CompactionType.MAJOR); ci.properties = new StringableMap(input.compactionProperties).toString(); - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); input.confProperties.forEach(conf::set); JobConf c = compactor.createBaseJobConf(conf, "test-job", tbl, tbl.getSd(), new ValidReaderWriteIdList(), ci); assertEquals(input.expectedQueue, c.getQueueName(), "Test failed for the following input:" + input); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java index 905d82a06596..be0d57f3b9a8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java @@ -162,7 +162,7 @@ public void inputSplit() throws Exception { String delta1 = "/warehouse/foo/delta_2_3"; String delta2 = "/warehouse/foo/delta_4_7"; - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Path file = new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest"); FileSystem fs = FileSystem.get(conf); @@ -207,7 +207,7 @@ public void inputSplitNullBase() throws Exception { String delta1 = "/warehouse/foo/delta_2_3"; String delta2 = "/warehouse/foo/delta_4_7"; - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); Path file = new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest"); FileSystem fs = FileSystem.get(conf); @@ -1199,7 +1199,7 @@ public void testTimeoutWithoutInterrupt() throws Exception { private void runTimeoutTest(long timeout, boolean runForever, boolean swallowInterrupt) throws Exception { ExecutorService executor = Executors.newSingleThreadExecutor(); - HiveConf timeoutConf = new HiveConf(conf); + HiveConf timeoutConf = HiveConf.create(conf); timeoutConf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT, timeout, TimeUnit.MILLISECONDS); timeoutConf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_SLEEP_TIME, 20, TimeUnit.MILLISECONDS); timeoutConf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_MAX_SLEEP_TIME, 20, TimeUnit.MILLISECONDS); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestBlockedUdf.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestBlockedUdf.java index 9a29c4008a3e..18650dfabf8c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestBlockedUdf.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestBlockedUdf.java @@ -47,8 +47,8 @@ public void tearDown() throws Exception { */ @Test public void testDefaultWhiteList() throws Exception { - assertEquals("", new 
HiveConf().getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST)); - assertEquals("", new HiveConf().getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST)); + assertEquals("", HiveConf.create().getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST)); + assertEquals("", HiveConf.create().getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST)); FunctionRegistry.setupPermissionsForBuiltinUDFs("", ""); assertEquals("substr", FunctionRegistry.getFunctionInfo("substr").getDisplayName()); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java index 69207a681a61..7bd429474f06 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/AbstractTestGenericUDFOPNumeric.java @@ -31,7 +31,7 @@ public abstract class AbstractTestGenericUDFOPNumeric { public AbstractTestGenericUDFOPNumeric() { // Arithmetic operations rely on getting conf from SessionState, need to initialize here. 
- SessionState ss = new SessionState(new HiveConf()); + SessionState ss = new SessionState(HiveConf.create()); ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest"); SessionState.setCurrentSessionState(ss); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDeserialize.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDeserialize.java index a656db48935d..ac2ae3d1c854 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDeserialize.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDeserialize.java @@ -66,7 +66,7 @@ public void testGZIPBase64Compression() throws HiveException { PrimitiveObjectInspectorFactory.writableStringObjectInspector}); GenericUDF.DeferredObject[] args = new GenericUDF.DeferredObject[2]; String expectedOutput = "test"; - MessageEncoder encoder = MessageFactory.getDefaultInstanceForReplMetrics(new HiveConf()); + MessageEncoder encoder = MessageFactory.getDefaultInstanceForReplMetrics(HiveConf.create()); String serializedMsg = encoder.getSerializer().serialize(expectedOutput); args[0] = new GenericUDF.DeferredJavaObject(new Text(serializedMsg)); args[1] = new GenericUDF.DeferredJavaObject(new Text(encoder.getMessageFormat())); @@ -81,7 +81,7 @@ public void testInvalidCompressionFormat() throws HiveException { PrimitiveObjectInspectorFactory.writableStringObjectInspector}); GenericUDF.DeferredObject[] args = new GenericUDF.DeferredObject[2]; String expectedOutput = "test"; - MessageEncoder encoder = MessageFactory.getDefaultInstanceForReplMetrics(new HiveConf()); + MessageEncoder encoder = MessageFactory.getDefaultInstanceForReplMetrics(HiveConf.create()); String serializedMsg = encoder.getSerializer().serialize(expectedOutput); String compressionFormat = "randomSerialization"; args[0] = new GenericUDF.DeferredJavaObject(new Text(serializedMsg)); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java index bce6f6c048bc..00529da1e434 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java @@ -41,7 +41,7 @@ public class TestGenericUDTFGetSQLSchema { @BeforeClass public static void setUpBeforeClass() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.set("hive.security.authorization.manager", "org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider"); sessionState = SessionState.start(conf); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/util/TestHiveStrictManagedMigration.java b/ql/src/test/org/apache/hadoop/hive/ql/util/TestHiveStrictManagedMigration.java index 386a90fd9445..020b9524e715 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/util/TestHiveStrictManagedMigration.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/util/TestHiveStrictManagedMigration.java @@ -82,7 +82,7 @@ public void testUpgrade() throws Exception { String oldWarehouse = getWarehouseDir(); String[] args = {"--hiveconf", "hive.strict.managed.tables=true", "-m", "automatic", "--modifyManagedTables", "--oldWarehouseRoot", oldWarehouse}; - HiveConf newConf = new HiveConf(hiveConf); + HiveConf newConf = HiveConf.create(hiveConf); File newWarehouseDir = new File(getTestDataDir(), "newWarehouse"); newConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, newWarehouseDir.getAbsolutePath()); newConf.set("strict.managed.tables.migration.owner", System.getProperty("user.name")); @@ -121,7 +121,7 @@ public void testExternalMove() throws Exception { String oldWarehouse = getWarehouseDir(); String[] args = {"-m", "external", "--shouldMoveExternal", "--tableRegex", "man.*|ext.*|custm.*|custe.*", "--oldWarehouseRoot", oldWarehouse}; - HiveConf newConf = new HiveConf(hiveConf); + HiveConf newConf = 
HiveConf.create(hiveConf); File newManagedWarehouseDir = new File(getTestDataDir(), "newManaged"); File newExtWarehouseDir = new File(getTestDataDir(), "newExternal"); newConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, newManagedWarehouseDir.getAbsolutePath()); @@ -137,7 +137,7 @@ public void testExternalMove() throws Exception { public void testExternalMoveFailsForIncorrectOptions() throws Throwable { try { String[] args = {"-m", "automatic", "--shouldMoveExternal"}; - runMigrationTool(new HiveConf(hiveConf), args); + runMigrationTool(HiveConf.create(hiveConf), args); } catch (Exception e) { // Exceptions are re-packaged by the migration tool... throw e.getCause(); @@ -148,7 +148,7 @@ public void testExternalMoveFailsForIncorrectOptions() throws Throwable { public void testExceptionForDbRegexPlusControlFile() throws Throwable { try { String[] args = {"-m", "automatic", "--dbRegex", "db0", "--controlFileUrl", "file:/tmp/file"}; - runMigrationTool(new HiveConf(hiveConf), args); + runMigrationTool(HiveConf.create(hiveConf), args); } catch (Exception e) { // Exceptions are re-packaged by the migration tool... 
throw e.getCause(); @@ -161,7 +161,7 @@ public void testUsingControlFileUrl() throws Throwable { String oldWarehouse = getWarehouseDir(); String[] args = {"-m", "external", "--oldWarehouseRoot", oldWarehouse, "--controlFileUrl", "src/test/resources/hsmm/hsmm_cfg_01.yaml"}; - HiveConf newConf = new HiveConf(hiveConf); + HiveConf newConf = HiveConf.create(hiveConf); runMigrationTool(newConf, args); @@ -176,7 +176,7 @@ public void testUsingControlDirUrl() throws Throwable { String oldWarehouse = getWarehouseDir(); String[] args = {"-m", "external", "--oldWarehouseRoot", oldWarehouse, "--controlFileUrl", "src/test/resources/hsmm"}; - HiveConf newConf = new HiveConf(hiveConf); + HiveConf newConf = HiveConf.create(hiveConf); runMigrationTool(newConf, args); @@ -213,7 +213,7 @@ public void testExtDbDirOnFsIsCreatedAsHiveIfDbOwnerNull() throws Exception { Hive.get().alterDatabase("ownerlessdb", db); String[] args = {"-m", "external"}; - HiveConf newConf = new HiveConf(hiveConf); + HiveConf newConf = HiveConf.create(hiveConf); File newExtWarehouseDir = new File(getTestDataDir(), "newExternal"); newConf.set(HiveConf.ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL.varname, newExtWarehouseDir.getAbsolutePath()); runMigrationTool(newConf, args); diff --git a/ql/src/test/org/apache/hive/testutils/HiveTestEnvSetup.java b/ql/src/test/org/apache/hive/testutils/HiveTestEnvSetup.java index f098a217efaa..a5a1934cf19f 100644 --- a/ql/src/test/org/apache/hive/testutils/HiveTestEnvSetup.java +++ b/ql/src/test/org/apache/hive/testutils/HiveTestEnvSetup.java @@ -154,23 +154,23 @@ public void beforeClass(HiveTestEnvContext ctx) throws Exception { HiveConf.setHivemetastoreSiteUrl(new File(confFolder, "hivemetastore-site.xml").toURI().toURL()); // FIXME: hiveServer2SiteUrl is not settable? 
- ctx.hiveConf = new HiveConf(IDriver.class); + ctx.hiveConf = HiveConf.create(IDriver.class); ctx.hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST_IDE, true); } @Override public void beforeMethod(HiveTestEnvContext ctx) throws Exception { if (savedConf == null) { - savedConf = new HiveConf(ctx.hiveConf); + savedConf = HiveConf.create(ctx.hiveConf); } // service a fresh conf for every testMethod - ctx.hiveConf = new HiveConf(savedConf); + ctx.hiveConf = HiveConf.create(savedConf); } @Override public void afterMethod(HiveTestEnvContext ctx) throws Exception { // create a fresh hiveconf; afterclass methods may get into trouble without this - ctx.hiveConf = new HiveConf(savedConf); + ctx.hiveConf = HiveConf.create(savedConf); } @Override diff --git a/ql/src/test/queries/clientpositive/join0.q b/ql/src/test/queries/clientpositive/join0.q index 0fce78c3a029..fecb3ccfcaf0 100644 --- a/ql/src/test/queries/clientpositive/join0.q +++ b/ql/src/test/queries/clientpositive/join0.q @@ -3,6 +3,7 @@ SET hive.vectorized.execution.enabled=false; set hive.mapred.mode=nonstrict; set hive.explain.user=false; +SET hive.conf.property.tracking=true; -- SORT_QUERY_RESULTS EXPLAIN diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java index 92b6176e88bf..717da91dc103 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java @@ -405,7 +405,7 @@ public void testLazyBinarySimpleComplexDepthFour() throws Throwable { @Test public void testLazySimpleDeserializeRowEmptyArray() throws Throwable { - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); // set the escaping related properties Properties props = new Properties(); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java index 196d2b6c7394..ba1f6bce0cb2 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java @@ -44,7 +44,7 @@ public class TestLazySimpleDeserializeRead { */ @Test public void testEscaping() throws Exception { - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); // set the escaping related properties Properties props = new Properties(); diff --git a/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java b/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java index 063091ce28c3..0b40896af74c 100644 --- a/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java +++ b/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java @@ -35,7 +35,7 @@ public enum AuthMethods { private final String authMethod; - private final HiveConf conf = new HiveConf(); + private final HiveConf conf = HiveConf.create(); AuthMethods(String authMethod) { this.authMethod = authMethod; diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index 8eacf138b8b3..b5c55e99466c 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -133,7 +133,7 @@ public HiveSessionImpl(SessionHandle sessionHandle, TProtocolVersion protocol, this.password = password; creationTime = System.currentTimeMillis(); this.sessionHandle = sessionHandle != null ? 
sessionHandle : new SessionHandle(protocol); - this.sessionConf = new HiveConf(serverConf); + this.sessionConf = HiveConf.create(serverConf); this.ipAddress = ipAddress; this.forwardedAddresses = forwardedAddresses; this.operationLock = serverConf.getBoolVar( diff --git a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java index 18cfa8d3111b..c7f5f798b742 100644 --- a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java +++ b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java @@ -346,7 +346,7 @@ private Map getConfsToPublish() { confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, conf.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname)); // Transport specific confs - if (HiveServer2.isHttpTransportMode(new HiveConf(conf, Configuration.class))) { + if (HiveServer2.isHttpTransportMode(HiveConf.create(conf, Configuration.class))) { confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname, conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname)); confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 59fe33eb3bae..073c8ba3e519 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -1088,7 +1088,7 @@ private static void startHiveServer2() throws Throwable { long attempts = 0, maxAttempts = 1; while (true) { LOG.info("Starting HiveServer2"); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); maxAttempts = hiveConf.getLongVar(HiveConf.ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS); long retrySleepIntervalMs = hiveConf 
.getTimeVar(ConfVars.HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS, @@ -1206,7 +1206,7 @@ private void maybeStartCompactorThreads(HiveConf hiveConf) throws Exception { * @throws Exception */ static void deleteServerInstancesFromZooKeeper(String versionNumber) throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); setUpZooKeeperAuth(hiveConf); CuratorFramework zooKeeperClient = hiveConf.getZKConfig().getNewZookeeperClient(); zooKeeperClient.start(); @@ -1391,7 +1391,7 @@ ServerOptionsProcessorResponse parse(String[] argv) { if (commandLine.hasOption("getHiveConf")) { return new ServerOptionsProcessorResponse(() -> { String key = commandLine.getOptionValue("getHiveConf"); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); HiveConf.ConfVars confVars = HiveConf.getConfVars(key); String value = hiveConf.get(key); if (confVars != null && confVars.getValidator() instanceof Validator.TimeValidator) { @@ -1518,7 +1518,7 @@ static class FailoverHS2InstanceExecutor implements ServerOptionsExecutor { @Override public void execute() { try { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); HS2ActivePassiveHARegistry haRegistry = HS2ActivePassiveHARegistryClient.getClient(hiveConf); Collection hs2Instances = haRegistry.getAll(); // no HS2 instances are running @@ -1609,7 +1609,7 @@ static class ListHAPeersExecutor implements ServerOptionsExecutor { @Override public void execute() { try { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); HS2ActivePassiveHARegistry haRegistry = HS2ActivePassiveHARegistryClient.getClient(hiveConf); HS2Peers.HS2Instances hs2Instances = new HS2Peers.HS2Instances(haRegistry.getAll()); String jsonOut = hs2Instances.toJson(); diff --git a/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java b/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java index 
c8632aefb9da..b4ba7ccaedf4 100644 --- a/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java +++ b/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java @@ -53,7 +53,7 @@ public class TestLdapAuthenticationProviderImpl { @Before public void setup() throws AuthenticationException { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set("hive.root.logger", "DEBUG,console"); conf.set("hive.server2.authentication.ldap.url", "localhost"); when(factory.getInstance(any(HiveConf.class), anyString(), anyString())).thenReturn(search); diff --git a/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java b/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java index f9b7fb3305c1..8f866a18bac8 100644 --- a/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java +++ b/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java @@ -41,7 +41,7 @@ public class TestPlainSaslHelper { @Test public void testDoAsSetting(){ - HiveConf hconf = new HiveConf(); + HiveConf hconf = HiveConf.create(); hconf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/service/src/test/org/apache/hive/service/auth/ldap/LdapAuthenticationTestCase.java b/service/src/test/org/apache/hive/service/auth/ldap/LdapAuthenticationTestCase.java index 9693e77edc47..a2c8fe8122a3 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/LdapAuthenticationTestCase.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/LdapAuthenticationTestCase.java @@ -140,7 +140,7 @@ private void overrideHiveConf() { public LdapAuthenticationTestCase build() { Preconditions.checkState(conf == null, "Test Case Builder should not be reused. 
Please create a new instance."); - conf = new HiveConf(); + conf = HiveConf.create(); overrideHiveConf(); return new LdapAuthenticationTestCase(this); } diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestChainFilter.java b/service/src/test/org/apache/hive/service/auth/ldap/TestChainFilter.java index 8043d9c9f483..2286b2be1a6c 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestChainFilter.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestChainFilter.java @@ -60,7 +60,7 @@ public class TestChainFilter { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); factory = new ChainFilterFactory(factory1, factory2, factory3); } diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestCustomQueryFilter.java b/service/src/test/org/apache/hive/service/auth/ldap/TestCustomQueryFilter.java index d8725ae9df62..b3c98a252cc8 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestCustomQueryFilter.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestCustomQueryFilter.java @@ -47,7 +47,7 @@ public class TestCustomQueryFilter { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set("hive.root.logger", "DEBUG,console"); factory = new CustomQueryFilterFactory(); } diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestGroupFilter.java b/service/src/test/org/apache/hive/service/auth/ldap/TestGroupFilter.java index 34f50b768572..9691eca86603 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestGroupFilter.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestGroupFilter.java @@ -44,7 +44,7 @@ public class TestGroupFilter { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); conf.set("hive.root.logger", "DEBUG,console"); factory = new GroupFilterFactory(); } diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestLdapSearch.java 
b/service/src/test/org/apache/hive/service/auth/ldap/TestLdapSearch.java index 78cba2ac55c7..9aba228466b8 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestLdapSearch.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestLdapSearch.java @@ -49,7 +49,7 @@ public class TestLdapSearch { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY, "memberOf"); } diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestLdapUtils.java b/service/src/test/org/apache/hive/service/auth/ldap/TestLdapUtils.java index 015407ae135b..932b01949f8b 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestLdapUtils.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestLdapUtils.java @@ -29,7 +29,7 @@ public class TestLdapUtils { @Test public void testCreateCandidatePrincipalsForUserDn() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String userDn = "cn=user1,ou=CORP,dc=mycompany,dc=com"; List expected = Arrays.asList(userDn); List actual = LdapUtils.createCandidatePrincipals(conf, userDn); @@ -38,7 +38,7 @@ public void testCreateCandidatePrincipalsForUserDn() { @Test public void testCreateCandidatePrincipalsForUserWithDomain() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); String userWithDomain = "user1@mycompany.com"; List expected = Arrays.asList(userWithDomain); List actual = LdapUtils.createCandidatePrincipals(conf, userWithDomain); @@ -47,7 +47,7 @@ public void testCreateCandidatePrincipalsForUserWithDomain() { @Test public void testCreateCandidatePrincipalsLdapDomain() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN, "mycompany.com"); List expected = Arrays.asList("user1@mycompany.com"); List actual = LdapUtils.createCandidatePrincipals(conf, "user1"); @@ -56,7 +56,7 @@ public void 
testCreateCandidatePrincipalsLdapDomain() { @Test public void testCreateCandidatePrincipalsUserPatternsDefaultBaseDn() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GUIDKEY, "sAMAccountName"); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN, "dc=mycompany,dc=com"); List expected = Arrays.asList("sAMAccountName=user1,dc=mycompany,dc=com"); @@ -66,7 +66,7 @@ public void testCreateCandidatePrincipalsUserPatternsDefaultBaseDn() { @Test public void testCreateCandidatePrincipals() { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN, "dc=mycompany,dc=com"); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN, "cn=%s,ou=CORP1,dc=mycompany,dc=com:cn=%s,ou=CORP2,dc=mycompany,dc=com"); diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestQueryFactory.java b/service/src/test/org/apache/hive/service/auth/ldap/TestQueryFactory.java index 21c6303b8740..103f0279a296 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestQueryFactory.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestQueryFactory.java @@ -30,7 +30,7 @@ public class TestQueryFactory { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GUIDKEY, "guid"); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPCLASS_KEY, "superGroups"); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPMEMBERSHIP_KEY, "member"); @@ -89,7 +89,7 @@ public void testIsUserMemberOfGroup() { @Test(expected = IllegalStateException.class) public void testIsUserMemberOfGroupWhenMisconfigured() { - QueryFactory misconfiguredQueryFactory = new QueryFactory(new HiveConf()); + QueryFactory misconfiguredQueryFactory = new QueryFactory(HiveConf.create()); misconfiguredQueryFactory.isUserMemberOfGroup("user", "cn=MyGroup"); 
} diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestUserFilter.java b/service/src/test/org/apache/hive/service/auth/ldap/TestUserFilter.java index 3edac26e4537..72a7fbdca0dc 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestUserFilter.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestUserFilter.java @@ -42,7 +42,7 @@ public class TestUserFilter { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); factory = new UserFilterFactory(); } diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestUserSearchFilter.java b/service/src/test/org/apache/hive/service/auth/ldap/TestUserSearchFilter.java index d066fd01bb47..de967759401d 100644 --- a/service/src/test/org/apache/hive/service/auth/ldap/TestUserSearchFilter.java +++ b/service/src/test/org/apache/hive/service/auth/ldap/TestUserSearchFilter.java @@ -41,7 +41,7 @@ public class TestUserSearchFilter { @Before public void setup() { - conf = new HiveConf(); + conf = HiveConf.create(); factory = new UserSearchFilterFactory(); } diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index ac19b19dbb96..446ac98d9561 100644 --- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -226,7 +226,7 @@ public void testExecuteStatementAsync() throws Exception { * to give a compile time error. 
* (compilation is done synchronous as of now) */ - longPollingTimeout = HiveConf.getTimeVar(new HiveConf(), + longPollingTimeout = HiveConf.getTimeVar(HiveConf.create(), HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT, TimeUnit.MILLISECONDS); queryString = "SELECT NON_EXISTING_COLUMN FROM " + tableName; try { @@ -308,7 +308,7 @@ public void testExecuteStatementParallel() throws Exception { SessionHandle sessionHandle = setupTestData(tableName, columnDefinitions, confOverlay); assertNotNull(sessionHandle); - long longPollingTimeout = HiveConf.getTimeVar(new HiveConf(), + long longPollingTimeout = HiveConf.getTimeVar(HiveConf.create(), HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT, TimeUnit.MILLISECONDS); confOverlay.put( HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT.varname, longPollingTimeout + "ms"); diff --git a/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java b/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java index 6ce40ec68dc1..ecf00f38ec4e 100644 --- a/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java +++ b/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java @@ -31,7 +31,7 @@ public class TestCLIServiceConnectionLimits { public ExpectedException thrown = ExpectedException.none(); private int limit = 10; - private HiveConf conf = new HiveConf(); + private HiveConf conf = HiveConf.create(); @Test public void testNoLimit() throws HiveSQLException { diff --git a/service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java b/service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java index 1e0c4274ad27..b091eda07f21 100644 --- a/service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java +++ b/service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java @@ -42,7 +42,7 @@ public void testRestore() throws HiveSQLException { } public CLIService getService() { - HiveConf conf = new 
HiveConf(); + HiveConf conf = HiveConf.create(); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); CLIService service = new CLIService(null, true); diff --git a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java index 364124c7fdb9..4c7d80da29e9 100644 --- a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java +++ b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java @@ -51,7 +51,7 @@ public class TestRetryingThriftCLIServiceClient { @Before public void init() { - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, "localhost"); hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, 15000); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); diff --git a/service/src/test/org/apache/hive/service/cli/operation/TestCommandWithSpace.java b/service/src/test/org/apache/hive/service/cli/operation/TestCommandWithSpace.java index 5c57357c84b4..840bedf9f86e 100644 --- a/service/src/test/org/apache/hive/service/cli/operation/TestCommandWithSpace.java +++ b/service/src/test/org/apache/hive/service/cli/operation/TestCommandWithSpace.java @@ -36,7 +36,7 @@ public class TestCommandWithSpace { @Test public void testCommandWithPrefixSpace() throws IllegalAccessException, ClassNotFoundException, InstantiationException, HiveSQLException { String query = " dfs -ls /"; - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git 
a/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java b/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java index ddb65b997fd7..ac59c15daf3d 100644 --- a/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java +++ b/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java @@ -55,7 +55,7 @@ public class TestOperationLogManager { @Before public void setUp() throws Exception { - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_HISTORIC_OPERATION_LOG_ENABLED, true); HiveConf.setIntVar(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_WEBUI_MAX_HISTORIC_QUERIES, 1); HiveConf.setIntVar(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT, 8080); @@ -139,7 +139,7 @@ public void testOperationLogManager() throws Exception { @Test public void testGetOperationLog() throws Exception { FakeHiveSession session = new FakeHiveSession( - new SessionHandle(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V11), new HiveConf(hiveConf)); + new SessionHandle(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V11), HiveConf.create(hiveConf)); session.setOperationLogSessionDir(new File(HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION))); session.open(new HashMap<>()); diff --git a/service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java b/service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java index 2fd336128b08..ae83c5a8f00e 100644 --- a/service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java +++ b/service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java @@ -48,7 +48,7 @@ public class TestQueryLifeTimeHooksWithSQLOperation { @Test public void testQueryInfoInHookContext() throws IllegalAccessException, 
ClassNotFoundException, InstantiationException, HiveSQLException { - HiveConf conf = new HiveConf(TestQueryHooks.class); + HiveConf conf = HiveConf.create(TestQueryHooks.class); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/service/src/test/org/apache/hive/service/cli/operation/TestQueryShutdownHooks.java b/service/src/test/org/apache/hive/service/cli/operation/TestQueryShutdownHooks.java index 0170c716c890..c7cbfadf61fe 100644 --- a/service/src/test/org/apache/hive/service/cli/operation/TestQueryShutdownHooks.java +++ b/service/src/test/org/apache/hive/service/cli/operation/TestQueryShutdownHooks.java @@ -53,7 +53,7 @@ public class TestQueryShutdownHooks { public void setUp() throws Exception { service = new EmbeddedThriftBinaryCLIService(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); diff --git a/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java b/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java index 5ce3d16bd51f..5f55ddf22d02 100644 --- a/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java +++ b/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java @@ -45,7 +45,7 @@ public class TestSQLOperationMetrics { @Before public void setup() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true); MetricsFactory.init(conf); diff --git 
a/service/src/test/org/apache/hive/service/cli/session/TestPluggableHiveSessionImpl.java b/service/src/test/org/apache/hive/service/cli/session/TestPluggableHiveSessionImpl.java index cbc15ddbec68..f438fe03522f 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestPluggableHiveSessionImpl.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestPluggableHiveSessionImpl.java @@ -35,7 +35,7 @@ public class TestPluggableHiveSessionImpl { @Test public void testSessionImpl() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.getDefaultValue()); hiveConf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, @@ -61,7 +61,7 @@ public void testSessionImpl() throws Exception { @Test public void testSessionImplWithUGI() throws Exception { - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.getDefaultValue()); hiveConf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_WITH_UGI_CLASSNAME, diff --git a/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java index c19d97abe8f4..bf47516de0fa 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java @@ -62,7 +62,7 @@ public CLIService getCliService() { // This is to test session temporary files are cleaned up after HIVE-11768 public void testTempSessionFileCleanup() throws Exception { MyEmbeddedThriftBinaryCLIService service = new MyEmbeddedThriftBinaryCLIService(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, 
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java index 002dc603c10f..3d226a892900 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java @@ -90,14 +90,14 @@ public void setUp() throws Exception { FileUtils.writeLines(initFile, Arrays.asList(fileContent)); // set up service and client - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION, initFile.getParentFile().getAbsolutePath()); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); service = new FakeEmbeddedThriftBinaryCLIService(hiveConf); - service.init(new HiveConf()); + service.init(HiveConf.create()); client = new ThriftCLIServiceClient(service); } diff --git a/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java index d687a7beed05..5241b69bc2e2 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java @@ -66,7 +66,7 @@ public void setUp() throws Exception { System.setProperty(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, TestSessionHooks.SessionHookTest.class.getName()); service = new EmbeddedThriftBinaryCLIService(); - HiveConf hiveConf = new HiveConf(); + HiveConf hiveConf = HiveConf.create(); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git 
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java index a3356658c12e..e5cd56c12014 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java @@ -66,7 +66,7 @@ public class TestSessionManagerMetrics { @Before public void setup() throws Exception { - HiveConf conf = new HiveConf(); + HiveConf conf = HiveConf.create(); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS, 2); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_WAIT_QUEUE_SIZE, 10); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME, "1000000s"); diff --git a/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java index 97ba39f1b8a7..14a2edec2f18 100644 --- a/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java @@ -62,7 +62,7 @@ public static void setUpBeforeClass() throws Exception { // Find a free port port = MetaStoreTestUtils.findFreePort(); hiveServer2 = new HiveServer2(); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); } /** diff --git a/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java b/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java index c490cc2ebbc1..4bd291bd6f17 100644 --- a/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java +++ b/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java @@ -46,7 +46,7 @@ public class ThriftHttpServletTest { public void setUp() throws Exception { String authType = HiveAuthConstants.AuthTypes.KERBEROS.toString(); thriftHttpServlet = new 
ThriftHttpServlet(null, null, authType, null, null, null, - new HiveConf()); + HiveConf.create()); } @Test diff --git a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java index 6c50e8170901..4b4109cbe219 100644 --- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java +++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java @@ -75,7 +75,7 @@ public static void beforeTests() throws Exception { webUIPort = MetaStoreTestUtils.findFreePortExcepting( Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue())); apiBaseURL = "http://localhost:" + webUIPort + "/api/v1"; - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd); hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString()); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git a/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPam.java b/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPam.java index 04f66b4e5d73..c44862c29fa8 100644 --- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPam.java +++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPam.java @@ -66,7 +66,7 @@ public class TestHS2HttpServerPam { public static void beforeTests() throws Exception { webUIPort = MetaStoreTestUtils.findFreePortExcepting(Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue())); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true); hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd); hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString()); diff --git a/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPamConfiguration.java 
b/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPamConfiguration.java index 3e2ad22bc15a..2ac0eeb09466 100644 --- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPamConfiguration.java +++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServerPamConfiguration.java @@ -63,7 +63,7 @@ public static void beforeTests() throws Exception { String metastorePasswd = "693efe9fa425ad21886d73a0fa3fbc70"; //random md5 Integer webUIPort = MetaStoreTestUtils.findFreePortExcepting(Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue())); - hiveConf = new HiveConf(); + hiveConf = HiveConf.create(); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true); hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, false); hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd); diff --git a/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java b/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java index e7cf2ffb84f0..fdf27969744b 100644 --- a/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java +++ b/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java @@ -134,7 +134,7 @@ private void addResource(Configuration conf, @NotNull String r) throws Malformed */ private void getClient(@Nullable URI uri) throws TException, IOException, InterruptedException, URISyntaxException, LoginException { - Configuration conf = new HiveConf(); + Configuration conf = HiveConf.create(); addResource(conf, HIVE_SITE); if (uri != null) { conf.set(METASTORE_URI, uri.toString()); diff --git a/streaming/src/java/org/apache/hive/streaming/HiveStreamingConnection.java b/streaming/src/java/org/apache/hive/streaming/HiveStreamingConnection.java index a61beb55b8fd..7cfcb35c538f 100644 --- 
a/streaming/src/java/org/apache/hive/streaming/HiveStreamingConnection.java +++ b/streaming/src/java/org/apache/hive/streaming/HiveStreamingConnection.java @@ -816,7 +816,7 @@ Long getCurrentTxnId() { } private HiveConf createHiveConf(Class clazz, String metaStoreUri) { - HiveConf conf = new HiveConf(clazz); + HiveConf conf = HiveConf.create(clazz); if (metaStoreUri != null) { conf.set(MetastoreConf.ConfVars.THRIFT_URIS.getHiveName(), metaStoreUri); } diff --git a/streaming/src/test/org/apache/hive/streaming/TestStreaming.java b/streaming/src/test/org/apache/hive/streaming/TestStreaming.java index 37c64429b78d..692fcc9c380c 100644 --- a/streaming/src/test/org/apache/hive/streaming/TestStreaming.java +++ b/streaming/src/test/org/apache/hive/streaming/TestStreaming.java @@ -211,7 +211,7 @@ public TestStreaming() throws Exception { partitionVals2.add(PART1_COUNTRY); - conf = new HiveConf(this.getClass()); + conf = HiveConf.create(this.getClass()); conf.set("fs.raw.impl", RawFileSystem.class.getName()); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git a/streaming/src/test/org/apache/hive/streaming/TestStreamingDynamicPartitioning.java b/streaming/src/test/org/apache/hive/streaming/TestStreamingDynamicPartitioning.java index c548ea7388a6..a4b1bb56185c 100644 --- a/streaming/src/test/org/apache/hive/streaming/TestStreamingDynamicPartitioning.java +++ b/streaming/src/test/org/apache/hive/streaming/TestStreamingDynamicPartitioning.java @@ -126,7 +126,7 @@ public FileStatus getFileStatus(Path path) throws IOException { private final static String dbName2 = "testing2"; public TestStreamingDynamicPartitioning() throws Exception { - conf = new HiveConf(this.getClass()); + conf = HiveConf.create(this.getClass()); conf.set("fs.raw.impl", RawFileSystem.class.getName()); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,