diff --git a/ql/pom.xml b/ql/pom.xml
index d171633ac0fc..17ac8d80d7ff 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -25,7 +25,6 @@ Hive Query Language
   ..
-    <powermock.version>2.0.2</powermock.version>
     0.10.2
     2.1.0
@@ -813,18 +812,6 @@
       <version>${mockito-core.version}</version>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-module-junit4</artifactId>
-      <version>${powermock.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-mockito2</artifactId>
-      <version>${powermock.version}</version>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava-testlib</artifactId>
@@ -858,6 +845,12 @@
       <groupId>org.codehaus.janino</groupId>
       <artifactId>janino</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-inline</artifactId>
+      <version>3.4.4</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>
       <artifactId>jersey-multipart</artifactId>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasDumpTask.java
index 2c3a77851aa2..2bc9d0d6113d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasDumpTask.java
@@ -36,11 +36,12 @@
 import org.apache.hadoop.hive.ql.exec.util.Retryable;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
 import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
 import org.apache.hadoop.hive.ql.parse.repl.dump.log.AtlasDumpLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -68,16 +69,23 @@ public class AtlasDumpTask extends Task<AtlasDumpWork> implements Serializable {
   private static final transient Logger LOG = LoggerFactory.getLogger(AtlasDumpTask.class);
   private static final long serialVersionUID = 1L;
   private transient AtlasRestClient atlasRestClient;
+  private AtlasDumpLogger replLogger;
 
   public AtlasDumpTask() {
     super();
   }
 
   @VisibleForTesting
-  AtlasDumpTask(final AtlasRestClient atlasRestClient, final HiveConf conf, final AtlasDumpWork work) {
+  AtlasDumpTask(final AtlasRestClient atlasRestClient, final HiveConf conf, final AtlasDumpWork work,
+      AtlasDumpLogger replLogger) {
     this.conf = conf;
     this.work = work;
     this.atlasRestClient = atlasRestClient;
+    this.replLogger = replLogger;
+  }
+
+  @VisibleForTesting
+  AtlasDumpTask(final AtlasRestClient atlasRestClient, final HiveConf conf, final AtlasDumpWork work) {
+    this(atlasRestClient, conf, work, null);
   }
 
   @Override
@@ -87,8 +95,7 @@ public int execute() {
       AtlasReplInfo atlasReplInfo = createAtlasReplInfo();
       LOG.info("Dumping Atlas metadata of srcDb: {}, for TgtDb: {} to staging location:",
           atlasReplInfo.getSrcDB(), atlasReplInfo.getTgtDB(), atlasReplInfo.getStagingDir());
-      AtlasDumpLogger replLogger = new AtlasDumpLogger(atlasReplInfo.getSrcDB(),
-          atlasReplInfo.getStagingDir().toString());
+      initializeReplLogger(atlasReplInfo);
       replLogger.startLog();
       Map<String, Long> metricMap = new HashMap<>();
       metricMap.put(ReplUtils.MetricName.ENTITIES.name(), 0L);
@@ -129,6 +136,13 @@ public int execute() {
     }
   }
 
+  private void initializeReplLogger(AtlasReplInfo atlasReplInfo) {
+    if (this.replLogger == null) {
+      this.replLogger = new AtlasDumpLogger(atlasReplInfo.getSrcDB(),
+          atlasReplInfo.getStagingDir().toString());
+    }
+  }
+
   private AtlasReplInfo createAtlasReplInfo() throws SemanticException, MalformedURLException {
     String errorFormat = "%s is mandatory config for Atlas metadata replication";
     //Also validates URL for endpoint.
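The pom change above swaps the two PowerMock test dependencies for `mockito-inline` 3.4.4, whose `MockedStatic` replaces `@PrepareForTest`-based static mocking throughout the test changes below. A minimal, self-contained sketch of the key behavioral difference — static stubbing is scoped to a try-with-resources block instead of the whole test class (JUnit 4 assumed; the test class and the `UUID` example are illustrative, not from the patch):

```java
import org.junit.Test;
import org.mockito.MockedStatic;

import java.util.UUID;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.mockito.Mockito.mockStatic;

public class MockedStaticScopeTest {

  @Test
  public void staticStubbingIsConfinedToTheTryBlock() {
    UUID fixed = new UUID(0L, 0L);
    try (MockedStatic<UUID> mocked = mockStatic(UUID.class)) {
      mocked.when(UUID::randomUUID).thenReturn(fixed);
      assertEquals(fixed, UUID.randomUUID()); // stubbed inside the scope
    }
    assertNotEquals(fixed, UUID.randomUUID()); // real implementation again
  }
}
```

Because the mock is released when the block closes, tests no longer need a special runner to isolate static state, which is why the diffs below can move from `PowerMockRunner` to the plain `MockitoJUnitRunner`.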
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasLoadTask.java
index a44aa435aa9b..f10ce52ef14f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasLoadTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasLoadTask.java
@@ -34,7 +34,7 @@
 import org.apache.hadoop.hive.ql.exec.util.Retryable;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
 import org.apache.hadoop.hive.ql.parse.repl.load.log.AtlasLoadLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -50,7 +50,6 @@
 import java.nio.charset.Charset;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.Callable;
 
 /**
  * Atlas Metadata Replication Load Task.
@@ -59,14 +58,17 @@ public class AtlasLoadTask extends Task<AtlasLoadWork> implements Serializable {
   private static final long serialVersionUID = 1L;
   private static final transient Logger LOG = LoggerFactory.getLogger(AtlasLoadTask.class);
 
+  private ReplLogger replLogger = null;
+
   public AtlasLoadTask() {
     super();
   }
 
   @VisibleForTesting
-  AtlasLoadTask(final HiveConf conf, final AtlasLoadWork work) {
+  AtlasLoadTask(final HiveConf conf, final AtlasLoadWork work, ReplLogger replLogger) {
     this.conf = conf;
     this.work = work;
+    this.replLogger = replLogger;
   }
 
   @Override
@@ -79,8 +81,7 @@ public int execute() {
       work.getMetricCollector().reportStageStart(getName(), metricMap);
       LOG.info("Loading atlas metadata from srcDb: {} to tgtDb: {} from staging: {}",
           atlasReplInfo.getSrcDB(), atlasReplInfo.getTgtDB(), atlasReplInfo.getStagingDir());
-      AtlasLoadLogger replLogger = new AtlasLoadLogger(atlasReplInfo.getSrcDB(), atlasReplInfo.getTgtDB(),
-          atlasReplInfo.getStagingDir().toString());
+      initializeReplLogger(atlasReplInfo);
       replLogger.startLog();
       int importCount = importAtlasMetadata(atlasReplInfo);
       replLogger.endLog(importCount);
@@ -113,6 +114,13 @@ public int execute() {
     }
   }
 
+  private void initializeReplLogger(AtlasReplInfo atlasReplInfo) {
+    if (this.replLogger == null) {
+      this.replLogger = new AtlasLoadLogger(atlasReplInfo.getSrcDB(), atlasReplInfo.getTgtDB(),
+          atlasReplInfo.getStagingDir().toString());
+    }
+  }
+
   AtlasReplInfo createAtlasReplInfo() throws SemanticException, MalformedURLException {
     String errorFormat = "%s is mandatory config for Atlas metadata replication";
     //Also validates URL for endpoint.
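Both Atlas tasks now follow the same seam: the logger field is constructor-injected by tests, and a lazy `initializeReplLogger` builds the production default only when nothing was injected. A stripped-down, hypothetical illustration of that pattern (none of these class names appear in the patch):

```java
public class LazyLoggerDemo {

  interface ReplLogger {
    void startLog();
    void endLog(long count);
  }

  static class DemoTask {
    private ReplLogger replLogger; // injected in tests, null in production

    DemoTask(ReplLogger replLogger) {
      this.replLogger = replLogger;
    }

    int execute(String dbName) {
      initializeReplLogger(dbName); // no-op when a test already injected one
      replLogger.startLog();
      // ... real work would happen here ...
      replLogger.endLog(0L);
      return 0;
    }

    private void initializeReplLogger(String dbName) {
      if (replLogger == null) {
        replLogger = new ReplLogger() { // production default
          @Override
          public void startLog() {
            System.out.println("start: " + dbName);
          }

          @Override
          public void endLog(long count) {
            System.out.println("end: " + dbName + ", count=" + count);
          }
        };
      }
    }
  }

  public static void main(String[] args) {
    new DemoTask(null).execute("srcdb"); // production path: logger built lazily
  }
}
```

The payoff shows up in the test diffs further down: instead of poking a mock `Logger` into `ReplState` with `Whitebox`, the tests simply pass a mock logger and verify `startLog()`/`endLog()`.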
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
index e7b403b8dbc2..153aaa0d14e4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
@@ -33,7 +33,6 @@
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
-import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
 import org.apache.hadoop.hive.ql.parse.repl.dump.log.RangerDumpLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -65,10 +64,16 @@ public RangerDumpTask() {
   }
 
   @VisibleForTesting
-  RangerDumpTask(final RangerRestClient rangerRestClient, final HiveConf conf, final RangerDumpWork work) {
+  RangerDumpTask(final RangerRestClient rangerRestClient, final HiveConf conf, final RangerDumpWork work,
+      ReplLogger replLogger) {
     this.conf = conf;
     this.work = work;
     this.rangerRestClient = rangerRestClient;
+    this.replLogger = replLogger;
+  }
+
+  @VisibleForTesting
+  RangerDumpTask(final RangerRestClient rangerRestClient, final HiveConf conf, final RangerDumpWork work) {
+    this(rangerRestClient, conf, work, null);
   }
 
   @Override
@@ -86,7 +91,7 @@ public int execute() {
       Map<String, Long> metricMap = new HashMap<>();
       metricMap.put(ReplUtils.MetricName.POLICIES.name(), 0L);
       work.getMetricCollector().reportStageStart(getName(), metricMap);
-      replLogger = new RangerDumpLogger(work.getDbName(), work.getCurrentDumpPath().toString());
+      initializeReplLogger(work);
       replLogger.startLog();
       if (rangerRestClient == null) {
         rangerRestClient = getRangerRestClient();
@@ -158,6 +163,12 @@ public int execute() {
     }
   }
 
+  private void initializeReplLogger(RangerDumpWork work) {
+    if (this.replLogger == null) {
+      this.replLogger = new RangerDumpLogger(work.getDbName(), work.getCurrentDumpPath().toString());
+    }
+  }
+
   private RangerRestClient getRangerRestClient() {
     if (conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL) || conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) {
       return new NoOpRangerRestClient();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
index d992c4984d7b..c8b50fdce3c4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
@@ -67,9 +67,15 @@ public RangerLoadTask() {
 
   @VisibleForTesting
   RangerLoadTask(final RangerRestClient rangerRestClient, final HiveConf conf, final RangerLoadWork work) {
+    this(rangerRestClient, conf, work, null);
+  }
+
+  @VisibleForTesting
+  RangerLoadTask(final RangerRestClient rangerRestClient, final HiveConf conf, final RangerLoadWork work,
+      ReplLogger replLogger) {
     this.conf = conf;
     this.work = work;
     this.rangerRestClient = rangerRestClient;
+    this.replLogger = replLogger;
   }
 
   @Override
@@ -111,8 +117,7 @@ public int execute() {
         rangerExportPolicyList = rangerRestClient.readRangerPoliciesFromJsonFile(new Path(work.getCurrentDumpPath(),
             ReplUtils.HIVE_RANGER_POLICIES_FILE_NAME), conf);
         int expectedPolicyCount = rangerExportPolicyList == null ? 0 : rangerExportPolicyList.getListSize();
-        replLogger = new RangerLoadLogger(work.getSourceDbName(), work.getTargetDbName(),
-            work.getCurrentDumpPath().toString(), expectedPolicyCount);
+        initializeReplLogger(expectedPolicyCount);
         replLogger.startLog();
         Map<String, Long> metricMap = new HashMap<>();
         metricMap.put(ReplUtils.MetricName.POLICIES.name(), (long) expectedPolicyCount);
@@ -170,6 +175,13 @@ public int execute() {
     }
   }
 
+  private void initializeReplLogger(int expectedPolicyCount) {
+    if (this.replLogger == null) {
+      this.replLogger = new RangerLoadLogger(work.getSourceDbName(), work.getTargetDbName(),
+          work.getCurrentDumpPath().toString(), expectedPolicyCount);
+    }
+  }
+
   private RangerRestClient getRangerRestClient() {
     if (conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL) || conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) {
       return new NoOpRangerRestClient();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
index 31bf8fe2cfb7..8ee54fe8a1d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
@@ -1406,7 +1406,7 @@ Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot, Hive hiveDb)
     for (String tblName : Utils.matchesTbl(hiveDb, dbName, work.replScope)) {
       Table table = null;
       try {
-        HiveWrapper.Tuple<Table> tableTuple = new HiveWrapper(hiveDb, dbName).table(tblName, conf);
+        HiveWrapper.Tuple<Table> tableTuple = createHiveWrapper(hiveDb, dbName).table(tblName, conf);
         table = tableTuple != null ? tableTuple.object : null;
 
         //disable materialized-view replication if not configured
@@ -1805,6 +1805,10 @@ void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot, Hive hiveDb)
     }
   }
 
+  HiveWrapper createHiveWrapper(Hive hiveDb, String dbName) {
+    return new HiveWrapper(hiveDb, dbName);
+  }
+
   private HiveWrapper.Tuple<Function> functionTuple(String functionName, String dbName, Hive hiveDb) {
     try {
       HiveWrapper.Tuple<Function> tuple = new HiveWrapper(hiveDb, dbName).function(functionName);
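The `ReplDumpTask` change introduces a `createHiveWrapper()` factory method instead of calling `new HiveWrapper(...)` inline, so that `TestReplDumpTask` (further down) can override it in a test subclass rather than relying on PowerMock's `whenNew()`. A hypothetical miniature of that seam, with illustrative names not taken from the patch:

```java
public class FactorySeamDemo {

  static class Wrapper {
    String describe() {
      return "real wrapper";
    }
  }

  static class DumpTask {
    String run() {
      return createWrapper().describe();
    }

    // Seam: package-private and overridable, replacing PowerMock's whenNew().
    Wrapper createWrapper() {
      return new Wrapper();
    }
  }

  public static void main(String[] args) {
    DumpTask stubbed = new DumpTask() {
      @Override
      Wrapper createWrapper() {
        return new Wrapper() {
          @Override
          String describe() {
            return "stub wrapper";
          }
        };
      }
    };
    System.out.println(new DumpTask().run()); // prints: real wrapper
    System.out.println(stubbed.run());        // prints: stub wrapper
  }
}
```

The design trade-off: a small amount of production indirection in exchange for tests that need no bytecode manipulation of constructors.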
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index d009541478c9..1c1517955265 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -1,4 +1,3 @@
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -40,6 +39,7 @@
 import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
 import org.apache.hadoop.hive.ql.plan.CopyWork;
 import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
+import org.apache.hadoop.hive.ql.util.TimeUtil;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -154,12 +154,20 @@ static class PrimaryToReplicaResourceFunction
     private final String functionsRootDir;
     private String destinationDbName;
 
+    private TimeUtil timeUtil;
+
     PrimaryToReplicaResourceFunction(Context context, MetaData metadata,
-        String destinationDbName) {
+        String destinationDbName, TimeUtil timeUtil) {
       this.context = context;
       this.metadata = metadata;
       this.destinationDbName = destinationDbName;
       this.functionsRootDir = context.hiveConf.getVar(HiveConf.ConfVars.REPL_FUNCTIONS_ROOT_DIR);
+      this.timeUtil = timeUtil;
+    }
+
+    PrimaryToReplicaResourceFunction(Context context, MetaData metadata,
+        String destinationDbName) {
+      this(context, metadata, destinationDbName, new TimeUtil());
     }
 
     @Override
@@ -187,7 +195,7 @@ ResourceUri destinationResourceUri(ResourceUri resourceUri)
           pathBuilder
               .addDescendant(destinationDbName.toLowerCase())
               .addDescendant(metadata.function.getFunctionName().toLowerCase())
-              .addDescendant(String.valueOf(System.nanoTime()))
+              .addDescendant(String.valueOf(timeUtil.getNanoSeconds()))
              .addDescendant(split[split.length - 1])
               .build(),
           new Path(functionsRootDir).getFileSystem(context.hiveConf)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index edc8e6fdeca3..8190871352e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -137,7 +137,6 @@ public void setJobProperties(Map<String, String> jobProperties) {
     this.jobProperties = jobProperties;
   }
 
-  @Explain(displayName = "jobProperties", explainLevels = { Level.EXTENDED })
   public Map<String, String> getJobProperties() {
     return jobProperties;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllHistogramEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllHistogramEstimator.java
new file mode 100644
index 000000000000..4d0777c3ff98
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllHistogramEstimator.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.datasketches.kll;
+
+import org.apache.datasketches.kll.KllFloatsSketch;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+public class KllHistogramEstimator {
+
+  private final KllFloatsSketch kll;
+
+  KllHistogramEstimator(int k) {
+    this.kll = new KllFloatsSketch(k);
+  }
+
+  KllHistogramEstimator(KllFloatsSketch kll) {
+    this.kll = kll;
+  }
+
+  public byte[] serialize() {
+    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    try {
+      KllUtils.serializeKll(bos, kll);
+      final byte[] result = bos.toByteArray();
+      bos.close();
+      return result;
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public void addToEstimator(long v) {
+    kll.update(v);
+  }
+
+  public void addToEstimator(double d) {
+    kll.update((float) d);
+  }
+
+  public void addToEstimator(HiveDecimal decimal) {
+    kll.update(decimal.floatValue());
+  }
+
+  public void mergeEstimators(KllHistogramEstimator o) {
+    kll.merge(o.kll);
+  }
+
+  public int lengthFor(JavaDataModel model) {
+    return KllUtils.lengthFor(model, kll);
+  }
+
+  public KllFloatsSketch getSketch() {
+    return kll;
+  }
+
+  public int getK() {
+    return kll.getK();
+  }
+}
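For a sense of how `KllHistogramEstimator` behaves, here is a hypothetical demo placed in the same package (the constructors are package-private); it assumes the datasketches `KllFloatsSketch` API used above is on the classpath:

```java
package org.apache.hadoop.hive.ql.udf.datasketches.kll;

/** Hypothetical demo: merging two per-partition estimators into one. */
public class KllMergeDemo {
  public static void main(String[] args) {
    KllHistogramEstimator left = new KllHistogramEstimator(200);
    KllHistogramEstimator right = new KllHistogramEstimator(200);
    for (long i = 0; i < 1000; i++) {
      left.addToEstimator(i);        // longs are fed in directly
      right.addToEstimator(i + 0.5); // doubles are cast to float internally
    }
    left.mergeEstimators(right);     // mutates left; right is unchanged
    System.out.println(left.getSketch().getN()); // 2000 values observed
  }
}
```

Note that all three `addToEstimator` overloads funnel into a float-valued sketch, so long and decimal inputs lose precision beyond what a float can represent; that is inherent to the `KllFloatsSketch` backing store.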
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllHistogramEstimatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllHistogramEstimatorFactory.java
new file mode 100644
index 000000000000..72dc7d3b400c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllHistogramEstimatorFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.datasketches.kll;
+
+public class KllHistogramEstimatorFactory {
+
+  private KllHistogramEstimatorFactory() {
+    throw new AssertionError("Suppress default constructor for noninstantiability");
+  }
+
+  /**
+   * Deserializes a serialized KLL histogram estimator from a byte array.
+   * @param buf buffer to deserialize from
+   * @param start start index for deserialization
+   * @param len number of bytes to deserialize, starting at {@code start}
+   * @return KLL histogram estimator
+   */
+  public static KllHistogramEstimator getKllHistogramEstimator(byte[] buf, int start, int len) {
+    return new KllHistogramEstimator(KllUtils.deserializeKll(buf, start, len));
+  }
+
+  /**
+   * Creates an empty histogram estimator backed by a KLL sketch with the given k parameter.
+   * @param k the KLL parameter k for initializing the sketch
+   * @return an empty histogram estimator backed by a KLL sketch with the given k parameter
+   */
+  public static KllHistogramEstimator getEmptyHistogramEstimator(int k) {
+    return new KllHistogramEstimator(k);
+  }
+}
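Since the estimator's constructors are package-private, the factory is the public entry point. A hedged round-trip sketch using only the public API introduced by this patch (the demo class itself is illustrative; the quantile call assumes the `KllFloatsSketch.getQuantile(double)` method of the datasketches library this code builds against):

```java
import org.apache.hadoop.hive.ql.udf.datasketches.kll.KllHistogramEstimator;
import org.apache.hadoop.hive.ql.udf.datasketches.kll.KllHistogramEstimatorFactory;

public class KllRoundTripDemo {
  public static void main(String[] args) {
    KllHistogramEstimator est = KllHistogramEstimatorFactory.getEmptyHistogramEstimator(200);
    for (long v = 1; v <= 100_000; v++) {
      est.addToEstimator(v);
    }
    byte[] bytes = est.serialize();
    // Restore from the byte range, as a stats deserializer would.
    KllHistogramEstimator restored =
        KllHistogramEstimatorFactory.getKllHistogramEstimator(bytes, 0, bytes.length);
    // Median estimate should be close to 50000.
    System.out.println(restored.getSketch().getQuantile(0.5));
  }
}
```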
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllUtils.java
new file mode 100644
index 000000000000..2d9c08b586de
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/datasketches/kll/KllUtils.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.datasketches.kll;
+
+import org.apache.datasketches.kll.KllFloatsSketch;
+import org.apache.datasketches.memory.Memory;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+/**
+ * KLL serialization utilities.
+ */
+public class KllUtils {
+
+  private KllUtils() {
+    throw new AssertionError("Suppress default constructor for noninstantiability");
+  }
+
+  /**
+   * Serializes a KLL sketch in the format provided by the datasketches library.
+   * @param out output stream to write to
+   * @param kll KLL sketch that needs to be serialized
+   * @throws IOException if an error occurs during serialization
+   */
+  public static void serializeKll(OutputStream out, KllFloatsSketch kll) throws IOException {
+    out.write(kll.toByteArray());
+  }
+
+  /**
+   * Deserializes a serialized KLL sketch from a stream.
+   * @param in input stream to be deserialized
+   * @return KLL sketch
+   * @throws IOException if errors occur while reading the stream
+   */
+  public static KllFloatsSketch deserializeKll(InputStream in) throws IOException {
+    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+    final byte[] data = new byte[4];
+    int nRead;
+
+    while ((nRead = in.read(data, 0, data.length)) != -1) {
+      buffer.write(data, 0, nRead);
+    }
+
+    buffer.flush();
+    return KllFloatsSketch.heapify(Memory.wrap(buffer.toByteArray()));
+  }
+
+  /**
+   * Deserializes a serialized KLL sketch from a byte array.
+   * @param buf buffer to deserialize from
+   * @param start start index for deserialization
+   * @param len number of bytes to deserialize, starting at {@code start}
+   * @return KLL sketch
+   */
+  public static KllFloatsSketch deserializeKll(byte[] buf, int start, int len) {
+    InputStream is = new ByteArrayInputStream(buf, start, len);
+    try {
+      KllFloatsSketch result = deserializeKll(is);
+      is.close();
+      return result;
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Returns the length of the given KLL sketch according to the given java data model.
+   * @param model the java data model to compute the length
+   * @param kll the KLL sketch to compute the length for
+   * @return the length of the given KLL sketch according to the given java data model
+   */
+  public static int lengthFor(JavaDataModel model, KllFloatsSketch kll) {
+    return model == null ? KllFloatsSketch.getMaxSerializedSizeBytes(kll.getK(), kll.getN())
+        : (int) model.lengthForByteArrayOfSize(kll.getSerializedSizeBytes());
+  }
+}
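Note that `deserializeKll(InputStream)` drains the whole stream before heapifying, so callers should hand it a stream containing exactly one sketch. A minimal round-trip sketch over the stream-based methods above (demo class name is illustrative):

```java
import org.apache.datasketches.kll.KllFloatsSketch;
import org.apache.hadoop.hive.ql.udf.datasketches.kll.KllUtils;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class KllUtilsDemo {
  public static void main(String[] args) throws IOException {
    KllFloatsSketch kll = new KllFloatsSketch(200);
    for (int i = 0; i < 10_000; i++) {
      kll.update(i);
    }
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    KllUtils.serializeKll(bos, kll);                  // writes kll.toByteArray()
    KllFloatsSketch copy = KllUtils.deserializeKll(
        new ByteArrayInputStream(bos.toByteArray())); // drains the whole stream
    System.out.println(copy.getN());                  // 10000
  }
}
```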
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java
new file mode 100644
index 000000000000..ff1a372d5d17
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util;
+
+public class TimeUtil {
+  public long getNanoSeconds() {
+    return System.nanoTime();
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index 566b12514278..4e6f9934162d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -101,7 +101,8 @@
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.ArgumentMatchers.any;
-import static org.powermock.api.mockito.PowerMockito.when;
+import static org.mockito.Mockito.when;
+
 import org.mockito.Mockito;
 import org.mockito.stubbing.Answer;
 
@@ -1452,8 +1453,8 @@ private void execDDLOpAndCompactionConcurrently(String opType, boolean isPartioned)
     runStatementOnDriver("INSERT INTO " + tblName + (isPartioned ? " PARTITION (p='" + partName + "')" : "") +
         " VALUES (1, 'foo'),(2, 'bar'),(3, 'baz')");
-    runStatementOnDriver("UPDATE " + tblName + " SET b = 'blah' WHERE a = 3");
+    runStatementOnDriver("UPDATE " + tblName + " SET b = 'blah' WHERE a = 3");
     //run Worker to execute compaction
     CompactionRequest req = new CompactionRequest("default", tblName, CompactionType.MAJOR);
     if (isPartioned) {
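`TimeUtil` exists purely as an injectable clock: `CreateFunctionHandler` above now routes its `System.nanoTime()` call through it, so a test can pin the timestamp component of the copied-resource path. A minimal stub, using only the class defined in this patch:

```java
import org.apache.hadoop.hive.ql.util.TimeUtil;

public class FixedTimeUtilDemo {
  public static void main(String[] args) {
    // A stub that always returns the same "nanotime"; tests can pass this to
    // PrimaryToReplicaResourceFunction instead of relying on System.nanoTime().
    TimeUtil fixed = new TimeUtil() {
      @Override
      public long getNanoSeconds() {
        return 42L;
      }
    };
    System.out.println(fixed.getNanoSeconds()); // always 42
  }
}
```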
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasDumpTask.java
index 7f3263bd4e63..8d23406ef8b3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasDumpTask.java
@@ -36,7 +36,7 @@
 import org.apache.hadoop.hive.ql.exec.repl.atlas.AtlasRestClientImpl;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.AtlasDumpLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.ReplicationMetricCollector;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Assert;
@@ -44,13 +44,9 @@
 import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
+import org.mockito.MockedStatic;
 import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-import org.powermock.reflect.Whitebox;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import javax.ws.rs.HttpMethod;
 import javax.ws.rs.core.Response;
@@ -64,16 +60,17 @@
 import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
-import static org.powermock.api.mockito.PowerMockito.mockStatic;
-import static org.powermock.api.mockito.PowerMockito.when;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 /**
  * Unit test class for testing Atlas metadata Dump.
 */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({LoggerFactory.class, UserGroupInformation.class, ConfigurationConverter.class})
-
+@RunWith(MockitoJUnitRunner.class)
 public class TestAtlasDumpTask {
 
   @Mock
@@ -92,48 +89,39 @@ public class TestAtlasDumpTask {
 
   @Test
   public void testAtlasDumpMetrics() throws Exception {
-    Mockito.when(work.getMetricCollector()).thenReturn(metricCollector);
-    Mockito.when(conf.get(HiveConf.ConfVars.REPL_ATLAS_ENDPOINT.varname)).thenReturn("http://localhost:21000/atlas");
-    Mockito.when(conf.get(HiveConf.ConfVars.REPL_ATLAS_REPLICATED_TO_DB.varname)).thenReturn("tgtDb");
-    Mockito.when(conf.get(HiveConf.ConfVars.REPL_SOURCE_CLUSTER_NAME.varname)).thenReturn("srcCluster");
-    Mockito.when(conf.get(HiveConf.ConfVars.REPL_TARGET_CLUSTER_NAME.varname)).thenReturn("tgtCluster");
-    Mockito.when(conf.get(ReplUtils.DEFAULT_FS_CONFIG)).thenReturn("hdfs:tgtFsUri:8020");
-    Mockito.when(work.getStagingDir()).thenReturn(new Path("hdfs://tmp:8020/staging"));
-    Mockito.when(work.getSrcDB()).thenReturn("srcDB");
-    Mockito.when(work.isBootstrap()).thenReturn(true);
-    atlasDumpTask = new AtlasDumpTask(atlasRestClient, conf, work);
+    AtlasDumpLogger logger = mock(AtlasDumpLogger.class);
+    when(work.getMetricCollector()).thenReturn(metricCollector);
+    when(conf.get(HiveConf.ConfVars.REPL_ATLAS_ENDPOINT.varname)).thenReturn("http://localhost:21000/atlas");
+    when(conf.get(HiveConf.ConfVars.REPL_ATLAS_REPLICATED_TO_DB.varname)).thenReturn("tgtDb");
+    when(conf.get(HiveConf.ConfVars.REPL_SOURCE_CLUSTER_NAME.varname)).thenReturn("srcCluster");
+    when(conf.get(HiveConf.ConfVars.REPL_TARGET_CLUSTER_NAME.varname)).thenReturn("tgtCluster");
+    when(conf.get(ReplUtils.DEFAULT_FS_CONFIG)).thenReturn("hdfs:tgtFsUri:8020");
+    when(work.getStagingDir()).thenReturn(new Path("hdfs://tmp:8020/staging"));
+    when(work.getSrcDB()).thenReturn("srcDB");
+    when(work.isBootstrap()).thenReturn(true);
+    atlasDumpTask = new AtlasDumpTask(atlasRestClient, conf, work, logger);
     AtlasDumpTask atlasDumpTaskSpy = Mockito.spy(atlasDumpTask);
-    Mockito.when(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)).thenReturn(true);
-    Logger logger = Mockito.mock(Logger.class);
-    Whitebox.setInternalState(ReplState.class, logger);
+    when(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)).thenReturn(true);
     Mockito.doReturn(0L).when(atlasDumpTaskSpy)
        .dumpAtlasMetaData(Mockito.any(AtlasRequestBuilder.class), Mockito.any(AtlasReplInfo.class));
     Mockito.doNothing().when(atlasDumpTaskSpy).createDumpMetadata(Mockito.any(AtlasReplInfo.class),
        Mockito.any(Long.class));
     int status = atlasDumpTaskSpy.execute();
+
     Assert.assertEquals(0, status);
-    ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
-    ArgumentCaptor<Object> eventCaptor = ArgumentCaptor.forClass(Object.class);
-    ArgumentCaptor<Object> eventDetailsCaptor = ArgumentCaptor.forClass(Object.class);
-    Mockito.verify(logger,
-        Mockito.times(2)).info(replStateCaptor.capture(),
-        eventCaptor.capture(), eventDetailsCaptor.capture());
-    Assert.assertEquals("REPL::{}: {}", replStateCaptor.getAllValues().get(0));
-    Assert.assertEquals("ATLAS_DUMP_START", eventCaptor.getAllValues().get(0));
-    Assert.assertEquals("ATLAS_DUMP_END", eventCaptor.getAllValues().get(1));
-    Assert.assertTrue(eventDetailsCaptor.getAllValues().get(1).toString(), eventDetailsCaptor.getAllValues().get(0)
-        .toString().contains("{\"dbName\":\"srcDB\",\"dumpStartTime"));
-    Assert.assertTrue(eventDetailsCaptor
-        .getAllValues().get(1).toString().contains("{\"dbName\":\"srcDB\",\"dumpEndTime\""));
+    verify(logger, times(1)).startLog();
+    verify(logger, times(1)).endLog(any());
   }
 
   @Test
   public void testAtlasRestClientBuilder() throws SemanticException, IOException {
-    mockStatic(UserGroupInformation.class);
-    when(UserGroupInformation.getLoginUser()).thenReturn(mock(UserGroupInformation.class));
-    AtlasRestClientBuilder atlasRestCleintBuilder = new AtlasRestClientBuilder("http://localhost:31000");
-    AtlasRestClient atlasClient = atlasRestCleintBuilder.getClient(conf);
-    Assert.assertTrue(atlasClient != null);
+    try (MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class)) {
+      userGroupInformationMockedStatic.when(UserGroupInformation::getLoginUser).thenReturn(mock(UserGroupInformation.class));
+
+      AtlasRestClientBuilder atlasRestClientBuilder = new AtlasRestClientBuilder("http://localhost:31000");
+      AtlasRestClient atlasClient = atlasRestClientBuilder.getClient(conf);
+      Assert.assertTrue(atlasClient != null);
+    }
   }
 
   @Test
@@ -150,7 +138,7 @@ public void testRetryingClientTimeBased() throws SemanticException, IOException, AtlasServiceException {
     AtlasRestClientImpl atlasRestClientImpl = (AtlasRestClientImpl) atlasClient;
     InputStream inputStream = atlasRestClientImpl.exportData(exportRequest);
     ArgumentCaptor<AtlasExportRequest> expReqCaptor = ArgumentCaptor.forClass(AtlasExportRequest.class);
-    Mockito.verify(atlasClientV2, Mockito.times(1)).exportData(expReqCaptor.capture());
+    Mockito.verify(atlasClientV2, times(1)).exportData(expReqCaptor.capture());
     Assert.assertEquals(expReqCaptor.getValue().toString(), "dummyExportRequest");
     byte[] exportResponseDataReadBytes = new byte[exportResponseData.length()];
     inputStream.read(exportResponseDataReadBytes);
@@ -178,7 +166,7 @@ public void testRetryingClientTimeBasedExhausted() throws AtlasServiceException {
       Assert.assertTrue(atlasServiceException == ex.getCause());
     }
     ArgumentCaptor<AtlasExportRequest> expReqCaptor = ArgumentCaptor.forClass(AtlasExportRequest.class);
-    Mockito.verify(atlasClientV2, Mockito.times(3)).exportData(expReqCaptor.capture());
+    Mockito.verify(atlasClientV2, times(3)).exportData(expReqCaptor.capture());
     for (AtlasExportRequest atlasExportRequest : expReqCaptor.getAllValues()) {
       Assert.assertEquals(atlasExportRequest.toString(), "dummyExportRequest");
     }
@@ -205,7 +193,7 @@ public void testAtlasServerEntityNotFound() throws AtlasServiceException, SemanticException {
     AtlasServer atlasServerRet = atlasClient.getServer("src", conf);
     Assert.assertNull(atlasServerRet);
     ArgumentCaptor<String> getServerReqCaptor = ArgumentCaptor.forClass(String.class);
-    Mockito.verify(atlasClientV2, Mockito.times(1)).getServer(getServerReqCaptor.capture());
+    Mockito.verify(atlasClientV2, times(1)).getServer(getServerReqCaptor.capture());
   }
 
   @Test
@@ -223,28 +211,33 @@ public void testAtlasServerEntityRetryExhausted() throws AtlasServiceException {
       Assert.assertTrue(atlasServiceException == ex.getCause());
     }
     ArgumentCaptor<String> getServerReqCaptor = ArgumentCaptor.forClass(String.class);
-    Mockito.verify(atlasClientV2, Mockito.times(4)).getServer(getServerReqCaptor.capture());
+    Mockito.verify(atlasClientV2, times(4)).getServer(getServerReqCaptor.capture());
   }
 
   @Test
   public void testAtlasClientTimeouts() throws Exception {
-    when(conf.getTimeVar(HiveConf.ConfVars.REPL_EXTERNAL_CLIENT_CONNECT_TIMEOUT,
-        TimeUnit.MILLISECONDS)).thenReturn(20L);
-    when(conf.getTimeVar(HiveConf.ConfVars.REPL_ATLAS_CLIENT_READ_TIMEOUT, TimeUnit.MILLISECONDS)).thenReturn(500L);
-    mockStatic(UserGroupInformation.class);
-    when(UserGroupInformation.getLoginUser()).thenReturn(mock(UserGroupInformation.class));
-    mockStatic(ConfigurationConverter.class);
-    when(ConfigurationConverter.getConfiguration(Mockito.any(Properties.class))).thenCallRealMethod();
-    AtlasRestClientBuilder atlasRestCleintBuilder = new AtlasRestClientBuilder("http://localhost:31000");
-    AtlasRestClient atlasClient = atlasRestCleintBuilder.getClient(conf);
-    Assert.assertTrue(atlasClient != null);
-    ArgumentCaptor<Properties> propsCaptor = ArgumentCaptor.forClass(Properties.class);
-    PowerMockito.verifyStatic(ConfigurationConverter.class, Mockito.times(1));
-    ConfigurationConverter.getConfiguration(propsCaptor.capture());
-    Assert.assertEquals("20", propsCaptor.getValue().getProperty(
-        AtlasRestClientBuilder.ATLAS_PROPERTY_CONNECT_TIMEOUT_IN_MS));
-    Assert.assertEquals("500", propsCaptor.getValue().getProperty(
-        AtlasRestClientBuilder.ATLAS_PROPERTY_READ_TIMEOUT_IN_MS));
+    try (
+        MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class);
+        MockedStatic<ConfigurationConverter> configurationConverterMockedStatic = mockStatic(ConfigurationConverter.class)
+    ) {
+      when(conf.getTimeVar(HiveConf.ConfVars.REPL_EXTERNAL_CLIENT_CONNECT_TIMEOUT,
+          TimeUnit.MILLISECONDS)).thenReturn(20L);
+      when(conf.getTimeVar(HiveConf.ConfVars.REPL_ATLAS_CLIENT_READ_TIMEOUT, TimeUnit.MILLISECONDS)).thenReturn(500L);
+
+      userGroupInformationMockedStatic.when(UserGroupInformation::getLoginUser).thenReturn(mock(UserGroupInformation.class));
+      configurationConverterMockedStatic.when(() -> ConfigurationConverter.getConfiguration(Mockito.any(Properties.class))).thenCallRealMethod();
+
+      AtlasRestClientBuilder atlasRestClientBuilder = new AtlasRestClientBuilder("http://localhost:31000");
+      AtlasRestClient atlasClient = atlasRestClientBuilder.getClient(conf);
+      Assert.assertTrue(atlasClient != null);
+      ArgumentCaptor<Properties> propsCaptor = ArgumentCaptor.forClass(Properties.class);
+      configurationConverterMockedStatic.verify(times(1), () -> ConfigurationConverter.getConfiguration(propsCaptor.capture()));
+      Assert.assertEquals("20", propsCaptor.getValue().getProperty(
+          AtlasRestClientBuilder.ATLAS_PROPERTY_CONNECT_TIMEOUT_IN_MS));
+      Assert.assertEquals("500", propsCaptor.getValue().getProperty(
+          AtlasRestClientBuilder.ATLAS_PROPERTY_READ_TIMEOUT_IN_MS));
+    }
  }
 
   @Test
@@ -283,7 +276,7 @@ public void testGetFileAsListRetry() throws Exception {
       Assert.assertTrue(e.getMessage().contains("Unable to connect"));
     }
     ArgumentCaptor<Path> getServerReqCaptor = ArgumentCaptor.forClass(Path.class);
-    Mockito.verify(fs, Mockito.times(4)).getFileStatus(getServerReqCaptor.capture());
+    Mockito.verify(fs, times(4)).getFileStatus(getServerReqCaptor.capture());
     List<Path> pathList = getServerReqCaptor.getAllValues();
     for (Path path : pathList) {
       Assert.assertTrue(tableListPath.equals(path));
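`testAtlasClientTimeouts` above uses `MockedStatic.verify(VerificationMode, Verification)` — the argument order available in Mockito 3.4.x, which this patch pins. A self-contained sketch of that verify-plus-captor pattern against a hypothetical utility class (nothing here is from the Hive codebase):

```java
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.MockedStatic;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mockStatic;
import static org.mockito.Mockito.times;

public class MockedStaticVerifyTest {

  // Tiny stand-in for a static-heavy utility such as ConfigurationConverter.
  static class Codec {
    static String encode(String raw) {
      return "real:" + raw;
    }
  }

  @Test
  public void verifiesAStaticCallAndCapturesItsArgument() {
    try (MockedStatic<Codec> mocked = mockStatic(Codec.class)) {
      mocked.when(() -> Codec.encode("x")).thenReturn("stub:x");
      assertEquals("stub:x", Codec.encode("x"));

      ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
      // Mockito 3.4.x signature: verify(VerificationMode, Verification).
      mocked.verify(times(1), () -> Codec.encode(captor.capture()));
      assertEquals("x", captor.getValue());
    }
  }
}
```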
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasLoadTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasLoadTask.java
index 0024fa5e267c..c8b8ae17dd17 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasLoadTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestAtlasLoadTask.java
@@ -20,17 +20,17 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.repl.atlas.AtlasReplInfo;
-import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.ReplicationMetricCollector;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.MockitoJUnitRunner;
-import org.powermock.reflect.Whitebox;
-import org.slf4j.Logger;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
 
 /**
  * Unit test class for testing Atlas metadata load.
@@ -52,31 +52,20 @@ public class TestAtlasLoadTask {
   @Test
   public void testAtlasLoadMetrics() throws Exception {
     Mockito.when(work.getMetricCollector()).thenReturn(metricCollector);
-    atlasLoadTask = new AtlasLoadTask(conf, work);
+    ReplLogger logger = Mockito.mock(ReplLogger.class);
+    atlasLoadTask = new AtlasLoadTask(conf, work, logger);
     AtlasLoadTask atlasLoadTaskSpy = Mockito.spy(atlasLoadTask);
     Mockito.when(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)).thenReturn(true);
-    Logger logger = Mockito.mock(Logger.class);
-    Whitebox.setInternalState(ReplState.class, logger);
     AtlasReplInfo atlasReplInfo = new AtlasReplInfo("http://localhost:21000/atlas", "srcDB", "tgtDB",
         "srcCluster", "tgtCluster", new Path("hdfs://tmp"), null, conf);
     atlasReplInfo.setSrcFsUri("hdfs://srcFsUri:8020");
     atlasReplInfo.setTgtFsUri("hdfs:tgtFsUri:8020");
     Mockito.doReturn(atlasReplInfo).when(atlasLoadTaskSpy).createAtlasReplInfo();
+
     int status = atlasLoadTaskSpy.execute();
+
     Assert.assertEquals(0, status);
-    ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
-    ArgumentCaptor<Object> eventCaptor = ArgumentCaptor.forClass(Object.class);
-    ArgumentCaptor<Object> eventDetailsCaptor = ArgumentCaptor.forClass(Object.class);
-    Mockito.verify(logger,
-        Mockito.times(2)).info(replStateCaptor.capture(),
-        eventCaptor.capture(), eventDetailsCaptor.capture());
-    Assert.assertEquals("REPL::{}: {}", replStateCaptor.getAllValues().get(0));
-    Assert.assertEquals("ATLAS_LOAD_START", eventCaptor.getAllValues().get(0));
-    Assert.assertEquals("ATLAS_LOAD_END", eventCaptor.getAllValues().get(1));
-    Assert.assertTrue(eventDetailsCaptor.getAllValues().get(0)
-        .toString().contains("{\"sourceDbName\":\"srcDB\",\"targetDbName\":\"tgtDB\",\"loadStartTime\":"));
-    Assert.assertTrue(eventDetailsCaptor
-        .getAllValues().get(1).toString().contains("{\"sourceDbName\":\"srcDB\",\"targetDbName\""
-        + ":\"tgtDB\",\"numOfEntities\":0,\"loadEndTime\""));
+    Mockito.verify(logger, times(1)).startLog();
+    Mockito.verify(logger, times(1)).endLog(any());
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
index 0af6b00ce5f2..cc1d949e2acf 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
@@ -26,32 +26,28 @@
 import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
 import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
-import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.RangerDumpLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.ReplicationMetricCollector;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-import org.powermock.reflect.Whitebox;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import java.net.URL;
 import java.util.ArrayList;
 
 import static org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.RANGER_REST_URL;
 import static org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.RANGER_HIVE_SERVICE_NAME;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
 
 /**
  * Unit test class for testing Ranger Dump.
  */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({LoggerFactory.class})
+@RunWith(MockitoJUnitRunner.class)
 public class TestRangerDumpTask {
 
   private RangerDumpTask task;
@@ -142,8 +138,8 @@ public void testSuccessNonEmptyRangerPolicies() throws Exception {
 
   @Test
   public void testSuccessRangerDumpMetrics() throws Exception {
-    Logger logger = Mockito.mock(Logger.class);
-    Whitebox.setInternalState(ReplState.class, logger);
+    RangerDumpLogger logger = Mockito.mock(RangerDumpLogger.class);
+    task = new RangerDumpTask(mockClient, conf, work, logger);
     RangerExportPolicyList rangerPolicyList = new RangerExportPolicyList();
     rangerPolicyList.setPolicies(new ArrayList<>());
     Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString(),
@@ -152,22 +148,12 @@ public void testSuccessRangerDumpMetrics() throws Exception {
     Mockito.when(conf.get(RANGER_REST_URL)).thenReturn("rangerEndpoint");
     Mockito.when(conf.get(RANGER_HIVE_SERVICE_NAME)).thenReturn("hive");
     Mockito.when(work.getDbName()).thenReturn("testdb");
-    Mockito.when(work.getCurrentDumpPath()).thenReturn(new Path("/tmp"));
     Mockito.when(work.getRangerConfigResource()).thenReturn(new URL("file://ranger.xml"));
+
     int status = task.execute();
+
     Assert.assertEquals(0, status);
-    ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
-    ArgumentCaptor<Object> eventCaptor = ArgumentCaptor.forClass(Object.class);
-    ArgumentCaptor<Object> eventDetailsCaptor = ArgumentCaptor.forClass(Object.class);
-    Mockito.verify(logger,
-        Mockito.times(2)).info(replStateCaptor.capture(),
-        eventCaptor.capture(), eventDetailsCaptor.capture());
-    Assert.assertEquals("REPL::{}: {}", replStateCaptor.getAllValues().get(0));
-    Assert.assertEquals("RANGER_DUMP_START", eventCaptor.getAllValues().get(0));
-    Assert.assertEquals("RANGER_DUMP_END", eventCaptor.getAllValues().get(1));
-    Assert.assertTrue(eventDetailsCaptor.getAllValues().get(0)
-        .toString().contains("{\"dbName\":\"testdb\",\"dumpStartTime"));
-    Assert.assertTrue(eventDetailsCaptor
-        .getAllValues().get(1).toString().contains("{\"dbName\":\"testdb\",\"actualNumPolicies\":0,\"dumpEndTime\""));
+    Mockito.verify(logger, times(1)).startLog();
+    Mockito.verify(logger, times(1)).endLog(any());
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java
index 5485fbd145f0..22a122c7ec13 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java
@@ -24,7 +24,7 @@
 import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerExportPolicyList;
 import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
 import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
-import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.RangerDumpLogger;
 import org.apache.hadoop.hive.ql.parse.repl.metric.ReplicationMetricCollector;
 import org.junit.Assert;
 import org.junit.Before;
@@ -34,7 +34,6 @@
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.MockitoJUnitRunner;
-import org.powermock.reflect.Whitebox;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,6 +43,8 @@
 import static org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.RANGER_HIVE_SERVICE_NAME;
 import static org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.RANGER_REST_URL;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
 
 /**
  * Unit test class for testing Ranger Load.
@@ -76,7 +77,7 @@ public void setup() throws Exception {
         .thenCallRealMethod();
     Mockito.when(mockClient.getDenyPolicyForReplicatedDb(Mockito.anyString(), Mockito.anyString(),
         Mockito.anyString())).thenCallRealMethod();
-    Mockito.when(mockClient.checkConnection(Mockito.anyString(), Mockito.any())).thenReturn(true);
+    Mockito.when(mockClient.checkConnection(Mockito.anyString(), any())).thenReturn(true);
     Mockito.when(work.getMetricCollector()).thenReturn(metricCollector);
   }
 
@@ -116,7 +117,7 @@ public void testSuccessNonEmptyRangerPolicies() throws Exception {
     Mockito.when(work.getTargetDbName()).thenReturn("tgtdb");
     Path rangerDumpPath = new Path("/tmp");
     Mockito.when(work.getCurrentDumpPath()).thenReturn(rangerDumpPath);
-    Mockito.when(mockClient.readRangerPoliciesFromJsonFile(Mockito.any(), Mockito.any())).thenReturn(rangerPolicyList);
+    Mockito.when(mockClient.readRangerPoliciesFromJsonFile(any(), any())).thenReturn(rangerPolicyList);
     Mockito.when(work.getRangerConfigResource()).thenReturn(new URL("file://ranger.xml"));
     int status = task.execute();
     Assert.assertEquals(0, status);
@@ -124,8 +125,9 @@ public void testSuccessNonEmptyRangerPolicies() throws Exception {
 
   @Test
   public void testSuccessRangerDumpMetrics() throws Exception {
-    Logger logger = Mockito.mock(Logger.class);
-    Whitebox.setInternalState(ReplState.class, logger);
+    RangerDumpLogger logger = Mockito.mock(RangerDumpLogger.class);
+    task = new RangerLoadTask(mockClient, conf, work, logger);
+
     String rangerResponse = "{\"metaDataInfo\":{\"Host name\":\"ranger.apache.org\","
         + "\"Exported by\":\"hive\",\"Export time\":\"May 5, 2020, 8:55:03 AM\",\"Ranger apache version\""
         + ":\"2.0.0.7.2.0.0-61\"},\"policies\":[{\"service\":\"cm_hive\",\"name\":\"db-level\",\"policyType\":0,"
@@ -143,25 +145,12 @@ public void testSuccessRangerDumpMetrics() throws Exception {
     Mockito.when(work.getTargetDbName()).thenReturn("tgtdb");
     Path rangerDumpPath = new Path("/tmp");
     Mockito.when(work.getCurrentDumpPath()).thenReturn(rangerDumpPath);
-    Mockito.when(mockClient.readRangerPoliciesFromJsonFile(Mockito.any(), Mockito.any())).thenReturn(rangerPolicyList);
+    Mockito.when(mockClient.readRangerPoliciesFromJsonFile(any(), any())).thenReturn(rangerPolicyList);
     Mockito.when(work.getRangerConfigResource()).thenReturn(new URL("file://ranger.xml"));
     int status = task.execute();
     Assert.assertEquals(0, status);
-    ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
-    ArgumentCaptor<Object> eventCaptor = ArgumentCaptor.forClass(Object.class);
-    ArgumentCaptor<Object> eventDetailsCaptor = ArgumentCaptor.forClass(Object.class);
-    Mockito.verify(logger,
-        Mockito.times(2)).info(replStateCaptor.capture(),
-        eventCaptor.capture(), eventDetailsCaptor.capture());
-    Assert.assertEquals("REPL::{}: {}", replStateCaptor.getAllValues().get(0));
-    Assert.assertEquals("RANGER_LOAD_START", eventCaptor.getAllValues().get(0));
-    Assert.assertEquals("RANGER_LOAD_END", eventCaptor.getAllValues().get(1));
-    Assert.assertTrue(eventDetailsCaptor.getAllValues().get(0)
-        .toString().contains("{\"sourceDbName\":\"srcdb\",\"targetDbName\":\"tgtdb\""
-        + ",\"estimatedNumPolicies\":1,\"loadStartTime\":"));
-    Assert.assertTrue(eventDetailsCaptor
-        .getAllValues().get(1).toString().contains("{\"sourceDbName\":\"srcdb\",\"targetDbName\""
-        + ":\"tgtdb\",\"actualNumPolicies\":1,\"loadEndTime\""));
+    Mockito.verify(logger, times(1)).startLog();
+    Mockito.verify(logger, times(1)).endLog(any());
   }
 
   @Test
@@ -186,7 +175,7 @@ public void testRangerDenyTask() throws Exception {
         Mockito.never()).deleteRangerPolicy(policyName.capture(), rangerEndpoint.capture(), serviceName.capture(),
         confCaptor.capture());
     Mockito.verify(mockClient,
-        Mockito.times(1)).importRangerPolicies(rangerPolicyCapture.capture(),
+        times(1)).importRangerPolicies(rangerPolicyCapture.capture(),
         targetDb.capture(), rangerEndpoint.capture(), serviceName.capture(), confCaptor.capture());
     Assert.assertEquals("tgtdb", targetDb.getAllValues().get(0));
     Assert.assertEquals("rangerEndpoint", rangerEndpoint.getAllValues().get(0));
@@ -221,7 +210,7 @@ public void testRangerDenyTask() throws Exception {
     status = rangerDenyTask.execute();
     Assert.assertEquals(0, status);
     Mockito.verify(mockClient,
-        Mockito.times(1)).deleteRangerPolicy(policyName.capture(), rangerEndpoint.capture(), serviceName.capture(),
+        times(1)).deleteRangerPolicy(policyName.capture(), rangerEndpoint.capture(), serviceName.capture(),
         confCaptor.capture());
     Assert.assertEquals("srcdb_replication deny policy for tgtdb", policyName.getAllValues().get(0));
     Assert.assertEquals("rangerEndpoint", rangerEndpoint.getAllValues().get(0));
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
index e04a99eef5be..0d26683ecbce 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
@@ -33,10 +33,9 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
+import org.mockito.MockedStatic;
 import org.mockito.Mockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -48,13 +47,10 @@
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.mock;
-import static org.powermock.api.mockito.PowerMockito.mockStatic;
-import static org.powermock.api.mockito.PowerMockito.when;
-import static org.powermock.api.mockito.PowerMockito.whenNew;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
 
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({ Utils.class, ReplDumpTask.class})
-@PowerMockIgnore({ "javax.management.*" })
+@RunWith(MockitoJUnitRunner.class)
 public class TestReplDumpTask {
 
   protected static final Logger LOG = LoggerFactory.getLogger(TestReplDumpTask.class);
@@ -113,47 +109,50 @@ public void removeDBPropertyToPreventRenameWhenBootstrapDumpOfTableFails() throws Exception {
     String dbRandomKey = "akeytoberandom";
     ReplScope replScope = new ReplScope("default");
 
-    mockStatic(Utils.class);
-    when(Utils.matchesDb(same(hive), eq("default")))
-        .thenReturn(Collections.singletonList("default"));
-    when(Utils.getAllTables(same(hive), eq("default"), eq(replScope))).thenReturn(tableList);
-    when(Utils.setDbBootstrapDumpState(same(hive), eq("default"))).thenReturn(dbRandomKey);
-    when(Utils.matchesTbl(same(hive), eq("default"), eq(replScope))).thenReturn(tableList);
-
-
-    when(hive.getAllFunctions()).thenReturn(Collections.emptyList());
-    when(queryState.getConf()).thenReturn(conf);
-    when(conf.getLong("hive.repl.last.repl.id", -1L)).thenReturn(1L);
-    when(conf.getBoolVar(HiveConf.ConfVars.REPL_INCLUDE_EXTERNAL_TABLES)).thenReturn(false);
-    when(HiveConf.getVar(conf, HiveConf.ConfVars.REPL_BOOTSTRAP_DUMP_OPEN_TXN_TIMEOUT)).thenReturn("1h");
-    whenNew(HiveWrapper.class).withAnyArguments().thenReturn(mock(HiveWrapper.class));
-
-    ReplDumpTask task = new StubReplDumpTask() {
-      private int tableDumpCount = 0;
-
-      @Override
-      void dumpTable(ExportService exportService, String dbName, String tblName, String validTxnList,
-          Path dbRootMetadata, Path dbRootData,
-          long lastReplId, Hive hiveDb,
-          HiveWrapper.Tuple<Table> tuple, FileList managedTableDirFileList, boolean dataCopyAtLoad)
-          throws Exception {
-        tableDumpCount++;
-        if (tableDumpCount > 1) {
-          throw new TestException();
-        }
-      }
-    };
-
-    task.initialize(queryState, null, null, null);
-    ReplDumpWork replDumpWork = new ReplDumpWork(replScope, "", "");
-    replDumpWork.setMetricCollector(metricCollector);
-    task.setWork(replDumpWork);
-
-    try {
-      task.bootStrapDump(new Path("mock"), new DumpMetaData(new Path("mock"), conf),
-          mock(Path.class), hive);
-    } finally {
-      Utils.resetDbBootstrapDumpState(same(hive), eq("default"), eq(dbRandomKey));
-    }
+    try (MockedStatic<Utils> utilsMockedStatic = mockStatic(Utils.class)) {
+      utilsMockedStatic.when(() -> Utils.matchesDb(same(hive), eq("default")))
+          .thenReturn(Collections.singletonList("default"));
+      utilsMockedStatic.when(() -> Utils.getAllTables(same(hive), eq("default"), eq(replScope))).thenReturn(tableList);
+      utilsMockedStatic.when(() -> Utils.setDbBootstrapDumpState(same(hive), eq("default"))).thenReturn(dbRandomKey);
+      utilsMockedStatic.when(() -> Utils.matchesTbl(same(hive), eq("default"), eq(replScope))).thenReturn(tableList);
+
+      when(queryState.getConf()).thenReturn(conf);
+      when(conf.getLong("hive.repl.last.repl.id", -1L)).thenReturn(1L);
+      when(conf.getBoolVar(HiveConf.ConfVars.REPL_INCLUDE_EXTERNAL_TABLES)).thenReturn(false);
+      when(HiveConf.getVar(conf, HiveConf.ConfVars.REPL_BOOTSTRAP_DUMP_OPEN_TXN_TIMEOUT)).thenReturn("1h");
+
+      ReplDumpTask task = new StubReplDumpTask() {
+        private int tableDumpCount = 0;
+
+        @Override
+        void dumpTable(ExportService exportService, String dbName, String tblName, String validTxnList,
+            Path dbRootMetadata, Path dbRootData,
+            long lastReplId, Hive hiveDb,
+            HiveWrapper.Tuple<Table> tuple, FileList managedTableDirFileList, boolean dataCopyAtLoad)
+            throws Exception {
+          tableDumpCount++;
+          if (tableDumpCount > 1) {
+            throw new TestException();
+          }
+        }
+
+        @Override
+        HiveWrapper createHiveWrapper(Hive hiveDb, String dbName) {
+          return mock(HiveWrapper.class);
+        }
+      };
+
+      task.initialize(queryState, null, null, null);
+      ReplDumpWork replDumpWork = new ReplDumpWork(replScope, "", "");
+      replDumpWork.setMetricCollector(metricCollector);
+      task.setWork(replDumpWork);
+
+      try {
+        task.bootStrapDump(new Path("mock"), new DumpMetaData(new Path("mock"), conf),
+            mock(Path.class), hive);
+      } finally {
+        Utils.resetDbBootstrapDumpState(same(hive), eq("default"), eq(dbRandomKey));
+      }
+    }
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java
index bb9999db6149..4b6e2f6695ae 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java
@@ -22,14 +22,12 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.io.Serializable;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-@RunWith(PowerMockRunner.class)
+@RunWith(MockitoJUnitRunner.class)
 public class TestTaskTracker {
   @Mock
   private Task task;
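The `TestReplDumpTask` migration above replaces PowerMock's free-floating `when(Utils.matchesDb(...))` with lambda-scoped stubbing on a `MockedStatic`, where argument matchers go inside the lambda exactly as they would with instance stubbing. The same pattern, reduced to a self-contained test against a hypothetical helper class:

```java
import org.junit.Test;
import org.mockito.MockedStatic;

import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mockStatic;

public class MockedStaticMatchersTest {

  // Stand-in for a static helper like the repl dump Utils class.
  static class MathUtils {
    static int scale(int value, int factor) {
      return value * factor;
    }
  }

  @Test
  public void stubsStaticMethodsWithArgumentMatchers() {
    try (MockedStatic<MathUtils> mocked = mockStatic(MathUtils.class)) {
      // Matchers go inside the lambda, exactly as with instance stubbing.
      mocked.when(() -> MathUtils.scale(anyInt(), eq(10))).thenReturn(-1);
      assertEquals(-1, MathUtils.scale(3, 10)); // matches the stub
      assertEquals(0, MathUtils.scale(3, 2));   // unstubbed static calls return default values
    }
  }
}
```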
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/ranger/TestRangerRestClient.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/ranger/TestRangerRestClient.java
index 15da904a56a4..6372fadeefe7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/ranger/TestRangerRestClient.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/ranger/TestRangerRestClient.java
@@ -28,21 +28,18 @@
 import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
+import org.mockito.MockedStatic;
 import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import java.security.PrivilegedAction;
-import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.TimeUnit;
 
 /**
  * Unit test class for testing Ranger Dump.
 */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({UserGroupInformation.class})
+@RunWith(MockitoJUnitRunner.class)
 public class TestRangerRestClient {
 
   @Mock
@@ -56,41 +53,35 @@ public class TestRangerRestClient {
 
   @Before
   public void setup() throws Exception {
-    PowerMockito.mockStatic(UserGroupInformation.class);
-    Mockito.when(UserGroupInformation.getLoginUser()).thenReturn(userGroupInformation);
-    Mockito.when(userGroupInformation.doAs((PrivilegedAction) Mockito.any())).thenCallRealMethod();
-    Mockito.when(userGroupInformation.doAs((PrivilegedExceptionAction) Mockito.any())).thenCallRealMethod();
     Mockito.when(mockClient.getRangerExportUrl(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
       .thenCallRealMethod();
-    Mockito.when(mockClient.getRangerImportUrl(Mockito.anyString(), Mockito.anyString()))
-      .thenCallRealMethod();
     Mockito.when(conf.getTimeVar(HiveConf.ConfVars.REPL_RETRY_INTIAL_DELAY, TimeUnit.SECONDS)).thenReturn(1L);
     Mockito.when(conf.getTimeVar(HiveConf.ConfVars.REPL_RETRY_TOTAL_DURATION, TimeUnit.SECONDS)).thenReturn(20L);
     Mockito.when(conf.getTimeVar(HiveConf.ConfVars.REPL_RETRY_JITTER, TimeUnit.SECONDS)).thenReturn(1L);
     Mockito.when(conf.getTimeVar(HiveConf.ConfVars.REPL_RETRY_MAX_DELAY_BETWEEN_RETRIES, TimeUnit.SECONDS))
         .thenReturn(10L);
-    Mockito.when(conf.getFloat(HiveConf.ConfVars.REPL_RETRY_BACKOFF_COEFFICIENT.varname, 1.0f))
-        .thenReturn(1.0f);
   }
 
   @Test
   public void testSuccessSimpleAuthCheckConnection() throws Exception {
-    Mockito.when(UserGroupInformation.isSecurityEnabled()).thenReturn(false);
-    Mockito.when(mockClient.checkConnectionPlain(Mockito.anyString(), Mockito.any(HiveConf.class))).thenReturn(true);
+    Mockito.when(mockClient.checkConnectionPlain(Mockito.eq("http://localhost:6080/ranger"), Mockito.any(HiveConf.class))).thenReturn(true);
     Mockito.when(mockClient.checkConnection(Mockito.anyString(), Mockito.any())).thenCallRealMethod();
-    mockClient.checkConnection("http://localhost:6080/ranger", conf);
-    ArgumentCaptor<String> urlCaptor = ArgumentCaptor.forClass(String.class);
-    Mockito.verify(mockClient,
-        Mockito.times(1)).checkConnectionPlain(urlCaptor.capture(), Mockito.any(HiveConf.class));
-    Assert.assertEquals("http://localhost:6080/ranger", urlCaptor.getValue());
-    ArgumentCaptor<PrivilegedAction> privilegedActionArgumentCaptor = ArgumentCaptor.forClass(PrivilegedAction.class);
+
+    try (MockedStatic<UserGroupInformation> ignored = Mockito.mockStatic(UserGroupInformation.class)) {
+      ignored.when(UserGroupInformation::isSecurityEnabled).thenReturn(false);
+      mockClient.checkConnection("http://localhost:6080/ranger", conf);
+    }
+
+    Mockito.verify(mockClient, Mockito.times(1)).checkConnectionPlain(Mockito.eq("http://localhost:6080/ranger"), Mockito.any(HiveConf.class));
     Mockito.verify(userGroupInformation,
-        Mockito.times(0)).doAs(privilegedActionArgumentCaptor.capture());
+        Mockito.never()).doAs(Mockito.any(PrivilegedAction.class));
   }
 
   @Test
   public void testSuccessSimpleAuthRangerExport() throws Exception {
-    Mockito.when(UserGroupInformation.isSecurityEnabled()).thenReturn(false);
+    try (MockedStatic<UserGroupInformation> ignored = Mockito.mockStatic(UserGroupInformation.class)) {
+      ignored.when(UserGroupInformation::isSecurityEnabled).thenReturn(false);
+    }
     Mockito.when(mockClient.exportRangerPoliciesPlain(Mockito.anyString(), Mockito.any(HiveConf.class))).thenReturn(new RangerExportPolicyList());
     Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString(),
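One consequence of scoped static mocking is visible in testSuccessSimpleAuthCheckConnection above: checkConnection() is invoked inside the try block, because a MockedStatic stub stops applying the moment the block exits. A small sketch of the scoping rule, reusing Hadoop's UserGroupInformation as these tests do (assuming no other static mock of that class is open on the thread):

import org.apache.hadoop.security.UserGroupInformation;
import org.mockito.MockedStatic;
import org.mockito.Mockito;

class ScopeSketch {
  static void demo() {
    try (MockedStatic<UserGroupInformation> ugi = Mockito.mockStatic(UserGroupInformation.class)) {
      ugi.when(UserGroupInformation::isSecurityEnabled).thenReturn(false);
      boolean stubbed = UserGroupInformation.isSecurityEnabled(); // false, from the stub
    }
    boolean real = UserGroupInformation.isSecurityEnabled(); // real Hadoop behavior again
  }
}

Interaction checks that used to need PowerMockito.verifyStatic can instead become plain Mockito.verify calls on collaborating instance mocks, as the rewritten test does with checkConnectionPlain.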
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java
index e7d010f988ac..f6a5ce4d4800 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/util/TestFileList.java
@@ -30,8 +30,6 @@
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
 import org.mockito.junit.MockitoJUnitRunner;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -42,9 +40,7 @@
 /**
  * Tests the File List implementation.
 */
-
 @RunWith(MockitoJUnitRunner.class)
-@PrepareForTest({LoggerFactory.class})
 public class TestFileList {
 
   HiveConf conf = new HiveConf();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java
index 9d0e57948b13..a53f8ae9971f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/util/TestRetryable.java
@@ -22,38 +22,44 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
+import org.mockito.MockedStatic;
 import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.concurrent.Callable;
 
+import static org.mockito.Mockito.mockStatic;
+
 /**
  * Tests for retriable interface.
 */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({UserGroupInformation.class})
+@RunWith(MockitoJUnitRunner.class)
 public class TestRetryable {
 
   @Mock
   UserGroupInformation userGroupInformation;
 
+  private static MockedStatic<UserGroupInformation> userGroupInformationMockedStatic;
+
+  @BeforeClass
+  public static void setupClass() {
+    userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class);
+  }
+
   @Before
-  public void setup() throws IOException {
-    PowerMockito.mockStatic(UserGroupInformation.class);
-    Mockito.when(UserGroupInformation.isSecurityEnabled()).thenReturn(false);
-    Mockito.when(UserGroupInformation.getLoginUser()).thenReturn(userGroupInformation);
-    Mockito.when(UserGroupInformation.getCurrentUser()).thenReturn(userGroupInformation);
+  public void setup() {
+    userGroupInformationMockedStatic.when(UserGroupInformation::isSecurityEnabled).thenReturn(false);
+    userGroupInformationMockedStatic.when(UserGroupInformation::getLoginUser).thenReturn(userGroupInformation);
+    userGroupInformationMockedStatic.when(UserGroupInformation::getCurrentUser).thenReturn(userGroupInformation);
   }
 
   @Test
@@ -324,7 +330,6 @@ public Void call() throws Exception {
 
   @Test
   public void testRetrySuccessSecureCallable() throws Throwable {
-    Mockito.when(userGroupInformation.doAs((PrivilegedAction) Mockito.any())).thenCallRealMethod();
     Mockito.when(userGroupInformation.doAs((PrivilegedExceptionAction) Mockito.any())).thenCallRealMethod();
     Mockito.when(UserGroupInformation.isSecurityEnabled()).thenReturn(true);
     Retryable retryable = Retryable.builder()
@@ -340,7 +345,7 @@ public void testRetrySuccessSecureCallable() throws Throwable {
     ArgumentCaptor<PrivilegedExceptionAction> privilegedActionArgumentCaptor =
         ArgumentCaptor.forClass(PrivilegedExceptionAction.class);
     Mockito.verify(userGroupInformation,
-        Mockito.times(3)).doAs(privilegedActionArgumentCaptor.capture());
+        Mockito.times(1)).doAs(privilegedActionArgumentCaptor.capture());
   }
 
   private void executeWithDelay(long startTime, long totalTime) {
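TestRetryable takes a different route: because every test needs the same UserGroupInformation stubs, the MockedStatic is opened once in @BeforeClass and kept in a static field, with per-test stubbing in @Before. A sketch of that lifecycle; the @AfterClass close() shown here is not part of the hunk above — it is the conventional counterpart, and without it the static mock stays registered on the thread after this class's tests finish:

import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.mockito.MockedStatic;
import org.mockito.Mockito;

public class LifecycleSketch {
  private static MockedStatic<UserGroupInformation> ugiMock;

  @BeforeClass
  public static void openStaticMock() {
    ugiMock = Mockito.mockStatic(UserGroupInformation.class);
  }

  @Before
  public void stubDefaults() {
    // Re-stubbing the same method replaces the previous answer.
    ugiMock.when(UserGroupInformation::isSecurityEnabled).thenReturn(false);
  }

  @AfterClass
  public static void closeStaticMock() {
    ugiMock.close(); // hand the real class back to later test classes in this JVM
  }
}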
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java
index 7c6bb094c95e..4659d5227da1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java
@@ -30,11 +30,10 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
+import org.mockito.MockedStatic;
 import org.mockito.Mockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
@@ -52,15 +52,13 @@
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.mock;
-import static org.powermock.api.mockito.PowerMockito.mockStatic;
-import static org.powermock.api.mockito.PowerMockito.when;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
 
 /**
  * Unit Test class for CopyUtils class.
 */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({ CopyUtils.class, FileUtils.class, Utils.class, UserGroupInformation.class, ReplChangeManager.class})
-@PowerMockIgnore({ "javax.management.*" })
+@RunWith(MockitoJUnitRunner.class)
 public class TestCopyUtils {
   /*
   Distcp currently does not copy a single file in a distributed manner hence we dont care about
@@ -68,25 +66,28 @@
    */
   @Test
   public void distcpShouldNotBeCalledOnlyForOneFile() throws Exception {
-    mockStatic(UserGroupInformation.class);
-    when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class));
-
-    HiveConf conf = Mockito.spy(new HiveConf());
-    doReturn(1L).when(conf).getLong(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE.varname, 32L * 1024 * 1024);
-    CopyUtils copyUtils = new CopyUtils("", conf, null);
-    long MB_128 = 128 * 1024 * 1024;
-    assertFalse(copyUtils.limitReachedForLocalCopy(MB_128, 1L));
+    try (MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class)) {
+      userGroupInformationMockedStatic.when(UserGroupInformation::getCurrentUser).thenReturn(mock(UserGroupInformation.class));
+
+      HiveConf conf = Mockito.spy(new HiveConf());
+      doReturn(1L).when(conf).getLong(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE.varname, 32L * 1024 * 1024);
+      CopyUtils copyUtils = new CopyUtils("", conf, null);
+      long MB_128 = 128 * 1024 * 1024;
+      assertFalse(copyUtils.limitReachedForLocalCopy(MB_128, 1L));
+    }
   }
 
   @Test
   public void distcpShouldNotBeCalledForSmallerFileSize() throws Exception {
-    mockStatic(UserGroupInformation.class);
-    when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class));
+    try (
+        MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class)) {
+      userGroupInformationMockedStatic.when(UserGroupInformation::getCurrentUser).thenReturn(mock(UserGroupInformation.class));
 
-    HiveConf conf = Mockito.spy(new HiveConf());
-    CopyUtils copyUtils = new CopyUtils("", conf, null);
-    long MB_16 = 16 * 1024 * 1024;
-    assertFalse(copyUtils.limitReachedForLocalCopy(MB_16, 100L));
+      HiveConf conf = Mockito.spy(new HiveConf());
+      CopyUtils copyUtils = new CopyUtils("", conf, null);
+      long MB_16 = 16 * 1024 * 1024;
+      assertFalse(copyUtils.limitReachedForLocalCopy(MB_16, 100L));
+    }
   }
 
   @Test(expected = IOException.class)
@@ -98,150 +99,163 @@ public void shouldThrowExceptionOnDistcpFailure() throws Exception {
     HiveConf conf = mock(HiveConf.class);
     CopyUtils copyUtils = Mockito.spy(new CopyUtils(null, conf, fs));
-    mockStatic(FileUtils.class);
-    mockStatic(Utils.class);
-    when(destination.getFileSystem(same(conf))).thenReturn(fs);
-    when(source.getFileSystem(same(conf))).thenReturn(fs);
-    when(FileUtils.distCp(same(fs), anyListOf(Path.class), same(destination),
-        anyBoolean(), eq(null), same(conf),
-        same(ShimLoader.getHadoopShims())))
-        .thenReturn(false);
-    when(Utils.getUGI()).thenReturn(mock(UserGroupInformation.class));
-    doReturn(false).when(copyUtils).regularCopy(same(fs), anyListOf(ReplChangeManager.FileInfo.class));
-
-    copyUtils.doCopy(destination, srcPaths);
+    try (
+        MockedStatic<FileUtils> fileUtilsMockedStatic = mockStatic(FileUtils.class);
+        MockedStatic<Utils> utilsMockedStatic = mockStatic(Utils.class)) {
+
+      fileUtilsMockedStatic.when(() -> FileUtils.distCp(same(fs), anyListOf(Path.class), same(destination),
+          anyBoolean(), eq(null), same(conf),
+          same(ShimLoader.getHadoopShims())))
+          .thenReturn(false);
+      utilsMockedStatic.when(Utils::getUGI).thenReturn(mock(UserGroupInformation.class));
+
+      when(source.getFileSystem(same(conf))).thenReturn(fs);
+      doReturn(false).when(copyUtils).regularCopy(same(fs), anyListOf(ReplChangeManager.FileInfo.class));
+
+      copyUtils.doCopy(destination, srcPaths);
+    }
   }
 
   @Test
   public void testFSCallsFailOnParentExceptions() throws Exception {
-    mockStatic(UserGroupInformation.class);
-    mockStatic(ReplChangeManager.class);
-    when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class));
-    HiveConf conf = mock(HiveConf.class);
-    conf.set(HiveConf.ConfVars.REPL_RETRY_INTIAL_DELAY.varname, "1s");
-    FileSystem fs = mock(FileSystem.class);
-    Path source = mock(Path.class);
-    Path destination = mock(Path.class);
-    ContentSummary cs = mock(ContentSummary.class);
-
-    Exception exception = new org.apache.hadoop.fs.PathPermissionException("Failed");
-    when(ReplChangeManager.checksumFor(source, fs)).thenThrow(exception).thenReturn("dummy");
-    when(fs.exists(same(source))).thenThrow(exception).thenReturn(true);
-    when(fs.delete(same(source), anyBoolean())).thenThrow(exception).thenReturn(true);
-    when(fs.mkdirs(same(source))).thenThrow(exception).thenReturn(true);
-    when(fs.rename(same(source), same(destination))).thenThrow(exception).thenReturn(true);
-    when(fs.getContentSummary(same(source))).thenThrow(exception).thenReturn(cs);
-
-    CopyUtils copyUtils = new CopyUtils(UserGroupInformation.getCurrentUser().getUserName(), conf, fs);
-    CopyUtils copyUtilsSpy = Mockito.spy(copyUtils);
-    try {
-      copyUtilsSpy.exists(fs, source);
-    } catch (Exception e) {
-      assertEquals(exception.getClass(), e.getCause().getClass());
-    }
-    Mockito.verify(fs, Mockito.times(1)).exists(source);
-    try {
-      copyUtils.delete(fs, source, true);
-    } catch (Exception e) {
-      assertEquals(exception.getClass(), e.getCause().getClass());
-    }
-    Mockito.verify(fs, Mockito.times(1)).delete(source, true);
-    try {
-      copyUtils.mkdirs(fs, source);
-    } catch (Exception e) {
-      assertEquals(exception.getClass(), e.getCause().getClass());
-    }
-    Mockito.verify(fs, Mockito.times(1)).mkdirs(source);
-    try {
-      copyUtils.rename(fs, source, destination);
-    } catch (Exception e) {
-      assertEquals(exception.getClass(), e.getCause().getClass());
-    }
-    Mockito.verify(fs, Mockito.times(1)).rename(source, destination);
-    try {
-      copyUtilsSpy.getContentSummary(fs, source);
-    } catch (Exception e) {
-      assertEquals(exception.getClass(), e.getCause().getClass());;
-    }
-    Mockito.verify(fs, Mockito.times(1)).getContentSummary(source);
-    try {
-      copyUtilsSpy.checkSumFor(source, fs);
-    } catch (Exception e) {
-      assertEquals(exception.getClass(), e.getCause().getClass());
+    try (
+        MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class);
+        MockedStatic<ReplChangeManager> replChangeManagerMockedStatic = mockStatic(ReplChangeManager.class)
+    ) {
+
+      userGroupInformationMockedStatic.when(UserGroupInformation::getCurrentUser).thenReturn(mock(UserGroupInformation.class));
+
+      HiveConf conf = mock(HiveConf.class);
+      conf.set(HiveConf.ConfVars.REPL_RETRY_INTIAL_DELAY.varname, "1s");
+      FileSystem fs = mock(FileSystem.class);
+      Path source = mock(Path.class);
+      Path destination = mock(Path.class);
+      ContentSummary cs = mock(ContentSummary.class);
+
+      Exception exception = new org.apache.hadoop.fs.PathPermissionException("Failed");
+      replChangeManagerMockedStatic.when(() -> ReplChangeManager.checksumFor(source, fs)).thenThrow(exception).thenReturn("dummy");
+      when(fs.exists(same(source))).thenThrow(exception).thenReturn(true);
+      when(fs.delete(same(source), anyBoolean())).thenThrow(exception).thenReturn(true);
+      when(fs.mkdirs(same(source))).thenThrow(exception).thenReturn(true);
+      when(fs.rename(same(source), same(destination))).thenThrow(exception).thenReturn(true);
+      when(fs.getContentSummary(same(source))).thenThrow(exception).thenReturn(cs);
+
+      CopyUtils copyUtils = new CopyUtils(UserGroupInformation.getCurrentUser().getUserName(), conf, fs);
+      CopyUtils copyUtilsSpy = Mockito.spy(copyUtils);
+      try {
+        copyUtilsSpy.exists(fs, source);
+      } catch (Exception e) {
+        assertEquals(exception.getClass(), e.getCause().getClass());
+      }
+      Mockito.verify(fs, Mockito.times(1)).exists(source);
+      try {
+        copyUtils.delete(fs, source, true);
+      } catch (Exception e) {
+        assertEquals(exception.getClass(), e.getCause().getClass());
+      }
+      Mockito.verify(fs, Mockito.times(1)).delete(source, true);
+      try {
+        copyUtils.mkdirs(fs, source);
+      } catch (Exception e) {
+        assertEquals(exception.getClass(), e.getCause().getClass());
+      }
+      Mockito.verify(fs, Mockito.times(1)).mkdirs(source);
+      try {
+        copyUtils.rename(fs, source, destination);
+      } catch (Exception e) {
+        assertEquals(exception.getClass(), e.getCause().getClass());
+      }
+      Mockito.verify(fs, Mockito.times(1)).rename(source, destination);
+      try {
+        copyUtilsSpy.getContentSummary(fs, source);
+      } catch (Exception e) {
+        assertEquals(exception.getClass(), e.getCause().getClass());
+      }
+      Mockito.verify(fs, Mockito.times(1)).getContentSummary(source);
+      try {
+        copyUtilsSpy.checkSumFor(source, fs);
+      } catch (Exception e) {
+        assertEquals(exception.getClass(), e.getCause().getClass());
+      }
+      Mockito.verify(copyUtilsSpy, Mockito.times(1)).checkSumFor(source, fs);
     }
-    Mockito.verify(copyUtilsSpy, Mockito.times(1)).checkSumFor(source, fs);
   }
 
   @Test
   public void testRetryableFSCalls() throws Exception {
-    mockStatic(UserGroupInformation.class);
-    mockStatic(ReplChangeManager.class);
-    when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class));
-    HiveConf conf = mock(HiveConf.class);
-    conf.set(HiveConf.ConfVars.REPL_RETRY_INTIAL_DELAY.varname, "1s");
-    FileSystem fs = mock(FileSystem.class);
-    Path source = mock(Path.class);
-    Path destination = mock(Path.class);
-    ContentSummary cs = mock(ContentSummary.class);
-
-    when(ReplChangeManager.checksumFor(source, fs)).thenThrow(new IOException("Failed")).thenReturn("dummy");
-    when(fs.exists(same(source))).thenThrow(new IOException("Failed")).thenReturn(true);
-    when(fs.delete(same(source), anyBoolean())).thenThrow(new IOException("Failed")).thenReturn(true);
-    when(fs.mkdirs(same(source))).thenThrow(new IOException("Failed")).thenReturn(true);
-    when(fs.rename(same(source), same(destination))).thenThrow(new IOException("Failed")).thenReturn(true);
-    when(fs.getContentSummary(same(source))).thenThrow(new IOException("Failed")).thenReturn(cs);
-
-    CopyUtils copyUtils = new CopyUtils(UserGroupInformation.getCurrentUser().getUserName(), conf, fs);
-    CopyUtils copyUtilsSpy = Mockito.spy(copyUtils);
-    assertEquals (true, copyUtilsSpy.exists(fs, source));
-    Mockito.verify(fs, Mockito.times(2)).exists(source);
-    assertEquals (true, copyUtils.delete(fs, source, true));
-    Mockito.verify(fs, Mockito.times(2)).delete(source, true);
-    assertEquals (true, copyUtils.mkdirs(fs, source));
-    Mockito.verify(fs, Mockito.times(2)).mkdirs(source);
-    assertEquals (true, copyUtils.rename(fs, source, destination));
-    Mockito.verify(fs, Mockito.times(2)).rename(source, destination);
-    assertEquals (cs, copyUtilsSpy.getContentSummary(fs, source));
-    Mockito.verify(fs, Mockito.times(2)).getContentSummary(source);
-    assertEquals ("dummy", copyUtilsSpy.checkSumFor(source, fs));
+    try (
+        MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class);
+        MockedStatic<ReplChangeManager> replChangeManagerMockedStatic = mockStatic(ReplChangeManager.class)
+    ) {
+
+      userGroupInformationMockedStatic.when(UserGroupInformation::getCurrentUser).thenReturn(mock(UserGroupInformation.class));
+      HiveConf conf = mock(HiveConf.class);
+      conf.set(HiveConf.ConfVars.REPL_RETRY_INTIAL_DELAY.varname, "1s");
+      FileSystem fs = mock(FileSystem.class);
+      Path source = mock(Path.class);
+      Path destination = mock(Path.class);
+      ContentSummary cs = mock(ContentSummary.class);
+
+      replChangeManagerMockedStatic.when(() -> ReplChangeManager.checksumFor(source, fs)).thenThrow(new IOException("Failed")).thenReturn("dummy");
+      when(fs.exists(same(source))).thenThrow(new IOException("Failed")).thenReturn(true);
+      when(fs.delete(same(source), anyBoolean())).thenThrow(new IOException("Failed")).thenReturn(true);
+      when(fs.mkdirs(same(source))).thenThrow(new IOException("Failed")).thenReturn(true);
+      when(fs.rename(same(source), same(destination))).thenThrow(new IOException("Failed")).thenReturn(true);
+      when(fs.getContentSummary(same(source))).thenThrow(new IOException("Failed")).thenReturn(cs);
+
+      CopyUtils copyUtils = new CopyUtils(UserGroupInformation.getCurrentUser().getUserName(), conf, fs);
+      CopyUtils copyUtilsSpy = Mockito.spy(copyUtils);
+      assertEquals (true, copyUtilsSpy.exists(fs, source));
+      Mockito.verify(fs, Mockito.times(2)).exists(source);
+      assertEquals (true, copyUtils.delete(fs, source, true));
+      Mockito.verify(fs, Mockito.times(2)).delete(source, true);
+      assertEquals (true, copyUtils.mkdirs(fs, source));
+      Mockito.verify(fs, Mockito.times(2)).mkdirs(source);
+      assertEquals (true, copyUtils.rename(fs, source, destination));
+      Mockito.verify(fs, Mockito.times(2)).rename(source, destination);
+      assertEquals (cs, copyUtilsSpy.getContentSummary(fs, source));
+      Mockito.verify(fs, Mockito.times(2)).getContentSummary(source);
+      assertEquals ("dummy", copyUtilsSpy.checkSumFor(source, fs));
+    }
   }
 
   @Test
   public void testParallelCopySuccess() throws Exception {
-    mockStatic(UserGroupInformation.class);
-    when(UserGroupInformation.getCurrentUser()).thenReturn(mock(UserGroupInformation.class));
-    HiveConf conf = Mockito.spy(new HiveConf());
-    when(conf.getIntVar(HiveConf.ConfVars.REPL_PARALLEL_COPY_TASKS)).thenReturn(2);
-    when(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)).thenReturn(true);
-    FileSystem destFs = mock(FileSystem.class);
-    when(destFs.exists(Mockito.any())).thenReturn(true);
-    CopyUtils copyUtils = new CopyUtils(UserGroupInformation.getCurrentUser().getUserName(), conf, destFs);
-    CopyUtils copyUtilsSpy = Mockito.spy(copyUtils);
-    ExecutorService executorService = Executors.newFixedThreadPool(2);
-    ExecutorService mockExecutorService = Mockito.spy(executorService);
-    when(copyUtilsSpy.getExecutorService()).thenReturn(mockExecutorService);
-    Path destination = new Path("dest");
-    Path source = mock(Path.class);
-    FileSystem fs = mock(FileSystem.class);
-    ReplChangeManager.FileInfo srcFileInfo = new ReplChangeManager.FileInfo(fs, source, "path1");
-    List<ReplChangeManager.FileInfo> srcFiles = Arrays.asList(srcFileInfo);
-    doNothing().when(copyUtilsSpy).doCopy(Mockito.any(), Mockito.any(),
-        Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any());
-    copyUtilsSpy.copyAndVerify(destination, srcFiles, source, true, true);
-    Class<List<Callable<Void>>> listClass =
-        (Class<List<Callable<Void>>>)(Class)List.class;
-    //Thread pool Not invoked as only one target path
-    ArgumentCaptor<List<Callable<Void>>> callableCapture = ArgumentCaptor.forClass(listClass);
-    Mockito.verify(mockExecutorService, Mockito.times(0)).invokeAll(callableCapture.capture());
-    ReplChangeManager.FileInfo srcFileInfo1 = new ReplChangeManager.FileInfo(fs, source, "path2");
-    ReplChangeManager.FileInfo srcFileInfo2 = new ReplChangeManager.FileInfo(fs, source, "path3");
-    srcFiles = Arrays.asList(srcFileInfo1, srcFileInfo2);
-    executorService = Executors.newFixedThreadPool(2);
-    mockExecutorService = Mockito.spy(executorService);
-    when(copyUtilsSpy.getExecutorService()).thenReturn(mockExecutorService);
-    copyUtilsSpy.copyAndVerify(destination, srcFiles, source, true, true);
-    //File count is greater than 1 do thread pool invoked
-    Mockito.verify(mockExecutorService,
-        Mockito.times(1)).invokeAll(callableCapture.capture());
+    try (MockedStatic<UserGroupInformation> userGroupInformationMockedStatic = mockStatic(UserGroupInformation.class)) {
+
+      userGroupInformationMockedStatic.when(UserGroupInformation::getCurrentUser).thenReturn(mock(UserGroupInformation.class));
+      HiveConf conf = Mockito.spy(new HiveConf());
+      when(conf.getIntVar(HiveConf.ConfVars.REPL_PARALLEL_COPY_TASKS)).thenReturn(2);
+      when(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)).thenReturn(true);
+      FileSystem destFs = mock(FileSystem.class);
+      CopyUtils copyUtils = new CopyUtils(UserGroupInformation.getCurrentUser().getUserName(), conf, destFs);
+      CopyUtils copyUtilsSpy = Mockito.spy(copyUtils);
+      ExecutorService executorService = Executors.newFixedThreadPool(2);
+      ExecutorService mockExecutorService = Mockito.spy(executorService);
+      when(copyUtilsSpy.getExecutorService()).thenReturn(mockExecutorService);
+      Path destination = new Path("dest");
+      Path source = mock(Path.class);
+      FileSystem fs = mock(FileSystem.class);
+      ReplChangeManager.FileInfo srcFileInfo = new ReplChangeManager.FileInfo(fs, source, "path1");
+      List<ReplChangeManager.FileInfo> srcFiles = Arrays.asList(srcFileInfo);
+      doNothing().when(copyUtilsSpy).doCopy(Mockito.any(), Mockito.any(),
+          Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any());
+      copyUtilsSpy.copyAndVerify(destination, srcFiles, source, true, true);
+      Class<List<Callable<Void>>> listClass =
+          (Class<List<Callable<Void>>>)(Class)List.class;
+      //Thread pool Not invoked as only one target path
+      ArgumentCaptor<List<Callable<Void>>> callableCapture = ArgumentCaptor.forClass(listClass);
+      Mockito.verify(mockExecutorService, Mockito.times(0)).invokeAll(callableCapture.capture());
+      ReplChangeManager.FileInfo srcFileInfo1 = new ReplChangeManager.FileInfo(fs, source, "path2");
+      ReplChangeManager.FileInfo srcFileInfo2 = new ReplChangeManager.FileInfo(fs, source, "path3");
+      srcFiles = Arrays.asList(srcFileInfo1, srcFileInfo2);
+      executorService = Executors.newFixedThreadPool(2);
+      mockExecutorService = Mockito.spy(executorService);
+      when(copyUtilsSpy.getExecutorService()).thenReturn(mockExecutorService);
+      copyUtilsSpy.copyAndVerify(destination, srcFiles, source, true, true);
+      //File count is greater than 1 do thread pool invoked
+      Mockito.verify(mockExecutorService,
+          Mockito.times(1)).invokeAll(callableCapture.capture());
+    }
   }
 }
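The CopyUtils tests above need two static mocks live at once (UserGroupInformation plus FileUtils or ReplChangeManager); try-with-resources composes them directly, and the lambda form of when() covers static methods that take arguments. A compact sketch with hypothetical stand-in classes:

import org.mockito.MockedStatic;
import static org.mockito.Mockito.mockStatic;

final class Compressor {
  static boolean compress(String path) {
    return false;
  }
}

final class Checksums {
  static String checksumFor(String path) {
    return "real";
  }
}

class MultiStaticSketch {
  static void demo() {
    try (MockedStatic<Compressor> compressor = mockStatic(Compressor.class);
         MockedStatic<Checksums> checksums = mockStatic(Checksums.class)) {
      compressor.when(() -> Compressor.compress("a/b")).thenReturn(true);
      checksums.when(() -> Checksums.checksumFor("a/b")).thenReturn("dummy");
      // ...exercise code under test here...
    } // both mocks close here, in reverse declaration order, even on exceptions
  }
}

Chained answers such as .thenThrow(new IOException("Failed")).thenReturn(cs) carry over unchanged from the instance-mocking world, which is why the retry assertions above still read the same.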
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestExportService.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestExportService.java
index bea29744ee04..5e0f3083994c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestExportService.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestExportService.java
@@ -24,8 +24,7 @@
 import org.junit.Assert;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.junit.Test;
@@ -35,9 +34,7 @@
 
 import static org.mockito.Mockito.when;
 
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({LoggerFactory.class, ExportService.class})
-
+@RunWith(MockitoJUnitRunner.class)
 public class TestExportService {
   protected static final Logger LOG = LoggerFactory.getLogger(TestExportService.class);
   @Mock
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java
index ddf687b48c59..c631ad81c58b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java
@@ -30,12 +30,13 @@
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
+import org.apache.hadoop.hive.ql.util.TimeUtil;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.MockedStatic;
+import org.mockito.junit.MockitoJUnitRunner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,12 +51,10 @@
 import static org.junit.Assert.assertThat;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
-import static org.powermock.api.mockito.PowerMockito.mockStatic;
-import static org.powermock.api.mockito.PowerMockito.when;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.when;
 
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({ PrimaryToReplicaResourceFunction.class, FileSystem.class, ReplCopyTask.class,
-    System.class })
+@RunWith(MockitoJUnitRunner.class)
 public class TestPrimaryToReplicaResourceFunction {
 
   private PrimaryToReplicaResourceFunction function;
@@ -76,20 +75,27 @@ public void setup() {
         new Context("primaryDb", null, null, null, hiveConf, null, null, logger);
     when(hiveConf.getVar(HiveConf.ConfVars.REPL_FUNCTIONS_ROOT_DIR))
         .thenReturn("/someBasePath/withADir/");
-    function = new PrimaryToReplicaResourceFunction(context, metadata, "replicaDbName");
+    function = new PrimaryToReplicaResourceFunction(context, metadata, "replicaDbName", new TimeUtil() {
+      @Override
+      public long getNanoSeconds() {
+        return 0L;
+      }
+    });
   }
 
   @Test
   public void createDestinationPath() throws IOException, SemanticException, URISyntaxException {
-    mockStatic(FileSystem.class);
-    when(FileSystem.get(any(Configuration.class))).thenReturn(mockFs);
-    when(FileSystem.get(any(URI.class), any(Configuration.class))).thenReturn(mockFs);
+    MockedStatic<FileSystem> fileSystemMockedStatic = mockStatic(FileSystem.class);
+    MockedStatic<ReplCopyTask> ignoredReplCopyTaskMockedStatic = mockStatic(ReplCopyTask.class);
+    MockedStatic<CreateFunctionHandler> createFunctionHandlerMockedStatic = mockStatic(CreateFunctionHandler.class);
+
+    fileSystemMockedStatic.when(() -> FileSystem.get(any(Configuration.class))).thenReturn(mockFs);
+    fileSystemMockedStatic.when(() -> FileSystem.get(any(URI.class), any(Configuration.class))).thenReturn(mockFs);
+
     when(mockFs.getScheme()).thenReturn("hdfs");
     when(mockFs.getUri()).thenReturn(new URI("hdfs", "somehost:9000", null, null, null));
-    mockStatic(System.class);
-//    when(System.nanoTime()).thenReturn(Long.MAX_VALUE);
+
     when(functionObj.getFunctionName()).thenReturn("someFunctionName");
-    mockStatic(ReplCopyTask.class);
     Task mock = mock(Task.class);
     when(ReplCopyTask.getLoadCopyTask(any(ReplicationSpec.class), any(Path.class), any(Path.class), any(HiveConf.class),
         any(), any())).thenReturn(mock);
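Where PowerMock could stub System.nanoTime() via @PrepareForTest({System.class}), plain Mockito deliberately refuses to mock core JDK classes, so this patch instead injects a small TimeUtil seam into PrimaryToReplicaResourceFunction and pins it in setup(). A sketch of the idea; TimeUtil mirrors the class referenced in the hunk above, while PathNamer is a hypothetical consumer used only for illustration:

class TimeUtil {
  public long getNanoSeconds() {
    return System.nanoTime();
  }
}

class PathNamer {
  private final TimeUtil timeUtil;

  PathNamer(TimeUtil timeUtil) {
    this.timeUtil = timeUtil;
  }

  String copyDirName() {
    // Production callers get real nanoseconds; tests pin the value.
    return "functions_" + timeUtil.getNanoSeconds();
  }
}

class PathNamerSketchTest {
  void producesStablePath() {
    PathNamer namer = new PathNamer(new TimeUtil() {
      @Override
      public long getNanoSeconds() {
        return 0L;
      }
    });
    assert "functions_0".equals(namer.copyDirName());
  }
}

The injected seam also removes the need for the commented-out when(System.nanoTime()) stub that the old PowerMock test carried.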
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java
index d237027f1819..cdfc2753ca10 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/metric/TestReplicationMetricCollector.java
@@ -45,14 +45,13 @@
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.After;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+import org.mockito.MockedStatic;
 import org.mockito.Mockito;
 import org.mockito.Mock;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareOnlyThisForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.mockito.junit.MockitoJUnitRunner;
 
 import java.util.Map;
 import java.util.HashMap;
@@ -62,24 +61,30 @@
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mockStatic;
 
 /**
  * Unit Test class for In Memory Replication Metric Collection.
 */
-@PowerMockIgnore({ "javax.*", "com.sun.*", "org.w3c.*" })
-@PrepareOnlyThisForTest({MetricSink.class})
-@RunWith(PowerMockRunner.class)
+@RunWith(MockitoJUnitRunner.class)
 public class TestReplicationMetricCollector {
 
   HiveConf conf;
 
+  private static MockedStatic<MetricSink> metricSinkMockedStatic;
+
   @Mock
   private FailoverMetaData fmd;
 
   @Mock
   private MetricSink metricSinkInstance;
 
+  @BeforeClass
+  public static void setupClass() {
+    metricSinkMockedStatic = mockStatic(MetricSink.class);
+  }
+
   @Before
   public void setup() throws Exception {
     conf = new HiveConf();
@@ -92,8 +97,7 @@ public void setup() throws Exception {
   }
 
   private void disableBackgroundThreads() {
-    PowerMockito.mockStatic(MetricSink.class);
-    Mockito.when(MetricSink.getInstance()).thenReturn(metricSinkInstance);
+    metricSinkMockedStatic.when(MetricSink::getInstance).thenReturn(metricSinkInstance);
   }
 
   @After
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionHeartbeatService.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionHeartbeatService.java
index 99455783da65..3c6c323ae730 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionHeartbeatService.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCompactionHeartbeatService.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.ql.txn.compactor;
 
+import org.apache.commons.pool2.ObjectPool;
+import org.apache.commons.pool2.impl.GenericObjectPool;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.junit.After;
@@ -29,46 +30,54 @@
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
 import org.mockito.Mockito;
+import org.mockito.junit.MockitoJUnitRunner;
 import org.mockito.stubbing.Answer;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.lang.reflect.Field;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.atLeast;
 import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({HiveMetaStoreUtils.class})
-@PowerMockIgnore("javax.management.*")
+@RunWith(MockitoJUnitRunner.class)
 public class TestCompactionHeartbeatService {
 
   private static Field HEARTBEAT_SINGLETON;
+  private static Field HEARTBEAT_CLIENTPOOL;
 
   @Mock
   private HiveConf conf;
   @Mock
   private IMetaStoreClient client;
 
+  private ObjectPool<IMetaStoreClient> clientPool;
+
   @BeforeClass
   public static void setupClass() throws NoSuchFieldException {
     HEARTBEAT_SINGLETON = CompactionHeartbeatService.class.getDeclaredField("instance");
     HEARTBEAT_SINGLETON.setAccessible(true);
+
+    HEARTBEAT_CLIENTPOOL = CompactionHeartbeatService.class.getDeclaredField("clientPool");
+    HEARTBEAT_CLIENTPOOL.setAccessible(true);
   }
 
   @Before
   public void setup() throws Exception {
     Mockito.when(conf.get(MetastoreConf.ConfVars.TXN_TIMEOUT.getVarname())).thenReturn("100ms");
     Mockito.when(conf.get(MetastoreConf.ConfVars.COMPACTOR_WORKER_THREADS.getVarname())).thenReturn("4");
-    PowerMockito.mockStatic(HiveMetaStoreUtils.class);
-    PowerMockito.when(HiveMetaStoreUtils.getHiveMetastoreClient(any())).thenReturn(client);
     HEARTBEAT_SINGLETON.set(null,null);
+
+    IMetaStoreClientFactory metaStoreClientFactory = spy(new IMetaStoreClientFactory(conf));
+    doReturn(client).when(metaStoreClientFactory).create();
+
+    clientPool = Mockito.spy(new GenericObjectPool<>(metaStoreClientFactory));
+
+    CompactionHeartbeatService compactionHeartbeatService = CompactionHeartbeatService.getInstance(conf);
+    HEARTBEAT_CLIENTPOOL.set(compactionHeartbeatService, clientPool);
   }
 
   @After
@@ -148,7 +157,7 @@ public void testBadClientInvalidated() throws Exception {
     // Check if bad clients were closed and new ones were requested
     verify(client, times(3)).heartbeat(0,0);
     verify(client, times(3)).close();
-    PowerMockito.verifyStatic(HiveMetaStoreUtils.class, times(3));
-    HiveMetaStoreUtils.getHiveMetastoreClient(conf);
+
+    verify(clientPool, times(3)).borrowObject();
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
index 8e1f3c529917..9a833d504f12 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
@@ -74,7 +74,7 @@
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
-import static org.powermock.api.mockito.PowerMockito.when;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests for the worker thread and its MR jobs.
@@ -1019,7 +1019,7 @@ public void testWorkerAndInitiatorVersion() throws Exception {
   public void testFindNextCompactThrowsTException() throws Exception {
     Worker worker = Mockito.spy(new Worker());
     IMetaStoreClient msc = Mockito.mock(IMetaStoreClient.class);
-    Mockito.when(msc.findNextCompact(Mockito.any(FindNextCompactRequest.class))).thenThrow(MetaException.class);
+    when(msc.findNextCompact(Mockito.any(FindNextCompactRequest.class))).thenThrow(MetaException.class);
     worker.msc = msc;
     worker.findNextCompactionAndExecute(true, true);
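TestCompactionHeartbeatService makes the equivalent move one level up: rather than statically mocking HiveMetaStoreUtils' client factory, the test builds a commons-pool2 pool around a spied IMetaStoreClientFactory and plants it in the service's clientPool field by reflection, so the old PowerMockito.verifyStatic check becomes an ordinary verify on borrowObject(). A distilled sketch of the wiring, assuming (as the create()/GenericObjectPool usage above implies) that IMetaStoreClientFactory is a commons-pool2 object factory:

import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.mockito.Mockito;

class PoolInjectionSketch {
  static GenericObjectPool<IMetaStoreClient> testPool(IMetaStoreClientFactory realFactory,
                                                      IMetaStoreClient mockClient) throws Exception {
    // Spy the factory so the pool hands out the mock client instead of a real one.
    IMetaStoreClientFactory factory = Mockito.spy(realFactory);
    Mockito.doReturn(mockClient).when(factory).create();
    // Spy the pool too, so borrowObject()/invalidateObject() traffic is verifiable.
    return Mockito.spy(new GenericObjectPool<>(factory));
  }
}

Swapping the collaborator rather than the static call keeps the production code path (pooling, invalidation of bad clients) exercised by the test instead of bypassed.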