From 3f3586bd237c8144bf9636c11889846b6dd6a654 Mon Sep 17 00:00:00 2001
From: Mike Walch
Date: Tue, 12 Mar 2019 18:53:34 -0400
Subject: [PATCH] Fix integration tests (#37)

* Due to changes in Accumulo
* Commented out use of DistributedTrace
* Updated ITs due to changes in TestIngest and ConfigurableMacBase
---
 .../examples/client/TracingExample.java       |  4 +--
 .../apache/accumulo/examples/ExamplesIT.java  | 28 +++++++------------
 .../accumulo/examples/dirlist/CountIT.java    |  3 +-
 .../examples/filedata/ChunkInputFormatIT.java |  2 +-
 .../examples/mapreduce/MapReduceIT.java       | 14 ++++++----
 5 files changed, 23 insertions(+), 28 deletions(-)

diff --git a/src/main/java/org/apache/accumulo/examples/client/TracingExample.java b/src/main/java/org/apache/accumulo/examples/client/TracingExample.java
index 050bd2b..899008c 100644
--- a/src/main/java/org/apache/accumulo/examples/client/TracingExample.java
+++ b/src/main/java/org/apache/accumulo/examples/client/TracingExample.java
@@ -32,7 +32,7 @@
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.trace.DistributedTrace;
+// import org.apache.accumulo.core.trace.DistributedTrace;
 import org.apache.accumulo.examples.cli.ClientOnDefaultTable;
 import org.apache.accumulo.examples.cli.ScannerOpts;
 import org.apache.htrace.Sampler;
@@ -74,7 +74,7 @@ private TracingExample(AccumuloClient client) {
   }
 
   private void enableTracing() {
-    DistributedTrace.enable("myHost", "myApp");
+    // DistributedTrace.enable("myHost", "myApp");
   }
 
   private void execute(Opts opts) throws TableNotFoundException, AccumuloException,
diff --git a/src/test/java/org/apache/accumulo/examples/ExamplesIT.java b/src/test/java/org/apache/accumulo/examples/ExamplesIT.java
index ff7045e..9f713ca 100644
--- a/src/test/java/org/apache/accumulo/examples/ExamplesIT.java
+++ b/src/test/java/org/apache/accumulo/examples/ExamplesIT.java
@@ -34,7 +34,6 @@
 import java.util.Map.Entry;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.cli.BatchWriterOpts;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.BatchScanner;
 import org.apache.accumulo.core.client.BatchWriter;
@@ -71,6 +70,7 @@
 import org.apache.accumulo.minicluster.MemoryUnit;
 import org.apache.accumulo.miniclusterImpl.MiniAccumuloConfigImpl;
 import org.apache.accumulo.test.TestIngest;
+import org.apache.accumulo.test.TestIngest.IngestParams;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -78,24 +78,19 @@
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Iterators;
 
 public class ExamplesIT extends AccumuloClusterHarness {
-  private static final Logger log = LoggerFactory.getLogger(ExamplesIT.class);
-  private static final BatchWriterOpts bwOpts = new BatchWriterOpts();
   private static final BatchWriterConfig bwc = new BatchWriterConfig();
-
   private static final String visibility = "A|B";
   private static final String auths = "A,B";
-  AccumuloClient c;
-  BatchWriter bw;
-  IteratorSetting is;
-  String dir;
-  FileSystem fs;
-  Authorizations origAuths;
+  private AccumuloClient c;
+  private BatchWriter bw;
+  private IteratorSetting is;
+  private String dir;
+  private FileSystem fs;
+  private Authorizations origAuths;
 
   @Override
   public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration hadoopConf) {
@@ -245,13 +240,10 @@ public void testMaxMutationConstraint() throws Exception {
     String tableName = getUniqueNames(1)[0];
     c.tableOperations().create(tableName);
     c.tableOperations().addConstraint(tableName, MaxMutationSize.class.getName());
-    TestIngest.Opts opts = new TestIngest.Opts();
-    opts.rows = 1;
-    opts.cols = 1000;
-    opts.setTableName(tableName);
-    opts.setPrincipal(getAdminPrincipal());
+    IngestParams params = new IngestParams(c.properties(), tableName, 1);
+    params.cols = 1000;
     try {
-      TestIngest.ingest(c, opts, bwOpts);
+      TestIngest.ingest(c, params);
     } catch (MutationsRejectedException ex) {
       assertEquals(1, ex.getConstraintViolationSummaries().size());
     }
diff --git a/src/test/java/org/apache/accumulo/examples/dirlist/CountIT.java b/src/test/java/org/apache/accumulo/examples/dirlist/CountIT.java
index 52bb62e..950b295 100644
--- a/src/test/java/org/apache/accumulo/examples/dirlist/CountIT.java
+++ b/src/test/java/org/apache/accumulo/examples/dirlist/CountIT.java
@@ -22,6 +22,7 @@
 import java.util.ArrayList;
 import java.util.Map.Entry;
 
+import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
@@ -55,7 +56,7 @@ protected void configure(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSit
   @Before
   public void setupInstance() throws Exception {
     tableName = getUniqueNames(1)[0];
-    client = createClient();
+    client = Accumulo.newClient().from(getClientProperties()).build();
     client.tableOperations().create(tableName);
     BatchWriter bw = client.createBatchWriter(tableName, new BatchWriterConfig());
     ColumnVisibility cv = new ColumnVisibility();
diff --git a/src/test/java/org/apache/accumulo/examples/filedata/ChunkInputFormatIT.java b/src/test/java/org/apache/accumulo/examples/filedata/ChunkInputFormatIT.java
index 8797fef..23790df 100644
--- a/src/test/java/org/apache/accumulo/examples/filedata/ChunkInputFormatIT.java
+++ b/src/test/java/org/apache/accumulo/examples/filedata/ChunkInputFormatIT.java
@@ -119,7 +119,7 @@ public static class TestMapper
 
     @Override
     protected void map(List<Entry<Key,Value>> key, InputStream value, Context context)
-        throws IOException, InterruptedException {
+        throws IOException {
       String table = context.getConfiguration().get("MRTester_tableName");
       assertNotNull(table);
 
diff --git a/src/test/java/org/apache/accumulo/examples/mapreduce/MapReduceIT.java b/src/test/java/org/apache/accumulo/examples/mapreduce/MapReduceIT.java
index 8eedb69..b77ae9e 100644
--- a/src/test/java/org/apache/accumulo/examples/mapreduce/MapReduceIT.java
+++ b/src/test/java/org/apache/accumulo/examples/mapreduce/MapReduceIT.java
@@ -22,11 +22,13 @@
 import java.util.Base64;
 import java.util.Collections;
 import java.util.Map.Entry;
+import java.util.Properties;
 
+import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.BatchWriter;
-import org.apache.accumulo.core.client.BatchWriterConfig;
 import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -65,12 +67,13 @@ protected void configure(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSit
   @Test
   public void test() throws Exception {
     String confFile = System.getProperty("user.dir") + "/target/accumulo-client.properties";
-    String instance = getClientInfo().getInstanceName();
-    String keepers = getClientInfo().getZooKeepers();
+    Properties props = getClientProperties();
+    String instance = ClientProperty.INSTANCE_NAME.getValue(props);
+    String keepers = ClientProperty.INSTANCE_ZOOKEEPERS.getValue(props);
     ExamplesIT.writeClientPropsFile(confFile, instance, keepers, "root", ROOT_PASSWORD);
-    try (AccumuloClient client = createClient()) {
+    try (AccumuloClient client = Accumulo.newClient().from(props).build()) {
       client.tableOperations().create(tablename);
-      BatchWriter bw = client.createBatchWriter(tablename, new BatchWriterConfig());
+      BatchWriter bw = client.createBatchWriter(tablename);
       for (int i = 0; i < 10; i++) {
         Mutation m = new Mutation("" + i);
         m.put(input_cf, input_cq, "row" + i);
@@ -93,5 +96,4 @@ public void test() throws Exception {
       }
     }
   }
-
 }