Fix warnings and clean up pom.xml
POM updates:
* Update plugins
* Add m2e profile to ensure compiler compliance in Eclipse and to ignore
  plugins that Eclipse doesn't understand
* Require a minimum Java version of 1.8 and Maven 3.5
* Remove unneeded plugins (felix bundle and exec-maven-plugin)

Fix warnings in code:
* Use try-with-resources for TestEnv
* Use Double.parseDouble instead of new Double
* Compute iterable sizes from scanners without unused variables
* Suppress deprecation warnings where needed
* Update BulkPlusOne RW test to use new bulk import API
* Add missing case statement for GET_SUMMARIES in TableOp
* Remove unused variables
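
Several of the warning fixes above are generic Java patterns rather than Accumulo-specific changes. The sketch below is illustrative only, not code from this commit: the properties-file path and table name are hypothetical, and Guava's Iterables.size is shown as one common way to count scanner entries without declaring an unused loop variable.

import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.security.Authorizations;

import com.google.common.collect.Iterables;

public class WarningFixSketch {
  public static void main(String[] args) throws Exception {
    // new Double("1.5") allocates a boxed Double and is deprecated in newer JDKs;
    // Double.parseDouble returns the primitive directly.
    double d = Double.parseDouble("1.5");

    // try-with-resources closes the client and scanner even on exceptions; this is
    // the same pattern the commit applies to TestEnv subclasses like ContinuousEnv.
    try (AccumuloClient client =
        Accumulo.newClient().from("/path/to/accumulo-client.properties").build();
        Scanner scanner = client.createScanner("mytable", Authorizations.EMPTY)) {
      // A for-each loop like "for (Entry<Key,Value> e : scanner) count++;" leaves
      // 'e' unread; computing the size from the iterable avoids that warning.
      int count = Iterables.size(scanner);
      System.out.println(count + " entries; sample value " + d);
    }
  }
}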
ctubbsii committed Jul 30, 2019
1 parent 2b7b28f commit 237c0c5a952adcef7f1aad29dcb243c602cadf54
Showing 17 changed files with 264 additions and 205 deletions.
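
Among the files not excerpted below is the BulkPlusOne random-walk test, which the commit moves to the bulk-import API introduced in Accumulo 2.0. A minimal sketch of old versus new, with a hypothetical table name and RFile directory:

import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;

public class BulkImportSketch {
  public static void main(String[] args) throws Exception {
    try (AccumuloClient client =
        Accumulo.newClient().from("/path/to/accumulo-client.properties").build()) {
      // Old (deprecated) form required a failures directory:
      //   client.tableOperations().importDirectory("mytable", "/bulk/dir", "/bulk/fail", true);
      // New fluent form in Accumulo 2.0; no failures directory, load() fails fast instead:
      client.tableOperations().importDirectory("/bulk/dir").to("mytable").load();
    }
  }
}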
pom.xml (103 changes)

@@ -126,15 +126,19 @@
     <pluginManagement>
       <plugins>
         <plugin>
-          <!-- Allows us to get the apache-ds bundle artifacts -->
-          <groupId>org.apache.felix</groupId>
-          <artifactId>maven-bundle-plugin</artifactId>
-          <version>3.0.1</version>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>3.8.1</version>
         </plugin>
         <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <version>3.2.1</version>
+        </plugin>
+        <plugin>
           <groupId>com.github.ekryd.sortpom</groupId>
           <artifactId>sortpom-maven-plugin</artifactId>
-          <version>2.8.0</version>
+          <version>2.10.0</version>
           <configuration>
             <predefinedSortOrder>recommended_2008_06</predefinedSortOrder>
             <createBackupFile>false</createBackupFile>
@@ -149,7 +153,7 @@
         <plugin>
           <groupId>net.revelc.code.formatter</groupId>
           <artifactId>formatter-maven-plugin</artifactId>
-          <version>2.8.1</version>
+          <version>2.10.0</version>
           <configuration>
             <configFile>${eclipseFormatterStyle}</configFile>
             <compilerCompliance>${maven.compiler.source}</compilerCompliance>
@@ -171,7 +175,7 @@
         <plugin>
           <groupId>net.revelc.code</groupId>
           <artifactId>impsort-maven-plugin</artifactId>
-          <version>1.2.0</version>
+          <version>1.3.0</version>
           <configuration>
             <removeUnused>true</removeUnused>
             <groups>java.,javax.,org.,com.</groups>
@@ -183,6 +187,30 @@
       </plugins>
     </pluginManagement>
     <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <!-- must be same id as in the apache parent pom, to override the version -->
+            <id>enforce-maven-version</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>validate</phase>
+            <configuration>
+              <rules>
+                <requireMavenVersion>
+                  <version>[3.5.0,)</version>
+                </requireMavenVersion>
+                <requireJavaVersion>
+                  <version>[${maven.compiler.target},)</version>
+                </requireJavaVersion>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
@@ -200,13 +228,6 @@
           </compilerArgs>
         </configuration>
       </plugin>
-      <plugin>
-        <!-- Allows us to get the apache-ds bundle artifacts -->
-        <groupId>org.apache.felix</groupId>
-        <artifactId>maven-bundle-plugin</artifactId>
-        <extensions>true</extensions>
-        <inherited>true</inherited>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
@@ -220,27 +241,19 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
-        <version>1.5.0</version>
-        <configuration>
-          <cleanupDaemonThreads>false</cleanupDaemonThreads>
-        </configuration>
-      </plugin>
       <plugin>
         <!-- This was added to ensure project only uses public API -->
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-checkstyle-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.1.0</version>
         <configuration>
          <configLocation>contrib/checkstyle.xml</configLocation>
        </configuration>
        <dependencies>
          <dependency>
            <groupId>com.puppycrawl.tools</groupId>
            <artifactId>checkstyle</artifactId>
-            <version>8.18</version>
+            <version>8.23</version>
          </dependency>
        </dependencies>
        <executions>
@@ -350,6 +363,48 @@
       </plugins>
     </build>
   </profile>
+  <profile>
+    <id>m2e</id>
+    <activation>
+      <property>
+        <name>m2e.version</name>
+      </property>
+    </activation>
+    <properties>
+      <maven.compiler.release>8</maven.compiler.release>
+    </properties>
+    <build>
+      <pluginManagement>
+        <plugins>
+          <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+          <plugin>
+            <groupId>org.eclipse.m2e</groupId>
+            <artifactId>lifecycle-mapping</artifactId>
+            <version>1.0.0</version>
+            <configuration>
+              <lifecycleMappingMetadata>
+                <pluginExecutions>
+                  <pluginExecution>
+                    <pluginExecutionFilter>
+                      <groupId>org.apache.maven.plugins</groupId>
+                      <artifactId>maven-remote-resources-plugin</artifactId>
+                      <versionRange>[0,)</versionRange>
+                      <goals>
+                        <goal>process</goal>
+                      </goals>
+                    </pluginExecutionFilter>
+                    <action>
+                      <ignore />
+                    </action>
+                  </pluginExecution>
+                </pluginExecutions>
+              </lifecycleMappingMetadata>
+            </configuration>
+          </plugin>
+        </plugins>
+      </pluginManagement>
+    </build>
+  </profile>
   <profile>
     <id>jdk-release-flag</id>
     <activation>
ContinuousMoru.java

@@ -112,44 +112,45 @@ public void map(Key key, Value data, Context context) throws IOException, Interr
 
   @Override
   public int run(String[] args) throws Exception {
-
-    ContinuousEnv env = new ContinuousEnv(args);
-
-    Job job = Job.getInstance(getConf(),
-        this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
-    job.setJarByClass(this.getClass());
-    job.setInputFormatClass(AccumuloInputFormat.class);
-
-    int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
-    Set<Range> ranges = env.getAccumuloClient().tableOperations()
-        .splitRangeByTablets(env.getAccumuloTableName(), new Range(), maxMaps);
-
-    AccumuloInputFormat.configure().clientProperties(env.getClientProps())
-        .table(env.getAccumuloTableName()).ranges(ranges).autoAdjustRanges(false).store(job);
-
-    job.setMapperClass(CMapper.class);
-    job.setNumReduceTasks(0);
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
-
-    AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
-        .defaultTable(env.getAccumuloTableName()).store(job);
-
-    Configuration conf = job.getConfiguration();
-    conf.setLong(MIN, env.getRowMin());
-    conf.setLong(MAX, env.getRowMax());
-    conf.setInt(MAX_CF, env.getMaxColF());
-    conf.setInt(MAX_CQ, env.getMaxColQ());
-    conf.set(CI_ID, UUID.randomUUID().toString());
-    conf.set("mapreduce.job.classloader", "true");
-
-    job.waitForCompletion(true);
-    return job.isSuccessful() ? 0 : 1;
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
+
+      Job job = Job.getInstance(getConf(),
+          this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+      job.setJarByClass(this.getClass());
+      job.setInputFormatClass(AccumuloInputFormat.class);
+
+      int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
+      Set<Range> ranges = env.getAccumuloClient().tableOperations()
+          .splitRangeByTablets(env.getAccumuloTableName(), new Range(), maxMaps);
+
+      AccumuloInputFormat.configure().clientProperties(env.getClientProps())
+          .table(env.getAccumuloTableName()).ranges(ranges).autoAdjustRanges(false).store(job);
+
+      job.setMapperClass(CMapper.class);
+      job.setNumReduceTasks(0);
+      job.setOutputFormatClass(AccumuloOutputFormat.class);
+
+      AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
+          .defaultTable(env.getAccumuloTableName()).store(job);
+
+      Configuration conf = job.getConfiguration();
+      conf.setLong(MIN, env.getRowMin());
+      conf.setLong(MAX, env.getRowMax());
+      conf.setInt(MAX_CF, env.getMaxColF());
+      conf.setInt(MAX_CQ, env.getMaxColQ());
+      conf.set(CI_ID, UUID.randomUUID().toString());
+      conf.set("mapreduce.job.classloader", "true");
+
+      job.waitForCompletion(true);
+      return job.isSuccessful() ? 0 : 1;
+    }
   }
 
   public static void main(String[] args) throws Exception {
-    ContinuousEnv env = new ContinuousEnv(args);
-    int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousMoru(), args);
-    if (res != 0)
-      System.exit(res);
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
+      int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousMoru(), args);
+      if (res != 0)
+        System.exit(res);
+    }
   }
 }
ContinuousVerify.java

@@ -140,69 +140,71 @@ public void reduce(LongWritable key, Iterable<VLongWritable> values, Context con
   @Override
   public int run(String[] args) throws Exception {
-
-    ContinuousEnv env = new ContinuousEnv(args);
-
-    String tableName = env.getAccumuloTableName();
-
-    Job job = Job.getInstance(getConf(),
-        this.getClass().getSimpleName() + "_" + tableName + "_" + System.currentTimeMillis());
-    job.setJarByClass(this.getClass());
-
-    job.setInputFormatClass(AccumuloInputFormat.class);
-
-    boolean scanOffline = Boolean
-        .parseBoolean(env.getTestProperty(TestProps.CI_VERIFY_SCAN_OFFLINE));
-    int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
-    int reducers = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_REDUCERS));
-    String outputDir = env.getTestProperty(TestProps.CI_VERIFY_OUTPUT_DIR);
-
-    Set<Range> ranges;
-    String clone = "";
-    AccumuloClient client = env.getAccumuloClient();
-    String table;
-
-    if (scanOffline) {
-      Random random = new Random();
-      clone = tableName + "_" + String.format("%016x", (random.nextLong() & 0x7fffffffffffffffL));
-      client.tableOperations().clone(tableName, clone, true, new HashMap<>(), new HashSet<>());
-      ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
-      client.tableOperations().offline(clone);
-      table = clone;
-    } else {
-      ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
-      table = tableName;
-    }
-
-    AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(table)
-        .ranges(ranges).autoAdjustRanges(false).offlineScan(scanOffline).store(job);
-
-    job.setMapperClass(CMapper.class);
-    job.setMapOutputKeyClass(LongWritable.class);
-    job.setMapOutputValueClass(VLongWritable.class);
-
-    job.setReducerClass(CReducer.class);
-    job.setNumReduceTasks(reducers);
-
-    job.setOutputFormatClass(TextOutputFormat.class);
-
-    job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", scanOffline);
-    job.getConfiguration().set("mapreduce.job.classloader", "true");
-
-    TextOutputFormat.setOutputPath(job, new Path(outputDir));
-
-    job.waitForCompletion(true);
-
-    if (scanOffline) {
-      client.tableOperations().delete(clone);
-    }
-    return job.isSuccessful() ? 0 : 1;
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
+
+      String tableName = env.getAccumuloTableName();
+
+      Job job = Job.getInstance(getConf(),
+          this.getClass().getSimpleName() + "_" + tableName + "_" + System.currentTimeMillis());
+      job.setJarByClass(this.getClass());
+
+      job.setInputFormatClass(AccumuloInputFormat.class);
+
+      boolean scanOffline = Boolean
+          .parseBoolean(env.getTestProperty(TestProps.CI_VERIFY_SCAN_OFFLINE));
+      int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
+      int reducers = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_REDUCERS));
+      String outputDir = env.getTestProperty(TestProps.CI_VERIFY_OUTPUT_DIR);
+
+      Set<Range> ranges;
+      String clone = "";
+      AccumuloClient client = env.getAccumuloClient();
+      String table;
+
+      if (scanOffline) {
+        Random random = new Random();
+        clone = tableName + "_" + String.format("%016x", (random.nextLong() & 0x7fffffffffffffffL));
+        client.tableOperations().clone(tableName, clone, true, new HashMap<>(), new HashSet<>());
+        ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
+        client.tableOperations().offline(clone);
+        table = clone;
+      } else {
+        ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
+        table = tableName;
+      }
+
+      AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(table)
+          .ranges(ranges).autoAdjustRanges(false).offlineScan(scanOffline).store(job);
+
+      job.setMapperClass(CMapper.class);
+      job.setMapOutputKeyClass(LongWritable.class);
+      job.setMapOutputValueClass(VLongWritable.class);
+
+      job.setReducerClass(CReducer.class);
+      job.setNumReduceTasks(reducers);
+
+      job.setOutputFormatClass(TextOutputFormat.class);
+
+      job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", scanOffline);
+      job.getConfiguration().set("mapreduce.job.classloader", "true");
+
+      TextOutputFormat.setOutputPath(job, new Path(outputDir));
+
+      job.waitForCompletion(true);
+
+      if (scanOffline) {
+        client.tableOperations().delete(clone);
+      }
+      return job.isSuccessful() ? 0 : 1;
+    }
   }
 
   public static void main(String[] args) throws Exception {
-    ContinuousEnv env = new ContinuousEnv(args);
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
 
-    int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousVerify(), args);
-    if (res != 0)
-      System.exit(res);
+      int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousVerify(), args);
+      if (res != 0)
+        System.exit(res);
+    }
   }
 }
@@ -41,6 +41,7 @@ static class Opts extends ClientOpts {
     String failures = null;
   }
 
+  @SuppressWarnings("deprecation")
   public static void main(String[] args)
       throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException {
     final FileSystem fs = FileSystem.get(new Configuration());
VerifyIngest.java

@@ -62,10 +62,9 @@ public static void main(String[] args) throws Exception {
     opts.parseArgs(VerifyIngest.class.getName(), args);
     try (AccumuloClient client = Accumulo.newClient().from(opts.getClientProps()).build()) {
       if (opts.trace) {
-        String name = VerifyIngest.class.getSimpleName();
         /*
-         * DistributedTrace.enable(); Trace.on(name); Trace.data("cmdLine",
-         * Arrays.asList(args).toString());
+         * String name = VerifyIngest.class.getSimpleName(); DistributedTrace.enable();
+         * Trace.on(name); Trace.data("cmdLine", Arrays.asList(args).toString());
          */
       }
