# [MINOR][BUILD] Fix Java linter errors
## What changes were proposed in this pull request?

This PR cleans up a few Java linter errors for the Apache Spark 2.2 release.
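
Concretely, the fixes fall into a few Checkstyle categories that show up in the diff below: modifiers that Java already implies (annotation members are implicitly `public static final`, and a member interface is implicitly `static`), imports that are no longer referenced from code, Javadoc `@throws` tags that switch to fully qualified class names once their import is dropped, and over-long lines that get re-wrapped. As a minimal illustration of the redundant-modifier case (a sketch only, not Spark code):

```java
// Annotation members are implicitly public, static and final, so Checkstyle's
// RedundantModifier check flags the spelled-out modifiers.
@interface BeforeFix {
  public static final String NATURAL_INDEX_NAME = "__main__"; // flagged by the linter
}

@interface AfterFix {
  String NATURAL_INDEX_NAME = "__main__"; // same semantics, lint-clean
}
```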

## How was this patch tested?

```bash
$ dev/lint-java
Using `mvn` from path: /usr/local/bin/mvn
Checkstyle checks passed.
```

The result can also be checked on Travis CI [here](https://travis-ci.org/dongjoon-hyun/spark/builds/244297894).

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes apache#18345 from dongjoon-hyun/fix_lint_java_2.
dongjoon-hyun authored and srowen committed Jun 19, 2017
1 parent e538701 commit ecc5631
Showing 14 changed files with 16 additions and 30 deletions.
@@ -50,7 +50,7 @@
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface KVIndex {

public static final String NATURAL_INDEX_NAME = "__main__";
String NATURAL_INDEX_NAME = "__main__";

/**
* The name of the index to be created for the annotated entity. Must be unique within
@@ -18,9 +18,6 @@
package org.apache.spark.kvstore;

import java.io.Closeable;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;

/**
* Abstraction for a local key/value store for storing app data.
@@ -84,7 +81,7 @@ public interface KVStore extends Closeable {
*
* @param naturalKey The object's "natural key", which uniquely identifies it. Null keys
* are not allowed.
* @throws NoSuchElementException If an element with the given key does not exist.
* @throws java.util.NoSuchElementException If an element with the given key does not exist.
*/
<T> T read(Class<T> klass, Object naturalKey) throws Exception;

@@ -107,7 +104,7 @@ public interface KVStore extends Closeable {
* @param type The object's type.
* @param naturalKey The object's "natural key", which uniquely identifies it. Null keys
* are not allowed.
* @throws NoSuchElementException If an element with the given key does not exist.
* @throws java.util.NoSuchElementException If an element with the given key does not exist.
*/
void delete(Class<?> type, Object naturalKey) throws Exception;
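
The two `@throws` edits above pair with the import removals at the top of this file: `java.util.NoSuchElementException` was only referenced from Javadoc, and Checkstyle's unused-import check can report imports that are referenced only from Javadoc (depending on its `processJavadoc` setting), so the import goes away and the Javadoc switches to the fully qualified name. A minimal sketch of the resulting shape, not the actual Spark interface:

```java
public interface ExampleStore {

  /**
   * Reads a specific instance of an object.
   *
   * @throws java.util.NoSuchElementException If an element with the given key does not exist.
   */
  <T> T read(Class<T> klass, Object naturalKey) throws Exception;
}
```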

@@ -17,9 +17,6 @@

package org.apache.spark.kvstore;

import java.util.Iterator;
import java.util.Map;

import com.google.common.base.Preconditions;

/**
@@ -19,8 +19,6 @@

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
@@ -29,7 +29,6 @@
import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import org.fusesource.leveldbjni.JniDBFactory;
@@ -18,7 +18,6 @@
package org.apache.spark.kvstore;

import java.io.IOException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -18,17 +18,12 @@
package org.apache.spark.kvstore;

import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import org.iq80.leveldb.WriteBatch;

/**
@@ -25,11 +25,9 @@
import java.util.List;
import java.util.Random;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -50,7 +48,7 @@ public abstract class DBIteratorSuite {
private static List<CustomType1> clashingEntries;
private static KVStore db;

private static interface BaseComparator extends Comparator<CustomType1> {
private interface BaseComparator extends Comparator<CustomType1> {
/**
* Returns a comparator that falls back to natural order if this comparator's ordering
* returns equality for two elements. Used to mimic how the index sorts things internally.
@@ -20,9 +20,7 @@
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import static java.nio.charset.StandardCharsets.UTF_8;

import org.apache.commons.io.FileUtils;
import org.iq80.leveldb.DBIterator;
@@ -157,7 +157,7 @@ private class DownloadCallback implements StreamCallback {
private File targetFile = null;
private int chunkIndex;

public DownloadCallback(File targetFile, int chunkIndex) throws IOException {
DownloadCallback(File targetFile, int chunkIndex) throws IOException {
this.targetFile = targetFile;
this.channel = Channels.newChannel(new FileOutputStream(targetFile));
this.chunkIndex = chunkIndex;
@@ -364,7 +364,8 @@ private long[] mergeSpillsWithFileStream(
// Use a counting output stream to avoid having to close the underlying file and ask
// the file system for its size after each partition is written.
final CountingOutputStream mergedFileOutputStream = new CountingOutputStream(bos);
final int inputBufferSizeInBytes = (int) sparkConf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024;
final int inputBufferSizeInBytes =
(int) sparkConf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024;

boolean threwException = true;
try {
@@ -375,8 +376,9 @@
}
for (int partition = 0; partition < numPartitions; partition++) {
final long initialFileLength = mergedFileOutputStream.getByteCount();
// Shield the underlying output stream from close() and flush() calls, so that we can close the higher
// level streams to make sure all data is really flushed and internal state is cleaned.
// Shield the underlying output stream from close() and flush() calls, so that we can close
// the higher level streams to make sure all data is really flushed and internal state is
// cleaned.
OutputStream partitionOutput = new CloseAndFlushShieldOutputStream(
new TimeTrackingOutputStream(writeMetrics, mergedFileOutputStream));
partitionOutput = blockManager.serializerManager().wrapForEncryption(partitionOutput);
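
The remaining edits in this file are pure formatting: the buffer-size assignment and the stream-shielding comment exceeded the line-length limit enforced by `dev/lint-java`, so they are simply re-wrapped with no behavior change.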
@@ -121,7 +121,7 @@ public static void main(String[] args) {
// $example off$
userRecs.show();
movieRecs.show();

spark.stop();
}
}
@@ -124,7 +124,11 @@ private static void runBasicDataSourceExample(SparkSession spark) {
peopleDF.write().bucketBy(42, "name").sortBy("age").saveAsTable("people_bucketed");
// $example off:write_sorting_and_bucketing$
// $example on:write_partitioning$
usersDF.write().partitionBy("favorite_color").format("parquet").save("namesPartByColor.parquet");
usersDF
.write()
.partitionBy("favorite_color")
.format("parquet")
.save("namesPartByColor.parquet");
// $example off:write_partitioning$
// $example on:write_partition_and_bucket$
peopleDF
@@ -17,7 +17,6 @@

package org.apache.spark.sql.streaming;

import org.apache.spark.annotation.Experimental;
import org.apache.spark.annotation.InterfaceStability;
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes;
