[MINOR][BUILD] Fix Java linter errors
dongjoon-hyun committed Jan 12, 2018
1 parent b5042d7 commit 752daa3
Showing 4 changed files with 5 additions and 4 deletions.

@@ -82,7 +82,8 @@ public void free(MemoryBlock memory) {
       "page has already been freed";
     assert ((memory.pageNumber == MemoryBlock.NO_PAGE_NUMBER)
       || (memory.pageNumber == MemoryBlock.FREED_IN_TMM_PAGE_NUMBER)) :
-      "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator free()";
+      "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator " +
+        "free()";
 
     final long size = memory.size();
     if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) {
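
The hunk above only wraps an over-long assertion message so the line fits the project's Java line-length limit (assumed here to be the 100-character Checkstyle limit). A minimal standalone sketch of the same pattern, with a hypothetical class name rather than anything from the Spark sources; adjacent string literals joined with + are concatenated at compile time, so splitting the literal has no runtime cost:

// Minimal sketch (hypothetical class, not from the Spark sources): splitting an
// over-long string literal with '+' to satisfy a line-length rule. The constant
// pieces are folded into one string by the compiler.
public class LineLengthExample {
  private static final String FREE_PAGE_MESSAGE =
      "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator " +
      "free()";

  public static void main(String[] args) {
    System.out.println(FREE_PAGE_MESSAGE);
  }
}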

@@ -17,7 +17,6 @@
 
 package org.apache.spark.launcher;
 
-import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;

@@ -38,7 +38,7 @@ public class OrcColumnVector extends org.apache.spark.sql.vectorized.ColumnVector {
   private BytesColumnVector bytesData;
   private DecimalColumnVector decimalData;
   private TimestampColumnVector timestampData;
-  final private boolean isTimestamp;
+  private final boolean isTimestamp;
 
   private int batchSize;
 
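This hunk only reorders field modifiers: "final private" becomes "private final", the ordering recommended by the Java Language Specification and enforced by Checkstyle's ModifierOrder check (access modifier first, then static, then final). A small hypothetical example of the preferred ordering, not taken from Spark:

// Hypothetical example (not from Spark): modifiers in the JLS-recommended order
// that Checkstyle's ModifierOrder rule expects.
public class ModifierOrderExample {
  private static final int MAX_RETRIES = 3;  // preferred over "final static private"
  private final boolean enabled;

  public ModifierOrderExample(boolean enabled) {
    this.enabled = enabled;
  }

  public boolean isEnabled() {
    return enabled;
  }

  public static int maxRetries() {
    return MAX_RETRIES;
  }
}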

@@ -461,7 +461,8 @@ public void testCircularReferenceBean() {
   public void testUDF() {
     UserDefinedFunction foo = udf((Integer i, String s) -> i.toString() + s, DataTypes.StringType);
     Dataset<Row> df = spark.table("testData").select(foo.apply(col("key"), col("value")));
-    String[] result = df.collectAsList().stream().map(row -> row.getString(0)).toArray(String[]::new);
+    String[] result = df.collectAsList().stream().map(row -> row.getString(0))
+      .toArray(String[]::new);
     String[] expected = spark.table("testData").collectAsList().stream()
       .map(row -> row.get(0).toString() + row.getString(1)).toArray(String[]::new);
     Assert.assertArrayEquals(expected, result);
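
The last hunk just re-wraps the stream chain so each line stays under the length limit. As a self-contained sketch of the same idiom using only the JDK (hypothetical data, no Spark dependency): map a list through a Stream and collect it into a typed array with toArray(String[]::new), wrapping the chain across lines.

import java.util.Arrays;
import java.util.List;

// Standalone sketch (hypothetical data, plain JDK): the same wrap-the-chain style
// as the test above, collecting a Stream into a String[] via toArray(String[]::new).
public class StreamToArrayExample {
  public static void main(String[] args) {
    List<Integer> keys = Arrays.asList(1, 2, 3);
    String[] result = keys.stream()
        .map(key -> key + "-value")
        .toArray(String[]::new);
    System.out.println(Arrays.toString(result));  // prints [1-value, 2-value, 3-value]
  }
}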
