This repository has been archived by the owner on Sep 20, 2022. It is now read-only.

Commit: Applied spotless-maven-plugin formatter
myui committed Apr 27, 2018
1 parent 1efe60b commit 607cc4f
Showing 41 changed files with 296 additions and 245 deletions.
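For context, spotless is wired into a build as a Maven plugin and applied with "mvn spotless:apply" ("mvn spotless:check" fails the build on violations instead). The pom.xml fragment below is a minimal sketch of such a setup, not the repository's actual configuration; the version number and the formatter file path are assumptions:

    <!-- Hypothetical spotless-maven-plugin setup; version and formatter path are assumed -->
    <plugin>
      <groupId>com.diffplug.spotless</groupId>
      <artifactId>spotless-maven-plugin</artifactId>
      <version>1.13.0</version>
      <configuration>
        <java>
          <eclipse>
            <!-- Eclipse-style formatter definition checked into the repository -->
            <file>${basedir}/resources/eclipse-style.xml</file>
          </eclipse>
        </java>
      </configuration>
    </plugin>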
core/src/main/java/hivemall/sketch/bloom/BloomAndUDF.java (3 changes: 1 addition & 2 deletions)
@@ -30,8 +30,7 @@
import org.apache.hadoop.util.bloom.DynamicBloomFilter;
import org.apache.hadoop.util.bloom.Filter;

-@Description(
-name = "bloom_and",
+@Description(name = "bloom_and",
value = "_FUNC_(string bloom1, string bloom2) - Returns the logical AND of two bloom filters")
@UDFType(deterministic = true, stateful = false)
public final class BloomAndUDF extends UDF {
core/src/main/java/hivemall/sketch/bloom/BloomContainsUDF.java
@@ -32,8 +32,7 @@
import org.apache.hadoop.util.bloom.Filter;
import org.apache.hadoop.util.bloom.Key;

-@Description(
-name = "bloom_contains",
+@Description(name = "bloom_contains",
value = "_FUNC_(string bloom, string key) - Returns true if the bloom filter contains the given key")
@UDFType(deterministic = true, stateful = false)
public final class BloomContainsUDF extends UDF {
core/src/main/java/hivemall/sketch/bloom/BloomOrUDF.java (3 changes: 1 addition & 2 deletions)
@@ -30,8 +30,7 @@
import org.apache.hadoop.util.bloom.DynamicBloomFilter;
import org.apache.hadoop.util.bloom.Filter;

-@Description(
-name = "bloom_or",
+@Description(name = "bloom_or",
value = "_FUNC_(string bloom1, string bloom2) - Returns the logical OR of two bloom filters")
@UDFType(deterministic = true, stateful = false)
public final class BloomOrUDF extends UDF {
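A pattern that recurs throughout the commit: long argument lists and string concatenations are re-wrapped at a fixed column, which points to a line-length limit in the formatter definition. As a hedged illustration (the 100-column value and the profile name are assumptions inferred from the wrapping, not taken from the repository), an Eclipse formatter file would encode such a limit as:

    <profiles version="13">
      <profile kind="CodeFormatterProfile" name="Hivemall" version="13">
        <!-- Assumed line-length limit, inferred from the re-wrapped lines in this diff -->
        <setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/>
      </profile>
    </profiles>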
core/src/main/java/hivemall/smile/classification/DecisionTree.java
@@ -19,6 +19,7 @@

import static hivemall.smile.utils.SmileExtUtils.resolveFeatureName;
import static hivemall.smile.utils.SmileExtUtils.resolveName;

import hivemall.annotations.VisibleForTesting;
import hivemall.math.matrix.Matrix;
import hivemall.math.matrix.ints.ColumnMajorIntMatrix;
@@ -36,6 +37,8 @@
import hivemall.utils.lang.StringUtils;
import hivemall.utils.lang.mutable.MutableInt;
import hivemall.utils.sampling.IntReservoirSampler;
+import smile.classification.Classifier;
+import smile.math.Math;

import java.io.Externalizable;
import java.io.IOException;
@@ -53,9 +56,6 @@
import org.roaringbitmap.IntConsumer;
import org.roaringbitmap.RoaringBitmap;

-import smile.classification.Classifier;
-import smile.math.Math;

/**
* Decision tree for classification. A decision tree can be learned by splitting the training set
* into subsets based on an attribute value test. This process is repeated on each derived subset in
core/src/main/java/hivemall/smile/regression/RegressionTree.java (10 changes: 5 additions & 5 deletions)
@@ -18,6 +18,7 @@
package hivemall.smile.regression;

import static hivemall.smile.utils.SmileExtUtils.resolveFeatureName;

import hivemall.annotations.VisibleForTesting;
import hivemall.math.matrix.Matrix;
import hivemall.math.matrix.ints.ColumnMajorIntMatrix;
@@ -36,6 +37,10 @@
import hivemall.utils.lang.StringUtils;
import hivemall.utils.lang.mutable.MutableInt;
import hivemall.utils.math.MathUtils;
+import smile.math.Math;
+import smile.regression.GradientTreeBoost;
+import smile.regression.RandomForest;
+import smile.regression.Regression;

import java.io.Externalizable;
import java.io.IOException;
@@ -51,11 +56,6 @@

import org.apache.hadoop.hive.ql.metadata.HiveException;

-import smile.math.Math;
-import smile.regression.GradientTreeBoost;
-import smile.regression.RandomForest;
-import smile.regression.Regression;

/**
* Decision tree for regression. A decision tree can be learned by splitting the training set into
* subsets based on an attribute value test. This process is repeated on each derived subset in a
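In DecisionTree.java and RegressionTree.java above, the smile.* imports move from below the org.* block up next to the hivemall.* imports, i.e. the formatter also enforces an import order. A spotless importOrder fragment with that effect might look like the following sketch; the exact group list is an assumption, not the repository's actual setting:

    <java>
      <importOrder>
        <!-- Assumed grouping: project and bundled-library imports ahead of java/javax and org/com -->
        <order>hivemall,smile,java,javax,org,com</order>
      </importOrder>
    </java>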
@@ -65,7 +65,8 @@ public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgu
this.forwardObjs = new Writable[] {result};

List<String> fieldNames = Arrays.asList("avg");
-List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
+PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);

return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
core/src/main/java/hivemall/tools/TryCastUDF.java (5 changes: 3 additions & 2 deletions)
@@ -32,8 +32,9 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;

@Description(name = "try_cast", value = "_FUNC_(ANY src, const string typeName)"
+ " - Explicitly cast a value as a type. Returns null if cast fails.",
@Description(name = "try_cast",
value = "_FUNC_(ANY src, const string typeName)"
+ " - Explicitly cast a value as a type. Returns null if cast fails.",
extended = "Usage: select try_cast(array(1.0,2.0,3.0), 'array<string>')\n"
+ " select try_cast(map('A',10,'B',20,'C',30), 'map<string,double>')")
@UDFType(deterministic = true, stateful = false)
core/src/main/java/hivemall/tools/array/ArrayAppendUDF.java (6 changes: 4 additions & 2 deletions)
@@ -53,14 +53,16 @@ public final class ArrayAppendUDF extends GenericUDF {
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
this.listInspector = HiveUtils.asListOI(argOIs[0]);
-this.listElemInspector = HiveUtils.asPrimitiveObjectInspector(listInspector.getListElementObjectInspector());
+this.listElemInspector =
+HiveUtils.asPrimitiveObjectInspector(listInspector.getListElementObjectInspector());
this.primInspector = HiveUtils.asPrimitiveObjectInspector(argOIs[1]);
if (listElemInspector.getPrimitiveCategory() != primInspector.getPrimitiveCategory()) {
throw new UDFArgumentException(
"array_append expects the list type to match the type of the value being appended");
}
this.returnWritables = listElemInspector.preferWritable();
-return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(listElemInspector));
+return ObjectInspectorFactory.getStandardListObjectInspector(
+ObjectInspectorUtils.getStandardObjectInspector(listElemInspector));
}

@Nullable
core/src/main/java/hivemall/tools/array/ArrayConcatUDF.java (8 changes: 4 additions & 4 deletions)
@@ -65,10 +65,10 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
break;
}
default:
throw new UDFArgumentTypeException(0, "Argument " + i
+ " of function CONCAT_ARRAY must be " + LIST_TYPE_NAME + "<"
+ Category.PRIMITIVE + ">, but " + arguments[0].getTypeName()
+ " was found.");
throw new UDFArgumentTypeException(0,
"Argument " + i + " of function CONCAT_ARRAY must be " + LIST_TYPE_NAME
+ "<" + Category.PRIMITIVE + ">, but " + arguments[0].getTypeName()
+ " was found.");
}
}

core/src/main/java/hivemall/tools/array/ArrayFlattenUDF.java (11 changes: 6 additions & 5 deletions)
@@ -48,20 +48,21 @@ public final class ArrayFlattenUDF extends GenericUDF {
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
if (argOIs.length != 1) {
throw new UDFArgumentException("array_flatten expects exactly one argument: "
+ argOIs.length);
throw new UDFArgumentException(
"array_flatten expects exactly one argument: " + argOIs.length);
}

this.listOI = HiveUtils.asListOI(argOIs[0]);
ObjectInspector listElemOI = listOI.getListElementObjectInspector();
if (listElemOI.getCategory() != Category.LIST) {
throw new UDFArgumentException("array_flatten takes array of array for the argument: "
+ listOI.toString());
throw new UDFArgumentException(
"array_flatten takes array of array for the argument: " + listOI.toString());
}
this.nextedListOI = HiveUtils.asListOI(listElemOI);
this.elemOI = nextedListOI.getListElementObjectInspector();

-return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(elemOI));
+return ObjectInspectorFactory.getStandardListObjectInspector(
+ObjectInspectorUtils.getStandardObjectInspector(elemOI));
}

@Override
core/src/main/java/hivemall/tools/array/ArrayUnionUDF.java (9 changes: 5 additions & 4 deletions)
@@ -67,16 +67,17 @@ public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentEx
ListObjectInspector checkOI = HiveUtils.asListOI(argOIs[i]);
if (!ObjectInspectorUtils.compareTypes(arg0ElemOI,
checkOI.getListElementObjectInspector())) {
throw new UDFArgumentException("Array types does not match: "
+ arg0OI.getTypeName() + " != " + checkOI.getTypeName());
throw new UDFArgumentException("Array types does not match: " + arg0OI.getTypeName()
+ " != " + checkOI.getTypeName());
}
listOIs[i] = checkOI;
}

this._listOIs = listOIs;

-return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(
-arg0ElemOI, ObjectInspectorCopyOption.WRITABLE));
+return ObjectInspectorFactory.getStandardListObjectInspector(
+ObjectInspectorUtils.getStandardObjectInspector(arg0ElemOI,
+ObjectInspectorCopyOption.WRITABLE));
}

@Override
core/src/main/java/hivemall/tools/array/ConditionalEmitUDTF.java
@@ -79,7 +79,8 @@ public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgu
this.condElemOI = HiveUtils.asBooleanOI(conditionsOI.getListElementObjectInspector());

this.featuresOI = HiveUtils.asListOI(argOIs[1]);
-this.featureElemOI = HiveUtils.asPrimitiveObjectInspector(featuresOI.getListElementObjectInspector());
+this.featureElemOI =
+HiveUtils.asPrimitiveObjectInspector(featuresOI.getListElementObjectInspector());

List<String> fieldNames = Arrays.asList("feature");
List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(featureElemOI);
core/src/main/java/hivemall/tools/array/SelectKBestUDF.java (13 changes: 8 additions & 5 deletions)
@@ -82,7 +82,8 @@ public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentExcep
this.featuresOI = HiveUtils.asListOI(OIs[0]);
this.featureOI = HiveUtils.asDoubleCompatibleOI(featuresOI.getListElementObjectInspector());
this.importanceListOI = HiveUtils.asListOI(OIs[1]);
-this.importanceElemOI = HiveUtils.asDoubleCompatibleOI(importanceListOI.getListElementObjectInspector());
+this.importanceElemOI =
+HiveUtils.asDoubleCompatibleOI(importanceListOI.getListElementObjectInspector());

this._k = HiveUtils.getConstInt(OIs[2]);
Preconditions.checkArgument(_k >= 1, UDFArgumentException.class);
@@ -92,14 +93,15 @@ public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentExcep
}
this._result = result;

-return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+return ObjectInspectorFactory.getStandardListObjectInspector(
+PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
}

@Override
public List<DoubleWritable> evaluate(DeferredObject[] dObj) throws HiveException {
final double[] features = HiveUtils.asDoubleArray(dObj[0].get(), featuresOI, featureOI);
-final double[] importanceList = HiveUtils.asDoubleArray(dObj[1].get(), importanceListOI,
-importanceElemOI);
+final double[] importanceList =
+HiveUtils.asDoubleArray(dObj[1].get(), importanceListOI, importanceElemOI);

Preconditions.checkNotNull(features, UDFArgumentException.class);
Preconditions.checkNotNull(importanceList, UDFArgumentException.class);
@@ -109,7 +111,8 @@ public List<DoubleWritable> evaluate(DeferredObject[] dObj) throws HiveException

int[] topKIndices = _topKIndices;
if (topKIndices == null) {
-final List<Map.Entry<Integer, Double>> list = new ArrayList<Map.Entry<Integer, Double>>();
+final List<Map.Entry<Integer, Double>> list =
+new ArrayList<Map.Entry<Integer, Double>>();
for (int i = 0; i < importanceList.length; i++) {
list.add(new AbstractMap.SimpleEntry<Integer, Double>(i, importanceList[i]));
}
core/src/main/java/hivemall/tools/datetime/SessionizeUDF.java
@@ -29,8 +29,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

-@Description(
-name = "sessionize",
+@Description(name = "sessionize",
value = "_FUNC_(long timeInSec, long thresholdInSec [, String subject])"
+ "- Returns a UUID string of a session.",
extended = "SELECT sessionize(time, 3600, ip_addr) as session_id, time, ip_addr FROM (\n"
core/src/main/java/hivemall/tools/json/FromJsonUDF.java (17 changes: 10 additions & 7 deletions)
@@ -43,8 +43,7 @@
import org.apache.hadoop.io.Text;
import org.apache.hive.hcatalog.data.HCatRecordObjectInspectorFactory;

-@Description(
-name = "from_json",
+@Description(name = "from_json",
value = "_FUNC_(string jsonString, const string returnTypes [, const array<string>|const string columnNames])"
+ " - Return Hive object.")
@UDFType(deterministic = true, stateful = false)
@@ -59,8 +58,8 @@ public final class FromJsonUDF extends GenericUDF {
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
if (argOIs.length != 2 && argOIs.length != 3) {
throw new UDFArgumentException("from_json takes two or three arguments: "
+ argOIs.length);
throw new UDFArgumentException(
"from_json takes two or three arguments: " + argOIs.length);
}

this.jsonOI = HiveUtils.asStringOI(argOIs[0]);
@@ -95,7 +94,8 @@ private static ObjectInspector getObjectInspector(@Nonnull final List<TypeInfo>
final int numColumns = columnTypes.size();
if (numColumns == 1) {
TypeInfo type = columnTypes.get(0);
-returnOI = HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type);
+returnOI =
+HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type);
} else {
if (columnNames == null) {
columnNames = new ArrayList<>(numColumns);
@@ -111,7 +111,9 @@ private static ObjectInspector getObjectInspector(@Nonnull final List<TypeInfo>
final ObjectInspector[] fieldOIs = new ObjectInspector[numColumns];
for (int i = 0; i < fieldOIs.length; i++) {
TypeInfo type = columnTypes.get(i);
-fieldOIs[i] = HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type);
+fieldOIs[i] =
+HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(
+type);
}
returnOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames,
Arrays.asList(fieldOIs));
@@ -132,7 +134,8 @@ public Object evaluate(DeferredObject[] args) throws HiveException {
result = JsonSerdeUtils.deserialize(jsonString, columnNames, columnTypes);
} catch (Throwable e) {
throw new HiveException("Failed to deserialize Json: \n" + jsonString.toString() + '\n'
-+ ExceptionUtils.prettyPrintStackTrace(e), e);
++ ExceptionUtils.prettyPrintStackTrace(e),
+e);
}
return result;
}
core/src/main/java/hivemall/tools/json/ToJsonUDF.java (10 changes: 5 additions & 5 deletions)
@@ -37,8 +37,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

-@Description(
-name = "to_json",
+@Description(name = "to_json",
value = "_FUNC_(ANY object [, const array<string>|const string columnNames]) - Returns Json string")
@UDFType(deterministic = true, stateful = false)
public final class ToJsonUDF extends GenericUDF {
@@ -51,7 +50,8 @@ public final class ToJsonUDF extends GenericUDF {
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
if (argOIs.length != 1 && argOIs.length != 2) {
throw new UDFArgumentException("from_json takes one or two arguments: " + argOIs.length);
throw new UDFArgumentException(
"from_json takes one or two arguments: " + argOIs.length);
}

this.objOI = argOIs[0];
@@ -81,8 +81,8 @@ public Text evaluate(DeferredObject[] args) throws HiveException {
try {
return JsonSerdeUtils.serialize(obj, objOI, columnNames);
} catch (Throwable e) {
throw new HiveException("Failed to serialize: " + obj + '\n'
+ ExceptionUtils.prettyPrintStackTrace(e), e);
throw new HiveException(
"Failed to serialize: " + obj + '\n' + ExceptionUtils.prettyPrintStackTrace(e), e);
}
}

core/src/main/java/hivemall/tools/map/MapKeyValuesUDF.java (19 changes: 11 additions & 8 deletions)
@@ -37,8 +37,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

@Description(name = "map_key_values", value = "_FUNC_(map) - "
+ "Returns a array of key-value pairs.")
@Description(name = "map_key_values",
value = "_FUNC_(map) - " + "Returns a array of key-value pairs.")
@UDFType(deterministic = true, stateful = false)
public final class MapKeyValuesUDF extends GenericUDF {

@@ -49,11 +49,13 @@ public final class MapKeyValuesUDF extends GenericUDF {
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 1) {
throw new UDFArgumentLengthException("The function MAP_KEYS only accepts one argument.");
throw new UDFArgumentLengthException(
"The function MAP_KEYS only accepts one argument.");
} else if (!(arguments[0] instanceof MapObjectInspector)) {
throw new UDFArgumentTypeException(0, "\"" + Category.MAP.toString().toLowerCase()
+ "\" is expected at function MAP_KEYS, " + "but \""
+ arguments[0].getTypeName() + "\" is found");
throw new UDFArgumentTypeException(0,
"\"" + Category.MAP.toString().toLowerCase()
+ "\" is expected at function MAP_KEYS, " + "but \""
+ arguments[0].getTypeName() + "\" is found");
}

this.mapOI = (MapObjectInspector) arguments[0];
@@ -65,8 +67,9 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
structFieldNames.add("value");
structFieldObjectInspectors.add(mapOI.getMapValueObjectInspector());

-return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardStructObjectInspector(
-structFieldNames, structFieldObjectInspectors));
+return ObjectInspectorFactory.getStandardListObjectInspector(
+ObjectInspectorFactory.getStandardStructObjectInspector(structFieldNames,
+structFieldObjectInspectors));
}

@Override
core/src/main/java/hivemall/tools/map/MergeMapsUDAF.java (3 changes: 1 addition & 2 deletions)
@@ -39,8 +39,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

-@Description(
-name = "merge_maps",
+@Description(name = "merge_maps",
value = "_FUNC_(x) - Returns a map which contains the union of an aggregation of maps."
+ " Note that an existing value of a key can be replaced with the other duplicate key entry.",
extended = "SELECT merge_maps(m) FROM ( "
core/src/main/java/hivemall/tools/text/SplitWordsUDF.java (2 changes: 1 addition & 1 deletion)
@@ -28,7 +28,7 @@
import org.apache.hadoop.io.Text;

@Description(name = "split_words",
value = "_FUNC_(string query [, string regex]) - Returns an array<text> containing split strings")
value = "_FUNC_(string query [, string regex]) - Returns an array<text> containing splitted strings")
@UDFType(deterministic = true, stateful = false)
public final class SplitWordsUDF extends UDF {

