From 6c3808fbf094393dea37a02b44037f7665b88ffb Mon Sep 17 00:00:00 2001
From: Tim Ellison
Date: Tue, 16 Aug 2016 11:45:13 +0100
Subject: [PATCH] Enhancements to partitioners, keyedHash, and prime generator.

- Convert last few remaining ArrayList return types on partitioners to List
  interface, and deal with the flow through to calling methods.
- Enhance partition utils tests.
- Modify keyed hash to remove Math.abs() call on string hashCode.
- Move constant value to static var in prime generator.
---
 .../pirk/encryption/PrimeGenerator.java       |   6 +-
 .../hadoop/BytesArrayWritable.java            |   4 +-
 .../pirk/querier/wideskies/Querier.java       |   7 +-
 .../wideskies/decrypt/DecryptResponse.java    |   3 +-
 .../wideskies/encrypt/EncryptQuery.java       |   7 +-
 .../pirk/query/wideskies/QueryUtils.java      |  25 ++--
 .../wideskies/common/ComputeEncryptedRow.java |  29 ++---
 .../common/HashSelectorAndPartitionData.java  |  30 ++----
 .../wideskies/mapreduce/RowCalcReducer.java   |   4 +-
 .../wideskies/spark/ComputeResponse.java      |   7 +-
 .../responder/wideskies/spark/EncRowCalc.java |   9 +-
 .../spark/EncRowCalcPrecomputedCache.java     |  11 +-
 .../spark/HashSelectorsAndPartitionData.java  |  11 +-
 .../wideskies/standalone/Responder.java       |   3 +-
 .../partitioner/PrimitiveTypePartitioner.java |  28 +++--
 .../distributed/DistributedTestDriver.java    |   9 +-
 .../distributed/testsuite/DistTestSuite.java  |   8 +-
 .../org/apache/pirk/test/utils/BaseTests.java |  12 +--
 .../org/apache/pirk/test/utils/Inputs.java    |   5 +-
 .../pirk/test/utils/StandaloneQuery.java      |   3 +-
 .../java/org/apache/pirk/utils/KeyedHash.java |   7 +-
 .../pirk/general/PartitionUtilsTest.java      | 100 ++++++++----------
 22 files changed, 155 insertions(+), 173 deletions(-)

diff --git a/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java b/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
index ddfbd00c..cb1fe2ea 100644
--- a/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
+++ b/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
@@ -55,6 +55,8 @@ public class PrimeGenerator
 {
   private static final Logger logger = LoggerFactory.getLogger(PrimeGenerator.class);
 
+  private static final BigDecimal SQRT_2 = BigDecimal.valueOf(Math.sqrt(2));
+
   private static final HashMap<Integer,BigInteger> lowerBoundCache = new HashMap<>();
   private static final HashMap<Integer,BigInteger> minimumDifferenceCache = new HashMap<>();
 
@@ -88,7 +90,7 @@ public static BigInteger getSinglePrime(int bitLength, int certainty, Random rnd
     BigInteger lowerBound;
     if (!lowerBoundCache.containsKey(bitLength))
     {
-      lowerBound = BigDecimal.valueOf(Math.sqrt(2)).multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
+      lowerBound = SQRT_2.multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
       lowerBoundCache.put(bitLength, lowerBound);
     }
     else
@@ -149,7 +151,7 @@ public static BigInteger getSecondPrime(int bitLength, int certainty, Random rnd
     BigInteger lowerBound;
     if (!lowerBoundCache.containsKey(bitLength))
     {
-      lowerBound = BigDecimal.valueOf(Math.sqrt(2)).multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
+      lowerBound = SQRT_2.multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
       lowerBoundCache.put(bitLength, lowerBound);
     }
     else
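Aside (not part of the patch): the SQRT_2 hoist above caches the constant factor once at class-load time rather than recomputing Math.sqrt(2) and wrapping it in a BigDecimal on every call. A minimal, self-contained sketch of the bound being computed; the class name LowerBoundSketch is illustrative only:

    import java.math.BigDecimal;
    import java.math.BigInteger;

    public class LowerBoundSketch
    {
      // Computed once at class-load time, as in the patched PrimeGenerator.
      private static final BigDecimal SQRT_2 = BigDecimal.valueOf(Math.sqrt(2));

      // lowerBound = sqrt(2) * 2^(bitLength/2 - 1): the smallest value each
      // (bitLength/2)-bit prime may take so that the product of two such primes
      // is a full bitLength-bit modulus, since (sqrt(2) * 2^(k-1))^2 = 2^(2k-1).
      static BigInteger lowerBound(int bitLength)
      {
        return SQRT_2.multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
      }

      public static void main(String[] args)
      {
        System.out.println(lowerBound(64)); // 3037000499 ~ sqrt(2) * 2^31
      }
    }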
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java b/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
index 48078df1..ac94f2a3 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
@@ -21,8 +21,8 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.math.BigInteger;
-import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.List;
 
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -60,7 +60,7 @@ public BytesArrayWritable(byte[][] elements)
   /**
    * Constructor for use when underlying array will be ByteWritable representations of BigInteger objects
    */
-  public BytesArrayWritable(ArrayList<BigInteger> elements)
+  public BytesArrayWritable(List<BigInteger> elements)
   {
     super(BytesWritable.class);
 
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
index 2beed147..48be51d0 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 
 import org.apache.pirk.encryption.Paillier;
 import org.apache.pirk.query.wideskies.Query;
@@ -37,14 +38,14 @@ public class Querier implements Serializable, Storable
 
   private Paillier paillier = null; // Paillier encryption functionality
 
-  private ArrayList<String> selectors = null; // selectors
+  private List<String> selectors = null; // selectors
 
   // map to check the embedded selectors in the results for false positives;
   // if the selector is a fixed size < 32 bits, it is included as is
   // if the selector is of variable lengths
   private HashMap<Integer,String> embedSelectorMap = null;
 
-  public Querier(ArrayList<String> selectorsInput, Paillier paillierInput, Query queryInput, HashMap<Integer,String> embedSelectorMapInput)
+  public Querier(List<String> selectorsInput, Paillier paillierInput, Query queryInput, HashMap<Integer,String> embedSelectorMapInput)
   {
     selectors = selectorsInput;
 
@@ -65,7 +66,7 @@ public Paillier getPaillier()
     return paillier;
   }
 
-  public ArrayList<String> getSelectors()
+  public List<String> getSelectors()
   {
     return selectors;
   }
 
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
index 1ce62ecd..928c852d 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
@@ -26,6 +26,7 @@
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.ExecutorService;
@@ -87,7 +88,7 @@ public void decrypt(int numThreads) throws InterruptedException, PIRException
     QueryInfo queryInfo = response.getQueryInfo();
     Paillier paillier = querier.getPaillier();
 
-    ArrayList<String> selectors = querier.getSelectors();
+    List<String> selectors = querier.getSelectors();
     HashMap<Integer,String> embedSelectorMap = querier.getEmbedSelectorMap();
 
     // Perform decryption on the encrypted columns
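Aside (not part of the patch): these hunks carry out the commit's first bullet — declare against the List interface and keep the concrete ArrayList an implementation detail. A minimal sketch with a hypothetical SelectorSource class:

    import java.util.ArrayList;
    import java.util.List;

    public class SelectorSource
    {
      // Callers see only the List interface; the backing ArrayList can be swapped
      // (e.g. for Collections.unmodifiableList or LinkedList) without touching them.
      public static List<String> getSelectors()
      {
        List<String> selectors = new ArrayList<>();
        selectors.add("a.example.com");
        selectors.add("b.example.org");
        return selectors;
      }

      public static void main(String[] args)
      {
        for (String selector : getSelectors())
        {
          System.out.println(selector);
        }
      }
    }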
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
index e02dacc1..49958e42 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -56,14 +57,14 @@ public class EncryptQuery
 
   private Paillier paillier = null; // Paillier encryption functionality
 
-  private ArrayList<String> selectors = null; // selectors for the query
+  private List<String> selectors = null; // selectors for the query
 
   // Map to check the embedded selectors in the results for false positives;
   // if the selector is a fixed size < 32 bits, it is included as is
   // if the selector is of variable lengths
   private HashMap<Integer,String> embedSelectorMap = null;
 
-  public EncryptQuery(QueryInfo queryInfoInput, ArrayList<String> selectorsInput, Paillier paillierInput)
+  public EncryptQuery(QueryInfo queryInfoInput, List<String> selectorsInput, Paillier paillierInput)
   {
     queryInfo = queryInfoInput;
 
@@ -94,7 +95,7 @@ public Querier getQuerier()
     return querier;
   }
 
-  public ArrayList<String> getSelectors()
+  public List<String> getSelectors()
   {
     return selectors;
   }
diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java b/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
index 151d0129..7015b7c1 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
@@ -34,6 +34,7 @@
 import org.apache.pirk.schema.query.QuerySchema;
 import org.apache.pirk.schema.response.QueryResponseJSON;
 import org.apache.pirk.utils.KeyedHash;
+import org.apache.pirk.utils.PIRException;
 import org.apache.pirk.utils.StringUtils;
 import org.apache.pirk.utils.SystemConfiguration;
 import org.elasticsearch.hadoop.mr.WritableArrayWritable;
@@ -51,7 +52,7 @@ public class QueryUtils
   /**
    * Method to convert the given BigInteger raw data element partitions to a QueryResponseJSON object based upon the given queryType
    */
-  public static QueryResponseJSON extractQueryResponseJSON(QueryInfo queryInfo, QuerySchema qSchema, ArrayList<BigInteger> parts) throws Exception
+  public static QueryResponseJSON extractQueryResponseJSON(QueryInfo queryInfo, QuerySchema qSchema, List<BigInteger> parts) throws PIRException
   {
     QueryResponseJSON qrJSON = new QueryResponseJSON(queryInfo);
 
@@ -103,9 +104,9 @@ public static QueryResponseJSON extractQueryResponseJSON(QueryInfo queryInfo, Qu
   /**
    * Method to convert the given data element given by the JSONObject data element into the extracted BigInteger partitions based upon the given queryType
    */
-  public static ArrayList<BigInteger> partitionDataElement(QuerySchema qSchema, JSONObject jsonData, boolean embedSelector) throws Exception
+  public static List<BigInteger> partitionDataElement(QuerySchema qSchema, JSONObject jsonData, boolean embedSelector) throws PIRException
   {
-    ArrayList<BigInteger> parts = new ArrayList<>();
+    List<BigInteger> parts = new ArrayList<>();
 
     DataSchema dSchema = DataSchemaRegistry.get(qSchema.getDataSchemaName());
 
     // Add the embedded selector to the parts
@@ -164,10 +165,10 @@ public static ArrayList partitionDataElement(QuerySchema qSchema, JS
   /**
    * Method to convert the given data element given by the MapWritable data element into the extracted BigInteger partitions based upon the given queryType
    */
-  public static ArrayList<BigInteger> partitionDataElement(MapWritable dataMap, QuerySchema qSchema, DataSchema dSchema, boolean embedSelector)
-      throws Exception
+  public static List<BigInteger> partitionDataElement(MapWritable dataMap, QuerySchema qSchema, DataSchema dSchema, boolean embedSelector)
+      throws PIRException
   {
-    ArrayList<BigInteger> parts = new ArrayList<>();
+    List<BigInteger> parts = new ArrayList<>();
 
     logger.debug("queryType = " + qSchema.getSchemaName());
 
@@ -232,7 +233,7 @@ else if (dataElement instanceof Text)
   /**
    * Method to convert the given selector into the extracted BigInteger partitions
    */
-  public static List<BigInteger> embeddedSelectorToPartitions(String selector, String type, DataPartitioner partitioner) throws Exception
+  public static List<BigInteger> embeddedSelectorToPartitions(String selector, String type, DataPartitioner partitioner) throws PIRException
   {
     List<BigInteger> parts;
 
@@ -255,7 +256,7 @@ public static List embeddedSelectorToPartitions(String selector, Str
   /**
    * Method get the embedded selector from a given selector
    *
   */
-  public static String getEmbeddedSelector(String selector, String type, DataPartitioner partitioner) throws Exception
+  public static String getEmbeddedSelector(String selector, String type, DataPartitioner partitioner) throws PIRException
   {
     String embeddedSelector;
 
@@ -276,7 +277,7 @@ public static String getEmbeddedSelector(String selector, String type, DataParti
   /**
    * Reconstructs the String version of the embedded selector from its partitions
    */
-  public static String getEmbeddedSelectorFromPartitions(ArrayList<BigInteger> parts, int partsIndex, String type, Object partitioner) throws Exception
+  public static String getEmbeddedSelectorFromPartitions(List<BigInteger> parts, int partsIndex, String type, Object partitioner) throws PIRException
   {
     String embeddedSelector;
 
@@ -339,7 +340,7 @@ public static String getSelectorByQueryTypeJSON(QuerySchema qSchema, JSONObject
 
     if (dSchema.isArrayElement(fieldName))
     {
-      ArrayList<String> elementArray = StringUtils.jsonArrayStringToArrayList(dataMap.get(fieldName).toString());
+      List<String> elementArray = StringUtils.jsonArrayStringToArrayList(dataMap.get(fieldName).toString());
       selector = elementArray.get(0);
     }
     else
@@ -350,12 +351,12 @@ public static String getSelectorByQueryTypeJSON(QuerySchema qSchema, JSONObject
   }
 
   // For debug
-  private static void printParts(ArrayList<BigInteger> parts)
+  private static void printParts(List<BigInteger> parts)
   {
     int i = 0;
     for (BigInteger part : parts)
     {
-      logger.debug("parts(" + i + ") = " + parts.get(i).intValue() + " parts bits = " + parts.get(i).toString(2));
+      logger.debug("parts(" + i + ") = " + part.intValue() + " parts bits = " + part.toString(2));
       ++i;
     }
   }
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java b/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
index 5eed275d..b5796ad5 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
@@ -24,6 +24,7 @@
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.concurrent.ExecutionException;
 
 import org.apache.hadoop.fs.FileSystem;
@@ -34,13 +35,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import scala.Tuple2;
-import scala.Tuple3;
-
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 
+import scala.Tuple2;
+import scala.Tuple3;
+
 /**
  * Class to compute the encrypted row elements for a query from extracted data partitions
  *
@@ -99,10 +100,10 @@ public static void loadCacheFromHDFS(FileSystem fs, String hdfsFileName, Query q
    * Emits {@code Tuple2<>}
    *
    */
-  public static ArrayList<Tuple2<Long,BigInteger>> computeEncRow(Iterable<BytesArrayWritable> dataPartitionsIter, Query query, int rowIndex,
+  public static List<Tuple2<Long,BigInteger>> computeEncRow(Iterable<BytesArrayWritable> dataPartitionsIter, Query query, int rowIndex,
       boolean limitHitsPerSelector, int maxHitsPerSelector, boolean useCache) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
+    List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     // Pull the corresponding encrypted row query
     BigInteger rowQuery = query.getQueryElement(rowIndex);
@@ -154,7 +155,7 @@ public static ArrayList<Tuple2<Long,BigInteger>> computeEncRow(Iterable
 
   /**
-   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@link ArrayList<BigInteger>}
+   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@link List<BigInteger>}
    *
    * For each row (as indicated by key = hash(selector)), iterates over the dataPartitions and calculates the column values.
    *
@@ -163,17 +164,17 @@ public static ArrayList<Tuple2<Long,BigInteger>> computeEncRow(Iterable
    * Emits {@code Tuple2<>}
    *
    */
-  public static ArrayList<Tuple2<Long,BigInteger>> computeEncRowBI(Iterable<ArrayList<BigInteger>> dataPartitionsIter, Query query, int rowIndex,
+  public static List<Tuple2<Long,BigInteger>> computeEncRowBI(Iterable<List<BigInteger>> dataPartitionsIter, Query query, int rowIndex,
       boolean limitHitsPerSelector, int maxHitsPerSelector, boolean useCache) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
+    List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     // Pull the corresponding encrypted row query
     BigInteger rowQuery = query.getQueryElement(rowIndex);
 
     long colCounter = 0;
     int elementCounter = 0;
-    for (ArrayList<BigInteger> dataPartitions : dataPartitionsIter)
+    for (List<BigInteger> dataPartitions : dataPartitionsIter)
     {
       // long startTime = System.currentTimeMillis();
 
@@ -235,14 +236,14 @@ public static ArrayList<Tuple2<Long,BigInteger>> computeEncRowBI(Iterable
    * Emits {@code Tuple2<>}
    *
    */
-  public static ArrayList<Tuple2<Long,BigInteger>> computeEncRowCacheInput(Iterable<ArrayList<BigInteger>> dataPartitionsIter,
+  public static List<Tuple2<Long,BigInteger>> computeEncRowCacheInput(Iterable<List<BigInteger>> dataPartitionsIter,
       HashMap<BigInteger,BigInteger> cache, int rowIndex, boolean limitHitsPerSelector, int maxHitsPerSelector) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
+    List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     long colCounter = 0;
     int elementCounter = 0;
-    for (ArrayList<BigInteger> dataPartitions : dataPartitionsIter)
+    for (List<BigInteger> dataPartitions : dataPartitionsIter)
     {
       logger.debug("elementCounter = " + elementCounter);
 
@@ -284,9 +285,9 @@ public static ArrayList<Tuple2<Long,BigInteger>> computeEncRowCacheInput(Iterabl
    * Emits {@code Tuple2<>}
    *
   */
-  public static ArrayList<Tuple2<Long,BigInteger>> computeEncRow(BytesArrayWritable dataPartitions, Query query, int rowIndex, int colIndex) throws IOException
+  public static List<Tuple2<Long,BigInteger>> computeEncRow(BytesArrayWritable dataPartitions, Query query, int rowIndex, int colIndex) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
+    List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     // Pull the corresponding encrypted row query
     BigInteger rowQuery = query.getQueryElement(rowIndex);
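Aside (not part of the patch): a hedged sketch of the shape computeEncRowBI now has — consuming Iterable<List<BigInteger>> and emitting one (column, value) pair per data partition. The modPow call stands in for Pirk's ModPowAbstraction; names are illustrative:

    import java.math.BigInteger;
    import java.util.ArrayList;
    import java.util.List;

    import scala.Tuple2;

    public class EncRowSketch
    {
      // One (colIndex, encValue) pair per data partition; rowQuery is the
      // encrypted query element for this row, nSquared the Paillier modulus^2.
      static List<Tuple2<Long,BigInteger>> encRow(Iterable<List<BigInteger>> dataPartitionsIter,
          BigInteger rowQuery, BigInteger nSquared)
      {
        List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
        long colCounter = 0;
        for (List<BigInteger> dataPartitions : dataPartitionsIter)
        {
          for (BigInteger part : dataPartitions)
          {
            returnPairs.add(new Tuple2<>(colCounter, rowQuery.modPow(part, nSquared)));
            ++colCounter;
          }
        }
        return returnPairs;
      }
    }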
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/common/HashSelectorAndPartitionData.java b/src/main/java/org/apache/pirk/responder/wideskies/common/HashSelectorAndPartitionData.java
index 7a652df7..61169f28 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/common/HashSelectorAndPartitionData.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/common/HashSelectorAndPartitionData.java
@@ -19,7 +19,7 @@
 package org.apache.pirk.responder.wideskies.common;
 
 import java.math.BigInteger;
-import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.io.MapWritable;
 import org.apache.pirk.inputformat.hadoop.BytesArrayWritable;
@@ -42,11 +42,9 @@ public class HashSelectorAndPartitionData
 {
   private static final Logger logger = LoggerFactory.getLogger(HashSelectorAndPartitionData.class);
 
-  public static Tuple2<Integer,ArrayList<BigInteger>> hashSelectorAndFormPartitionsBigInteger(MapWritable dataElement, QuerySchema qSchema, DataSchema dSchema,
+  public static Tuple2<Integer,List<BigInteger>> hashSelectorAndFormPartitionsBigInteger(MapWritable dataElement, QuerySchema qSchema, DataSchema dSchema,
       QueryInfo queryInfo) throws Exception
   {
-    Tuple2<Integer,ArrayList<BigInteger>> returnTuple;
-
     // Pull the selector based on the query type
     String selector = QueryUtils.getSelectorByQueryType(dataElement, qSchema, dSchema);
     int hash = KeyedHash.hash(queryInfo.getHashKey(), queryInfo.getHashBitSize(), selector);
@@ -54,18 +52,14 @@
 
     // Extract the data bits based on the query type
     // Partition by the given partitionSize
-    ArrayList<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(dataElement, qSchema, dSchema, queryInfo.getEmbedSelector());
-
-    returnTuple = new Tuple2<>(hash, hitValPartitions);
+    List<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(dataElement, qSchema, dSchema, queryInfo.getEmbedSelector());
 
-    return returnTuple;
+    return new Tuple2<>(hash, hitValPartitions);
   }
 
   public static Tuple2<Integer,BytesArrayWritable> hashSelectorAndFormPartitions(MapWritable dataElement, QuerySchema qSchema, DataSchema dSchema,
       QueryInfo queryInfo) throws Exception
   {
-    Tuple2<Integer,BytesArrayWritable> returnTuple;
-
     // Pull the selector based on the query type
     String selector = QueryUtils.getSelectorByQueryType(dataElement, qSchema, dSchema);
     int hash = KeyedHash.hash(queryInfo.getHashKey(), queryInfo.getHashBitSize(), selector);
@@ -73,18 +67,14 @@
 
     // Extract the data bits based on the query type
     // Partition by the given partitionSize
-    ArrayList<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(dataElement, qSchema, dSchema, queryInfo.getEmbedSelector());
+    List<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(dataElement, qSchema, dSchema, queryInfo.getEmbedSelector());
     BytesArrayWritable bAW = new BytesArrayWritable(hitValPartitions);
 
-    returnTuple = new Tuple2<>(hash, bAW);
-
-    return returnTuple;
+    return new Tuple2<>(hash, bAW);
   }
 
-  public static Tuple2<Integer,ArrayList<BigInteger>> hashSelectorAndFormPartitions(JSONObject json, QueryInfo queryInfo, QuerySchema qSchema) throws Exception
+  public static Tuple2<Integer,List<BigInteger>> hashSelectorAndFormPartitions(JSONObject json, QueryInfo queryInfo, QuerySchema qSchema) throws Exception
   {
-    Tuple2<Integer,ArrayList<BigInteger>> returnTuple;
-
     // Pull the selector based on the query type
     String selector = QueryUtils.getSelectorByQueryTypeJSON(qSchema, json);
     int hash = KeyedHash.hash(queryInfo.getHashKey(), queryInfo.getHashBitSize(), selector);
@@ -92,10 +82,8 @@
 
     // Extract the data bits based on the query type
     // Partition by the given partitionSize
-    ArrayList<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(qSchema, json, queryInfo.getEmbedSelector());
-
-    returnTuple = new Tuple2<>(hash, hitValPartitions);
+    List<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(qSchema, json, queryInfo.getEmbedSelector());
 
-    return returnTuple;
+    return new Tuple2<>(hash, hitValPartitions);
   }
 }
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/RowCalcReducer.java b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/RowCalcReducer.java
index 3b05a52b..75b0529e 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/RowCalcReducer.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/RowCalcReducer.java
@@ -20,7 +20,7 @@
 
 import java.io.IOException;
 import java.math.BigInteger;
-import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.IntWritable;
@@ -121,7 +121,7 @@ public void reduce(IntWritable rowIndex, Iterable<BytesArrayWritable> dataElemen
     }
 
     // Compute the encrypted row elements for a query from extracted data partitions
-    ArrayList<Tuple2<Long,BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRow(dataElementPartitions, query, rowIndex.get(), limitHitsPerSelector,
+    List<Tuple2<Long,BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRow(dataElementPartitions, query, rowIndex.get(), limitHitsPerSelector,
         maxHitsPerSelector, useLocalCache);
 
     // Emit
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
index 2de4a2ad..3124a3f0 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.math.BigInteger;
 import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -331,10 +332,10 @@ public void performQuery(JavaRDD<MapWritable> inputRDD) throws PIRException
 
     // Extract the selectors for each dataElement based upon the query type
    // and perform a keyed hash of the selectors
-    JavaPairRDD<Integer,ArrayList<BigInteger>> selectorHashToDocRDD = inputRDD.mapToPair(new HashSelectorsAndPartitionData(accum, bVars));
+    JavaPairRDD<Integer,List<BigInteger>> selectorHashToDocRDD = inputRDD.mapToPair(new HashSelectorsAndPartitionData(accum, bVars));
 
     // Group by hashed selector (row) -- can combine with the line above, separating for testing and benchmarking...
-    JavaPairRDD<Integer,Iterable<ArrayList<BigInteger>>> selectorGroupRDD = selectorHashToDocRDD.groupByKey();
+    JavaPairRDD<Integer,Iterable<List<BigInteger>>> selectorGroupRDD = selectorHashToDocRDD.groupByKey();
 
     // Calculate the encrypted row values for each row, emit <colNum, colVal> for each row
     JavaPairRDD<Long,BigInteger> encRowRDD;
@@ -347,7 +348,7 @@ public void performQuery(JavaRDD<MapWritable> inputRDD) throws PIRException
       JavaPairRDD<Integer,Iterable<Tuple2<BigInteger,BigInteger>>> expCalculations = ComputeExpLookupTable.computeExpTable(sc, fs, bVars, query, queryInput,
           outputDirExp, useModExpJoin);
 
-      JavaPairRDD<Integer,Tuple2<Iterable<Tuple2<BigInteger,BigInteger>>,Iterable<ArrayList<BigInteger>>>> encMapDataJoin = expCalculations.join(selectorGroupRDD);
+      JavaPairRDD<Integer,Tuple2<Iterable<Tuple2<BigInteger,BigInteger>>,Iterable<List<BigInteger>>>> encMapDataJoin = expCalculations.join(selectorGroupRDD);
 
       // Calculate the encrypted row values for each row, emit <colNum, colVal> for each row
       encRowRDD = encMapDataJoin.flatMapToPair(new EncRowCalcPrecomputedCache(accum, bVars));
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
index 0e860dd1..04f8cc2d 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.math.BigInteger;
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -41,7 +42,7 @@
  * Emits {@code }
 *
 */
-public class EncRowCalc implements PairFlatMapFunction<Tuple2<Integer,Iterable<ArrayList<BigInteger>>>,Long,BigInteger>
+public class EncRowCalc implements PairFlatMapFunction<Tuple2<Integer,Iterable<List<BigInteger>>>,Long,BigInteger>
 {
   private static final long serialVersionUID = 1L;
 
@@ -73,9 +74,9 @@ public EncRowCalc(Accumulators accumIn, BroadcastVars bvIn)
   }
 
   @Override
-  public Iterable<Tuple2<Long,BigInteger>> call(Tuple2<Integer,Iterable<ArrayList<BigInteger>>> hashDocTuple) throws Exception
+  public Iterable<Tuple2<Long,BigInteger>> call(Tuple2<Integer,Iterable<List<BigInteger>>> hashDocTuple) throws Exception
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
+    List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     int rowIndex = hashDocTuple._1;
     accum.incNumHashes(1);
@@ -98,7 +99,7 @@
 
     // Compute the encrypted row elements for a query from extracted data partitions
-    ArrayList<Tuple2<Long,BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRowBI(hashDocTuple._2, query, rowIndex, limitHitsPerSelector,
+    List<Tuple2<Long,BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRowBI(hashDocTuple._2, query, rowIndex, limitHitsPerSelector,
         maxHitsPerSelector, useLocalCache);
 
     // long endTime = System.currentTimeMillis();
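Aside (not part of the patch): the mapToPair/groupByKey pipeline that these Spark hunks retype can be exercised locally. A hedged sketch, with strings standing in for data elements and (length % 4) standing in for the keyed hash:

    import java.util.Arrays;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    import scala.Tuple2;

    public class GroupByRowSketch
    {
      public static void main(String[] args)
      {
        SparkConf conf = new SparkConf().setAppName("groupByRowSketch").setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaRDD<String> inputRDD = sc.parallelize(Arrays.asList("a.example.com", "bb.example.org", "c.example.com"));

        // Stand-in for HashSelectorsAndPartitionData: key each record by a "row hash"
        JavaPairRDD<Integer,String> selectorHashToDocRDD = inputRDD.mapToPair(s -> new Tuple2<>(s.length() % 4, s));

        // Stand-in for the groupByKey in performQuery: gather all records for a row
        JavaPairRDD<Integer,Iterable<String>> selectorGroupRDD = selectorHashToDocRDD.groupByKey();

        for (Tuple2<Integer,Iterable<String>> row : selectorGroupRDD.collect())
        {
          System.out.println("row " + row._1 + " -> " + row._2);
        }
        sc.stop();
      }
    }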
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java
index c7610f8d..038287b1 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java
@@ -21,6 +21,7 @@
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 
 import org.apache.pirk.query.wideskies.Query;
 import org.apache.pirk.responder.wideskies.common.ComputeEncryptedRow;
@@ -34,7 +35,7 @@
  * Functionality for computing the encrypted rows using a pre-computed, passed in modular exponentiation lookup table
 */
 public class EncRowCalcPrecomputedCache implements
-    PairFlatMapFunction<Tuple2<Integer,Tuple2<Iterable<Tuple2<BigInteger,BigInteger>>,Iterable<ArrayList<BigInteger>>>>,Long,BigInteger>
+    PairFlatMapFunction<Tuple2<Integer,Tuple2<Iterable<Tuple2<BigInteger,BigInteger>>,Iterable<List<BigInteger>>>>,Long,BigInteger>
 {
   private static final long serialVersionUID = 1L;
 
@@ -64,10 +65,10 @@ public EncRowCalcPrecomputedCache(Accumulators accumIn, BroadcastVars bvIn)
   }
 
   @Override
-  public Iterable<Tuple2<Long,BigInteger>> call(Tuple2<Integer,Tuple2<Iterable<Tuple2<BigInteger,BigInteger>>,Iterable<ArrayList<BigInteger>>>> hashDocTuple)
+  public Iterable<Tuple2<Long,BigInteger>> call(Tuple2<Integer,Tuple2<Iterable<Tuple2<BigInteger,BigInteger>>,Iterable<List<BigInteger>>>> hashDocTuple)
       throws Exception
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
+    List<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     int rowIndex = hashDocTuple._1;
     accum.incNumHashes(1);
@@ -80,13 +81,13 @@
 
-    Iterable<ArrayList<BigInteger>> dataPartitions = hashDocTuple._2._2;
+    Iterable<List<BigInteger>> dataPartitions = hashDocTuple._2._2;
 
     // logger.debug("Encrypting row = " + rowIndex);
     // long startTime = System.currentTimeMillis();
 
     // Compute the encrypted row elements for a query from extracted data partitions
-    ArrayList<Tuple2<Long,BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRowCacheInput(dataPartitions, expTable, rowIndex, limitHitsPerSelector,
+    List<Tuple2<Long,BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRowCacheInput(dataPartitions, expTable, rowIndex, limitHitsPerSelector,
         maxHitsPerSelector);
 
     // long endTime = System.currentTimeMillis();
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java
index 6e9c715b..f426aad5 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java
@@ -20,6 +20,7 @@
 
 import java.math.BigInteger;
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.io.MapWritable;
 import org.apache.pirk.query.wideskies.QueryInfo;
@@ -37,7 +38,7 @@
  * output {@code }
 *
 */
-public class HashSelectorsAndPartitionData implements PairFunction<MapWritable,Integer,ArrayList<BigInteger>>
+public class HashSelectorsAndPartitionData implements PairFunction<MapWritable,Integer,List<BigInteger>>
 {
   private static final long serialVersionUID = 1L;
 
@@ -57,13 +58,9 @@ public HashSelectorsAndPartitionData(Accumulators accumIn, BroadcastVars bvIn)
   }
 
   @Override
-  public Tuple2<Integer,ArrayList<BigInteger>> call(MapWritable doc) throws Exception
+  public Tuple2<Integer,List<BigInteger>> call(MapWritable doc) throws Exception
   {
-    Tuple2<Integer,ArrayList<BigInteger>> returnTuple;
-
     // Extract the selector, compute the hash, and partition the data element according to query type
-    returnTuple = HashSelectorAndPartitionData.hashSelectorAndFormPartitionsBigInteger(doc, qSchema, dSchema, queryInfo);
-
-    return returnTuple;
+    return HashSelectorAndPartitionData.hashSelectorAndFormPartitionsBigInteger(doc, qSchema, dSchema, queryInfo);
   }
 }
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/standalone/Responder.java b/src/main/java/org/apache/pirk/responder/wideskies/standalone/Responder.java
index 7883fb81..e4c3c6f7 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/standalone/Responder.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/standalone/Responder.java
@@ -23,6 +23,7 @@
 import java.io.IOException;
 import java.math.BigInteger;
 import java.util.ArrayList;
+import java.util.List;
 import java.util.TreeMap;
 
 import org.apache.pirk.encryption.ModPowAbstraction;
@@ -170,7 +171,7 @@ public void addDataElement(String selector, JSONObject jsonData) throws Exceptio
   {
     // Extract the data bits based on the query type
     // Partition by the given partitionSize
-    ArrayList<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(qSchema, jsonData, queryInfo.getEmbedSelector());
+    List<BigInteger> hitValPartitions = QueryUtils.partitionDataElement(qSchema, jsonData, queryInfo.getEmbedSelector());
 
     // Pull the necessary elements
     int rowIndex = KeyedHash.hash(queryInfo.getHashKey(), queryInfo.getHashBitSize(), selector);
diff --git a/src/main/java/org/apache/pirk/schema/data/partitioner/PrimitiveTypePartitioner.java b/src/main/java/org/apache/pirk/schema/data/partitioner/PrimitiveTypePartitioner.java
index f767a5a5..75594412 100644
--- a/src/main/java/org/apache/pirk/schema/data/partitioner/PrimitiveTypePartitioner.java
+++ b/src/main/java/org/apache/pirk/schema/data/partitioner/PrimitiveTypePartitioner.java
@@ -48,8 +48,7 @@ public class PrimitiveTypePartitioner implements DataPartitioner
   public static final String STRING = "string";
 
   /**
-   * Splits the given BigInteger into partitions given by the partitionSize
-   *
+   * Splits the given BigInteger into partitions given by the partitionSize.
    */
   public static List<BigInteger> partitionBits(BigInteger value, int partitionSize, BigInteger mask) throws PIRException
   {
@@ -90,12 +89,20 @@ public static List<BigInteger> partitionBits(BigInteger value, int partitionSize
   }
 
   /**
-   * Method to form a BigInteger bit mask for the given partitionSize
-   *
+   * Returns a BigInteger bit mask for the given partitionSize.
   */
   public static BigInteger formBitMask(int partitionSize)
   {
-    return BigInteger.valueOf(2).pow(partitionSize).subtract(BigInteger.ONE);
+    BigInteger mask;
+    if (partitionSize < 32)
+    {
+      mask = BigInteger.valueOf((1 << partitionSize) - 1);
+    }
+    else
+    {
+      mask = BigInteger.valueOf(2).pow(partitionSize).subtract(BigInteger.ONE);
+    }
+    return mask;
   }
 
   /**
@@ -229,13 +236,12 @@ private byte[] partsToBytes(List<BigInteger> parts, int partsIndex, String type)
   }
 
   /**
-   *
-   * Partitions an object to an ArrayList of BigInteger values, currently represents an 8-bit partitioning
+   * Partitions an object to a List of BigInteger values, currently represents an 8-bit partitioning
   */
   @Override
-  public ArrayList<BigInteger> toPartitions(Object obj, String type) throws PIRException
+  public List<BigInteger> toPartitions(Object obj, String type) throws PIRException
   {
-    ArrayList<BigInteger> parts = new ArrayList<>();
+    List<BigInteger> parts = new ArrayList<>();
 
     byte[] bytes = new byte[0];
 
@@ -325,9 +331,9 @@ public List<BigInteger> getPaddedPartitions(String type) throws PIRException
    * Create partitions for an array of the same type of elements - used when a data value field is an array and we wish to encode these into the return value
   */
   @Override
-  public ArrayList<BigInteger> arrayToPartitions(List<?> elementList, String type) throws PIRException
+  public List<BigInteger> arrayToPartitions(List<?> elementList, String type) throws PIRException
   {
-    ArrayList<BigInteger> parts = new ArrayList<>();
+    List<BigInteger> parts = new ArrayList<>();
 
     int numArrayElementsToReturn = SystemConfiguration.getIntProperty("pir.numReturnArrayElements", 1);
     for (int i = 0; i < numArrayElementsToReturn; ++i)
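Aside (not part of the patch): the new formBitMask fast path relies on int arithmetic for partitionSize < 32; note that at partitionSize == 31 the expression (1 << 31) - 1 wraps to Integer.MAX_VALUE, which is exactly the 31-bit mask. A standalone sketch of the same logic:

    import java.math.BigInteger;

    public class BitMaskSketch
    {
      // For partitionSize < 32 the mask fits in an int: (1 << partitionSize) - 1.
      // At partitionSize == 31, 1 << 31 is Integer.MIN_VALUE and subtracting 1
      // wraps to Integer.MAX_VALUE (0x7fffffff) -- exactly the 31-bit mask.
      static BigInteger formBitMask(int partitionSize)
      {
        if (partitionSize < 32)
        {
          return BigInteger.valueOf((1 << partitionSize) - 1);
        }
        return BigInteger.valueOf(2).pow(partitionSize).subtract(BigInteger.ONE);
      }

      public static void main(String[] args)
      {
        System.out.println(formBitMask(4).toString(2));   // 1111
        System.out.println(formBitMask(31).toString(16)); // 7fffffff
        System.out.println(formBitMask(63).toString(16)); // 7fffffffffffffff
      }
    }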
diff --git a/src/main/java/org/apache/pirk/test/distributed/DistributedTestDriver.java b/src/main/java/org/apache/pirk/test/distributed/DistributedTestDriver.java
index 496840c5..ee37e630 100755
--- a/src/main/java/org/apache/pirk/test/distributed/DistributedTestDriver.java
+++ b/src/main/java/org/apache/pirk/test/distributed/DistributedTestDriver.java
@@ -19,6 +19,7 @@
 package org.apache.pirk.test.distributed;
 
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -69,7 +70,7 @@ public static void main(String[] args) throws Exception
     logger.info("jarFile = " + jarFile);
     SystemConfiguration.setProperty("jarFile", jarFile);
 
-    ArrayList<JSONObject> dataElements = initialize(fs);
+    List<JSONObject> dataElements = initialize(fs);
 
     // Pull off the properties and reset upon completion
     String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
@@ -85,9 +86,9 @@ public static void main(String[] args) throws Exception
   /**
    * Create all inputs
   */
-  public static ArrayList<JSONObject> initialize(FileSystem fs) throws Exception
+  public static List<JSONObject> initialize(FileSystem fs) throws Exception
   {
-    ArrayList<JSONObject> dataElements = Inputs.createPIRJSONInput(fs);
+    List<JSONObject> dataElements = Inputs.createPIRJSONInput(fs);
 
     String localStopListFile = Inputs.createPIRStopList(fs, true);
     SystemConfiguration.setProperty("pir.stopListFile", localStopListFile);
@@ -100,7 +101,7 @@ public static ArrayList initialize(FileSystem fs) throws Exception
   /**
    * Execute Tests
   */
-  public static void test(FileSystem fs, DistributedTestCLI cli, ArrayList<JSONObject> pirDataElements) throws Exception
+  public static void test(FileSystem fs, DistributedTestCLI cli, List<JSONObject> pirDataElements) throws Exception
   {
     if (cli.run("1:J"))
     {
diff --git a/src/main/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java b/src/main/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java
index 0556ead1..58f835c4 100644
--- a/src/main/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java
+++ b/src/main/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java
@@ -58,7 +58,7 @@ public class DistTestSuite
 
   // This method also tests all non-query specific configuration options/properties
   // for the MapReduce version of PIR
-  public static void testJSONInputMR(FileSystem fs, ArrayList<JSONObject> dataElements) throws Exception
+  public static void testJSONInputMR(FileSystem fs, List<JSONObject> dataElements) throws Exception
   {
     logger.info("Starting testJSONInputMR");
 
@@ -151,7 +151,7 @@ public static void testJSONInputMR(FileSystem fs, ArrayList dataElem
     logger.info("Completed testJSONInputMR");
   }
 
-  public static void testESInputMR(FileSystem fs, ArrayList<JSONObject> dataElements) throws Exception
+  public static void testESInputMR(FileSystem fs, List<JSONObject> dataElements) throws Exception
   {
     logger.info("Starting testESInputMR");
 
@@ -190,7 +190,7 @@ public static void testESInputMR(FileSystem fs, ArrayList dataElemen
     logger.info("Completed testESInputMR");
   }
 
-  public static void testJSONInputSpark(FileSystem fs, ArrayList<JSONObject> dataElements) throws Exception
+  public static void testJSONInputSpark(FileSystem fs, List<JSONObject> dataElements) throws Exception
   {
     logger.info("Starting testJSONInputSpark");
 
@@ -282,7 +282,7 @@ public static void testJSONInputSpark(FileSystem fs, ArrayList dataE
     logger.info("Completed testJSONInputSpark");
   }
 
-  public static void testESInputSpark(FileSystem fs, ArrayList<JSONObject> dataElements) throws Exception
+  public static void testESInputSpark(FileSystem fs, List<JSONObject> dataElements) throws Exception
   {
     logger.info("Starting testESInputSpark");
diff --git a/src/main/java/org/apache/pirk/test/utils/BaseTests.java b/src/main/java/org/apache/pirk/test/utils/BaseTests.java
index 26ab2eb5..c1fa1e9c 100644
--- a/src/main/java/org/apache/pirk/test/utils/BaseTests.java
+++ b/src/main/java/org/apache/pirk/test/utils/BaseTests.java
@@ -65,14 +65,14 @@ public static void testDNSHostnameQuery(ArrayList dataElements, int
     testDNSHostnameQuery(dataElements, null, false, false, numThreads, testFalsePositive);
   }
 
-  public static void testDNSHostnameQuery(ArrayList<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads)
+  public static void testDNSHostnameQuery(List<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads)
       throws Exception
   {
     testDNSHostnameQuery(dataElements, fs, isSpark, isDistributed, numThreads, false);
   }
 
   // Query for the watched hostname occurred; ; watched value type: hostname (String)
-  public static void testDNSHostnameQuery(ArrayList<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads,
+  public static void testDNSHostnameQuery(List<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads,
       boolean testFalsePositive) throws Exception
   {
     logger.info("Running testDNSHostnameQuery(): ");
@@ -197,7 +197,7 @@ public static void testDNSIPQuery(ArrayList dataElements, int numThr
   }
 
   // The watched IP address was detected in the response to a query; watched value type: IP address (String)
-  public static void testDNSIPQuery(ArrayList<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads) throws Exception
+  public static void testDNSIPQuery(List<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads) throws Exception
   {
     logger.info("Running testDNSIPQuery(): ");
 
@@ -270,7 +270,7 @@ public static void testDNSNXDOMAINQuery(ArrayList dataElements, int
   }
 
   // A query that returned an nxdomain response was made for the watched hostname; watched value type: hostname (String)
-  public static void testDNSNXDOMAINQuery(ArrayList<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads)
+  public static void testDNSNXDOMAINQuery(List<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads)
       throws Exception
   {
     logger.info("Running testDNSNXDOMAINQuery(): ");
@@ -334,7 +334,7 @@ public static void testSRCIPQuery(ArrayList dataElements, int numThr
   }
 
   // Query for responses from watched srcIPs
-  public static void testSRCIPQuery(ArrayList<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads) throws Exception
+  public static void testSRCIPQuery(List<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads) throws Exception
   {
     logger.info("Running testSRCIPQuery(): ");
 
@@ -406,7 +406,7 @@ public static void testSRCIPQuery(ArrayList dataElements, FileSystem
   }
 
   // Query for responses from watched srcIPs
-  public static void testSRCIPQueryNoFilter(ArrayList<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads)
+  public static void testSRCIPQueryNoFilter(List<JSONObject> dataElements, FileSystem fs, boolean isSpark, boolean isDistributed, int numThreads)
      throws Exception
   {
     logger.info("Running testSRCIPQueryNoFilter(): ");
diff --git a/src/main/java/org/apache/pirk/test/utils/Inputs.java b/src/main/java/org/apache/pirk/test/utils/Inputs.java
index 98aa5dbc..10c13860 100644
--- a/src/main/java/org/apache/pirk/test/utils/Inputs.java
+++ b/src/main/java/org/apache/pirk/test/utils/Inputs.java
@@ -260,13 +260,12 @@ public static ArrayList getRcode3JSONDataElements()
   /**
    * Creates PIR JSON input and writes to hdfs
   */
-  @SuppressWarnings("unchecked")
-  public static ArrayList<JSONObject> createPIRJSONInput(FileSystem fs)
+  public static List<JSONObject> createPIRJSONInput(FileSystem fs)
   {
     String inputJSONFile = SystemConfiguration.getProperty(DistributedTestDriver.JSON_PIR_INPUT_FILE_PROPERTY);
     logger.info("PIR JSON input being created at " + inputJSONFile);
 
-    ArrayList<JSONObject> dataElementsJSON = createJSONDataElements();
+    List<JSONObject> dataElementsJSON = createJSONDataElements();
 
     HDFS.writeFile(dataElementsJSON, fs, inputJSONFile, true);
     logger.info("PIR JSON input successfully created!");
diff --git a/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java b/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java
index a4bf67de..9ed366ff 100644
--- a/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java
+++ b/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java
@@ -22,7 +22,6 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
@@ -57,7 +56,7 @@ public class StandaloneQuery
   String testQuerySchemaName = "testQuerySchema";
 
   // Base method to perform the query
-  public static List<QueryResponseJSON> performStandaloneQuery(ArrayList<JSONObject> dataElements, String queryType, ArrayList<String> selectors,
+  public static List<QueryResponseJSON> performStandaloneQuery(List<JSONObject> dataElements, String queryType, List<String> selectors,
      int numThreads, boolean testFalsePositive) throws IOException, InterruptedException, PIRException
   {
     logger.info("Performing watchlisting: ");
diff --git a/src/main/java/org/apache/pirk/utils/KeyedHash.java b/src/main/java/org/apache/pirk/utils/KeyedHash.java
index 579b196f..665cb3fc 100644
--- a/src/main/java/org/apache/pirk/utils/KeyedHash.java
+++ b/src/main/java/org/apache/pirk/utils/KeyedHash.java
@@ -39,9 +39,7 @@ public class KeyedHash
   */
   public static int hash(String key, int bitSize, String input)
   {
-    String concat = key + input;
-
-    int fullHash = Math.abs(concat.hashCode());
+    int fullHash = (key + input).hashCode();
 
     // Take only the lower bitSize-many bits of the resultant hash
     int bitLimitedHash = fullHash;
@@ -61,10 +59,9 @@ public static int hash(String key, int bitSize, String input, String hashType)
   {
     int bitLimitedHash;
 
-    MessageDigest md;
     try
     {
-      md = MessageDigest.getInstance(hashType);
+      MessageDigest md = MessageDigest.getInstance(hashType);
 
       byte[] array = md.digest(input.getBytes());
       int hashInt = fromByteArray(array);
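Aside (not part of the patch): dropping Math.abs() is safe — and removes a corner case — because the subsequent bit-limiting already yields a non-negative value for bitSize < 32, while Math.abs(Integer.MIN_VALUE) remains negative. A hedged sketch (the mask logic is assumed from the surrounding context):

    public class KeyedHashSketch
    {
      // The low bitSize bits of hashCode() are non-negative for bitSize < 32
      // whether or not the full hash is negative, so Math.abs() adds nothing --
      // and Math.abs(Integer.MIN_VALUE) is still Integer.MIN_VALUE, the one
      // value the old code could not make non-negative.
      static int hash(String key, int bitSize, String input)
      {
        int fullHash = (key + input).hashCode();
        return bitSize < 32 ? fullHash & ((1 << bitSize) - 1) : fullHash;
      }

      public static void main(String[] args)
      {
        System.out.println(Math.abs(Integer.MIN_VALUE));       // -2147483648: abs() can still be negative
        System.out.println(hash("someKey", 12, "a.example.com")); // always in [0, 4095]
      }
    }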
diff --git a/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java b/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
index ccca2d77..5b49081d 100644
--- a/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
+++ b/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
@@ -22,7 +22,6 @@
 import static org.junit.Assert.fail;
 
 import java.math.BigInteger;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
@@ -47,15 +46,23 @@ public void testMask()
   {
     logger.info("Starting testMask: ");
 
-    BigInteger mask = PrimitiveTypePartitioner.formBitMask(4); // 1111
+    assertEquals(0, PrimitiveTypePartitioner.formBitMask(0).intValue());
 
-    assertEquals(mask.intValue(), 15);
+    assertEquals(0b000000000000001, PrimitiveTypePartitioner.formBitMask(1).intValue());
+    assertEquals(0b000000000001111, PrimitiveTypePartitioner.formBitMask(4).intValue());
+    assertEquals(0b000000001111111, PrimitiveTypePartitioner.formBitMask(7).intValue());
+    assertEquals(0b111111111111111, PrimitiveTypePartitioner.formBitMask(15).intValue());
+
+    assertEquals(new BigInteger("FFFFF", 16), PrimitiveTypePartitioner.formBitMask(20));
+    assertEquals(new BigInteger("FFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(32));
+    assertEquals(new BigInteger("3FFFFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(42));
+    assertEquals(new BigInteger("7FFFFFFFFFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(63));
 
     logger.info("Successfully completed testMask");
   }
 
   @Test
-  public void testPartitionBits()
+  public void testPartitionBits() throws PIRException
   {
     logger.info("Starting testPartitionBits: ");
@@ -65,52 +72,29 @@ public void testPartitionBits()
 
     BigInteger mask4 = PrimitiveTypePartitioner.formBitMask(4); // 1111
     BigInteger mask8 = PrimitiveTypePartitioner.formBitMask(8); // 11111111
 
-    try
-    {
-      List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask4);
+    List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask4);
+    assertEquals(2, partitions.size());
+    assertEquals(0b1111, partitions.get(0).intValue());
+    assertEquals(0b0101, partitions.get(1).intValue());
 
-      assertEquals(2, partitions.size());
-      assertEquals(partitions.get(0).intValue(), 15); // 1111
-      assertEquals(partitions.get(1).intValue(), 5); // 0101
+    partitions = PrimitiveTypePartitioner.partitionBits(value2, 4, mask4);
+    assertEquals(3, partitions.size());
+    assertEquals(0b1111, partitions.get(0).intValue());
+    assertEquals(0b0101, partitions.get(1).intValue());
+    assertEquals(0b0011, partitions.get(2).intValue());
 
-    } catch (Exception e)
-    {
-      fail(e.toString());
-    }
+    partitions = PrimitiveTypePartitioner.partitionBits(value, 8, mask8);
+    assertEquals(1, partitions.size());
+    assertEquals(0b11110101, partitions.get(0).intValue());
 
     try
     {
-      List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value2, 4, mask4);
-
-      assertEquals(3, partitions.size());
-      assertEquals(partitions.get(0).intValue(), 15); // 1111
-      assertEquals(partitions.get(1).intValue(), 5); // 0101
-      assertEquals(partitions.get(2).intValue(), 3); // 11
-
-    } catch (Exception e)
-    {
-      fail(e.toString());
-    }
-    try
-    {
-      List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value, 8, mask8);
-
-      assertEquals(1, partitions.size());
-      assertEquals(partitions.get(0).intValue(), 245);
-
-    } catch (Exception e)
-    {
-      fail(e.toString());
-    }
-
-    try
-    {
-      List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask8);
-
+      partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask8);
       fail("BitConversionUtils.partitionBits did not throw error for mismatched partitionSize and mask size");
     } catch (Exception ignore)
-    {}
+    {
+      // Expected.
+    }
 
     logger.info("Successfully completed testPartitionBits");
   }
@@ -138,7 +122,7 @@ public void testPartitions() throws Exception
 
     // Test byte
     byte bTest = Byte.parseByte("10");
-    ArrayList<BigInteger> partsByte = primitivePartitioner.toPartitions(bTest, PrimitiveTypePartitioner.BYTE);
+    List<BigInteger> partsByte = primitivePartitioner.toPartitions(bTest, PrimitiveTypePartitioner.BYTE);
     assertEquals(1, partsByte.size());
     assertEquals(bTest, primitivePartitioner.fromPartitions(partsByte, 0, PrimitiveTypePartitioner.BYTE));
 
@@ -146,7 +130,7 @@ public void testPartitions() throws Exception
     assertEquals(1, partsByte.size());
     assertEquals((byte) 12, primitivePartitioner.fromPartitions(partsByte, 0, PrimitiveTypePartitioner.BYTE));
 
-    ArrayList<BigInteger> partsByteMax = primitivePartitioner.toPartitions(Byte.MAX_VALUE, PrimitiveTypePartitioner.BYTE);
+    List<BigInteger> partsByteMax = primitivePartitioner.toPartitions(Byte.MAX_VALUE, PrimitiveTypePartitioner.BYTE);
     assertEquals(1, partsByteMax.size());
     assertEquals(Byte.MAX_VALUE, primitivePartitioner.fromPartitions(partsByteMax, 0, PrimitiveTypePartitioner.BYTE));
 
@@ -159,7 +143,7 @@ public void testPartitions() throws Exception
 
     // Test short
     short shortTest = Short.valueOf("2456");
-    ArrayList<BigInteger> partsShort = primitivePartitioner.toPartitions(shortTest, PrimitiveTypePartitioner.SHORT);
+    List<BigInteger> partsShort = primitivePartitioner.toPartitions(shortTest, PrimitiveTypePartitioner.SHORT);
     assertEquals(2, partsShort.size());
     assertEquals(shortTest, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT));
 
@@ -171,13 +155,13 @@ public void testPartitions() throws Exception
     assertEquals(2, partsShort.size());
     assertEquals((short) -42, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT));
 
-    ArrayList<BigInteger> partsShortMax = primitivePartitioner.toPartitions(Short.MAX_VALUE, PrimitiveTypePartitioner.SHORT);
+    List<BigInteger> partsShortMax = primitivePartitioner.toPartitions(Short.MAX_VALUE, PrimitiveTypePartitioner.SHORT);
     assertEquals(2, partsShortMax.size());
     assertEquals(Short.MAX_VALUE, primitivePartitioner.fromPartitions(partsShortMax, 0, PrimitiveTypePartitioner.SHORT));
 
     // Test int
     int intTest = Integer.parseInt("-5789");
-    ArrayList<BigInteger> partsInt = primitivePartitioner.toPartitions(intTest, PrimitiveTypePartitioner.INT);
+    List<BigInteger> partsInt = primitivePartitioner.toPartitions(intTest, PrimitiveTypePartitioner.INT);
     assertEquals(4, partsInt.size());
     assertEquals(intTest, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT));
 
@@ -189,23 +173,23 @@ public void testPartitions() throws Exception
     assertEquals(4, partsInt.size());
     assertEquals(1386681237, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT));
 
-    ArrayList<BigInteger> partsIntMax = primitivePartitioner.toPartitions(Integer.MAX_VALUE, PrimitiveTypePartitioner.INT);
+    List<BigInteger> partsIntMax = primitivePartitioner.toPartitions(Integer.MAX_VALUE, PrimitiveTypePartitioner.INT);
     assertEquals(4, partsIntMax.size());
     assertEquals(Integer.MAX_VALUE, primitivePartitioner.fromPartitions(partsIntMax, 0, PrimitiveTypePartitioner.INT));
 
     // Test long
     long longTest = Long.parseLong("56789");
-    ArrayList<BigInteger> partsLong = primitivePartitioner.toPartitions(longTest, PrimitiveTypePartitioner.LONG);
+    List<BigInteger> partsLong = primitivePartitioner.toPartitions(longTest, PrimitiveTypePartitioner.LONG);
     assertEquals(8, partsLong.size());
     assertEquals(longTest, primitivePartitioner.fromPartitions(partsLong, 0, PrimitiveTypePartitioner.LONG));
 
-    ArrayList<BigInteger> partsLongMax = primitivePartitioner.toPartitions(Long.MAX_VALUE, PrimitiveTypePartitioner.LONG);
+    List<BigInteger> partsLongMax = primitivePartitioner.toPartitions(Long.MAX_VALUE, PrimitiveTypePartitioner.LONG);
     assertEquals(8, partsLongMax.size());
     assertEquals(Long.MAX_VALUE, primitivePartitioner.fromPartitions(partsLongMax, 0, PrimitiveTypePartitioner.LONG));
 
     // Test float
     float floatTest = Float.parseFloat("567.77");
-    ArrayList<BigInteger> partsFloat = primitivePartitioner.toPartitions(floatTest, PrimitiveTypePartitioner.FLOAT);
+    List<BigInteger> partsFloat = primitivePartitioner.toPartitions(floatTest, PrimitiveTypePartitioner.FLOAT);
     assertEquals(4, partsFloat.size());
     assertEquals(floatTest, primitivePartitioner.fromPartitions(partsFloat, 0, PrimitiveTypePartitioner.FLOAT));
 
@@ -213,23 +197,23 @@ public void testPartitions() throws Exception
     assertEquals(4, partsFloat.size());
     assertEquals(-99.99f, primitivePartitioner.fromPartitions(partsFloat, 0, PrimitiveTypePartitioner.FLOAT));
 
-    ArrayList<BigInteger> partsFloatMax = primitivePartitioner.toPartitions(Float.MAX_VALUE, PrimitiveTypePartitioner.FLOAT);
+    List<BigInteger> partsFloatMax = primitivePartitioner.toPartitions(Float.MAX_VALUE, PrimitiveTypePartitioner.FLOAT);
     assertEquals(4, partsFloatMax.size());
     assertEquals(Float.MAX_VALUE, primitivePartitioner.fromPartitions(partsFloatMax, 0, PrimitiveTypePartitioner.FLOAT));
 
     // Test double
     double doubleTest = Double.parseDouble("567.77");
-    ArrayList<BigInteger> partsDouble = primitivePartitioner.toPartitions(doubleTest, PrimitiveTypePartitioner.DOUBLE);
+    List<BigInteger> partsDouble = primitivePartitioner.toPartitions(doubleTest, PrimitiveTypePartitioner.DOUBLE);
     assertEquals(8, partsDouble.size());
     assertEquals(doubleTest, primitivePartitioner.fromPartitions(partsDouble, 0, PrimitiveTypePartitioner.DOUBLE));
 
-    ArrayList<BigInteger> partsDoubleMax = primitivePartitioner.toPartitions(Double.MAX_VALUE, PrimitiveTypePartitioner.DOUBLE);
+    List<BigInteger> partsDoubleMax = primitivePartitioner.toPartitions(Double.MAX_VALUE, PrimitiveTypePartitioner.DOUBLE);
     assertEquals(8, partsDoubleMax.size());
     assertEquals(Double.MAX_VALUE, primitivePartitioner.fromPartitions(partsDoubleMax, 0, PrimitiveTypePartitioner.DOUBLE));
 
     // Test char
     char charTest = 'b';
-    ArrayList<BigInteger> partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR);
+    List<BigInteger> partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR);
     assertEquals(2, partsChar.size());
     assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR));
 
@@ -244,7 +228,7 @@ public void testPartitions() throws Exception
     assertEquals(2, partsChar.size());
     assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR));
 
-    ArrayList<BigInteger> partsCharMax = primitivePartitioner.toPartitions(Character.MAX_VALUE, PrimitiveTypePartitioner.CHAR);
+    List<BigInteger> partsCharMax = primitivePartitioner.toPartitions(Character.MAX_VALUE, PrimitiveTypePartitioner.CHAR);
     assertEquals(2, partsCharMax.size());
     assertEquals(Character.MAX_VALUE, primitivePartitioner.fromPartitions(partsCharMax, 0, PrimitiveTypePartitioner.CHAR));
 
@@ -269,7 +253,7 @@ private void testString(String testString) throws Exception
   {
     PrimitiveTypePartitioner ptp = new PrimitiveTypePartitioner();
 
-    ArrayList<BigInteger> partsString = ptp.toPartitions(testString, PrimitiveTypePartitioner.STRING);
+    List<BigInteger> partsString = ptp.toPartitions(testString, PrimitiveTypePartitioner.STRING);
     int numParts = Integer.parseInt(SystemConfiguration.getProperty("pir.stringBits")) / 8;
     assertEquals(numParts, partsString.size());